Mirror of https://github.com/hydralauncher/hydra.git (synced 2026-01-29 13:51:02 +00:00)
fix: merge with ci
.github/workflows/build.yml (vendored) | 13
@@ -10,7 +10,8 @@ jobs:
   build:
     strategy:
       matrix:
-        os: [windows-latest, ubuntu-latest]
+        # os: [windows-latest, ubuntu-latest]
+        os: [windows-latest]

     runs-on: ${{ matrix.os }}

@@ -31,6 +32,16 @@ jobs:
       with:
         python-version: 3.9

+      - name: Install Rust
+        uses: actions-rs/toolchain@v1
+        with:
+          toolchain: stable
+          components: rustfmt
+
+      - name: Build Rust
+        run: cargo build --release
+        working-directory: ./rust_rpc
+
       - name: Install dependencies
         run: pip install -r requirements.txt

.gitignore (vendored) | 4
@@ -9,10 +9,12 @@ out
 .vite
 ludusavi/
 hydra-python-rpc/
-aria2/
 .python-version

 # Sentry Config File
 .env.sentry-build-plugin

 *storybook.log
+
+
+target/
electron-builder.yml
@@ -3,7 +3,6 @@ productName: Hydra
 directories:
   buildResources: build
 extraResources:
-  - aria2
   - ludusavi
   - hydra-python-rpc
   - seeds
@@ -23,6 +22,7 @@ win:
   extraResources:
     - from: binaries/7z.exe
     - from: binaries/7z.dll
+    - from: rust_rpc/target/release/hydra-httpdl.exe
   target:
     - nsis
    - portable
@@ -40,6 +40,7 @@ mac:
   entitlementsInherit: build/entitlements.mac.plist
   extraResources:
     - from: binaries/7zz
+    - from: rust_rpc/target/release/hydra-httpdl
   extendInfo:
     - NSCameraUsageDescription: Application requests access to the device's camera.
     - NSMicrophoneUsageDescription: Application requests access to the device's microphone.
@@ -51,6 +52,7 @@ dmg:
 linux:
   extraResources:
     - from: binaries/7zzs
+    - from: rust_rpc/target/release/hydra-httpdl
   target:
     - AppImage
     - snap
package.json
@@ -21,7 +21,7 @@
     "typecheck:web": "tsc --noEmit -p tsconfig.web.json --composite false",
     "typecheck": "npm run typecheck:node && npm run typecheck:web",
     "start": "electron-vite preview",
-    "dev": "electron-vite dev",
+    "dev": "cargo build --manifest-path=rust_rpc/Cargo.toml && electron-vite dev",
     "build": "npm run typecheck && electron-vite build",
     "postinstall": "electron-builder install-app-deps && node ./scripts/postinstall.cjs",
     "build:unpack": "npm run build && electron-builder --dir",
python_rpc/http_downloader.py
@@ -1,257 +1,92 @@
 import os
-import requests
-import threading
-import time
-import urllib.parse
-import re
-from typing import Dict, Optional, Union
+import subprocess
+import json


 class HttpDownloader:
-    def __init__(self):
-        self.download = None
-        self.thread = None
-        self.pause_event = threading.Event()
-        self.cancel_event = threading.Event()
-        self.download_info = None
-
-    def start_download(self, url: str, save_path: str, header: str, out: str = None, allow_multiple_connections: bool = False):
-        """Start a download with the given parameters"""
-        # Parse header string into dictionary
-        headers = {}
+    def __init__(self, hydra_httpdl_bin: str):
+        self.hydra_exe = hydra_httpdl_bin
+        self.process = None
+        self.last_status = None
+
+    def start_download(self, url: str, save_path: str, header: str = None, out: str = None, allow_multiple_connections: bool = False):
+        cmd = [self.hydra_exe]
+
+        cmd.append(url)
+
+        cmd.extend([
+            "--chunk-size", "10",
+            "--buffer-size", "16",
+            "--log",
+            "--silent"
+        ])
+
         if header:
-            for line in header.split('\n'):
-                if ':' in line:
-                    key, value = line.split(':', 1)
-                    headers[key.strip()] = value.strip()
+            cmd.extend(["--header", header])

-        # Determine output filename
-        if out:
-            filename = out
+        if allow_multiple_connections:
+            cmd.extend(["--connections", "24"])
         else:
-            # Extract filename from URL
-            raw_filename = self._extract_filename_from_url(url)
-            if not raw_filename:
-                filename = 'download'
-            else:
-                filename = raw_filename
+            cmd.extend(["--connections", "1"])

-        # Create full path
-        if not os.path.exists(save_path):
-            os.makedirs(save_path)
-
-        full_path = os.path.join(save_path, filename)
-
-        # Initialize download info
-        self.download_info = {
-            'url': url,
-            'save_path': save_path,
-            'full_path': full_path,
-            'headers': headers,
-            'filename': filename,
-            'folderName': filename,
-            'fileSize': 0,
-            'progress': 0,
-            'downloadSpeed': 0,
-            'status': 'waiting',
-            'bytesDownloaded': 0,
-            'start_time': time.time(),
-            'supports_resume': False
-        }
-
-        # Reset events
-        self.pause_event.clear()
-        self.cancel_event.clear()
-
-        # Start download in a separate thread
-        self.thread = threading.Thread(target=self._download_worker)
-        self.thread.daemon = True
-        self.thread.start()
-
-    def _download_worker(self):
-        """Worker thread that performs the actual download"""
-        url = self.download_info['url']
-        full_path = self.download_info['full_path']
-        headers = self.download_info['headers'].copy()
+        print(f"running hydra-httpdl: {' '.join(cmd)}")

         try:
-            # Start with a HEAD request to get file size and check if server supports range requests
-            head_response = requests.head(url, headers=headers, allow_redirects=True)
-            total_size = int(head_response.headers.get('content-length', 0))
-            self.download_info['fileSize'] = total_size
-
-            # Check if server supports range requests
-            accept_ranges = head_response.headers.get('accept-ranges', '')
-            supports_resume = accept_ranges.lower() == 'bytes' and total_size > 0
-            self.download_info['supports_resume'] = supports_resume
-
-            # Check if we're resuming a download
-            file_exists = os.path.exists(full_path)
-            downloaded = 0
-
-            if file_exists and supports_resume:
-                # Get current file size for resume
-                downloaded = os.path.getsize(full_path)
-
-                # If file is already complete, mark as done
-                if downloaded >= total_size and total_size > 0:
-                    self.download_info['status'] = 'complete'
-                    self.download_info['progress'] = 1.0
-                    self.download_info['bytesDownloaded'] = total_size
-                    return
-
-                # Add range header for resuming
-                if downloaded > 0:
-                    headers['Range'] = f'bytes={downloaded}-'
-                    self.download_info['bytesDownloaded'] = downloaded
-                    self.download_info['progress'] = downloaded / total_size if total_size > 0 else 0
-            elif file_exists:
-                # If server doesn't support resume but file exists, delete and start over
-                os.remove(full_path)
-                downloaded = 0
-
-            # Open the request as a stream
-            self.download_info['status'] = 'active'
-            response = requests.get(url, headers=headers, stream=True, allow_redirects=True)
-            response.raise_for_status()
-
-            # If we didn't get file size from HEAD request, try from GET
-            if total_size == 0:
-                total_size = int(response.headers.get('content-length', 0))
-                if 'content-range' in response.headers:
-                    content_range = response.headers['content-range']
-                    match = re.search(r'bytes \d+-\d+/(\d+)', content_range)
-                    if match:
-                        total_size = int(match.group(1))
-
-                self.download_info['fileSize'] = total_size
-
-            # Setup for tracking speed
-            start_time = time.time()
-            last_update_time = start_time
-            bytes_since_last_update = 0
-
-            # Open file in append mode if resuming, otherwise write mode
-            mode = 'ab' if downloaded > 0 and supports_resume else 'wb'
-            with open(full_path, mode) as f:
-                for chunk in response.iter_content(chunk_size=8192):
-                    # Check if cancelled
-                    if self.cancel_event.is_set():
-                        self.download_info['status'] = 'cancelled'
-                        return
-
-                    # Check if paused
-                    if self.pause_event.is_set():
-                        self.download_info['status'] = 'paused'
-                        # Wait until resumed or cancelled
-                        while self.pause_event.is_set() and not self.cancel_event.is_set():
-                            time.sleep(0.5)
-
-                        # Update status if resumed
-                        if not self.cancel_event.is_set():
-                            self.download_info['status'] = 'active'
-
-                    if chunk:
-                        f.write(chunk)
-                        downloaded += len(chunk)
-                        bytes_since_last_update += len(chunk)
-
-                        # Update progress and speed every 0.5 seconds
-                        current_time = time.time()
-                        if current_time - last_update_time >= 0.5:
-                            elapsed = current_time - last_update_time
-                            speed = bytes_since_last_update / elapsed if elapsed > 0 else 0
-
-                            self.download_info['bytesDownloaded'] = downloaded
-                            self.download_info['progress'] = downloaded / total_size if total_size > 0 else 0
-                            self.download_info['downloadSpeed'] = speed
-
-                            last_update_time = current_time
-                            bytes_since_last_update = 0
-
-            # Download completed
-            self.download_info['status'] = 'complete'
-            self.download_info['progress'] = 1.0
-            self.download_info['bytesDownloaded'] = total_size
-
-        except requests.exceptions.RequestException as e:
-            self.download_info['status'] = 'error'
-            print(f"Download error: {str(e)}")
-
-    def pause_download(self):
-        """Pause the current download"""
-        if self.thread and self.thread.is_alive():
-            self.pause_event.set()
-            self.download_info['status'] = 'pausing'  # Intermediate state until worker confirms
-
-    def resume_download(self):
-        """Resume a paused download"""
-        if self.download_info and self.download_info['status'] == 'paused':
-            self.pause_event.clear()
-            # If thread is no longer alive, restart it
-            if not self.thread or not self.thread.is_alive():
-                self.thread = threading.Thread(target=self._download_worker)
-                self.thread.daemon = True
-                self.thread.start()
-
-    def cancel_download(self):
-        """Cancel the current download and reset the download object"""
-        if self.thread and self.thread.is_alive():
-            self.cancel_event.set()
-            self.pause_event.clear()  # Clear pause if it was set
-
-            # Give the thread a moment to clean up
-            self.thread.join(timeout=2.0)
-
-        if self.download_info:
-            # Attempt to delete the partial file if not resumable
-            if not self.download_info.get('supports_resume', False):
-                try:
-                    if os.path.exists(self.download_info['full_path']):
-                        os.remove(self.download_info['full_path'])
-                except:
-                    pass
-            self.download_info['status'] = 'cancelled'
-
-        self.download_info = None
-
-    def _extract_filename_from_url(self, url: str) -> str:
-        """Extract a clean filename from URL, handling URL encoding and query parameters"""
-        # Parse the URL to get the path
-        parsed_url = urllib.parse.urlparse(url)
-
-        # Extract the path component
-        path = parsed_url.path
-
-        # Get the last part of the path (filename with potential URL encoding)
-        encoded_filename = os.path.basename(path)
-
-        # URL decode the filename
-        decoded_filename = urllib.parse.unquote(encoded_filename)
-
-        # Remove query parameters if present
-        if '?' in decoded_filename:
-            decoded_filename = decoded_filename.split('?')[0]
-
-        # If we get an empty string, use the domain as a fallback
-        if not decoded_filename:
-            return 'download'
-
-        return decoded_filename
-
-    def get_download_status(self) -> Optional[Dict]:
-        """Get the current status of the download"""
-        if not self.download_info:
+            self.process = subprocess.Popen(
+                cmd,
+                cwd=save_path,
+                stdout=subprocess.PIPE,
+                stderr=subprocess.PIPE,
+                universal_newlines=True
+            )
+        except Exception as e:
+            print(f"error running hydra-httpdl: {e}")
+
+    def get_download_status(self):
+        if not self.process:
             return None

-        return {
-            'folderName': self.download_info['filename'],
-            'fileSize': self.download_info['fileSize'],
-            'progress': self.download_info['progress'],
-            'downloadSpeed': self.download_info['downloadSpeed'],
-            'numPeers': 0,  # Not applicable for HTTP
-            'numSeeds': 0,  # Not applicable for HTTP
-            'status': self.download_info['status'],
-            'bytesDownloaded': self.download_info['bytesDownloaded'],
-            'supports_resume': self.download_info.get('supports_resume', False)
-        }
+        try:
+            line = self.process.stdout.readline()
+            if line:
+                status = json.loads(line.strip())
+                self.last_status = status
+            elif self.last_status:
+                status = self.last_status
+            else:
+                return None
+
+            response = {
+                "status": "active",
+                "progress": status["progress"],
+                "downloadSpeed": status["speed_bps"],
+                "numPeers": 0,
+                "numSeeds": 0,
+                "bytesDownloaded": status["downloaded_bytes"],
+                "fileSize": status["total_bytes"],
+                "folderName": status["filename"]
+            }
+
+            if status["progress"] == 1:
+                response["status"] = "complete"
+
+            return response
+
+        except Exception as e:
+            print(f"error getting download status: {e}")
+            return None
+
+    def stop_download(self):
+        if self.process:
+            self.process.terminate()
+            self.process = None
+            self.last_status = None
+
+    def pause_download(self):
+        self.stop_download()
+
+    def cancel_download(self):
+        self.stop_download()
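The rewritten HttpDownloader above is now a thin wrapper around the bundled hydra-httpdl binary: start_download spawns it with --log --silent so it prints one JSON status line per second, and get_download_status reads those lines back into the dict shape the Node side already consumes. A minimal driver sketch; the module name, binary path, URL, and header value are placeholders (in the app the RPC entry point receives the real binary path as sys.argv[6]):

    import time

    from http_downloader import HttpDownloader  # assumed module name

    downloader = HttpDownloader("/path/to/hydra-httpdl")  # placeholder binary path
    downloader.start_download(
        "https://example.com/file.bin",          # placeholder URL
        "/tmp/downloads",
        header="Authorization: Bearer <token>",  # placeholder header value
        allow_multiple_connections=True,         # maps to --connections 24
    )

    while True:
        # readline() inside get_download_status blocks until the next
        # once-per-second JSON line arrives from the child process
        status = downloader.get_download_status()
        if status is None:
            break  # no process running and no cached status
        print(f"{status['progress']:.1%} at {status['downloadSpeed']} B/s")
        if status["status"] == "complete":
            downloader.stop_download()
            break
        time.sleep(1)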
python_rpc/main.py
@@ -13,6 +13,7 @@ http_port = sys.argv[2]
 rpc_password = sys.argv[3]
 start_download_payload = sys.argv[4]
 start_seeding_payload = sys.argv[5]
+hydra_httpdl_bin = sys.argv[6]

 downloads = {}
 # This can be streamed down from Node
@@ -32,7 +33,7 @@ if start_download_payload:
         except Exception as e:
             print("Error starting torrent download", e)
     else:
-        http_downloader = HttpDownloader()
+        http_downloader = HttpDownloader(hydra_httpdl_bin)
         downloads[initial_download['game_id']] = http_downloader
         try:
             http_downloader.start_download(initial_download['url'], initial_download['save_path'], initial_download.get('header'), initial_download.get("out"))
@@ -147,11 +148,11 @@ def action():
             torrent_downloader.start_download(url, data['save_path'])
         else:
             if existing_downloader and isinstance(existing_downloader, HttpDownloader):
-                existing_downloader.start_download(url, data['save_path'], data.get('header'), data.get('out'))
+                existing_downloader.start_download(url, data['save_path'], data.get('header'), data.get('out'), data.get('allow_multiple_connections', False))
             else:
-                http_downloader = HttpDownloader()
+                http_downloader = HttpDownloader(hydra_httpdl_bin)
                 downloads[game_id] = http_downloader
-                http_downloader.start_download(url, data['save_path'], data.get('header'), data.get('out'))
+                http_downloader.start_download(url, data['save_path'], data.get('header'), data.get('out'), data.get('allow_multiple_connections', False))

         downloading_game_id = game_id

requirements.txt
@@ -5,5 +5,3 @@ pywin32; sys_platform == 'win32'
 psutil
 Pillow
 flask
-aria2p
-requests
rust_rpc/Cargo.lock (generated, new file) | 2040
File diff suppressed because it is too large.
rust_rpc/Cargo.toml (new file) | 25
@@ -0,0 +1,25 @@
+[package]
+name = "hydra-httpdl"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+tokio = { version = "1", features = ["full", "macros", "rt-multi-thread"] }
+reqwest = { version = "0.12.5", features = ["stream"] }
+futures = "0.3"
+bytes = "1.4"
+indicatif = "0.17"
+anyhow = "1.0"
+async-trait = "0.1"
+tokio-util = { version = "0.7", features = ["io"] }
+clap = { version = "4.4", features = ["derive"] }
+urlencoding = "2.1"
+serde_json = "1.0"
+bitvec = "1.0"
+sha2 = "0.10"
+[profile.release]
+opt-level = 3
+lto = "fat"
+codegen-units = 1
+panic = "abort"
+strip = true
rust_rpc/src/main.rs (new file) | 943
@@ -0,0 +1,943 @@
+use anyhow::Result;
+use bitvec::prelude::*;
+use clap::Parser;
+use futures::stream::{FuturesUnordered, StreamExt};
+use indicatif::{ProgressBar, ProgressStyle};
+use reqwest::{Client, StatusCode, Url};
+use serde_json::json;
+use sha2::{Digest, Sha256};
+use std::fs::{File, OpenOptions};
+use std::io::{BufReader, BufWriter, Read, Seek, SeekFrom, Write};
+use std::path::Path;
+use std::sync::Arc;
+use tokio::sync::Mutex;
+
+const DEFAULT_MAX_RETRIES: usize = 3;
+const RETRY_BACKOFF_MS: u64 = 500;
+const DEFAULT_OUTPUT_FILENAME: &str = "output.bin";
+const DEFAULT_CONNECTIONS: usize = 16;
+const DEFAULT_CHUNK_SIZE_MB: usize = 5;
+const DEFAULT_BUFFER_SIZE_MB: usize = 8;
+const DEFAULT_VERBOSE: bool = false;
+const DEFAULT_SILENT: bool = false;
+const DEFAULT_LOG: bool = false;
+const DEFAULT_FORCE_NEW: bool = false;
+const DEFAULT_RESUME_ONLY: bool = false;
+const HEADER_SIZE: usize = 4096;
+const MAGIC_NUMBER: &[u8; 5] = b"HYDRA";
+const FORMAT_VERSION: u8 = 1;
+const FINALIZE_BUFFER_SIZE: usize = 1024 * 1024;
+
+#[derive(Parser)]
+#[command(name = "hydra-httpdl")]
+#[command(author = "los-broxas")]
+#[command(version = "0.2.0")]
+#[command(about = "high speed and low resource usage http downloader with resume capability", long_about = None)]
+struct CliArgs {
+    /// file url to download
+    #[arg(required = true)]
+    url: String,
+
+    /// output file path (or directory to save with original filename)
+    #[arg(default_value = DEFAULT_OUTPUT_FILENAME)]
+    output: String,
+
+    /// number of concurrent connections for parallel download
+    #[arg(short = 'c', long, default_value_t = DEFAULT_CONNECTIONS)]
+    connections: usize,
+
+    /// chunk size in MB for each connection
+    #[arg(short = 'k', long, default_value_t = DEFAULT_CHUNK_SIZE_MB)]
+    chunk_size: usize,
+
+    /// buffer size in MB for file writing
+    #[arg(short, long, default_value_t = DEFAULT_BUFFER_SIZE_MB)]
+    buffer_size: usize,
+
+    /// show detailed progress information
+    #[arg(short = 'v', long, default_value_t = DEFAULT_VERBOSE)]
+    verbose: bool,
+
+    /// suppress progress bar
+    #[arg(short = 's', long, default_value_t = DEFAULT_SILENT)]
+    silent: bool,
+
+    /// log download statistics in JSON format every second
+    #[arg(short = 'l', long, default_value_t = DEFAULT_LOG)]
+    log: bool,
+
+    /// force new download, ignore existing partial files
+    #[arg(short = 'f', long, default_value_t = DEFAULT_FORCE_NEW)]
+    force_new: bool,
+
+    /// only resume existing download, exit if no partial file exists
+    #[arg(short = 'r', long, default_value_t = DEFAULT_RESUME_ONLY)]
+    resume_only: bool,
+
+    /// HTTP headers to send with request (format: "Key: Value")
+    #[arg(short = 'H', long)]
+    header: Vec<String>,
+}
+
+struct DownloadConfig {
+    url: String,
+    output_path: String,
+    num_connections: usize,
+    chunk_size: usize,
+    buffer_size: usize,
+    verbose: bool,
+    silent: bool,
+    log: bool,
+    force_new: bool,
+    resume_only: bool,
+    headers: Vec<String>,
+}
+
+impl DownloadConfig {
+    fn should_log(&self) -> bool {
+        self.verbose && !self.silent
+    }
+
+    fn should_log_stats(&self) -> bool {
+        self.log
+    }
+}
+
+struct DownloadStats {
+    progress_percent: f64,
+    bytes_downloaded: u64,
+    total_size: u64,
+    speed_bytes_per_sec: f64,
+    eta_seconds: u64,
+    elapsed_seconds: u64,
+}
+
+struct HydraHeader {
+    magic: [u8; 5],            // "HYDRA" identifier
+    version: u8,               // header version
+    file_size: u64,            // file size
+    etag: [u8; 32],            // etag hash
+    url_hash: [u8; 32],        // url hash
+    chunk_size: u32,           // chunk size
+    chunk_count: u32,          // chunk count
+    chunks_bitmap: BitVec<u8>, // chunks bitmap
+}
+
+impl HydraHeader {
+    fn new(file_size: u64, etag: &str, url: &str, chunk_size: u32) -> Self {
+        let chunk_count = ((file_size as f64) / (chunk_size as f64)).ceil() as u32;
+        let chunks_bitmap = bitvec![u8, Lsb0; 0; chunk_count as usize];
+
+        let mut etag_hash = [0u8; 32];
+        let etag_digest = Sha256::digest(etag.as_bytes());
+        etag_hash.copy_from_slice(&etag_digest[..]);
+
+        let mut url_hash = [0u8; 32];
+        let url_digest = Sha256::digest(url.as_bytes());
+        url_hash.copy_from_slice(&url_digest[..]);
+
+        Self {
+            magic: *MAGIC_NUMBER,
+            version: FORMAT_VERSION,
+            file_size,
+            etag: etag_hash,
+            url_hash,
+            chunk_size,
+            chunk_count,
+            chunks_bitmap,
+        }
+    }
+
+    fn write_to_file<W: Write + Seek>(&self, writer: &mut W) -> Result<()> {
+        writer.write_all(&self.magic)?;
+        writer.write_all(&[self.version])?;
+        writer.write_all(&self.file_size.to_le_bytes())?;
+        writer.write_all(&self.etag)?;
+        writer.write_all(&self.url_hash)?;
+        writer.write_all(&self.chunk_size.to_le_bytes())?;
+        writer.write_all(&self.chunk_count.to_le_bytes())?;
+
+        let bitmap_bytes = self.chunks_bitmap.as_raw_slice();
+        writer.write_all(bitmap_bytes)?;
+
+        let header_size = 5 + 1 + 8 + 32 + 32 + 4 + 4 + bitmap_bytes.len();
+        let padding_size = HEADER_SIZE - header_size;
+        let padding = vec![0u8; padding_size];
+        writer.write_all(&padding)?;
+
+        Ok(())
+    }
+
+    fn read_from_file<R: Read + Seek>(reader: &mut R) -> Result<Self> {
+        let mut magic = [0u8; 5];
+        reader.read_exact(&mut magic)?;
+
+        if magic != *MAGIC_NUMBER {
+            anyhow::bail!("Not a valid Hydra download file");
+        }
+
+        let mut version = [0u8; 1];
+        reader.read_exact(&mut version)?;
+
+        if version[0] != FORMAT_VERSION {
+            anyhow::bail!("Incompatible format version");
+        }
+
+        let mut file_size_bytes = [0u8; 8];
+        reader.read_exact(&mut file_size_bytes)?;
+        let file_size = u64::from_le_bytes(file_size_bytes);
+
+        let mut etag = [0u8; 32];
+        reader.read_exact(&mut etag)?;
+
+        let mut url_hash = [0u8; 32];
+        reader.read_exact(&mut url_hash)?;
+
+        let mut chunk_size_bytes = [0u8; 4];
+        reader.read_exact(&mut chunk_size_bytes)?;
+        let chunk_size = u32::from_le_bytes(chunk_size_bytes);
+
+        let mut chunk_count_bytes = [0u8; 4];
+        reader.read_exact(&mut chunk_count_bytes)?;
+        let chunk_count = u32::from_le_bytes(chunk_count_bytes);
+
+        let bitmap_bytes_len = (chunk_count as usize + 7) / 8;
+        let mut bitmap_bytes = vec![0u8; bitmap_bytes_len];
+        reader.read_exact(&mut bitmap_bytes)?;
+
+        let chunks_bitmap = BitVec::<u8, Lsb0>::from_vec(bitmap_bytes);
+
+        reader.seek(SeekFrom::Start(HEADER_SIZE as u64))?;
+
+        Ok(Self {
+            magic,
+            version: version[0],
+            file_size,
+            etag,
+            url_hash,
+            chunk_size,
+            chunk_count,
+            chunks_bitmap,
+        })
+    }
+
+    fn set_chunk_complete(&mut self, chunk_index: usize) -> Result<()> {
+        if chunk_index >= self.chunk_count as usize {
+            anyhow::bail!("Chunk index out of bounds");
+        }
+
+        self.chunks_bitmap.set(chunk_index, true);
+        Ok(())
+    }
+
+    fn is_chunk_complete(&self, chunk_index: usize) -> bool {
+        if chunk_index >= self.chunk_count as usize {
+            return false;
+        }
+
+        self.chunks_bitmap[chunk_index]
+    }
+
+    fn get_incomplete_chunks(&self) -> Vec<(u64, u64)> {
+        let incomplete_count = self.chunk_count as usize - self.chunks_bitmap.count_ones();
+        let mut chunks = Vec::with_capacity(incomplete_count);
+        let chunk_size = self.chunk_size as u64;
+
+        for i in 0..self.chunk_count as usize {
+            if !self.is_chunk_complete(i) {
+                let start = i as u64 * chunk_size;
+                let end = std::cmp::min((i as u64 + 1) * chunk_size - 1, self.file_size - 1);
+                chunks.push((start, end));
+            }
+        }
+
+        chunks
+    }
+
+    fn is_download_complete(&self) -> bool {
+        self.chunks_bitmap.count_ones() == self.chunk_count as usize
+    }
+}
+
+struct ProgressTracker {
+    bar: Option<ProgressBar>,
+}
+
+impl ProgressTracker {
+    fn new(file_size: u64, silent: bool, enable_stats: bool) -> Result<Self> {
+        let bar = if !silent || enable_stats {
+            let pb = ProgressBar::new(file_size);
+            pb.set_style(
+                ProgressStyle::default_bar()
+                    .template("[{elapsed_precise}] [{bar:40.cyan/blue}] {bytes}/{total_bytes} ({bytes_per_sec}, {eta})")?
+            );
+            if silent {
+                pb.set_draw_target(indicatif::ProgressDrawTarget::hidden());
+            }
+            Some(pb)
+        } else {
+            None
+        };
+
+        Ok(Self { bar })
+    }
+
+    fn increment(&self, amount: u64) {
+        if let Some(pb) = &self.bar {
+            pb.inc(amount);
+        }
+    }
+
+    fn finish(&self) {
+        if let Some(pb) = &self.bar {
+            pb.finish_with_message("Download complete");
+        }
+    }
+
+    fn get_stats(&self) -> Option<DownloadStats> {
+        if let Some(pb) = &self.bar {
+            let position = pb.position();
+            let total = pb.length().unwrap_or(1);
+
+            Some(DownloadStats {
+                progress_percent: position as f64 / total as f64,
+                bytes_downloaded: position,
+                total_size: total,
+                speed_bytes_per_sec: pb.per_sec(),
+                eta_seconds: pb.eta().as_secs(),
+                elapsed_seconds: pb.elapsed().as_secs(),
+            })
+        } else {
+            None
+        }
+    }
+}
+
+struct Downloader {
+    client: Client,
+    config: DownloadConfig,
+}
+
+impl Downloader {
+    async fn download(&self) -> Result<()> {
+        let (file_size, filename, etag) = self.get_file_info().await?;
+        let output_path = self.determine_output_path(filename);
+
+        if self.config.should_log() {
+            println!("Detected filename: {}", output_path);
+        }
+
+        let resume_manager = ResumeManager::try_from_file(
+            &output_path,
+            file_size,
+            &etag,
+            &self.config.url,
+            self.config.chunk_size as u32,
+            self.config.force_new,
+            self.config.resume_only,
+        )?;
+
+        let file = self.prepare_output_file(&output_path, file_size)?;
+        let progress = ProgressTracker::new(file_size, self.config.silent, self.config.log)?;
+
+        let chunks = if resume_manager.is_download_complete() {
+            if self.config.should_log() {
+                println!("File is already fully downloaded, finalizing...");
+            }
+            resume_manager.finalize_download()?;
+            return Ok(());
+        } else {
+            let completed_chunks = resume_manager.header.chunks_bitmap.count_ones() as u32;
+            let total_chunks = resume_manager.header.chunk_count;
+
+            if completed_chunks > 0 {
+                if self.config.should_log() {
+                    let percent_done = (completed_chunks as f64 / total_chunks as f64) * 100.0;
+                    println!("Resuming download: {:.1}% already downloaded", percent_done);
+                }
+
+                if let Some(pb) = &progress.bar {
+                    let downloaded = file_size * completed_chunks as u64 / total_chunks as u64;
+                    pb.inc(downloaded);
+                }
+            }
+
+            resume_manager.get_incomplete_chunks()
+        };
+
+        if self.config.should_log() {
+            println!(
+                "Downloading {} chunks of total {}",
+                chunks.len(),
+                resume_manager.header.chunk_count
+            );
+        }
+
+        self.process_chunks_with_resume(
+            chunks,
+            file,
+            file_size,
+            progress,
+            output_path.clone(),
+            resume_manager,
+        )
+        .await?;
+
+        Ok(())
+    }
+
+    fn determine_output_path(&self, filename: Option<String>) -> String {
+        if Path::new(&self.config.output_path)
+            .file_name()
+            .unwrap_or_default()
+            == DEFAULT_OUTPUT_FILENAME
+            && filename.is_some()
+        {
+            filename.unwrap()
+        } else {
+            self.config.output_path.clone()
+        }
+    }
+
+    fn prepare_output_file(&self, path: &str, size: u64) -> Result<Arc<Mutex<BufWriter<File>>>> {
+        let file = if Path::new(path).exists() {
+            OpenOptions::new().read(true).write(true).open(path)?
+        } else {
+            let file = File::create(path)?;
+            file.set_len(HEADER_SIZE as u64 + size)?;
+            file
+        };
+
+        Ok(Arc::new(Mutex::new(BufWriter::with_capacity(
+            self.config.buffer_size,
+            file,
+        ))))
+    }
+
+    async fn process_chunks_with_resume(
+        &self,
+        chunks: Vec<(u64, u64)>,
+        file: Arc<Mutex<BufWriter<File>>>,
+        _file_size: u64,
+        progress: ProgressTracker,
+        real_filename: String,
+        resume_manager: ResumeManager,
+    ) -> Result<()> {
+        let mut tasks = FuturesUnordered::new();
+
+        let log_progress = if self.config.should_log_stats() {
+            let progress_clone = progress.bar.clone();
+            let filename = real_filename.clone();
+
+            let (log_cancel_tx, mut log_cancel_rx) = tokio::sync::oneshot::channel();
+
+            let log_task = tokio::spawn(async move {
+                let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(1));
+                let tracker = ProgressTracker {
+                    bar: progress_clone,
+                };
+
+                loop {
+                    tokio::select! {
+                        _ = interval.tick() => {
+                            if let Some(stats) = tracker.get_stats() {
+                                let json_output = json!({
+                                    "progress": stats.progress_percent,
+                                    "speed_bps": stats.speed_bytes_per_sec,
+                                    "downloaded_bytes": stats.bytes_downloaded,
+                                    "total_bytes": stats.total_size,
+                                    "eta_seconds": stats.eta_seconds,
+                                    "elapsed_seconds": stats.elapsed_seconds,
+                                    "filename": filename
+                                });
+                                println!("{}", json_output);
+                            }
+                        }
+                        _ = &mut log_cancel_rx => {
+                            break;
+                        }
+                    }
+                }
+            });
+            Some((log_task, log_cancel_tx))
+        } else {
+            None
+        };
+
+        let resume_manager = Arc::new(Mutex::new(resume_manager));
+
+        for (start, end) in chunks {
+            let client = self.client.clone();
+            let url = self.config.url.clone();
+            let file_clone = Arc::clone(&file);
+            let pb_clone = progress.bar.clone();
+            let manager_clone = Arc::clone(&resume_manager);
+            let headers = self.config.headers.clone();
+
+            let chunk_size = self.config.chunk_size as u64;
+            let chunk_index = (start / chunk_size) as usize;
+
+            tasks.push(tokio::spawn(async move {
+                let result = Self::download_chunk_with_retry(
+                    client,
+                    url,
+                    start,
+                    end,
+                    file_clone,
+                    pb_clone,
+                    DEFAULT_MAX_RETRIES,
+                    &headers,
+                )
+                .await;
+
+                if result.is_ok() {
+                    let mut manager = manager_clone.lock().await;
+                    manager.set_chunk_complete(chunk_index)?;
+                }
+
+                result
+            }));
+
+            if tasks.len() >= self.config.num_connections {
+                if let Some(result) = tasks.next().await {
+                    result??;
+                }
+            }
+        }
+
+        while let Some(result) = tasks.next().await {
+            result??;
+        }
+
+        {
+            let mut writer = file.lock().await;
+            writer.flush()?;
+        }
+
+        progress.finish();
+
+        if let Some((log_handle, log_cancel_tx)) = log_progress {
+            if self.config.should_log_stats() {
+                let json_output = json!({
+                    "progress": 1.0,
+                    "speed_bps": 0.0,
+                    "downloaded_bytes": _file_size,
+                    "total_bytes": _file_size,
+                    "eta_seconds": 0,
+                    "elapsed_seconds": if let Some(pb) = &progress.bar { pb.elapsed().as_secs() } else { 0 },
+                    "filename": real_filename
+                });
+                println!("{}", json_output);
+            }
+
+            let _ = log_cancel_tx.send(());
+            let _ = log_handle.await;
+        }
+
+        let manager = resume_manager.lock().await;
+        if manager.is_download_complete() {
+            if self.config.should_log() {
+                println!("Download complete, finalizing file...");
+            }
+            manager.finalize_download()?;
+        }
+
+        Ok(())
+    }
+
+    async fn download_chunk_with_retry(
+        client: Client,
+        url: String,
+        start: u64,
+        end: u64,
+        file: Arc<Mutex<BufWriter<File>>>,
+        progress_bar: Option<ProgressBar>,
+        max_retries: usize,
+        headers: &[String],
+    ) -> Result<()> {
+        let mut retries = 0;
+        loop {
+            match Self::download_chunk(
+                client.clone(),
+                url.clone(),
+                start,
+                end,
+                file.clone(),
+                progress_bar.clone(),
+                headers,
+            )
+            .await
+            {
+                Ok(_) => return Ok(()),
+                Err(e) => {
+                    retries += 1;
+                    if retries >= max_retries {
+                        return Err(e);
+                    }
+                    tokio::time::sleep(tokio::time::Duration::from_millis(
+                        RETRY_BACKOFF_MS * (2_u64.pow(retries as u32 - 1)),
+                    ))
+                    .await;
+                }
+            }
+        }
+    }
+
+    async fn download_chunk(
+        client: Client,
+        url: String,
+        start: u64,
+        end: u64,
+        file: Arc<Mutex<BufWriter<File>>>,
+        progress_bar: Option<ProgressBar>,
+        headers: &[String],
+    ) -> Result<()> {
+        let mut req = client
+            .get(&url)
+            .header("Range", format!("bytes={}-{}", start, end));
+
+        for header in headers {
+            if let Some(idx) = header.find(':') {
+                let (name, value) = header.split_at(idx);
+                let value = value[1..].trim();
+                req = req.header(name.trim(), value);
+            }
+        }
+
+        let resp = req.send().await?;
+
+        if resp.status() != StatusCode::PARTIAL_CONTENT && resp.status() != StatusCode::OK {
+            anyhow::bail!("Server does not support Range requests");
+        }
+
+        let mut stream = resp.bytes_stream();
+        let mut position = start;
+        let mut total_bytes = 0;
+        let expected_bytes = end - start + 1;
+
+        while let Some(chunk_result) = stream.next().await {
+            let chunk = chunk_result?;
+            let chunk_size = chunk.len() as u64;
+
+            total_bytes += chunk_size;
+            if total_bytes > expected_bytes {
+                let remaining = expected_bytes - (total_bytes - chunk_size);
+                let mut writer = file.lock().await;
+                writer.seek(SeekFrom::Start(HEADER_SIZE as u64 + position))?;
+                writer.write_all(&chunk[..remaining as usize])?;
+
+                let tracker = ProgressTracker {
+                    bar: progress_bar.clone(),
+                };
+                tracker.increment(remaining);
+                break;
+            }
+
+            let mut writer = file.lock().await;
+            writer.seek(SeekFrom::Start(HEADER_SIZE as u64 + position))?;
+            writer.write_all(&chunk)?;
+            drop(writer);
+
+            position += chunk_size;
+            let tracker = ProgressTracker {
+                bar: progress_bar.clone(),
+            };
+            tracker.increment(chunk_size);
+        }
+
+        Ok(())
+    }
+
+    async fn get_file_info(&self) -> Result<(u64, Option<String>, String)> {
+        let mut req = self.client.head(&self.config.url);
+
+        for header in &self.config.headers {
+            if let Some(idx) = header.find(':') {
+                let (name, value) = header.split_at(idx);
+                let value = value[1..].trim();
+                req = req.header(name.trim(), value);
+            }
+        }
+
+        let resp = req.send().await?;
+
+        let accepts_ranges = resp
+            .headers()
+            .get("accept-ranges")
+            .and_then(|v| v.to_str().ok())
+            .map(|v| v.contains("bytes"))
+            .unwrap_or(false);
+
+        if !accepts_ranges {
+            let range_check = self
+                .client
+                .get(&self.config.url)
+                .header("Range", "bytes=0-0")
+                .send()
+                .await?;
+
+            if range_check.status() != StatusCode::PARTIAL_CONTENT {
+                anyhow::bail!(
+                    "Server does not support Range requests, cannot continue with parallel download"
+                );
+            }
+        }
+
+        let file_size = if let Some(content_length) = resp.headers().get("content-length") {
+            content_length.to_str()?.parse()?
+        } else {
+            anyhow::bail!("Could not determine file size")
+        };
+
+        let etag = if let Some(etag_header) = resp.headers().get("etag") {
+            etag_header.to_str()?.to_string()
+        } else {
+            format!(
+                "no-etag-{}",
+                std::time::SystemTime::now()
+                    .duration_since(std::time::UNIX_EPOCH)
+                    .unwrap()
+                    .as_secs()
+            )
+        };
+
+        let filename = self.extract_filename_from_response(&resp);
+
+        Ok((file_size, filename, etag))
+    }
+
+    fn extract_filename_from_response(&self, resp: &reqwest::Response) -> Option<String> {
+        if let Some(disposition) = resp.headers().get("content-disposition") {
+            if let Ok(disposition_str) = disposition.to_str() {
+                if let Some(filename) = Self::parse_content_disposition(disposition_str) {
+                    return Some(filename);
+                }
+            }
+        }
+
+        Self::extract_filename_from_url(&self.config.url)
+    }
+
+    fn parse_content_disposition(disposition: &str) -> Option<String> {
+        if let Some(idx) = disposition.find("filename=") {
+            let start = idx + 9;
+            let mut end = disposition.len();
+
+            if disposition.as_bytes().get(start) == Some(&b'"') {
+                let quoted_name = &disposition[start + 1..];
+                if let Some(quote_end) = quoted_name.find('"') {
+                    return Some(quoted_name[..quote_end].to_string());
+                }
+            } else {
+                if let Some(semicolon) = disposition[start..].find(';') {
+                    end = start + semicolon;
+                }
+                return Some(disposition[start..end].to_string());
+            }
+        }
+        None
+    }
+
+    fn extract_filename_from_url(url: &str) -> Option<String> {
+        if let Ok(parsed_url) = Url::parse(url) {
+            let path = parsed_url.path();
+            if let Some(path_filename) = Path::new(path).file_name() {
+                if let Some(filename_str) = path_filename.to_str() {
+                    if !filename_str.is_empty() {
+                        if let Ok(decoded) = urlencoding::decode(filename_str) {
+                            return Some(decoded.to_string());
+                        }
+                    }
+                }
+            }
+        }
+        None
+    }
+}
+
+struct ResumeManager {
+    header: HydraHeader,
+    file_path: String,
+}
+
+impl ResumeManager {
+    fn try_from_file(
+        path: &str,
+        file_size: u64,
+        etag: &str,
+        url: &str,
+        chunk_size: u32,
+        force_new: bool,
+        resume_only: bool,
+    ) -> Result<Self> {
+        if force_new {
+            if Path::new(path).exists() {
+                std::fs::remove_file(path)?;
+            }
+
+            return Self::create_new_file(path, file_size, etag, url, chunk_size);
+        }
+
+        if let Ok(file) = File::open(path) {
+            let mut reader = BufReader::new(file);
+            match HydraHeader::read_from_file(&mut reader) {
+                Ok(header) => {
+                    let current_url_hash = Sha256::digest(url.as_bytes());
+                    let current_etag_hash = Sha256::digest(etag.as_bytes());
+
+                    let url_matches = header.url_hash == current_url_hash.as_slice();
+                    let etag_matches = header.etag == current_etag_hash.as_slice();
+                    let size_matches = header.file_size == file_size;
+
+                    if url_matches && etag_matches && size_matches {
+                        return Ok(Self {
+                            header,
+                            file_path: path.to_string(),
+                        });
+                    }
+
+                    if resume_only {
+                        anyhow::bail!(
+                            "Existing file is not compatible and resume_only option is active"
+                        );
+                    }
+
+                    std::fs::remove_file(path)?;
+                }
+                Err(e) => {
+                    if resume_only {
+                        return Err(anyhow::anyhow!("Could not read file to resume: {}", e));
+                    }
+
+                    std::fs::remove_file(path)?;
+                }
+            }
+        } else if resume_only {
+            anyhow::bail!("File not found and resume_only option is active");
+        }
+
+        Self::create_new_file(path, file_size, etag, url, chunk_size)
+    }
+
+    fn create_new_file(
+        path: &str,
+        file_size: u64,
+        etag: &str,
+        url: &str,
+        chunk_size: u32,
+    ) -> Result<Self> {
+        let header = HydraHeader::new(file_size, etag, url, chunk_size);
+        let file = File::create(path)?;
+        file.set_len(HEADER_SIZE as u64 + file_size)?;
+
+        let mut writer = BufWriter::new(file);
+        header.write_to_file(&mut writer)?;
+        writer.flush()?;
+
+        Ok(Self {
+            header,
+            file_path: path.to_string(),
+        })
+    }
+
+    fn get_incomplete_chunks(&self) -> Vec<(u64, u64)> {
+        self.header.get_incomplete_chunks()
+    }
+
+    fn set_chunk_complete(&mut self, chunk_index: usize) -> Result<()> {
+        self.header.set_chunk_complete(chunk_index)?;
+
+        let file = OpenOptions::new().write(true).open(&self.file_path)?;
+        let mut writer = BufWriter::new(file);
+
+        let bitmap_offset = 5 + 1 + 8 + 32 + 32 + 4 + 4;
+        writer.seek(SeekFrom::Start(bitmap_offset as u64))?;
+
+        let bitmap_bytes = self.header.chunks_bitmap.as_raw_slice();
+        writer.write_all(bitmap_bytes)?;
+        writer.flush()?;
+
+        Ok(())
+    }
+
+    fn is_download_complete(&self) -> bool {
+        self.header.is_download_complete()
+    }
+
+    fn finalize_download(&self) -> Result<()> {
+        if !self.is_download_complete() {
+            anyhow::bail!("Download is not complete");
+        }
+
+        let temp_path = format!("{}.tmp", self.file_path);
+        let source = File::open(&self.file_path)?;
+        let dest = File::create(&temp_path)?;
+
+        let mut reader = BufReader::with_capacity(FINALIZE_BUFFER_SIZE, source);
+        let mut writer = BufWriter::with_capacity(FINALIZE_BUFFER_SIZE, dest);
+
+        reader.seek(SeekFrom::Start(HEADER_SIZE as u64))?;
+
+        std::io::copy(&mut reader, &mut writer)?;
+        writer.flush()?;
+        drop(writer);
+
+        match std::fs::rename(&temp_path, &self.file_path) {
+            Ok(_) => Ok(()),
+            Err(_) => {
+                let _ = std::fs::remove_file(&self.file_path);
+                std::fs::rename(&temp_path, &self.file_path)?;
+                Ok(())
+            }
+        }
+    }
+}
+
+#[tokio::main]
+async fn main() -> Result<()> {
+    let args = CliArgs::parse();
+
+    let config = DownloadConfig {
+        url: args.url.clone(),
+        output_path: args.output,
+        num_connections: args.connections,
+        chunk_size: args.chunk_size * 1024 * 1024,
+        buffer_size: args.buffer_size * 1024 * 1024,
+        verbose: args.verbose,
+        silent: args.silent,
+        log: args.log,
+        force_new: args.force_new,
+        resume_only: args.resume_only,
+        headers: args.header,
+    };
+
+    if config.force_new && config.resume_only {
+        eprintln!("Error: --force-new and --resume-only options cannot be used together");
+        std::process::exit(1);
+    }
+
+    let downloader = Downloader {
+        client: Client::new(),
+        config,
+    };
+
+    if downloader.config.should_log() {
+        println!(
+            "Starting download with {} connections, chunk size: {}MB, buffer: {}MB",
+            downloader.config.num_connections, args.chunk_size, args.buffer_size
+        );
+        println!("URL: {}", args.url);
+
+        if downloader.config.force_new {
+            println!("Forcing new download, ignoring existing files");
+        } else if downloader.config.resume_only {
+            println!("Only resuming existing download");
+        } else {
+            println!("Resuming download if possible");
+        }
+    }
+
+    downloader.download().await?;
+
+    Ok(())
+}
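hydra-httpdl stores resume state in a fixed 4096-byte header prepended to the partial file (HydraHeader::write_to_file above): the magic bytes "HYDRA", a version byte, the file size, SHA-256 hashes of the ETag and URL, the chunk size and chunk count (integers little-endian), an Lsb0-ordered chunk bitmap, and zero padding up to HEADER_SIZE. A minimal sketch of reading that header back outside the Rust code, assuming only the layout shown above:

    import struct

    HEADER_SIZE = 4096  # matches HEADER_SIZE in main.rs

    def read_hydra_header(path):
        with open(path, "rb") as f:
            raw = f.read(HEADER_SIZE)

        if raw[:5] != b"HYDRA":  # MAGIC_NUMBER
            raise ValueError("not a hydra-httpdl partial file")

        version = raw[5]  # FORMAT_VERSION
        (file_size,) = struct.unpack_from("<Q", raw, 6)
        etag_hash = raw[14:46]  # sha256 of the ETag string
        url_hash = raw[46:78]   # sha256 of the URL
        chunk_size, chunk_count = struct.unpack_from("<II", raw, 78)

        # bitmap starts at 5 + 1 + 8 + 32 + 32 + 4 + 4 = 86,
        # the same bitmap_offset used by set_chunk_complete
        bitmap = raw[86:86 + (chunk_count + 7) // 8]
        # bitvec Lsb0 ordering: chunk i is bit (i % 8) of byte (i // 8)
        done = sum((bitmap[i // 8] >> (i % 8)) & 1 for i in range(chunk_count))

        return version, file_size, chunk_size, chunk_count, done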
scripts/postinstall.cjs
@@ -2,7 +2,6 @@ const { default: axios } = require("axios");
 const util = require("node:util");
 const fs = require("node:fs");
 const path = require("node:path");
-const { spawnSync } = require("node:child_process");

 const exec = util.promisify(require("node:child_process").exec);

@@ -47,81 +46,6 @@ const downloadLudusavi = async () => {
   });
 };

-const downloadAria2WindowsAndLinux = async () => {
-  const file =
-    process.platform === "win32"
-      ? "aria2-1.37.0-win-64bit-build1.zip"
-      : "aria2-1.37.0-1-x86_64.pkg.tar.zst";
-
-  const downloadUrl =
-    process.platform === "win32"
-      ? `https://github.com/aria2/aria2/releases/download/release-1.37.0/${file}`
-      : "https://archlinux.org/packages/extra/x86_64/aria2/download/";
-
-  console.log(`Downloading ${file}...`);
-
-  const response = await axios.get(downloadUrl, { responseType: "stream" });
-
-  const stream = response.data.pipe(fs.createWriteStream(file));
-
-  stream.on("finish", async () => {
-    console.log(`Downloaded ${file}, extracting...`);
-
-    if (process.platform === "win32") {
-      await exec(`npx extract-zip ${file}`);
-      console.log("Extracted. Renaming folder...");
-
-      fs.mkdirSync("aria2");
-      fs.copyFileSync(
-        path.join(file.replace(".zip", ""), "aria2c.exe"),
-        "aria2/aria2c.exe"
-      );
-      fs.rmSync(file.replace(".zip", ""), { recursive: true });
-    } else {
-      await exec(`tar --zstd -xvf ${file} usr/bin/aria2c`);
-      console.log("Extracted. Copying binary file...");
-      fs.mkdirSync("aria2");
-      fs.copyFileSync("usr/bin/aria2c", "aria2/aria2c");
-      fs.rmSync("usr", { recursive: true });
-    }
-
-    console.log(`Extracted ${file}, removing compressed downloaded file...`);
-    fs.rmSync(file);
-  });
-};
-
-const copyAria2Macos = async () => {
-  console.log("Checking if aria2 is installed...");
-
-  const isAria2Installed = spawnSync("which", ["aria2c"]).status;
-
-  if (isAria2Installed != 0) {
-    console.log("Please install aria2");
-    console.log("brew install aria2");
-    return;
-  }
-
-  console.log("Copying aria2 binary...");
-  fs.mkdirSync("aria2");
-  await exec(`cp $(which aria2c) aria2/aria2c`);
-};
-
-const copyAria2 = () => {
-  const aria2Path =
-    process.platform === "win32" ? "aria2/aria2c.exe" : "aria2/aria2c";
-
-  if (fs.existsSync(aria2Path)) {
-    console.log("Aria2 already exists, skipping download...");
-    return;
-  }
-  if (process.platform == "darwin") {
-    copyAria2Macos();
-  } else {
-    downloadAria2WindowsAndLinux();
-  }
-};
-
-copyAria2();
 downloadLudusavi();

 if (process.platform !== "win32") {
src/locales/en/translation.json
@@ -354,7 +354,8 @@
     "common_redist": "Common redistributables",
     "common_redist_description": "Common redistributables are required to run some games. Installing them is recommended to avoid issues.",
     "install_common_redist": "Install",
-    "installing_common_redist": "Installing…"
+    "installing_common_redist": "Installing…",
+    "show_download_speed_in_megabits": "Show download speed in megabits per second"
   },
   "notifications": {
     "download_complete": "Download complete",
@@ -341,7 +341,8 @@
     "common_redist": "Componentes recomendados",
     "common_redist_description": "Componentes recomendados são necessários para executar alguns jogos. A instalação deles é recomendada para evitar problemas.",
     "install_common_redist": "Instalar",
-    "installing_common_redist": "Instalando…"
+    "installing_common_redist": "Instalando…",
+    "show_download_speed_in_megabits": "Exibir taxas de download em megabits por segundo"
   },
   "notifications": {
     "download_complete": "Download concluído",

@@ -8,7 +8,6 @@ import { electronApp, optimizer } from "@electron-toolkit/utils";
 import { logger, WindowManager } from "@main/services";
 import resources from "@locales";
 import { PythonRPC } from "./services/python-rpc";
-import { Aria2 } from "./services/aria2";
 import { db, levelKeys } from "./level";
 import { loadState } from "./main";

@@ -143,7 +142,6 @@ app.on("window-all-closed", () => {
 app.on("before-quit", () => {
   /* Disconnects libtorrent */
   PythonRPC.kill();
-  Aria2.kill();
 });

 app.on("activate", () => {

@@ -2,7 +2,6 @@ import { DownloadManager, Ludusavi, startMainLoop } from "./services";
 import { RealDebridClient } from "./services/download/real-debrid";
 import { HydraApi } from "./services/hydra-api";
 import { uploadGamesBatch } from "./services/library-sync";
-import { Aria2 } from "./services/aria2";
 import { downloadsSublevel } from "./level/sublevels/downloads";
 import { sortBy } from "lodash-es";
 import { Downloader } from "@shared";
@@ -21,8 +20,6 @@ export const loadState = async () => {

   await import("./events");

-  Aria2.spawn();
-
   if (userPreferences?.realDebridApiToken) {
     RealDebridClient.authorize(userPreferences.realDebridApiToken);
   }

@@ -1,28 +0,0 @@
-import path from "node:path";
-import cp from "node:child_process";
-import { app } from "electron";
-
-export class Aria2 {
-  private static process: cp.ChildProcess | null = null;
-  private static readonly binaryPath = app.isPackaged
-    ? path.join(process.resourcesPath, "aria2", "aria2c")
-    : path.join(__dirname, "..", "..", "aria2", "aria2c");
-
-  public static spawn() {
-    this.process = cp.spawn(
-      this.binaryPath,
-      [
-        "--enable-rpc",
-        "--rpc-listen-all",
-        "--file-allocation=none",
-        "--allow-overwrite=true",
-        "--disk-cache=64M",
-      ],
-      { stdio: "inherit", windowsHide: true }
-    );
-  }
-
-  public static kill() {
-    this.process?.kill();
-  }
-}

@@ -372,6 +372,7 @@ export class DownloadManager {
           game_id: downloadId,
           url: downloadUrl,
           save_path: download.downloadPath,
+          allow_multiple_connections: true,
         };
       }
       case Downloader.TorBox: {
@@ -384,6 +385,7 @@ export class DownloadManager {
           url,
           save_path: download.downloadPath,
           out: name,
+          allow_multiple_connections: true,
         };
       }
       case Downloader.Hydra: {
@@ -398,6 +400,7 @@ export class DownloadManager {
           game_id: downloadId,
           url: downloadUrl,
           save_path: download.downloadPath,
+          allow_multiple_connections: true,
         };
       }
     }

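Taken together, the three hunks above opt every direct-HTTP downloader case (including the TorBox and Hydra branches) into multi-connection transfers. A minimal sketch of the payload shape implied by the diff, assuming only what the hunks show (the interface name is hypothetical; the snake_case fields are copied verbatim):

// Hypothetical shape of the options object handed to the download backend.
interface HttpDownloadOptions {
  game_id: string; // identifier of the library entry being downloaded
  url: string; // direct HTTP(S) link to fetch
  save_path: string; // destination directory for the download
  out?: string; // optional output filename; only the TorBox case sets it
  allow_multiple_connections?: boolean; // let the downloader split the file across parallel connections
}
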
@@ -21,6 +21,12 @@ const binaryNameByPlatform: Partial<Record<NodeJS.Platform, string>> = {
   win32: "hydra-python-rpc.exe",
 };

+const rustBinaryNameByPlatform: Partial<Record<NodeJS.Platform, string>> = {
+  darwin: "hydra-httpdl",
+  linux: "hydra-httpdl",
+  win32: "hydra-httpdl.exe",
+};
+
 export class PythonRPC {
   public static readonly BITTORRENT_PORT = "5881";
   public static readonly RPC_PORT = "8084";
@@ -52,6 +58,20 @@ export class PythonRPC {
       this.RPC_PASSWORD,
       initialDownload ? JSON.stringify(initialDownload) : "",
       initialSeeding ? JSON.stringify(initialSeeding) : "",
+      app.isPackaged
+        ? path.join(
+            process.resourcesPath,
+            rustBinaryNameByPlatform[process.platform]!
+          )
+        : path.join(
+            __dirname,
+            "..",
+            "..",
+            "rust_rpc",
+            "target",
+            "debug",
+            rustBinaryNameByPlatform[process.platform]!
+          ),
     ];

     if (app.isPackaged) {

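Note the asymmetry in the new argv entry: packaged builds resolve hydra-httpdl inside process.resourcesPath, while development builds point at Cargo's debug profile under rust_rpc/target/debug. A self-contained sketch of that resolution, reusing the map from the hunk above but replacing the non-null assertion with an explicit error (the helper name is hypothetical):

import path from "node:path";

const rustBinaryNameByPlatform: Partial<Record<NodeJS.Platform, string>> = {
  darwin: "hydra-httpdl",
  linux: "hydra-httpdl",
  win32: "hydra-httpdl.exe",
};

// Hypothetical helper mirroring the branch added above: pick the per-platform
// binary name, then join it onto the packaged resources dir or the dev build dir.
function resolveHttpdlBinary(isPackaged: boolean, resourcesPath: string): string {
  const binaryName = rustBinaryNameByPlatform[process.platform];
  if (!binaryName) {
    throw new Error(`hydra-httpdl is not built for ${process.platform}`);
  }
  return isPackaged
    ? path.join(resourcesPath, binaryName)
    : path.join(__dirname, "..", "..", "rust_rpc", "target", "debug", binaryName);
}
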
@@ -138,6 +138,7 @@ export function App() {
   }, [fetchUserDetails, updateUserDetails, dispatch]);

   const syncDownloadSources = useCallback(async () => {
+    console.log("SYNC CALLED");
     const downloadSources = await window.electron.getDownloadSources();

     const existingDownloadSources: DownloadSource[] =

@@ -15,12 +15,14 @@ import type {
   StartGameDownloadPayload,
 } from "@types";
 import { useDate } from "./use-date";
-import { formatBytes } from "@shared";
+import { formatBytes, formatBytesToMbps } from "@shared";

 export function useDownload() {
   const { updateLibrary } = useLibrary();
   const { formatDistance } = useDate();

+  const userPrefs = useAppSelector((state) => state.userPreferences.value);
+
   const { lastPacket, gamesWithDeletionInProgress } = useAppSelector(
     (state) => state.download
   );
@@ -99,8 +101,14 @@ export function useDownload() {
     return gamesWithDeletionInProgress.includes(objectId);
   };

+  const formatDownloadSpeed = (downloadSpeed: number): string => {
+    return userPrefs?.showDownloadSpeedInMegabits
+      ? formatBytesToMbps(downloadSpeed)
+      : `${formatBytes(downloadSpeed)}/s`;
+  };
+
   return {
-    downloadSpeed: `${formatBytes(lastPacket?.downloadSpeed ?? 0)}/s`,
+    downloadSpeed: formatDownloadSpeed(lastPacket?.downloadSpeed ?? 0),
     progress: formatDownloadProgress(lastPacket?.progress ?? 0),
     lastPacket,
     eta: calculateETA(),

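A quick worked example of the two display modes, assuming the helpers shipped in this commit (preference on: megabits; preference off: the old bytes-per-second string):

// 2_621_440 B/s is exactly 2.5 MiB/s:
// formatBytes(2_621_440)       -> "2.5 MB", so the default path shows "2.5 MB/s"
// formatBytesToMbps(2_621_440) -> 2_621_440 * 8 / 1_048_576 = 20, shown as "20 Mbps"
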
@@ -23,6 +23,7 @@ export function SettingsBehavior() {
     enableAutoInstall: false,
     seedAfterDownloadComplete: false,
     showHiddenAchievementsDescription: false,
+    showDownloadSpeedInMegabits: false,
   });

   const { t } = useTranslation("settings");
@@ -40,6 +41,8 @@ export function SettingsBehavior() {
         userPreferences.seedAfterDownloadComplete ?? false,
       showHiddenAchievementsDescription:
         userPreferences.showHiddenAchievementsDescription ?? false,
+      showDownloadSpeedInMegabits:
+        userPreferences.showDownloadSpeedInMegabits ?? false,
     });
   }
 }, [userPreferences]);
@@ -139,6 +142,16 @@ export function SettingsBehavior() {
           })
         }
       />
+
+      <CheckboxField
+        label={t("show_download_speed_in_megabits")}
+        checked={form.showDownloadSpeedInMegabits}
+        onChange={() =>
+          handleChange({
+            showDownloadSpeedInMegabits: !form.showDownloadSpeedInMegabits,
+          })
+        }
+      />
     </>
   );
 }

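The new checkbox leans on the screen's existing handleChange helper, which this diff does not show. Purely as an assumption for readers, such a helper typically merges the partial update into local form state and persists it; the setForm and window.electron.updateUserPreferences names below are hypothetical:

// Hypothetical sketch, not part of this diff: merge a partial update
// into the form state, then persist the same fields as preferences.
const handleChange = (values: Partial<typeof form>) => {
  setForm((prev) => ({ ...prev, ...values }));
  window.electron.updateUserPreferences(values);
};
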
@@ -49,6 +49,12 @@ export const formatBytes = (bytes: number): string => {
   return `${Math.trunc(formatedByte * 10) / 10} ${FORMAT[base]}`;
 };

+export const formatBytesToMbps = (bytesPerSecond: number): string => {
+  const bitsPerSecond = bytesPerSecond * 8;
+  const mbps = bitsPerSecond / (1024 * 1024);
+  return `${Math.trunc(mbps * 10) / 10} Mbps`;
+};
+
 export const pipe =
   <T>(...fns: ((arg: T) => any)[]) =>
   (arg: T) =>

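One unit caveat: dividing by 1024 * 1024 yields binary mebibits, whereas "Mbps" conventionally means SI megabits (10^6 bits), so the displayed figure reads about 4.9% low relative to ISP-style ratings. If SI units were intended, only the divisor changes; an alternative sketch (an assumption, not what this diff ships):

// SI variant: 2_621_440 B/s * 8 = 20_971_520 bit/s -> "20.9 Mbps" here,
// versus "20 Mbps" from the binary divisor above.
export const formatBytesToMbpsSI = (bytesPerSecond: number): string => {
  const mbps = (bytesPerSecond * 8) / 1_000_000;
  return `${Math.trunc(mbps * 10) / 10} Mbps`;
};
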
@@ -85,6 +85,7 @@ export interface UserPreferences {
   repackUpdatesNotificationsEnabled?: boolean;
   achievementNotificationsEnabled?: boolean;
   friendRequestNotificationsEnabled?: boolean;
+  showDownloadSpeedInMegabits?: boolean;
 }

 export interface ScreenState {