mirror of
https://github.com/LightZirconite/Microsoft-Rewards-Bot.git
synced 2026-01-08 00:26:16 +00:00
Initial commit
This commit is contained in:
3
.eslintignore
Normal file
3
.eslintignore
Normal file
@@ -0,0 +1,3 @@
|
||||
dist/
|
||||
node_modules/
|
||||
setup/
|
||||
37
.eslintrc.js
Normal file
37
.eslintrc.js
Normal file
@@ -0,0 +1,37 @@
|
||||
module.exports = {
|
||||
'env': {
|
||||
'es2021': true,
|
||||
'node': true
|
||||
},
|
||||
'extends': [
|
||||
'eslint:recommended',
|
||||
'plugin:@typescript-eslint/recommended'
|
||||
],
|
||||
'parser': '@typescript-eslint/parser',
|
||||
'parserOptions': {
|
||||
'ecmaVersion': 12,
|
||||
'sourceType': 'module'
|
||||
},
|
||||
'plugins': [
|
||||
'@typescript-eslint'
|
||||
],
|
||||
'rules': {
|
||||
'linebreak-style': 'off',
|
||||
'quotes': [
|
||||
'error',
|
||||
'single'
|
||||
],
|
||||
'semi': [
|
||||
'error',
|
||||
'never'
|
||||
],
|
||||
'@typescript-eslint/no-explicit-any':
|
||||
['warn', {
|
||||
fixToUnknown: true // This line is optional and only relevant if you are using TypeScript
|
||||
}],
|
||||
'comma-dangle': 'off',
|
||||
'@typescript-eslint/comma-dangle': 'error',
|
||||
'prefer-arrow-callback': 'error'
|
||||
// Add any other rules you want to enforce here
|
||||
}
|
||||
}
|
||||
28
.eslintrc.json
Normal file
28
.eslintrc.json
Normal file
@@ -0,0 +1,28 @@
|
||||
{
|
||||
"root": true,
|
||||
"env": {
|
||||
"es2021": true,
|
||||
"node": true
|
||||
},
|
||||
"parser": "@typescript-eslint/parser",
|
||||
"parserOptions": {
|
||||
"project": ["./tsconfig.json"],
|
||||
"sourceType": "module",
|
||||
"ecmaVersion": 2021
|
||||
},
|
||||
"plugins": ["@typescript-eslint", "modules-newline"],
|
||||
"extends": [
|
||||
"eslint:recommended",
|
||||
"plugin:@typescript-eslint/recommended"
|
||||
],
|
||||
"rules": {
|
||||
"modules-newline/import-declaration-newline": ["warn", { "count": 3 }],
|
||||
"@typescript-eslint/consistent-type-imports": ["warn", { "prefer": "type-imports" }],
|
||||
"@typescript-eslint/no-unused-vars": ["warn", { "argsIgnorePattern": "^_", "varsIgnorePattern": "^_" }],
|
||||
"@typescript-eslint/no-explicit-any": "off",
|
||||
"no-console": ["warn", { "allow": ["error", "warn"] }],
|
||||
"quotes": ["error", "double", { "avoidEscape": true }],
|
||||
"linebreak-style": "off"
|
||||
},
|
||||
"ignorePatterns": ["dist/**", "node_modules/**", "setup/**"]
|
||||
}
|
||||
2
.gitattributes
vendored
Normal file
2
.gitattributes
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
*.sh text eol=lf
|
||||
*.template text eol=lf
|
||||
18
.gitignore
vendored
Normal file
18
.gitignore
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
sessions/
|
||||
dist/
|
||||
node_modules/
|
||||
.vscode/
|
||||
.github/
|
||||
diagnostics/
|
||||
reports/
|
||||
accounts.json
|
||||
accounts.jsonc
|
||||
notes
|
||||
accounts.dev.json
|
||||
accounts.dev.jsonc
|
||||
accounts.main.json
|
||||
accounts.main.jsonc
|
||||
.DS_Store
|
||||
.playwright-chromium-installed
|
||||
*.log
|
||||
.update-backup/
|
||||
87
Dockerfile
Normal file
87
Dockerfile
Normal file
@@ -0,0 +1,87 @@
|
||||
###############################################################################
|
||||
# Stage 1: Builder
|
||||
###############################################################################
|
||||
FROM node:22-slim AS builder
|
||||
|
||||
WORKDIR /usr/src/microsoft-rewards-script
|
||||
|
||||
ENV PLAYWRIGHT_BROWSERS_PATH=0
|
||||
|
||||
# Copy package files
|
||||
COPY package.json package-lock.json tsconfig.json ./
|
||||
|
||||
# Install all dependencies required to build the script
|
||||
RUN npm ci --ignore-scripts
|
||||
|
||||
# Copy source and build
|
||||
COPY . .
|
||||
RUN npm run build
|
||||
|
||||
# Remove build dependencies, and reinstall only runtime dependencies
|
||||
RUN rm -rf node_modules \
|
||||
&& npm ci --omit=dev --ignore-scripts \
|
||||
&& npm cache clean --force
|
||||
|
||||
# Install Chromium Headless Shell, and cleanup
|
||||
RUN npx playwright install --with-deps --only-shell chromium \
|
||||
&& rm -rf /root/.cache /tmp/* /var/tmp/*
|
||||
|
||||
###############################################################################
|
||||
# Stage 2: Runtime
|
||||
###############################################################################
|
||||
FROM node:22-slim AS runtime
|
||||
|
||||
WORKDIR /usr/src/microsoft-rewards-script
|
||||
|
||||
# Set production environment variables
|
||||
ENV NODE_ENV=production \
|
||||
TZ=UTC \
|
||||
PLAYWRIGHT_BROWSERS_PATH=0 \
|
||||
FORCE_HEADLESS=1
|
||||
|
||||
# Install minimal system libraries required for Chromium headless to run
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
ca-certificates \
|
||||
libglib2.0-0 \
|
||||
libdbus-1-3 \
|
||||
libexpat1 \
|
||||
libfontconfig1 \
|
||||
libgtk-3-0 \
|
||||
libnspr4 \
|
||||
libnss3 \
|
||||
libasound2 \
|
||||
libflac12 \
|
||||
libatk1.0-0 \
|
||||
libatspi2.0-0 \
|
||||
libdrm2 \
|
||||
libgbm1 \
|
||||
libdav1d6 \
|
||||
libx11-6 \
|
||||
libx11-xcb1 \
|
||||
libxcomposite1 \
|
||||
libxcursor1 \
|
||||
libxdamage1 \
|
||||
libxext6 \
|
||||
libxfixes3 \
|
||||
libxi6 \
|
||||
libxrandr2 \
|
||||
libxrender1 \
|
||||
libxss1 \
|
||||
libxtst6 \
|
||||
libdouble-conversion3 \
|
||||
&& rm -rf /var/lib/apt/lists/* /var/cache/apt/archives/*
|
||||
|
||||
# Copy compiled application and dependencies from builder stage
|
||||
COPY --from=builder /usr/src/microsoft-rewards-script/dist ./dist
|
||||
COPY --from=builder /usr/src/microsoft-rewards-script/package*.json ./
|
||||
COPY --from=builder /usr/src/microsoft-rewards-script/node_modules ./node_modules
|
||||
|
||||
# Copy entrypoint script
|
||||
COPY docker-entrypoint.sh /usr/local/bin/
|
||||
RUN chmod +x /usr/local/bin/docker-entrypoint.sh
|
||||
|
||||
# Use entrypoint that supports both scheduler and cron
|
||||
ENTRYPOINT ["docker-entrypoint.sh"]
|
||||
|
||||
# Default: use built-in scheduler
|
||||
CMD ["npm", "run", "start:schedule"]
|
||||
43
LICENSE
Normal file
43
LICENSE
Normal file
@@ -0,0 +1,43 @@
|
||||
PROPRIETARY LICENSE
|
||||
|
||||
Copyright (c) 2024-2025 LightZirconite and Contributors
|
||||
|
||||
All rights reserved.
|
||||
|
||||
TERMS AND CONDITIONS:
|
||||
|
||||
1. PERMITTED USE:
|
||||
- You may download and use this software for personal, non-commercial purposes.
|
||||
- You may modify the source code for your own personal use.
|
||||
- You may submit pull requests (PRs) to the official repository to propose improvements.
|
||||
|
||||
2. PROHIBITED ACTIONS:
|
||||
- You may NOT redistribute this software, modified or unmodified, in any form.
|
||||
- You may NOT share, publish, or distribute your modifications publicly.
|
||||
- You may NOT use this software for commercial purposes.
|
||||
- You may NOT sublicense, sell, rent, or lease this software.
|
||||
- You may NOT create derivative works for public distribution.
|
||||
- You may NOT remove or modify this license or copyright notices.
|
||||
|
||||
3. CONTRIBUTIONS:
|
||||
- By submitting a pull request, you grant the copyright holder the right to use,
|
||||
modify, and distribute your contributions under this license.
|
||||
- You retain no rights to your contributions once merged into the official repository.
|
||||
|
||||
4. DISCLAIMER:
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
|
||||
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
||||
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
5. TERMINATION:
|
||||
This license is effective until terminated. Your rights under this license will
|
||||
terminate automatically without notice if you fail to comply with any term herein.
|
||||
|
||||
6. GOVERNING LAW:
|
||||
This license shall be governed by and construed in accordance with applicable laws,
|
||||
without regard to its conflict of law provisions.
|
||||
|
||||
For permissions beyond the scope of this license, please contact the copyright holder.
|
||||
127
NOTICE
Normal file
127
NOTICE
Normal file
@@ -0,0 +1,127 @@
|
||||
# IMPORTANT LEGAL NOTICES
|
||||
|
||||
## 🚨 Terms of Service Violation Warning
|
||||
|
||||
**Using this software violates Microsoft's Terms of Service.**
|
||||
|
||||
Microsoft Rewards explicitly prohibits:
|
||||
- Automated point collection
|
||||
- Bot usage for completing tasks
|
||||
- Any form of automation on their platform
|
||||
|
||||
### Potential Consequences:
|
||||
- ❌ **Immediate account suspension**
|
||||
- ❌ **Permanent ban from Microsoft Rewards**
|
||||
- ❌ **Forfeiture of all accumulated points**
|
||||
- ❌ **Loss of redemption history**
|
||||
- ⚠️ Possible restrictions on other Microsoft services
|
||||
|
||||
---
|
||||
|
||||
## 🏢 Proprietary License - Restricted Use
|
||||
|
||||
This software is licensed under a **PROPRIETARY LICENSE** with strict restrictions.
|
||||
|
||||
### ✅ Permitted Activities:
|
||||
- Download and use for personal, non-commercial purposes
|
||||
- Modify the source code for your own personal use
|
||||
- Submit pull requests to the official repository
|
||||
|
||||
### ❌ Prohibited Activities:
|
||||
- Redistributing this software (modified or unmodified)
|
||||
- Sharing or publishing your modifications publicly
|
||||
- Using this software for commercial purposes
|
||||
- Selling, renting, or leasing this software
|
||||
- Creating derivative works for public distribution
|
||||
- Removing or modifying license or copyright notices
|
||||
|
||||
---
|
||||
|
||||
## ⚖️ Legal Disclaimer
|
||||
|
||||
### No Warranty
|
||||
This software is provided "AS IS" without any warranty of any kind.
|
||||
|
||||
### No Liability
|
||||
The authors and contributors:
|
||||
- Are NOT responsible for account suspensions
|
||||
- Are NOT responsible for lost points or rewards
|
||||
- Are NOT responsible for any damages
|
||||
- Do NOT encourage ToS violations
|
||||
- Provide this for educational purposes ONLY
|
||||
|
||||
### Your Responsibility
|
||||
You are solely responsible for:
|
||||
- Your use of this software
|
||||
- Compliance with Microsoft's policies
|
||||
- Any consequences from automation
|
||||
- Legal implications in your jurisdiction
|
||||
|
||||
---
|
||||
|
||||
## 🎓 Educational Purpose Statement
|
||||
|
||||
This project is developed and maintained for **educational purposes**:
|
||||
- To demonstrate browser automation techniques
|
||||
- To showcase TypeScript and Playwright capabilities
|
||||
- To teach software architecture patterns
|
||||
- To explore anti-detection methodologies
|
||||
|
||||
**The authors do not condone using this software in violation of any Terms of Service.**
|
||||
|
||||
---
|
||||
|
||||
## 🔒 Privacy & Security
|
||||
|
||||
### Your Data:
|
||||
- This software stores credentials **locally only**
|
||||
- No data is sent to third parties
|
||||
- Sessions are stored in the `sessions/` folder
|
||||
- You can delete all data by removing local files
|
||||
|
||||
### Third-Party Services:
|
||||
- Google Trends (for search queries)
|
||||
- Bing Search (for automation)
|
||||
- Discord/NTFY (optional, for notifications)
|
||||
|
||||
### Your Responsibility:
|
||||
- Protect your `accounts.json` file
|
||||
- Use strong passwords
|
||||
- Enable 2FA where possible
|
||||
- Don't share your configuration publicly
|
||||
|
||||
---
|
||||
|
||||
## 🌍 Geographic Restrictions
|
||||
|
||||
Microsoft Rewards availability and terms vary by region:
|
||||
- Available in select countries only
|
||||
- Region-specific earning rates
|
||||
- Local laws may apply
|
||||
- Check your local regulations
|
||||
|
||||
**By using this software, you confirm:**
|
||||
1. Microsoft Rewards is available in your region
|
||||
2. You understand the risks of automation
|
||||
3. You accept full responsibility for your actions
|
||||
4. You will not use this for commercial purposes
|
||||
|
||||
---
|
||||
|
||||
## 📞 Contact & Reporting
|
||||
|
||||
**Questions about licensing?**
|
||||
Open an issue at: https://github.com/LightZirconite/Microsoft-Rewards-Rewi/issues
|
||||
|
||||
**Found a security issue?**
|
||||
See: SECURITY.md
|
||||
|
||||
**General discussion?**
|
||||
Join Discord: https://discord.gg/kn3695Kx32
|
||||
|
||||
---
|
||||
|
||||
**Last Updated:** October 2025
|
||||
**Applies to:** Microsoft Rewards Script V2.1.5 and later
|
||||
|
||||
**BY USING THIS SOFTWARE, YOU ACKNOWLEDGE THAT YOU HAVE READ AND UNDERSTOOD THESE NOTICES.**
|
||||
353
README.md
Normal file
353
README.md
Normal file
@@ -0,0 +1,353 @@
|
||||
# Microsoft-Rewards-Rewi
|
||||
|
||||
[](https://discord.gg/kn3695Kx32)
|
||||
[](https://github.com/LightZirconite/Microsoft-Rewards-Rewi)
|
||||
|
||||
---
|
||||
|
||||
# 🚀 Quick Setup (Recommended)
|
||||
|
||||
**Easiest way to get started — download and run:**
|
||||
|
||||
1. **Clone the repository:**
|
||||
```bash
|
||||
git clone https://github.com/LightZirconite/Microsoft-Rewards-Rewi.git
|
||||
cd Microsoft-Rewards-Rewi
|
||||
```
|
||||
|
||||
2. **Run the setup script:**
|
||||
|
||||
* **Windows:** double-click `setup/setup.bat` or run it from a command prompt
|
||||
* **Linux / macOS / WSL:**
|
||||
```bash
|
||||
bash setup/setup.sh
|
||||
```
|
||||
* **Alternative (any platform):**
|
||||
```bash
|
||||
npm run setup
|
||||
```
|
||||
|
||||
3. **Follow the setup prompts.** The script will:
|
||||
* Rename `accounts.example.jsonc` → `accounts.jsonc`
|
||||
* Ask for Microsoft account credentials
|
||||
* Remind you to review `config.jsonc`
|
||||
* Install dependencies (`npm install`)
|
||||
* Build the project (`npm run build`)
|
||||
* Optionally start the script
|
||||
|
||||
**That's it — the setup script handles the rest.**
|
||||
|
||||
---
|
||||
|
||||
# ⚙️ Advanced Setup Options
|
||||
|
||||
### Nix Users
|
||||
|
||||
1. Install Nix from [https://nixos.org/](https://nixos.org/)
|
||||
2. Run:
|
||||
```bash
|
||||
./run.sh
|
||||
```
|
||||
|
||||
### Manual Setup (if setup script fails)
|
||||
|
||||
1. Copy `src/accounts.example.jsonc` → `src/accounts.jsonc` and add your accounts.
|
||||
2. Edit `src/config.jsonc` as needed.
|
||||
3. Install dependencies:
|
||||
```bash
|
||||
npm install
|
||||
```
|
||||
4. Build:
|
||||
```bash
|
||||
npm run build
|
||||
```
|
||||
5. Start:
|
||||
```bash
|
||||
npm run start
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
# 🐳 Docker Setup (Experimental)
|
||||
|
||||
**Before starting:**
|
||||
|
||||
* Remove local `/node_modules` and `/dist` if you previously built.
|
||||
* Remove old Docker volumes when upgrading from older versions.
|
||||
* You can reuse older `accounts.jsonc`.
|
||||
|
||||
**Quick Docker (recommended for scheduling):**
|
||||
|
||||
1. Clone the repository and configure `accounts.jsonc` (or rename from `accounts.example.jsonc`).
|
||||
2. Ensure `config.jsonc` has `"headless": true` in browser settings.
|
||||
3. Edit `compose.yaml`:
|
||||
* Set `TZ` (timezone)
|
||||
* **Choose scheduling mode:**
|
||||
* **Option A (default):** Built-in scheduler — configure `schedule` in `config.jsonc`
|
||||
* **Option B (cron):** Uncomment `USE_CRON: "true"` and set `CRON_SCHEDULE`
|
||||
* Optional: `RUN_ON_START=true` (runs once immediately on container start)
|
||||
4. Start:
|
||||
```bash
|
||||
docker compose up -d
|
||||
```
|
||||
5. Monitor:
|
||||
```bash
|
||||
docker logs -f microsoft-rewards-script
|
||||
```
|
||||
|
||||
### Scheduling Options
|
||||
|
||||
**Built-in Scheduler (Default):**
|
||||
```yaml
|
||||
# In docker-compose.yml - no cron variables needed
|
||||
environment:
|
||||
TZ: "Europe/Paris"
|
||||
```
|
||||
```jsonc
|
||||
// In config.jsonc
|
||||
{
|
||||
"schedule": {
|
||||
"enabled": true,
|
||||
"time24": "09:00",
|
||||
"timeZone": "Europe/Paris"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Native Cron (Traditional):**
|
||||
```yaml
|
||||
# In docker-compose.yml
|
||||
environment:
|
||||
TZ: "Europe/Paris"
|
||||
USE_CRON: "true"
|
||||
CRON_SCHEDULE: "0 9,16,21 * * *" # 9 AM, 4 PM, 9 PM
|
||||
RUN_ON_START: "true"
|
||||
```
|
||||
|
||||
Use [crontab.guru](https://crontab.guru) for cron syntax help.
|
||||
|
||||
**See [Docker Documentation](docs/docker.md) for detailed setup and troubleshooting.**
|
||||
|
||||
---
|
||||
|
||||
# 📋 Usage Notes
|
||||
|
||||
* **Headless=false cleanup:** If you stop the script without closing browser windows, use Task Manager or run `npm run kill-chrome-win` (Windows) to close leftover instances.
|
||||
* **Scheduling advice:** Run at least once or twice daily. Use `"runOnZeroPoints": false` in config to skip runs when no points are available.
|
||||
* **Multiple accounts:** Use `clusters` in `config.jsonc` to run accounts in parallel.
|
||||
* **Built-in scheduler:** Enable `schedule.enabled` in `config.jsonc` to run automatically without external cron jobs.
|
||||
|
||||
---
|
||||
|
||||
# ⚙️ Configuration Reference
|
||||
|
||||
Edit `src/config.jsonc` to customize behavior. See the [full configuration documentation](docs/config.md) for detailed explanations.
|
||||
|
||||
<details>
|
||||
<summary><b>Core Settings</b></summary>
|
||||
|
||||
| Setting | Description | Default |
|
||||
| ------- | ----------- | ------- |
|
||||
| `baseURL` | Microsoft Rewards URL | `https://rewards.bing.com` |
|
||||
| `sessionPath` | Session/fingerprint storage | `sessions` |
|
||||
| `browser.headless` | Run browser in background | `false` |
|
||||
| `browser.globalTimeout` | Max timeout for operations | `30s` |
|
||||
| `execution.parallel` | Run mobile/desktop tasks at once | `false` |
|
||||
| `execution.runOnZeroPoints` | Run when no points available | `false` |
|
||||
| `execution.clusters` | Concurrent account instances | `1` |
|
||||
| `execution.passesPerRun` | How many times to process each account | `3` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><b>Fingerprint Settings</b></summary>
|
||||
|
||||
| Setting | Description | Default |
|
||||
| ------- | ----------- | ------- |
|
||||
| `fingerprinting.saveFingerprint.mobile` | Reuse mobile fingerprint | `true` |
|
||||
| `fingerprinting.saveFingerprint.desktop` | Reuse desktop fingerprint | `true` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><b>Task Settings</b></summary>
|
||||
|
||||
| Setting | Description | Default |
|
||||
| ------- | ----------- | ------- |
|
||||
| `workers.doDailySet` | Complete daily set | `true` |
|
||||
| `workers.doMorePromotions` | Complete promotional offers | `true` |
|
||||
| `workers.doPunchCards` | Complete punchcard tasks | `true` |
|
||||
| `workers.doDesktopSearch` | Perform desktop searches | `true` |
|
||||
| `workers.doMobileSearch` | Perform mobile searches | `true` |
|
||||
| `workers.doDailyCheckIn` | Complete daily check-in | `true` |
|
||||
| `workers.doReadToEarn` | Complete read-to-earn tasks | `true` |
|
||||
| `workers.bundleDailySetWithSearch` | Run desktop searches after Daily Set | `true` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><b>Search Settings</b></summary>
|
||||
|
||||
| Setting | Description | Default |
|
||||
| ------- | ----------- | ------- |
|
||||
| `search.useLocalQueries` | Use locale-specific query sources | `true` |
|
||||
| `search.settings.useGeoLocaleQueries` | Use region-specific queries | `true` |
|
||||
| `search.settings.scrollRandomResults` | Random scrolling on results | `true` |
|
||||
| `search.settings.clickRandomResults` | Random link clicks | `true` |
|
||||
| `search.settings.retryMobileSearchAmount` | Mobile retry attempts | `2` |
|
||||
| `search.settings.delay.min` | Minimum delay between searches | `1min` |
|
||||
| `search.settings.delay.max` | Maximum delay between searches | `5min` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><b>Query Diversity Engine</b></summary>
|
||||
|
||||
| Setting | Description | Default |
|
||||
| ------- | ----------- | ------- |
|
||||
| `queryDiversity.enabled` | Multi-source query generation | `true` |
|
||||
| `queryDiversity.sources` | Available query sources | `["google-trends", "reddit", "local-fallback"]` |
|
||||
| `queryDiversity.maxQueriesPerSource` | Max queries per source | `10` |
|
||||
| `queryDiversity.cacheMinutes` | Cache duration in minutes | `30` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><b>Humanization & Natural Behavior</b></summary>
|
||||
|
||||
| Setting | Description | Default |
|
||||
| ------- | ----------- | ------- |
|
||||
| `humanization.enabled` | Enable human-like behavior patterns | `true` |
|
||||
| `humanization.stopOnBan` | Stop processing accounts on ban detection | `true` |
|
||||
| `humanization.immediateBanAlert` | Send immediate alert on ban | `true` |
|
||||
| `humanization.actionDelay.min` | Minimum action delay (ms) | `500` |
|
||||
| `humanization.actionDelay.max` | Maximum action delay (ms) | `2200` |
|
||||
| `humanization.gestureMoveProb` | Mouse gesture probability | `0.65` |
|
||||
| `humanization.gestureScrollProb` | Scroll gesture probability | `0.4` |
|
||||
| `vacation.enabled` | Monthly vacation mode | `true` |
|
||||
| `vacation.minDays` | Minimum vacation days per month | `2` |
|
||||
| `vacation.maxDays` | Maximum vacation days per month | `4` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><b>Risk Management & Security</b></summary>
|
||||
|
||||
| Setting | Description | Default |
|
||||
| ------- | ----------- | ------- |
|
||||
| `riskManagement.enabled` | Dynamic delay adjustment | `true` |
|
||||
| `riskManagement.autoAdjustDelays` | Auto-adjust delays on risk detection | `true` |
|
||||
| `riskManagement.stopOnCritical` | Stop on critical risk level | `false` |
|
||||
| `riskManagement.banPrediction` | ML-based ban prediction | `true` |
|
||||
| `riskManagement.riskThreshold` | Risk threshold (0-100) | `75` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><b>Scheduling (Built-in)</b></summary>
|
||||
|
||||
| Setting | Description | Default |
|
||||
| ------- | ----------- | ------- |
|
||||
| `schedule.enabled` | Enable built-in scheduler | `false` |
|
||||
| `schedule.useAmPm` | Use 12-hour time format | `false` |
|
||||
| `schedule.time12` | Time in 12-hour format | `9:00 AM` |
|
||||
| `schedule.time24` | Time in 24-hour format | `09:00` |
|
||||
| `schedule.timeZone` | IANA timezone | `Europe/Paris` |
|
||||
| `schedule.runImmediatelyOnStart` | Run on process start | `false` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><b>Job State Management</b></summary>
|
||||
|
||||
| Setting | Description | Default |
|
||||
| ------- | ----------- | ------- |
|
||||
| `jobState.enabled` | Save state to avoid duplicate work | `true` |
|
||||
| `jobState.dir` | Custom state directory | `""` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><b>Proxy Settings</b></summary>
|
||||
|
||||
| Setting | Description | Default |
|
||||
| ------- | ----------- | ------- |
|
||||
| `proxy.proxyGoogleTrends` | Proxy Google Trends requests | `true` |
|
||||
| `proxy.proxyBingTerms` | Proxy Bing Terms requests | `true` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><b>Notification Settings</b></summary>
|
||||
|
||||
| Setting | Description | Default |
|
||||
| ------- | ----------- | ------- |
|
||||
| `webhook.enabled` | Enable Discord webhook | `false` |
|
||||
| `webhook.url` | Discord webhook URL | `""` |
|
||||
| `conclusionWebhook.enabled` | Summary-only webhook | `false` |
|
||||
| `conclusionWebhook.url` | Summary webhook URL | `""` |
|
||||
| `ntfy.enabled` | Enable NTFY notifications | `false` |
|
||||
| `ntfy.url` | NTFY server URL | `""` |
|
||||
| `ntfy.topic` | NTFY topic | `rewards` |
|
||||
| `ntfy.authToken` | NTFY auth token | `""` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><b>Logging & Diagnostics</b></summary>
|
||||
|
||||
| Setting | Description | Default |
|
||||
| ------- | ----------- | ------- |
|
||||
| `logging.excludeFunc` | Exclude log categories | `["SEARCH-CLOSE-TABS", "LOGIN-NO-PROMPT", "FLOW"]` |
|
||||
| `logging.webhookExcludeFunc` | Exclude from webhook logs | `["SEARCH-CLOSE-TABS", "LOGIN-NO-PROMPT", "FLOW"]` |
|
||||
| `logging.redactEmails` | Redact email addresses in logs | `true` |
|
||||
| `diagnostics.enabled` | Capture diagnostic data | `true` |
|
||||
| `diagnostics.saveScreenshot` | Save screenshots on failure | `true` |
|
||||
| `diagnostics.saveHtml` | Save HTML on failure | `true` |
|
||||
| `diagnostics.maxPerRun` | Max diagnostics per run | `2` |
|
||||
| `diagnostics.retentionDays` | Days to keep diagnostics | `7` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><b>Analytics</b></summary>
|
||||
|
||||
| Setting | Description | Default |
|
||||
| ------- | ----------- | ------- |
|
||||
| `analytics.enabled` | Performance dashboard tracking | `true` |
|
||||
| `analytics.retentionDays` | Data retention period | `30` |
|
||||
| `analytics.exportMarkdown` | Generate markdown reports | `true` |
|
||||
| `analytics.webhookSummary` | Send analytics via webhook | `true` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><b>Update Settings</b></summary>
|
||||
|
||||
| Setting | Description | Default |
|
||||
| ------- | ----------- | ------- |
|
||||
| `update.git` | Enable git auto-update | `true` |
|
||||
| `update.docker` | Enable docker auto-update | `false` |
|
||||
| `update.scriptPath` | Custom updater script path | `setup/update/update.mjs` |
|
||||
| `update.autoUpdateConfig` | Auto-merge config changes | `true` |
|
||||
| `update.autoUpdateAccounts` | Auto-merge account changes | `true` |
|
||||
|
||||
</details>
|
||||
|
||||
---
|
||||
|
||||
# 📚 Documentation
|
||||
|
||||
For detailed information about configuration, features, and advanced usage, please refer to the documentation in the `docs/` folder. Start with `docs/index.md` for an overview and navigation to specific topics.
|
||||
|
||||
---
|
||||
|
||||
# ⚠️ Disclaimer
|
||||
|
||||
**Use at your own risk.** Automation may violate Microsoft's Terms of Service and can result in suspension or permanent banning of your Microsoft Rewards account. This project is provided **for educational purposes only**. The developer is **not responsible** for any actions taken by Microsoft against your account.
|
||||
|
||||
---
|
||||
|
||||
# 📄 License
|
||||
|
||||
This project is licensed under a **PROPRIETARY** license. See [LICENSE](LICENSE) for details.
|
||||
368
SECURITY.md
Normal file
368
SECURITY.md
Normal file
@@ -0,0 +1,368 @@
|
||||
# 🔐 Security & Privacy Policy# Security & Privacy Guidelines# Security & Privacy Policy
|
||||
|
||||
|
||||
|
||||
<div align="center">
|
||||
|
||||
|
||||
|
||||
**Your data, your control — transparency first**This document describes how the Microsoft Rewards Script V2 handles data, the assumptions we make about your environment, and how to report security concerns. The codebase runs entirely under your control; there is no built-in telemetry or remote service component.Hi there! 👋 Thanks for caring about security and privacy — we do too. This document explains how this project approaches data handling, security practices, and how to report issues responsibly.
|
||||
|
||||
|
||||
|
||||
This document explains how the Microsoft Rewards Script handles your information,
|
||||
|
||||
what we do (and don't do) with it, and how to keep your setup secure.
|
||||
|
||||
---## TL;DR
|
||||
|
||||
</div>
|
||||
|
||||
|
||||
|
||||
---
|
||||
|
||||
## Data Flow Summary- We do not collect, phone-home, or exfiltrate your data. No hidden telemetry. 🚫📡
|
||||
|
||||
## 🎯 TL;DR — The Quick Version
|
||||
|
||||
- Your credentials stay on your machine (or in your container volumes). 🔒
|
||||
|
||||
| Topic | What You Need to Know |
|
||||
|
||||
|:------|:----------------------|- Configuration is loaded from local files (`src/config.jsonc`, `src/accounts.json`) or environment variables you supply.- Sessions/cookies are stored locally to reduce re-login friction. 🍪
|
||||
|
||||
| **📡 Data Collection** | ❌ **None** — No telemetry, no phone-home, no tracking |
|
||||
|
||||
| **🔒 Credentials** | ✅ Stored **locally only** in `src/accounts.json` or environment variables |- The automation layer drives Playwright browsers to interact with Microsoft Rewards. No other network destinations are contacted unless you configure outbound notifications.- Use at your own risk. Microsoft may take action on accounts that use automation.
|
||||
|
||||
| **🍪 Sessions** | ✅ Saved **on your machine** to avoid repeated logins |
|
||||
|
||||
| **🔔 Webhooks** | ⚠️ Optional — **You control** what gets sent where |- Diagnosed artifacts (logs, screenshots, HTML snapshots) are written to local folders within the repository.
|
||||
|
||||
| **🐳 Docker** | ✅ Read-only mounts, no data leaves the container |
|
||||
|
||||
| **⚖️ Terms of Service** | ⚠️ Automation **may violate** Microsoft's ToS — use at your own risk |## What this project does (and doesn’t)
|
||||
|
||||
|
||||
|
||||
---We do not collect or transmit your credentials, points totals, or activity history. All state remains on the host executing the script.
|
||||
|
||||
|
||||
|
||||
## 🔍 What This Script DoesThis is a local automation tool that drives a browser (Playwright) to perform Microsoft Rewards activities. By default:
|
||||
|
||||
|
||||
|
||||
This is a **local automation tool** that drives a browser (Playwright) to complete Microsoft Rewards activities on your behalf.---
|
||||
|
||||
|
||||
|
||||
### ✅ What it does:- It reads configuration from local files (e.g., `src/config.jsonc`, `src/accounts.json`).
|
||||
|
||||
- Reads configuration from local files (`src/config.jsonc`, `src/accounts.json`)
|
||||
|
||||
- Stores session data (cookies, fingerprints) locally under `src/browser/`## Stored Data- It can save session data (cookies and optional fingerprints) locally under `./src/browser/<sessionPath>/<email>/`.
|
||||
|
||||
- Optionally sends notifications to endpoints **you configure**
|
||||
|
||||
- Saves diagnostic logs and screenshots to `reports/` when troubleshooting- It can send optional notifications/webhooks if you enable them and provide a URL.
|
||||
|
||||
|
||||
|
||||
### ❌ What it does NOT do:| Item | Location | Notes |
|
||||
|
||||
- Collect or transmit your data to any third-party service by default
|
||||
|
||||
- Include any hidden telemetry or analytics|------|----------|-------|It does not:
|
||||
|
||||
- Store credentials anywhere except where you specify
|
||||
|
||||
- Run background processes you don't know about| Account credentials | `src/accounts.json` or environment variables | Keep this file out of source control. Use `.gitignore` and restrict file permissions. |
|
||||
|
||||
|
||||
|
||||
---| Configuration | `src/config.jsonc` | Comment-friendly JSONC; may contain secrets (webhook URLs, proxies). |- Send your accounts or secrets to any third-party service by default.
|
||||
|
||||
|
||||
|
||||
## 📦 Where Your Data Lives| Sessions & fingerprints | `src/browser/<sessionPath>/` | Contains cookies and optional device fingerprints used for continuity. Safe to delete if you want fresh sessions. |- Embed any “phone-home” or analytics endpoints.
|
||||
|
||||
|
||||
|
||||
| Data Type | Location | Purpose | Security Notes || Diagnostics | `reports/` (when enabled) | Screenshots, HTML, and logs captured for debugging. Review before sharing. |- Include built-in monetization, miners, or adware. 🚫🐛
|
||||
|
||||
|:----------|:---------|:--------|:--------------|
|
||||
|
||||
| **🔑 Credentials** | `src/accounts.json` or env vars | Login automation | **Keep out of Git!** Add to `.gitignore` |
|
||||
|
||||
| **⚙️ Configuration** | `src/config.jsonc` | Runtime settings | May contain webhook URLs — treat as sensitive |
|
||||
|
||||
| **🍪 Sessions** | `src/browser/<sessionPath>/` | Persist logins | Cookies + fingerprints — delete to reset |When running inside Docker, the compose file mounts `./src/accounts.json`, `./src/config.jsonc`, and `./sessions` as read-only or persistent volumes. No data leaves those mounts unless you explicitly configure additional outputs.## Data handling and storage
|
||||
|
||||
| **📊 Reports** | `reports/` folder | Diagnostics | Screenshots/logs — review before sharing |
|
||||
|
||||
| **🐳 Docker volumes** | Container mounts | Same as above | Read-only where possible |
|
||||
|
||||
|
||||
|
||||
------- Accounts: You control the `accounts.json` file. Keep it safe. Consider environment variables or secrets managers in CI/CD.
|
||||
|
||||
|
||||
|
||||
## 🔐 Keeping Your Setup Secure- Sessions: Cookies are stored locally to speed up login. You can delete them anytime by removing the session folder.
|
||||
|
||||
|
||||
|
||||
### 1️⃣ **Protect Your Credentials**## Credentials & Secrets- Fingerprints: If you enable fingerprint saving, they are saved locally only. Disable this feature if you prefer ephemeral fingerprints.
|
||||
|
||||
|
||||
|
||||
```bash- Logs/Reports: Diagnostic artifacts and daily summaries are written to the local `reports/` directory.
|
||||
|
||||
# ✅ DO: Keep accounts.json out of version control
|
||||
|
||||
echo "src/accounts.json" >> .gitignore- Do not commit `src/accounts.json` or any file containing secrets. The sample `.gitignore` already excludes them; verify your local overrides do the same.- Webhooks/Notifications: If enabled, we send only the minimal information necessary (e.g., summary text, embed fields) to the endpoint you configured.
|
||||
|
||||
|
||||
|
||||
# ✅ DO: Use environment variables in CI/CD- If you use TOTP, the Base32 secret remains local and is only used to respond to Microsoft login challenges.
|
||||
|
||||
export ACCOUNTS_JSON='[{"email":"...","password":"..."}]'
|
||||
|
||||
- For CI or scripted deployments, prefer supplying credentials through environment variables (`ACCOUNTS_JSON` or `ACCOUNTS_FILE`).

Tip: For Docker, mount a dedicated data volume for sessions and reports so you can manage them easily. 📦
|
||||
|
||||
# ❌ DON'T: Commit secrets to GitHub
|
||||
|
||||
# ❌ DON'T: Share accounts.json publicly

- Rotate your Microsoft account passwords and webhook tokens periodically.
|
||||
|
||||
```
|
||||
|
||||
## Credentials and secrets
|
||||
|
||||
### 2️⃣ **Secure Your Configuration**
|
||||
|
||||
---
|
||||
|
||||
- **Webhook URLs** in `config.jsonc` are essentially passwords — anyone with the URL can post to your channel
|
||||
|
||||
- **TOTP secrets** stay local and are only used during Microsoft login challenges

- Do not commit secrets. Use `src/accounts.json` locally or set `ACCOUNTS_JSON`/`ACCOUNTS_FILE` via environment variables when running in containers.
|
||||
|
||||
- **Proxy credentials** should be treated like passwords
|
||||
|
||||
## Notifications- Consider using OS keychains or external secret managers where possible.
|
||||
|
||||
### 3️⃣ **Session Management**
|
||||
|
||||
- TOTP: If you include a Base32 TOTP secret per account, it remains local and is used strictly during login challenge flows.
|
||||
|
||||
```bash
|
||||
|
||||
# Clear sessions if you suspect compromise

Optional integrations (Discord webhooks, NTFY, others you add) send only the payloads you configure. Review each provider’s privacy policy before enabling. Treat webhook URLs as shared secrets; they allow anyone with the URL to post into the channel.
|
||||
|
||||
rm -rf src/browser/sessions/*
|
||||
|
||||
## Buy Mode safety
|
||||
|
||||
# Or in Docker
|
||||
|
||||
docker compose down -v # Removes volumes

---
|
||||
|
||||
```
|
||||
|
||||
Buy Mode opens a monitor tab (read-only points polling) and a separate user tab for your manual actions. The monitor tab doesn’t redeem or click on your behalf — it just reads dashboard data to keep totals up to date. 🛍️
|
||||
|
||||
### 4️⃣ **Docker Best Practices**
|
||||
|
||||
## Recommended Practices
|
||||
|
||||
The included `compose.yaml` already:
|
||||
|
||||
- ✅ Uses read-only mounts for config files

## Responsible disclosure
|
||||
|
||||
- ✅ Runs as non-root user where possible
|
||||
|
||||
- ✅ Limits container privileges- Run the script with least privilege. When using Docker, the provided compose file uses non-root execution and read-only mounts where possible.
|
||||
|
||||
|
||||
|
||||
Additional hardening:- Back up `sessions` only if you understand the contents (cookies, fingerprints). Delete the directory if you suspect compromise or want to reset state.We value coordinated disclosure. If you find a security issue:
|
||||
|
||||
```yaml
|
||||
|
||||
security_opt:- Enable the diagnostics bundle (`docs/diagnostics.md`) only when troubleshooting, and scrub artifacts before sharing.
|
||||
|
||||
- no-new-privileges:true
|
||||
|
||||
cap_drop:- Keep dependencies updated (`npm run build` after `npm install`) to receive security patches for Playwright and transitive packages.1. Please report it privately first via an issue marked “Security” with a note to request contact details, or by contacting the repository owner directly if available.
|
||||
|
||||
- ALL
|
||||
|
||||
```- Review `src/config.jsonc` comments; several fields (e.g., `humanization`, `retryPolicy`) influence how aggressively the automation behaves. Conservative defaults reduce the chance of account flags.2. Provide a minimal reproduction and version info.
|
||||
|
||||
|
||||
|
||||
### 5️⃣ **Regular Maintenance**3. We will acknowledge within a reasonable timeframe and work on a fix. 🙏
|
||||
|
||||
|
||||
|
||||
- 🔄 **Update dependencies:** `npm install && npm run build`---
|
||||
|
||||
- 🔑 **Rotate credentials** periodically
|
||||
|
||||
- 🧹 **Clean diagnostics:** Review and delete `reports/` contentsPlease do not open public issues with sensitive details before we have had a chance to remediate.
|
||||
|
||||
- 🔍 **Monitor logs** for suspicious activity
|
||||
|
||||
## Responsible Disclosure
|
||||
|
||||
---
|
||||
|
||||
## Scope and assumptions
|
||||
|
||||
## 🔔 Notifications & Privacy
|
||||
|
||||
If you discover a vulnerability in this project:
|
||||
|
||||
When you enable Discord webhooks or NTFY:
|
||||
|
||||
- The script sends **only the summary data you configure**- This project is open-source and runs on your infrastructure (local machine or container). You are responsible for host hardening and network policies.
|
||||
|
||||
- No credentials or session data is included
|
||||
|
||||
- The receiving service (Discord, NTFY, etc.) has its own privacy policy1. Privately reach out via a GitHub issue tagged “Security” requesting a direct contact channel, or message a maintainer through the listed GitHub profile if available.- Automation can violate terms of service. You assume all responsibility for how you use this tool.
|
||||
|
||||
|
||||
|
||||
**Control what gets sent:**2. Include a minimal reproduction, environment details, and the commit or release you tested.- Browsers and dependencies evolve. Keep the project and your runtime up to date.
|
||||
|
||||
```jsonc
|
||||
|
||||
// In config.jsonc3. Allow a reasonable window for investigation and remediation before publishing details.
|
||||
|
||||
"notifications": {
|
||||
|
||||
"conclusionWebhook": {## Dependency and update policy
|
||||
|
||||
"enabled": false, // Disable to send nothing
|
||||
|
||||
"url": "https://discord.com/api/webhooks/..."We appreciate coordinated disclosure and will credit contributions in the changelog when permitted.
|
||||
|
||||
}
|
||||
|
||||
}- We pin key dependencies where practical and avoid risky postinstall scripts in production builds.
|
||||
|
||||
```
|
||||
|
||||
---- Periodic updates are encouraged. The project includes an optional auto-update helper. Review changes before enabling in sensitive environments.
|
||||
|
||||
---
|
||||
|
||||
- Use Playwright official images when running in containers to receive timely browser security updates. 🛡️
|
||||
|
||||
## 🛡️ Responsible Use Guidelines
|
||||
|
||||
## Scope & Assumptions
|
||||
|
||||
### ✅ **Good Practices**
|
||||
|
||||
- Run with least privileges (avoid root/admin unless needed)## Safe use guidelines
|
||||
|
||||
- Use environment variables for secrets in production
|
||||
|
||||
- Keep the repository and dependencies updated- The project runs on infrastructure you control. Host hardening, firewall rules, and secret storage are your responsibility.
|
||||
|
||||
- Review code changes before pulling updates
|
||||
|
||||
- Monitor account health regularly- Automation may violate Microsoft’s terms of service. Use at your own risk; the maintainers are not liable for account actions Microsoft may take.- Run with least privileges. In Docker, prefer non-root where feasible and set `no-new-privileges` if supported.
|
||||
|
||||
|
||||
|
||||
### ⚠️ **Risk Awareness**- Playwright and Chromium evolve quickly. Rebuild after dependency updates and monitor upstream advisories.- Limit outbound network access if your threat model requires it.
|
||||
|
||||
- **Microsoft ToS:** Automation violates their terms — accounts may be suspended
|
||||
|
||||
- **Rate limiting:** Aggressive settings increase ban risk- Rotate credentials periodically and revoke unused secrets.
|
||||
|
||||
- **Shared environments:** Don't run on untrusted machines
|
||||
|
||||
- **Network exposure:** Limit outbound connections if your threat model requires it---- Clean up diagnostics and reports if they contain sensitive metadata.
|
||||
|
||||
|
||||
|
||||
---
|
||||
|
||||
|
||||
|
||||
## 🐛 Vulnerability Reporting

## Contact

## Privacy statement
|
||||
|
||||
|
||||
|
||||
We value security research and coordinated disclosure.
|
||||
|
||||
|
||||
|
||||
### 📧 **How to Report**

Open a GitHub issue labeled “Security” or reach out to the repository owner if you require a private communication channel. Provide as much diagnostic context as you can share safely.

We don’t collect personal data. The repository does not embed analytics. Any processing done by this tool happens locally or against the Microsoft endpoints it drives on your behalf.
|
||||
|
||||
|
||||
|
||||
1. **Privately open a GitHub issue** labeled "Security"
|
||||
|
||||
2. **Include:**
|
||||
|
||||
- Description of the vulnerabilityStay safe, and automate responsibly.If you enable third-party notifications (Discord, NTFY, etc.), data sent there is under your control and subject to those services’ privacy policies.
|
||||
|
||||
- Steps to reproduce
|
||||
|
||||
- Affected versions/commits
|
||||
|
||||
- Suggested remediation (if any)## Contact
|
||||
|
||||
3. **Give us time** to investigate and patch before public disclosure
|
||||
|
||||
To report a security issue or ask a question, please open an issue with the “Security” label and we’ll follow up with a private channel. You can also reach out to the project owner/maintainers via GitHub if contact details are listed. 💬
|
||||
|
||||
### 🏆 **Recognition**
|
||||
|
||||
Security contributors will be credited in the changelog (with permission).

— Stay safe and have fun automating! ✨
|
||||
|
||||
|
||||
---
|
||||
|
||||
## 📋 Security Checklist
|
||||
|
||||
<details>
|
||||
<summary><strong>🔒 Click to expand the complete security checklist</strong></summary>
|
||||
|
||||
- [ ] `src/accounts.json` is in `.gitignore`
|
||||
- [ ] File permissions restrict access to sensitive configs
|
||||
- [ ] Using TOTP for 2FA (reduces password-only exposure)
|
||||
- [ ] Webhook URLs treated as secrets
|
||||
- [ ] Sessions folder backed up securely (if at all)
|
||||
- [ ] Running with minimal privileges
|
||||
- [ ] Docker using read-only mounts where possible
|
||||
- [ ] Dependencies updated regularly
|
||||
- [ ] Diagnostic reports reviewed before sharing
|
||||
- [ ] Monitoring for unusual account activity
|
||||
|
||||
</details>
|
||||
|
||||
---
|
||||
|
||||
## 📞 Contact
|
||||
|
||||
- **Security issues:** Open a GitHub issue with "Security" label
|
||||
- **General support:** [Discord community](https://discord.gg/kn3695Kx32)
|
||||
- **Bug reports:** [GitHub Issues](https://github.com/LightZirconite/Microsoft-Rewards-Rewi/issues)
|
||||
|
||||
---
|
||||
|
||||
<div align="center">
|
||||
|
||||
**Stay safe, automate responsibly** ✨
|
||||
|
||||
---
|
||||
|
||||
*Last updated: 2025*
|
||||
|
||||
</div>
|
||||
42
compose.yaml
Normal file
42
compose.yaml
Normal file
@@ -0,0 +1,42 @@
|
||||
services:
  microsoft-rewards-script:
    build: .
    container_name: microsoft-rewards-script
    restart: unless-stopped

    # Volume mounts: Specify a location where you want to save the files on your local machine.
    # NOTE(review): docs/accounts.md and the security guide refer to `src/accounts.json`,
    # but this mount uses `src/accounts.jsonc` — confirm which filename the loader
    # actually expects and align the docs or this mount accordingly.
    volumes:
      - ./src/accounts.jsonc:/usr/src/microsoft-rewards-script/src/accounts.jsonc:ro
      - ./src/config.jsonc:/usr/src/microsoft-rewards-script/src/config.jsonc:ro
      - ./sessions:/usr/src/microsoft-rewards-script/sessions # Optional, saves your login session

    environment:
      TZ: "America/Toronto" # Set your timezone for proper scheduling (used by image and scheduler)
      NODE_ENV: "production"
      # Force headless when running in Docker (uses Chromium Headless Shell only)
      FORCE_HEADLESS: "1"

      # ============================================================
      # SCHEDULING MODE: Choose one
      # ============================================================
      # Option 1: Built-in JavaScript Scheduler (default, recommended)
      # - No additional setup needed
      # - Uses config.jsonc schedule settings
      # - Lighter resource usage
      #SCHEDULER_DAILY_JITTER_MINUTES_MIN: "2"
      #SCHEDULER_DAILY_JITTER_MINUTES_MAX: "10"
      #SCHEDULER_PASS_TIMEOUT_MINUTES: "180"
      #SCHEDULER_FORK_PER_PASS: "true"

      # Option 2: Native Cron (for users who prefer traditional cron)
      # Uncomment these lines to enable cron instead:
      #USE_CRON: "true"
      #CRON_SCHEDULE: "0 9 * * *" # Daily at 9 AM (see https://crontab.guru)
      #RUN_ON_START: "true" # Run once immediately on container start

    # Security hardening
    security_opt:
      - no-new-privileges:true

    # Default: use built-in scheduler (entrypoint handles mode selection)
    command: ["npm", "run", "start:schedule"]
|
||||
66
docker-entrypoint.sh
Normal file
66
docker-entrypoint.sh
Normal file
@@ -0,0 +1,66 @@
|
||||
#!/bin/bash
set -e

# Docker entrypoint with optional cron support.
#
# Usage:
#   Default (built-in scheduler): container CMD, e.g. `npm run start:schedule`
#   Cron mode:                    set USE_CRON=true (optionally CRON_SCHEDULE
#                                 and RUN_ON_START)

# If USE_CRON is set, configure cron instead of using the built-in scheduler
if [ "$USE_CRON" = "true" ] || [ "$USE_CRON" = "1" ]; then
    echo "==> Cron mode enabled"

    # Default cron schedule if not provided (daily at 9 AM)
    CRON_SCHEDULE="${CRON_SCHEDULE:-0 9 * * *}"

    # Install cron only when it is missing (requires root + network access).
    # Skipping the install when cron is already baked into the image keeps
    # restarts fast and lets the container start without network access.
    if ! command -v cron > /dev/null 2>&1; then
        echo "==> Installing cron..."
        apt-get update -qq && apt-get install -y -qq cron > /dev/null 2>&1
    fi

    echo "==> Setting up cron schedule: $CRON_SCHEDULE"

    # Cron runs jobs with an almost empty environment, so re-export the
    # variables the script needs inside the job itself. Each value is
    # shell-quoted with printf %q: ACCOUNTS_JSON in particular contains
    # spaces/quotes (it is JSON) that would break a naive
    # `printenv | tr '\n' ';'` approach.
    ENV_VARS=""
    for var_name in TZ NODE_ENV FORCE_HEADLESS PLAYWRIGHT_BROWSERS_PATH ACCOUNTS_JSON ACCOUNTS_FILE; do
        if printenv "$var_name" > /dev/null 2>&1; then
            ENV_VARS="${ENV_VARS}export ${var_name}=$(printf '%q' "$(printenv "$var_name")"); "
        fi
    done

    # Cron job that runs the script and appends all output to the shared log
    CRON_JOB="$CRON_SCHEDULE cd /usr/src/microsoft-rewards-script && $ENV_VARS node --enable-source-maps ./dist/index.js >> /var/log/cron.log 2>&1"

    # Install via `crontab -` (user-crontab format: no username column).
    # Writing this same line into /etc/cron.d, as an earlier revision did,
    # makes the cron daemon log parse errors because /etc/cron.d entries
    # require an extra username field.
    echo "$CRON_JOB" | crontab -

    # Create the log file up front so tail -f below has something to follow
    touch /var/log/cron.log

    echo "==> Cron job installed:"
    echo "    Schedule: $CRON_SCHEDULE"
    echo "    Command: node --enable-source-maps ./dist/index.js"
    echo "    Logs: /var/log/cron.log"
    echo ""

    # Run once immediately if requested
    if [ "$RUN_ON_START" = "true" ] || [ "$RUN_ON_START" = "1" ]; then
        echo "==> Running initial execution (RUN_ON_START=true)..."
        cd /usr/src/microsoft-rewards-script
        node --enable-source-maps ./dist/index.js 2>&1 | tee -a /var/log/cron.log
        echo "==> Initial execution completed"
        echo ""
    fi

    echo "==> Starting cron daemon..."
    echo "==> Container ready. Cron will execute: $CRON_SCHEDULE"
    echo "==> View logs: docker logs -f <container>"
    echo ""

    # Start cron in the background, then keep the container's foreground
    # process on the log tail (exec so PID 1 receives container signals).
    cron
    exec tail -f /var/log/cron.log
else
    echo "==> Using built-in scheduler (JavaScript)"
    echo "==> To use cron instead, set USE_CRON=true"
    echo ""

    # Execute passed command (default: npm run start:schedule)
    exec "$@"
fi
|
||||
168
docs/accounts.md
Normal file
168
docs/accounts.md
Normal file
@@ -0,0 +1,168 @@
|
||||
# 👤 Accounts & 2FA Setup
|
||||
|
||||
**Add your Microsoft accounts with secure TOTP authentication**
|
||||
|
||||
---
|
||||
|
||||
## 📍 Quick Start
|
||||
|
||||
### Basic Setup (No 2FA)
|
||||
|
||||
**Edit** `src/accounts.json`:
|
||||
```json
|
||||
{
|
||||
"accounts": [
|
||||
{
|
||||
"email": "your@email.com",
|
||||
"password": "your_password",
|
||||
"recoveryEmail": "backup@email.com"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
> ℹ️ `recoveryEmail` is **mandatory**. It lets the bot verify Microsoft’s masked hint during login and alert you if the recovery address ever changes.
|
||||
|
||||
**That's it!** Run `npm start` to test.
|
||||
|
||||
---
|
||||
|
||||
## 🔐 Add 2FA/TOTP (Recommended)
|
||||
|
||||
### Why Use TOTP?
|
||||
- ✅ **Automated login** — No manual code entry
|
||||
- ✅ **More secure** — Better than SMS
|
||||
- ✅ **Works 24/7** — Scheduler-friendly
|
||||
|
||||
### How to Get Your TOTP Secret
|
||||
|
||||
1. **Open** https://account.live.com/proofs/Manage/additional (Security → Advanced security options → Additional security).
|
||||
2. Enable two-step verification and click **Next** until you see the setup wizard.
|
||||
3. Click the blue link **"Set up a different authenticator app"**.
|
||||
4. On the next screen click **"I can't scan the bar code"** to reveal the Base32 secret.
|
||||
5. Scan the QR with your preferred authenticator (Google Authenticator recommended to keep data separate from Microsoft) **and** copy the secret:
|
||||
- The same secret can stay in your app and be saved in this file (multiple authenticators can share it).
|
||||
6. Enter the 6-digit code in Microsoft’s wizard to finish pairing.
|
||||
7. **Add the secret to** `accounts.json`:
|
||||
|
||||
```json
|
||||
{
|
||||
"accounts": [
|
||||
{
|
||||
"email": "your@email.com",
|
||||
"password": "your_password",
|
||||
"recoveryEmail": "backup@email.com",
|
||||
"totp": "JBSWY3DPEHPK3PXP"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 🎯 Multiple Accounts
|
||||
|
||||
```json
|
||||
{
|
||||
"accounts": [
|
||||
{
|
||||
"email": "account1@email.com",
|
||||
"password": "password1",
|
||||
"recoveryEmail": "backup1@email.com",
|
||||
"totp": "SECRET1"
|
||||
},
|
||||
{
|
||||
"email": "account2@email.com",
|
||||
"password": "password2",
|
||||
"recoveryEmail": "backup2@email.com",
|
||||
"totp": "SECRET2"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 🌐 Per-Account Proxy (Optional)
|
||||
|
||||
```json
|
||||
{
|
||||
"accounts": [
|
||||
{
|
||||
"email": "your@email.com",
|
||||
"password": "password",
|
||||
"recoveryEmail": "backup@email.com",
|
||||
"totp": "",
|
||||
"proxy": {
|
||||
"proxyAxios": true,
|
||||
"url": "proxy.example.com",
|
||||
"port": 8080,
|
||||
"username": "proxyuser",
|
||||
"password": "proxypass"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
→ **[Full Proxy Guide](./proxy.md)**
|
||||
|
||||
---
|
||||
|
||||
## 🔒 Environment Variables (Docker/CI)
|
||||
|
||||
### Option 1: File Path
|
||||
```bash
|
||||
export ACCOUNTS_FILE=/path/to/accounts.json
|
||||
```
|
||||
|
||||
### Option 2: Inline JSON
|
||||
```bash
|
||||
export ACCOUNTS_JSON='{"accounts":[{"email":"test@example.com","password":"pass"}]}'
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 🛠️ Troubleshooting
|
||||
|
||||
| Problem | Solution |
|
||||
|---------|----------|
|
||||
| **"accounts.json not found"** | Create file or set `ACCOUNTS_FILE` env var |
|
||||
| **"2FA prompt not auto-filled"** | Check TOTP secret is valid Base32 |
|
||||
| **"Invalid TOTP"** | Verify system time is correct |
|
||||
| **"Account locked"** | Manually unlock in Microsoft Account |
|
||||
| **"Login timeout"** | Check internet connection, try proxy |
|
||||
|
||||
### 2FA Not Working?
|
||||
|
||||
1. **Check secret format** — Should be Base32 (only letters/numbers, no spaces)
|
||||
2. **Verify system time** — Must be accurate (NTP sync)
|
||||
3. **Test manually** — Use authenticator app to verify code works
|
||||
4. **Remove backup codes** — Some security settings block TOTP
|
||||
|
||||
---
|
||||
|
||||
## 🔒 Security Tips
|
||||
|
||||
- 🔐 **Use strong passwords** — Unique for each account
|
||||
- 🔑 **Enable TOTP** — More secure than SMS
|
||||
- 📁 **Restrict file permissions** — `chmod 600 accounts.json` (Linux)
|
||||
- 🔄 **Rotate passwords** — Change every 90 days
|
||||
- 🚫 **Never commit** — Add `accounts.json` to `.gitignore`
|
||||
|
||||
---
|
||||
|
||||
## 📚 Next Steps
|
||||
|
||||
**TOTP setup?**
|
||||
→ **[Security Guide](./security.md)** for best practices
|
||||
|
||||
**Ready for automation?**
|
||||
→ **[Scheduler Setup](./schedule.md)**
|
||||
|
||||
**Need proxies?**
|
||||
→ **[Proxy Guide](./proxy.md)**
|
||||
|
||||
---
|
||||
|
||||
**[← Back to Hub](./index.md)** | **[Getting Started](./getting-started.md)**
|
||||
108
docs/buy-mode.md
Normal file
108
docs/buy-mode.md
Normal file
@@ -0,0 +1,108 @@
|
||||
# 💳 Buy Mode
|
||||
|
||||
**Manually redeem rewards while monitoring points**
|
||||
|
||||
---
|
||||
|
||||
## 💡 What Is It?
|
||||
|
||||
Launches browser and **passively monitors** your points balance while you manually shop/redeem.
|
||||
|
||||
**Use case:** Safely redeem gift cards without automation interference.
|
||||
|
||||
---
|
||||
|
||||
## ⚡ Quick Start
|
||||
|
||||
```bash
|
||||
npm start -- -buy your@email.com
|
||||
```
|
||||
|
||||
**What happens:**
|
||||
1. Opens 2 browser tabs:
|
||||
- **Monitor tab** — Background point tracking (auto-refresh)
|
||||
- **Your tab** — Use this for manual purchases
|
||||
2. Monitors points every ~10 seconds
|
||||
3. Alerts you when spending detected
|
||||
|
||||
---
|
||||
|
||||
## 🎯 Example Usage
|
||||
|
||||
### Redeem Gift Card
|
||||
|
||||
```bash
|
||||
npm start -- -buy myaccount@outlook.com
|
||||
```
|
||||
|
||||
1. Script opens Microsoft Rewards in browser
|
||||
2. Use the **user tab** to browse and redeem
|
||||
3. **Monitor tab** tracks your balance in background
|
||||
4. Get notification when points decrease
|
||||
|
||||
---
|
||||
|
||||
## ⚙️ Configuration
|
||||
|
||||
**Set max session time:**
|
||||
|
||||
**Edit** `src/config.jsonc`:
|
||||
```jsonc
|
||||
{
|
||||
"buyMode": {
|
||||
"enabled": false,
|
||||
"maxMinutes": 45
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 🔔 Notifications
|
||||
|
||||
Buy mode sends alerts when:
|
||||
- 💳 **Points spent** — Shows amount and new balance
|
||||
- 📉 **Balance changes** — Tracks cumulative spending
|
||||
|
||||
**Example alert:**
|
||||
```
|
||||
💳 Spend detected (Buy Mode)
|
||||
Account: user@email.com
|
||||
Spent: -500 points
|
||||
Current: 12,500 points
|
||||
Session spent: 1,200 points
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 🛠️ Troubleshooting
|
||||
|
||||
| Problem | Solution |
|
||||
|---------|----------|
|
||||
| **Monitor tab closes** | Script auto-reopens it |
|
||||
| **No spending alerts** | Check webhook/NTFY config |
|
||||
| **Session too short** | Increase `maxMinutes` in config |
|
||||
|
||||
---
|
||||
|
||||
## ⚠️ Important Notes
|
||||
|
||||
- ✅ **Browser visible** — Always runs in visible mode
|
||||
- ✅ **No automation** — Script only monitors, never clicks
|
||||
- ✅ **Safe** — Use your browsing tab normally
|
||||
- ✅ **Notifications** — Uses existing webhook/NTFY settings
|
||||
|
||||
---
|
||||
|
||||
## 📚 Next Steps
|
||||
|
||||
**Setup notifications?**
|
||||
→ **[Discord Webhooks](./conclusionwebhook.md)**
|
||||
→ **[NTFY Push](./ntfy.md)**
|
||||
|
||||
**Back to automation?**
|
||||
→ **[Getting Started](./getting-started.md)**
|
||||
|
||||
---
|
||||
|
||||
**[← Back to Hub](./index.md)** | **[Config Guide](./config.md)**
|
||||
122
docs/conclusionwebhook.md
Normal file
122
docs/conclusionwebhook.md
Normal file
@@ -0,0 +1,122 @@
|
||||
# 📊 Discord Webhooks
|
||||
|
||||
**Get run summaries in Discord**
|
||||
|
||||
---
|
||||
|
||||
## 💡 What Is It?
|
||||
|
||||
Sends a **rich embed** to your Discord server after each run with:
|
||||
- 📊 Total accounts processed
|
||||
- 💎 Points earned
|
||||
- ⏱️ Execution time
|
||||
- ❌ Errors encountered
|
||||
|
||||
---
|
||||
|
||||
## ⚡ Quick Start
|
||||
|
||||
### 1. Create Webhook in Discord
|
||||
|
||||
1. **Open Discord** → Right-click channel
|
||||
2. **Edit Channel** → **Integrations** tab
|
||||
3. **Create Webhook**
|
||||
4. **Copy webhook URL**
|
||||
|
||||
### 2. Configure Script
|
||||
|
||||
**Edit** `src/config.jsonc`:
|
||||
```jsonc
|
||||
{
|
||||
"notifications": {
|
||||
"conclusionWebhook": {
|
||||
"enabled": true,
|
||||
"url": "https://discord.com/api/webhooks/123456789/abcdef-your-webhook-token"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**That's it!** You'll get a summary after each run.
|
||||
|
||||
---
|
||||
|
||||
## 📋 Example Summary
|
||||
|
||||
```
|
||||
🎯 Microsoft Rewards Summary
|
||||
|
||||
📊 Accounts: 3 • 0 with issues
|
||||
💎 Points: 15,230 → 16,890 (+1,660)
|
||||
⏱️ Average Duration: 8m 32s
|
||||
📈 Cumulative Runtime: 25m 36s
|
||||
|
||||
👤 user1@example.com
|
||||
Points: 5,420 → 6,140 (+720)
|
||||
Duration: 7m 23s
|
||||
Status: ✅ Completed successfully
|
||||
|
||||
👤 user2@example.com
|
||||
Points: 4,810 → 5,750 (+940)
|
||||
Duration: 9m 41s
|
||||
Status: ✅ Completed successfully
|
||||
|
||||
👤 user3@example.com
|
||||
Points: 5,000 → 5,000 (+0)
|
||||
Duration: 8m 32s
|
||||
Status: ✅ Completed successfully
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 🎯 Advanced: Separate Channels
|
||||
|
||||
Use different webhooks for different notifications:
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"notifications": {
|
||||
"webhook": {
|
||||
"enabled": true,
|
||||
"url": "https://discord.com/api/webhooks/.../errors-channel"
|
||||
},
|
||||
"conclusionWebhook": {
|
||||
"enabled": true,
|
||||
"url": "https://discord.com/api/webhooks/.../summary-channel"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
- **`webhook`** — Real-time errors during execution
|
||||
- **`conclusionWebhook`** — End-of-run summary
|
||||
|
||||
---
|
||||
|
||||
## 🛠️ Troubleshooting
|
||||
|
||||
| Problem | Solution |
|
||||
|---------|----------|
|
||||
| **No message received** | Check webhook URL is complete |
|
||||
| **"Invalid webhook"** | Regenerate webhook in Discord |
|
||||
| **Partial data** | Ensure script completed fully |
|
||||
|
||||
### Test Webhook Manually
|
||||
|
||||
```bash
|
||||
curl -X POST -H "Content-Type: application/json" -d '{"content":"Test message"}' "YOUR_WEBHOOK_URL"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 📚 Next Steps
|
||||
|
||||
**Want mobile alerts?**
|
||||
→ **[NTFY Push Notifications](./ntfy.md)**
|
||||
|
||||
**Need detailed logs?**
|
||||
→ **[Diagnostics Guide](./diagnostics.md)**
|
||||
|
||||
---
|
||||
|
||||
**[← Back to Hub](./index.md)** | **[Config Guide](./config.md)**
|
||||
605
docs/config.md
Normal file
605
docs/config.md
Normal file
@@ -0,0 +1,605 @@
|
||||
# ⚙️ Configuration Guide
|
||||
|
||||
|
||||
|
||||
**Customize script behavior in `src/config.jsonc`**

This page documents every field in the configuration file. The default ships as `src/config.jsonc` so you get inline `//` guidance without editor warnings, and the loader still accepts traditional `config.json` files if you prefer plain JSON.
|
||||
|
||||
|
||||
|
||||
---

Looking for ready-to-use presets? Check `docs/config-presets/` for curated examples such as `balanced.jsonc` (full automation with humanization) and `minimal.jsonc` (lean runs with quick scheduling).
|
||||
|
||||
|
||||
|
||||
## ⚡ Quick Start (Essentials)

> NOTE: Previous versions had `logging.live` (live streaming webhook); it was removed and replaced by a simple `logging.redactEmails` flag.
|
||||
|
||||
|
||||
|
||||
### Minimal Working Config

---
|
||||
|
||||
## Top-Level Fields
|
||||
|
||||
```jsonc
|
||||
|
||||
{### baseURL
|
||||
|
||||
"humanization": {Internal Microsoft Rewards base. Leave it unless you know what you are doing.
|
||||
|
||||
"enabled": true // Natural behavior (recommended)
|
||||
|
||||
},### sessionPath
|
||||
|
||||
"workers": {Directory where session data (cookies / fingerprints / job-state) is stored.
|
||||
|
||||
"doDailySet": true,
|
||||
|
||||
"doDesktopSearch": true,---
|
||||
|
||||
"doMobileSearch": true## browser
|
||||
|
||||
}| Key | Type | Default | Description |
|
||||
|
||||
}|-----|------|---------|-------------|
|
||||
|
||||
```| headless | boolean | false | Run browser UI-less. Set to `false` to keep the browser visible (default). |
|
||||
|
||||
| globalTimeout | string/number | "30s" | Max time for common Playwright operations. Accepts ms number or time string (e.g. `"45s"`, `"2min"`). |
|
||||
|
||||
**That's all you need!** Everything else has good defaults.
|
||||
|
||||
---
|
||||
|
||||
---## execution
|
||||
|
||||
| Key | Type | Default | Description |
|
||||
|
||||
## 🎯 Popular Configurations|-----|------|---------|-------------|
|
||||
|
||||
| parallel | boolean | false | Run desktop + mobile simultaneously (higher resource usage). |
|
||||
|
||||
### 1. Daily Automation| runOnZeroPoints | boolean | false | Skip full run early if there are zero points available (saves time). |
|
||||
|
||||
| clusters | number | 1 | Number of process clusters (multi-process concurrency). |
|
||||
|
||||
```jsonc| passesPerRun | number | 1 | Advanced: extra full passes per started run. |
|
||||
|
||||
{
|
||||
|
||||
"humanization": { "enabled": true },---
|
||||
|
||||
"schedule": {## buyMode
|
||||
|
||||
"enabled": true,Manual redeem / purchase assistance.
|
||||
|
||||
"time": "09:00",| Key | Type | Default | Description |
|
||||
|
||||
"timeZone": "America/New_York"|-----|------|---------|-------------|
|
||||
|
||||
}| enabled (CLI `-buy`) | boolean | false | Enable buy mode (usually via CLI argument). |
|
||||
|
||||
}| maxMinutes | number | 45 | Max session length for buy mode. |
|
||||
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
→ **[Full Scheduler Guide](./schedule.md)**## fingerprinting.saveFingerprint
|
||||
|
||||
Persist browser fingerprints per device type for consistency.
|
||||
|
||||
---| Key | Type | Default | Description |
|
||||
|
||||
|-----|------|---------|-------------|
|
||||
|
||||
### 2. With Notifications| mobile | boolean | false | Save/reuse a consistent mobile fingerprint. |
|
||||
|
||||
| desktop | boolean | false | Save/reuse a consistent desktop fingerprint. |
|
||||
|
||||
```jsonc
|
||||
|
||||
{---
|
||||
|
||||
"humanization": { "enabled": true },## search
|
||||
|
||||
"conclusionWebhook": {| Key | Type | Default | Description |
|
||||
|
||||
"enabled": true,|-----|------|---------|-------------|
|
||||
|
||||
"url": "https://discord.com/api/webhooks/YOUR_WEBHOOK"| useLocalQueries | boolean | false | Use locale-specific query sources instead of global ones. |
|
||||
|
||||
}
|
||||
|
||||
}### search.settings
|
||||
|
||||
```| Key | Type | Default | Description |
|
||||
|
||||
|-----|------|---------|-------------|
|
||||
|
||||
→ **[Discord Setup](./conclusionwebhook.md)** | **[NTFY Setup](./ntfy.md)**| useGeoLocaleQueries | boolean | false | Blend geo / locale into chosen queries. |
|
||||
|
||||
| scrollRandomResults | boolean | true | Random scroll during search pages to look natural. |
|
||||
|
||||
---| clickRandomResults | boolean | true | Occasionally click safe results. |
|
||||
|
||||
| retryMobileSearchAmount | number | 2 | Retries if mobile searches didn’t yield points. |
|
||||
|
||||
### 3. Background Mode (Headless)| delay.min / delay.max | string/number | 3–5min | Delay between searches (ms or time string). |
|
||||
|
||||
|
||||
|
||||
```jsonc---
|
||||
|
||||
{## humanization
|
||||
|
||||
"browser": {Human‑like behavior simulation.
|
||||
|
||||
"headless": true| Key | Type | Default | Description |
|
||||
|
||||
},|-----|------|---------|-------------|
|
||||
|
||||
"humanization": { "enabled": true }| enabled | boolean | true | Global on/off. |
|
||||
|
||||
}| stopOnBan | boolean | true | Stop processing further accounts if a ban is detected. |
|
||||
|
||||
```| immediateBanAlert | boolean | true | Fire notification immediately upon ban detection. |
|
||||
|
||||
| actionDelay.min/max | number/string | 150–450ms | Random micro-delay per action. |
|
||||
|
||||
**Note:** Set `headless: false` to see the browser during development.| gestureMoveProb | number | 0.4 | Probability of a small mouse move gesture. |
|
||||
|
||||
| gestureScrollProb | number | 0.2 | Probability of a small scroll gesture. |
|
||||
|
||||
---| allowedWindows | string[] | [] | Local time windows (e.g. `["08:30-11:00","19:00-22:00"]`). Outside windows, run waits. |
|
||||
|
||||
|
||||
|
||||
### 4. Skip When No Points---
|
||||
|
||||
## vacation
|
||||
|
||||
```jsoncRandom contiguous block of days off per month.
|
||||
|
||||
{| Key | Type | Default | Description |
|
||||
|
||||
"execution": {|-----|------|---------|-------------|
|
||||
|
||||
"runOnZeroPoints": false| enabled | boolean | false | Activate monthly break behavior. |
|
||||
|
||||
}| minDays | number | 3 | Minimum skipped days per month. |
|
||||
|
||||
}| maxDays | number | 5 | Maximum skipped days per month. |
|
||||
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Saves time by skipping accounts with 0 available points.## retryPolicy
|
||||
|
||||
Generic transient retry/backoff.
|
||||
|
||||
---| Key | Type | Default | Description |
|
||||
|
||||
|-----|------|---------|-------------|
|
||||
|
||||
### 5. Multiple Accounts Faster| maxAttempts | number | 3 | Max tries for retryable blocks. |
|
||||
|
||||
| baseDelay | number | 1000 | Initial delay in ms. |
|
||||
|
||||
```jsonc| maxDelay | number/string | 30s | Max backoff delay. |
|
||||
|
||||
{| multiplier | number | 2 | Exponential backoff multiplier. |
|
||||
|
||||
"execution": {| jitter | number | 0.2 | Randomization factor (0..1). |
|
||||
|
||||
"parallel": true, // Desktop + mobile simultaneously
|
||||
|
||||
"clusters": 1 // Process multiple accounts in parallel---
|
||||
|
||||
}## workers
|
||||
|
||||
}Enable/disable scripted task categories.
|
||||
|
||||
```| Key | Default | Description |
|
||||
|
||||
|-----|---------|-------------|
|
||||
|
||||
⚠️ **Higher detection risk** with parallel execution.| doDailySet | true | Daily set activities. |
|
||||
|
||||
| doMorePromotions | true | Promotional tasks. |
|
||||
|
||||
---| doPunchCards | true | Punch card flows. |
|
||||
|
||||
| doDesktopSearch | true | Desktop searches. |
|
||||
|
||||
## 🛡️ Anti-Ban Settings| doMobileSearch | true | Mobile searches. |
|
||||
|
||||
| doDailyCheckIn | true | Daily check-in. |
|
||||
|
||||
### Humanization (Recommended)| doReadToEarn | true | Reading tasks. |
|
||||
|
||||
| bundleDailySetWithSearch | false | Immediately start desktop search bundle after daily set. |
|
||||
|
||||
```jsonc
|
||||
|
||||
{---
|
||||
|
||||
"humanization": {## proxy
|
||||
|
||||
"enabled": true,| Key | Default | Description |
|
||||
|
||||
"actionDelay": { "min": 150, "max": 450 },|-----|---------|-------------|
|
||||
|
||||
"gestureMoveProb": 0.4,| proxyGoogleTrends | true | Route Google Trends fetch through proxy if set. |
|
||||
|
||||
"gestureScrollProb": 0.2,| proxyBingTerms | true | Route Bing query source fetch through proxy if set. |
|
||||
|
||||
"randomOffDaysPerWeek": 1
|
||||
|
||||
}---
|
||||
|
||||
}## notifications
|
||||
|
||||
```Manages notification channels (Discord webhooks, NTFY, etc.).
|
||||
|
||||
|
||||
|
||||
→ **[Full Humanization Guide](./humanization.md)**### notifications.webhook
|
||||
|
||||
Primary webhook (can be used for summary or generic messages).
|
||||
|
||||
---| Key | Default | Description |
|
||||
|
||||
|-----|---------|-------------|
|
||||
|
||||
### Vacation Mode| enabled | false | Allow sending webhook-based notifications and live log streaming. |
|
||||
|
||||
| url | "" | Webhook endpoint. |
|
||||
|
||||
```jsonc
|
||||
|
||||
{### notifications.conclusionWebhook
|
||||
|
||||
"vacation": {Rich end-of-run summary (if enabled separately).
|
||||
|
||||
"enabled": true,| Key | Default | Description |
|
||||
|
||||
"minDays": 3,|-----|---------|-------------|
|
||||
|
||||
"maxDays": 5| enabled | false | Enable run summary posting. |
|
||||
|
||||
}| url | "" | Webhook endpoint. |
|
||||
|
||||
}
|
||||
|
||||
```### notifications.ntfy
|
||||
|
||||
Lightweight push notifications.
|
||||
|
||||
Skips 3-5 random consecutive days per month.| Key | Default | Description |
|
||||
|
||||
|-----|---------|-------------|
|
||||
|
||||
---| enabled | false | Enable NTFY push. |
|
||||
|
||||
| url | "" | Base NTFY server URL (e.g. https://ntfy.sh). |
|
||||
|
||||
## 🔧 Advanced Options| topic | rewards | Topic/channel name. |
|
||||
|
||||
| authToken | "" | Bearer token if your server requires auth. |
|
||||
|
||||
<details>
|
||||
|
||||
<summary><strong>Click to expand all options</strong></summary>---
|
||||
|
||||
## logging
|
||||
|
||||
### Browser| Key | Type | Description |
|
||||
|
||||
|-----|------|-------------|
|
||||
|
||||
```jsonc| excludeFunc | string[] | Log buckets suppressed in console + any webhook usage. |
|
||||
|
||||
{| webhookExcludeFunc | string[] | Buckets suppressed specifically for webhook output. |
|
||||
|
||||
"browser": {| redactEmails | boolean | If true, email addresses are partially masked in logs. |
|
||||
|
||||
"headless": false,| liveWebhookUrl | string | Optional override URL for live log streaming (falls back to `notifications.webhook.url`). |
|
||||
|
||||
"globalTimeout": "30s"
|
||||
|
||||
}_Removed fields_: `live.enabled`, `live.url`, `live.redactEmails` — replaced by `redactEmails` only.
|
||||
|
||||
}
|
||||
|
||||
```---
|
||||
|
||||
## diagnostics
|
||||
|
||||
### Workers (Tasks)Capture evidence when something fails.
|
||||
|
||||
| Key | Default | Description |
|
||||
|
||||
```jsonc|-----|---------|-------------|
|
||||
|
||||
{| enabled | true | Master switch for diagnostics. |
|
||||
|
||||
"workers": {| saveScreenshot | true | Save screenshot on failure. |
|
||||
|
||||
"doDailySet": true,| saveHtml | true | Save HTML snapshot on failure. |
|
||||
|
||||
"doMorePromotions": true,| maxPerRun | 2 | Cap artifacts per run per failure type. |
|
||||
|
||||
"doPunchCards": true,| retentionDays | 7 | Old run artifacts pruned after this many days. |
|
||||
|
||||
"doDesktopSearch": true,
|
||||
|
||||
"doMobileSearch": true,---
|
||||
|
||||
"doDailyCheckIn": true,## jobState
|
||||
|
||||
"doReadToEarn": trueCheckpoint system to avoid duplicate work.
|
||||
|
||||
}| Key | Default | Description |
|
||||
|
||||
}|-----|---------|-------------|
|
||||
|
||||
```| enabled | true | Enable job state tracking. |
|
||||
|
||||
| dir | "" | Custom directory (default: `<sessionPath>/job-state`). |
|
||||
|
||||
### Search Behavior
|
||||
|
||||
---
|
||||
|
||||
```jsonc## schedule
|
||||
|
||||
{Built-in scheduler (avoids external cron inside container or host).
|
||||
|
||||
"search": {| Key | Default | Description |
|
||||
|
||||
"useLocalQueries": false,|-----|---------|-------------|
|
||||
|
||||
"settings": {| enabled | false | Enable scheduling loop. |
|
||||
|
||||
"useGeoLocaleQueries": false,| useAmPm | false | If true, parse `time12`; else use `time24`. |
|
||||
|
||||
"scrollRandomResults": true,| time12 | 9:00 AM | 12‑hour format time (only if useAmPm=true). |
|
||||
|
||||
"clickRandomResults": true| time24 | 09:00 | 24‑hour format time (only if useAmPm=false). |
|
||||
|
||||
}| timeZone | America/New_York | IANA zone string (e.g. Europe/Paris). |
|
||||
|
||||
}| runImmediatelyOnStart | false | Run one pass instantly in addition to daily schedule. |
|
||||
|
||||
}
|
||||
|
||||
```_Legacy_: If both `time12` and `time24` are empty, a legacy `time` (HH:mm) may still be read.
|
||||
|
||||
|
||||
|
||||
### Diagnostics---
|
||||
|
||||
## update
|
||||
|
||||
```jsoncAuto-update behavior after a run.
|
||||
|
||||
{| Key | Default | Description |
|
||||
|
||||
"diagnostics": {|-----|---------|-------------|
|
||||
|
||||
"enabled": true,| git | true | Pull latest git changes after run. |
|
||||
|
||||
"saveScreenshot": true,| docker | false | Recreate container (if running in Docker orchestration). |
|
||||
|
||||
"saveHtml": true,| scriptPath | setup/update/update.mjs | Custom script executed for update flow. |
|
||||
|
||||
"maxPerRun": 2,
|
||||
|
||||
"retentionDays": 7---
|
||||
|
||||
}## Security / Best Practices
|
||||
|
||||
}- Keep `redactEmails` true if you share logs publicly.
|
||||
|
||||
```- Use a private NTFY instance or secure Discord webhooks (do not leak URLs).
|
||||
|
||||
- Avoid setting `headless` false on untrusted remote servers.
|
||||
|
||||
### Job State
|
||||
|
||||
---
|
||||
|
||||
```jsonc## Minimal Example
|
||||
|
||||
{```jsonc
|
||||
|
||||
"jobState": {{
|
||||
|
||||
"enabled": true, "browser": { "headless": true },
|
||||
|
||||
"dir": "" // Empty = use default location "execution": { "parallel": false },
|
||||
|
||||
} "workers": { "doDailySet": true, "doDesktopSearch": true, "doMobileSearch": true },
|
||||
|
||||
} "logging": { "redactEmails": true }
|
||||
|
||||
```}
|
||||
|
||||
```
|
||||
|
||||
### Auto-Update
|
||||
|
||||
## Common Tweaks
|
||||
|
||||
```jsonc| Goal | Change |
|
||||
|
||||
{|------|--------|
|
||||
|
||||
"update": {| Faster dev feedback | Set `browser.headless` to false and shorten search delays. |
|
||||
|
||||
"git": true,| Reduce detection risk | Keep humanization enabled, add vacation window. |
|
||||
|
||||
"docker": false,| Silent mode | Add more buckets to `excludeFunc`. |
|
||||
|
||||
"scriptPath": "setup/update/update.mjs"| Skip mobile searches | Set `workers.doMobileSearch=false`. |
|
||||
|
||||
}| Use daily schedule | Set `schedule.enabled=true` and adjust `time24` + `timeZone`. |
|
||||
|
||||
}
|
||||
|
||||
```---
|
||||
|
||||
## NEW INTELLIGENT FEATURES
|
||||
|
||||
</details>
|
||||
|
||||
### riskManagement
|
||||
|
||||
---Dynamic risk assessment and ban prediction.
|
||||
|
||||
|
||||
|
||||
## 🎛️ Intelligent Features (v2.2+)| Key | Type | Default | Description |
|
||||
|
||||
|-----|------|---------|-------------|
|
||||
|
||||
### Risk Management| enabled | boolean | true | Enable risk-aware throttling. |
|
||||
|
||||
| autoAdjustDelays | boolean | true | Automatically increase delays when captchas/errors are detected. |
|
||||
|
||||
```jsonc| stopOnCritical | boolean | false | Stop execution if risk score exceeds threshold. |
|
||||
|
||||
{| banPrediction | boolean | true | Enable ML-style pattern analysis to predict ban risk. |
|
||||
|
||||
"riskManagement": {| riskThreshold | number | 75 | Risk score (0-100) above which bot pauses or alerts. |
|
||||
|
||||
"enabled": true,
|
||||
|
||||
"autoAdjustDelays": true,**How it works:** Monitors captchas, errors, timeouts, and account patterns. Dynamically adjusts delays (e.g., 1x → 2.5x) and warns you before bans happen.
|
||||
|
||||
"banPrediction": true,
|
||||
|
||||
"riskThreshold": 75---
|
||||
|
||||
}### analytics
|
||||
|
||||
}Performance dashboard and metrics tracking.
|
||||
|
||||
```
|
||||
|
||||
| Key | Type | Default | Description |
|
||||
|
||||
Dynamically adjusts delays when detecting captchas/errors.|-----|------|---------|-------------|
|
||||
|
||||
| enabled | boolean | true | Track points earned, success rates, execution times. |
|
||||
|
||||
---| retentionDays | number | 30 | How long to keep analytics data. |
|
||||
|
||||
| exportMarkdown | boolean | true | Generate human-readable markdown reports. |
|
||||
|
||||
### Query Diversity| webhookSummary | boolean | false | Send analytics summary via webhook. |
|
||||
|
||||
|
||||
|
||||
```jsonc**Output location:** `analytics/` folder (JSON files per account per day).
|
||||
|
||||
{
|
||||
|
||||
"queryDiversity": {---
|
||||
|
||||
"enabled": true,### queryDiversity
|
||||
|
||||
"sources": ["google-trends", "reddit", "local-fallback"],Multi-source search query generation.
|
||||
|
||||
"maxQueriesPerSource": 10
|
||||
|
||||
}| Key | Type | Default | Description |
|
||||
|
||||
}|-----|------|---------|-------------|
|
||||
|
||||
```| enabled | boolean | true | Use diverse sources instead of just Google Trends. |
|
||||
|
||||
| sources | array | `["google-trends", "reddit", "local-fallback"]` | Which sources to query (google-trends, reddit, news, wikipedia, local-fallback). |
|
||||
|
||||
Uses multiple search sources to avoid patterns.| maxQueriesPerSource | number | 10 | Max queries to fetch per source. |
|
||||
|
||||
| cacheMinutes | number | 30 | Cache duration to avoid hammering APIs. |
|
||||
|
||||
---
|
||||
|
||||
**Why?** Reduces detectable patterns by mixing Reddit posts, news headlines, and Wikipedia topics instead of relying solely on predictable Google Trends.
|
||||
|
||||
### Analytics
|
||||
|
||||
---
|
||||
|
||||
```jsonc### dryRun
|
||||
|
||||
{Test mode: simulate execution without actually running tasks.
|
||||
|
||||
"analytics": {
|
||||
|
||||
"enabled": true,| Key | Type | Default | Description |
|
||||
|
||||
"retentionDays": 30,|-----|------|---------|-------------|
|
||||
|
||||
"exportMarkdown": true| dryRun | boolean | false | When true, logs actions but doesn't execute (useful for testing config). |
|
||||
|
||||
}
|
||||
|
||||
}**Use case:** Validate new config changes, estimate execution time, debug issues without touching accounts.
|
||||
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Tracks points earned, success rates, execution times.## Changelog Notes
|
||||
|
||||
- **v2.2.0**: Added risk-aware throttling, analytics dashboard, query diversity, ban prediction, dry-run mode.
|
||||
|
||||
---- Removed live webhook streaming complexity; now simpler logging.
|
||||
|
||||
- Centralized redaction logic under `logging.redactEmails`.
|
||||
|
||||
### Dry Run (Test Mode)
|
||||
|
||||
If something feels undocumented or unclear, open a documentation issue or extend this page.
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"dryRun": true
|
||||
}
|
||||
```
|
||||
|
||||
**Or via CLI:**
|
||||
```bash
|
||||
npm start -- --dry-run
|
||||
```
|
||||
|
||||
Simulates execution without actually running tasks.
|
||||
|
||||
---
|
||||
|
||||
## 🛠️ Troubleshooting
|
||||
|
||||
| Issue | Solution |
|
||||
|-------|----------|
|
||||
| **Config not loading** | Check JSON syntax (trailing commas OK in `.jsonc`) |
|
||||
| **Script ignoring config** | Verify file is `src/config.jsonc` |
|
||||
| **Errors after update** | Compare with example config |
|
||||
|
||||
---
|
||||
|
||||
## 📚 Next Steps
|
||||
|
||||
**Setup scheduler?**
|
||||
→ **[Scheduler Guide](./schedule.md)**
|
||||
|
||||
**Want notifications?**
|
||||
→ **[Discord Webhooks](./conclusionwebhook.md)**
|
||||
|
||||
**Need proxies?**
|
||||
→ **[Proxy Guide](./proxy.md)**
|
||||
|
||||
---
|
||||
|
||||
**[← Back to Hub](./index.md)** | **[Getting Started](./getting-started.md)**
|
||||
103
docs/diagnostics.md
Normal file
103
docs/diagnostics.md
Normal file
@@ -0,0 +1,103 @@
|
||||
# 🔍 Diagnostics
|
||||
|
||||
**Auto-capture errors with screenshots and HTML**
|
||||
|
||||
---
|
||||
|
||||
## 💡 What Is It?
|
||||
|
||||
When errors occur, the script automatically saves:
|
||||
- 📸 **Screenshots** — Visual error capture
|
||||
- 📄 **HTML snapshots** — Page source
|
||||
|
||||
Helps you debug issues without re-running the script.
|
||||
|
||||
---
|
||||
|
||||
## ⚡ Quick Start
|
||||
|
||||
**Already enabled by default!**
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"diagnostics": {
|
||||
"enabled": true,
|
||||
"saveScreenshot": true,
|
||||
"saveHtml": true,
|
||||
"maxPerRun": 2,
|
||||
"retentionDays": 7
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 📁 Where Are Files Saved?
|
||||
|
||||
```
|
||||
reports/
|
||||
├── 2025-10-16/
|
||||
│ ├── error_abc123_001.png
|
||||
│ ├── error_abc123_001.html
|
||||
│ └── error_def456_002.png
|
||||
└── 2025-10-17/
|
||||
└── ...
|
||||
```
|
||||
|
||||
**Auto-cleanup:** Files older than 7 days are deleted automatically.
|
||||
|
||||
---
|
||||
|
||||
## 🎯 When It Captures
|
||||
|
||||
- ⏱️ **Timeouts** — Page navigation failures
|
||||
- 🎯 **Element not found** — Selector errors
|
||||
- 🔐 **Login failures** — Authentication issues
|
||||
- 🌐 **Network errors** — Request failures
|
||||
|
||||
---
|
||||
|
||||
## 🔧 Configuration Options
|
||||
|
||||
| Setting | Default | Description |
|
||||
|---------|---------|-------------|
|
||||
| `enabled` | `true` | Enable diagnostics |
|
||||
| `saveScreenshot` | `true` | Capture PNG screenshots |
|
||||
| `saveHtml` | `true` | Save page HTML |
|
||||
| `maxPerRun` | `2` | Max captures per run |
|
||||
| `retentionDays` | `7` | Auto-delete after N days |
|
||||
|
||||
---
|
||||
|
||||
## 🛠️ Troubleshooting
|
||||
|
||||
| Problem | Solution |
|
||||
|---------|----------|
|
||||
| **No captures despite errors** | Check `enabled: true` |
|
||||
| **Too many files** | Reduce `retentionDays` |
|
||||
| **Permission denied** | Check `reports/` write access |
|
||||
|
||||
### Manual Cleanup
|
||||
|
||||
```powershell
|
||||
# Delete all diagnostic reports
|
||||
Remove-Item -Recurse -Force reports/
|
||||
|
||||
# Keep last 3 days only
|
||||
Get-ChildItem reports/ | Where-Object {$_.LastWriteTime -lt (Get-Date).AddDays(-3)} | Remove-Item -Recurse
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 📚 Next Steps
|
||||
|
||||
**Need live notifications?**
|
||||
→ **[Discord Webhooks](./conclusionwebhook.md)**
|
||||
→ **[NTFY Push](./ntfy.md)**
|
||||
|
||||
**Security issues?**
|
||||
→ **[Security Guide](./security.md)**
|
||||
|
||||
---
|
||||
|
||||
**[← Back to Hub](./index.md)** | **[Config Guide](./config.md)**
|
||||
277
docs/docker.md
Normal file
277
docs/docker.md
Normal file
@@ -0,0 +1,277 @@
|
||||
# 🐳 Docker Guide
|
||||
|
||||
**Run the script in a container**
|
||||
|
||||
---
|
||||
|
||||
## ⚡ Quick Start
|
||||
|
||||
### 1. Create Required Files
|
||||
|
||||
Ensure you have:
|
||||
- `src/accounts.jsonc` with your credentials
|
||||
- `src/config.jsonc` (uses defaults if missing)
|
||||
|
||||
### 2. Start Container
|
||||
|
||||
```bash
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
### 3. View Logs
|
||||
|
||||
```bash
|
||||
docker logs -f microsoft-rewards-script
|
||||
```
|
||||
|
||||
**That's it!** Script runs automatically.
|
||||
|
||||
---
|
||||
|
||||
## 🎯 What's Included
|
||||
|
||||
The Docker setup:
|
||||
- ✅ **Chromium Headless Shell** — Lightweight browser
|
||||
- ✅ **Scheduler enabled** — Daily automation
|
||||
- ✅ **Volume mounts** — Persistent sessions
|
||||
- ✅ **Force headless** — Required for containers
|
||||
|
||||
---
|
||||
|
||||
## 📁 Mounted Volumes
|
||||
|
||||
| Host Path | Container Path | Purpose |
|
||||
|-----------|----------------|---------|
|
||||
| `./src/accounts.jsonc` | `/usr/src/.../src/accounts.jsonc` | Account credentials (read-only) |
|
||||
| `./src/config.jsonc` | `/usr/src/.../src/config.jsonc` | Configuration (read-only) |
|
||||
| `./sessions` | `/usr/src/.../sessions` | Cookies & fingerprints |
|
||||
|
||||
---
|
||||
|
||||
## 🌍 Environment Variables
|
||||
|
||||
### Set Timezone
|
||||
|
||||
```yaml
|
||||
services:
|
||||
rewards:
|
||||
environment:
|
||||
TZ: Europe/Paris
|
||||
```
|
||||
|
||||
### Use Inline JSON
|
||||
|
||||
```bash
|
||||
docker run -e ACCOUNTS_JSON='{"accounts":[...]}' ...
|
||||
```
|
||||
|
||||
### Custom Config Path
|
||||
|
||||
```bash
|
||||
docker run -e ACCOUNTS_FILE=/custom/path/accounts.json ...
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 🔧 Common Commands
|
||||
|
||||
```bash
|
||||
# Start container
|
||||
docker compose up -d
|
||||
|
||||
# View logs
|
||||
docker logs -f microsoft-rewards-script
|
||||
|
||||
# Stop container
|
||||
docker compose down
|
||||
|
||||
# Rebuild image
|
||||
docker compose build --no-cache
|
||||
|
||||
# Restart container
|
||||
docker compose restart
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 🛠️ Troubleshooting
|
||||
|
||||
| Problem | Solution |
|
||||
|---------|----------|
|
||||
| **"accounts.json not found"** | Ensure `./src/accounts.jsonc` exists and is mounted in compose.yaml |
|
||||
| **"Browser launch failed"** | Ensure `FORCE_HEADLESS=1` is set |
|
||||
| **"Permission denied"** | Check file permissions (`chmod 644 accounts.jsonc config.jsonc`) |
|
||||
| **Scheduler not running** | Verify `schedule.enabled: true` in config |
|
||||
| **Cron not working** | See [Cron Troubleshooting](#-cron-troubleshooting) above |
|
||||
|
||||
### Debug Container
|
||||
|
||||
```bash
|
||||
# Enter container shell
|
||||
docker exec -it microsoft-rewards-script /bin/bash
|
||||
|
||||
# Check Node.js version
|
||||
docker exec -it microsoft-rewards-script node --version
|
||||
|
||||
# View config (mounted in /src/)
|
||||
docker exec -it microsoft-rewards-script cat src/config.jsonc
|
||||
|
||||
# Check if cron is enabled
|
||||
docker exec -it microsoft-rewards-script printenv | grep USE_CRON
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 🎛️ Custom Configuration
|
||||
|
||||
### Option 1: Built-in Scheduler (Default, Recommended)
|
||||
|
||||
**Pros:**
|
||||
- ✅ Lighter resource usage
|
||||
- ✅ Better integration with config.jsonc
|
||||
- ✅ No additional setup needed
|
||||
- ✅ Automatic jitter for natural timing
|
||||
|
||||
**Default** `docker-compose.yml`:
|
||||
```yaml
|
||||
services:
|
||||
rewards:
|
||||
build: .
|
||||
environment:
|
||||
TZ: "Europe/Paris"
|
||||
command: ["npm", "run", "start:schedule"]
|
||||
```
|
||||
|
||||
Configure schedule in `src/config.jsonc`:
|
||||
```jsonc
|
||||
{
|
||||
"schedule": {
|
||||
"enabled": true,
|
||||
"useAmPm": false,
|
||||
"time24": "09:00",
|
||||
"timeZone": "Europe/Paris"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Option 2: Native Cron (For Traditional Cron Users)
|
||||
|
||||
**Pros:**
|
||||
- ✅ Familiar cron syntax
|
||||
- ✅ Multiple daily runs with standard crontab
|
||||
- ✅ Native Linux scheduling
|
||||
|
||||
**Setup:**
|
||||
|
||||
1. **Enable cron in `docker-compose.yml`:**
|
||||
```yaml
|
||||
services:
|
||||
rewards:
|
||||
build: .
|
||||
environment:
|
||||
TZ: "Europe/Paris"
|
||||
USE_CRON: "true"
|
||||
CRON_SCHEDULE: "0 9,16,21 * * *" # 9 AM, 4 PM, 9 PM daily
|
||||
RUN_ON_START: "true" # Optional: run once on start
|
||||
```
|
||||
|
||||
2. **Cron Schedule Examples:**
|
||||
|
||||
| Schedule | Description | Cron Expression |
|
||||
|----------|-------------|-----------------|
|
||||
| Daily at 9 AM | Once per day | `0 9 * * *` |
|
||||
| Twice daily | 9 AM and 9 PM | `0 9,21 * * *` |
|
||||
| Three times | 9 AM, 4 PM, 9 PM | `0 9,16,21 * * *` |
|
||||
| Every 6 hours | 4 times daily | `0 */6 * * *` |
|
||||
| Weekdays only | Mon-Fri at 8 AM | `0 8 * * 1-5` |
|
||||
|
||||
**Use [crontab.guru](https://crontab.guru) to validate your cron expressions.**
|
||||
|
||||
3. **Rebuild and restart:**
|
||||
```bash
|
||||
docker compose down
|
||||
docker compose build --no-cache
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
4. **Verify cron is running:**
|
||||
```bash
|
||||
# Check container logs
|
||||
docker logs -f microsoft-rewards-script
|
||||
|
||||
# Should see: "==> Cron mode enabled"
|
||||
|
||||
# View cron logs inside container
|
||||
docker exec microsoft-rewards-script tail -f /var/log/cron.log
|
||||
```
|
||||
|
||||
### Option 3: Single Run (Manual)
|
||||
|
||||
```yaml
|
||||
services:
|
||||
rewards:
|
||||
build: .
|
||||
command: ["node", "./dist/index.js"]
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 🔄 Switching Between Scheduler and Cron
|
||||
|
||||
**From Built-in → Cron:**
|
||||
1. Add `USE_CRON: "true"` to environment
|
||||
2. Add `CRON_SCHEDULE` with desired timing
|
||||
3. Rebuild: `docker compose up -d --build`
|
||||
|
||||
**From Cron → Built-in:**
|
||||
1. Remove or comment `USE_CRON` variable
|
||||
2. Configure `schedule` in `src/config.jsonc`
|
||||
3. Rebuild: `docker compose up -d --build`
|
||||
|
||||
---
|
||||
|
||||
## 🐛 Cron Troubleshooting
|
||||
|
||||
| Problem | Solution |
|
||||
|---------|----------|
|
||||
| **Cron not executing** | Check `docker logs` for "Cron mode enabled" message |
|
||||
| **Wrong timezone** | Verify `TZ` environment variable matches your location |
|
||||
| **Syntax error** | Validate cron expression at [crontab.guru](https://crontab.guru) |
|
||||
| **No logs** | Use `docker exec <container> tail -f /var/log/cron.log` |
|
||||
| **Multiple executions** | Check for duplicate cron entries |
|
||||
|
||||
### Debug Cron Inside Container
|
||||
|
||||
```bash
|
||||
# Enter container
|
||||
docker exec -it microsoft-rewards-script /bin/bash
|
||||
|
||||
# Check cron is running
|
||||
ps aux | grep cron
|
||||
|
||||
# View installed cron jobs
|
||||
crontab -l
|
||||
|
||||
# Check cron logs
|
||||
tail -100 /var/log/cron.log
|
||||
|
||||
# Test environment variables
|
||||
printenv | grep -E 'TZ|NODE_ENV'
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 📚 Next Steps
|
||||
|
||||
**Need 2FA?**
|
||||
→ **[Accounts & TOTP Setup](./accounts.md)**
|
||||
|
||||
**Want notifications?**
|
||||
→ **[Discord Webhooks](./conclusionwebhook.md)**
|
||||
|
||||
**Scheduler config?**
|
||||
→ **[Scheduler Guide](./schedule.md)**
|
||||
|
||||
---
|
||||
|
||||
**[← Back to Hub](./index.md)** | **[Getting Started](./getting-started.md)**
|
||||
136
docs/getting-started.md
Normal file
136
docs/getting-started.md
Normal file
@@ -0,0 +1,136 @@
|
||||
# 🚀 Getting Started
|
||||
|
||||
<div align="center">
|
||||
|
||||
**🎯 From zero to earning Microsoft Rewards points in minutes**
|
||||
*Complete setup guide for beginners*
|
||||
|
||||
</div>
|
||||
|
||||
---
|
||||
|
||||
## ✅ Requirements
|
||||
|
||||
- **Node.js 18+** (22 recommended) — [Download here](https://nodejs.org/)
|
||||
- **Microsoft accounts** with email + password
|
||||
- **Optional:** Docker for containerized deployment
|
||||
|
||||
---
|
||||
|
||||
## ⚡ Quick Setup (Recommended)
|
||||
|
||||
<div align="center">
|
||||
|
||||
### **🎬 One Command, Total Automation**
|
||||
|
||||
</div>
|
||||
|
||||
```bash
|
||||
# 🪟 Windows
|
||||
setup/setup.bat
|
||||
|
||||
# 🐧 Linux/macOS/WSL
|
||||
bash setup/setup.sh
|
||||
|
||||
# 🌍 Any platform
|
||||
npm run setup
|
||||
```
|
||||
|
||||
**That's it!** The wizard will:
|
||||
- ✅ Help you create `src/accounts.json` with your Microsoft credentials
|
||||
- ✅ Install all dependencies automatically
|
||||
- ✅ Build the TypeScript project
|
||||
- ✅ Start earning points immediately
|
||||
|
||||
---
|
||||
|
||||
## 🛠️ Manual Setup
|
||||
|
||||
<details>
|
||||
<summary><strong>📖 Prefer step-by-step? Click here</strong></summary>
|
||||
|
||||
### 1️⃣ **Configure Your Accounts**
|
||||
```bash
|
||||
cp src/accounts.example.json src/accounts.json
|
||||
# Edit accounts.json with your Microsoft credentials
|
||||
```
|
||||
|
||||
### 2️⃣ **Install Dependencies & Build**
|
||||
```bash
|
||||
npm install
|
||||
npm run build
|
||||
```
|
||||
|
||||
### 3️⃣ **Choose Your Mode**
|
||||
```bash
|
||||
# Single run (test it works)
|
||||
npm start
|
||||
|
||||
# Automated daily scheduler (set and forget)
|
||||
npm run start:schedule
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
---
|
||||
|
||||
## 🎯 What Happens Next?
|
||||
|
||||
The script will automatically:
|
||||
- 🔍 **Search Bing** for points (desktop + mobile)
|
||||
- 📅 **Complete daily sets** (quizzes, polls, activities)
|
||||
- 🎁 **Grab promotions** and bonus opportunities
|
||||
- 🃏 **Work on punch cards** (multi-day challenges)
|
||||
- ✅ **Daily check-ins** for easy points
|
||||
- 📚 **Read articles** for additional rewards
|
||||
|
||||
**All while looking completely natural to Microsoft!** 🤖
|
||||
|
||||
---
|
||||
|
||||
## 🐳 Docker Alternative
|
||||
|
||||
If you prefer containers:
|
||||
|
||||
```bash
|
||||
# Ensure accounts.json and config.json exist
|
||||
docker compose up -d
|
||||
|
||||
# Follow logs
|
||||
docker logs -f microsoft-rewards-script
|
||||
```
|
||||
|
||||
**[Full Docker Guide →](./docker.md)**
|
||||
|
||||
---
|
||||
|
||||
## 🔧 Next Steps
|
||||
|
||||
Once running, explore these guides:
|
||||
|
||||
| Priority | Guide | Why Important |
|
||||
|----------|-------|---------------|
|
||||
| **High** | **[Accounts & 2FA](./accounts.md)** | Set up TOTP for secure automation |
|
||||
| **High** | **[Scheduling](./schedule.md)** | Configure automated daily runs |
|
||||
| **Medium** | **[Notifications](./ntfy.md)** | Get alerts on your phone |
|
||||
| **Low** | **[Humanization](./humanization.md)** | Advanced anti-detection |
|
||||
|
||||
---
|
||||
|
||||
## 🆘 Need Help?
|
||||
|
||||
**Script not starting?** → [Troubleshooting Guide](./diagnostics.md)
|
||||
**Login issues?** → [Accounts & 2FA Setup](./accounts.md)
|
||||
**Want Docker?** → [Container Guide](./docker.md)
|
||||
|
||||
**Found a bug?** [Report it here](https://github.com/TheNetsky/Microsoft-Rewards-Script/issues)
|
||||
**Need support?** [Join our Discord](https://discord.gg/KRBFxxsU)
|
||||
|
||||
---
|
||||
|
||||
## 🔗 Related Guides
|
||||
|
||||
- **[Accounts & 2FA](./accounts.md)** — Add Microsoft accounts with TOTP
|
||||
- **[Docker](./docker.md)** — Deploy with containers
|
||||
- **[Scheduler](./schedule.md)** — Automate daily execution
|
||||
- **[Discord Webhooks](./conclusionwebhook.md)** — Get run summaries
|
||||
193
docs/git-conflict-resolution.md
Normal file
193
docs/git-conflict-resolution.md
Normal file
@@ -0,0 +1,193 @@
|
||||
# Git Conflict Resolution Guide
|
||||
|
||||
## Problem: "Pulling is not possible because you have unmerged files"
|
||||
|
||||
This error occurs when Git has conflicting changes between your local repository and the remote repository.
|
||||
|
||||
## Quick Fix (Recommended)
|
||||
|
||||
### Option 1: Keep Remote Changes (Safest for updates)
|
||||
|
||||
```bash
|
||||
# Abort any ongoing operations
|
||||
git merge --abort
|
||||
git rebase --abort
|
||||
|
||||
# Reset to remote version (discards local changes)
|
||||
git fetch --all
|
||||
git reset --hard origin/main
|
||||
|
||||
# Reinstall and rebuild
|
||||
npm ci
|
||||
npm run build
|
||||
```
|
||||
|
||||
### Option 2: Keep Local Changes
|
||||
|
||||
```bash
|
||||
# Save your changes
|
||||
git stash push -m "My local changes"
|
||||
|
||||
# Get remote changes
|
||||
git fetch --all
|
||||
git reset --hard origin/main
|
||||
|
||||
# Reapply your changes (may cause conflicts again)
|
||||
git stash pop
|
||||
```
|
||||
|
||||
## Automatic Conflict Prevention
|
||||
|
||||
The update script (`setup/update/update.mjs`) now automatically:
|
||||
|
||||
1. **Detects conflicts** before attempting updates
|
||||
2. **Aborts** failed merge/rebase operations
|
||||
3. **Preserves** your stashed changes
|
||||
4. **Reports** exactly what went wrong
|
||||
|
||||
### Update Script Features
|
||||
|
||||
- ✅ Pre-flight conflict detection
|
||||
- ✅ Automatic abort of failed operations
|
||||
- ✅ Smart backup of config.jsonc and accounts.json
|
||||
- ✅ User-configurable auto-update preferences
|
||||
- ✅ Detailed error reporting with recovery instructions
|
||||
|
||||
## Config Options
|
||||
|
||||
In `config.jsonc`, set these to control what gets auto-updated:
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"update": {
|
||||
"autoUpdateConfig": false, // Keep your local config.jsonc
|
||||
"autoUpdateAccounts": false, // Keep your local accounts.json
|
||||
"git": true, // Enable Git updates
|
||||
"docker": false // Enable Docker updates
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Manual Conflict Resolution
|
||||
|
||||
If you need to manually resolve conflicts:
|
||||
|
||||
### 1. Check Status
|
||||
|
||||
```bash
|
||||
git status
|
||||
```
|
||||
|
||||
### 2. View Conflicted Files
|
||||
|
||||
```bash
|
||||
git ls-files -u
|
||||
```
|
||||
|
||||
### 3. For Each Conflicted File
|
||||
|
||||
**Option A: Keep Remote Version**
|
||||
```bash
|
||||
git checkout --theirs <file>
|
||||
git add <file>
|
||||
```
|
||||
|
||||
**Option B: Keep Local Version**
|
||||
```bash
|
||||
git checkout --ours <file>
|
||||
git add <file>
|
||||
```
|
||||
|
||||
**Option C: Manual Edit**
|
||||
- Open the file
|
||||
- Look for `<<<<<<<`, `=======`, `>>>>>>>` markers
|
||||
- Edit to keep what you want
|
||||
- Remove the markers
|
||||
- Save the file
|
||||
|
||||
```bash
|
||||
git add <file>
|
||||
```
|
||||
|
||||
### 4. Complete the Merge
|
||||
|
||||
```bash
|
||||
git commit -m "Resolved conflicts"
|
||||
```
|
||||
|
||||
## Prevention Tips
|
||||
|
||||
1. **Don't edit code files directly** - they're meant to be updated from Git
|
||||
2. **Only customize** `config.jsonc` and `accounts.json`
|
||||
3. **Use the auto-update feature** with proper config flags
|
||||
4. **Commit your config changes** if you want version control
|
||||
5. **Use branches** for custom modifications
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### "detached HEAD state"
|
||||
|
||||
```bash
|
||||
git checkout main
|
||||
git pull
|
||||
```
|
||||
|
||||
### "Your branch has diverged"
|
||||
|
||||
```bash
|
||||
git fetch origin
|
||||
git reset --hard origin/main
|
||||
```
|
||||
|
||||
### "Permission denied" or file locks
|
||||
|
||||
On Windows:
|
||||
```powershell
|
||||
# Close all Node/VS Code instances
|
||||
taskkill /F /IM node.exe
|
||||
git clean -fd
|
||||
git reset --hard origin/main
|
||||
```
|
||||
|
||||
On Linux/macOS:
|
||||
```bash
|
||||
sudo chown -R $USER:$USER .git
|
||||
git clean -fd
|
||||
git reset --hard origin/main
|
||||
```
|
||||
|
||||
## Emergency Recovery
|
||||
|
||||
If everything is broken:
|
||||
|
||||
```bash
|
||||
# Backup your config and accounts
|
||||
cp src/config.jsonc ~/backup-config.jsonc
|
||||
cp src/accounts.json ~/backup-accounts.json
|
||||
|
||||
# Nuclear option: fresh clone
|
||||
cd ..
|
||||
rm -rf Microsoft-Rewards-Rewi
|
||||
git clone https://github.com/Light60-1/Microsoft-Rewards-Rewi.git
|
||||
cd Microsoft-Rewards-Rewi
|
||||
|
||||
# Restore your files
|
||||
cp ~/backup-config.jsonc src/config.jsonc
|
||||
cp ~/backup-accounts.json src/accounts.json
|
||||
|
||||
# Reinstall
|
||||
npm ci
|
||||
npm run build
|
||||
```
|
||||
|
||||
## Support
|
||||
|
||||
If conflicts persist:
|
||||
1. Check GitHub Issues
|
||||
2. Create a new issue with the output of `git status`
|
||||
3. Include your update configuration settings
|
||||
4. Mention your OS and Git version
|
||||
|
||||
---
|
||||
|
||||
**Remember**: The safest approach is to let Git updates manage code files, and only customize config and accounts files.
|
||||
160
docs/humanization.md
Normal file
160
docs/humanization.md
Normal file
@@ -0,0 +1,160 @@
|
||||
# 🤖 Humanization
|
||||
|
||||
**Make automation look natural to avoid detection**
|
||||
|
||||
---
|
||||
|
||||
## 💡 What Is It?
|
||||
|
||||
Humanization adds **random delays** and **subtle gestures** to mimic real human behavior.
|
||||
|
||||
### Why Use It?
|
||||
- ✅ **Lower detection risk** — Looks less like a bot
|
||||
- ✅ **Natural patterns** — Random timing, mouse moves
|
||||
- ✅ **Built-in** — No configuration needed
|
||||
|
||||
---
|
||||
|
||||
## ⚡ Quick Start
|
||||
|
||||
**Edit** `src/config.jsonc`:
|
||||
```jsonc
|
||||
{
|
||||
"humanization": {
|
||||
"enabled": true
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**That's it!** Default settings work for most users.
|
||||
|
||||
---
|
||||
|
||||
## 🎯 What It Does
|
||||
|
||||
### Random Delays
|
||||
- **150-450ms pauses** between actions
|
||||
- Mimics human decision-making time
|
||||
- Prevents robotic patterns
|
||||
|
||||
### Subtle Gestures
|
||||
- **Mouse movements** — Small cursor adjustments (40% chance)
|
||||
- **Scrolling** — Minor page movements (20% chance)
|
||||
- **Never clicks** random elements (safe by design)
|
||||
|
||||
### Temporal Patterns
|
||||
- **Random off days** — Skip 1 day per week by default
|
||||
- **Time windows** — Run only during certain hours (optional)
|
||||
|
||||
---
|
||||
|
||||
## 🎛️ Presets
|
||||
|
||||
### Default (Recommended)
|
||||
```jsonc
|
||||
{
|
||||
"humanization": {
|
||||
"enabled": true
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Balanced safety and speed.
|
||||
|
||||
---
|
||||
|
||||
### Conservative (More Natural)
|
||||
```jsonc
|
||||
{
|
||||
"humanization": {
|
||||
"enabled": true,
|
||||
"actionDelay": { "min": 300, "max": 800 },
|
||||
"gestureMoveProb": 0.6,
|
||||
"gestureScrollProb": 0.4,
|
||||
"randomOffDaysPerWeek": 2
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Slower but safer.
|
||||
|
||||
---
|
||||
|
||||
### Fast (Less Natural)
|
||||
```jsonc
|
||||
{
|
||||
"humanization": {
|
||||
"enabled": true,
|
||||
"actionDelay": { "min": 100, "max": 250 },
|
||||
"gestureMoveProb": 0.2,
|
||||
"gestureScrollProb": 0.1,
|
||||
"randomOffDaysPerWeek": 0
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Faster execution, higher risk.
|
||||
|
||||
---
|
||||
|
||||
## ⏰ Time Windows (Optional)
|
||||
|
||||
Run only during specific hours:
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"humanization": {
|
||||
"enabled": true,
|
||||
"allowedWindows": ["08:00-10:30", "20:00-22:30"]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Script **waits** until next allowed window if started outside.
|
||||
|
||||
---
|
||||
|
||||
## 📅 Random Off Days
|
||||
|
||||
Skip random days per week:
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"humanization": {
|
||||
"enabled": true,
|
||||
"randomOffDaysPerWeek": 1 // Skip 1 random day/week
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Options:**
|
||||
- `0` — Never skip days
|
||||
- `1` — Skip 1 day/week (default)
|
||||
- `2` — Skip 2 days/week
|
||||
|
||||
---
|
||||
|
||||
## 🛠️ Troubleshooting
|
||||
|
||||
| Problem | Solution |
|
||||
|---------|----------|
|
||||
| **Too slow** | Lower `actionDelay`, reduce probabilities |
|
||||
| **Too fast/robotic** | Increase delays, higher probabilities |
|
||||
| **Not running at all** | Check `allowedWindows` time format |
|
||||
|
||||
---
|
||||
|
||||
## 📚 Next Steps
|
||||
|
||||
**Need vacation mode?**
|
||||
→ See [Scheduler Vacation](./schedule.md#vacation-mode)
|
||||
|
||||
**Want scheduling?**
|
||||
→ **[Scheduler Guide](./schedule.md)**
|
||||
|
||||
**More security?**
|
||||
→ **[Security Guide](./security.md)**
|
||||
|
||||
---
|
||||
|
||||
**[← Back to Hub](./index.md)** | **[Config Guide](./config.md)**
|
||||
78
docs/index.md
Normal file
78
docs/index.md
Normal file
@@ -0,0 +1,78 @@
|
||||
# 📚 Documentation Hub
|
||||
|
||||
**Complete guide to automate Microsoft Rewards**
|
||||
|
||||
---
|
||||
|
||||
## 🚀 Start Here (In Order)
|
||||
|
||||
### For Complete Beginners
|
||||
|
||||
1. **[Accounts & 2FA](./accounts.md)** — Add your Microsoft accounts
|
||||
2. **[Basic Config](./config.md#quick-start)** — 5 essential options
|
||||
3. **[Scheduler](./schedule.md#quick-start)** — Automate daily runs
|
||||
|
||||
**You're all set! 🎉**
|
||||
|
||||
---
|
||||
|
||||
## 🔥 Popular Features
|
||||
|
||||
### Notifications & Monitoring
|
||||
- **[Discord Webhooks](./conclusionwebhook.md)** — Get run summaries
|
||||
- **[NTFY Push](./ntfy.md)** — Mobile alerts
|
||||
|
||||
### Anti-Ban & Privacy
|
||||
- **[Humanization](./humanization.md)** — Natural behavior simulation
|
||||
- **[Proxy Setup](./proxy.md)** — Change your IP (optional)
|
||||
|
||||
### Deployment
|
||||
- **[Docker](./docker.md)** — Container deployment
|
||||
- **[Diagnostics](./diagnostics.md)** — Troubleshooting
|
||||
|
||||
---
|
||||
|
||||
## 📖 All Documentation
|
||||
|
||||
### Configuration & Setup
|
||||
- [Complete Configuration Reference](./config.md) — All options explained
|
||||
- [Scheduler Setup](./schedule.md) — Automated timing
|
||||
- [Job State](./jobstate.md) — Progress tracking
|
||||
- [Auto-Update](./update.md) — Keep script current
|
||||
|
||||
### Advanced Features
|
||||
- [Buy Mode](./buy-mode.md) — Manual purchase monitoring
|
||||
- [Security Guide](./security.md) — Privacy & incident response
|
||||
|
||||
---
|
||||
|
||||
## 🆘 Need Help?
|
||||
|
||||
**Technical issue?** → [Diagnostics Guide](./diagnostics.md)
|
||||
**Login problem?** → [Accounts & 2FA](./accounts.md#troubleshooting)
|
||||
**Banned?** → [Security Guide](./security.md)
|
||||
|
||||
**Join Discord** → [Support Server](https://discord.gg/kn3695Kx32)
|
||||
|
||||
---
|
||||
|
||||
## 🎯 Quick Links by Use Case
|
||||
|
||||
### "I just installed the script"
|
||||
→ [Getting Started](./getting-started.md) → [Accounts](./accounts.md) → [Scheduler](./schedule.md)
|
||||
|
||||
### "I want daily automation"
|
||||
→ [Scheduler Guide](./schedule.md) → [Humanization](./humanization.md)
|
||||
|
||||
### "I need notifications"
|
||||
→ [Discord Webhooks](./conclusionwebhook.md) or [NTFY](./ntfy.md)
|
||||
|
||||
### "I want to use Docker"
|
||||
→ [Docker Guide](./docker.md)
|
||||
|
||||
### "Something's broken"
|
||||
→ [Diagnostics](./diagnostics.md) → [Security](./security.md)
|
||||
|
||||
---
|
||||
|
||||
**[← Back to README](../README.md)**
|
||||
118
docs/jobstate.md
Normal file
118
docs/jobstate.md
Normal file
@@ -0,0 +1,118 @@
|
||||
# 💾 Job State
|
||||
|
||||
**Resume interrupted tasks automatically**
|
||||
|
||||
---
|
||||
|
||||
## 💡 What Is It?
|
||||
|
||||
Saves progress after each completed task. If script crashes or stops, it resumes exactly where it left off.
|
||||
|
||||
🔁 **New:** Completed accounts are tracked per day. When you restart the bot after a failure, it skips accounts already finished and jumps directly to the remaining ones.
|
||||
|
||||
**Already enabled by default!**
|
||||
|
||||
---
|
||||
|
||||
## ⚡ How It Works
|
||||
|
||||
### Progress Tracking
|
||||
|
||||
```
|
||||
sessions/job-state/
|
||||
├── account1@email.com/
|
||||
│ ├── daily-set-2025-10-16.json
|
||||
│ ├── desktop-search-2025-10-16.json
|
||||
│ └── mobile-search-2025-10-16.json
|
||||
└── account2@email.com/
|
||||
└── ...
|
||||
```
|
||||
|
||||
- ✅ **Per-account** — Independent progress
|
||||
- ✅ **Date-specific** — Fresh start each day
|
||||
- 🗂️ **No auto-cleanup** — Old files remain for history (clean up manually; see Maintenance below)
|
||||
|
||||
---
|
||||
|
||||
## 🎯 Benefits
|
||||
|
||||
### Interrupted Runs
|
||||
|
||||
| Scenario | Without Job State | With Job State |
|
||||
|----------|-------------------|----------------|
|
||||
| **Power outage** | Start from beginning | Resume from last task |
|
||||
| **Manual stop** | Lose all progress | Pick up where left off |
|
||||
| **Network failure** | Redo everything | Continue remaining tasks |
|
||||
|
||||
---
|
||||
|
||||
## ⚙️ Configuration
|
||||
|
||||
**Already enabled:**
|
||||
```jsonc
|
||||
{
|
||||
"jobState": {
|
||||
"enabled": true,
|
||||
"skipCompletedAccounts": true, // Skip accounts already finished today
|
||||
"dir": "" // Empty = use default location
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Custom location:**
|
||||
```jsonc
|
||||
{
|
||||
"jobState": {
|
||||
"enabled": true,
|
||||
"skipCompletedAccounts": true,
|
||||
"dir": "/custom/path/job-state"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
> ℹ️ Set `skipCompletedAccounts` to `false` (or export `REWARDS_DISABLE_ACCOUNT_SKIP=1`/`true`) if you need to force every pass to run all accounts—for example when using `passesPerRun` > 1 or when intentionally repeating the whole rotation in the same day.
|
||||
|
||||
---
|
||||
|
||||
## 🧹 Maintenance
|
||||
|
||||
### Reset Progress (Fresh Start)
|
||||
|
||||
```powershell
|
||||
# Reset all accounts
|
||||
Remove-Item -Recurse -Force sessions/job-state/
|
||||
|
||||
# Reset one account
|
||||
Remove-Item -Recurse -Force sessions/job-state/user@email.com/
|
||||
```
|
||||
|
||||
### Cleanup Old Files
|
||||
|
||||
```powershell
|
||||
# Keep last 7 days only
|
||||
Get-ChildItem sessions/job-state -Recurse -Filter "*.json" | Where-Object {$_.LastWriteTime -lt (Get-Date).AddDays(-7)} | Remove-Item
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 🛠️ Troubleshooting
|
||||
|
||||
| Problem | Solution |
|
||||
|---------|----------|
|
||||
| **Tasks not resuming** | Check file permissions |
|
||||
| **Duplicate execution** | Ensure system time is accurate |
|
||||
| **Excessive files** | Implement cleanup schedule |
|
||||
|
||||
---
|
||||
|
||||
## 📚 Next Steps
|
||||
|
||||
**Need scheduler?**
|
||||
→ **[Scheduler Guide](./schedule.md)**
|
||||
|
||||
**Want diagnostics?**
|
||||
→ **[Diagnostics Guide](./diagnostics.md)**
|
||||
|
||||
---
|
||||
|
||||
**[← Back to Hub](./index.md)** | **[Config Guide](./config.md)**
|
||||
118
docs/ntfy.md
Normal file
118
docs/ntfy.md
Normal file
@@ -0,0 +1,118 @@
|
||||
# 📱 NTFY Push Notifications
|
||||
|
||||
**Get alerts on your phone instantly**
|
||||
|
||||
---
|
||||
|
||||
## 💡 What Is NTFY?
|
||||
|
||||
Simple push notification service that sends alerts to your phone/desktop.
|
||||
|
||||
**Free to use:** No account required for basic features.
|
||||
|
||||
---
|
||||
|
||||
## ⚡ Quick Start
|
||||
|
||||
### 1. Install NTFY App
|
||||
|
||||
- **Android:** [Google Play](https://play.google.com/store/apps/details?id=io.heckel.ntfy)
|
||||
- **iOS:** [App Store](https://apps.apple.com/app/ntfy/id1625396347)
|
||||
|
||||
### 2. Choose a Topic Name
|
||||
|
||||
Pick any unique name (e.g., `rewards-myname-2025`)
|
||||
|
||||
### 3. Subscribe in App
|
||||
|
||||
Open NTFY app → Add subscription → Enter your topic name
|
||||
|
||||
### 4. Configure Script
|
||||
|
||||
**Edit** `src/config.jsonc`:
|
||||
```jsonc
|
||||
{
|
||||
"notifications": {
|
||||
"ntfy": {
|
||||
"enabled": true,
|
||||
"url": "https://ntfy.sh",
|
||||
"topic": "rewards-myname-2025"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**That's it!** You'll get push notifications on your phone.
|
||||
|
||||
---
|
||||
|
||||
## 🔔 What Notifications You Get
|
||||
|
||||
- 🚨 **Errors** — Script crashes, login failures
|
||||
- ⚠️ **Warnings** — Missing points, suspicious activity
|
||||
- 🏆 **Milestones** — Account completed successfully
|
||||
- 💳 **Buy mode** — Point spending detected
|
||||
- 📊 **Summary** — End-of-run report
|
||||
|
||||
---
|
||||
|
||||
## 🔒 Use Private Server (Optional)
|
||||
|
||||
### Self-Host NTFY
|
||||
|
||||
**Docker:**
|
||||
```yaml
|
||||
services:
|
||||
ntfy:
|
||||
image: binwiederhier/ntfy
|
||||
ports:
|
||||
- "80:80"
|
||||
volumes:
|
||||
- ./ntfy-data:/var/lib/ntfy
|
||||
command: serve
|
||||
```
|
||||
|
||||
**Then configure:**
|
||||
```jsonc
|
||||
{
|
||||
"notifications": {
|
||||
"ntfy": {
|
||||
"enabled": true,
|
||||
"url": "https://ntfy.yourdomain.com",
|
||||
"topic": "rewards",
|
||||
"authToken": "tk_your_token_here"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 🛠️ Troubleshooting
|
||||
|
||||
| Problem | Solution |
|
||||
|---------|----------|
|
||||
| **No notifications** | Check topic name matches exactly |
|
||||
| **Wrong server** | Verify URL includes `https://` |
|
||||
| **Auth failures** | Token must start with `tk_` |
|
||||
|
||||
### Test Manually
|
||||
|
||||
```bash
|
||||
# Send test message
|
||||
curl -d "Test from rewards script" https://ntfy.sh/your-topic
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 📚 Next Steps
|
||||
|
||||
**Want Discord too?**
|
||||
→ **[Discord Webhooks](./conclusionwebhook.md)**
|
||||
|
||||
**Need detailed logs?**
|
||||
→ **[Diagnostics Guide](./diagnostics.md)**
|
||||
|
||||
---
|
||||
|
||||
**[← Back to Hub](./index.md)** | **[Config Guide](./config.md)**
|
||||
126
docs/proxy.md
Normal file
126
docs/proxy.md
Normal file
@@ -0,0 +1,126 @@
|
||||
# 🌐 Proxy Setup
|
||||
|
||||
**Route traffic through proxy servers**
|
||||
|
||||
---
|
||||
|
||||
## 💡 Do You Need a Proxy?
|
||||
|
||||
**Most users DON'T need proxies.** Only use if:
|
||||
- ✅ You run many accounts from same IP
|
||||
- ✅ You want geographic flexibility
|
||||
- ✅ Your IP is already flagged
|
||||
|
||||
**Otherwise, skip this guide.**
|
||||
|
||||
---
|
||||
|
||||
## ⚡ Quick Start
|
||||
|
||||
### Per-Account Proxy
|
||||
|
||||
**Edit** `src/accounts.json`:
|
||||
```json
|
||||
{
|
||||
"accounts": [
|
||||
{
|
||||
"email": "your@email.com",
|
||||
"password": "password",
|
||||
"proxy": {
|
||||
"proxyAxios": true,
|
||||
"url": "proxy.example.com",
|
||||
"port": 8080,
|
||||
"username": "proxyuser",
|
||||
"password": "proxypass"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
**That's it!** Script uses proxy for this account only.
|
||||
|
||||
---
|
||||
|
||||
## 🎯 Proxy Types
|
||||
|
||||
### HTTP Proxy (Most Common)
|
||||
|
||||
```json
|
||||
{
|
||||
"proxy": {
|
||||
"proxyAxios": true,
|
||||
"url": "http://proxy.example.com",
|
||||
"port": 8080,
|
||||
"username": "user",
|
||||
"password": "pass"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### SOCKS5 Proxy
|
||||
|
||||
```json
|
||||
{
|
||||
"proxy": {
|
||||
"proxyAxios": true,
|
||||
"url": "socks5://proxy.example.com",
|
||||
"port": 1080,
|
||||
"username": "user",
|
||||
"password": "pass"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 🏢 Recommended Providers
|
||||
|
||||
### Residential Proxies (Best)
|
||||
- **Bright Data** — Premium quality, expensive
|
||||
- **Smartproxy** — User-friendly
|
||||
- **Oxylabs** — Enterprise-grade
|
||||
|
||||
### Datacenter Proxies (Cheaper)
|
||||
- **SquidProxies** — Reliable
|
||||
- **MyPrivateProxy** — Dedicated IPs
|
||||
|
||||
⚠️ **Avoid free proxies** — Unreliable and often blocked.
|
||||
|
||||
---
|
||||
|
||||
## 🛠️ Troubleshooting
|
||||
|
||||
| Problem | Solution |
|
||||
|---------|----------|
|
||||
| **"Connection refused"** | Check proxy URL and port |
|
||||
| **"407 Auth required"** | Verify username/password |
|
||||
| **"Timeout"** | Try different proxy server |
|
||||
| **"SSL error"** | Use `http://` instead of `https://` for the proxy URL (note: proxy credentials are then sent unencrypted) |
|
||||
|
||||
### Test Proxy Manually
|
||||
|
||||
```bash
|
||||
# Windows (PowerShell)
|
||||
curl --proxy http://user:pass@proxy.com:8080 http://httpbin.org/ip
|
||||
|
||||
# Linux/macOS
|
||||
curl --proxy http://user:pass@proxy.com:8080 http://httpbin.org/ip
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 📚 Next Steps
|
||||
|
||||
**Proxy working?**
|
||||
→ **[Setup Scheduler](./schedule.md)**
|
||||
|
||||
**Need humanization?**
|
||||
→ **[Humanization Guide](./humanization.md)**
|
||||
|
||||
**Multiple accounts?**
|
||||
→ **[Accounts Guide](./accounts.md)**
|
||||
|
||||
---
|
||||
|
||||
**[← Back to Hub](./index.md)** | **[Config Guide](./config.md)**
|
||||
179
docs/schedule.md
Normal file
179
docs/schedule.md
Normal file
@@ -0,0 +1,179 @@
|
||||
# ⏰ Scheduler
|
||||
|
||||
**Automate daily script execution**
|
||||
|
||||
---
|
||||
|
||||
## ⚡ Quick Start
|
||||
|
||||
### Basic Setup
|
||||
|
||||
**Edit** `src/config.jsonc`:
|
||||
```jsonc
|
||||
{
|
||||
"schedule": {
|
||||
"enabled": true,
|
||||
"time": "09:00",
|
||||
"timeZone": "America/New_York"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Start scheduler:**
|
||||
```bash
|
||||
npm run start:schedule
|
||||
```
|
||||
|
||||
**That's it!** Script runs automatically at 9 AM daily.
|
||||
|
||||
---
|
||||
|
||||
## 🎯 Common Configurations
|
||||
|
||||
### Morning Run
|
||||
```jsonc
|
||||
{
|
||||
"schedule": {
|
||||
"enabled": true,
|
||||
"time": "08:00",
|
||||
"timeZone": "America/New_York"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Evening Run
|
||||
```jsonc
|
||||
{
|
||||
"schedule": {
|
||||
"enabled": true,
|
||||
"time": "20:00",
|
||||
"timeZone": "Europe/Paris"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Multiple Passes Per Day
|
||||
```jsonc
|
||||
{
|
||||
"schedule": {
|
||||
"enabled": true,
|
||||
"time": "10:00",
|
||||
"timeZone": "America/Los_Angeles"
|
||||
},
|
||||
"passesPerRun": 2
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 🌍 Common Timezones
|
||||
|
||||
| Region | Timezone |
|
||||
|--------|----------|
|
||||
| **US East** | `America/New_York` |
|
||||
| **US West** | `America/Los_Angeles` |
|
||||
| **UK** | `Europe/London` |
|
||||
| **France** | `Europe/Paris` |
|
||||
| **Germany** | `Europe/Berlin` |
|
||||
|
||||
[All timezones](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones)
|
||||
|
||||
---
|
||||
|
||||
## 🎲 Advanced: Cron Expressions
|
||||
|
||||
Want more control? Use cron:
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"schedule": {
|
||||
"enabled": true,
|
||||
"cron": "0 9 * * *", // Every day at 9 AM
|
||||
"timeZone": "America/New_York"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Cron Examples
|
||||
```bash
|
||||
"0 7 * * *" # Every day at 7:00 AM
|
||||
"30 20 * * *" # Every day at 8:30 PM
|
||||
"0 9,21 * * *" # Twice daily: 9 AM and 9 PM
|
||||
"0 10 * * 1-5" # Weekdays only at 10 AM
|
||||
```
|
||||
|
||||
[Cron syntax helper](https://crontab.guru/)
|
||||
|
||||
---
|
||||
|
||||
## 🏖️ Vacation Mode (Optional)
|
||||
|
||||
Skip random days each month to look more natural:
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"vacation": {
|
||||
"enabled": true,
|
||||
"minDays": 3,
|
||||
"maxDays": 5
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Example:** Script will randomly skip 3-5 consecutive days per month.
|
||||
|
||||
---
|
||||
|
||||
## 🛠️ Troubleshooting
|
||||
|
||||
| Problem | Solution |
|
||||
|---------|----------|
|
||||
| **Scheduler not running** | Check `enabled: true` in config |
|
||||
| **Wrong execution time** | Verify timezone spelling |
|
||||
| **Runs multiple times** | Only use ONE scheduler instance |
|
||||
| **Missed run** | Check if computer was off/sleeping |
|
||||
|
||||
### Debug Commands
|
||||
|
||||
**Check timezone:**
|
||||
```powershell
|
||||
node -e "console.log(new Date().toLocaleString('en-US', {timeZone: 'America/New_York'}))"
|
||||
```
|
||||
|
||||
**Validate config:**
|
||||
```powershell
|
||||
npm run typecheck
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 🐳 Docker Integration
|
||||
|
||||
### Built-in Scheduler (Recommended)
|
||||
```yaml
|
||||
services:
|
||||
rewards:
|
||||
build: .
|
||||
command: ["npm", "run", "start:schedule"]
|
||||
environment:
|
||||
TZ: Europe/Paris
|
||||
```
|
||||
|
||||
Uses config from `src/config.jsonc`.
|
||||
|
||||
---
|
||||
|
||||
## 📚 Next Steps
|
||||
|
||||
**Want natural behavior?**
|
||||
→ **[Humanization Guide](./humanization.md)**
|
||||
|
||||
**Need notifications?**
|
||||
→ **[Discord Webhooks](./conclusionwebhook.md)**
|
||||
|
||||
**Docker setup?**
|
||||
→ **[Docker Guide](./docker.md)**
|
||||
|
||||
---
|
||||
|
||||
**[← Back to Hub](./index.md)** | **[Getting Started](./getting-started.md)**
|
||||
207
docs/security.md
Normal file
207
docs/security.md
Normal file
@@ -0,0 +1,207 @@
|
||||
# 🔒 Security Guide
|
||||
|
||||
**Protect your accounts and handle security incidents**
|
||||
|
||||
---
|
||||
|
||||
## ⚠️ Important Disclaimer
|
||||
|
||||
**Using automation violates Microsoft's Terms of Service.**
|
||||
|
||||
Your accounts **may be banned**. Use at your own risk.
|
||||
|
||||
---
|
||||
|
||||
## 🛡️ Best Practices
|
||||
|
||||
### ✅ DO
|
||||
|
||||
- **Enable humanization** — Natural behavior reduces detection
|
||||
- **Use 2FA/TOTP** — More secure authentication
|
||||
- **Run 1-2x daily max** — Don't be greedy
|
||||
- **Test on secondary accounts** — Never risk your main account
|
||||
- **Enable vacation mode** — Random off days look natural
|
||||
- **Monitor regularly** — Check diagnostics and logs
|
||||
|
||||
### ❌ DON'T
|
||||
|
||||
- **Run on main account** — Too risky
|
||||
- **Schedule hourly** — Obvious bot pattern
|
||||
- **Ignore warnings** — Security alerts matter
|
||||
- **Use shared proxies** — Higher detection risk
|
||||
- **Skip humanization** — Robotic behavior gets caught
|
||||
|
||||
---
|
||||
|
||||
## 🚨 Security Incidents
|
||||
|
||||
### Recovery Email Mismatch
|
||||
|
||||
**What:** Login shows unfamiliar recovery email (e.g., `ko*****@hacker.net`)
|
||||
|
||||
**Action:**
|
||||
1. **Stop immediately** — Script halts automatically
|
||||
2. **Check Microsoft Account** → Security settings
|
||||
3. **Update config** if you changed email yourself:
|
||||
```json
|
||||
{
|
||||
"recoveryEmail": "ko*****@hacker.net"
|
||||
}
|
||||
```
|
||||
4. **Change password** if compromise suspected
|
||||
|
||||
---
|
||||
|
||||
### "We Can't Sign You In" (Blocked)
|
||||
|
||||
**What:** Microsoft blocks login attempt
|
||||
|
||||
**Action:**
|
||||
1. **Wait 24-48 hours** — Temporary locks usually lift
|
||||
2. **Complete any challenges** — SMS, authenticator, etc.
|
||||
3. **Reduce frequency** — Run less often
|
||||
4. **Enable humanization** — If not already enabled
|
||||
5. **Check proxy** — Ensure consistent IP/location
|
||||
|
||||
---
|
||||
|
||||
## 🔐 Account Security
|
||||
|
||||
### Strong Credentials
|
||||
|
||||
```json
|
||||
{
|
||||
"accounts": [
|
||||
{
|
||||
"email": "your@email.com",
|
||||
"password": "strong-unique-password",
|
||||
"totp": "JBSWY3DPEHPK3PXP"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
- ✅ **Unique passwords** per account
|
||||
- ✅ **TOTP enabled** for all accounts (see below)
|
||||
- ✅ **Strong passwords** (16+ characters)
|
||||
- 🔄 **Rotate every 90 days**
|
||||
|
||||
**How to enable TOTP:**
|
||||
- Go to https://account.live.com/proofs/Manage/additional and turn on two-step verification.
|
||||
- Choose **"Set up a different authenticator app"**, then click **"I can't scan the bar code"** to reveal the Base32 secret.
|
||||
- Enter the revealed Base32 secret into an authenticator you control (Google Authenticator recommended) and copy the same secret into `totp`.
|
||||
- Enter the app-generated code once to finish pairing. The same secret powers both your app and the bot.
|
||||
|
||||
### File Permissions
|
||||
|
||||
```bash
|
||||
# Linux/macOS - Restrict access
|
||||
chmod 600 src/accounts.json
|
||||
|
||||
# Windows - Right-click → Properties → Security
|
||||
# Remove all users except yourself
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 🌐 Network Security
|
||||
|
||||
### Use Proxies (Optional)
|
||||
|
||||
```json
|
||||
{
|
||||
"proxy": {
|
||||
"proxyAxios": true,
|
||||
"url": "proxy.example.com",
|
||||
"port": 8080,
|
||||
"username": "user",
|
||||
"password": "pass"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Benefits:**
|
||||
- IP masking
|
||||
- Geographic flexibility
|
||||
- Reduces pattern detection
|
||||
|
||||
→ **[Full Proxy Guide](./proxy.md)**
|
||||
|
||||
---
|
||||
|
||||
## 📊 Monitoring
|
||||
|
||||
### Enable Diagnostics
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"diagnostics": {
|
||||
"enabled": true,
|
||||
"saveScreenshot": true,
|
||||
"saveHtml": true
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
→ **[Diagnostics Guide](./diagnostics.md)**
|
||||
|
||||
### Enable Notifications
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"conclusionWebhook": {
|
||||
"enabled": true,
|
||||
"url": "https://discord.com/api/webhooks/..."
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
→ **[Webhook Setup](./conclusionwebhook.md)**
|
||||
|
||||
---
|
||||
|
||||
## 🛠️ Incident Response
|
||||
|
||||
### Account Compromised
|
||||
|
||||
1. **Stop all automation**
|
||||
2. **Change password immediately**
|
||||
3. **Check sign-in activity** in Microsoft Account
|
||||
4. **Enable 2FA** if not already
|
||||
5. **Review security info** (recovery email, phone)
|
||||
6. **Contact Microsoft** if unauthorized access
|
||||
|
||||
### Temporary Ban
|
||||
|
||||
1. **Pause automation** for 48-72 hours
|
||||
2. **Reduce frequency** when resuming
|
||||
3. **Increase delays** in humanization
|
||||
4. **Use proxy** from your region
|
||||
5. **Monitor closely** after resuming
|
||||
|
||||
---
|
||||
|
||||
## 🔗 Privacy Tips
|
||||
|
||||
- 🔐 **Local-only** — All data stays on your machine
|
||||
- 🚫 **No telemetry** — Script doesn't phone home
|
||||
- 📁 **File security** — Restrict permissions
|
||||
- 🔄 **Regular backups** — Keep config backups
|
||||
- 🗑️ **Clean logs** — Delete old diagnostics
|
||||
|
||||
---
|
||||
|
||||
## 📚 Next Steps
|
||||
|
||||
**Setup humanization?**
|
||||
→ **[Humanization Guide](./humanization.md)**
|
||||
|
||||
**Need proxies?**
|
||||
→ **[Proxy Guide](./proxy.md)**
|
||||
|
||||
**Want monitoring?**
|
||||
→ **[Diagnostics](./diagnostics.md)**
|
||||
|
||||
---
|
||||
|
||||
**[← Back to Hub](./index.md)** | **[Config Guide](./config.md)**
|
||||
104
docs/update.md
Normal file
104
docs/update.md
Normal file
@@ -0,0 +1,104 @@
|
||||
# 🔄 Auto-Update
|
||||
|
||||
**Keep script up to date automatically**
|
||||
|
||||
---
|
||||
|
||||
## 💡 What Is It?
|
||||
|
||||
After each run, script checks for updates and installs them automatically.
|
||||
|
||||
**Already enabled by default!**
|
||||
|
||||
---
|
||||
|
||||
## ⚡ How It Works
|
||||
|
||||
### After Each Run
|
||||
|
||||
1. **Fetch latest** from GitHub
|
||||
2. **Pull changes** (safe fast-forward only)
|
||||
3. **Install dependencies** (`npm ci`)
|
||||
4. **Rebuild** (`npm run build`)
|
||||
|
||||
**No action needed from you!**
|
||||
|
||||
---
|
||||
|
||||
## ⚙️ Configuration
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"update": {
|
||||
"git": true, // Auto-update from Git
|
||||
"docker": false // Docker container updates (if using Docker)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 🐳 Docker Updates
|
||||
|
||||
If using Docker:
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"update": {
|
||||
"git": false,
|
||||
"docker": true
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Pulls latest Docker image and restarts container.
|
||||
|
||||
---
|
||||
|
||||
## 🛠️ Manual Update
|
||||
|
||||
### Git
|
||||
```bash
|
||||
git pull
|
||||
npm ci
|
||||
npm run build
|
||||
```
|
||||
|
||||
### Docker
|
||||
```bash
|
||||
docker compose pull
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## ⚠️ Troubleshooting
|
||||
|
||||
| Problem | Solution |
|
||||
|---------|----------|
|
||||
| **"Not a git repository"** | Clone repo (don't download ZIP) |
|
||||
| **"Local changes"** | Commit or stash your changes |
|
||||
| **"Update failed"** | Check internet connection |
|
||||
|
||||
### Reset to Remote
|
||||
|
||||
```bash
|
||||
git fetch origin
|
||||
git reset --hard origin/v2
|
||||
npm ci
|
||||
npm run build
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 📚 Next Steps
|
||||
|
||||
**Need security tips?**
|
||||
→ **[Security Guide](./security.md)**
|
||||
|
||||
**Setup scheduler?**
|
||||
→ **[Scheduler Guide](./schedule.md)**
|
||||
|
||||
---
|
||||
|
||||
**[← Back to Hub](./index.md)** | **[Config Guide](./config.md)**
|
||||
61
flake.lock
generated
Normal file
61
flake.lock
generated
Normal file
@@ -0,0 +1,61 @@
|
||||
{
|
||||
"nodes": {
|
||||
"flake-utils": {
|
||||
"inputs": {
|
||||
"systems": "systems"
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1731533236,
|
||||
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1749727998,
|
||||
"narHash": "sha256-mHv/yeUbmL91/TvV95p+mBVahm9mdQMJoqaTVTALaFw=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "fd487183437963a59ba763c0cc4f27e3447dd6dd",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "nixos-25.05",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"root": {
|
||||
"inputs": {
|
||||
"flake-utils": "flake-utils",
|
||||
"nixpkgs": "nixpkgs"
|
||||
}
|
||||
},
|
||||
"systems": {
|
||||
"locked": {
|
||||
"lastModified": 1681028828,
|
||||
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"type": "github"
|
||||
}
|
||||
}
|
||||
},
|
||||
"root": "root",
|
||||
"version": 7
|
||||
}
|
||||
40
flake.nix
Normal file
40
flake.nix
Normal file
@@ -0,0 +1,40 @@
|
||||
{
  # Flake inputs: a pinned nixpkgs release plus flake-utils for
  # per-system output boilerplate.
  inputs = {
    nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.05";
    flake-utils = {
      url = "github:numtide/flake-utils";
    };
  };

  outputs =
    { nixpkgs, flake-utils, ... }:
    flake-utils.lib.eachDefaultSystem (
      system:
      let
        pkgs = import nixpkgs {
          inherit system;
        };
      in
      {
        # Development shell (`nix develop`): provides Node.js, TypeScript
        # and pre-fetched Playwright browsers, then installs npm deps and
        # builds the project on shell entry.
        devShell = pkgs.mkShell {
          nativeBuildInputs = with pkgs; [
            nodejs
            playwright-driver.browsers
            typescript
            playwright-test

            # fixes "waiting until load" issue compared to
            # setting headless in config.jsonc
            xvfb-run
          ];

          shellHook = ''
            export PLAYWRIGHT_BROWSERS_PATH=${pkgs.playwright-driver.browsers}
            export PLAYWRIGHT_SKIP_VALIDATE_HOST_REQUIREMENTS=true
            npm i
            npm run build
          '';
        };
      }
    );
}
|
||||
3400
package-lock.json
generated
Normal file
3400
package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
77
package.json
Normal file
77
package.json
Normal file
@@ -0,0 +1,77 @@
|
||||
{
|
||||
"name": "microsoft-rewards-rewi",
|
||||
"version": "2.50.5",
|
||||
"description": "Automatically do tasks for Microsoft Rewards but in TS!",
|
||||
"private": true,
|
||||
"main": "index.js",
|
||||
"engines": {
|
||||
"node": ">=20.0.0"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/Light60-1/Microsoft-Rewards-Rewi.git"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/Light60-1/Microsoft-Rewards-Rewi/issues"
|
||||
},
|
||||
"homepage": "https://github.com/Light60-1/Microsoft-Rewards-Rewi#readme",
|
||||
"scripts": {
|
||||
"clean": "rimraf dist",
|
||||
"pre-build": "npm i && npm run clean && node -e \"process.exit(process.env.SKIP_PLAYWRIGHT_INSTALL?0:1)\" || npx playwright install chromium",
|
||||
"typecheck": "tsc --noEmit",
|
||||
"build": "tsc",
|
||||
"start": "node --enable-source-maps ./dist/index.js",
|
||||
"ts-start": "node --loader ts-node/esm ./src/index.ts",
|
||||
"dev": "ts-node ./src/index.ts -dev",
|
||||
"ts-schedule": "ts-node ./src/scheduler.ts",
|
||||
"start:schedule": "node --enable-source-maps ./dist/scheduler.js",
|
||||
"lint": "eslint \"src/**/*.{ts,tsx}\"",
|
||||
"prepare": "npm run build",
|
||||
"setup": "node ./setup/update/setup.mjs",
|
||||
"kill-chrome-win": "powershell -Command \"Get-Process | Where-Object { $_.MainModule.FileVersionInfo.FileDescription -eq 'Google Chrome for Testing' } | ForEach-Object { Stop-Process -Id $_.Id -Force }\"",
|
||||
"create-docker": "docker build -t microsoft-rewards-rewi-docker ."
|
||||
},
|
||||
"keywords": [
|
||||
"Bing Rewards",
|
||||
"Microsoft Rewards",
|
||||
"Bot",
|
||||
"Script",
|
||||
"TypeScript",
|
||||
"Playwright",
|
||||
"Cheerio"
|
||||
],
|
||||
"author": "Light60-1",
|
||||
"contributors": [
|
||||
"Microsoft-Rewards-Rewi (https://github.com/Light60-1/Microsoft-Rewards-Rewi)"
|
||||
],
|
||||
"license": "PROPRIETARY",
|
||||
"funding": {
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/Light60-1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/ms": "^0.7.34",
|
||||
"@types/node": "^20.19.24",
|
||||
"@typescript-eslint/eslint-plugin": "^7.17.0",
|
||||
"eslint": "^8.57.0",
|
||||
"eslint-plugin-modules-newline": "^0.0.6",
|
||||
"rimraf": "^6.0.1",
|
||||
"typescript": "^5.5.4"
|
||||
},
|
||||
"dependencies": {
|
||||
"axios": "^1.8.4",
|
||||
"chalk": "^4.1.2",
|
||||
"cheerio": "^1.0.0",
|
||||
"cron-parser": "^4.9.0",
|
||||
"fingerprint-generator": "^2.1.66",
|
||||
"fingerprint-injector": "^2.1.66",
|
||||
"http-proxy-agent": "^7.0.2",
|
||||
"https-proxy-agent": "^7.0.6",
|
||||
"luxon": "^3.5.0",
|
||||
"ms": "^2.1.3",
|
||||
"playwright": "1.52.0",
|
||||
"rebrowser-playwright": "1.52.0",
|
||||
"socks-proxy-agent": "^8.0.5",
|
||||
"ts-node": "^10.9.2"
|
||||
}
|
||||
}
|
||||
3
run.sh
Normal file
3
run.sh
Normal file
@@ -0,0 +1,3 @@
|
||||
#!/usr/bin/env bash
# Start the bot inside the Nix dev shell, wrapped in xvfb-run so a
# virtual X display is available (avoids the "waiting until load" issue
# seen when relying on headless mode alone — see flake.nix).

nix develop --command bash -c "xvfb-run npm run start"
|
||||
25
setup/setup.bat
Normal file
25
setup/setup.bat
Normal file
@@ -0,0 +1,25 @@
|
||||
@echo off
setlocal
REM Wrapper to run setup via npm (Windows)
REM Navigates to project root and runs npm run setup

REM %~dp0 expands to this script's directory; project root is one level up.
set SCRIPT_DIR=%~dp0
set PROJECT_ROOT=%SCRIPT_DIR%..

REM Sanity check: refuse to run outside the repository layout.
if not exist "%PROJECT_ROOT%\package.json" (
    echo [ERROR] package.json not found in project root.
    pause
    exit /b 1
)

echo Navigating to project root...
cd /d "%PROJECT_ROOT%"

echo Running setup script via npm...
call npm run setup
REM Capture npm's exit code before later commands overwrite ERRORLEVEL.
set EXITCODE=%ERRORLEVEL%
echo.
echo Setup finished with exit code %EXITCODE%.
echo Press Enter to close.
pause >NUL
exit /b %EXITCODE%
|
||||
35
setup/setup.sh
Normal file
35
setup/setup.sh
Normal file
@@ -0,0 +1,35 @@
|
||||
#!/usr/bin/env bash
set -euo pipefail

# Wrapper to run setup via npm (Linux/macOS)
# Resolve this script's directory, then the project root one level up.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/.." && pwd)"

echo "=== Prerequisite Check ==="

# npm is required: setup is ultimately invoked through `npm run setup`.
if command -v npm >/dev/null 2>&1; then
    NPM_VERSION="$(npm -v 2>/dev/null || true)"
    echo "npm detected: ${NPM_VERSION}"
else
    echo "[ERROR] npm not detected."
    echo " Install Node.js and npm from nodejs.org or your package manager"
    exit 1
fi

# Git is optional (only needed for auto-update), so just warn if missing.
if command -v git >/dev/null 2>&1; then
    GIT_VERSION="$(git --version 2>/dev/null || true)"
    echo "Git detected: ${GIT_VERSION}"
else
    echo "[WARN] Git not detected."
    echo " Install (Linux): e.g. 'sudo apt install git' (or your distro equivalent)."
fi

# Sanity check: make sure we are inside the repository layout.
if [ ! -f "${PROJECT_ROOT}/package.json" ]; then
    echo "[ERROR] package.json not found at ${PROJECT_ROOT}" >&2
    exit 1
fi

echo
echo "=== Running setup script via npm ==="
cd "${PROJECT_ROOT}"
# exec replaces this shell so npm's exit code becomes the script's.
exec npm run setup
|
||||
214
setup/update/setup.mjs
Normal file
214
setup/update/setup.mjs
Normal file
@@ -0,0 +1,214 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* Unified cross-platform setup script for Microsoft Rewards Script V2.
|
||||
*
|
||||
* Features:
|
||||
* - Renames accounts.example.jsonc -> accounts.json (idempotent)
|
||||
* - Guides user through account configuration (email, password, TOTP, proxy)
|
||||
* - Explains config.jsonc structure and key settings
|
||||
* - Installs dependencies (npm install)
|
||||
* - Builds TypeScript project (npm run build)
|
||||
* - Installs Playwright Chromium browser (idempotent with marker)
|
||||
* - Optional immediate start or manual start instructions
|
||||
*
|
||||
* V2 Updates:
|
||||
* - Enhanced prompts for new config.jsonc structure
|
||||
* - Explains humanization, scheduling, notifications
|
||||
* - References updated documentation (docs/config.md, docs/accounts.md)
|
||||
* - Improved user guidance for first-time setup
|
||||
*/
|
||||
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
import { spawn } from 'child_process';
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = path.dirname(__filename);
|
||||
// Project root = two levels up from setup/update directory
|
||||
const PROJECT_ROOT = path.resolve(__dirname, '..', '..');
|
||||
const SRC_DIR = path.join(PROJECT_ROOT, 'src');
|
||||
|
||||
function log(msg) { console.log(msg); }
|
||||
function warn(msg) { console.warn(msg); }
|
||||
function error(msg) { console.error(msg); }
|
||||
|
||||
/**
 * Renames src/accounts.example.jsonc to src/accounts.json on first run.
 * Idempotent: does nothing if accounts.json already exists, and only
 * warns when neither file is present.
 */
function renameAccountsIfNeeded() {
    const accountsPath = path.join(SRC_DIR, 'accounts.json');
    const examplePath = path.join(SRC_DIR, 'accounts.example.jsonc');

    // Already configured — never clobber a user's accounts file.
    if (fs.existsSync(accountsPath)) {
        log('accounts.json already exists - skipping rename.');
        return;
    }

    if (!fs.existsSync(examplePath)) {
        warn('Neither accounts.json nor accounts.example.jsonc found.');
        return;
    }

    log('Renaming accounts.example.jsonc to accounts.json...');
    fs.renameSync(examplePath, accountsPath);
}
|
||||
|
||||
/**
 * Writes a question to stdout and resolves with the user's first line of
 * input (trimmed). Listens for exactly one 'data' chunk on stdin.
 *
 * @param {string} question - Prompt text (no newline appended).
 * @returns {Promise<string>} The trimmed response.
 */
async function prompt(question) {
    process.stdout.write(question);
    const chunk = await new Promise((resolve) => {
        // 'once' removes the listener automatically after the first chunk.
        process.stdin.once('data', resolve);
    });
    return chunk.toString().trim();
}
|
||||
|
||||
/**
 * Prints account-configuration instructions, then blocks until the user
 * confirms (yes/y) that src/accounts.json has been filled in and saved.
 */
async function loopForAccountsConfirmation() {
    log('\n📝 Please configure your Microsoft accounts:');
    log('   - Open: src/accounts.json');
    log('   - Add your email and password for each account');
    log('   - Optional: Add TOTP secret for 2FA (see docs/accounts.md)');
    log('   - Optional: Configure proxy settings per account');
    log('   - Save the file (Ctrl+S or Cmd+S)\n');

    // Re-prompt until an affirmative answer is given.
    let confirmed = false;
    while (!confirmed) {
        const answer = (await prompt('Have you configured your accounts in accounts.json? (yes/no): ')).toLowerCase();
        if (answer === 'yes' || answer === 'y') {
            confirmed = true;
        } else if (answer === 'no' || answer === 'n') {
            log('Please configure accounts.json and save the file, then answer yes.');
        } else {
            log('Please answer yes or no.');
        }
    }
}
|
||||
|
||||
/**
 * Spawns a command with inherited stdio and resolves when it exits with
 * code 0; rejects on any non-zero exit or if the process fails to start.
 *
 * @param {string} cmd - Executable to run (shell is used on Windows).
 * @param {string[]} args - Command arguments.
 * @param {object} [opts] - Extra options merged into the spawn call.
 * @returns {Promise<void>}
 */
function runCommand(cmd, args, opts = {}) {
    return new Promise((resolve, reject) => {
        log(`Running: ${cmd} ${args.join(' ')}`);
        const child = spawn(cmd, args, { stdio: 'inherit', shell: process.platform === 'win32', ...opts });
        // BUGFIX: without this handler a spawn failure (e.g. command not
        // found on POSIX) emits an unhandled 'error' event and the
        // returned promise never settles.
        child.on('error', (err) => {
            reject(new Error(`${cmd} failed to start: ${err.message}`, { cause: err }));
        });
        child.on('exit', (code) => {
            if (code === 0) return resolve();
            // Note: code is null when the child was terminated by a signal.
            reject(new Error(`${cmd} exited with code ${code}`));
        });
    });
}
|
||||
|
||||
/**
 * Verifies npm is reachable by running `npm -v`; throws a descriptive
 * error (with the underlying failure attached as `cause`) otherwise.
 *
 * @throws {Error} When npm cannot be executed.
 */
async function ensureNpmAvailable() {
    try {
        await runCommand(process.platform === 'win32' ? 'npm.cmd' : 'npm', ['-v']);
    } catch (e) {
        // Preserve the original failure for debugging instead of discarding it.
        throw new Error('npm not found in PATH. Install Node.js first.', { cause: e });
    }
}
|
||||
|
||||
/**
 * "Start now" path: ensures a build and Playwright browsers exist, then
 * launches the bot via `npm run start`. Builds first only when
 * dist/index.js is missing.
 */
async function startOnly() {
    log('Starting program (npm run start)...');
    await ensureNpmAvailable();
    // Hoisted once instead of repeating the platform ternary per call.
    const npm = process.platform === 'win32' ? 'npm.cmd' : 'npm';
    // Assume user already installed & built; if dist missing inform user.
    const distIndex = path.join(PROJECT_ROOT, 'dist', 'index.js');
    if (!fs.existsSync(distIndex)) {
        warn('Build output not found. Running build first.');
        await runCommand(npm, ['run', 'build']);
    }
    // Browsers must be present regardless of whether a build was needed
    // (previously this call was duplicated in both branches).
    await installPlaywrightBrowsers();
    await runCommand(npm, ['run', 'start']);
}
|
||||
|
||||
/**
 * Full first-time setup flow: renames the example accounts file, waits
 * for the user to configure accounts, optionally pauses so config.jsonc
 * can be reviewed, then installs deps, builds, installs Playwright
 * browsers, and optionally starts the bot.
 */
async function fullSetup() {
    renameAccountsIfNeeded();
    await loopForAccountsConfirmation();

    log('\n⚙️ Configuration Options (src/config.jsonc):');
    log('   - browser.headless: Set to true for background operation');
    log('   - execution.clusters: Number of parallel account processes');
    log('   - workers: Enable/disable specific tasks (dailySet, searches, etc.)');
    log('   - humanization: Add natural delays and behavior (recommended: enabled)');
    log('   - schedule: Configure automated daily runs');
    log('   - notifications: Discord webhooks, NTFY push alerts');
    log('   📚 Full guide: docs/config.md\n');

    const reviewConfig = (await prompt('Do you want to review config.jsonc now? (yes/no): ')).toLowerCase();
    if (['yes', 'y'].includes(reviewConfig)) {
        // Exit so the user can edit config.jsonc; they re-run setup afterwards.
        log('⏸️ Setup paused. Please review src/config.jsonc, then re-run this setup.');
        log('   Common settings to check:');
        log('   - browser.headless (false = visible browser, true = background)');
        log('   - execution.runOnZeroPoints (false = skip when no points available)');
        log('   - humanization.enabled (true = natural behavior, recommended)');
        log('   - schedule.enabled (false = manual runs, true = automated scheduling)');
        log('\n   After editing config.jsonc, run: npm run setup');
        process.exit(0);
    }

    await ensureNpmAvailable();
    // Hoisted once instead of repeating the platform ternary per call.
    const npm = process.platform === 'win32' ? 'npm.cmd' : 'npm';
    await runCommand(npm, ['install']);
    await runCommand(npm, ['run', 'build']);
    await installPlaywrightBrowsers();

    log('\n✅ Setup complete!');
    log('   - Accounts configured: src/accounts.json');
    log('   - Configuration: src/config.jsonc');
    log('   - Documentation: docs/index.md\n');

    const start = (await prompt('Do you want to start the automation now? (yes/no): ')).toLowerCase();
    if (['yes', 'y'].includes(start)) {
        await runCommand(npm, ['run', 'start']);
    } else {
        log('\nFinished setup. To start later, run: npm start');
        log('For automated scheduling, run: npm run start:schedule');
    }
}
|
||||
|
||||
/**
 * Installs the Playwright Chromium browser once, using a marker file in
 * the project root to stay idempotent across repeated setup runs.
 * Failures are non-fatal: a warning with the underlying reason is printed.
 */
async function installPlaywrightBrowsers() {
    const PLAYWRIGHT_MARKER = path.join(PROJECT_ROOT, '.playwright-chromium-installed');
    // Idempotent: skip if marker exists
    if (fs.existsSync(PLAYWRIGHT_MARKER)) {
        log('Playwright chromium already installed (marker found).');
        return;
    }
    log('Ensuring Playwright chromium browser is installed...');
    try {
        await runCommand(process.platform === 'win32' ? 'npx.cmd' : 'npx', ['playwright', 'install', 'chromium']);
        fs.writeFileSync(PLAYWRIGHT_MARKER, new Date().toISOString());
        log('Playwright chromium install complete.');
    } catch (e) {
        // Surface the underlying failure instead of silently discarding it.
        warn(`Failed to install Playwright chromium automatically (${e.message}). You can manually run: npx playwright install chromium`);
    }
}
|
||||
|
||||
/**
 * Entry point: validates the project layout, shows the setup menu in a
 * loop until a valid choice is made, runs the chosen action, then (on a
 * Windows TTY) pauses so double-click users can read the output.
 */
async function main() {
    if (!fs.existsSync(SRC_DIR)) {
        error('[ERROR] Cannot find src directory at ' + SRC_DIR);
        process.exit(1);
    }
    process.chdir(PROJECT_ROOT);

    let done = false;
    while (!done) {
        log('============================');
        log(' Microsoft Rewards Setup ');
        log('============================');
        log('Select an option:');
        log(' 1) Start program now (skip setup)');
        log(' 2) Full first-time setup');
        log(' 3) Exit');
        const choice = (await prompt('Enter choice (1/2/3): ')).trim();
        switch (choice) {
            case '1':
                await startOnly();
                done = true;
                break;
            case '2':
                await fullSetup();
                done = true;
                break;
            case '3':
                log('Exiting.');
                process.exit(0);
                break;
            default:
                log('\nInvalid choice. Please select 1, 2 or 3.\n');
        }
    }
    // After completing action, optionally pause if launched by double click on Windows (no TTY detection simple heuristic)
    if (process.platform === 'win32' && process.stdin.isTTY) {
        log('\nDone. Press Enter to close.');
        await prompt('');
    }
    process.exit(0);
}
|
||||
|
||||
// Allow clean Ctrl+C
|
||||
process.on('SIGINT', () => { console.log('\nInterrupted.'); process.exit(1); });
|
||||
|
||||
main().catch(err => {
|
||||
error('\nSetup failed: ' + err.message);
|
||||
process.exit(1);
|
||||
});
|
||||
412
setup/update/update.mjs
Normal file
412
setup/update/update.mjs
Normal file
@@ -0,0 +1,412 @@
|
||||
/* eslint-disable linebreak-style */
|
||||
/**
|
||||
* Smart Auto-Update Script
|
||||
*
|
||||
* Intelligently updates while preserving user settings:
|
||||
* - ALWAYS updates code files (*.ts, *.js, etc.)
|
||||
* - ONLY updates config.jsonc if remote has changes to it
|
||||
* - ONLY updates accounts.json if remote has changes to it
|
||||
* - KEEPS user passwords/emails/settings otherwise
|
||||
*
|
||||
* Usage:
|
||||
* node setup/update/update.mjs --git
|
||||
* node setup/update/update.mjs --docker
|
||||
*/
|
||||
|
||||
import { spawn, execSync } from 'node:child_process'
|
||||
import { existsSync, readFileSync, writeFileSync, mkdirSync } from 'node:fs'
|
||||
import { join } from 'node:path'
|
||||
|
||||
/**
 * Removes // line comments and /* block comments *\/ from JSONC text
 * while leaving string contents (including escaped quotes and embedded
 * comment-like sequences) untouched. Line comments keep their trailing
 * newline; an unterminated block comment swallows the rest of the input.
 *
 * @param {string} input - JSONC source text.
 * @returns {string} JSON text with comments stripped.
 */
function stripJsonComments(input) {
    const out = []
    const n = input.length
    let i = 0
    let inString = false
    let quote = ''

    while (i < n) {
        const ch = input[i]
        const ahead = input[i + 1]

        if (inString) {
            out.push(ch)
            if (ch === '\\') {
                // Copy the escaped character verbatim so \" cannot end the string.
                if (i + 1 < n) out.push(input[i + 1])
                i += 2
                continue
            }
            if (ch === quote) inString = false
            i += 1
            continue
        }

        if (ch === '"' || ch === "'") {
            inString = true
            quote = ch
            out.push(ch)
            i += 1
            continue
        }

        if (ch === '/' && ahead === '//'[0] && ahead === '/') {
            // Line comment: drop everything up to (but not including) the newline.
            i += 2
            while (i < n && input[i] !== '\n' && input[i] !== '\r') i += 1
            continue
        }

        if (ch === '/' && ahead === '*') {
            // Block comment: drop through the closing */ (or to end of input).
            i += 2
            while (i < n && !(input[i] === '*' && input[i + 1] === '/')) i += 1
            i += 2
            continue
        }

        out.push(ch)
        i += 1
    }

    return out.join('')
}
|
||||
|
||||
/**
 * Reads the first parseable JSONC/JSON file from a list of candidate
 * paths. Missing files are skipped; parse/read failures fall through to
 * the next candidate. Returns null when no candidate could be parsed.
 *
 * @param {string[]} preferredPaths - Candidate file paths, in priority order.
 * @returns {object|null} Parsed config object, or null.
 */
function readJsonConfig(preferredPaths) {
    const existing = preferredPaths.filter((p) => existsSync(p))
    for (const candidate of existing) {
        let parsed
        try {
            // Strip a UTF-8 BOM first; JSON.parse rejects it.
            const text = readFileSync(candidate, 'utf8').replace(/^\uFEFF/, '')
            parsed = JSON.parse(stripJsonComments(text))
        } catch {
            // Unreadable or malformed: try the next candidate.
            continue
        }
        return parsed
    }
    return null
}
|
||||
|
||||
/**
 * Spawns a command and resolves with its exit code. Never rejects:
 * spawn failures resolve as 1, and a null exit code (signal kill)
 * resolves as 0 — matching the caller convention "0 means success".
 *
 * @param {string} cmd - Executable to run (shell is used on Windows).
 * @param {string[]} args - Command arguments.
 * @param {object} [opts] - Extra spawn options (may override defaults).
 * @returns {Promise<number>} Exit code.
 */
function run(cmd, args, opts = {}) {
    return new Promise((resolve) => {
        const options = { stdio: 'inherit', shell: process.platform === 'win32', ...opts }
        const child = spawn(cmd, args, options)
        child.on('error', () => resolve(1))
        child.on('close', (code) => resolve(code ?? 0))
    })
}
|
||||
|
||||
/**
 * Checks whether a command is resolvable on PATH using the platform's
 * lookup tool (`where` on Windows, `which` elsewhere).
 *
 * @param {string} cmd - Command name to probe.
 * @returns {Promise<boolean>} True when the command was found.
 */
async function which(cmd) {
    const locator = process.platform === 'win32' ? 'where' : 'which'
    return (await run(locator, [cmd], { stdio: 'ignore' })) === 0
}
|
||||
|
||||
/**
 * Runs a shell command synchronously and returns its trimmed stdout,
 * or null when the command fails or does not exist.
 *
 * @param {string} cmd - Full shell command line.
 * @returns {string|null} Trimmed stdout, or null on failure.
 */
function exec(cmd) {
    let output = null
    try {
        output = execSync(cmd, { encoding: 'utf8', stdio: ['pipe', 'pipe', 'pipe'] }).trim()
    } catch {
        // Non-zero exit or missing command: signalled with null.
    }
    return output
}
|
||||
|
||||
/**
 * Detects whether the repository is in a conflicted state: either
 * unmerged index entries, or a merge/rebase left mid-flight (marker
 * files inside .git).
 *
 * @returns {{hasConflicts: boolean, files: string[]}} Conflict summary.
 */
function hasUnresolvedConflicts() {
    // Unmerged index entries mean a conflict is still unresolved.
    const unmerged = exec('git ls-files -u')
    if (unmerged) {
        return { hasConflicts: true, files: unmerged.split('\n').filter(Boolean) }
    }

    // A merge or rebase in progress leaves marker paths in the git dir.
    const gitDir = exec('git rev-parse --git-dir')
    if (gitDir) {
        const markers = [
            join(gitDir, 'MERGE_HEAD'),
            join(gitDir, 'rebase-merge'),
            join(gitDir, 'rebase-apply')
        ]
        if (markers.some((p) => existsSync(p))) {
            return { hasConflicts: true, files: ['merge/rebase in progress'] }
        }
    }

    return { hasConflicts: false, files: [] }
}
|
||||
|
||||
/**
 * Best-effort cleanup of any in-flight Git operation. Each abort is a
 * no-op when that operation is not in progress (exec swallows the
 * resulting non-zero exit and returns null).
 */
function abortAllGitOperations() {
    console.log('Aborting any ongoing Git operations...')

    const aborts = [
        'git merge --abort',
        'git rebase --abort',
        'git cherry-pick --abort'
    ]
    for (const cmd of aborts) {
        exec(cmd)
    }

    console.log('Git operations aborted.')
}
|
||||
|
||||
async function updateGit() {
|
||||
const hasGit = await which('git')
|
||||
if (!hasGit) {
|
||||
console.log('Git not found. Skipping update.')
|
||||
return 1
|
||||
}
|
||||
|
||||
console.log('\n' + '='.repeat(60))
|
||||
console.log('Smart Git Update')
|
||||
console.log('='.repeat(60))
|
||||
|
||||
// Step 0: Check for existing conflicts FIRST
|
||||
const conflictCheck = hasUnresolvedConflicts()
|
||||
if (conflictCheck.hasConflicts) {
|
||||
console.log('\n⚠️ ERROR: Git repository has unresolved conflicts!')
|
||||
console.log('Conflicted files:')
|
||||
conflictCheck.files.forEach(f => console.log(` - ${f}`))
|
||||
console.log('\nAttempting automatic resolution...')
|
||||
|
||||
// Abort any ongoing operations
|
||||
abortAllGitOperations()
|
||||
|
||||
// Verify conflicts are cleared
|
||||
const recheckConflicts = hasUnresolvedConflicts()
|
||||
if (recheckConflicts.hasConflicts) {
|
||||
console.log('\n❌ Could not automatically resolve conflicts.')
|
||||
console.log('Manual intervention required. Please run:')
|
||||
console.log(' git status')
|
||||
console.log(' git reset --hard origin/main # WARNING: This will discard ALL local changes')
|
||||
console.log('\nUpdate aborted for safety.')
|
||||
return 1
|
||||
}
|
||||
|
||||
console.log('✓ Conflicts cleared. Continuing with update...\n')
|
||||
}
|
||||
|
||||
// Step 1: Read config to get user preferences
|
||||
let userConfig = { autoUpdateConfig: false, autoUpdateAccounts: false }
|
||||
const configData = readJsonConfig([
|
||||
"src/config.jsonc",
|
||||
"config.jsonc",
|
||||
"src/config.json",
|
||||
"config.json"
|
||||
])
|
||||
|
||||
if (!configData) {
|
||||
console.log('Warning: Could not read config.jsonc, using defaults (preserve local files)')
|
||||
} else if (configData.update) {
|
||||
userConfig.autoUpdateConfig = configData.update.autoUpdateConfig ?? false
|
||||
userConfig.autoUpdateAccounts = configData.update.autoUpdateAccounts ?? false
|
||||
}
|
||||
|
||||
console.log('\nUser preferences:')
|
||||
console.log(` Auto-update config.jsonc: ${userConfig.autoUpdateConfig}`)
|
||||
console.log(` Auto-update accounts.json: ${userConfig.autoUpdateAccounts}`)
|
||||
|
||||
// Step 2: Fetch
|
||||
console.log('\nFetching latest changes...')
|
||||
await run('git', ['fetch', '--all', '--prune'])
|
||||
|
||||
// Step 3: Get current branch
|
||||
const currentBranch = exec('git branch --show-current')
|
||||
if (!currentBranch) {
|
||||
console.log('Could not determine current branch.')
|
||||
return 1
|
||||
}
|
||||
|
||||
// Step 4: Check which files changed in remote
|
||||
const remoteBranch = `origin/${currentBranch}`
|
||||
const filesChanged = exec(`git diff --name-only HEAD ${remoteBranch}`)
|
||||
|
||||
if (!filesChanged) {
|
||||
console.log('Already up to date!')
|
||||
return 0
|
||||
}
|
||||
|
||||
const changedFiles = filesChanged.split('\n').filter(f => f.trim())
|
||||
const configChanged = changedFiles.includes('src/config.jsonc')
|
||||
const accountsChanged = changedFiles.includes('src/accounts.json')
|
||||
|
||||
// Step 5: ALWAYS backup config and accounts (smart strategy!)
|
||||
const backupDir = join(process.cwd(), '.update-backup')
|
||||
mkdirSync(backupDir, { recursive: true })
|
||||
|
||||
const filesToRestore = []
|
||||
|
||||
if (existsSync('src/config.jsonc')) {
|
||||
console.log('\nBacking up config.jsonc...')
|
||||
writeFileSync(join(backupDir, 'config.jsonc'), readFileSync('src/config.jsonc', 'utf8'))
|
||||
// ALWAYS restore config unless user explicitly wants auto-update
|
||||
if (!userConfig.autoUpdateConfig) {
|
||||
filesToRestore.push('config.jsonc')
|
||||
}
|
||||
}
|
||||
|
||||
if (existsSync('src/accounts.json')) {
|
||||
console.log('Backing up accounts.json...')
|
||||
writeFileSync(join(backupDir, 'accounts.json'), readFileSync('src/accounts.json', 'utf8'))
|
||||
// ALWAYS restore accounts unless user explicitly wants auto-update
|
||||
if (!userConfig.autoUpdateAccounts) {
|
||||
filesToRestore.push('accounts.json')
|
||||
}
|
||||
}
|
||||
|
||||
// Show what will happen
|
||||
console.log('\nUpdate strategy:')
|
||||
console.log(` config.jsonc: ${userConfig.autoUpdateConfig ? 'WILL UPDATE from remote' : 'KEEPING YOUR LOCAL VERSION (always)'}`)
|
||||
console.log(` accounts.json: ${userConfig.autoUpdateAccounts ? 'WILL UPDATE from remote' : 'KEEPING YOUR LOCAL VERSION (always)'}`)
|
||||
console.log(' All other files: will update from remote')
|
||||
|
||||
// Step 6: Handle local changes intelligently
|
||||
// Check if there are uncommitted changes to config/accounts
|
||||
const localChanges = exec('git status --porcelain')
|
||||
const hasConfigChanges = localChanges && localChanges.includes('src/config.jsonc')
|
||||
const hasAccountChanges = localChanges && localChanges.includes('src/accounts.json')
|
||||
|
||||
if (hasConfigChanges && !userConfig.autoUpdateConfig) {
|
||||
console.log('\n✓ Detected local changes to config.jsonc - will preserve them')
|
||||
}
|
||||
|
||||
if (hasAccountChanges && !userConfig.autoUpdateAccounts) {
|
||||
console.log('✓ Detected local changes to accounts.json - will preserve them')
|
||||
}
|
||||
|
||||
// Step 7: Stash ALL changes (including untracked)
|
||||
const hasChanges = exec('git status --porcelain')
|
||||
let stashCreated = false
|
||||
if (hasChanges) {
|
||||
console.log('\nStashing local changes (including config/accounts)...')
|
||||
await run('git', ['stash', 'push', '-u', '-m', 'Auto-update backup with untracked files'])
|
||||
stashCreated = true
|
||||
}
|
||||
|
||||
// Step 8: Pull with strategy to handle diverged branches
|
||||
console.log('\nPulling latest code...')
|
||||
let pullCode = await run('git', ['pull', '--rebase'])
|
||||
|
||||
if (pullCode !== 0) {
|
||||
console.log('\n❌ Pull failed! Checking for conflicts...')
|
||||
|
||||
// Check if it's a conflict
|
||||
const postPullConflicts = hasUnresolvedConflicts()
|
||||
if (postPullConflicts.hasConflicts) {
|
||||
console.log('Conflicts detected during pull:')
|
||||
postPullConflicts.files.forEach(f => console.log(` - ${f}`))
|
||||
|
||||
// Abort the rebase/merge
|
||||
console.log('\nAborting failed pull...')
|
||||
abortAllGitOperations()
|
||||
|
||||
// Pop stash before giving up
|
||||
if (stashCreated) {
|
||||
console.log('Restoring stashed changes...')
|
||||
await run('git', ['stash', 'pop'])
|
||||
}
|
||||
|
||||
console.log('\n⚠️ Update failed due to conflicts.')
|
||||
console.log('Your local changes have been preserved.')
|
||||
console.log('\nTo force update (DISCARDS local changes), run:')
|
||||
console.log(' git fetch --all')
|
||||
console.log(' git reset --hard origin/main')
|
||||
console.log(' npm ci && npm run build')
|
||||
|
||||
return 1
|
||||
}
|
||||
|
||||
// Not a conflict, just a generic pull failure
|
||||
console.log('Pull failed for unknown reason.')
|
||||
if (stashCreated) await run('git', ['stash', 'pop'])
|
||||
return pullCode
|
||||
}
|
||||
|
||||
// Step 9: Restore user files based on preferences
|
||||
if (filesToRestore.length > 0) {
|
||||
console.log('\nRestoring your local files (per config preferences)...')
|
||||
for (const file of filesToRestore) {
|
||||
const content = readFileSync(join(backupDir, file), 'utf8')
|
||||
writeFileSync(join('src', file), content)
|
||||
console.log(` ✓ Restored ${file}`)
|
||||
}
|
||||
}
|
||||
|
||||
// Step 10: Restore stash (but skip config/accounts if we already restored them)
|
||||
if (stashCreated) {
|
||||
console.log('\nRestoring stashed changes...')
|
||||
// Pop stash but auto-resolve conflicts by keeping our versions
|
||||
const popCode = await run('git', ['stash', 'pop'])
|
||||
|
||||
if (popCode !== 0) {
|
||||
console.log('⚠️ Stash pop had conflicts - resolving automatically...')
|
||||
|
||||
// For config/accounts, keep our version (--ours)
|
||||
if (!userConfig.autoUpdateConfig) {
|
||||
await run('git', ['checkout', '--ours', 'src/config.jsonc'])
|
||||
await run('git', ['add', 'src/config.jsonc'])
|
||||
}
|
||||
|
||||
if (!userConfig.autoUpdateAccounts) {
|
||||
await run('git', ['checkout', '--ours', 'src/accounts.json'])
|
||||
await run('git', ['add', 'src/accounts.json'])
|
||||
}
|
||||
|
||||
// Drop the stash since we resolved manually
|
||||
await run('git', ['reset'])
|
||||
await run('git', ['stash', 'drop'])
|
||||
|
||||
console.log('✓ Conflicts auto-resolved')
|
||||
}
|
||||
}
|
||||
|
||||
// Step 9: Install & build
|
||||
const hasNpm = await which('npm')
|
||||
if (!hasNpm) return 0
|
||||
|
||||
console.log('\nInstalling dependencies...')
|
||||
await run('npm', ['ci'])
|
||||
|
||||
console.log('\nBuilding project...')
|
||||
const buildCode = await run('npm', ['run', 'build'])
|
||||
|
||||
console.log('\n' + '='.repeat(60))
|
||||
console.log('Update completed!')
|
||||
console.log('='.repeat(60) + '\n')
|
||||
|
||||
return buildCode
|
||||
}
|
||||
|
||||
// Refresh the Docker deployment: pull the newest images, then recreate containers.
// Returns a process exit code (0 = success; 1 when docker is unavailable;
// otherwise the exit code of `docker compose up -d`).
async function updateDocker() {
    const hasDocker = await which('docker')
    if (!hasDocker) return 1
    // Prefer compose v2 (docker compose)
    const pullCode = await run('docker', ['compose', 'pull'])
    if (pullCode !== 0) {
        // FIX: the pull result used to be silently discarded. Keep the
        // best-effort behavior (still (re)start containers with the images
        // already present locally), but surface the failure so a broken
        // registry or network problem is visible in the logs.
        console.log(`⚠️ docker compose pull exited with code ${pullCode}; starting with local images`)
    }
    return run('docker', ['compose', 'up', '-d'])
}
|
||||
|
||||
// Entry point: parse CLI flags and run the requested update steps.
// Flags: --git (pull latest code and rebuild), --docker (refresh containers).
async function main() {
    const flags = new Set(process.argv.slice(2))

    let exitCode = 0
    if (flags.has('--git')) {
        exitCode = await updateGit()
    }
    // The Docker refresh only runs when the git step (if requested) succeeded.
    if (flags.has('--docker') && exitCode === 0) {
        exitCode = await updateDocker()
    }

    // Always exit explicitly, even when invoked from the scheduler: the
    // scheduler expects this script to terminate on its own, otherwise the
    // process hangs indefinitely and ends up killed by the watchdog.
    process.exit(exitCode)
}
|
||||
|
||||
// Run the updater; any unhandled failure is reported and mapped to exit code 1.
;(async () => {
    try {
        await main()
    } catch (err) {
        console.error('Update script error:', err)
        process.exit(1)
    }
})()
|
||||
155
src/accounts.example.jsonc
Normal file
155
src/accounts.example.jsonc
Normal file
@@ -0,0 +1,155 @@
|
||||
{
    // ============================================================
    // 📧 MICROSOFT ACCOUNTS CONFIGURATION
    // ============================================================

    // ⚠️ IMPORTANT SECURITY NOTICE
    // This file contains sensitive credentials. Never commit the real accounts.jsonc to version control.
    // The .gitignore is configured to exclude accounts.jsonc but you should verify it's not tracked.
    // NOTE(review): the update script backs up/restores src/accounts.json — confirm which
    // filename (accounts.json vs accounts.jsonc) the bot actually loads.

    // 📊 MICROSOFT ACCOUNT LIMITS (Unofficial Guidelines)
    // - New accounts per IP per day: ~3 (official soft limit)
    // - Recommended active accounts per household IP: ~5 (to avoid suspicion)
    // - Creating too many accounts quickly may trigger verification (phone, OTP, captcha)
    // - Unusual activity can result in temporary blocks or account restrictions

    "accounts": [
        {
            // ============================================================
            // 👤 ACCOUNT 1
            // ============================================================

            // Enable or disable this account (true = active, false = skip)
            "enabled": true,

            // Microsoft account email address
            "email": "email_1@outlook.com",

            // Account password
            "password": "password_1",

            // Two-Factor Authentication (2FA) TOTP secret (optional but HIGHLY recommended for security)
            // Steps:
            // 1. Visit https://account.live.com/proofs/Manage/additional and enable two-step verification.
            // 2. When the wizard appears, click the blue link "Set up a different authenticator app".
            // 3. On the next page click "I can't scan the bar code" to reveal the Base32 secret (letters + numbers).
            // 4. Scan the QR with your authenticator (Google Authenticator recommended) AND copy the secret shown.
            // 5. Paste the secret here. The same secret can stay in your app and power the bot simultaneously.
            // Format: Base32 secret key (e.g., "JBSWY3DPEHPK3PXP"). Leave empty "" if 2FA is not enabled.
            "totp": "",

            // ⚠️ REQUIRED: Recovery email address associated with this Microsoft account
            // During login, Microsoft shows the first 2 characters and the domain of the recovery email (e.g., "ab***@example.com")
            // This field is MANDATORY to detect account compromise or bans:
            // - The script compares what Microsoft displays with this configured recovery email
            // - If they don't match, it alerts you that the account may be compromised or the recovery email was changed
            // - This security check helps identify hijacked accounts before they cause issues
            // Format: Full recovery email address (e.g., "backup@gmail.com")
            "recoveryEmail": "your_email@domain.com",

            // ============================================================
            // 🌐 PROXY CONFIGURATION (Optional)
            // ============================================================

            "proxy": {
                // Enable proxy for HTTP requests (axios/API calls)
                // If false, proxy is only used for browser automation
                "proxyAxios": true,

                // Proxy server URL (protocol optional)
                // Examples: "proxy.example.com", "http://proxy.example.com", "socks5://proxy.example.com"
                // Leave empty "" to disable proxy for this account
                "url": "",

                // Proxy port number
                "port": 0,

                // Proxy authentication username (leave empty if no auth required)
                "username": "",

                // Proxy authentication password (leave empty if no auth required)
                "password": ""
            }
        },

        {
            // ============================================================
            // 👤 ACCOUNT 2
            // ============================================================
            // Same fields as ACCOUNT 1 — see the comments above for details.

            "enabled": false,
            "email": "email_2@outlook.com",
            "password": "password_2",
            "totp": "",
            "recoveryEmail": "your_email@domain.com",

            "proxy": {
                "proxyAxios": true,
                "url": "",
                "port": 0,
                "username": "",
                "password": ""
            }
        },

        {
            // ============================================================
            // 👤 ACCOUNT 3
            // ============================================================

            "enabled": false,
            "email": "email_3@outlook.com",
            "password": "password_3",
            "totp": "",
            "recoveryEmail": "your_email@domain.com",

            "proxy": {
                "proxyAxios": true,
                "url": "",
                "port": 0,
                "username": "",
                "password": ""
            }
        },

        {
            // ============================================================
            // 👤 ACCOUNT 4
            // ============================================================

            "enabled": false,
            "email": "email_4@outlook.com",
            "password": "password_4",
            "totp": "",
            "recoveryEmail": "your_email@domain.com",

            "proxy": {
                "proxyAxios": true,
                "url": "",
                "port": 0,
                "username": "",
                "password": ""
            }
        },

        {
            // ============================================================
            // 👤 ACCOUNT 5
            // ============================================================

            "enabled": false,
            "email": "email_5@outlook.com",
            "password": "password_5",
            "totp": "",
            "recoveryEmail": "your_email@domain.com",

            "proxy": {
                "proxyAxios": true,
                "url": "",
                "port": 0,
                "username": "",
                "password": ""
            }
        }
    ]
}
|
||||
192
src/browser/Browser.ts
Normal file
192
src/browser/Browser.ts
Normal file
@@ -0,0 +1,192 @@
|
||||
import playwright, { BrowserContext } from 'rebrowser-playwright'
|
||||
|
||||
import { newInjectedContext } from 'fingerprint-injector'
|
||||
import { FingerprintGenerator } from 'fingerprint-generator'
|
||||
|
||||
import { MicrosoftRewardsBot } from '../index'
|
||||
import { loadSessionData, saveFingerprintData } from '../util/Load'
|
||||
import { updateFingerprintUserAgent } from '../util/UserAgent'
|
||||
|
||||
import { AccountProxy } from '../interface/Account'
|
||||
|
||||
/* Test Stuff
|
||||
https://abrahamjuliot.github.io/creepjs/
|
||||
https://botcheck.luminati.io/
|
||||
https://fv.pro/
|
||||
https://pixelscan.net/
|
||||
https://www.browserscan.net/
|
||||
*/
|
||||
|
||||
/**
 * Factory for stealth Playwright browser contexts.
 *
 * Launches Chromium (via rebrowser-playwright), injects a generated or
 * previously saved fingerprint, restores session cookies for the account,
 * and applies proxy / viewport / timeout settings from the bot config.
 */
class Browser {
    // Owning bot instance: source of config, logging and utils.
    private bot: MicrosoftRewardsBot

    constructor(bot: MicrosoftRewardsBot) {
        this.bot = bot
    }

    /**
     * Create a fully configured browser context for one account.
     * @param proxy Per-account proxy settings (may be empty/disabled).
     * @param email Account email; used as the session/fingerprint storage key.
     * @returns A BrowserContext with fingerprint, cookies and timeouts applied.
     * @throws Re-throws any launch failure after logging guidance.
     */
    async createBrowser(proxy: AccountProxy, email: string): Promise<BrowserContext> {
        // Optional automatic browser installation (set AUTO_INSTALL_BROWSERS=1)
        if (process.env.AUTO_INSTALL_BROWSERS === '1') {
            try {
                // Dynamically import child_process to avoid overhead otherwise
                const { execSync } = await import('child_process')
                execSync('npx playwright install chromium', { stdio: 'ignore' })
            } catch { /* silent */ }
        }

        let browser: import('rebrowser-playwright').Browser
        try {
            // FORCE_HEADLESS env takes precedence (used in Docker with headless shell only)
            const envForceHeadless = process.env.FORCE_HEADLESS === '1'
            // Support legacy config.headless OR nested config.browser.headless
            const legacyHeadless = (this.bot.config as { headless?: boolean }).headless
            const nestedHeadless = (this.bot.config.browser as { headless?: boolean } | undefined)?.headless
            let headlessValue = envForceHeadless ? true : (legacyHeadless ?? nestedHeadless ?? false)
            // Buy mode requires an interactive (headed) window unless headless is forced by env.
            if (this.bot.isBuyModeEnabled() && !envForceHeadless) {
                if (headlessValue !== false) {
                    const target = this.bot.getBuyModeTarget()
                    this.bot.log(this.bot.isMobile, 'BROWSER', `Buy mode detected${target ? ` for ${target}` : ''}; forcing headless=false so captchas and manual flows remain interactive.`, 'warn')
                }
                headlessValue = false
            }
            const headless: boolean = Boolean(headlessValue)

            const engineName = 'chromium' // current hard-coded engine
            this.bot.log(this.bot.isMobile, 'BROWSER', `Launching ${engineName} (headless=${headless})`) // explicit engine log
            const proxyConfig = this.buildPlaywrightProxy(proxy)

            browser = await playwright.chromium.launch({
                // Optional: uncomment to use Edge instead of Chromium
                // channel: 'msedge',
                headless,
                ...(proxyConfig && { proxy: proxyConfig }),
                args: [
                    '--no-sandbox',
                    '--mute-audio',
                    '--disable-setuid-sandbox',
                    '--ignore-certificate-errors',
                    '--ignore-certificate-errors-spki-list',
                    '--ignore-ssl-errors'
                ]
            })
        } catch (e: unknown) {
            const msg = (e instanceof Error ? e.message : String(e))
            // Common missing browser executable guidance
            if (/Executable doesn't exist/i.test(msg)) {
                this.bot.log(this.bot.isMobile, 'BROWSER', 'Chromium not installed for Playwright. Run "npm run pre-build" to install all dependencies (or set AUTO_INSTALL_BROWSERS=1 to auto-attempt).', 'error')
            } else {
                this.bot.log(this.bot.isMobile, 'BROWSER', 'Failed to launch browser: ' + msg, 'error')
            }
            throw e
        }

        // Resolve saveFingerprint from legacy root or new fingerprinting.saveFingerprint
        const legacyFp = (this.bot.config as { saveFingerprint?: { mobile: boolean; desktop: boolean } }).saveFingerprint
        const nestedFp = (this.bot.config.fingerprinting as { saveFingerprint?: { mobile: boolean; desktop: boolean } } | undefined)?.saveFingerprint
        const saveFingerprint = legacyFp || nestedFp || { mobile: false, desktop: false }

        const sessionData = await loadSessionData(this.bot.config.sessionPath, email, this.bot.isMobile, saveFingerprint)

        // Reuse the persisted fingerprint when one exists; otherwise generate a fresh one.
        const fingerprint = sessionData.fingerprint ? sessionData.fingerprint : await this.generateFingerprint()

        const context = await newInjectedContext(browser as unknown as import('playwright').Browser, { fingerprint: fingerprint })

        // Set timeout to preferred amount (supports legacy globalTimeout or browser.globalTimeout)
        const legacyTimeout = (this.bot.config as { globalTimeout?: number | string }).globalTimeout
        const nestedTimeout = (this.bot.config.browser as { globalTimeout?: number | string } | undefined)?.globalTimeout
        const globalTimeout = legacyTimeout ?? nestedTimeout ?? 30000
        context.setDefaultTimeout(this.bot.utils.stringToMs(globalTimeout))

        // Normalize viewport and page rendering so content fits typical screens
        try {
            const desktopViewport = { width: 1280, height: 800 }
            const mobileViewport = { width: 390, height: 844 }

            context.on('page', async (page) => {
                try {
                    // Set a reasonable viewport size depending on device type
                    if (this.bot.isMobile) {
                        await page.setViewportSize(mobileViewport)
                    } else {
                        await page.setViewportSize(desktopViewport)
                    }

                    // Inject a tiny CSS to avoid gigantic scaling on some environments
                    await page.addInitScript(() => {
                        try {
                            const style = document.createElement('style')
                            style.id = '__mrs_fit_style'
                            style.textContent = `
html, body { overscroll-behavior: contain; }
/* Mild downscale to keep content within window on very large DPI */
@media (min-width: 1000px) {
html { zoom: 0.9 !important; }
}
`
                            document.documentElement.appendChild(style)
                        } catch { /* ignore */ }
                    })
                } catch { /* ignore */ }
            })
        } catch { /* ignore */ }

        await context.addCookies(sessionData.cookies)

        // Persist fingerprint when feature is configured
        if (saveFingerprint.mobile || saveFingerprint.desktop) {
            await saveFingerprintData(this.bot.config.sessionPath, email, this.bot.isMobile, fingerprint)
        }

        this.bot.log(this.bot.isMobile, 'BROWSER', `Created browser with User-Agent: "${fingerprint.fingerprint.navigator.userAgent}"`)

        return context as BrowserContext
    }

    /**
     * Convert an AccountProxy into a Playwright proxy descriptor.
     * Accepts URLs with or without a scheme (defaults to http://) and
     * falls back to the numeric `port` field when the URL has no port.
     * @returns Proxy options for playwright.launch(), or undefined when
     *          the proxy is disabled/invalid (errors are logged, not thrown).
     */
    private buildPlaywrightProxy(proxy: AccountProxy): { server: string; username?: string; password?: string } | undefined {
        const { url, port, username, password } = proxy
        if (!url) return undefined

        const trimmed = url.trim()
        // Detect an explicit scheme (http://, socks5://, ...); otherwise assume http.
        const hasScheme = /^[a-zA-Z][a-zA-Z0-9+.-]*:/.test(trimmed)
        const candidate = hasScheme ? trimmed : `http://${trimmed}`

        let parsed: URL
        try {
            parsed = new URL(candidate)
        } catch (err) {
            this.bot.log(this.bot.isMobile, 'BROWSER', `Invalid proxy URL "${url}": ${err instanceof Error ? err.message : String(err)}`, 'error')
            return undefined
        }

        if (!parsed.port) {
            if (port) {
                parsed.port = String(port)
            } else {
                this.bot.log(this.bot.isMobile, 'BROWSER', `Proxy port missing for "${url}"`, 'error')
                return undefined
            }
        }

        const server = `${parsed.protocol}//${parsed.hostname}${parsed.port ? `:${parsed.port}` : ''}`

        // Only include credentials that are actually set.
        const auth: { username?: string; password?: string } = {}
        if (username) auth.username = username
        if (password) auth.password = password

        return { server, ...auth }
    }

    /**
     * Generate a fresh device fingerprint matching the bot's device type
     * (Android/mobile vs Windows/desktop, Edge browser), then patch its
     * user agent via updateFingerprintUserAgent.
     */
    async generateFingerprint() {
        const fingerPrintData = new FingerprintGenerator().getFingerprint({
            devices: this.bot.isMobile ? ['mobile'] : ['desktop'],
            operatingSystems: this.bot.isMobile ? ['android'] : ['windows'],
            browsers: [{ name: 'edge' }]
        })

        const updatedFingerPrintData = await updateFingerprintUserAgent(fingerPrintData, this.bot.isMobile)

        return updatedFingerPrintData
    }
}

export default Browser
|
||||
544
src/browser/BrowserFunc.ts
Normal file
544
src/browser/BrowserFunc.ts
Normal file
@@ -0,0 +1,544 @@
|
||||
import { BrowserContext, Page } from 'rebrowser-playwright'
|
||||
import { CheerioAPI, load } from 'cheerio'
|
||||
import { AxiosRequestConfig } from 'axios'
|
||||
|
||||
import { MicrosoftRewardsBot } from '../index'
|
||||
import { saveSessionData } from '../util/Load'
|
||||
import { TIMEOUTS, RETRY_LIMITS, SELECTORS, URLS } from '../constants'
|
||||
|
||||
import { Counters, DashboardData, MorePromotion, PromotionalItem } from '../interface/DashboardData'
|
||||
import { QuizData } from '../interface/QuizData'
|
||||
import { AppUserData } from '../interface/AppUserData'
|
||||
import { EarnablePoints } from '../interface/Points'
|
||||
|
||||
|
||||
export default class BrowserFunc {
|
||||
private bot: MicrosoftRewardsBot
|
||||
|
||||
// Keep a back-reference to the owning bot so helpers can reach config, logging and utils.
constructor(bot: MicrosoftRewardsBot) {
    this.bot = bot
}
|
||||
|
||||
|
||||
/**
|
||||
* Navigate the provided page to rewards homepage
|
||||
* @param {Page} page Playwright page
|
||||
*/
|
||||
async goHome(page: Page) {
|
||||
|
||||
try {
|
||||
const dashboardURL = new URL(this.bot.config.baseURL)
|
||||
|
||||
if (page.url() === dashboardURL.href) {
|
||||
return
|
||||
}
|
||||
|
||||
await page.goto(this.bot.config.baseURL)
|
||||
|
||||
for (let iteration = 1; iteration <= RETRY_LIMITS.GO_HOME_MAX; iteration++) {
|
||||
await this.bot.utils.wait(TIMEOUTS.LONG)
|
||||
await this.bot.browser.utils.tryDismissAllMessages(page)
|
||||
|
||||
try {
|
||||
// If activities are found, exit the loop (SUCCESS - account is OK)
|
||||
await page.waitForSelector(SELECTORS.MORE_ACTIVITIES, { timeout: 1000 })
|
||||
this.bot.log(this.bot.isMobile, 'GO-HOME', 'Visited homepage successfully')
|
||||
break
|
||||
|
||||
} catch (error) {
|
||||
// Activities not found yet - check if it's because account is suspended
|
||||
// Only check suspension if we can't find activities (reduces false positives)
|
||||
const suspendedByHeader = await page.waitForSelector(SELECTORS.SUSPENDED_ACCOUNT, { state: 'visible', timeout: 500 }).then(() => true).catch(() => false)
|
||||
|
||||
if (suspendedByHeader) {
|
||||
this.bot.log(this.bot.isMobile, 'GO-HOME', `Account suspension detected by header selector (iteration ${iteration})`, 'error')
|
||||
throw new Error('Account has been suspended!')
|
||||
}
|
||||
|
||||
// Secondary check: look for suspension text in main content area only
|
||||
try {
|
||||
const mainContent = (await page.locator('#contentContainer, #main, .main-content').first().textContent({ timeout: 500 }).catch(() => '')) || ''
|
||||
const suspensionPatterns = [
|
||||
/account\s+has\s+been\s+suspended/i,
|
||||
/suspended\s+due\s+to\s+unusual\s+activity/i,
|
||||
/your\s+account\s+is\s+temporarily\s+suspended/i
|
||||
]
|
||||
|
||||
const isSuspended = suspensionPatterns.some(pattern => pattern.test(mainContent))
|
||||
if (isSuspended) {
|
||||
this.bot.log(this.bot.isMobile, 'GO-HOME', `Account suspension detected by content text (iteration ${iteration})`, 'error')
|
||||
throw new Error('Account has been suspended!')
|
||||
}
|
||||
} catch (e) {
|
||||
// Ignore errors in text check - not critical
|
||||
this.bot.log(this.bot.isMobile, 'GO-HOME', `Suspension text check skipped: ${e}`, 'warn')
|
||||
}
|
||||
|
||||
// Not suspended, just activities not loaded yet - continue to next iteration
|
||||
this.bot.log(this.bot.isMobile, 'GO-HOME', `Activities not found yet (iteration ${iteration}/${RETRY_LIMITS.GO_HOME_MAX}), retrying...`, 'warn')
|
||||
}
|
||||
|
||||
// Below runs if the homepage was unable to be visited
|
||||
const currentURL = new URL(page.url())
|
||||
|
||||
if (currentURL.hostname !== dashboardURL.hostname) {
|
||||
await this.bot.browser.utils.tryDismissAllMessages(page)
|
||||
|
||||
await this.bot.utils.wait(TIMEOUTS.MEDIUM_LONG)
|
||||
await page.goto(this.bot.config.baseURL)
|
||||
} else {
|
||||
this.bot.log(this.bot.isMobile, 'GO-HOME', 'Visited homepage successfully')
|
||||
break
|
||||
}
|
||||
|
||||
await this.bot.utils.wait(TIMEOUTS.VERY_LONG)
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error)
|
||||
this.bot.log(this.bot.isMobile, 'GO-HOME', 'An error occurred: ' + errorMessage, 'error')
|
||||
throw new Error('Go home failed: ' + errorMessage)
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Fetch user dashboard data.
 *
 * Reloads the dashboard page (with retries), then scrapes the inline
 * `var dashboard = {...}` script tag and parses the embedded JSON.
 * Includes one full navigation-recovery pass if the script is missing,
 * and captures diagnostics before failing.
 *
 * @param {Page} [page] Page to scrape; defaults to the bot's home page.
 * @returns {DashboardData} Object of user bing rewards dashboard data
 * @throws {Error} 'Get dashboard data failed: ...' when the reload fails,
 *                 the script cannot be found, or the JSON cannot be parsed.
 */
async getDashboardData(page?: Page): Promise<DashboardData> {
    const target = page ?? this.bot.homePage
    const dashboardURL = new URL(this.bot.config.baseURL)
    const currentURL = new URL(target.url())

    try {
        // Should never happen since tasks are opened in a new tab!
        if (currentURL.hostname !== dashboardURL.hostname) {
            this.bot.log(this.bot.isMobile, 'DASHBOARD-DATA', 'Provided page did not equal dashboard page, redirecting to dashboard page')
            await this.goHome(target)
        }

        // Reload with up to 2 attempts; remember the last failure so we can
        // rethrow it if every attempt failed.
        let lastError: unknown = null
        for (let attempt = 1; attempt <= 2; attempt++) {
            try {
                // Reload the page to get new data
                await target.reload({ waitUntil: 'domcontentloaded' })
                lastError = null
                break
            } catch (re) {
                lastError = re
                const msg = (re instanceof Error ? re.message : String(re))
                this.bot.log(this.bot.isMobile, 'GET-DASHBOARD-DATA', `Reload failed attempt ${attempt}: ${msg}`, 'warn')
                // If page/context closed => bail early after first retry
                if (msg.includes('has been closed')) {
                    if (attempt === 1) {
                        this.bot.log(this.bot.isMobile, 'GET-DASHBOARD-DATA', 'Page appears closed; trying one navigation fallback', 'warn')
                        try {
                            await this.goHome(target)
                        } catch {/* ignore */}
                    } else {
                        break
                    }
                }
                if (attempt === 2) {
                    await this.bot.utils.wait(1000)
                }
            }
        }

        // If reload failed after all attempts, throw the last error
        if (lastError) {
            throw lastError
        }

        // Wait a bit longer for scripts to load, especially on mobile
        await this.bot.utils.wait(this.bot.isMobile ? TIMEOUTS.LONG : TIMEOUTS.MEDIUM)

        // Wait for the more-activities element to ensure page is fully loaded
        await target.waitForSelector(SELECTORS.MORE_ACTIVITIES, { timeout: TIMEOUTS.DASHBOARD_WAIT }).catch(() => {
            this.bot.log(this.bot.isMobile, 'GET-DASHBOARD-DATA', 'Activities element not found, continuing anyway', 'warn')
        })

        // First pass: locate the inline <script> that declares the dashboard object.
        let scriptContent = await target.evaluate(() => {
            const scripts = Array.from(document.querySelectorAll('script'))
            // Try multiple patterns for better compatibility
            const targetScript = scripts.find(script =>
                script.innerText.includes('var dashboard') ||
                script.innerText.includes('dashboard=') ||
                script.innerText.includes('dashboard :')
            )

            return targetScript?.innerText ? targetScript.innerText : null
        })

        if (!scriptContent) {
            this.bot.log(this.bot.isMobile, 'GET-DASHBOARD-DATA', 'Dashboard script not found on first try, attempting recovery', 'warn')
            await this.bot.browser.utils.captureDiagnostics(target, 'dashboard-data-missing').catch((e) => {
                this.bot.log(this.bot.isMobile, 'GET-DASHBOARD-DATA', `Failed to capture diagnostics: ${e}`, 'warn')
            })

            // Force a navigation retry once before failing hard
            try {
                await this.goHome(target)
                await target.waitForLoadState('domcontentloaded', { timeout: TIMEOUTS.VERY_LONG }).catch((e) => {
                    this.bot.log(this.bot.isMobile, 'GET-DASHBOARD-DATA', `Wait for load state failed: ${e}`, 'warn')
                })
                await this.bot.utils.wait(this.bot.isMobile ? TIMEOUTS.LONG : TIMEOUTS.MEDIUM)
            } catch (e) {
                this.bot.log(this.bot.isMobile, 'GET-DASHBOARD-DATA', `Recovery navigation failed: ${e}`, 'warn')
            }

            // Second pass: same script search after the recovery navigation.
            const retryContent = await target.evaluate(() => {
                const scripts = Array.from(document.querySelectorAll('script'))
                const targetScript = scripts.find(script =>
                    script.innerText.includes('var dashboard') ||
                    script.innerText.includes('dashboard=') ||
                    script.innerText.includes('dashboard :')
                )
                return targetScript?.innerText ? targetScript.innerText : null
            }).catch(()=>null)

            if (!retryContent) {
                // Log additional debug info
                const scriptsDebug = await target.evaluate(() => {
                    const scripts = Array.from(document.querySelectorAll('script'))
                    return scripts.map(s => s.innerText.substring(0, 100)).join(' | ')
                }).catch(() => 'Unable to get script debug info')

                this.bot.log(this.bot.isMobile, 'GET-DASHBOARD-DATA', `Available scripts preview: ${scriptsDebug}`, 'warn')
                this.bot.log(this.bot.isMobile, 'GET-DASHBOARD-DATA', 'Dashboard data not found within script', 'error')
                throw new Error('Dashboard data not found within script - check page structure')
            }
            scriptContent = retryContent
        }

        // Extract the dashboard object from the script content
        const dashboardData = await target.evaluate((scriptContent: string) => {
            // Try multiple regex patterns for better compatibility
            const patterns = [
                /var dashboard = (\{.*?\});/s, // Original pattern
                /var dashboard=(\{.*?\});/s, // No spaces
                /var\s+dashboard\s*=\s*(\{.*?\});/s, // Flexible whitespace
                /dashboard\s*=\s*(\{[\s\S]*?\});/ // More permissive
            ]

            for (const regex of patterns) {
                const match = regex.exec(scriptContent)
                if (match && match[1]) {
                    try {
                        return JSON.parse(match[1])
                    } catch (e) {
                        // Try next pattern if JSON parsing fails
                        continue
                    }
                }
            }

            return null

        }, scriptContent)

        if (!dashboardData) {
            // Log a snippet of the script content for debugging
            const scriptPreview = scriptContent.substring(0, 200)
            this.bot.log(this.bot.isMobile, 'GET-DASHBOARD-DATA', `Script preview: ${scriptPreview}`, 'warn')
            await this.bot.browser.utils.captureDiagnostics(target, 'dashboard-data-parse').catch((e) => {
                this.bot.log(this.bot.isMobile, 'GET-DASHBOARD-DATA', `Failed to capture diagnostics: ${e}`, 'warn')
            })
            this.bot.log(this.bot.isMobile, 'GET-DASHBOARD-DATA', 'Unable to parse dashboard script', 'error')
            throw new Error('Unable to parse dashboard script - check diagnostics')
        }

        return dashboardData

    } catch (error) {
        const errorMessage = error instanceof Error ? error.message : String(error)
        this.bot.log(this.bot.isMobile, 'GET-DASHBOARD-DATA', `Error fetching dashboard data: ${errorMessage}`, 'error')
        throw new Error('Get dashboard data failed: ' + errorMessage)
    }

}
|
||||
|
||||
/**
|
||||
* Get search point counters
|
||||
* @returns {Counters} Object of search counter data
|
||||
*/
|
||||
async getSearchPoints(): Promise<Counters> {
|
||||
const dashboardData = await this.getDashboardData() // Always fetch newest data
|
||||
|
||||
return dashboardData.userStatus.counters
|
||||
}
|
||||
|
||||
/**
|
||||
* Get total earnable points with web browser
|
||||
* @returns {number} Total earnable points
|
||||
*/
|
||||
async getBrowserEarnablePoints(): Promise<EarnablePoints> {
|
||||
try {
|
||||
let desktopSearchPoints = 0
|
||||
let mobileSearchPoints = 0
|
||||
let dailySetPoints = 0
|
||||
let morePromotionsPoints = 0
|
||||
|
||||
const data = await this.getDashboardData()
|
||||
|
||||
// Desktop Search Points
|
||||
if (data.userStatus.counters.pcSearch?.length) {
|
||||
data.userStatus.counters.pcSearch.forEach(x => desktopSearchPoints += (x.pointProgressMax - x.pointProgress))
|
||||
}
|
||||
|
||||
// Mobile Search Points
|
||||
if (data.userStatus.counters.mobileSearch?.length) {
|
||||
data.userStatus.counters.mobileSearch.forEach(x => mobileSearchPoints += (x.pointProgressMax - x.pointProgress))
|
||||
}
|
||||
|
||||
// Daily Set
|
||||
data.dailySetPromotions[this.bot.utils.getFormattedDate()]?.forEach(x => dailySetPoints += (x.pointProgressMax - x.pointProgress))
|
||||
|
||||
// More Promotions
|
||||
if (data.morePromotions?.length) {
|
||||
data.morePromotions.forEach(x => {
|
||||
// Only count points from supported activities
|
||||
if (['quiz', 'urlreward'].includes(x.promotionType) && x.exclusiveLockedFeatureStatus !== 'locked') {
|
||||
morePromotionsPoints += (x.pointProgressMax - x.pointProgress)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
const totalEarnablePoints = desktopSearchPoints + mobileSearchPoints + dailySetPoints + morePromotionsPoints
|
||||
|
||||
return {
|
||||
dailySetPoints,
|
||||
morePromotionsPoints,
|
||||
desktopSearchPoints,
|
||||
mobileSearchPoints,
|
||||
totalEarnablePoints
|
||||
}
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error)
|
||||
this.bot.log(this.bot.isMobile, 'GET-BROWSER-EARNABLE-POINTS', 'An error occurred: ' + errorMessage, 'error')
|
||||
throw new Error('Get browser earnable points failed: ' + errorMessage)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get total earnable points with mobile app
|
||||
* @returns {number} Total earnable points
|
||||
*/
|
||||
async getAppEarnablePoints(accessToken: string) {
|
||||
try {
|
||||
const points = {
|
||||
readToEarn: 0,
|
||||
checkIn: 0,
|
||||
totalEarnablePoints: 0
|
||||
}
|
||||
|
||||
const eligibleOffers = [
|
||||
'ENUS_readarticle3_30points',
|
||||
'Gamification_Sapphire_DailyCheckIn'
|
||||
]
|
||||
|
||||
const data = await this.getDashboardData()
|
||||
// Guard against missing profile/attributes and undefined settings
|
||||
let geoLocale = data?.userProfile?.attributes?.country || 'US'
|
||||
const useGeo = !!(this.bot?.config?.searchSettings?.useGeoLocaleQueries)
|
||||
geoLocale = (useGeo && typeof geoLocale === 'string' && geoLocale.length === 2)
|
||||
? geoLocale.toLowerCase()
|
||||
: 'us'
|
||||
|
||||
const userDataRequest: AxiosRequestConfig = {
|
||||
url: URLS.APP_USER_DATA,
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Authorization': `Bearer ${accessToken}`,
|
||||
'X-Rewards-Country': geoLocale,
|
||||
'X-Rewards-Language': 'en'
|
||||
}
|
||||
}
|
||||
|
||||
const userDataResponse: AppUserData = (await this.bot.axios.request(userDataRequest)).data
|
||||
const userData = userDataResponse.response
|
||||
const eligibleActivities = userData.promotions.filter((x) => eligibleOffers.includes(x.attributes.offerid ?? ''))
|
||||
|
||||
for (const item of eligibleActivities) {
|
||||
if (item.attributes.type === 'msnreadearn') {
|
||||
points.readToEarn = parseInt(item.attributes.pointmax ?? '', 10) - parseInt(item.attributes.pointprogress ?? '', 10)
|
||||
break
|
||||
} else if (item.attributes.type === 'checkin') {
|
||||
const checkInDay = parseInt(item.attributes.progress ?? '', 10) % 7
|
||||
const today = new Date()
|
||||
const lastUpdated = new Date(item.attributes.last_updated ?? '')
|
||||
|
||||
if (checkInDay < 6 && today.getDate() !== lastUpdated.getDate()) {
|
||||
points.checkIn = parseInt(item.attributes['day_' + (checkInDay + 1) + '_points'] ?? '', 10)
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
points.totalEarnablePoints = points.readToEarn + points.checkIn
|
||||
|
||||
return points
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error)
|
||||
this.bot.log(this.bot.isMobile, 'GET-APP-EARNABLE-POINTS', 'An error occurred: ' + errorMessage, 'error')
|
||||
throw new Error('Get app earnable points failed: ' + errorMessage)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current point amount
|
||||
* @returns {number} Current total point amount
|
||||
*/
|
||||
async getCurrentPoints(): Promise<number> {
|
||||
try {
|
||||
const data = await this.getDashboardData()
|
||||
|
||||
return data.userStatus.availablePoints
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error)
|
||||
this.bot.log(this.bot.isMobile, 'GET-CURRENT-POINTS', 'An error occurred: ' + errorMessage, 'error')
|
||||
throw new Error('Get current points failed: ' + errorMessage)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse quiz data from provided page
|
||||
* @param {Page} page Playwright page
|
||||
* @returns {QuizData} Quiz data object
|
||||
*/
|
||||
async getQuizData(page: Page): Promise<QuizData> {
|
||||
try {
|
||||
// Wait for page to be fully loaded
|
||||
await page.waitForLoadState('domcontentloaded')
|
||||
await this.bot.utils.wait(TIMEOUTS.MEDIUM)
|
||||
|
||||
const html = await page.content()
|
||||
const $ = load(html)
|
||||
|
||||
// Try multiple possible variable names
|
||||
const possibleVariables = [
|
||||
'_w.rewardsQuizRenderInfo',
|
||||
'rewardsQuizRenderInfo',
|
||||
'_w.quizRenderInfo',
|
||||
'quizRenderInfo'
|
||||
]
|
||||
|
||||
let scriptContent = ''
|
||||
let foundVariable = ''
|
||||
|
||||
for (const varName of possibleVariables) {
|
||||
scriptContent = $('script')
|
||||
.toArray()
|
||||
.map(el => $(el).text())
|
||||
.find(t => t.includes(varName)) || ''
|
||||
|
||||
if (scriptContent) {
|
||||
foundVariable = varName
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if (scriptContent && foundVariable) {
|
||||
// Escape dots in variable name for regex
|
||||
const escapedVar = foundVariable.replace(/\./g, '\\.')
|
||||
const regex = new RegExp(`${escapedVar}\\s*=\\s*({.*?});`, 's')
|
||||
const match = regex.exec(scriptContent)
|
||||
|
||||
if (match && match[1]) {
|
||||
const quizData = JSON.parse(match[1])
|
||||
this.bot.log(this.bot.isMobile, 'GET-QUIZ-DATA', `Found quiz data using variable: ${foundVariable}`, 'log')
|
||||
return quizData
|
||||
} else {
|
||||
this.bot.log(this.bot.isMobile, 'GET-QUIZ-DATA', `Variable ${foundVariable} found but could not extract JSON data`, 'error')
|
||||
throw new Error(`Quiz data variable ${foundVariable} found but JSON extraction failed`)
|
||||
}
|
||||
} else {
|
||||
// Log available scripts for debugging
|
||||
const allScripts = $('script')
|
||||
.toArray()
|
||||
.map(el => $(el).text())
|
||||
.filter(t => t.length > 0)
|
||||
.map(t => t.substring(0, 100))
|
||||
|
||||
this.bot.log(this.bot.isMobile, 'GET-QUIZ-DATA', `Script not found. Tried variables: ${possibleVariables.join(', ')}`, 'error')
|
||||
this.bot.log(this.bot.isMobile, 'GET-QUIZ-DATA', `Found ${allScripts.length} scripts on page`, 'warn')
|
||||
|
||||
this.bot.log(this.bot.isMobile, 'GET-QUIZ-DATA', 'Script containing quiz data not found', 'error')
|
||||
throw new Error('Script containing quiz data not found - check page structure')
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error)
|
||||
this.bot.log(this.bot.isMobile, 'GET-QUIZ-DATA', 'An error occurred: ' + errorMessage, 'error')
|
||||
throw new Error('Get quiz data failed: ' + errorMessage)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
async waitForQuizRefresh(page: Page): Promise<boolean> {
|
||||
try {
|
||||
await page.waitForSelector(SELECTORS.QUIZ_CREDITS, { state: 'visible', timeout: TIMEOUTS.DASHBOARD_WAIT })
|
||||
await this.bot.utils.wait(TIMEOUTS.MEDIUM_LONG)
|
||||
|
||||
return true
|
||||
} catch (error) {
|
||||
this.bot.log(this.bot.isMobile, 'QUIZ-REFRESH', 'An error occurred:' + error, 'error')
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
async checkQuizCompleted(page: Page): Promise<boolean> {
|
||||
try {
|
||||
await page.waitForSelector(SELECTORS.QUIZ_COMPLETE, { state: 'visible', timeout: TIMEOUTS.MEDIUM_LONG })
|
||||
await this.bot.utils.wait(TIMEOUTS.MEDIUM_LONG)
|
||||
|
||||
return true
|
||||
} catch (error) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
async loadInCheerio(page: Page): Promise<CheerioAPI> {
|
||||
const html = await page.content()
|
||||
const $ = load(html)
|
||||
|
||||
return $
|
||||
}
|
||||
|
||||
async getPunchCardActivity(page: Page, activity: PromotionalItem | MorePromotion): Promise<string> {
|
||||
let selector = ''
|
||||
try {
|
||||
const html = await page.content()
|
||||
const $ = load(html)
|
||||
|
||||
const element = $('.offer-cta').toArray().find((x: unknown) => {
|
||||
const el = x as { attribs?: { href?: string } }
|
||||
return !!el.attribs?.href?.includes(activity.offerId)
|
||||
})
|
||||
if (element) {
|
||||
selector = `a[href*="${element.attribs.href}"]`
|
||||
}
|
||||
} catch (error) {
|
||||
this.bot.log(this.bot.isMobile, 'GET-PUNCHCARD-ACTIVITY', 'An error occurred:' + error, 'error')
|
||||
}
|
||||
|
||||
return selector
|
||||
}
|
||||
|
||||
/**
 * Persist session cookies and close the browser context cleanly.
 * @param {BrowserContext} browser Context to close
 * @param {string} email Account email used as the session key
 * @throws {Error} When saving the session or closing the browser fails
 */
async closeBrowser(browser: BrowserContext, email: string) {
    try {
        // Save cookies first so the next run can resume this session
        await saveSessionData(this.bot.config.sessionPath, browser, email, this.bot.isMobile)
        await this.bot.utils.wait(TIMEOUTS.MEDIUM_LONG)

        await browser.close()
        this.bot.log(this.bot.isMobile, 'CLOSE-BROWSER', 'Browser closed cleanly!')
    } catch (error) {
        const errorMessage = error instanceof Error ? error.message : String(error)
        this.bot.log(this.bot.isMobile, 'CLOSE-BROWSER', 'An error occurred: ' + errorMessage, 'error')
        throw new Error('Close browser failed: ' + errorMessage)
    }
}
|
||||
}
|
||||
233
src/browser/BrowserUtil.ts
Normal file
233
src/browser/BrowserUtil.ts
Normal file
@@ -0,0 +1,233 @@
|
||||
import { Page } from 'rebrowser-playwright'
|
||||
import { load } from 'cheerio'
|
||||
|
||||
import { MicrosoftRewardsBot } from '../index'
|
||||
import { captureDiagnostics as captureSharedDiagnostics } from '../util/Diagnostics'
|
||||
|
||||
type DismissButton = { selector: string; label: string; isXPath?: boolean }
|
||||
|
||||
/**
 * Page-level helpers shared across tasks: dismissing popups/consent dialogs,
 * locating the newest tab, reloading broken pages, human-like gestures and
 * failure diagnostics.
 */
export default class BrowserUtil {
    private bot: MicrosoftRewardsBot

    // Buttons that commonly block automation (cookie banners, login prompts,
    // welcome dialogs). Tried in order on each dismiss round; entries with
    // isXPath are located via an XPath expression instead of a CSS selector.
    private static readonly DISMISS_BUTTONS: readonly DismissButton[] = [
        { selector: '#acceptButton', label: 'AcceptButton' },
        { selector: '.optanon-allow-all, .optanon-alert-box-button', label: 'OneTrust Accept' },
        { selector: '.ext-secondary.ext-button', label: 'Skip For Now' },
        { selector: '#iLandingViewAction', label: 'Landing Continue' },
        { selector: '#iShowSkip', label: 'Show Skip' },
        { selector: '#iNext', label: 'Next' },
        { selector: '#iLooksGood', label: 'LooksGood' },
        { selector: '#idSIButton9', label: 'PrimaryLoginButton' },
        { selector: '.ms-Button.ms-Button--primary', label: 'Primary Generic' },
        { selector: '.c-glyph.glyph-cancel', label: 'Mobile Welcome Cancel' },
        { selector: '.maybe-later, button[data-automation-id*="maybeLater" i]', label: 'Maybe Later' },
        { selector: '#bnp_btn_reject', label: 'Bing Cookie Reject' },
        { selector: '#bnp_btn_accept', label: 'Bing Cookie Accept' },
        { selector: '#bnp_close_link', label: 'Bing Cookie Close' },
        { selector: '#reward_pivot_earn', label: 'Rewards Pivot Earn' },
        { selector: '//div[@id="cookieConsentContainer"]//button[contains(text(), "Accept")]', label: 'Legacy Cookie Accept', isXPath: true }
    ]

    // Bing cookie-consent overlay: prefer Reject, fall back to Accept.
    private static readonly OVERLAY_SELECTORS = {
        container: '#bnp_overlay_wrapper',
        reject: '#bnp_btn_reject, button[aria-label*="Reject" i]',
        accept: '#bnp_btn_accept'
    } as const

    // "Streak protection has run out" dialog: matched by text, closed via
    // any of several close/dismiss button variants (Escape as last resort).
    private static readonly STREAK_DIALOG_SELECTORS = {
        container: '[role="dialog"], div[role="alert"], div.ms-Dialog',
        textFilter: /streak protection has run out/i,
        closeButtons: 'button[aria-label*="close" i], button:has-text("Close"), button:has-text("Dismiss"), button:has-text("Got it"), button:has-text("OK"), button:has-text("Ok")'
    } as const

    // "We're updating our terms" interstitial: identified by title id or
    // heading text, advanced by clicking its Next button.
    private static readonly TERMS_UPDATE_SELECTORS = {
        titleId: '#iTOUTitle',
        titleText: /we're updating our terms/i,
        nextButton: 'button[data-testid="primaryButton"]:has-text("Next"), button[type="submit"]:has-text("Next")'
    } as const

    constructor(bot: MicrosoftRewardsBot) {
        this.bot = bot
    }

    /**
     * Repeatedly dismiss blocking UI (max 3 rounds), stopping early once a
     * round dismisses nothing. Multiple rounds handle dialogs that appear
     * only after a previous one is closed.
     */
    async tryDismissAllMessages(page: Page): Promise<void> {
        const maxRounds = 3
        for (let round = 0; round < maxRounds; round++) {
            const dismissCount = await this.dismissRound(page)
            if (dismissCount === 0) break
        }
    }

    /**
     * Run one pass over every dismiss strategy.
     * @returns Number of elements dismissed in this round.
     */
    private async dismissRound(page: Page): Promise<number> {
        let count = 0
        count += await this.dismissStandardButtons(page)
        count += await this.dismissOverlayButtons(page)
        count += await this.dismissStreakDialog(page)
        count += await this.dismissTermsUpdateDialog(page)
        return count
    }

    /**
     * Try every known dismiss button once; short pause after each click so
     * the page can react before the next attempt.
     * @returns Number of buttons clicked.
     */
    private async dismissStandardButtons(page: Page): Promise<number> {
        let count = 0
        for (const btn of BrowserUtil.DISMISS_BUTTONS) {
            const dismissed = await this.tryClickButton(page, btn)
            if (dismissed) {
                count++
                await page.waitForTimeout(150)
            }
        }
        return count
    }

    /**
     * Click a single dismiss button if it is currently visible.
     * Uses very short timeouts so invisible buttons cost almost nothing;
     * click failures are swallowed (best-effort dismissal).
     * @returns true if the button was visible (click attempted), else false.
     */
    private async tryClickButton(page: Page, btn: DismissButton): Promise<boolean> {
        try {
            const loc = btn.isXPath ? page.locator(`xpath=${btn.selector}`) : page.locator(btn.selector)
            const visible = await loc.first().isVisible({ timeout: 200 }).catch(() => false)
            if (!visible) return false

            await loc.first().click({ timeout: 500 }).catch(() => {})
            this.bot.log(this.bot.isMobile, 'DISMISS-ALL-MESSAGES', `Dismissed: ${btn.label}`)
            return true
        } catch {
            return false
        }
    }

    /**
     * Handle the Bing cookie-consent overlay. Prefers Reject over Accept.
     * @returns 1 if a button was clicked, 0 otherwise.
     */
    private async dismissOverlayButtons(page: Page): Promise<number> {
        try {
            const { container, reject, accept } = BrowserUtil.OVERLAY_SELECTORS
            const overlay = page.locator(container)
            const visible = await overlay.isVisible({ timeout: 200 }).catch(() => false)
            if (!visible) return 0

            const rejectBtn = overlay.locator(reject)
            if (await rejectBtn.first().isVisible().catch(() => false)) {
                await rejectBtn.first().click({ timeout: 500 }).catch(() => {})
                this.bot.log(this.bot.isMobile, 'DISMISS-ALL-MESSAGES', 'Dismissed: Overlay Reject')
                return 1
            }

            const acceptBtn = overlay.locator(accept)
            if (await acceptBtn.first().isVisible().catch(() => false)) {
                await acceptBtn.first().click({ timeout: 500 }).catch(() => {})
                this.bot.log(this.bot.isMobile, 'DISMISS-ALL-MESSAGES', 'Dismissed: Overlay Accept')
                return 1
            }

            return 0
        } catch {
            return 0
        }
    }

    /**
     * Close the "streak protection has run out" dialog if present: try its
     * close buttons first, then fall back to pressing Escape.
     * @returns 1 when the dialog was visible (a dismissal was attempted), 0 otherwise.
     */
    private async dismissStreakDialog(page: Page): Promise<number> {
        try {
            const { container, textFilter, closeButtons } = BrowserUtil.STREAK_DIALOG_SELECTORS
            const dialog = page.locator(container).filter({ hasText: textFilter })
            const visible = await dialog.first().isVisible({ timeout: 200 }).catch(() => false)
            if (!visible) return 0

            const closeBtn = dialog.locator(closeButtons).first()
            if (await closeBtn.isVisible({ timeout: 200 }).catch(() => false)) {
                await closeBtn.click({ timeout: 500 }).catch(() => {})
                this.bot.log(this.bot.isMobile, 'DISMISS-ALL-MESSAGES', 'Dismissed: Streak Protection Dialog Button')
                return 1
            }

            // No close button matched - Escape is the fallback
            await page.keyboard.press('Escape').catch(() => {})
            this.bot.log(this.bot.isMobile, 'DISMISS-ALL-MESSAGES', 'Dismissed: Streak Protection Dialog Escape')
            return 1
        } catch {
            return 0
        }
    }

    /**
     * Advance past the "We're updating our terms" interstitial by clicking
     * its Next button, then waiting briefly for the resulting navigation.
     * @returns 1 if the dialog was advanced, 0 otherwise.
     */
    private async dismissTermsUpdateDialog(page: Page): Promise<number> {
        try {
            const { titleId, titleText, nextButton } = BrowserUtil.TERMS_UPDATE_SELECTORS

            // Check if terms update page is present
            const titleById = page.locator(titleId)
            const titleByText = page.locator('h1').filter({ hasText: titleText })

            const hasTitle = await titleById.isVisible({ timeout: 200 }).catch(() => false) ||
                await titleByText.first().isVisible({ timeout: 200 }).catch(() => false)

            if (!hasTitle) return 0

            // Click the Next button
            const nextBtn = page.locator(nextButton).first()
            if (await nextBtn.isVisible({ timeout: 500 }).catch(() => false)) {
                await nextBtn.click({ timeout: 1000 }).catch(() => {})
                this.bot.log(this.bot.isMobile, 'DISMISS-ALL-MESSAGES', 'Dismissed: Terms Update Dialog (Next)')
                // Wait a bit for navigation
                await page.waitForTimeout(1000)
                return 1
            }

            return 0
        } catch {
            return 0
        }
    }

    /**
     * Return the most recently opened tab in this page's browser context.
     * Waits 1s first to give a just-opened tab time to register.
     * @throws {Error} When the context reports no pages.
     */
    async getLatestTab(page: Page): Promise<Page> {
        try {
            await this.bot.utils.wait(1000)

            const browser = page.context()
            const pages = browser.pages()
            const newTab = pages[pages.length - 1]

            if (newTab) {
                return newTab
            }

            this.bot.log(this.bot.isMobile, 'GET-NEW-TAB', 'Unable to get latest tab', 'error')
            throw new Error('Unable to get latest tab - no pages found in browser context')
        } catch (error) {
            const errorMessage = error instanceof Error ? error.message : String(error)
            this.bot.log(this.bot.isMobile, 'GET-NEW-TAB', 'An error occurred: ' + errorMessage, 'error')
            throw new Error('Get new tab failed: ' + errorMessage)
        }
    }

    /**
     * Reload the page if it is showing a Chromium network-error page
     * (detected via the `body.neterror` marker in the page HTML).
     * @throws {Error} When inspecting or reloading the page fails.
     */
    async reloadBadPage(page: Page): Promise<void> {
        try {
            const html = await page.content().catch(() => '')
            const $ = load(html)

            const isNetworkError = $('body.neterror').length

            if (isNetworkError) {
                this.bot.log(this.bot.isMobile, 'RELOAD-BAD-PAGE', 'Bad page detected, reloading!')
                await page.reload()
            }

        } catch (error) {
            const errorMessage = error instanceof Error ? error.message : String(error)
            this.bot.log(this.bot.isMobile, 'RELOAD-BAD-PAGE', 'An error occurred: ' + errorMessage, 'error')
            throw new Error('Reload bad page failed: ' + errorMessage)
        }
    }

    /**
     * Perform small human-like gestures: short waits, minor mouse moves and occasional scrolls.
     * This should be called sparingly between actions to avoid a fixed cadence.
     */
    async humanizePage(page: Page): Promise<void> {
        try {
            await this.bot.humanizer.microGestures(page)
            await this.bot.humanizer.actionPause()
        } catch { /* swallow */ }
    }

    /**
     * Capture minimal diagnostics for a page: screenshot + HTML content.
     * Files are written under ./reports/<date>/ with a safe label.
     */
    async captureDiagnostics(page: Page, label: string): Promise<void> {
        await captureSharedDiagnostics(this.bot, page, label)
    }

}
|
||||
337
src/config.jsonc
Normal file
337
src/config.jsonc
Normal file
@@ -0,0 +1,337 @@
|
||||
{
|
||||
// ============================================================
|
||||
// 🌐 GENERAL CONFIGURATION
|
||||
// ============================================================
|
||||
|
||||
// Base URL for Microsoft Rewards dashboard (do not change unless necessary)
|
||||
"baseURL": "https://rewards.bing.com",
|
||||
|
||||
// Directory to store sessions (cookies, browser fingerprints)
|
||||
"sessionPath": "sessions",
|
||||
|
||||
// Dry-run mode: simulate execution without actually running tasks (useful for testing)
|
||||
"dryRun": false,
|
||||
|
||||
|
||||
// ============================================================
|
||||
// 🖥️ BROWSER CONFIGURATION
|
||||
// ============================================================
|
||||
|
||||
"browser": {
|
||||
// false = visible window | true = headless mode (invisible)
|
||||
"headless": false,
|
||||
// Max timeout for operations (supports: 30000, "30s", "2min")
|
||||
"globalTimeout": "30s"
|
||||
},
|
||||
|
||||
"fingerprinting": {
|
||||
// Persist browser fingerprints to improve consistency across runs
|
||||
"saveFingerprint": {
|
||||
"mobile": true,
|
||||
"desktop": true
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
// ============================================================
|
||||
// ⚙️ EXECUTION & PERFORMANCE
|
||||
// ============================================================
|
||||
|
||||
"execution": {
|
||||
// true = Desktop + Mobile in parallel (faster, more resources)
|
||||
// false = Sequential (slower, fewer resources)
|
||||
"parallel": false,
|
||||
// If false, skip execution when 0 points are available
|
||||
"runOnZeroPoints": false,
|
||||
// Number of account clusters (processes) to run concurrently
|
||||
"clusters": 1,
|
||||
// How many times to run through all accounts in sequence (1 = process each account once, 2 = twice, etc.)
|
||||
// Higher values can catch missed tasks but increase detection risk
|
||||
"passesPerRun": 3
|
||||
},
|
||||
|
||||
"schedule": {
|
||||
// Built-in scheduler (no cron needed in containers)
|
||||
"enabled": false,
|
||||
// Time format options:
|
||||
// - US style with AM/PM → useAmPm: true and time12 (e.g., "9:00 AM")
|
||||
// - 24-hour style → useAmPm: false and time24 (e.g., "09:00")
|
||||
"useAmPm": false,
|
||||
"time12": "9:00 AM",
|
||||
"time24": "09:00",
|
||||
// IANA timezone identifier (e.g., "Europe/Paris", "America/New_York") — see schedule.md for the full list
|
||||
"timeZone": "Europe/Paris",
|
||||
// If true, run immediately on process start
|
||||
"runImmediatelyOnStart": true
|
||||
},
|
||||
|
||||
"jobState": {
|
||||
// Save state to avoid duplicate work across restarts
|
||||
"enabled": true,
|
||||
// Custom state directory (empty = defaults to sessionPath/job-state)
|
||||
"dir": ""
|
||||
},
|
||||
|
||||
|
||||
// ============================================================
|
||||
// 🎯 TASKS & WORKERS
|
||||
// ============================================================
|
||||
|
||||
"workers": {
|
||||
// Select which tasks the bot should complete on desktop/mobile
|
||||
"doDailySet": true, // Daily set tasks
|
||||
"doMorePromotions": true, // More promotions section
|
||||
"doPunchCards": true, // Punch cards
|
||||
"doDesktopSearch": true, // Desktop searches
|
||||
"doMobileSearch": true, // Mobile searches
|
||||
"doDailyCheckIn": true, // Daily check-in
|
||||
"doReadToEarn": true, // Read to earn
|
||||
// If true, run desktop searches right after Daily Set
|
||||
"bundleDailySetWithSearch": true
|
||||
},
|
||||
|
||||
|
||||
// ============================================================
|
||||
// 🔍 SEARCH CONFIGURATION
|
||||
// ============================================================
|
||||
|
||||
"search": {
|
||||
// Use locale-specific query sources
|
||||
"useLocalQueries": true,
|
||||
"settings": {
|
||||
// Use region-specific queries (at, fr, us, etc.)
|
||||
"useGeoLocaleQueries": true,
|
||||
// Randomly scroll search result pages (more natural behavior)
|
||||
"scrollRandomResults": true,
|
||||
// Occasionally click a result (safe targets only)
|
||||
"clickRandomResults": true,
|
||||
// Number of retries if mobile searches don't progress
|
||||
"retryMobileSearchAmount": 2,
|
||||
// Delay between searches
|
||||
"delay": {
|
||||
"min": "40sec",
|
||||
"max": "1min"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
"queryDiversity": {
|
||||
// Multi-source query generation: Reddit, News, Wikipedia instead of only Google Trends
|
||||
"enabled": true,
|
||||
// Available sources: google-trends, reddit, news, wikipedia, local-fallback
|
||||
"sources": ["google-trends", "reddit", "local-fallback"],
|
||||
// Max queries to fetch per source
|
||||
"maxQueriesPerSource": 10,
|
||||
// Cache duration in minutes (avoids hammering APIs)
|
||||
"cacheMinutes": 30
|
||||
},
|
||||
|
||||
|
||||
// ============================================================
|
||||
// 🤖 HUMANIZATION & NATURAL BEHAVIOR
|
||||
// ============================================================
|
||||
|
||||
"humanization": {
|
||||
// Human Mode: adds subtle micro-gestures & pauses to mimic real users
|
||||
"enabled": true,
|
||||
// If a ban is detected on any account, stop processing remaining accounts
|
||||
"stopOnBan": true,
|
||||
// Immediately send an alert (webhook/NTFY) when a ban is detected
|
||||
"immediateBanAlert": true,
|
||||
// Extra random pause between actions
|
||||
"actionDelay": {
|
||||
"min": 500, // 0.5 seconds minimum
|
||||
"max": 2200 // 2.2 seconds maximum
|
||||
},
|
||||
// Probability (0-1) to move mouse slightly between actions
|
||||
"gestureMoveProb": 0.65,
|
||||
// Probability (0-1) to perform a small scroll
|
||||
"gestureScrollProb": 0.4,
|
||||
// Optional execution time windows (e.g., ["08:30-11:00", "19:00-22:00"])
|
||||
// If specified, waits until inside a window before starting
|
||||
"allowedWindows": []
|
||||
},
|
||||
|
||||
"vacation": {
|
||||
// Monthly "vacation" block: skip a random range of days each month
|
||||
// Each month, a random period between minDays and maxDays is selected
|
||||
// and all runs within that date range are skipped (more human-like behavior)
|
||||
"enabled": true,
|
||||
"minDays": 2,
|
||||
"maxDays": 4
|
||||
},
|
||||
|
||||
|
||||
// ============================================================
|
||||
// 🛡️ RISK MANAGEMENT & SECURITY
|
||||
// ============================================================
|
||||
|
||||
"riskManagement": {
|
||||
// Dynamic delay adjustment based on detected risk signals
|
||||
"enabled": true,
|
||||
// Automatically increase delays when captchas/errors are detected
|
||||
"autoAdjustDelays": true,
|
||||
// Stop execution if risk level reaches critical threshold
|
||||
"stopOnCritical": false,
|
||||
// Enable ML-based ban prediction based on patterns
|
||||
"banPrediction": true,
|
||||
// Risk threshold (0-100). If exceeded, bot pauses or alerts you
|
||||
"riskThreshold": 75
|
||||
},
|
||||
|
||||
"retryPolicy": {
|
||||
// Generic retry/backoff for transient failures
|
||||
"maxAttempts": 3,
|
||||
"baseDelay": 1000,
|
||||
"maxDelay": "30s",
|
||||
"multiplier": 2,
|
||||
"jitter": 0.2
|
||||
},
|
||||
|
||||
|
||||
// ============================================================
|
||||
// 🌐 PROXY
|
||||
// ============================================================
|
||||
|
||||
"proxy": {
|
||||
// Control which outbound calls go through your proxy
|
||||
"proxyGoogleTrends": true,
|
||||
"proxyBingTerms": true
|
||||
},
|
||||
|
||||
|
||||
// ============================================================
|
||||
// 🔔 NOTIFICATIONS
|
||||
// ============================================================
|
||||
|
||||
// Live logs webhook (Discord or similar). URL = your webhook endpoint
|
||||
"webhook": {
|
||||
"enabled": false,
|
||||
"url": ""
|
||||
},
|
||||
|
||||
// Rich end-of-run summary webhook (Discord or similar)
|
||||
"conclusionWebhook": {
|
||||
"enabled": false,
|
||||
"url": ""
|
||||
},
|
||||
|
||||
// NTFY push notifications (plain text)
|
||||
"ntfy": {
|
||||
"enabled": false,
|
||||
"url": "",
|
||||
"topic": "rewards",
|
||||
"authToken": ""
|
||||
},
|
||||
|
||||
|
||||
// ============================================================
|
||||
// 📊 LOGGING & DIAGNOSTICS
|
||||
// ============================================================
|
||||
|
||||
"logging": {
|
||||
// Logging controls (see docs/config.md)
|
||||
// Filter out noisy log buckets locally and for webhook summaries
|
||||
"excludeFunc": [
|
||||
"SEARCH-CLOSE-TABS",
|
||||
"LOGIN-NO-PROMPT",
|
||||
"FLOW"
|
||||
],
|
||||
"webhookExcludeFunc": [
|
||||
"SEARCH-CLOSE-TABS",
|
||||
"LOGIN-NO-PROMPT",
|
||||
"FLOW"
|
||||
],
|
||||
// Email redaction toggle (true = secure, false = full emails)
|
||||
"redactEmails": true
|
||||
},
|
||||
|
||||
"diagnostics": {
|
||||
// Capture minimal evidence on failures (screenshots/HTML)
|
||||
"enabled": true,
|
||||
"saveScreenshot": true,
|
||||
"saveHtml": true,
|
||||
"maxPerRun": 2,
|
||||
"retentionDays": 7
|
||||
},
|
||||
|
||||
"analytics": {
|
||||
// 📈 Performance Dashboard: tracks points earned, success rates, execution times
|
||||
// Useful for monitoring your stats over time. Disable if you don't need it.
|
||||
// WHAT IT DOES:
|
||||
// - Collects daily/weekly/monthly statistics
|
||||
// - Calculates success rates for each activity type
|
||||
// - Tracks average execution times
|
||||
// - Generates trend reports
|
||||
// - Can export to Markdown or send via webhook
|
||||
"enabled": true,
|
||||
// How long to keep analytics data (days)
|
||||
"retentionDays": 30,
|
||||
// Generate markdown summary reports
|
||||
"exportMarkdown": true,
|
||||
// Send analytics summary via webhook
|
||||
"webhookSummary": true
|
||||
},
|
||||
|
||||
|
||||
// ============================================================
|
||||
// 🛒 BUY MODE
|
||||
// ============================================================
|
||||
|
||||
"buyMode": {
|
||||
// Manual purchase/redeem mode. Use CLI -buy to enable
|
||||
// Session duration cap in minutes
|
||||
"maxMinutes": 45
|
||||
},
|
||||
|
||||
|
||||
// ============================================================
|
||||
// 🔄 UPDATES
|
||||
// ============================================================
|
||||
|
||||
"update": {
|
||||
// Post-run auto-update settings
|
||||
"git": true,
|
||||
"docker": false,
|
||||
// Custom updater script path (relative to repo root)
|
||||
"scriptPath": "setup/update/update.mjs",
|
||||
|
||||
// ⚠️ SMART UPDATE CONTROL - How It Really Works:
|
||||
//
|
||||
// BACKUP: Your files are ALWAYS backed up to .update-backup/ before any update
|
||||
//
|
||||
// UPDATE PROCESS:
|
||||
// 1. Script checks if remote modified config.jsonc or accounts.json
|
||||
// 2. Runs "git pull --rebase" to merge remote changes
|
||||
// 3. Git intelligently merges:
|
||||
// ✅ NEW FIELDS ADDED (new config options, new account properties)
|
||||
// → Your existing values are PRESERVED, new fields are added alongside
|
||||
// → This is 95% of updates - works perfectly without conflicts
|
||||
//
|
||||
// ⚠️ MAJOR RESTRUCTURING (fields renamed, sections reordered, format changed)
|
||||
// → Git may choose one version over the other
|
||||
// → Risk of losing your custom values in restructured sections
|
||||
//
|
||||
// WHAT THE OPTIONS DO:
|
||||
// - true: ACCEPT git merge result (keeps new features + your settings in most cases)
|
||||
// - false: REJECT remote changes, RESTORE your local file from backup (stay on old version)
|
||||
//
|
||||
// RECOMMENDED: Keep both TRUE
|
||||
// Why? Because we rarely restructure files. Most updates just ADD new optional fields.
|
||||
// Your passwords, emails, and custom settings survive addition-only updates.
|
||||
// Only risk: major file restructuring (rare, usually announced in release notes).
|
||||
//
|
||||
// SAFETY NET: Check .update-backup/ folder after updates to compare if worried.
|
||||
|
||||
// Apply remote updates to config.jsonc via git merge
|
||||
// true = accept new features + intelligent merge (RECOMMENDED for most users)
|
||||
// false = always keep your local version (miss new config options)
|
||||
"autoUpdateConfig": true,
|
||||
|
||||
// Apply remote updates to accounts.json via git merge
|
||||
// true = accept new fields (like "region", "totpSecret") while keeping credentials (RECOMMENDED)
|
||||
// false = always keep your local accounts file (safest but may miss new optional fields)
|
||||
"autoUpdateAccounts": false
|
||||
}
|
||||
}
|
||||
|
||||
89
src/constants.ts
Normal file
89
src/constants.ts
Normal file
@@ -0,0 +1,89 @@
|
||||
/**
|
||||
* Central constants file for the Microsoft Rewards Script
|
||||
* Defines timeouts, retry limits, and other magic numbers used throughout the application
|
||||
*/
|
||||
|
||||
/**
|
||||
* Safe environment variable parsing with validation
|
||||
*/
|
||||
function parseEnvNumber(key: string, defaultValue: number, min: number, max: number): number {
|
||||
const raw = process.env[key]
|
||||
if (!raw) return defaultValue
|
||||
|
||||
const parsed = Number(raw)
|
||||
if (isNaN(parsed)) {
|
||||
console.warn(`[Constants] Invalid ${key}="${raw}". Using default ${defaultValue}`)
|
||||
return defaultValue
|
||||
}
|
||||
|
||||
if (parsed < min || parsed > max) {
|
||||
console.warn(`[Constants] ${key}=${parsed} out of range [${min}, ${max}]. Using default ${defaultValue}`)
|
||||
return defaultValue
|
||||
}
|
||||
|
||||
return parsed
|
||||
}
|
||||
|
||||
// Generic wait durations (milliseconds) used across page interactions.
export const TIMEOUTS = {
    SHORT: 500,
    MEDIUM: 1500,
    MEDIUM_LONG: 2000,
    LONG: 3000,
    VERY_LONG: 5000,
    EXTRA_LONG: 10000,
    // Max wait for dashboard elements to appear
    DASHBOARD_WAIT: 10000,
    // Overall login ceiling; overridable via LOGIN_MAX_WAIT_MS (30s–10min window)
    LOGIN_MAX: parseEnvNumber('LOGIN_MAX_WAIT_MS', 180000, 30000, 600000),
    NETWORK_IDLE: 5000
} as const

// Iteration/retry caps for the various activity loops.
export const RETRY_LIMITS = {
    MAX_ITERATIONS: 5,
    DASHBOARD_RELOAD: 2,
    MOBILE_SEARCH: 3,
    // ABC/Poll/Quiz loops bail out after this many rounds
    ABC_MAX: 15,
    POLL_MAX: 15,
    QUIZ_MAX: 15,
    // Milliseconds to wait for a quiz answer option to appear
    QUIZ_ANSWER_TIMEOUT: 10000,
    GO_HOME_MAX: 5
} as const

// Randomized/fixed delays (milliseconds) used to pace interactions.
export const DELAYS = {
    ACTION_MIN: 1000,
    ACTION_MAX: 3000,
    SEARCH_DEFAULT_MIN: 2000,
    SEARCH_DEFAULT_MAX: 5000,
    BROWSER_CLOSE: 2000,
    // Per-keystroke delay when typing search queries
    TYPING_DELAY: 20,
    SEARCH_ON_BING_WAIT: 5000,
    SEARCH_ON_BING_COMPLETE: 3000,
    SEARCH_ON_BING_FOCUS: 200,
    SEARCH_BAR_TIMEOUT: 15000,
    QUIZ_ANSWER_WAIT: 2000,
    THIS_OR_THAT_START: 2000
} as const

// CSS selectors for key rewards-page elements.
export const SELECTORS = {
    MORE_ACTIVITIES: '#more-activities',
    SUSPENDED_ACCOUNT: '#suspendedAccountHeader',
    QUIZ_COMPLETE: '#quizCompleteContainer',
    QUIZ_CREDITS: 'span.rqMCredits'
} as const

// Endpoints used by the bot.
export const URLS = {
    REWARDS_BASE: 'https://rewards.bing.com',
    REWARDS_SIGNIN: 'https://rewards.bing.com/signin',
    // Mobile app user-data API (Android channel)
    APP_USER_DATA: 'https://prod.rewardsplatform.microsoft.com/dapi/me?channel=SAAndroid&options=613'
} as const

// Discord webhook formatting/rate-limit constants.
export const DISCORD = {
    // Keep embeds under Discord's 2000-char message limit with headroom
    MAX_EMBED_LENGTH: 1900,
    RATE_LIMIT_DELAY: 500,
    WEBHOOK_TIMEOUT: 10000,
    DEBOUNCE_DELAY: 750,
    COLOR_RED: 0xFF0000,
    COLOR_CRIMSON: 0xDC143C,
    COLOR_ORANGE: 0xFFA500,
    COLOR_BLUE: 0x3498DB,
    COLOR_GREEN: 0x00D26A,
    AVATAR_URL: 'https://media.discordapp.net/attachments/1430643658788438144/1430644205344133290/rewi-v1.png?ex=68fbd83e&is=68fa86be&hm=ccddee9430de1fff90c1c3750907c13a60d1da29f13617a5dbbdc642f243f5b9&=&format=png&quality=lossless&width=968&height=968'
} as const
|
||||
164
src/functions/Activities.ts
Normal file
164
src/functions/Activities.ts
Normal file
@@ -0,0 +1,164 @@
|
||||
import { Page } from 'rebrowser-playwright'
|
||||
|
||||
import { MicrosoftRewardsBot } from '../index'
|
||||
|
||||
import { Search } from './activities/Search'
|
||||
import { ABC } from './activities/ABC'
|
||||
import { Poll } from './activities/Poll'
|
||||
import { Quiz } from './activities/Quiz'
|
||||
import { ThisOrThat } from './activities/ThisOrThat'
|
||||
import { UrlReward } from './activities/UrlReward'
|
||||
import { SearchOnBing } from './activities/SearchOnBing'
|
||||
import { ReadToEarn } from './activities/ReadToEarn'
|
||||
import { DailyCheckIn } from './activities/DailyCheckIn'
|
||||
|
||||
import { DashboardData, MorePromotion, PromotionalItem } from '../interface/DashboardData'
|
||||
import type { ActivityHandler } from '../interface/ActivityHandler'
|
||||
|
||||
type ActivityKind =
|
||||
| { type: 'poll' }
|
||||
| { type: 'abc' }
|
||||
| { type: 'thisOrThat' }
|
||||
| { type: 'quiz' }
|
||||
| { type: 'urlReward' }
|
||||
| { type: 'searchOnBing' }
|
||||
| { type: 'unsupported' }
|
||||
|
||||
|
||||
// Central dispatcher that maps dashboard/punchcard promotions to the concrete
// activity implementations (Poll, ABC, Quiz, ThisOrThat, UrlReward, searches).
export default class Activities {
    // Owning bot instance: provides logging, utils, and config access
    private bot: MicrosoftRewardsBot
    // User-registered handlers consulted before the built-in dispatch
    private handlers: ActivityHandler[] = []

    constructor(bot: MicrosoftRewardsBot) {
        this.bot = bot
    }

    // Register external/custom handlers (optional extension point)
    registerHandler(handler: ActivityHandler) {
        this.handlers.push(handler)
    }

    // Centralized dispatcher for activities from dashboard/punchcards.
    // Custom handlers get first refusal; otherwise the activity is classified
    // and routed to the matching built-in do* method. Errors are logged and
    // swallowed so one failing activity cannot abort the whole run.
    async run(page: Page, activity: MorePromotion | PromotionalItem): Promise<void> {
        // First, try custom handlers (if any)
        for (const h of this.handlers) {
            try {
                if (h.canHandle(activity)) {
                    await h.run(page, activity)
                    return
                }
            } catch (e) {
                this.bot.log(this.bot.isMobile, 'ACTIVITY', `Custom handler ${(h.id || 'unknown')} failed: ${e instanceof Error ? e.message : e}`, 'error')
            }
        }

        const kind = this.classifyActivity(activity)
        try {
            switch (kind.type) {
                case 'poll':
                    await this.doPoll(page)
                    break
                case 'abc':
                    await this.doABC(page)
                    break
                case 'thisOrThat':
                    await this.doThisOrThat(page)
                    break
                case 'quiz':
                    await this.doQuiz(page)
                    break
                case 'searchOnBing':
                    await this.doSearchOnBing(page, activity)
                    break
                case 'urlReward':
                    await this.doUrlReward(page)
                    break
                default:
                    this.bot.log(this.bot.isMobile, 'ACTIVITY', `Skipped activity "${activity.title}" | Reason: Unsupported type: "${String((activity as { promotionType?: string }).promotionType)}"!`, 'warn')
                    break
            }
        } catch (e) {
            this.bot.log(this.bot.isMobile, 'ACTIVITY', `Dispatcher error for "${activity.title}": ${e instanceof Error ? e.message : e}`, 'error')
        }
    }

    // Human-readable label for the classified activity type (used in logs)
    public getTypeLabel(activity: MorePromotion | PromotionalItem): string {
        const k = this.classifyActivity(activity)
        switch (k.type) {
            case 'poll': return 'Poll'
            case 'abc': return 'ABC'
            case 'thisOrThat': return 'ThisOrThat'
            case 'quiz': return 'Quiz'
            case 'searchOnBing': return 'SearchOnBing'
            case 'urlReward': return 'UrlReward'
            default: return 'Unsupported'
        }
    }

    // Maps a promotion to an ActivityKind based on its promotionType, plus
    // point/url heuristics for the 'quiz' family:
    //   10 points => Poll (URL has pollscenarioid) or ABC, 50 points => ThisOrThat.
    private classifyActivity(activity: MorePromotion | PromotionalItem): ActivityKind {
        const type = (activity.promotionType || '').toLowerCase()
        if (type === 'quiz') {
            // Distinguish Poll/ABC/ThisOrThat vs general quiz using current heuristics
            const max = activity.pointProgressMax
            const url = (activity.destinationUrl || '').toLowerCase()
            if (max === 10) {
                if (url.includes('pollscenarioid')) return { type: 'poll' }
                return { type: 'abc' }
            }
            if (max === 50) return { type: 'thisOrThat' }
            return { type: 'quiz' }
        }
        if (type === 'urlreward') {
            const name = (activity.name || '').toLowerCase()
            // "exploreonbing" rewards require an actual Bing search, not just a visit
            if (name.includes('exploreonbing')) return { type: 'searchOnBing' }
            return { type: 'urlReward' }
        }
        return { type: 'unsupported' }
    }

    // --- Thin delegation wrappers: each builds the concrete worker and runs it ---

    doSearch = async (page: Page, data: DashboardData): Promise<void> => {
        const search = new Search(this.bot)
        await search.doSearch(page, data)
    }

    doABC = async (page: Page): Promise<void> => {
        const abc = new ABC(this.bot)
        await abc.doABC(page)
    }

    doPoll = async (page: Page): Promise<void> => {
        const poll = new Poll(this.bot)
        await poll.doPoll(page)
    }

    doThisOrThat = async (page: Page): Promise<void> => {
        const thisOrThat = new ThisOrThat(this.bot)
        await thisOrThat.doThisOrThat(page)
    }

    doQuiz = async (page: Page): Promise<void> => {
        const quiz = new Quiz(this.bot)
        await quiz.doQuiz(page)
    }

    doUrlReward = async (page: Page): Promise<void> => {
        const urlReward = new UrlReward(this.bot)
        await urlReward.doUrlReward(page)
    }

    doSearchOnBing = async (page: Page, activity: MorePromotion | PromotionalItem): Promise<void> => {
        const searchOnBing = new SearchOnBing(this.bot)
        await searchOnBing.doSearchOnBing(page, activity)
    }

    // Read-to-Earn and Daily Check-In run against the mobile API (token-based),
    // not against a page.
    doReadToEarn = async (accessToken: string, data: DashboardData): Promise<void> => {
        const readToEarn = new ReadToEarn(this.bot)
        await readToEarn.doReadToEarn(accessToken, data)
    }

    doDailyCheckIn = async (accessToken: string, data: DashboardData): Promise<void> => {
        const dailyCheckIn = new DailyCheckIn(this.bot)
        await dailyCheckIn.doDailyCheckIn(accessToken, data)
    }

}
|
||||
1303
src/functions/Login.ts
Normal file
1303
src/functions/Login.ts
Normal file
File diff suppressed because it is too large
Load Diff
248
src/functions/Workers.ts
Normal file
248
src/functions/Workers.ts
Normal file
@@ -0,0 +1,248 @@
|
||||
import { Page } from 'rebrowser-playwright'
|
||||
|
||||
import { DashboardData, MorePromotion, PromotionalItem, PunchCard } from '../interface/DashboardData'
|
||||
|
||||
import { MicrosoftRewardsBot } from '../index'
|
||||
import JobState from '../util/JobState'
|
||||
import Retry from '../util/Retry'
|
||||
import { AdaptiveThrottler } from '../util/AdaptiveThrottler'
|
||||
|
||||
export class Workers {
|
||||
public bot: MicrosoftRewardsBot
|
||||
private jobState: JobState
|
||||
|
||||
constructor(bot: MicrosoftRewardsBot) {
|
||||
this.bot = bot
|
||||
this.jobState = new JobState(this.bot.config)
|
||||
}
|
||||
|
||||
// Daily Set
|
||||
async doDailySet(page: Page, data: DashboardData) {
|
||||
const todayData = data.dailySetPromotions[this.bot.utils.getFormattedDate()]
|
||||
|
||||
const today = this.bot.utils.getFormattedDate()
|
||||
const activitiesUncompleted = (todayData?.filter(x => !x.complete && x.pointProgressMax > 0) ?? [])
|
||||
.filter(x => {
|
||||
if (this.bot.config.jobState?.enabled === false) return true
|
||||
const email = this.bot.currentAccountEmail || 'unknown'
|
||||
return !this.jobState.isDone(email, today, x.offerId)
|
||||
})
|
||||
|
||||
if (!activitiesUncompleted.length) {
|
||||
this.bot.log(this.bot.isMobile, 'DAILY-SET', 'All Daily Set" items have already been completed')
|
||||
return
|
||||
}
|
||||
|
||||
// Solve Activities
|
||||
this.bot.log(this.bot.isMobile, 'DAILY-SET', 'Started solving "Daily Set" items')
|
||||
|
||||
await this.solveActivities(page, activitiesUncompleted)
|
||||
|
||||
// Mark as done to prevent duplicate work if checkpoints enabled
|
||||
if (this.bot.config.jobState?.enabled !== false) {
|
||||
const email = this.bot.currentAccountEmail || 'unknown'
|
||||
for (const a of activitiesUncompleted) {
|
||||
this.jobState.markDone(email, today, a.offerId)
|
||||
}
|
||||
}
|
||||
|
||||
page = await this.bot.browser.utils.getLatestTab(page)
|
||||
|
||||
// Always return to the homepage if not already
|
||||
await this.bot.browser.func.goHome(page)
|
||||
|
||||
this.bot.log(this.bot.isMobile, 'DAILY-SET', 'All "Daily Set" items have been completed')
|
||||
|
||||
// Optional: immediately run desktop search bundle
|
||||
if (!this.bot.isMobile && this.bot.config.workers.bundleDailySetWithSearch && this.bot.config.workers.doDesktopSearch) {
|
||||
try {
|
||||
await this.bot.utils.waitRandom(1200, 2600)
|
||||
await this.bot.activities.doSearch(page, data)
|
||||
} catch (e) {
|
||||
this.bot.log(this.bot.isMobile, 'DAILY-SET', `Post-DailySet search failed: ${e instanceof Error ? e.message : e}`, 'warn')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Punch Card
|
||||
async doPunchCard(page: Page, data: DashboardData) {
|
||||
|
||||
const punchCardsUncompleted = data.punchCards?.filter(x => x.parentPromotion && !x.parentPromotion.complete) ?? [] // Only return uncompleted punch cards
|
||||
|
||||
if (!punchCardsUncompleted.length) {
|
||||
this.bot.log(this.bot.isMobile, 'PUNCH-CARD', 'All "Punch Cards" have already been completed')
|
||||
return
|
||||
}
|
||||
|
||||
for (const punchCard of punchCardsUncompleted) {
|
||||
|
||||
// Ensure parentPromotion exists before proceeding
|
||||
if (!punchCard.parentPromotion?.title) {
|
||||
this.bot.log(this.bot.isMobile, 'PUNCH-CARD', `Skipped punchcard "${punchCard.name}" | Reason: Parent promotion is missing!`, 'warn')
|
||||
continue
|
||||
}
|
||||
|
||||
// Get latest page for each card
|
||||
page = await this.bot.browser.utils.getLatestTab(page)
|
||||
|
||||
const activitiesUncompleted = punchCard.childPromotions.filter(x => !x.complete) // Only return uncompleted activities
|
||||
|
||||
// Solve Activities
|
||||
this.bot.log(this.bot.isMobile, 'PUNCH-CARD', `Started solving "Punch Card" items for punchcard: "${punchCard.parentPromotion.title}"`)
|
||||
|
||||
// Got to punch card index page in a new tab
|
||||
await page.goto(punchCard.parentPromotion.destinationUrl, { referer: this.bot.config.baseURL })
|
||||
|
||||
// Wait for new page to load, max 10 seconds, however try regardless in case of error
|
||||
await page.waitForLoadState('networkidle', { timeout: 5000 }).catch(() => { })
|
||||
|
||||
await this.solveActivities(page, activitiesUncompleted, punchCard)
|
||||
|
||||
page = await this.bot.browser.utils.getLatestTab(page)
|
||||
|
||||
const pages = page.context().pages()
|
||||
|
||||
if (pages.length > 3) {
|
||||
await page.close()
|
||||
} else {
|
||||
await this.bot.browser.func.goHome(page)
|
||||
}
|
||||
|
||||
this.bot.log(this.bot.isMobile, 'PUNCH-CARD', `All items for punchcard: "${punchCard.parentPromotion.title}" have been completed`)
|
||||
}
|
||||
|
||||
this.bot.log(this.bot.isMobile, 'PUNCH-CARD', 'All "Punch Card" items have been completed')
|
||||
}
|
||||
|
||||
// More Promotions
|
||||
async doMorePromotions(page: Page, data: DashboardData) {
|
||||
const morePromotions = data.morePromotions
|
||||
|
||||
// Check if there is a promotional item
|
||||
if (data.promotionalItem) { // Convert and add the promotional item to the array
|
||||
morePromotions.push(data.promotionalItem as unknown as MorePromotion)
|
||||
}
|
||||
|
||||
const activitiesUncompleted = morePromotions?.filter(x => !x.complete && x.pointProgressMax > 0 && x.exclusiveLockedFeatureStatus !== 'locked') ?? []
|
||||
|
||||
if (!activitiesUncompleted.length) {
|
||||
this.bot.log(this.bot.isMobile, 'MORE-PROMOTIONS', 'All "More Promotion" items have already been completed')
|
||||
return
|
||||
}
|
||||
|
||||
// Solve Activities
|
||||
this.bot.log(this.bot.isMobile, 'MORE-PROMOTIONS', 'Started solving "More Promotions" items')
|
||||
|
||||
page = await this.bot.browser.utils.getLatestTab(page)
|
||||
|
||||
await this.solveActivities(page, activitiesUncompleted)
|
||||
|
||||
page = await this.bot.browser.utils.getLatestTab(page)
|
||||
|
||||
// Always return to the homepage if not already
|
||||
await this.bot.browser.func.goHome(page)
|
||||
|
||||
this.bot.log(this.bot.isMobile, 'MORE-PROMOTIONS', 'All "More Promotion" items have been completed')
|
||||
}
|
||||
|
||||
// Solve all the different types of activities
|
||||
private async solveActivities(activityPage: Page, activities: PromotionalItem[] | MorePromotion[], punchCard?: PunchCard) {
|
||||
const activityInitial = activityPage.url()
|
||||
const retry = new Retry(this.bot.config.retryPolicy)
|
||||
const throttle = new AdaptiveThrottler()
|
||||
|
||||
for (const activity of activities) {
|
||||
try {
|
||||
activityPage = await this.manageTabLifecycle(activityPage, activityInitial)
|
||||
await this.applyThrottle(throttle, 800, 1400)
|
||||
|
||||
const selector = await this.buildActivitySelector(activityPage, activity, punchCard)
|
||||
await this.prepareActivityPage(activityPage, selector, throttle)
|
||||
|
||||
const typeLabel = this.bot.activities.getTypeLabel(activity)
|
||||
if (typeLabel !== 'Unsupported') {
|
||||
await this.executeActivity(activityPage, activity, selector, throttle, retry)
|
||||
} else {
|
||||
this.bot.log(this.bot.isMobile, 'ACTIVITY', `Skipped activity "${activity.title}" | Reason: Unsupported type: "${activity.promotionType}"!`, 'warn')
|
||||
}
|
||||
|
||||
await this.applyThrottle(throttle, 1200, 2600)
|
||||
} catch (error) {
|
||||
await this.bot.browser.utils.captureDiagnostics(activityPage, `activity_error_${activity.title || activity.offerId}`)
|
||||
this.bot.log(this.bot.isMobile, 'ACTIVITY', 'An error occurred:' + error, 'error')
|
||||
throttle.record(false)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private async manageTabLifecycle(page: Page, initialUrl: string): Promise<Page> {
|
||||
page = await this.bot.browser.utils.getLatestTab(page)
|
||||
|
||||
const pages = page.context().pages()
|
||||
if (pages.length > 3) {
|
||||
await page.close()
|
||||
page = await this.bot.browser.utils.getLatestTab(page)
|
||||
}
|
||||
|
||||
if (page.url() !== initialUrl) {
|
||||
await page.goto(initialUrl)
|
||||
}
|
||||
|
||||
return page
|
||||
}
|
||||
|
||||
private async buildActivitySelector(page: Page, activity: PromotionalItem | MorePromotion, punchCard?: PunchCard): Promise<string> {
|
||||
if (punchCard) {
|
||||
return await this.bot.browser.func.getPunchCardActivity(page, activity)
|
||||
}
|
||||
|
||||
const name = activity.name.toLowerCase()
|
||||
if (name.includes('membercenter') || name.includes('exploreonbing')) {
|
||||
return `[data-bi-id^="${activity.name}"] .pointLink:not(.contentContainer .pointLink)`
|
||||
}
|
||||
|
||||
return `[data-bi-id^="${activity.offerId}"] .pointLink:not(.contentContainer .pointLink)`
|
||||
}
|
||||
|
||||
private async prepareActivityPage(page: Page, selector: string, throttle: AdaptiveThrottler): Promise<void> {
|
||||
await page.waitForLoadState('networkidle', { timeout: 10000 }).catch(() => {})
|
||||
await this.bot.browser.utils.humanizePage(page)
|
||||
await this.applyThrottle(throttle, 1200, 2600)
|
||||
}
|
||||
|
||||
private async executeActivity(page: Page, activity: PromotionalItem | MorePromotion, selector: string, throttle: AdaptiveThrottler, retry: Retry): Promise<void> {
|
||||
this.bot.log(this.bot.isMobile, 'ACTIVITY', `Found activity type: "${this.bot.activities.getTypeLabel(activity)}" title: "${activity.title}"`)
|
||||
|
||||
await page.click(selector)
|
||||
page = await this.bot.browser.utils.getLatestTab(page)
|
||||
|
||||
const timeoutMs = this.bot.utils.stringToMs(this.bot.config?.globalTimeout ?? '30s') * 2
|
||||
const runWithTimeout = (p: Promise<void>) => Promise.race([
|
||||
p,
|
||||
new Promise<void>((_, rej) => setTimeout(() => rej(new Error('activity-timeout')), timeoutMs))
|
||||
])
|
||||
|
||||
await retry.run(async () => {
|
||||
try {
|
||||
await runWithTimeout(this.bot.activities.run(page, activity))
|
||||
throttle.record(true)
|
||||
} catch (e) {
|
||||
await this.bot.browser.utils.captureDiagnostics(page, `activity_timeout_${activity.title || activity.offerId}`)
|
||||
throttle.record(false)
|
||||
throw e
|
||||
}
|
||||
}, () => true)
|
||||
|
||||
await this.bot.browser.utils.humanizePage(page)
|
||||
}
|
||||
|
||||
private async applyThrottle(throttle: AdaptiveThrottler, min: number, max: number): Promise<void> {
|
||||
const multiplier = throttle.getDelayMultiplier()
|
||||
const riskMultiplier = this.bot.getRiskDelayMultiplier()
|
||||
await this.bot.utils.waitRandom(
|
||||
Math.floor(min * multiplier * riskMultiplier),
|
||||
Math.floor(max * multiplier * riskMultiplier)
|
||||
)
|
||||
}
|
||||
|
||||
}
|
||||
51
src/functions/activities/ABC.ts
Normal file
51
src/functions/activities/ABC.ts
Normal file
@@ -0,0 +1,51 @@
|
||||
import { Page } from 'rebrowser-playwright'
|
||||
|
||||
import { Workers } from '../Workers'
|
||||
import { RETRY_LIMITS, TIMEOUTS } from '../../constants'
|
||||
|
||||
|
||||
export class ABC extends Workers {
|
||||
|
||||
async doABC(page: Page) {
|
||||
this.bot.log(this.bot.isMobile, 'ABC', 'Trying to complete poll')
|
||||
|
||||
try {
|
||||
let $ = await this.bot.browser.func.loadInCheerio(page)
|
||||
|
||||
let i
|
||||
for (i = 0; i < RETRY_LIMITS.ABC_MAX && !$('span.rw_icon').length; i++) {
|
||||
await page.waitForSelector('.wk_OptionClickClass', { state: 'visible', timeout: TIMEOUTS.DASHBOARD_WAIT })
|
||||
|
||||
const answers = $('.wk_OptionClickClass')
|
||||
const answer = answers[this.bot.utils.randomNumber(0, 2)]?.attribs['id']
|
||||
|
||||
await page.waitForSelector(`#${answer}`, { state: 'visible', timeout: TIMEOUTS.DASHBOARD_WAIT })
|
||||
|
||||
await this.bot.utils.wait(TIMEOUTS.MEDIUM_LONG)
|
||||
await page.click(`#${answer}`) // Click answer
|
||||
|
||||
await this.bot.utils.wait(TIMEOUTS.LONG + 1000)
|
||||
await page.waitForSelector('div.wk_button', { state: 'visible', timeout: TIMEOUTS.DASHBOARD_WAIT })
|
||||
await page.click('div.wk_button') // Click next question button
|
||||
|
||||
page = await this.bot.browser.utils.getLatestTab(page)
|
||||
$ = await this.bot.browser.func.loadInCheerio(page)
|
||||
await this.bot.utils.wait(TIMEOUTS.MEDIUM)
|
||||
}
|
||||
|
||||
await this.bot.utils.wait(TIMEOUTS.LONG + 1000)
|
||||
await page.close()
|
||||
|
||||
if (i === RETRY_LIMITS.ABC_MAX) {
|
||||
this.bot.log(this.bot.isMobile, 'ABC', `Failed to solve quiz, exceeded max iterations of ${RETRY_LIMITS.ABC_MAX}`, 'warn')
|
||||
} else {
|
||||
this.bot.log(this.bot.isMobile, 'ABC', 'Completed the ABC successfully')
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
await page.close()
|
||||
this.bot.log(this.bot.isMobile, 'ABC', 'An error occurred:' + error, 'error')
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
48
src/functions/activities/DailyCheckIn.ts
Normal file
48
src/functions/activities/DailyCheckIn.ts
Normal file
@@ -0,0 +1,48 @@
|
||||
import { randomBytes } from 'crypto'
|
||||
import { AxiosRequestConfig } from 'axios'
|
||||
|
||||
import { Workers } from '../Workers'
|
||||
|
||||
import { DashboardData } from '../../interface/DashboardData'
|
||||
|
||||
|
||||
export class DailyCheckIn extends Workers {
|
||||
public async doDailyCheckIn(accessToken: string, data: DashboardData) {
|
||||
this.bot.log(this.bot.isMobile, 'DAILY-CHECK-IN', 'Starting Daily Check In')
|
||||
|
||||
try {
|
||||
let geoLocale = data.userProfile.attributes.country
|
||||
geoLocale = (this.bot.config.searchSettings.useGeoLocaleQueries && geoLocale.length === 2) ? geoLocale.toLowerCase() : 'us'
|
||||
|
||||
const jsonData = {
|
||||
amount: 1,
|
||||
country: geoLocale,
|
||||
id: randomBytes(64).toString('hex'),
|
||||
type: 101,
|
||||
attributes: {
|
||||
offerid: 'Gamification_Sapphire_DailyCheckIn'
|
||||
}
|
||||
}
|
||||
|
||||
const claimRequest: AxiosRequestConfig = {
|
||||
url: 'https://prod.rewardsplatform.microsoft.com/dapi/me/activities',
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Authorization': `Bearer ${accessToken}`,
|
||||
'Content-Type': 'application/json',
|
||||
'X-Rewards-Country': geoLocale,
|
||||
'X-Rewards-Language': 'en'
|
||||
},
|
||||
data: JSON.stringify(jsonData)
|
||||
}
|
||||
|
||||
const claimResponse = await this.bot.axios.request(claimRequest)
|
||||
const claimedPoint = parseInt((await claimResponse.data).response?.activity?.p, 10) ?? 0
|
||||
|
||||
this.bot.log(this.bot.isMobile, 'DAILY-CHECK-IN', claimedPoint > 0 ? `Claimed ${claimedPoint} points` : 'Already claimed today')
|
||||
} catch (error) {
|
||||
this.bot.log(this.bot.isMobile, 'DAILY-CHECK-IN', 'An error occurred:' + error, 'error')
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
32
src/functions/activities/Poll.ts
Normal file
32
src/functions/activities/Poll.ts
Normal file
@@ -0,0 +1,32 @@
|
||||
import { Page } from 'rebrowser-playwright'
|
||||
|
||||
import { Workers } from '../Workers'
|
||||
import { TIMEOUTS } from '../../constants'
|
||||
|
||||
|
||||
export class Poll extends Workers {
|
||||
|
||||
async doPoll(page: Page) {
|
||||
this.bot.log(this.bot.isMobile, 'POLL', 'Trying to complete poll')
|
||||
|
||||
try {
|
||||
const buttonId = `#btoption${Math.floor(this.bot.utils.randomNumber(0, 1))}`
|
||||
|
||||
await page.waitForSelector(buttonId, { state: 'visible', timeout: TIMEOUTS.DASHBOARD_WAIT }).catch((e) => {
|
||||
this.bot.log(this.bot.isMobile, 'POLL', `Could not find poll button: ${e}`, 'warn')
|
||||
})
|
||||
await this.bot.utils.wait(TIMEOUTS.MEDIUM_LONG)
|
||||
|
||||
await page.click(buttonId)
|
||||
|
||||
await this.bot.utils.wait(TIMEOUTS.LONG + 1000)
|
||||
await page.close()
|
||||
|
||||
this.bot.log(this.bot.isMobile, 'POLL', 'Completed the poll successfully')
|
||||
} catch (error) {
|
||||
await page.close()
|
||||
this.bot.log(this.bot.isMobile, 'POLL', 'An error occurred:' + error, 'error')
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
132
src/functions/activities/Quiz.ts
Normal file
132
src/functions/activities/Quiz.ts
Normal file
@@ -0,0 +1,132 @@
|
||||
import { Page } from 'rebrowser-playwright'
|
||||
|
||||
import { Workers } from '../Workers'
|
||||
import { RETRY_LIMITS, TIMEOUTS, DELAYS } from '../../constants'
|
||||
|
||||
|
||||
// Solves the standard rewards "Quiz" activity. Two variants are handled:
// 8-option quizzes (multiple correct answers marked via iscorrectoption) and
// 2/3/4-option "lightspeed" quizzes (single correct answer via data-option).
export class Quiz extends Workers {

    async doQuiz(page: Page) {
        this.bot.log(this.bot.isMobile, 'QUIZ', 'Trying to complete quiz')

        try {
            // Check if the quiz has been started or not
            const quizNotStarted = await page.waitForSelector('#rqStartQuiz', { state: 'visible', timeout: TIMEOUTS.MEDIUM_LONG }).then(() => true).catch(() => false)
            if (quizNotStarted) {
                await page.click('#rqStartQuiz')
            } else {
                this.bot.log(this.bot.isMobile, 'QUIZ', 'Quiz has already been started, trying to finish it')
            }

            await this.bot.utils.wait(TIMEOUTS.MEDIUM_LONG)

            let quizData = await this.bot.browser.func.getQuizData(page)

            // Verify quiz is actually loaded before proceeding
            const firstOptionExists = await page.waitForSelector('#rqAnswerOption0', { state: 'attached', timeout: TIMEOUTS.VERY_LONG }).then(() => true).catch(() => false)
            if (!firstOptionExists) {
                this.bot.log(this.bot.isMobile, 'QUIZ', 'Quiz options not found - page may not have loaded correctly. Skipping.', 'warn')
                await page.close()
                return
            }
            const questionsRemaining = quizData.maxQuestions - quizData.CorrectlyAnsweredQuestionCount // Amount of questions remaining

            // All questions
            for (let question = 0; question < questionsRemaining; question++) {

                // 8-option variant: collect every option flagged correct, then click each
                if (quizData.numberOfOptions === 8) {
                    const answers: string[] = []

                    for (let i = 0; i < quizData.numberOfOptions; i++) {
                        const answerSelector = await page.waitForSelector(`#rqAnswerOption${i}`, { state: 'visible', timeout: TIMEOUTS.DASHBOARD_WAIT }).catch(() => null)

                        if (!answerSelector) {
                            this.bot.log(this.bot.isMobile, 'QUIZ', `Option ${i} not found - quiz structure may have changed. Skipping remaining options.`, 'warn')
                            break
                        }

                        // The page marks correct options with an iscorrectoption attribute
                        const answerAttribute = await answerSelector?.evaluate((el: Element) => el.getAttribute('iscorrectoption'))

                        if (answerAttribute && answerAttribute.toLowerCase() === 'true') {
                            answers.push(`#rqAnswerOption${i}`)
                        }
                    }

                    // If no correct answers found, skip this question
                    if (answers.length === 0) {
                        this.bot.log(this.bot.isMobile, 'QUIZ', 'No correct answers found for 8-option quiz. Skipping.', 'warn')
                        await page.close()
                        return
                    }

                    // Click the answers
                    for (const answer of answers) {
                        await page.waitForSelector(answer, { state: 'visible', timeout: DELAYS.QUIZ_ANSWER_WAIT })

                        // Click the answer on page
                        await page.click(answer)

                        // Each click triggers a refresh; abort the quiz if it never settles
                        const refreshSuccess = await this.bot.browser.func.waitForQuizRefresh(page)
                        if (!refreshSuccess) {
                            await page.close()
                            this.bot.log(this.bot.isMobile, 'QUIZ', 'An error occurred, refresh was unsuccessful', 'error')
                            return
                        }
                    }

                // Other type quiz, lightspeed (2/3/4 options, single correct answer)
                } else if ([2, 3, 4].includes(quizData.numberOfOptions)) {
                    quizData = await this.bot.browser.func.getQuizData(page) // Refresh Quiz Data
                    const correctOption = quizData.correctAnswer

                    let answerClicked = false

                    // Scan options until data-option matches the known correct answer
                    for (let i = 0; i < quizData.numberOfOptions; i++) {

                        const answerSelector = await page.waitForSelector(`#rqAnswerOption${i}`, { state: 'visible', timeout: RETRY_LIMITS.QUIZ_ANSWER_TIMEOUT }).catch(() => null)

                        if (!answerSelector) {
                            this.bot.log(this.bot.isMobile, 'QUIZ', `Option ${i} not found for ${quizData.numberOfOptions}-option quiz. Skipping.`, 'warn')
                            continue
                        }

                        const dataOption = await answerSelector?.evaluate((el: Element) => el.getAttribute('data-option'))

                        if (dataOption === correctOption) {
                            // Click the answer on page
                            await page.click(`#rqAnswerOption${i}`)
                            answerClicked = true

                            const refreshSuccess = await this.bot.browser.func.waitForQuizRefresh(page)
                            if (!refreshSuccess) {
                                await page.close()
                                this.bot.log(this.bot.isMobile, 'QUIZ', 'An error occurred, refresh was unsuccessful', 'error')
                                return
                            }
                            break
                        }
                    }

                    if (!answerClicked) {
                        this.bot.log(this.bot.isMobile, 'QUIZ', `Could not find correct answer for ${quizData.numberOfOptions}-option quiz. Skipping.`, 'warn')
                        await page.close()
                        return
                    }

                    await this.bot.utils.wait(DELAYS.QUIZ_ANSWER_WAIT)
                }
            }

            // Done with the quiz: small settle delay, then close the tab
            await this.bot.utils.wait(DELAYS.QUIZ_ANSWER_WAIT)
            await page.close()

            this.bot.log(this.bot.isMobile, 'QUIZ', 'Completed the quiz successfully')
        } catch (error) {
            // Capture evidence before closing so failures can be diagnosed
            await this.bot.browser.utils.captureDiagnostics(page, 'quiz_error')
            await page.close()
            this.bot.log(this.bot.isMobile, 'QUIZ', 'An error occurred:' + error, 'error')
        }
    }

}
|
||||
73
src/functions/activities/ReadToEarn.ts
Normal file
73
src/functions/activities/ReadToEarn.ts
Normal file
@@ -0,0 +1,73 @@
|
||||
import { randomBytes } from 'crypto'
|
||||
import { AxiosRequestConfig } from 'axios'
|
||||
|
||||
import { Workers } from '../Workers'
|
||||
|
||||
import { DashboardData } from '../../interface/DashboardData'
|
||||
|
||||
|
||||
export class ReadToEarn extends Workers {
|
||||
public async doReadToEarn(accessToken: string, data: DashboardData) {
|
||||
this.bot.log(this.bot.isMobile, 'READ-TO-EARN', 'Starting Read to Earn')
|
||||
|
||||
try {
|
||||
let geoLocale = data.userProfile.attributes.country
|
||||
geoLocale = (this.bot.config.searchSettings.useGeoLocaleQueries && geoLocale.length === 2) ? geoLocale.toLowerCase() : 'us'
|
||||
|
||||
const userDataRequest: AxiosRequestConfig = {
|
||||
url: 'https://prod.rewardsplatform.microsoft.com/dapi/me',
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Authorization': `Bearer ${accessToken}`,
|
||||
'X-Rewards-Country': geoLocale,
|
||||
'X-Rewards-Language': 'en'
|
||||
}
|
||||
}
|
||||
const userDataResponse = await this.bot.axios.request(userDataRequest)
|
||||
const userData = (await userDataResponse.data).response
|
||||
let userBalance = userData.balance
|
||||
|
||||
const jsonData = {
|
||||
amount: 1,
|
||||
country: geoLocale,
|
||||
id: '1',
|
||||
type: 101,
|
||||
attributes: {
|
||||
offerid: 'ENUS_readarticle3_30points'
|
||||
}
|
||||
}
|
||||
|
||||
const articleCount = 10
|
||||
for (let i = 0; i < articleCount; ++i) {
|
||||
jsonData.id = randomBytes(64).toString('hex')
|
||||
const claimRequest = {
|
||||
url: 'https://prod.rewardsplatform.microsoft.com/dapi/me/activities',
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Authorization': `Bearer ${accessToken}`,
|
||||
'Content-Type': 'application/json',
|
||||
'X-Rewards-Country': geoLocale,
|
||||
'X-Rewards-Language': 'en'
|
||||
},
|
||||
data: JSON.stringify(jsonData)
|
||||
}
|
||||
|
||||
const claimResponse = await this.bot.axios.request(claimRequest)
|
||||
const newBalance = (await claimResponse.data).response.balance
|
||||
|
||||
if (newBalance == userBalance) {
|
||||
this.bot.log(this.bot.isMobile, 'READ-TO-EARN', 'Read all available articles')
|
||||
break
|
||||
} else {
|
||||
this.bot.log(this.bot.isMobile, 'READ-TO-EARN', `Read article ${i + 1} of ${articleCount} max | Gained ${newBalance - userBalance} Points`)
|
||||
userBalance = newBalance
|
||||
await this.bot.utils.wait(Math.floor(this.bot.utils.randomNumber(this.bot.utils.stringToMs(this.bot.config.searchSettings.searchDelay.min), this.bot.utils.stringToMs(this.bot.config.searchSettings.searchDelay.max))))
|
||||
}
|
||||
}
|
||||
|
||||
this.bot.log(this.bot.isMobile, 'READ-TO-EARN', 'Completed Read to Earn')
|
||||
} catch (error) {
|
||||
this.bot.log(this.bot.isMobile, 'READ-TO-EARN', 'An error occurred:' + error, 'error')
|
||||
}
|
||||
}
|
||||
}
|
||||
458
src/functions/activities/Search.ts
Normal file
458
src/functions/activities/Search.ts
Normal file
@@ -0,0 +1,458 @@
|
||||
import { Page } from 'rebrowser-playwright'
|
||||
import { platform } from 'os'
|
||||
|
||||
import { Workers } from '../Workers'
|
||||
|
||||
import { Counters, DashboardData } from '../../interface/DashboardData'
|
||||
import { GoogleSearch } from '../../interface/Search'
|
||||
import { AxiosRequestConfig } from 'axios'
|
||||
|
||||
// Minimal typing of the Google Trends `batchexecute` payload after the double
// JSON.parse in extractJsonFromResponse: a header string followed by rows of
// [topic, ...nulls, [query, ...relatedQueries]].
// NOTE(review): shape inferred from how getGoogleTrends indexes into it
// (query[0], query[9]) — confirm against a live response.
type GoogleTrendsResponse = [
    string,
    [
        string,
        ...null[],
        [string, ...string[]]
    ][]
];
|
||||
|
||||
export class Search extends Workers {
|
||||
private bingHome = 'https://bing.com'
|
||||
private searchPageURL = ''
|
||||
|
||||
public async doSearch(page: Page, data: DashboardData) {
|
||||
this.bot.log(this.bot.isMobile, 'SEARCH-BING', 'Starting Bing searches')
|
||||
|
||||
page = await this.bot.browser.utils.getLatestTab(page)
|
||||
|
||||
let searchCounters: Counters = await this.bot.browser.func.getSearchPoints()
|
||||
let missingPoints = this.calculatePoints(searchCounters)
|
||||
|
||||
if (missingPoints === 0) {
|
||||
this.bot.log(this.bot.isMobile, 'SEARCH-BING', 'Bing searches have already been completed')
|
||||
return
|
||||
}
|
||||
|
||||
// Generate search queries (primary: Google Trends)
|
||||
const geo = this.bot.config.searchSettings.useGeoLocaleQueries ? data.userProfile.attributes.country : 'US'
|
||||
let googleSearchQueries = await this.getGoogleTrends(geo)
|
||||
|
||||
// Fallback: if trends failed or insufficient, sample from local queries file
|
||||
if (!googleSearchQueries.length || googleSearchQueries.length < 10) {
|
||||
this.bot.log(this.bot.isMobile, 'SEARCH-BING', 'Primary trends source insufficient, falling back to local queries.json', 'warn')
|
||||
try {
|
||||
const local = await import('../queries.json')
|
||||
// Flatten & sample
|
||||
const sampleSize = Math.max(5, Math.min(this.bot.config.searchSettings.localFallbackCount || 25, local.default.length))
|
||||
const sampled = this.bot.utils.shuffleArray(local.default).slice(0, sampleSize)
|
||||
googleSearchQueries = sampled.map((x: { title: string; queries: string[] }) => ({ topic: x.queries[0] || x.title, related: x.queries.slice(1) }))
|
||||
} catch (e) {
|
||||
this.bot.log(this.bot.isMobile, 'SEARCH-BING', 'Failed loading local queries fallback: ' + (e instanceof Error ? e.message : e), 'error')
|
||||
}
|
||||
}
|
||||
|
||||
if (this.bot.config.queryDiversity?.enabled && this.bot.queryEngine) {
|
||||
try {
|
||||
const targetCount = Math.max(20, missingPoints * 2)
|
||||
const extraTerms = await this.bot.queryEngine.fetchQueries(targetCount)
|
||||
if (extraTerms.length) {
|
||||
this.bot.log(this.bot.isMobile, 'SEARCH-BING', `Query diversity enabled — adding ${extraTerms.length} mixed-source terms`)
|
||||
googleSearchQueries.push(...extraTerms.map(term => ({ topic: term, related: [] })))
|
||||
}
|
||||
} catch (err) {
|
||||
this.bot.log(this.bot.isMobile, 'SEARCH-BING', `Query diversity error: ${err instanceof Error ? err.message : err}`, 'warn')
|
||||
}
|
||||
}
|
||||
|
||||
googleSearchQueries = this.bot.utils.shuffleArray(googleSearchQueries)
|
||||
// Deduplicate topics
|
||||
const seen = new Set<string>()
|
||||
googleSearchQueries = googleSearchQueries.filter(q => {
|
||||
if (seen.has(q.topic.toLowerCase())) return false
|
||||
seen.add(q.topic.toLowerCase())
|
||||
return true
|
||||
})
|
||||
|
||||
// Go to bing
|
||||
await page.goto(this.searchPageURL ? this.searchPageURL : this.bingHome)
|
||||
|
||||
await this.bot.utils.wait(2000)
|
||||
|
||||
await this.bot.browser.utils.tryDismissAllMessages(page)
|
||||
|
||||
let stagnation = 0 // consecutive searches without point progress
|
||||
|
||||
const queries: string[] = []
|
||||
// Mobile search doesn't seem to like related queries?
|
||||
googleSearchQueries.forEach(x => { this.bot.isMobile ? queries.push(x.topic) : queries.push(x.topic, ...x.related) })
|
||||
|
||||
// Loop over Google search queries
|
||||
for (let i = 0; i < queries.length; i++) {
|
||||
const query = queries[i] as string
|
||||
|
||||
this.bot.log(this.bot.isMobile, 'SEARCH-BING', `${missingPoints} Points Remaining | Query: ${query}`)
|
||||
|
||||
searchCounters = await this.bingSearch(page, query)
|
||||
const newMissingPoints = this.calculatePoints(searchCounters)
|
||||
|
||||
// If the new point amount is the same as before
|
||||
if (newMissingPoints === missingPoints) {
|
||||
stagnation++
|
||||
} else {
|
||||
stagnation = 0
|
||||
}
|
||||
|
||||
missingPoints = newMissingPoints
|
||||
|
||||
if (missingPoints === 0) break
|
||||
|
||||
// Only for mobile searches
|
||||
if (stagnation > 5 && this.bot.isMobile) {
|
||||
this.bot.log(this.bot.isMobile, 'SEARCH-BING', 'Search didn\'t gain point for 5 iterations, likely bad User-Agent', 'warn')
|
||||
break
|
||||
}
|
||||
|
||||
// If we didn't gain points for 10 iterations, assume it's stuck
|
||||
if (stagnation > 10) {
|
||||
this.bot.log(this.bot.isMobile, 'SEARCH-BING', 'Search didn\'t gain point for 10 iterations aborting searches', 'warn')
|
||||
stagnation = 0 // allow fallback loop below
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// Only for mobile searches
|
||||
if (missingPoints > 0 && this.bot.isMobile) {
|
||||
return
|
||||
}
|
||||
|
||||
// If we still got remaining search queries, generate extra ones
|
||||
if (missingPoints > 0) {
|
||||
this.bot.log(this.bot.isMobile, 'SEARCH-BING', `Search completed but we're missing ${missingPoints} points, generating extra searches`)
|
||||
|
||||
let i = 0
|
||||
let fallbackRounds = 0
|
||||
const extraRetries = this.bot.config.searchSettings.extraFallbackRetries || 1
|
||||
while (missingPoints > 0 && fallbackRounds <= extraRetries) {
|
||||
const query = googleSearchQueries[i++] as GoogleSearch
|
||||
if (!query) break
|
||||
|
||||
// Get related search terms to the Google search queries
|
||||
const relatedTerms = await this.getRelatedTerms(query?.topic)
|
||||
if (relatedTerms.length > 3) {
|
||||
// Search for the first 2 related terms
|
||||
for (const term of relatedTerms.slice(1, 3)) {
|
||||
this.bot.log(this.bot.isMobile, 'SEARCH-BING-EXTRA', `${missingPoints} Points Remaining | Query: ${term}`)
|
||||
|
||||
searchCounters = await this.bingSearch(page, term)
|
||||
const newMissingPoints = this.calculatePoints(searchCounters)
|
||||
|
||||
// If the new point amount is the same as before
|
||||
if (newMissingPoints === missingPoints) {
|
||||
stagnation++
|
||||
} else {
|
||||
stagnation = 0
|
||||
}
|
||||
|
||||
missingPoints = newMissingPoints
|
||||
|
||||
// If we satisfied the searches
|
||||
if (missingPoints === 0) {
|
||||
break
|
||||
}
|
||||
|
||||
// Try 5 more times, then we tried a total of 15 times, fair to say it's stuck
|
||||
if (stagnation > 5) {
|
||||
this.bot.log(this.bot.isMobile, 'SEARCH-BING-EXTRA', 'Search didn\'t gain point for 5 iterations aborting searches', 'warn')
|
||||
return
|
||||
}
|
||||
}
|
||||
fallbackRounds++
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
this.bot.log(this.bot.isMobile, 'SEARCH-BING', 'Completed searches')
|
||||
}
|
||||
|
||||
private async bingSearch(searchPage: Page, query: string) {
|
||||
const platformControlKey = platform() === 'darwin' ? 'Meta' : 'Control'
|
||||
|
||||
// Try a max of 5 times
|
||||
for (let i = 0; i < 5; i++) {
|
||||
try {
|
||||
// This page had already been set to the Bing.com page or the previous search listing, we just need to select it
|
||||
searchPage = await this.bot.browser.utils.getLatestTab(searchPage)
|
||||
|
||||
// Go to top of the page
|
||||
await searchPage.evaluate(() => {
|
||||
window.scrollTo(0, 0)
|
||||
})
|
||||
|
||||
await this.bot.utils.wait(500)
|
||||
|
||||
const searchBar = '#sb_form_q'
|
||||
// Prefer attached over visible to avoid strict visibility waits when overlays exist
|
||||
const box = searchPage.locator(searchBar)
|
||||
await box.waitFor({ state: 'attached', timeout: 15000 })
|
||||
|
||||
// Try dismissing overlays before interacting
|
||||
await this.bot.browser.utils.tryDismissAllMessages(searchPage)
|
||||
await this.bot.utils.wait(200)
|
||||
|
||||
let navigatedDirectly = false
|
||||
try {
|
||||
// Try focusing and filling instead of clicking (more reliable on mobile)
|
||||
await box.focus({ timeout: 2000 }).catch(() => { /* ignore focus errors */ })
|
||||
await box.fill('')
|
||||
await this.bot.utils.wait(200)
|
||||
await searchPage.keyboard.down(platformControlKey)
|
||||
await searchPage.keyboard.press('A')
|
||||
await searchPage.keyboard.press('Backspace')
|
||||
await searchPage.keyboard.up(platformControlKey)
|
||||
await box.type(query, { delay: 20 })
|
||||
await searchPage.keyboard.press('Enter')
|
||||
} catch (typeErr) {
|
||||
// As a robust fallback, navigate directly to the search results URL
|
||||
const q = encodeURIComponent(query)
|
||||
const url = `https://www.bing.com/search?q=${q}`
|
||||
await searchPage.goto(url)
|
||||
navigatedDirectly = true
|
||||
}
|
||||
|
||||
await this.bot.utils.wait(3000)
|
||||
|
||||
// Bing.com in Chrome opens a new tab when searching via Enter; if we navigated directly, stay on current tab
|
||||
const resultPage = navigatedDirectly ? searchPage : await this.bot.browser.utils.getLatestTab(searchPage)
|
||||
this.searchPageURL = new URL(resultPage.url()).href // Set the results page
|
||||
|
||||
await this.bot.browser.utils.reloadBadPage(resultPage)
|
||||
|
||||
if (this.bot.config.searchSettings.scrollRandomResults) {
|
||||
await this.bot.utils.wait(2000)
|
||||
await this.randomScroll(resultPage)
|
||||
}
|
||||
|
||||
if (this.bot.config.searchSettings.clickRandomResults) {
|
||||
await this.bot.utils.wait(2000)
|
||||
await this.clickRandomLink(resultPage)
|
||||
}
|
||||
|
||||
// Delay between searches
|
||||
const minDelay = this.bot.utils.stringToMs(this.bot.config.searchSettings.searchDelay.min)
|
||||
const maxDelay = this.bot.utils.stringToMs(this.bot.config.searchSettings.searchDelay.max)
|
||||
const adaptivePad = Math.min(4000, Math.max(0, Math.floor(Math.random() * 800)))
|
||||
await this.bot.utils.wait(Math.floor(this.bot.utils.randomNumber(minDelay, maxDelay)) + adaptivePad)
|
||||
|
||||
return await this.bot.browser.func.getSearchPoints()
|
||||
|
||||
} catch (error) {
|
||||
if (i === 5) {
|
||||
this.bot.log(this.bot.isMobile, 'SEARCH-BING', 'Failed after 5 retries... An error occurred:' + error, 'error')
|
||||
break
|
||||
|
||||
}
|
||||
|
||||
this.bot.log(this.bot.isMobile, 'SEARCH-BING', 'Search failed, An error occurred:' + error, 'error')
|
||||
this.bot.log(this.bot.isMobile, 'SEARCH-BING', `Retrying search, attempt ${i}/5`, 'warn')
|
||||
|
||||
// Reset the tabs
|
||||
const lastTab = await this.bot.browser.utils.getLatestTab(searchPage)
|
||||
await this.closeTabs(lastTab)
|
||||
|
||||
await this.bot.utils.wait(4000)
|
||||
}
|
||||
}
|
||||
|
||||
this.bot.log(this.bot.isMobile, 'SEARCH-BING', 'Search failed after 5 retries, ending', 'error')
|
||||
return await this.bot.browser.func.getSearchPoints()
|
||||
}
|
||||
|
||||
private async getGoogleTrends(geoLocale: string = 'US'): Promise<GoogleSearch[]> {
|
||||
const queryTerms: GoogleSearch[] = []
|
||||
this.bot.log(this.bot.isMobile, 'SEARCH-GOOGLE-TRENDS', `Generating search queries, can take a while! | GeoLocale: ${geoLocale}`)
|
||||
|
||||
try {
|
||||
const request: AxiosRequestConfig = {
|
||||
url: 'https://trends.google.com/_/TrendsUi/data/batchexecute',
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8'
|
||||
},
|
||||
data: `f.req=[[[i0OFE,"[null, null, \\"${geoLocale.toUpperCase()}\\", 0, null, 48]"]]]`
|
||||
}
|
||||
|
||||
const response = await this.bot.axios.request(request, this.bot.config.proxy.proxyGoogleTrends)
|
||||
const rawText = response.data
|
||||
|
||||
const trendsData = this.extractJsonFromResponse(rawText)
|
||||
if (!trendsData) {
|
||||
throw this.bot.log(this.bot.isMobile, 'SEARCH-GOOGLE-TRENDS', 'Failed to parse Google Trends response', 'error')
|
||||
}
|
||||
|
||||
const mappedTrendsData = trendsData.map(query => [query[0], query[9]!.slice(1)])
|
||||
this.bot.log(this.bot.isMobile, 'SEARCH-GOOGLE-TRENDS', `Found ${mappedTrendsData.length} search queries for ${geoLocale}`)
|
||||
|
||||
if (mappedTrendsData.length < 30 && geoLocale.toUpperCase() !== 'US') {
|
||||
this.bot.log(this.bot.isMobile, 'SEARCH-GOOGLE-TRENDS', `Insufficient search queries (${mappedTrendsData.length} < 30), falling back to US`, 'warn')
|
||||
return this.getGoogleTrends()
|
||||
}
|
||||
|
||||
for (const [topic, relatedQueries] of mappedTrendsData) {
|
||||
queryTerms.push({
|
||||
topic: topic as string,
|
||||
related: relatedQueries as string[]
|
||||
})
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
this.bot.log(this.bot.isMobile, 'SEARCH-GOOGLE-TRENDS', 'An error occurred:' + error, 'error')
|
||||
}
|
||||
|
||||
return queryTerms
|
||||
}
|
||||
|
||||
private extractJsonFromResponse(text: string): GoogleTrendsResponse[1] | null {
|
||||
const lines = text.split('\n')
|
||||
for (const line of lines) {
|
||||
const trimmed = line.trim()
|
||||
if (trimmed.startsWith('[') && trimmed.endsWith(']')) {
|
||||
try {
|
||||
return JSON.parse(JSON.parse(trimmed)[0][2])[1]
|
||||
} catch {
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
private async getRelatedTerms(term: string): Promise<string[]> {
|
||||
try {
|
||||
const request = {
|
||||
url: `https://api.bing.com/osjson.aspx?query=${term}`,
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Content-Type': 'application/json'
|
||||
}
|
||||
}
|
||||
|
||||
const response = await this.bot.axios.request(request, this.bot.config.proxy.proxyBingTerms)
|
||||
|
||||
return response.data[1] as string[]
|
||||
} catch (error) {
|
||||
this.bot.log(this.bot.isMobile, 'SEARCH-BING-RELATED', 'An error occurred:' + error, 'error')
|
||||
}
|
||||
|
||||
return []
|
||||
}
|
||||
|
||||
private async randomScroll(page: Page) {
|
||||
try {
|
||||
const viewportHeight = await page.evaluate(() => window.innerHeight)
|
||||
const totalHeight = await page.evaluate(() => document.body.scrollHeight)
|
||||
const randomScrollPosition = Math.floor(Math.random() * (totalHeight - viewportHeight))
|
||||
|
||||
await page.evaluate((scrollPos: number) => {
|
||||
window.scrollTo(0, scrollPos)
|
||||
}, randomScrollPosition)
|
||||
|
||||
} catch (error) {
|
||||
this.bot.log(this.bot.isMobile, 'SEARCH-RANDOM-SCROLL', 'An error occurred:' + error, 'error')
|
||||
}
|
||||
}
|
||||
|
||||
private async clickRandomLink(page: Page) {
|
||||
try {
|
||||
await page.click('#b_results .b_algo h2', { timeout: 2000 }).catch(() => { }) // Since we don't really care if it did it or not
|
||||
|
||||
// Only used if the browser is not the edge browser (continue on Edge popup)
|
||||
await this.closeContinuePopup(page)
|
||||
|
||||
// Stay for 10 seconds for page to load and "visit"
|
||||
await this.bot.utils.wait(10000)
|
||||
|
||||
// Will get current tab if no new one is created, this will always be the visited site or the result page if it failed to click
|
||||
let lastTab = await this.bot.browser.utils.getLatestTab(page)
|
||||
|
||||
let lastTabURL = new URL(lastTab.url()) // Get new tab info, this is the website we're visiting
|
||||
|
||||
// Check if the URL is different from the original one, don't loop more than 5 times.
|
||||
let i = 0
|
||||
while (lastTabURL.href !== this.searchPageURL && i < 5) {
|
||||
|
||||
await this.closeTabs(lastTab)
|
||||
|
||||
// End of loop, refresh lastPage
|
||||
lastTab = await this.bot.browser.utils.getLatestTab(page) // Finally update the lastTab var again
|
||||
lastTabURL = new URL(lastTab.url()) // Get new tab info
|
||||
i++
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
this.bot.log(this.bot.isMobile, 'SEARCH-RANDOM-CLICK', 'An error occurred:' + error, 'error')
|
||||
}
|
||||
}
|
||||
|
||||
private async closeTabs(lastTab: Page) {
|
||||
const browser = lastTab.context()
|
||||
const tabs = browser.pages()
|
||||
|
||||
try {
|
||||
if (tabs.length > 2) {
|
||||
// If more than 2 tabs are open, close the last tab
|
||||
|
||||
await lastTab.close()
|
||||
this.bot.log(this.bot.isMobile, 'SEARCH-CLOSE-TABS', `More than 2 were open, closed the last tab: "${new URL(lastTab.url()).host}"`)
|
||||
|
||||
} else if (tabs.length === 1) {
|
||||
// If only 1 tab is open, open a new one to search in
|
||||
|
||||
const newPage = await browser.newPage()
|
||||
await this.bot.utils.wait(1000)
|
||||
|
||||
await newPage.goto(this.bingHome)
|
||||
await this.bot.utils.wait(3000)
|
||||
this.searchPageURL = newPage.url()
|
||||
|
||||
this.bot.log(this.bot.isMobile, 'SEARCH-CLOSE-TABS', 'There was only 1 tab open, crated a new one')
|
||||
} else {
|
||||
// Else reset the last tab back to the search listing or Bing.com
|
||||
|
||||
lastTab = await this.bot.browser.utils.getLatestTab(lastTab)
|
||||
await lastTab.goto(this.searchPageURL ? this.searchPageURL : this.bingHome)
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
this.bot.log(this.bot.isMobile, 'SEARCH-CLOSE-TABS', 'An error occurred:' + error, 'error')
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private calculatePoints(counters: Counters) {
|
||||
const mobileData = counters.mobileSearch?.[0] // Mobile searches
|
||||
const genericData = counters.pcSearch?.[0] // Normal searches
|
||||
const edgeData = counters.pcSearch?.[1] // Edge searches
|
||||
|
||||
const missingPoints = (this.bot.isMobile && mobileData)
|
||||
? mobileData.pointProgressMax - mobileData.pointProgress
|
||||
: (edgeData ? edgeData.pointProgressMax - edgeData.pointProgress : 0)
|
||||
+ (genericData ? genericData.pointProgressMax - genericData.pointProgress : 0)
|
||||
|
||||
return missingPoints
|
||||
}
|
||||
|
||||
private async closeContinuePopup(page: Page) {
|
||||
try {
|
||||
await page.waitForSelector('#sacs_close', { timeout: 1000 })
|
||||
const continueButton = await page.$('#sacs_close')
|
||||
|
||||
if (continueButton) {
|
||||
await continueButton.click()
|
||||
}
|
||||
} catch (error) {
|
||||
// Continue if element is not found or other error occurs
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
85
src/functions/activities/SearchOnBing.ts
Normal file
85
src/functions/activities/SearchOnBing.ts
Normal file
@@ -0,0 +1,85 @@
|
||||
import type { Page } from 'playwright'
|
||||
import * as fs from 'fs'
|
||||
import path from 'path'
|
||||
|
||||
import { Workers } from '../Workers'
|
||||
import { DELAYS } from '../../constants'
|
||||
|
||||
import { MorePromotion, PromotionalItem } from '../../interface/DashboardData'
|
||||
|
||||
|
||||
export class SearchOnBing extends Workers {
|
||||
|
||||
async doSearchOnBing(page: Page, activity: MorePromotion | PromotionalItem) {
|
||||
this.bot.log(this.bot.isMobile, 'SEARCH-ON-BING', 'Trying to complete SearchOnBing')
|
||||
|
||||
try {
|
||||
await this.bot.utils.wait(DELAYS.SEARCH_ON_BING_WAIT)
|
||||
|
||||
await this.bot.browser.utils.tryDismissAllMessages(page)
|
||||
|
||||
const query = await this.getSearchQuery(activity.title)
|
||||
|
||||
const searchBar = '#sb_form_q'
|
||||
const box = page.locator(searchBar)
|
||||
await box.waitFor({ state: 'attached', timeout: DELAYS.SEARCH_BAR_TIMEOUT })
|
||||
await this.bot.browser.utils.tryDismissAllMessages(page)
|
||||
await this.bot.utils.wait(DELAYS.SEARCH_ON_BING_FOCUS)
|
||||
try {
|
||||
await box.focus({ timeout: DELAYS.THIS_OR_THAT_START }).catch(() => { /* ignore */ })
|
||||
await box.fill('')
|
||||
await this.bot.utils.wait(DELAYS.SEARCH_ON_BING_FOCUS)
|
||||
await page.keyboard.type(query, { delay: DELAYS.TYPING_DELAY })
|
||||
await page.keyboard.press('Enter')
|
||||
} catch {
|
||||
const url = `https://www.bing.com/search?q=${encodeURIComponent(query)}`
|
||||
await page.goto(url)
|
||||
}
|
||||
await this.bot.utils.wait(DELAYS.SEARCH_ON_BING_COMPLETE)
|
||||
|
||||
await page.close()
|
||||
|
||||
this.bot.log(this.bot.isMobile, 'SEARCH-ON-BING', 'Completed the SearchOnBing successfully')
|
||||
} catch (error) {
|
||||
await page.close()
|
||||
this.bot.log(this.bot.isMobile, 'SEARCH-ON-BING', 'An error occurred:' + error, 'error')
|
||||
}
|
||||
}
|
||||
|
||||
private async getSearchQuery(title: string): Promise<string> {
|
||||
interface Queries {
|
||||
title: string;
|
||||
queries: string[]
|
||||
}
|
||||
|
||||
let queries: Queries[] = []
|
||||
|
||||
try {
|
||||
if (this.bot.config.searchOnBingLocalQueries) {
|
||||
const data = fs.readFileSync(path.join(__dirname, '../queries.json'), 'utf8')
|
||||
queries = JSON.parse(data)
|
||||
} else {
|
||||
// Fetch from the repo directly so the user doesn't need to redownload the script for the new activities
|
||||
const response = await this.bot.axios.request({
|
||||
method: 'GET',
|
||||
url: 'https://raw.githubusercontent.com/LightZirconite/Microsoft-Rewards-Rewi/refs/heads/main/src/functions/queries.json'
|
||||
})
|
||||
queries = response.data
|
||||
}
|
||||
|
||||
const answers = queries.find(x => this.normalizeString(x.title) === this.normalizeString(title))
|
||||
const answer = answers ? this.bot.utils.shuffleArray(answers?.queries)[0] as string : title
|
||||
|
||||
this.bot.log(this.bot.isMobile, 'SEARCH-ON-BING-QUERY', `Fetched answer: ${answer} | question: ${title}`)
|
||||
return answer
|
||||
|
||||
} catch (error) {
|
||||
this.bot.log(this.bot.isMobile, 'SEARCH-ON-BING-QUERY', 'An error occurred:' + error, 'error')
|
||||
return title
|
||||
}
|
||||
}
|
||||
|
||||
private normalizeString(string: string): string {
|
||||
return string.normalize('NFD').trim().toLowerCase().replace(/[^\x20-\x7E]/g, '').replace(/[?!]/g, '')
|
||||
}
|
||||
}
|
||||
48
src/functions/activities/ThisOrThat.ts
Normal file
48
src/functions/activities/ThisOrThat.ts
Normal file
@@ -0,0 +1,48 @@
|
||||
import { Page } from 'rebrowser-playwright'
|
||||
|
||||
import { Workers } from '../Workers'
|
||||
import { DELAYS } from '../../constants'
|
||||
|
||||
|
||||
export class ThisOrThat extends Workers {
|
||||
|
||||
async doThisOrThat(page: Page) {
|
||||
this.bot.log(this.bot.isMobile, 'THIS-OR-THAT', 'Trying to complete ThisOrThat')
|
||||
|
||||
|
||||
try {
|
||||
// Check if the quiz has been started or not
|
||||
const quizNotStarted = await page.waitForSelector('#rqStartQuiz', { state: 'visible', timeout: DELAYS.THIS_OR_THAT_START }).then(() => true).catch(() => false)
|
||||
if (quizNotStarted) {
|
||||
await page.click('#rqStartQuiz')
|
||||
} else {
|
||||
this.bot.log(this.bot.isMobile, 'THIS-OR-THAT', 'ThisOrThat has already been started, trying to finish it')
|
||||
}
|
||||
|
||||
await this.bot.utils.wait(DELAYS.THIS_OR_THAT_START)
|
||||
|
||||
// Solving
|
||||
const quizData = await this.bot.browser.func.getQuizData(page)
|
||||
const questionsRemaining = quizData.maxQuestions - (quizData.currentQuestionNumber - 1) // Amount of questions remaining
|
||||
|
||||
for (let question = 0; question < questionsRemaining; question++) {
|
||||
// Since there's no solving logic yet, randomly guess to complete
|
||||
const buttonId = `#rqAnswerOption${Math.floor(this.bot.utils.randomNumber(0, 1))}`
|
||||
await page.click(buttonId)
|
||||
|
||||
const refreshSuccess = await this.bot.browser.func.waitForQuizRefresh(page)
|
||||
if (!refreshSuccess) {
|
||||
await page.close()
|
||||
this.bot.log(this.bot.isMobile, 'QUIZ', 'An error occurred, refresh was unsuccessful', 'error')
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
this.bot.log(this.bot.isMobile, 'THIS-OR-THAT', 'Completed the ThisOrThat successfully')
|
||||
} catch (error) {
|
||||
await page.close()
|
||||
this.bot.log(this.bot.isMobile, 'THIS-OR-THAT', 'An error occurred:' + error, 'error')
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
23
src/functions/activities/UrlReward.ts
Normal file
23
src/functions/activities/UrlReward.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
import { Page } from 'rebrowser-playwright'
|
||||
|
||||
import { Workers } from '../Workers'
|
||||
|
||||
|
||||
export class UrlReward extends Workers {
|
||||
|
||||
async doUrlReward(page: Page) {
|
||||
this.bot.log(this.bot.isMobile, 'URL-REWARD', 'Trying to complete UrlReward')
|
||||
|
||||
try {
|
||||
await this.bot.utils.wait(2000)
|
||||
|
||||
await page.close()
|
||||
|
||||
this.bot.log(this.bot.isMobile, 'URL-REWARD', 'Completed the UrlReward successfully')
|
||||
} catch (error) {
|
||||
await page.close()
|
||||
this.bot.log(this.bot.isMobile, 'URL-REWARD', 'An error occurred:' + error, 'error')
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
289
src/functions/queries.json
Normal file
289
src/functions/queries.json
Normal file
@@ -0,0 +1,289 @@
|
||||
[
|
||||
{
|
||||
"title": "Houses near you",
|
||||
"queries": [
|
||||
"Houses near me"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Feeling symptoms?",
|
||||
"queries": [
|
||||
"Rash on forearm",
|
||||
"Stuffy nose",
|
||||
"Tickling cough"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Get your shopping done faster",
|
||||
"queries": [
|
||||
"Buy PS5",
|
||||
"Buy Xbox",
|
||||
"Chair deals"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Translate anything",
|
||||
"queries": [
|
||||
"Translate welcome home to Korean",
|
||||
"Translate welcome home to Japanese",
|
||||
"Translate goodbye to Japanese"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Search the lyrics of a song",
|
||||
"queries": [
|
||||
"Debarge rhythm of the night lyrics"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Let's watch that movie again!",
|
||||
"queries": [
|
||||
"Alien movie",
|
||||
"Aliens movie",
|
||||
"Alien 3 movie",
|
||||
"Predator movie"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Plan a quick getaway",
|
||||
"queries": [
|
||||
"Flights Amsterdam to Tokyo",
|
||||
"Flights New York to Tokyo"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Discover open job roles",
|
||||
"queries": [
|
||||
"jobs at Microsoft",
|
||||
"Microsoft Job Openings",
|
||||
"Jobs near me",
|
||||
"jobs at Boeing"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "You can track your package",
|
||||
"queries": [
|
||||
"USPS tracking"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Find somewhere new to explore",
|
||||
"queries": [
|
||||
"Directions to Berlin",
|
||||
"Directions to Tokyo",
|
||||
"Directions to New York"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Too tired to cook tonight?",
|
||||
"queries": [
|
||||
"KFC near me",
|
||||
"Burger King near me",
|
||||
"McDonalds near me"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Quickly convert your money",
|
||||
"queries": [
|
||||
"convert 250 USD to yen",
|
||||
"convert 500 USD to yen"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Learn to cook a new recipe",
|
||||
"queries": [
|
||||
"How to cook ratatouille",
|
||||
"How to cook lasagna"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Find places to stay!",
|
||||
"queries": [
|
||||
"Hotels Berlin Germany",
|
||||
"Hotels Amsterdam Netherlands"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "How's the economy?",
|
||||
"queries": [
|
||||
"sp 500"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Who won?",
|
||||
"queries": [
|
||||
"braves score"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Gaming time",
|
||||
"queries": [
|
||||
"Overwatch video game",
|
||||
"Call of duty video game"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Expand your vocabulary",
|
||||
"queries": [
|
||||
"definition definition"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "What time is it?",
|
||||
"queries": [
|
||||
"Japan time",
|
||||
"New York time"
|
||||
]
|
||||
},
|
||||
|
||||
{
|
||||
"title": "Maisons près de chez vous",
|
||||
"queries": [
|
||||
"Maisons près de chez moi"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Vous ressentez des symptômes ?",
|
||||
"queries": [
|
||||
"Éruption cutanée sur l'avant-bras",
|
||||
"Nez bouché",
|
||||
"Toux chatouilleuse"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Faites vos achats plus vite",
|
||||
"queries": [
|
||||
"Acheter une PS5",
|
||||
"Acheter une Xbox",
|
||||
"Offres sur les chaises"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Traduisez tout !",
|
||||
"queries": [
|
||||
"Traduction bienvenue à la maison en coréen",
|
||||
"Traduction bienvenue à la maison en japonais",
|
||||
"Traduction au revoir en japonais"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Rechercher paroles de chanson",
|
||||
"queries": [
|
||||
"Paroles de Debarge rhythm of the night"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Et si nous regardions ce film une nouvelle fois?",
|
||||
"queries": [
|
||||
"Alien film",
|
||||
"Film Aliens",
|
||||
"Film Alien 3",
|
||||
"Film Predator"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Planifiez une petite escapade",
|
||||
"queries": [
|
||||
"Vols Amsterdam-Tokyo",
|
||||
"Vols New York-Tokyo"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Consulter postes à pourvoir",
|
||||
"queries": [
|
||||
"emplois chez Microsoft",
|
||||
"Offres d'emploi Microsoft",
|
||||
"Emplois près de chez moi",
|
||||
"emplois chez Boeing"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Vous pouvez suivre votre colis",
|
||||
"queries": [
|
||||
"Suivi Chronopost"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Trouver un endroit à découvrir",
|
||||
"queries": [
|
||||
"Itinéraire vers Berlin",
|
||||
"Itinéraire vers Tokyo",
|
||||
"Itinéraire vers New York"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Trop fatigué pour cuisiner ce soir ?",
|
||||
"queries": [
|
||||
"KFC près de chez moi",
|
||||
"Burger King près de chez moi",
|
||||
"McDonalds près de chez moi"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Convertissez rapidement votre argent",
|
||||
"queries": [
|
||||
"convertir 250 EUR en yen",
|
||||
"convertir 500 EUR en yen"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Apprenez à cuisiner une nouvelle recette",
|
||||
"queries": [
|
||||
"Comment faire cuire la ratatouille",
|
||||
"Comment faire cuire les lasagnes"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Trouvez des emplacements pour rester!",
|
||||
"queries": [
|
||||
"Hôtels Berlin Allemagne",
|
||||
"Hôtels Amsterdam Pays-Bas"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Comment se porte l'économie ?",
|
||||
"queries": [
|
||||
"CAC 40"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Qui a gagné ?",
|
||||
"queries": [
|
||||
"score du Paris Saint-Germain"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Temps de jeu",
|
||||
"queries": [
|
||||
"Jeu vidéo Overwatch",
|
||||
"Jeu vidéo Call of Duty"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Enrichissez votre vocabulaire",
|
||||
"queries": [
|
||||
"definition definition"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Quelle heure est-il ?",
|
||||
"queries": [
|
||||
"Heure du Japon",
|
||||
"Heure de New York"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Vérifier la météo",
|
||||
"queries": [
|
||||
"Météo de Paris",
|
||||
"Météo de la France"
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Tenez-vous informé des sujets d'actualité",
|
||||
"queries": [
|
||||
"Augmentation Impots",
|
||||
"Mort célébrité"
|
||||
]
|
||||
}
|
||||
]
|
||||
1598
src/index.ts
Normal file
1598
src/index.ts
Normal file
File diff suppressed because it is too large
Load Diff
19
src/interface/Account.ts
Normal file
19
src/interface/Account.ts
Normal file
@@ -0,0 +1,19 @@
|
||||
export interface Account {
|
||||
/** Enable/disable this account (if false, account will be skipped during execution) */
|
||||
enabled?: boolean;
|
||||
email: string;
|
||||
password: string;
|
||||
/** Optional TOTP secret in Base32 (e.g., from Microsoft Authenticator setup) */
|
||||
totp?: string;
|
||||
/** Recovery email used during security challenge verification (mandatory) */
|
||||
recoveryEmail: string;
|
||||
proxy: AccountProxy;
|
||||
}
|
||||
|
||||
export interface AccountProxy {
|
||||
proxyAxios: boolean;
|
||||
url: string;
|
||||
port: number;
|
||||
password: string;
|
||||
username: string;
|
||||
}
|
||||
21
src/interface/ActivityHandler.ts
Normal file
21
src/interface/ActivityHandler.ts
Normal file
@@ -0,0 +1,21 @@
|
||||
import type { MorePromotion, PromotionalItem } from './DashboardData'
|
||||
import type { Page } from 'playwright'
|
||||
|
||||
/**
|
||||
* Activity handler contract for solving a single dashboard activity.
|
||||
* Implementations should be stateless (or hold only a reference to the bot)
|
||||
* and perform all required steps on the provided page.
|
||||
*/
|
||||
export interface ActivityHandler {
|
||||
/** Optional identifier for diagnostics */
|
||||
id?: string
|
||||
/**
|
||||
* Return true if this handler knows how to process the given activity.
|
||||
*/
|
||||
canHandle(activity: MorePromotion | PromotionalItem): boolean
|
||||
/**
|
||||
* Execute the activity on the provided page. The page is already
|
||||
* navigated to the activity tab/window by the caller.
|
||||
*/
|
||||
run(page: Page, activity: MorePromotion | PromotionalItem): Promise<void>
|
||||
}
|
||||
226
src/interface/AppUserData.ts
Normal file
226
src/interface/AppUserData.ts
Normal file
@@ -0,0 +1,226 @@
|
||||
export interface AppUserData {
|
||||
response: Response;
|
||||
correlationId: string;
|
||||
code: number;
|
||||
}
|
||||
|
||||
export interface Response {
|
||||
profile: Profile;
|
||||
balance: number;
|
||||
counters: null;
|
||||
promotions: Promotion[];
|
||||
catalog: null;
|
||||
goal_item: GoalItem;
|
||||
activities: null;
|
||||
cashback: null;
|
||||
orders: Order[];
|
||||
rebateProfile: null;
|
||||
rebatePayouts: null;
|
||||
giveProfile: GiveProfile;
|
||||
autoRedeemProfile: null;
|
||||
autoRedeemItem: null;
|
||||
thirdPartyProfile: null;
|
||||
notifications: null;
|
||||
waitlist: null;
|
||||
autoOpenFlyout: null;
|
||||
coupons: null;
|
||||
recommendedAffordableCatalog: null;
|
||||
}
|
||||
|
||||
export interface GiveProfile {
|
||||
give_user: string;
|
||||
give_organization: { [key: string]: GiveOrganization | null };
|
||||
first_give_optin: string;
|
||||
last_give_optout: string;
|
||||
give_lifetime_balance: string;
|
||||
give_lifetime_donation_balance: string;
|
||||
give_balance: string;
|
||||
form: null;
|
||||
}
|
||||
|
||||
export interface GiveOrganization {
|
||||
give_organization_donation_points: number;
|
||||
give_organization_donation_point_to_currency_ratio: number;
|
||||
give_organization_donation_currency: number;
|
||||
}
|
||||
|
||||
export interface GoalItem {
|
||||
name: string;
|
||||
provider: string;
|
||||
price: number;
|
||||
attributes: GoalItemAttributes;
|
||||
config: GoalItemConfig;
|
||||
}
|
||||
|
||||
export interface GoalItemAttributes {
|
||||
category: string;
|
||||
CategoryDescription: string;
|
||||
'desc.group_text': string;
|
||||
'desc.legal_text'?: string;
|
||||
'desc.sc_description': string;
|
||||
'desc.sc_title': string;
|
||||
display_order: string;
|
||||
ExtraLargeImage: string;
|
||||
group: string;
|
||||
group_image: string;
|
||||
group_sc_image: string;
|
||||
group_title: string;
|
||||
hidden?: string;
|
||||
large_image: string;
|
||||
large_sc_image: string;
|
||||
medium_image: string;
|
||||
MobileImage: string;
|
||||
original_price: string;
|
||||
Remarks?: string;
|
||||
ShortText?: string;
|
||||
showcase?: string;
|
||||
small_image: string;
|
||||
title: string;
|
||||
cimsid: string;
|
||||
user_defined_goal?: string;
|
||||
disable_bot_redemptions?: string;
|
||||
'desc.large_text'?: string;
|
||||
english_title?: string;
|
||||
etid?: string;
|
||||
sku?: string;
|
||||
coupon_discount?: string;
|
||||
}
|
||||
|
||||
export interface GoalItemConfig {
|
||||
amount: string;
|
||||
currencyCode: string;
|
||||
isHidden: string;
|
||||
PointToCurrencyConversionRatio: string;
|
||||
}
|
||||
|
||||
export interface Order {
|
||||
id: string;
|
||||
t: Date;
|
||||
sku: string;
|
||||
item_snapshot: ItemSnapshot;
|
||||
p: number;
|
||||
s: S;
|
||||
a: A;
|
||||
child_redemption: null;
|
||||
third_party_partner: null;
|
||||
log: Log[];
|
||||
}
|
||||
|
||||
export interface A {
|
||||
form?: string;
|
||||
OrderId: string;
|
||||
CorrelationId: string;
|
||||
Channel: string;
|
||||
Language: string;
|
||||
Country: string;
|
||||
EvaluationId: string;
|
||||
provider?: string;
|
||||
referenceOrderID?: string;
|
||||
externalRefID?: string;
|
||||
denomination?: string;
|
||||
rewardName?: string;
|
||||
sendEmail?: string;
|
||||
status?: string;
|
||||
createdAt?: Date;
|
||||
bal_before_deduct?: string;
|
||||
bal_after_deduct?: string;
|
||||
}
|
||||
|
||||
export interface ItemSnapshot {
|
||||
name: string;
|
||||
provider: string;
|
||||
price: number;
|
||||
attributes: GoalItemAttributes;
|
||||
config: ItemSnapshotConfig;
|
||||
}
|
||||
|
||||
export interface ItemSnapshotConfig {
|
||||
amount: string;
|
||||
countryCode: string;
|
||||
currencyCode: string;
|
||||
sku: string;
|
||||
}
|
||||
|
||||
export interface Log {
|
||||
time: Date;
|
||||
from: From;
|
||||
to: S;
|
||||
reason: string;
|
||||
}
|
||||
|
||||
export enum From {
|
||||
Created = 'Created',
|
||||
RiskApproved = 'RiskApproved',
|
||||
RiskReview = 'RiskReview'
|
||||
}
|
||||
|
||||
export enum S {
|
||||
Cancelled = 'Cancelled',
|
||||
RiskApproved = 'RiskApproved',
|
||||
RiskReview = 'RiskReview',
|
||||
Shipped = 'Shipped'
|
||||
}
|
||||
|
||||
export interface Profile {
|
||||
ruid: string;
|
||||
attributes: ProfileAttributes;
|
||||
offline_attributes: OfflineAttributes;
|
||||
}
|
||||
|
||||
export interface ProfileAttributes {
|
||||
publisher: string;
|
||||
publisher_upd: Date;
|
||||
creative: string;
|
||||
creative_upd: Date;
|
||||
program: string;
|
||||
program_upd: Date;
|
||||
country: string;
|
||||
country_upd: Date;
|
||||
referrerhash: string;
|
||||
referrerhash_upd: Date;
|
||||
optout_upd: Date;
|
||||
language: string;
|
||||
language_upd: Date;
|
||||
target: string;
|
||||
target_upd: Date;
|
||||
created: Date;
|
||||
created_upd: Date;
|
||||
epuid: string;
|
||||
epuid_upd: Date;
|
||||
goal: string;
|
||||
goal_upd: Date;
|
||||
waitlistattributes: string;
|
||||
waitlistattributes_upd: Date;
|
||||
serpbotscore_upd: Date;
|
||||
iscashbackeligible: string;
|
||||
cbedc: string;
|
||||
rlscpct_upd: Date;
|
||||
give_user: string;
|
||||
rebcpc_upd: Date;
|
||||
SerpBotScore_upd: Date;
|
||||
AdsBotScore_upd: Date;
|
||||
dbs_upd: Date;
|
||||
rbs: string;
|
||||
rbs_upd: Date;
|
||||
iris_segmentation: string;
|
||||
iris_segmentation_upd: Date;
|
||||
}
|
||||
|
||||
export interface OfflineAttributes {
|
||||
}
|
||||
|
||||
export interface Promotion {
|
||||
name: string;
|
||||
priority: number;
|
||||
attributes: { [key: string]: string };
|
||||
tags: Tag[];
|
||||
}
|
||||
|
||||
export enum Tag {
|
||||
AllowTrialUser = 'allow_trial_user',
|
||||
ExcludeGivePcparent = 'exclude_give_pcparent',
|
||||
ExcludeGlobalConfig = 'exclude_global_config',
|
||||
ExcludeHidden = 'exclude_hidden',
|
||||
LOCString = 'locString',
|
||||
NonGlobalConfig = 'non_global_config'
|
||||
}
|
||||
219
src/interface/Config.ts
Normal file
219
src/interface/Config.ts
Normal file
@@ -0,0 +1,219 @@
|
||||
export interface Config {
|
||||
baseURL: string;
|
||||
sessionPath: string;
|
||||
headless: boolean;
|
||||
browser?: ConfigBrowser; // Optional nested browser config
|
||||
fingerprinting?: ConfigFingerprinting; // Optional nested fingerprinting config
|
||||
parallel: boolean;
|
||||
runOnZeroPoints: boolean;
|
||||
clusters: number;
|
||||
saveFingerprint: ConfigSaveFingerprint;
|
||||
workers: ConfigWorkers;
|
||||
searchOnBingLocalQueries: boolean;
|
||||
globalTimeout: number | string;
|
||||
searchSettings: ConfigSearchSettings;
|
||||
humanization?: ConfigHumanization; // Anti-ban humanization controls
|
||||
retryPolicy?: ConfigRetryPolicy; // Global retry/backoff policy
|
||||
jobState?: ConfigJobState; // Persistence of per-activity checkpoints
|
||||
logExcludeFunc: string[];
|
||||
webhookLogExcludeFunc: string[];
|
||||
logging?: ConfigLogging; // Preserve original logging object (for live webhook settings)
|
||||
proxy: ConfigProxy;
|
||||
webhook: ConfigWebhook;
|
||||
conclusionWebhook?: ConfigWebhook; // Optional secondary webhook for final summary
|
||||
ntfy: ConfigNtfy;
|
||||
diagnostics?: ConfigDiagnostics;
|
||||
update?: ConfigUpdate;
|
||||
schedule?: ConfigSchedule;
|
||||
passesPerRun?: number;
|
||||
buyMode?: ConfigBuyMode; // Optional manual spending mode
|
||||
vacation?: ConfigVacation; // Optional monthly contiguous off-days
|
||||
crashRecovery?: ConfigCrashRecovery; // Automatic restart / graceful shutdown
|
||||
riskManagement?: ConfigRiskManagement; // NEW: Risk-aware throttling and ban prediction
|
||||
analytics?: ConfigAnalytics; // NEW: Performance dashboard and metrics tracking
|
||||
dryRun?: boolean; // NEW: Dry-run mode (simulate without executing)
|
||||
queryDiversity?: ConfigQueryDiversity; // NEW: Multi-source query generation
|
||||
}
|
||||
|
||||
export interface ConfigSaveFingerprint {
|
||||
mobile: boolean;
|
||||
desktop: boolean;
|
||||
}
|
||||
|
||||
export interface ConfigBrowser {
|
||||
headless?: boolean;
|
||||
globalTimeout?: number | string;
|
||||
}
|
||||
|
||||
export interface ConfigFingerprinting {
|
||||
saveFingerprint?: ConfigSaveFingerprint;
|
||||
}
|
||||
|
||||
export interface ConfigSearchSettings {
|
||||
useGeoLocaleQueries: boolean;
|
||||
scrollRandomResults: boolean;
|
||||
clickRandomResults: boolean;
|
||||
searchDelay: ConfigSearchDelay;
|
||||
retryMobileSearchAmount: number;
|
||||
localFallbackCount?: number; // Number of local fallback queries to sample when trends fail
|
||||
extraFallbackRetries?: number; // Additional mini-retry loops with fallback terms
|
||||
}
|
||||
|
||||
export interface ConfigSearchDelay {
|
||||
min: number | string;
|
||||
max: number | string;
|
||||
}
|
||||
|
||||
export interface ConfigWebhook {
|
||||
enabled: boolean;
|
||||
url: string;
|
||||
}
|
||||
|
||||
export interface ConfigNtfy {
|
||||
enabled: boolean;
|
||||
url: string;
|
||||
topic: string;
|
||||
authToken?: string; // Optional authentication token
|
||||
}
|
||||
|
||||
export interface ConfigProxy {
|
||||
proxyGoogleTrends: boolean;
|
||||
proxyBingTerms: boolean;
|
||||
}
|
||||
|
||||
export interface ConfigDiagnostics {
|
||||
enabled?: boolean; // master toggle
|
||||
saveScreenshot?: boolean; // capture .png
|
||||
saveHtml?: boolean; // capture .html
|
||||
maxPerRun?: number; // cap number of captures per run
|
||||
retentionDays?: number; // delete older diagnostic folders
|
||||
}
|
||||
|
||||
export interface ConfigUpdate {
|
||||
git?: boolean; // if true, run git pull + npm ci + npm run build after completion
|
||||
docker?: boolean; // if true, run docker update routine (compose pull/up) after completion
|
||||
scriptPath?: string; // optional custom path to update script relative to repo root
|
||||
autoUpdateConfig?: boolean; // if true, allow auto-update of config.jsonc when remote changes it (default: false to preserve user settings)
|
||||
autoUpdateAccounts?: boolean; // if true, allow auto-update of accounts.json when remote changes it (default: false to preserve credentials)
|
||||
}
|
||||
|
||||
export interface ConfigBuyMode {
|
||||
enabled?: boolean; // if true, force buy mode session
|
||||
maxMinutes?: number; // session duration cap
|
||||
}
|
||||
|
||||
export interface ConfigSchedule {
|
||||
enabled?: boolean;
|
||||
time?: string; // Back-compat: accepts "HH:mm" or "h:mm AM/PM"
|
||||
// New optional explicit times
|
||||
time12?: string; // e.g., "9:00 AM"
|
||||
time24?: string; // e.g., "09:00"
|
||||
timeZone?: string; // IANA TZ e.g., "America/New_York"
|
||||
useAmPm?: boolean; // If true, prefer time12 + AM/PM style; if false, prefer time24. If undefined, back-compat behavior.
|
||||
runImmediatelyOnStart?: boolean; // if true, run once immediately when process starts
|
||||
cron?: string | string[]; // Optional cron expression(s) (standard 5-field or 6-field) for advanced scheduling
|
||||
}
|
||||
|
||||
export interface ConfigVacation {
|
||||
enabled?: boolean; // default false
|
||||
minDays?: number; // default 3
|
||||
maxDays?: number; // default 5
|
||||
}
|
||||
|
||||
export interface ConfigCrashRecovery {
|
||||
autoRestart?: boolean; // Restart the root process after fatal crash
|
||||
maxRestarts?: number; // Max restart attempts (default 2)
|
||||
backoffBaseMs?: number; // Base backoff before restart (default 2000)
|
||||
restartFailedWorker?: boolean; // (future) attempt to respawn crashed worker
|
||||
restartFailedWorkerAttempts?: number; // attempts per worker (default 1)
|
||||
}
|
||||
|
||||
export interface ConfigWorkers {
|
||||
doDailySet: boolean;
|
||||
doMorePromotions: boolean;
|
||||
doPunchCards: boolean;
|
||||
doDesktopSearch: boolean;
|
||||
doMobileSearch: boolean;
|
||||
doDailyCheckIn: boolean;
|
||||
doReadToEarn: boolean;
|
||||
bundleDailySetWithSearch?: boolean; // If true, run desktop search right after Daily Set
|
||||
}
|
||||
|
||||
// Anti-ban humanization
|
||||
export interface ConfigHumanization {
|
||||
// Master toggle for Human Mode. When false, humanization is minimized.
|
||||
enabled?: boolean;
|
||||
// If true, stop processing remaining accounts after a ban is detected
|
||||
stopOnBan?: boolean;
|
||||
// If true, send an immediate webhook/NTFY alert when a ban is detected
|
||||
immediateBanAlert?: boolean;
|
||||
// Additional random waits between actions
|
||||
actionDelay?: { min: number | string; max: number | string };
|
||||
// Probability [0..1] to perform micro mouse moves per step
|
||||
gestureMoveProb?: number;
|
||||
// Probability [0..1] to perform tiny scrolls per step
|
||||
gestureScrollProb?: number;
|
||||
// Allowed execution windows (local time). Each item is "HH:mm-HH:mm".
|
||||
// If provided, runs outside these windows will be delayed until the next allowed window.
|
||||
allowedWindows?: string[];
|
||||
// Randomly skip N days per week to look more human (0-7). Default 1.
|
||||
randomOffDaysPerWeek?: number;
|
||||
}
|
||||
|
||||
// Retry/backoff policy
|
||||
export interface ConfigRetryPolicy {
|
||||
maxAttempts?: number; // default 3
|
||||
baseDelay?: number | string; // default 1000ms
|
||||
maxDelay?: number | string; // default 30s
|
||||
multiplier?: number; // default 2
|
||||
jitter?: number; // 0..1; default 0.2
|
||||
}
|
||||
|
||||
// Job state persistence
|
||||
export interface ConfigJobState {
|
||||
enabled?: boolean; // default true
|
||||
dir?: string; // base directory; defaults to <sessionPath>/job-state
|
||||
skipCompletedAccounts?: boolean; // if true (default), skip accounts already completed for the day
|
||||
}
|
||||
|
||||
// Live logging configuration
|
||||
export interface ConfigLoggingLive {
|
||||
enabled?: boolean; // master switch for live webhook logs
|
||||
redactEmails?: boolean; // if true, redact emails in outbound logs
|
||||
}
|
||||
|
||||
export interface ConfigLogging {
|
||||
excludeFunc?: string[];
|
||||
webhookExcludeFunc?: string[];
|
||||
live?: ConfigLoggingLive;
|
||||
liveWebhookUrl?: string; // legacy/dedicated live webhook override
|
||||
redactEmails?: boolean; // legacy top-level redaction flag
|
||||
// Optional nested live.url support (already handled dynamically in Logger)
|
||||
[key: string]: unknown; // forward compatibility
|
||||
}
|
||||
|
||||
// CommunityHelp removed (privacy-first policy)
|
||||
|
||||
// NEW FEATURES: Risk Management, Analytics, Query Diversity
|
||||
export interface ConfigRiskManagement {
|
||||
enabled?: boolean; // master toggle for risk-aware throttling
|
||||
autoAdjustDelays?: boolean; // automatically increase delays when risk is high
|
||||
stopOnCritical?: boolean; // halt execution if risk reaches critical level
|
||||
banPrediction?: boolean; // enable ML-style ban prediction
|
||||
riskThreshold?: number; // 0-100, pause if risk exceeds this
|
||||
}
|
||||
|
||||
export interface ConfigAnalytics {
|
||||
enabled?: boolean; // track performance metrics
|
||||
retentionDays?: number; // how long to keep analytics data
|
||||
exportMarkdown?: boolean; // generate markdown reports
|
||||
webhookSummary?: boolean; // send analytics via webhook
|
||||
}
|
||||
|
||||
export interface ConfigQueryDiversity {
|
||||
enabled?: boolean; // use multi-source query generation
|
||||
sources?: Array<'google-trends' | 'reddit' | 'news' | 'wikipedia' | 'local-fallback'>; // which sources to use
|
||||
maxQueriesPerSource?: number; // limit per source
|
||||
cacheMinutes?: number; // cache duration
|
||||
}
|
||||
|
||||
701
src/interface/DashboardData.ts
Normal file
701
src/interface/DashboardData.ts
Normal file
@@ -0,0 +1,701 @@
|
||||
export interface DashboardData {
|
||||
userStatus: UserStatus;
|
||||
promotionalItem: PromotionalItem;
|
||||
dailySetPromotions: { [key: string]: PromotionalItem[] };
|
||||
streakPromotion: StreakPromotion;
|
||||
streakBonusPromotions: StreakBonusPromotion[];
|
||||
punchCards: PunchCard[];
|
||||
dashboardFlights: DashboardFlights;
|
||||
morePromotions: MorePromotion[];
|
||||
suggestedRewards: AutoRedeemItem[];
|
||||
coachMarks: CoachMarks;
|
||||
welcomeTour: WelcomeTour;
|
||||
userInterests: UserInterests;
|
||||
isVisualParityTest: boolean;
|
||||
mbingFlight: null;
|
||||
langCountryMismatchPromo: null;
|
||||
machineTranslationPromo: MachineTranslationPromo;
|
||||
autoRedeemItem: AutoRedeemItem;
|
||||
userProfile: UserProfile;
|
||||
}
|
||||
|
||||
export interface AutoRedeemItem {
|
||||
name: null | string;
|
||||
price: number;
|
||||
provider: null | string;
|
||||
disabled: boolean;
|
||||
category: string;
|
||||
title: string;
|
||||
variableGoalSpecificTitle: string;
|
||||
smallImageUrl: string;
|
||||
mediumImageUrl: string;
|
||||
largeImageUrl: string;
|
||||
largeShowcaseImageUrl: string;
|
||||
description: Description;
|
||||
showcase: boolean;
|
||||
showcaseInAllCategory: boolean;
|
||||
originalPrice: number;
|
||||
discountedPrice: number;
|
||||
popular: boolean;
|
||||
isTestOnly: boolean;
|
||||
groupId: string;
|
||||
inGroup: boolean;
|
||||
isDefaultItemInGroup: boolean;
|
||||
groupTitle: string;
|
||||
groupImageUrl: string;
|
||||
groupShowcaseImageUrl: string;
|
||||
instantWinGameId: string;
|
||||
instantWinPlayAgainSku: string;
|
||||
isLowInStock: boolean;
|
||||
isOutOfStock: boolean;
|
||||
getCodeMessage: string;
|
||||
disableEmail: boolean;
|
||||
stockMessage: string;
|
||||
comingSoonFlag: boolean;
|
||||
isGenericDonation: boolean;
|
||||
isVariableRedemptionItem: boolean;
|
||||
variableRedemptionItemCurrencySymbol: null;
|
||||
variableRedemptionItemMin: number;
|
||||
variableRedemptionItemMax: number;
|
||||
variableItemConfigPointsToCurrencyConversionRatio: number;
|
||||
isAutoRedeem: boolean;
|
||||
}
|
||||
|
||||
export interface Description {
|
||||
itemGroupText: string;
|
||||
smallText: string;
|
||||
largeText: string;
|
||||
legalText: string;
|
||||
showcaseTitle: string;
|
||||
showcaseDescription: string;
|
||||
}
|
||||
|
||||
export interface CoachMarks {
|
||||
streaks: WelcomeTour;
|
||||
}
|
||||
|
||||
export interface WelcomeTour {
|
||||
promotion: DashboardImpression;
|
||||
slides: Slide[];
|
||||
}
|
||||
|
||||
export interface DashboardImpression {
|
||||
name: null | string;
|
||||
priority: number;
|
||||
attributes: { [key: string]: string } | null;
|
||||
offerId: string;
|
||||
complete: boolean;
|
||||
counter: number;
|
||||
activityProgress: number;
|
||||
activityProgressMax: number;
|
||||
pointProgressMax: number;
|
||||
pointProgress: number;
|
||||
promotionType: string;
|
||||
promotionSubtype: string;
|
||||
title: string;
|
||||
extBannerTitle: string;
|
||||
titleStyle: string;
|
||||
theme: string;
|
||||
description: string;
|
||||
extBannerDescription: string;
|
||||
descriptionStyle: string;
|
||||
showcaseTitle: string;
|
||||
showcaseDescription: string;
|
||||
imageUrl: string;
|
||||
dynamicImage: string;
|
||||
smallImageUrl: string;
|
||||
backgroundImageUrl: string;
|
||||
showcaseBackgroundImageUrl: string;
|
||||
showcaseBackgroundLargeImageUrl: string;
|
||||
promotionBackgroundLeft: string;
|
||||
promotionBackgroundRight: string;
|
||||
iconUrl: string;
|
||||
animatedIconUrl: string;
|
||||
animatedLargeBackgroundImageUrl: string;
|
||||
destinationUrl: string;
|
||||
linkText: string;
|
||||
hash: string;
|
||||
activityType: string;
|
||||
isRecurring: boolean;
|
||||
isHidden: boolean;
|
||||
isTestOnly: boolean;
|
||||
isGiveEligible: boolean;
|
||||
level: string;
|
||||
slidesCount: number;
|
||||
legalText: string;
|
||||
legalLinkText: string;
|
||||
deviceType: string;
|
||||
benefits?: Benefit[];
|
||||
supportedLevelKeys?: string[];
|
||||
supportedLevelTitles?: string[];
|
||||
supportedLevelTitlesMobile?: string[];
|
||||
activeLevel?: string;
|
||||
isCodexAutoJoinUser?: boolean;
|
||||
}
|
||||
|
||||
export interface Benefit {
|
||||
key: string;
|
||||
text: string;
|
||||
url: null | string;
|
||||
helpText: null | string;
|
||||
supportedLevels: SupportedLevels;
|
||||
}
|
||||
|
||||
export interface SupportedLevels {
|
||||
level1?: string;
|
||||
level2: string;
|
||||
level2XBoxGold: string;
|
||||
}
|
||||
|
||||
export interface Slide {
|
||||
slideType: null;
|
||||
slideShowTourId: string;
|
||||
id: number;
|
||||
title: string;
|
||||
subtitle: null;
|
||||
subtitle1: null;
|
||||
description: string;
|
||||
description1: null;
|
||||
imageTitle: null;
|
||||
image2Title: null | string;
|
||||
image3Title: null | string;
|
||||
image4Title: null | string;
|
||||
imageDescription: null;
|
||||
image2Description: null | string;
|
||||
image3Description: null | string;
|
||||
image4Description: null | string;
|
||||
imageUrl: null | string;
|
||||
darkImageUrl: null;
|
||||
image2Url: null | string;
|
||||
image3Url: null | string;
|
||||
image4Url: null | string;
|
||||
layout: null | string;
|
||||
actionButtonText: null | string;
|
||||
actionButtonUrl: null | string;
|
||||
foregroundImageUrl: null;
|
||||
backLink: null;
|
||||
nextLink: CloseLink;
|
||||
closeLink: CloseLink;
|
||||
footnote: null | string;
|
||||
termsText: null;
|
||||
termsUrl: null;
|
||||
privacyText: null;
|
||||
privacyUrl: null;
|
||||
taggedItem: null | string;
|
||||
slideVisited: boolean;
|
||||
aboutPageLinkText: null;
|
||||
aboutPageLink: null;
|
||||
redeemLink: null;
|
||||
rewardsLink: null;
|
||||
quizLinks?: string[];
|
||||
quizCorrectAnswerTitle?: string;
|
||||
quizWrongAnswerTitle?: string;
|
||||
quizAnswerDescription?: string;
|
||||
}
|
||||
|
||||
export interface CloseLink {
|
||||
text: null | string;
|
||||
url: null | string;
|
||||
}
|
||||
|
||||
export interface PromotionalItem {
|
||||
name: string;
|
||||
priority: number;
|
||||
attributes: PromotionalItemAttributes;
|
||||
offerId: string;
|
||||
complete: boolean;
|
||||
counter: number;
|
||||
activityProgress: number;
|
||||
activityProgressMax: number;
|
||||
pointProgressMax: number;
|
||||
pointProgress: number;
|
||||
promotionType: Type;
|
||||
promotionSubtype: string;
|
||||
title: string;
|
||||
extBannerTitle: string;
|
||||
titleStyle: string;
|
||||
theme: string;
|
||||
description: string;
|
||||
extBannerDescription: string;
|
||||
descriptionStyle: string;
|
||||
showcaseTitle: string;
|
||||
showcaseDescription: string;
|
||||
imageUrl: string;
|
||||
dynamicImage: string;
|
||||
smallImageUrl: string;
|
||||
backgroundImageUrl: string;
|
||||
showcaseBackgroundImageUrl: string;
|
||||
showcaseBackgroundLargeImageUrl: string;
|
||||
promotionBackgroundLeft: string;
|
||||
promotionBackgroundRight: string;
|
||||
iconUrl: string;
|
||||
animatedIconUrl: string;
|
||||
animatedLargeBackgroundImageUrl: string;
|
||||
destinationUrl: string;
|
||||
linkText: string;
|
||||
hash: string;
|
||||
activityType: string;
|
||||
isRecurring: boolean;
|
||||
isHidden: boolean;
|
||||
isTestOnly: boolean;
|
||||
isGiveEligible: boolean;
|
||||
level: string;
|
||||
slidesCount: number;
|
||||
legalText: string;
|
||||
legalLinkText: string;
|
||||
deviceType: string;
|
||||
}
|
||||
|
||||
export interface PromotionalItemAttributes {
|
||||
animated_icon?: string;
|
||||
bg_image: string;
|
||||
complete: GiveEligible;
|
||||
daily_set_date?: string;
|
||||
description: string;
|
||||
destination: string;
|
||||
icon: string;
|
||||
image: string;
|
||||
link_text: string;
|
||||
max: string;
|
||||
offerid: string;
|
||||
progress: string;
|
||||
sc_bg_image: string;
|
||||
sc_bg_large_image: string;
|
||||
small_image: string;
|
||||
state: State;
|
||||
title: string;
|
||||
type: Type;
|
||||
give_eligible: GiveEligible;
|
||||
activity_max?: string;
|
||||
activity_progress?: string;
|
||||
is_wot?: GiveEligible;
|
||||
offer_counter?: string;
|
||||
promotional?: GiveEligible;
|
||||
parentPunchcards?: string;
|
||||
'classification.DescriptionText'?: string;
|
||||
'classification.PunchcardChildrenCount'?: string;
|
||||
'classification.PunchcardEndDate'?: Date;
|
||||
'classification.Template'?: string;
|
||||
'classification.TitleText'?: string;
|
||||
}
|
||||
|
||||
export enum GiveEligible {
|
||||
False = 'False',
|
||||
True = 'True'
|
||||
}
|
||||
|
||||
export enum State {
|
||||
Default = 'Default'
|
||||
}
|
||||
|
||||
export enum Type {
|
||||
Quiz = 'quiz',
|
||||
Urlreward = 'urlreward',
|
||||
UrlrewardUrlrewardUrlrewardUrlrewardUrlreward = 'urlreward,urlreward,urlreward,urlreward,urlreward'
|
||||
}
|
||||
|
||||
export interface DashboardFlights {
|
||||
dashboardbannernav: string;
|
||||
togglegiveuser: string;
|
||||
spotifyRedirect: string;
|
||||
give_eligible: GiveEligible;
|
||||
destination: string;
|
||||
}
|
||||
|
||||
export interface MachineTranslationPromo {
|
||||
}
|
||||
|
||||
export interface MorePromotion {
|
||||
name: string;
|
||||
priority: number;
|
||||
attributes: { [key: string]: string };
|
||||
offerId: string;
|
||||
complete: boolean;
|
||||
counter: number;
|
||||
activityProgress: number;
|
||||
activityProgressMax: number;
|
||||
pointProgressMax: number;
|
||||
pointProgress: number;
|
||||
promotionType: string;
|
||||
promotionSubtype: string;
|
||||
title: string;
|
||||
extBannerTitle: string;
|
||||
titleStyle: string;
|
||||
theme: string;
|
||||
description: string;
|
||||
extBannerDescription: string;
|
||||
descriptionStyle: string;
|
||||
showcaseTitle: string;
|
||||
showcaseDescription: string;
|
||||
imageUrl: string;
|
||||
dynamicImage: string;
|
||||
smallImageUrl: string;
|
||||
backgroundImageUrl: string;
|
||||
showcaseBackgroundImageUrl: string;
|
||||
showcaseBackgroundLargeImageUrl: string;
|
||||
promotionBackgroundLeft: string;
|
||||
promotionBackgroundRight: string;
|
||||
iconUrl: string;
|
||||
animatedIconUrl: string;
|
||||
animatedLargeBackgroundImageUrl: string;
|
||||
destinationUrl: string;
|
||||
linkText: string;
|
||||
hash: string;
|
||||
activityType: string;
|
||||
isRecurring: boolean;
|
||||
isHidden: boolean;
|
||||
isTestOnly: boolean;
|
||||
isGiveEligible: boolean;
|
||||
level: string;
|
||||
slidesCount: number;
|
||||
legalText: string;
|
||||
legalLinkText: string;
|
||||
deviceType: string;
|
||||
exclusiveLockedFeatureType: string;
|
||||
exclusiveLockedFeatureStatus: string;
|
||||
}
|
||||
|
||||
export interface PunchCard {
|
||||
name: string;
|
||||
parentPromotion?: PromotionalItem;
|
||||
childPromotions: PromotionalItem[];
|
||||
}
|
||||
|
||||
export interface StreakBonusPromotion {
|
||||
name: string;
|
||||
priority: number;
|
||||
attributes: StreakBonusPromotionAttributes;
|
||||
offerId: string;
|
||||
complete: boolean;
|
||||
counter: number;
|
||||
activityProgress: number;
|
||||
activityProgressMax: number;
|
||||
pointProgressMax: number;
|
||||
pointProgress: number;
|
||||
promotionType: string;
|
||||
promotionSubtype: string;
|
||||
title: string;
|
||||
extBannerTitle: string;
|
||||
titleStyle: string;
|
||||
theme: string;
|
||||
description: string;
|
||||
extBannerDescription: string;
|
||||
descriptionStyle: string;
|
||||
showcaseTitle: string;
|
||||
showcaseDescription: string;
|
||||
imageUrl: string;
|
||||
dynamicImage: string;
|
||||
smallImageUrl: string;
|
||||
backgroundImageUrl: string;
|
||||
showcaseBackgroundImageUrl: string;
|
||||
showcaseBackgroundLargeImageUrl: string;
|
||||
promotionBackgroundLeft: string;
|
||||
promotionBackgroundRight: string;
|
||||
iconUrl: string;
|
||||
animatedIconUrl: string;
|
||||
animatedLargeBackgroundImageUrl: string;
|
||||
destinationUrl: string;
|
||||
linkText: string;
|
||||
hash: string;
|
||||
activityType: string;
|
||||
isRecurring: boolean;
|
||||
isHidden: boolean;
|
||||
isTestOnly: boolean;
|
||||
isGiveEligible: boolean;
|
||||
level: string;
|
||||
slidesCount: number;
|
||||
legalText: string;
|
||||
legalLinkText: string;
|
||||
deviceType: string;
|
||||
}
|
||||
|
||||
export interface StreakBonusPromotionAttributes {
|
||||
hidden: GiveEligible;
|
||||
type: string;
|
||||
title: string;
|
||||
description: string;
|
||||
description_localizedkey: string;
|
||||
image: string;
|
||||
animated_icon: string;
|
||||
activity_progress: string;
|
||||
activity_max: string;
|
||||
bonus_earned: string;
|
||||
break_description: string;
|
||||
give_eligible: GiveEligible;
|
||||
destination: string;
|
||||
}
|
||||
|
||||
export interface StreakPromotion {
|
||||
lastUpdatedDate: Date;
|
||||
breakImageUrl: string;
|
||||
lifetimeMaxValue: number;
|
||||
bonusPointsEarned: number;
|
||||
name: string;
|
||||
priority: number;
|
||||
attributes: StreakPromotionAttributes;
|
||||
offerId: string;
|
||||
complete: boolean;
|
||||
counter: number;
|
||||
activityProgress: number;
|
||||
activityProgressMax: number;
|
||||
pointProgressMax: number;
|
||||
pointProgress: number;
|
||||
promotionType: string;
|
||||
promotionSubtype: string;
|
||||
title: string;
|
||||
extBannerTitle: string;
|
||||
titleStyle: string;
|
||||
theme: string;
|
||||
description: string;
|
||||
extBannerDescription: string;
|
||||
descriptionStyle: string;
|
||||
showcaseTitle: string;
|
||||
showcaseDescription: string;
|
||||
imageUrl: string;
|
||||
dynamicImage: string;
|
||||
smallImageUrl: string;
|
||||
backgroundImageUrl: string;
|
||||
showcaseBackgroundImageUrl: string;
|
||||
showcaseBackgroundLargeImageUrl: string;
|
||||
promotionBackgroundLeft: string;
|
||||
promotionBackgroundRight: string;
|
||||
iconUrl: string;
|
||||
animatedIconUrl: string;
|
||||
animatedLargeBackgroundImageUrl: string;
|
||||
destinationUrl: string;
|
||||
linkText: string;
|
||||
hash: string;
|
||||
activityType: string;
|
||||
isRecurring: boolean;
|
||||
isHidden: boolean;
|
||||
isTestOnly: boolean;
|
||||
isGiveEligible: boolean;
|
||||
level: string;
|
||||
slidesCount: number;
|
||||
legalText: string;
|
||||
legalLinkText: string;
|
||||
deviceType: string;
|
||||
}
|
||||
|
||||
export interface StreakPromotionAttributes {
|
||||
hidden: GiveEligible;
|
||||
type: string;
|
||||
title: string;
|
||||
image: string;
|
||||
activity_progress: string;
|
||||
last_updated: Date;
|
||||
break_image: string;
|
||||
lifetime_max: string;
|
||||
bonus_points: string;
|
||||
give_eligible: GiveEligible;
|
||||
destination: string;
|
||||
}
|
||||
|
||||
// "User interests" promotion entry; shares the common promotion shape used by
// StreakPromotion / ActivityAndQuiz but carries its own attribute bag.
// NOTE(review): field meanings are inferred from names only — verify against live API responses.
export interface UserInterests {
    name: string;
    priority: number;
    attributes: UserInterestsAttributes;
    offerId: string;
    complete: boolean;
    counter: number;
    activityProgress: number;
    activityProgressMax: number;
    pointProgressMax: number;
    pointProgress: number;
    promotionType: string;
    promotionSubtype: string;
    title: string;
    extBannerTitle: string;
    titleStyle: string;
    theme: string;
    description: string;
    extBannerDescription: string;
    descriptionStyle: string;
    showcaseTitle: string;
    showcaseDescription: string;
    imageUrl: string;
    dynamicImage: string;
    smallImageUrl: string;
    backgroundImageUrl: string;
    showcaseBackgroundImageUrl: string;
    showcaseBackgroundLargeImageUrl: string;
    promotionBackgroundLeft: string;
    promotionBackgroundRight: string;
    iconUrl: string;
    animatedIconUrl: string;
    animatedLargeBackgroundImageUrl: string;
    destinationUrl: string;
    linkText: string;
    hash: string;
    activityType: string;
    isRecurring: boolean;
    isHidden: boolean;
    isTestOnly: boolean;
    isGiveEligible: boolean;
    level: string;
    slidesCount: number;
    legalText: string;
    legalLinkText: string;
    deviceType: string;
}

// Raw snake_case attribute bag attached to a UserInterests promotion.
export interface UserInterestsAttributes {
    hidden: GiveEligible;
    give_eligible: GiveEligible;
    destination: string;
}
|
||||
|
||||
// Rewards user profile record: an opaque id plus a flat attribute bag.
export interface UserProfile {
    ruid: string;
    attributes: UserProfileAttributes;
}

// Profile attributes. The `*_upd` fields appear to be last-updated timestamps
// for the matching base field — inferred from naming; verify against live data.
export interface UserProfileAttributes {
    publisher: string;
    publisher_upd: Date;
    creative: string;
    creative_upd: Date;
    program: string;
    program_upd: Date;
    country: string;
    country_upd: Date;
    referrerhash: string;
    referrerhash_upd: Date;
    optout_upd: Date;
    language: string;
    language_upd: Date;
    target: string;
    target_upd: Date;
    created: Date;
    created_upd: Date;
    epuid: string;
    epuid_upd: Date;
    waitlistattributes: string;
    waitlistattributes_upd: Date;
    cbedc: GiveEligible;
    iscashbackeligible: GiveEligible;
    give_user: GiveEligible;
}
|
||||
|
||||
// Aggregate account status from the dashboard: points balances, level info,
// per-category counters, and give-mode state.
export interface UserStatus {
    levelInfo: LevelInfo;
    availablePoints: number;
    lifetimePoints: number;
    lifetimePointsRedeemed: number;
    ePuid: string;
    redeemGoal: AutoRedeemItem;
    counters: Counters;
    lastOrder: LastOrder;
    dashboardImpression: DashboardImpression;
    referrerProgressInfo: ReferrerProgressInfo;
    isGiveModeOn: boolean;
    giveBalance: number;
    firstTimeGiveModeOptIn: null; // only ever observed as null in captured payloads
    giveOrganizationName: string;
    lifetimeGivingPoints: number;
    isRewardsUser: boolean;
    isMuidTrialUser: boolean;
}

// Per-category promotion counters keyed by earning activity type.
export interface Counters {
    pcSearch: DashboardImpression[];
    mobileSearch?: DashboardImpression[]; // absent for accounts without mobile search (e.g. low level)
    shopAndEarn: DashboardImpression[];
    activityAndQuiz: ActivityAndQuiz[];
    dailyPoint: DashboardImpression[];
}
|
||||
|
||||
// Activity/quiz promotion entry; shares the common promotion shape used by
// StreakPromotion / UserInterests but carries its own attribute bag.
// NOTE(review): field meanings are inferred from names only — verify against live API responses.
export interface ActivityAndQuiz {
    name: string;
    priority: number;
    attributes: ActivityAndQuizAttributes;
    offerId: string;
    complete: boolean;
    counter: number;
    activityProgress: number;
    activityProgressMax: number;
    pointProgressMax: number;
    pointProgress: number;
    promotionType: string;
    promotionSubtype: string;
    title: string;
    extBannerTitle: string;
    titleStyle: string;
    theme: string;
    description: string;
    extBannerDescription: string;
    descriptionStyle: string;
    showcaseTitle: string;
    showcaseDescription: string;
    imageUrl: string;
    dynamicImage: string;
    smallImageUrl: string;
    backgroundImageUrl: string;
    showcaseBackgroundImageUrl: string;
    showcaseBackgroundLargeImageUrl: string;
    promotionBackgroundLeft: string;
    promotionBackgroundRight: string;
    iconUrl: string;
    animatedIconUrl: string;
    animatedLargeBackgroundImageUrl: string;
    destinationUrl: string;
    linkText: string;
    hash: string;
    activityType: string;
    isRecurring: boolean;
    isHidden: boolean;
    isTestOnly: boolean;
    isGiveEligible: boolean;
    level: string;
    slidesCount: number;
    legalText: string;
    legalLinkText: string;
    deviceType: string;
}

// Raw attribute bag attached to an ActivityAndQuiz promotion.
// The quoted key is delivered verbatim by the API, dot included.
export interface ActivityAndQuizAttributes {
    type: string;
    title: string;
    link_text: string;
    description: string;
    foreground_color: string;
    image: string;
    recurring: string;
    destination: string;
    'classification.ShowProgress': GiveEligible;
    hidden: GiveEligible;
    give_eligible: GiveEligible;
}
|
||||
|
||||
// Most recent redemption order. Several fields are only ever observed as null
// in captured payloads (e.g. for accounts with no order history).
export interface LastOrder {
    id: null;
    price: number;
    status: null;
    sku: null;
    timestamp: Date;
    catalogItem: null;
}

// Account level/progress summary shown on the dashboard.
export interface LevelInfo {
    activeLevel: string;
    activeLevelName: string;
    progress: number;
    progressMax: number;
    levels: Level[];
    benefitsPromotion: DashboardImpression;
}

// One entry of the level ladder (e.g. Level 1 / Level 2).
export interface Level {
    key: string;
    active: boolean; // true for the account's current level
    name: string;
    tasks: CloseLink[];
    privileges: CloseLink[];
}

// Referral program progress for this account.
export interface ReferrerProgressInfo {
    pointsEarned: number;
    pointsMax: number;
    isComplete: boolean;
    promotions: string[];
}
|
||||
44
src/interface/GoogleDailyTrends.ts
Normal file
44
src/interface/GoogleDailyTrends.ts
Normal file
@@ -0,0 +1,44 @@
|
||||
// Shape of the Google Daily Trends JSON feed, used to generate search terms.
// Top-level envelope: the feed nests everything under `default`.
export interface GoogleTrends {
    default: Default;
}

export interface Default {
    trendingSearchesDays: TrendingSearchesDay[];
    endDateForNextRequest: string; // pagination cursor for the next request
    rssFeedPageUrl: string;
}

// Trending searches bucketed per calendar day.
export interface TrendingSearchesDay {
    date: string;
    formattedDate: string;
    trendingSearches: TrendingSearch[];
}

// One trending topic with related queries and supporting news articles.
export interface TrendingSearch {
    title: Title;
    formattedTraffic: string; // human-readable traffic estimate, e.g. "200K+"
    relatedQueries: Title[];
    image: Image;
    articles: Article[];
    shareUrl: string;
}

// News article linked from a trending topic.
export interface Article {
    title: string;
    timeAgo: string;
    source: string;
    image?: Image; // not every article carries an image
    url: string;
    snippet: string;
}

export interface Image {
    newsUrl: string;
    source: string;
    imageUrl: string;
}

// A query plus its Google Trends explore link.
export interface Title {
    query: string;
    exploreLink: string;
}
|
||||
9
src/interface/OAuth.ts
Normal file
9
src/interface/OAuth.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
// OAuth 2.0 token response (snake_case matches the wire format).
export interface OAuth {
    access_token: string;
    refresh_token: string;
    scope: string;
    expires_in: number; // access-token lifetime in seconds
    ext_expires_in: number;
    foci: string; // family-of-client-IDs marker (Microsoft identity platform)
    token_type: string;
}
|
||||
7
src/interface/Points.ts
Normal file
7
src/interface/Points.ts
Normal file
@@ -0,0 +1,7 @@
|
||||
// Breakdown of points still earnable today, per activity category.
// totalEarnablePoints is the sum of the other four.
// NOTE(review): "sum" is inferred from naming — confirm in the code that builds this.
export interface EarnablePoints {
    desktopSearchPoints: number
    mobileSearchPoints: number
    dailySetPoints: number
    morePromotionsPoints: number
    totalEarnablePoints: number
}
|
||||
50
src/interface/QuizData.ts
Normal file
50
src/interface/QuizData.ts
Normal file
@@ -0,0 +1,50 @@
|
||||
// Client-side quiz state object embedded in Bing Rewards quiz pages.
// Mixed-case field names mirror the page's JavaScript verbatim — do not normalize.
export interface QuizData {
    offerId: string;
    quizId: string;
    quizCategory: string;
    IsCurrentQuestionCompleted: boolean;
    quizRenderSummaryPage: boolean;
    resetQuiz: boolean;
    userClickedOnHint: boolean;
    isDemoEnabled: boolean;
    correctAnswer: string;
    // Quiz type flags — exactly one is expected to be set per quiz
    // (inferred from naming; verify against page data).
    isMultiChoiceQuizType: boolean;
    isPutInOrderQuizType: boolean;
    isListicleQuizType: boolean;
    isWOTQuizType: boolean;
    isBugsForRewardsQuizType: boolean;
    currentQuestionNumber: number;
    maxQuestions: number;
    resetTrackingCounters: boolean;
    showWelcomePanel: boolean;
    isAjaxCall: boolean;
    showHint: boolean;
    numberOfOptions: number;
    isMobile: boolean;
    inRewardsMode: boolean;
    enableDailySetWelcomePane: boolean;
    enableDailySetNonWelcomePane: boolean;
    isDailySetUrlOffer: boolean;
    isDailySetFlightEnabled: boolean;
    dailySetUrlOfferId: string;
    earnedCredits: number;
    maxCredits: number;
    creditsPerQuestion: number;
    userAlreadyClickedOptions: number;
    hasUserClickedOnOption: boolean;
    recentAnswerChoice: string;
    sessionTimerSeconds: string;
    isOverlayMinimized: number;
    ScreenReaderMsgOnMove: string;
    ScreenReaderMsgOnDrop: string;
    IsPartialPointsEnabled: boolean;
    PrioritizeUrlOverCookies: boolean;
    UseNewReportActivityAPI: boolean;
    CorrectlyAnsweredQuestionCount: number;
    showJoinRewardsPage: boolean;
    CorrectOptionAnswer_WOT: string;
    WrongOptionAnswer_WOT: string;
    enableSlideAnimation: boolean;
    ariaLoggingEnabled: boolean;
    UseQuestionIndexInActivityId: boolean;
}
|
||||
4
src/interface/Search.ts
Normal file
4
src/interface/Search.ts
Normal file
@@ -0,0 +1,4 @@
|
||||
// A search topic plus related query strings derived from Google Trends.
export interface GoogleSearch {
    topic: string;
    related: string[];
}
|
||||
62
src/interface/UserAgentUtil.ts
Normal file
62
src/interface/UserAgentUtil.ts
Normal file
@@ -0,0 +1,62 @@
|
||||
// Chrome Product Data
// Response shape of the Chrome version metadata endpoint,
// used to build realistic browser user-agent strings.
export interface ChromeVersion {
    timestamp: Date;
    channels: Channels;
}

// All four Chrome release channels share the same entry shape.
export interface Channels {
    Stable: Beta;
    Beta: Beta;
    Dev: Beta;
    Canary: Beta;
}

// One release-channel entry (name kept as "Beta" for wire compatibility).
export interface Beta {
    channel: string;
    version: string;
    revision: string;
}

// Edge Product Data
// Response shape of the Microsoft Edge release metadata endpoint.
export interface EdgeVersion {
    Product: string;
    Releases: Release[];
}

// One Edge release for a specific platform/architecture pair.
export interface Release {
    ReleaseId: number;
    Platform: Platform;
    Architecture: Architecture;
    CVEs: string[];
    ProductVersion: string;
    Artifacts: Artifact[];
    PublishedTime: Date;
    ExpectedExpiryDate: Date;
}

// CPU architectures appearing in Edge release metadata.
export enum Architecture {
    Arm64 = 'arm64',
    Universal = 'universal',
    X64 = 'x64',
    X86 = 'x86'
}

// Downloadable installer artifact attached to a Release.
export interface Artifact {
    ArtifactName: string;
    Location: string;
    Hash: string;
    HashAlgorithm: HashAlgorithm;
    SizeInBytes: number;
}

export enum HashAlgorithm {
    Sha256 = 'SHA256'
}

// Operating systems appearing in Edge release metadata.
export enum Platform {
    Android = 'Android',
    IOS = 'iOS',
    Linux = 'Linux',
    MACOS = 'MacOS',
    Windows = 'Windows'
}
|
||||
7
src/luxon.d.ts
vendored
Normal file
7
src/luxon.d.ts
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
/* Minimal ambient declarations to unblock TypeScript when @types/luxon is absent. */
// Both exports are deliberately `any`: install @types/luxon to get real typings.
declare module 'luxon' {
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    export const DateTime: any
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    export const IANAZone: any
}
|
||||
419
src/scheduler.ts
Normal file
419
src/scheduler.ts
Normal file
@@ -0,0 +1,419 @@
|
||||
import { DateTime, IANAZone } from 'luxon'
|
||||
import cronParser from 'cron-parser'
|
||||
import { spawn } from 'child_process'
|
||||
import fs from 'fs'
|
||||
import path from 'path'
|
||||
import { MicrosoftRewardsBot } from './index'
|
||||
import { loadConfig } from './util/Load'
|
||||
import { log } from './util/Logger'
|
||||
import type { Config } from './interface/Config'
|
||||
|
||||
// A cron expression paired with the IANA timezone it should be evaluated in.
type CronExpressionInfo = { expression: string; tz: string }
// Luxon DateTime instance type, derived via ReturnType because the ambient
// 'luxon' module (src/luxon.d.ts) exposes DateTime as `any`.
type DateTimeInstance = ReturnType<typeof DateTime.fromJSDate>
||||
|
||||
function resolveTimeParts(schedule: Config['schedule'] | undefined): { tz: string; hour: number; minute: number } {
|
||||
const tz = (schedule?.timeZone && IANAZone.isValidZone(schedule.timeZone)) ? schedule.timeZone : 'UTC'
|
||||
|
||||
// Warn if an invalid timezone was provided
|
||||
if (schedule?.timeZone && !IANAZone.isValidZone(schedule.timeZone)) {
|
||||
void log('main', 'SCHEDULER', `Invalid timezone "${schedule.timeZone}" provided. Falling back to UTC. Valid zones: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones`, 'warn')
|
||||
}
|
||||
|
||||
// Determine source string
|
||||
let src = ''
|
||||
if (typeof schedule?.useAmPm === 'boolean') {
|
||||
if (schedule.useAmPm) src = (schedule.time12 || schedule.time || '').trim()
|
||||
else src = (schedule.time24 || schedule.time || '').trim()
|
||||
} else {
|
||||
// Back-compat: prefer time if present; else time24 or time12
|
||||
src = (schedule?.time || schedule?.time24 || schedule?.time12 || '').trim()
|
||||
}
|
||||
// Try to parse 24h first: HH:mm
|
||||
const m24 = src.match(/^\s*(\d{1,2}):(\d{2})\s*$/i)
|
||||
if (m24) {
|
||||
const hh = Math.max(0, Math.min(23, parseInt(m24[1]!, 10)))
|
||||
const mm = Math.max(0, Math.min(59, parseInt(m24[2]!, 10)))
|
||||
return { tz, hour: hh, minute: mm }
|
||||
}
|
||||
// Parse 12h with AM/PM: h:mm AM or h AM
|
||||
const m12 = src.match(/^\s*(\d{1,2})(?::(\d{2}))?\s*(AM|PM)\s*$/i)
|
||||
if (m12) {
|
||||
let hh = parseInt(m12[1]!, 10)
|
||||
const mm = m12[2] ? parseInt(m12[2]!, 10) : 0
|
||||
const ampm = m12[3]!.toUpperCase()
|
||||
if (hh === 12) hh = 0
|
||||
if (ampm === 'PM') hh += 12
|
||||
hh = Math.max(0, Math.min(23, hh))
|
||||
const m = Math.max(0, Math.min(59, mm))
|
||||
return { tz, hour: hh, minute: m }
|
||||
}
|
||||
// Fallback: default 09:00
|
||||
return { tz, hour: 9, minute: 0 }
|
||||
}
|
||||
|
||||
function parseTargetToday(now: Date, schedule: Config['schedule'] | undefined) {
|
||||
const { tz, hour, minute } = resolveTimeParts(schedule)
|
||||
const dtn = DateTime.fromJSDate(now, { zone: tz })
|
||||
return dtn.set({ hour, minute, second: 0, millisecond: 0 })
|
||||
}
|
||||
|
||||
function normalizeCronExpressions(schedule: Config['schedule'] | undefined, fallbackTz: string): CronExpressionInfo[] {
|
||||
if (!schedule) return []
|
||||
const raw = schedule.cron
|
||||
if (!raw) return []
|
||||
const expressions = Array.isArray(raw) ? raw : [raw]
|
||||
return expressions
|
||||
.map(expr => (typeof expr === 'string' ? expr.trim() : ''))
|
||||
.filter(expr => expr.length > 0)
|
||||
.map(expr => ({ expression: expr, tz: (schedule.timeZone && IANAZone.isValidZone(schedule.timeZone)) ? schedule.timeZone : fallbackTz }))
|
||||
}
|
||||
|
||||
function getNextCronOccurrence(after: DateTimeInstance, items: CronExpressionInfo[]): { next: DateTimeInstance; source: string } | null {
|
||||
let soonest: { next: DateTimeInstance; source: string } | null = null
|
||||
for (const item of items) {
|
||||
try {
|
||||
const iterator = cronParser.parseExpression(item.expression, {
|
||||
currentDate: after.toJSDate(),
|
||||
tz: item.tz
|
||||
})
|
||||
const nextDate = iterator.next().toDate()
|
||||
const nextDt = DateTime.fromJSDate(nextDate, { zone: item.tz })
|
||||
if (!soonest || nextDt < soonest.next) {
|
||||
soonest = { next: nextDt, source: item.expression }
|
||||
}
|
||||
} catch (error) {
|
||||
void log('main', 'SCHEDULER', `Invalid cron expression "${item.expression}": ${error instanceof Error ? error.message : String(error)}`, 'warn')
|
||||
}
|
||||
}
|
||||
return soonest
|
||||
}
|
||||
|
||||
function getNextDailyOccurrence(after: DateTimeInstance, schedule: Config['schedule'] | undefined): DateTimeInstance {
|
||||
const todayTarget = parseTargetToday(after.toJSDate(), schedule)
|
||||
const target = after >= todayTarget ? todayTarget.plus({ days: 1 }) : todayTarget
|
||||
return target
|
||||
}
|
||||
|
||||
function computeNextRun(after: DateTimeInstance, schedule: Config['schedule'] | undefined, cronItems: CronExpressionInfo[]): { next: DateTimeInstance; source: 'cron' | 'daily'; detail?: string } {
|
||||
if (cronItems.length > 0) {
|
||||
const cronNext = getNextCronOccurrence(after, cronItems)
|
||||
if (cronNext) {
|
||||
return { next: cronNext.next, source: 'cron', detail: cronNext.source }
|
||||
}
|
||||
void log('main', 'SCHEDULER', 'All cron expressions invalid; falling back to daily schedule', 'warn')
|
||||
}
|
||||
|
||||
return { next: getNextDailyOccurrence(after, schedule), source: 'daily' }
|
||||
}
|
||||
|
||||
/** Execute a single bot pass in this process: construct, initialize, run. */
async function runOnePass(): Promise<void> {
    const bot = new MicrosoftRewardsBot(false)
    await bot.initialize()
    await bot.run()
}
|
||||
|
||||
/**
|
||||
* Run a single pass either in-process or as a child process (default),
|
||||
* with a watchdog timeout to kill stuck runs.
|
||||
*/
|
||||
async function runOnePassWithWatchdog(): Promise<void> {
|
||||
// Heartbeat-aware watchdog configuration
|
||||
// If a child is actively updating its heartbeat file, we allow it to run beyond the legacy timeout.
|
||||
// Defaults are generous to allow first-day passes to finish searches with delays.
|
||||
const parseEnvNumber = (key: string, fallback: number, min: number, max: number): number => {
|
||||
const val = Number(process.env[key] || fallback)
|
||||
if (isNaN(val) || val < min || val > max) {
|
||||
void log('main', 'SCHEDULER', `Invalid ${key}="${process.env[key]}". Using default ${fallback}`, 'warn')
|
||||
return fallback
|
||||
}
|
||||
return val
|
||||
}
|
||||
|
||||
const staleHeartbeatMin = parseEnvNumber(
|
||||
process.env.SCHEDULER_STALE_HEARTBEAT_MINUTES ? 'SCHEDULER_STALE_HEARTBEAT_MINUTES' : 'SCHEDULER_PASS_TIMEOUT_MINUTES',
|
||||
30, 5, 1440
|
||||
)
|
||||
const graceMin = parseEnvNumber('SCHEDULER_HEARTBEAT_GRACE_MINUTES', 15, 1, 120)
|
||||
const hardcapMin = parseEnvNumber('SCHEDULER_PASS_HARDCAP_MINUTES', 480, 30, 1440)
|
||||
const checkEveryMs = 60_000 // check once per minute
|
||||
|
||||
// Validate: stale should be >= grace
|
||||
if (staleHeartbeatMin < graceMin) {
|
||||
await log('main', 'SCHEDULER', `Warning: STALE_HEARTBEAT (${staleHeartbeatMin}m) < GRACE (${graceMin}m). Adjusting stale to ${graceMin}m`, 'warn')
|
||||
}
|
||||
|
||||
// Fork per pass: safer because we can terminate a stuck child without killing the scheduler
|
||||
const forkPerPass = String(process.env.SCHEDULER_FORK_PER_PASS || 'true').toLowerCase() !== 'false'
|
||||
|
||||
if (!forkPerPass) {
|
||||
// In-process fallback (cannot forcefully stop if truly stuck)
|
||||
await log('main', 'SCHEDULER', `Starting pass in-process (grace ${graceMin}m • stale ${staleHeartbeatMin}m • hardcap ${hardcapMin}m). Cannot force-kill if stuck.`)
|
||||
// No true watchdog possible in-process; just run
|
||||
await runOnePass()
|
||||
return
|
||||
}
|
||||
|
||||
// Child process execution
|
||||
const indexJs = path.join(__dirname, 'index.js')
|
||||
await log('main', 'SCHEDULER', `Spawning child for pass: ${process.execPath} ${indexJs}`)
|
||||
|
||||
// Prepare heartbeat file path and pass to child
|
||||
const cfg = loadConfig() as Config
|
||||
const baseDir = path.join(process.cwd(), cfg.sessionPath || 'sessions')
|
||||
const hbFile = path.join(baseDir, `heartbeat_${Date.now()}.lock`)
|
||||
try { fs.mkdirSync(baseDir, { recursive: true }) } catch { /* ignore */ }
|
||||
|
||||
await new Promise<void>((resolve) => {
|
||||
const child = spawn(process.execPath, [indexJs], { stdio: 'inherit', env: { ...process.env, SCHEDULER_HEARTBEAT_FILE: hbFile } })
|
||||
let finished = false
|
||||
const startedAt = Date.now()
|
||||
|
||||
let killTimeout: NodeJS.Timeout | undefined
|
||||
|
||||
const killChild = async (signal: NodeJS.Signals) => {
|
||||
try {
|
||||
await log('main', 'SCHEDULER', `Sending ${signal} to stuck child PID ${child.pid}`,'warn')
|
||||
child.kill(signal)
|
||||
} catch { /* ignore */ }
|
||||
}
|
||||
|
||||
const timer = setInterval(() => {
|
||||
if (finished) return
|
||||
const now = Date.now()
|
||||
const runtimeMin = Math.floor((now - startedAt) / 60000)
|
||||
// Hard cap: always terminate if exceeded
|
||||
if (runtimeMin >= hardcapMin) {
|
||||
log('main', 'SCHEDULER', `Pass exceeded hard cap of ${hardcapMin} minutes; terminating...`, 'warn')
|
||||
void killChild('SIGTERM')
|
||||
if (killTimeout) clearTimeout(killTimeout)
|
||||
killTimeout = setTimeout(() => { try { child.kill('SIGKILL') } catch { /* ignore */ } }, 10_000)
|
||||
return
|
||||
}
|
||||
// Before grace, don't judge
|
||||
if (runtimeMin < graceMin) return
|
||||
// Check heartbeat freshness
|
||||
try {
|
||||
const st = fs.statSync(hbFile)
|
||||
const mtimeMs = st.mtimeMs
|
||||
const ageMin = Math.floor((now - mtimeMs) / 60000)
|
||||
if (ageMin >= staleHeartbeatMin) {
|
||||
log('main', 'SCHEDULER', `Heartbeat stale for ${ageMin}m (>=${staleHeartbeatMin}m). Terminating child...`, 'warn')
|
||||
void killChild('SIGTERM')
|
||||
if (killTimeout) clearTimeout(killTimeout)
|
||||
killTimeout = setTimeout(() => { try { child.kill('SIGKILL') } catch { /* ignore */ } }, 10_000)
|
||||
}
|
||||
} catch (err) {
|
||||
// If file missing after grace, consider stale
|
||||
const msg = err instanceof Error ? err.message : String(err)
|
||||
log('main', 'SCHEDULER', `Heartbeat file check failed: ${msg}. Terminating child...`, 'warn')
|
||||
void killChild('SIGTERM')
|
||||
if (killTimeout) clearTimeout(killTimeout)
|
||||
killTimeout = setTimeout(() => { try { child.kill('SIGKILL') } catch { /* ignore */ } }, 10_000)
|
||||
}
|
||||
}, checkEveryMs)
|
||||
|
||||
child.on('exit', async (code, signal) => {
|
||||
finished = true
|
||||
clearInterval(timer)
|
||||
if (killTimeout) clearTimeout(killTimeout)
|
||||
// Cleanup heartbeat file
|
||||
try { if (fs.existsSync(hbFile)) fs.unlinkSync(hbFile) } catch { /* ignore */ }
|
||||
if (signal) {
|
||||
await log('main', 'SCHEDULER', `Child exited due to signal: ${signal}`, 'warn')
|
||||
} else if (code && code !== 0) {
|
||||
await log('main', 'SCHEDULER', `Child exited with non-zero code: ${code}`, 'warn')
|
||||
} else {
|
||||
await log('main', 'SCHEDULER', 'Child pass completed successfully')
|
||||
}
|
||||
resolve()
|
||||
})
|
||||
|
||||
child.on('error', async (err) => {
|
||||
finished = true
|
||||
clearInterval(timer)
|
||||
if (killTimeout) clearTimeout(killTimeout)
|
||||
try { if (fs.existsSync(hbFile)) fs.unlinkSync(hbFile) } catch { /* ignore */ }
|
||||
await log('main', 'SCHEDULER', `Failed to spawn child: ${err instanceof Error ? err.message : String(err)}`, 'error')
|
||||
resolve()
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
async function runPasses(passes: number): Promise<void> {
|
||||
const n = Math.max(1, Math.floor(passes || 1))
|
||||
for (let i = 1; i <= n; i++) {
|
||||
await log('main', 'SCHEDULER', `Starting pass ${i}/${n}`)
|
||||
const started = Date.now()
|
||||
await runOnePassWithWatchdog()
|
||||
const took = Date.now() - started
|
||||
const sec = Math.max(1, Math.round(took / 1000))
|
||||
await log('main', 'SCHEDULER', `Completed pass ${i}/${n}`)
|
||||
await log('main', 'SCHEDULER', `Pass ${i} duration: ${sec}s`)
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Scheduler entry point.
 *
 * When the schedule is disabled: runs the configured passes once and exits.
 * Otherwise: optionally runs immediately (with optional startup jitter), then
 * loops forever — compute next run (cron or daily + optional jitter), sleep,
 * and run unless today is a sampled "off day" or a vacation day.
 */
async function main() {
    const cfg = loadConfig() as Config & { schedule?: { enabled?: boolean; time?: string; timeZone?: string; runImmediatelyOnStart?: boolean } }
    const schedule = cfg.schedule || { enabled: false }
    const passes = typeof cfg.passesPerRun === 'number' ? cfg.passesPerRun : 1
    // Humanization: number of randomly skipped weekdays per week, clamped to 0..7
    const offPerWeek = Math.max(0, Math.min(7, Number(cfg.humanization?.randomOffDaysPerWeek ?? 1)))
    let offDays: number[] = [] // 1..7 ISO weekday
    let offWeek: number | null = null // ISO week number the current offDays sample belongs to
    type VacRange = { start: string; end: string } | null
    let vacMonth: string | null = null // 'yyyy-LL'
    let vacRange: VacRange = null // ISO dates 'yyyy-LL-dd'

    // Re-sample the weekly off-days once per ISO week (no-op if already sampled).
    const refreshOffDays = async (now: { weekNumber: number }) => {
        if (offPerWeek <= 0) { offDays = []; offWeek = null; return }
        const week = now.weekNumber
        if (offWeek === week && offDays.length) return
        // choose distinct weekdays [1..7] by drawing without replacement
        const pool = [1,2,3,4,5,6,7]
        const chosen: number[] = []
        for (let i=0;i<Math.min(offPerWeek,7);i++) {
            const idx = Math.floor(Math.random()*pool.length)
            chosen.push(pool[idx]!)
            pool.splice(idx,1)
        }
        offDays = chosen.sort((a,b)=>a-b)
        offWeek = week
        const msg = offDays.length ? offDays.join(', ') : 'none'
        await log('main','SCHEDULER',`Weekly humanization off-day sample (ISO weekday): ${msg} | adjust via config.humanization.randomOffDaysPerWeek`,'warn')
    }

    // Pick one contiguous vacation block per calendar month (no-op if already chosen).
    const chooseVacationRange = async (now: typeof DateTime.prototype) => {
        // Only when enabled
        if (!cfg.vacation?.enabled) { vacRange = null; vacMonth = null; return }
        const monthKey = now.toFormat('yyyy-LL')
        if (vacMonth === monthKey && vacRange) return
        // Determine month days and choose contiguous block
        const monthStart = now.startOf('month')
        const monthEnd = now.endOf('month')
        const totalDays = monthEnd.day
        const minD = Math.max(1, Math.min(28, Number(cfg.vacation.minDays ?? 3)))
        const maxD = Math.max(minD, Math.min(31, Number(cfg.vacation.maxDays ?? 5)))
        const span = (minD === maxD) ? minD : (minD + Math.floor(Math.random() * (maxD - minD + 1)))
        // Latest start day such that the block still fits inside the month
        const latestStart = Math.max(1, totalDays - span + 1)
        const startDay = 1 + Math.floor(Math.random() * latestStart)
        const start = monthStart.set({ day: startDay })
        const end = start.plus({ days: span - 1 })
        vacMonth = monthKey
        vacRange = { start: start.toFormat('yyyy-LL-dd'), end: end.toFormat('yyyy-LL-dd') }
        await log('main','SCHEDULER',`Selected vacation block this month: ${vacRange.start} → ${vacRange.end} (${span} day(s))`,'warn')
    }

    // No schedule: one-shot mode.
    if (!schedule.enabled) {
        await log('main', 'SCHEDULER', 'Schedule disabled; running once then exit')
        await runPasses(passes)
        process.exit(0)
    }

    const tz = (schedule.timeZone && IANAZone.isValidZone(schedule.timeZone)) ? schedule.timeZone : 'UTC'
    const cronExpressions = normalizeCronExpressions(schedule, tz)
    let running = false // guards against overlapping passes

    // Optional initial jitter before the first run (to vary start time).
    // Reads MINUTES_* env names first, then the legacy fallback names.
    const parseJitter = (minKey: string, maxKey: string, fallbackMin: string, fallbackMax: string): [number, number] => {
        const minVal = Number(process.env[minKey] || process.env[fallbackMin] || 0)
        const maxVal = Number(process.env[maxKey] || process.env[fallbackMax] || 0)
        if (isNaN(minVal) || minVal < 0) {
            void log('main', 'SCHEDULER', `Invalid ${minKey}="${process.env[minKey]}". Using 0`, 'warn')
            return [0, isNaN(maxVal) || maxVal < 0 ? 0 : maxVal]
        }
        if (isNaN(maxVal) || maxVal < 0) {
            void log('main', 'SCHEDULER', `Invalid ${maxKey}="${process.env[maxKey]}". Using 0`, 'warn')
            return [minVal, 0]
        }
        return [minVal, maxVal]
    }

    const initialJitterBounds = parseJitter('SCHEDULER_INITIAL_JITTER_MINUTES_MIN', 'SCHEDULER_INITIAL_JITTER_MINUTES_MAX', 'SCHEDULER_INITIAL_JITTER_MIN', 'SCHEDULER_INITIAL_JITTER_MAX')
    const applyInitialJitter = (initialJitterBounds[0] > 0 || initialJitterBounds[1] > 0)

    // Check if immediate run is enabled (default to false to avoid unexpected runs)
    const runImmediate = schedule.runImmediatelyOnStart === true

    if (runImmediate && !running) {
        running = true
        if (applyInitialJitter) {
            // Normalize bounds (min <= max) then draw a uniform delay in seconds
            const min = Math.max(0, Math.min(initialJitterBounds[0], initialJitterBounds[1]))
            const max = Math.max(min, initialJitterBounds[0], initialJitterBounds[1])
            const jitterSec = (min === max) ? min * 60 : (min * 60 + Math.floor(Math.random() * ((max - min) * 60)))
            if (jitterSec > 0) {
                await log('main', 'SCHEDULER', `Initial jitter: delaying first run by ${Math.round(jitterSec / 60)} minute(s) (${jitterSec}s)`, 'warn')
                await new Promise((r) => setTimeout(r, jitterSec * 1000))
            }
        }
        const nowDT = DateTime.local().setZone(tz)
        await chooseVacationRange(nowDT)
        await refreshOffDays(nowDT)
        const todayIso = nowDT.toFormat('yyyy-LL-dd')
        const vr = vacRange as { start: string; end: string } | null
        // ISO date strings compare correctly lexicographically
        const isVacationToday = !!(vr && todayIso >= vr.start && todayIso <= vr.end)
        if (isVacationToday) {
            await log('main','SCHEDULER',`Skipping immediate run: vacation day (${todayIso})`,'warn')
        } else if (offDays.includes(nowDT.weekday)) {
            await log('main','SCHEDULER',`Skipping immediate run: humanization off-day (ISO weekday ${nowDT.weekday}). Set humanization.randomOffDaysPerWeek=0 to disable.`,'warn')
        } else {
            await runPasses(passes)
        }
        running = false
    }

    // Main scheduling loop: sleep until the next slot, then maybe run.
    for (;;) {
        const nowDT = DateTime.local().setZone(tz)
        const nextInfo = computeNextRun(nowDT, schedule, cronExpressions)
        const next = nextInfo.next
        let ms = Math.max(0, next.toMillis() - nowDT.toMillis())

        // Optional daily jitter to further randomize the exact start time each day
        // (only for the daily schedule — cron times are taken as exact)
        let extraMs = 0
        if (cronExpressions.length === 0) {
            const dailyJitterBounds = parseJitter('SCHEDULER_DAILY_JITTER_MINUTES_MIN', 'SCHEDULER_DAILY_JITTER_MINUTES_MAX', 'SCHEDULER_DAILY_JITTER_MIN', 'SCHEDULER_DAILY_JITTER_MAX')
            const djMin = dailyJitterBounds[0]
            const djMax = dailyJitterBounds[1]
            if (djMin > 0 || djMax > 0) {
                const mn = Math.max(0, Math.min(djMin, djMax))
                const mx = Math.max(mn, djMin, djMax)
                const jitterSec = (mn === mx) ? mn * 60 : (mn * 60 + Math.floor(Math.random() * ((mx - mn) * 60)))
                extraMs = jitterSec * 1000
                ms += extraMs
            }
        }

        const human = next.toFormat('yyyy-LL-dd HH:mm ZZZZ')
        const totalSec = Math.round(ms / 1000)
        const jitterMsg = extraMs > 0 ? ` plus daily jitter (+${Math.round(extraMs/60000)}m)` : ''
        const sourceMsg = nextInfo.source === 'cron' ? ` [cron: ${nextInfo.detail}]` : ''
        await log('main', 'SCHEDULER', `Next run at ${human}${jitterMsg}${sourceMsg} (in ${totalSec}s)`)

        await new Promise((resolve) => setTimeout(resolve, ms))

        // Re-evaluate skip conditions at wake-up time (week/month may have rolled over)
        const nowRun = DateTime.local().setZone(tz)
        await chooseVacationRange(nowRun)
        await refreshOffDays(nowRun)
        const todayIso2 = nowRun.toFormat('yyyy-LL-dd')
        const vr2 = vacRange as { start: string; end: string } | null
        const isVacation = !!(vr2 && todayIso2 >= vr2.start && todayIso2 <= vr2.end)
        if (isVacation) {
            await log('main','SCHEDULER',`Skipping scheduled run: vacation day (${todayIso2})`,'warn')
            continue
        }
        if (offDays.includes(nowRun.weekday)) {
            await log('main','SCHEDULER',`Skipping scheduled run: humanization off-day (ISO weekday ${nowRun.weekday}). Set humanization.randomOffDaysPerWeek=0 to disable.`,'warn')
            continue
        }
        if (!running) {
            running = true
            await runPasses(passes)
            running = false
        } else {
            await log('main','SCHEDULER','Skipped scheduled trigger because a pass is already running','warn')
        }
    }
}
|
||||
|
||||
// Top-level entry: log fatal errors and exit non-zero so supervisors can restart us.
main().catch((e) => {
    void log('main', 'SCHEDULER', `Fatal error: ${e instanceof Error ? e.message : String(e)}`, 'error')
    process.exit(1)
})
|
||||
25
src/util/AdaptiveThrottler.ts
Normal file
25
src/util/AdaptiveThrottler.ts
Normal file
@@ -0,0 +1,25 @@
|
||||
export class AdaptiveThrottler {
|
||||
private errorCount = 0
|
||||
private successCount = 0
|
||||
private window: Array<{ ok: boolean; at: number }> = []
|
||||
private readonly maxWindow = 50
|
||||
|
||||
record(ok: boolean) {
|
||||
this.window.push({ ok, at: Date.now() })
|
||||
if (ok) this.successCount++
|
||||
else this.errorCount++
|
||||
if (this.window.length > this.maxWindow) {
|
||||
const removed = this.window.shift()
|
||||
if (removed) removed.ok ? this.successCount-- : this.errorCount--
|
||||
}
|
||||
}
|
||||
|
||||
/** Return a multiplier to apply to waits (1 = normal). */
|
||||
getDelayMultiplier(): number {
|
||||
const total = Math.max(1, this.successCount + this.errorCount)
|
||||
const errRatio = this.errorCount / total
|
||||
// 0% errors -> 1x; 50% errors -> ~1.8x; 80% -> ~2.5x (cap)
|
||||
const mult = 1 + Math.min(1.5, errRatio * 2)
|
||||
return Number(mult.toFixed(2))
|
||||
}
|
||||
}
|
||||
264
src/util/Analytics.ts
Normal file
264
src/util/Analytics.ts
Normal file
@@ -0,0 +1,264 @@
|
||||
import fs from 'fs'
|
||||
import path from 'path'
|
||||
|
||||
/** Metrics captured for one account's run on one day; serialized to JSON by Analytics. */
export interface DailyMetrics {
    date: string // YYYY-MM-DD
    email: string
    pointsEarned: number
    pointsInitial: number
    pointsEnd: number
    desktopPoints: number
    mobilePoints: number
    executionTimeMs: number
    successRate: number // 0-1
    errorsCount: number
    banned: boolean
    riskScore?: number // optional risk score recorded for the run, if available
}

/** Aggregated view of one account's runs over a lookback window. */
export interface AccountHistory {
    email: string
    totalRuns: number
    totalPointsEarned: number
    avgPointsPerDay: number
    avgExecutionTime: number
    successRate: number
    lastRunDate: string
    banHistory: Array<{ date: string; reason: string }>
    riskTrend: number[] // last N risk scores
}

/** Cross-account rollup produced by Analytics.generateSummary(). */
export interface AnalyticsSummary {
    period: string // e.g., 'last-7-days', 'last-30-days', 'all-time'
    accounts: AccountHistory[]
    globalStats: {
        totalPoints: number
        avgSuccessRate: number
        mostProductiveAccount: string
        mostRiskyAccount: string
    }
}
|
||||
|
||||
/**
|
||||
* Analytics tracks performance metrics, point collection trends, and account health.
|
||||
* Stores data in JSON files for lightweight persistence and easy analysis.
|
||||
*/
|
||||
export class Analytics {
|
||||
private dataDir: string
|
||||
|
||||
constructor(baseDir: string = 'analytics') {
|
||||
this.dataDir = path.join(process.cwd(), baseDir)
|
||||
if (!fs.existsSync(this.dataDir)) {
|
||||
fs.mkdirSync(this.dataDir, { recursive: true })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Record metrics for a completed account run
|
||||
*/
|
||||
recordRun(metrics: DailyMetrics): void {
|
||||
const date = metrics.date
|
||||
const email = this.sanitizeEmail(metrics.email)
|
||||
const fileName = `${email}_${date}.json`
|
||||
const filePath = path.join(this.dataDir, fileName)
|
||||
|
||||
try {
|
||||
fs.writeFileSync(filePath, JSON.stringify(metrics, null, 2), 'utf-8')
|
||||
} catch (error) {
|
||||
console.error(`Failed to save metrics for ${metrics.email}:`, error)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get history for a specific account
|
||||
*/
|
||||
getAccountHistory(email: string, days: number = 30): AccountHistory {
|
||||
const sanitized = this.sanitizeEmail(email)
|
||||
const files = this.getAccountFiles(sanitized, days)
|
||||
|
||||
if (files.length === 0) {
|
||||
return {
|
||||
email,
|
||||
totalRuns: 0,
|
||||
totalPointsEarned: 0,
|
||||
avgPointsPerDay: 0,
|
||||
avgExecutionTime: 0,
|
||||
successRate: 1.0,
|
||||
lastRunDate: 'never',
|
||||
banHistory: [],
|
||||
riskTrend: []
|
||||
}
|
||||
}
|
||||
|
||||
let totalPoints = 0
|
||||
let totalTime = 0
|
||||
let successCount = 0
|
||||
const banHistory: Array<{ date: string; reason: string }> = []
|
||||
const riskScores: number[] = []
|
||||
|
||||
for (const file of files) {
|
||||
const filePath = path.join(this.dataDir, file)
|
||||
try {
|
||||
const data: DailyMetrics = JSON.parse(fs.readFileSync(filePath, 'utf-8'))
|
||||
totalPoints += data.pointsEarned
|
||||
totalTime += data.executionTimeMs
|
||||
if (data.successRate > 0.5) successCount++
|
||||
if (data.banned) {
|
||||
banHistory.push({ date: data.date, reason: 'detected' })
|
||||
}
|
||||
if (typeof data.riskScore === 'number') {
|
||||
riskScores.push(data.riskScore)
|
||||
}
|
||||
} catch {
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
const totalRuns = files.length
|
||||
const lastFile = files[files.length - 1]
|
||||
const lastRunDate = lastFile ? lastFile.split('_')[1]?.replace('.json', '') || 'unknown' : 'unknown'
|
||||
|
||||
return {
|
||||
email,
|
||||
totalRuns,
|
||||
totalPointsEarned: totalPoints,
|
||||
avgPointsPerDay: Math.round(totalPoints / Math.max(1, totalRuns)),
|
||||
avgExecutionTime: Math.round(totalTime / Math.max(1, totalRuns)),
|
||||
successRate: successCount / Math.max(1, totalRuns),
|
||||
lastRunDate,
|
||||
banHistory,
|
||||
riskTrend: riskScores.slice(-10) // last 10 risk scores
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a summary report for all accounts
|
||||
*/
|
||||
generateSummary(days: number = 30): AnalyticsSummary {
|
||||
const accountEmails = this.getAllAccounts()
|
||||
const accounts: AccountHistory[] = []
|
||||
|
||||
for (const email of accountEmails) {
|
||||
accounts.push(this.getAccountHistory(email, days))
|
||||
}
|
||||
|
||||
const totalPoints = accounts.reduce((sum, a) => sum + a.totalPointsEarned, 0)
|
||||
const avgSuccess = accounts.reduce((sum, a) => sum + a.successRate, 0) / Math.max(1, accounts.length)
|
||||
|
||||
let mostProductive = ''
|
||||
let maxPoints = 0
|
||||
let mostRisky = ''
|
||||
let maxRisk = 0
|
||||
|
||||
for (const acc of accounts) {
|
||||
if (acc.totalPointsEarned > maxPoints) {
|
||||
maxPoints = acc.totalPointsEarned
|
||||
mostProductive = acc.email
|
||||
}
|
||||
const avgRisk = acc.riskTrend.reduce((s, r) => s + r, 0) / Math.max(1, acc.riskTrend.length)
|
||||
if (avgRisk > maxRisk) {
|
||||
maxRisk = avgRisk
|
||||
mostRisky = acc.email
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
period: `last-${days}-days`,
|
||||
accounts,
|
||||
globalStats: {
|
||||
totalPoints,
|
||||
avgSuccessRate: Number(avgSuccess.toFixed(2)),
|
||||
mostProductiveAccount: mostProductive || 'none',
|
||||
mostRiskyAccount: mostRisky || 'none'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Export summary as markdown table (for human readability)
|
||||
*/
|
||||
exportMarkdown(days: number = 30): string {
|
||||
const summary = this.generateSummary(days)
|
||||
const lines: string[] = []
|
||||
|
||||
lines.push(`# Analytics Summary (${summary.period})`)
|
||||
lines.push('')
|
||||
lines.push('## Global Stats')
|
||||
lines.push(`- Total Points: ${summary.globalStats.totalPoints}`)
|
||||
lines.push(`- Avg Success Rate: ${(summary.globalStats.avgSuccessRate * 100).toFixed(1)}%`)
|
||||
lines.push(`- Most Productive: ${summary.globalStats.mostProductiveAccount}`)
|
||||
lines.push(`- Most Risky: ${summary.globalStats.mostRiskyAccount}`)
|
||||
lines.push('')
|
||||
lines.push('## Per-Account Breakdown')
|
||||
lines.push('')
|
||||
lines.push('| Account | Runs | Total Points | Avg/Day | Success Rate | Last Run | Bans |')
|
||||
lines.push('|---------|------|--------------|---------|--------------|----------|------|')
|
||||
|
||||
for (const acc of summary.accounts) {
|
||||
const successPct = (acc.successRate * 100).toFixed(0)
|
||||
const banCount = acc.banHistory.length
|
||||
lines.push(
|
||||
`| ${acc.email} | ${acc.totalRuns} | ${acc.totalPointsEarned} | ${acc.avgPointsPerDay} | ${successPct}% | ${acc.lastRunDate} | ${banCount} |`
|
||||
)
|
||||
}
|
||||
|
||||
return lines.join('\n')
|
||||
}
|
||||
|
||||
/**
|
||||
* Clean up old analytics files (retention policy)
|
||||
*/
|
||||
cleanup(retentionDays: number): void {
|
||||
const files = fs.readdirSync(this.dataDir)
|
||||
const cutoff = Date.now() - (retentionDays * 24 * 60 * 60 * 1000)
|
||||
|
||||
for (const file of files) {
|
||||
if (!file.endsWith('.json')) continue
|
||||
const filePath = path.join(this.dataDir, file)
|
||||
try {
|
||||
const stats = fs.statSync(filePath)
|
||||
if (stats.mtimeMs < cutoff) {
|
||||
fs.unlinkSync(filePath)
|
||||
}
|
||||
} catch {
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private sanitizeEmail(email: string): string {
|
||||
return email.replace(/[^a-zA-Z0-9@._-]/g, '_')
|
||||
}
|
||||
|
||||
private getAccountFiles(sanitizedEmail: string, days: number): string[] {
|
||||
const files = fs.readdirSync(this.dataDir)
|
||||
const cutoffDate = new Date()
|
||||
cutoffDate.setDate(cutoffDate.getDate() - days)
|
||||
|
||||
return files
|
||||
.filter((f: string) => f.startsWith(sanitizedEmail) && f.endsWith('.json'))
|
||||
.filter((f: string) => {
|
||||
const datePart = f.split('_')[1]?.replace('.json', '')
|
||||
if (!datePart) return false
|
||||
const fileDate = new Date(datePart)
|
||||
return fileDate >= cutoffDate
|
||||
})
|
||||
.sort()
|
||||
}
|
||||
|
||||
private getAllAccounts(): string[] {
|
||||
const files = fs.readdirSync(this.dataDir)
|
||||
const emailSet = new Set<string>()
|
||||
|
||||
for (const file of files) {
|
||||
if (!file.endsWith('.json')) continue
|
||||
const parts = file.split('_')
|
||||
if (parts.length >= 2) {
|
||||
const email = parts[0]
|
||||
if (email) emailSet.add(email)
|
||||
}
|
||||
}
|
||||
|
||||
return Array.from(emailSet)
|
||||
}
|
||||
}
|
||||
141
src/util/Axios.ts
Normal file
141
src/util/Axios.ts
Normal file
@@ -0,0 +1,141 @@
|
||||
import axios, { AxiosError, AxiosInstance, AxiosRequestConfig, AxiosResponse } from 'axios'
|
||||
import { HttpProxyAgent } from 'http-proxy-agent'
|
||||
import { HttpsProxyAgent } from 'https-proxy-agent'
|
||||
import { SocksProxyAgent } from 'socks-proxy-agent'
|
||||
import { AccountProxy } from '../interface/Account'
|
||||
|
||||
class AxiosClient {
|
||||
private instance: AxiosInstance
|
||||
private account: AccountProxy
|
||||
|
||||
constructor(account: AccountProxy) {
|
||||
this.account = account
|
||||
this.instance = axios.create()
|
||||
|
||||
// If a proxy configuration is provided, set up the agent
|
||||
if (this.account.url && this.account.proxyAxios) {
|
||||
const agent = this.getAgentForProxy(this.account)
|
||||
this.instance.defaults.httpAgent = agent
|
||||
this.instance.defaults.httpsAgent = agent
|
||||
}
|
||||
}
|
||||
|
||||
private getAgentForProxy(proxyConfig: AccountProxy): HttpProxyAgent<string> | HttpsProxyAgent<string> | SocksProxyAgent {
|
||||
const { proxyUrl, protocol } = this.buildProxyUrl(proxyConfig)
|
||||
const normalized = protocol.replace(/:$/, '')
|
||||
|
||||
switch (normalized) {
|
||||
case 'http':
|
||||
return new HttpProxyAgent(proxyUrl)
|
||||
case 'https':
|
||||
return new HttpsProxyAgent(proxyUrl)
|
||||
case 'socks':
|
||||
case 'socks4':
|
||||
case 'socks5':
|
||||
return new SocksProxyAgent(proxyUrl)
|
||||
default:
|
||||
throw new Error(`Unsupported proxy protocol in "${proxyConfig.url}". Supported: http://, https://, socks://, socks4://, socks5://`)
|
||||
}
|
||||
}
|
||||
|
||||
private buildProxyUrl(proxyConfig: AccountProxy): { proxyUrl: string; protocol: string } {
|
||||
const { url, port, username, password } = proxyConfig
|
||||
|
||||
if (!url) {
|
||||
throw new Error('Proxy URL is required when proxyAxios is enabled.')
|
||||
}
|
||||
|
||||
const hasScheme = /^[a-zA-Z][a-zA-Z0-9+.-]*:/.test(url)
|
||||
const candidate = hasScheme ? url : `http://${url}`
|
||||
|
||||
let parsedUrl: URL
|
||||
try {
|
||||
parsedUrl = new URL(candidate)
|
||||
} catch (err) {
|
||||
throw new Error(`Invalid proxy URL "${url}": ${(err as Error).message}`)
|
||||
}
|
||||
|
||||
const protocol = parsedUrl.protocol.replace(/:$/, '')
|
||||
const allowed = new Set(['http', 'https', 'socks', 'socks4', 'socks5'])
|
||||
if (!allowed.has(protocol)) {
|
||||
throw new Error(`Unsupported proxy protocol in "${url}". Supported: http://, https://, socks://, socks4://, socks5://`)
|
||||
}
|
||||
|
||||
if (!parsedUrl.port) {
|
||||
if (port) {
|
||||
parsedUrl.port = String(port)
|
||||
} else {
|
||||
throw new Error(`Proxy port missing for "${url}". Provide a port value.`)
|
||||
}
|
||||
}
|
||||
|
||||
if (username) {
|
||||
parsedUrl.username = encodeURIComponent(username)
|
||||
}
|
||||
|
||||
if (password) {
|
||||
parsedUrl.password = encodeURIComponent(password)
|
||||
}
|
||||
|
||||
return { proxyUrl: parsedUrl.toString(), protocol: parsedUrl.protocol }
|
||||
}
|
||||
|
||||
// Generic method to make any Axios request
|
||||
public async request(config: AxiosRequestConfig, bypassProxy = false): Promise<AxiosResponse> {
|
||||
if (bypassProxy) {
|
||||
const bypassInstance = axios.create()
|
||||
return bypassInstance.request(config)
|
||||
}
|
||||
|
||||
let lastError: unknown
|
||||
const maxAttempts = 2
|
||||
|
||||
for (let attempt = 1; attempt <= maxAttempts; attempt++) {
|
||||
try {
|
||||
return await this.instance.request(config)
|
||||
} catch (err: unknown) {
|
||||
lastError = err
|
||||
const axiosErr = err as AxiosError | undefined
|
||||
|
||||
// Detect HTTP proxy auth failures (status 407) and retry without proxy
|
||||
if (axiosErr && axiosErr.response && axiosErr.response.status === 407) {
|
||||
if (attempt < maxAttempts) {
|
||||
await this.sleep(1000 * attempt) // Exponential backoff
|
||||
}
|
||||
const bypassInstance = axios.create()
|
||||
return bypassInstance.request(config)
|
||||
}
|
||||
|
||||
// If proxied request fails with common proxy/network errors, retry with backoff
|
||||
const e = err as { code?: string; cause?: { code?: string }; message?: string } | undefined
|
||||
const code = e?.code || e?.cause?.code
|
||||
const isNetErr = code === 'ECONNREFUSED' || code === 'ETIMEDOUT' || code === 'ECONNRESET' || code === 'ENOTFOUND'
|
||||
const msg = String(e?.message || '')
|
||||
const looksLikeProxyIssue = /proxy|tunnel|socks|agent/i.test(msg)
|
||||
|
||||
if (isNetErr || looksLikeProxyIssue) {
|
||||
if (attempt < maxAttempts) {
|
||||
// Exponential backoff: 1s, 2s, 4s, etc.
|
||||
const delayMs = 1000 * Math.pow(2, attempt - 1)
|
||||
await this.sleep(delayMs)
|
||||
continue
|
||||
}
|
||||
// Last attempt: try without proxy
|
||||
const bypassInstance = axios.create()
|
||||
return bypassInstance.request(config)
|
||||
}
|
||||
|
||||
// Non-retryable error
|
||||
throw err
|
||||
}
|
||||
}
|
||||
|
||||
throw lastError
|
||||
}
|
||||
|
||||
private sleep(ms: number): Promise<void> {
|
||||
return new Promise(resolve => setTimeout(resolve, ms))
|
||||
}
|
||||
}
|
||||
|
||||
export default AxiosClient
|
||||
16
src/util/BanDetector.ts
Normal file
16
src/util/BanDetector.ts
Normal file
@@ -0,0 +1,16 @@
|
||||
export type BanStatus = { status: boolean; reason: string }
|
||||
|
||||
const BAN_PATTERNS: Array<{ re: RegExp; reason: string }> = [
|
||||
{ re: /suspend|suspended|suspension/i, reason: 'account suspended' },
|
||||
{ re: /locked|lockout|serviceabuse|abuse/i, reason: 'locked or service abuse detected' },
|
||||
{ re: /unusual.*activity|unusual activity/i, reason: 'unusual activity prompts' },
|
||||
{ re: /verify.*identity|identity.*verification/i, reason: 'identity verification required' }
|
||||
]
|
||||
|
||||
export function detectBanReason(input: unknown): BanStatus {
|
||||
const s = input instanceof Error ? (input.message || '') : String(input || '')
|
||||
for (const p of BAN_PATTERNS) {
|
||||
if (p.re.test(s)) return { status: true, reason: p.reason }
|
||||
}
|
||||
return { status: false, reason: '' }
|
||||
}
|
||||
394
src/util/BanPredictor.ts
Normal file
394
src/util/BanPredictor.ts
Normal file
@@ -0,0 +1,394 @@
|
||||
import { RiskManager, RiskEvent } from './RiskManager'
|
||||
|
||||
/** One known ban signature, with detection state filled in per prediction. */
export interface BanPattern {
    name: string
    description: string
    weight: number // 0-10
    detected: boolean
    evidence: string[] // human-readable observations supporting the detection
}

/** Output of BanPredictor.predictBanRisk(). */
export interface BanPrediction {
    riskScore: number // 0-100
    confidence: number // 0-1
    likelihood: 'very-low' | 'low' | 'medium' | 'high' | 'critical'
    patterns: BanPattern[] // only the patterns that were detected
    recommendation: string
    preventiveActions: string[]
}

/** One historical run outcome (banned or clean) used for learning. */
export interface HistoricalData {
    email: string
    timestamp: number
    banned: boolean
    preBanEvents: RiskEvent[] // events observed shortly before a ban (empty for clean runs)
    accountAge: number // days since first use
    totalRuns: number
}
|
||||
|
||||
/**
|
||||
* BanPredictor uses machine-learning-style pattern analysis to predict ban risk.
|
||||
* Learns from historical data and real-time signals to calculate ban probability.
|
||||
*/
|
||||
export class BanPredictor {
|
||||
private riskManager: RiskManager
|
||||
private history: HistoricalData[] = []
|
||||
private patterns: BanPattern[] = []
|
||||
|
||||
constructor(riskManager: RiskManager) {
|
||||
this.riskManager = riskManager
|
||||
this.initializePatterns()
|
||||
}
|
||||
|
||||
/**
|
||||
* Analyze current state and predict ban risk
|
||||
*/
|
||||
predictBanRisk(accountEmail: string, accountAgeDays: number, totalRuns: number): BanPrediction {
|
||||
const riskMetrics = this.riskManager.assessRisk()
|
||||
const recentEvents = this.riskManager.getRecentEvents(60)
|
||||
|
||||
// Detect patterns
|
||||
this.detectPatterns(recentEvents, accountAgeDays, totalRuns)
|
||||
|
||||
// Calculate base risk from RiskManager
|
||||
const baseRisk = riskMetrics.score
|
||||
|
||||
// Apply ML-style feature weights
|
||||
const featureScore = this.calculateFeatureScore(recentEvents, accountAgeDays, totalRuns)
|
||||
|
||||
// Pattern detection bonus
|
||||
const detectedPatterns = this.patterns.filter(p => p.detected)
|
||||
const patternPenalty = detectedPatterns.reduce((sum, p) => sum + p.weight, 0)
|
||||
|
||||
// Historical learning adjustment
|
||||
const historicalAdjustment = this.getHistoricalAdjustment(accountEmail)
|
||||
|
||||
// Final risk score (capped at 100)
|
||||
const finalScore = Math.min(100, baseRisk + featureScore + patternPenalty + historicalAdjustment)
|
||||
|
||||
// Calculate confidence (based on data availability)
|
||||
const confidence = this.calculateConfidence(recentEvents.length, this.history.length)
|
||||
|
||||
// Determine likelihood tier
|
||||
let likelihood: BanPrediction['likelihood']
|
||||
if (finalScore < 20) likelihood = 'very-low'
|
||||
else if (finalScore < 40) likelihood = 'low'
|
||||
else if (finalScore < 60) likelihood = 'medium'
|
||||
else if (finalScore < 80) likelihood = 'high'
|
||||
else likelihood = 'critical'
|
||||
|
||||
// Generate recommendations
|
||||
const recommendation = this.generateRecommendation(finalScore)
|
||||
const preventiveActions = this.generatePreventiveActions(detectedPatterns)
|
||||
|
||||
return {
|
||||
riskScore: Math.round(finalScore),
|
||||
confidence: Number(confidence.toFixed(2)),
|
||||
likelihood,
|
||||
patterns: detectedPatterns,
|
||||
recommendation,
|
||||
preventiveActions
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Record ban event for learning
|
||||
*/
|
||||
recordBan(email: string, accountAgeDays: number, totalRuns: number): void {
|
||||
const preBanEvents = this.riskManager.getRecentEvents(120)
|
||||
|
||||
this.history.push({
|
||||
email,
|
||||
timestamp: Date.now(),
|
||||
banned: true,
|
||||
preBanEvents,
|
||||
accountAge: accountAgeDays,
|
||||
totalRuns
|
||||
})
|
||||
|
||||
// Keep history limited (last 100 bans)
|
||||
if (this.history.length > 100) {
|
||||
this.history.shift()
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Record successful run (no ban) for learning
|
||||
*/
|
||||
recordSuccess(email: string, accountAgeDays: number, totalRuns: number): void {
|
||||
this.history.push({
|
||||
email,
|
||||
timestamp: Date.now(),
|
||||
banned: false,
|
||||
preBanEvents: [],
|
||||
accountAge: accountAgeDays,
|
||||
totalRuns
|
||||
})
|
||||
|
||||
if (this.history.length > 100) {
|
||||
this.history.shift()
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize known ban patterns
|
||||
*/
|
||||
private initializePatterns(): void {
|
||||
this.patterns = [
|
||||
{
|
||||
name: 'rapid-captcha-sequence',
|
||||
description: 'Multiple captchas in short timespan',
|
||||
weight: 8,
|
||||
detected: false,
|
||||
evidence: []
|
||||
},
|
||||
{
|
||||
name: 'high-error-rate',
|
||||
description: 'Excessive errors (>50% in last hour)',
|
||||
weight: 6,
|
||||
detected: false,
|
||||
evidence: []
|
||||
},
|
||||
{
|
||||
name: 'timeout-storm',
|
||||
description: 'Many consecutive timeouts',
|
||||
weight: 7,
|
||||
detected: false,
|
||||
evidence: []
|
||||
},
|
||||
{
|
||||
name: 'suspicious-timing',
|
||||
description: 'Activity at unusual hours or too consistent',
|
||||
weight: 5,
|
||||
detected: false,
|
||||
evidence: []
|
||||
},
|
||||
{
|
||||
name: 'new-account-aggressive',
|
||||
description: 'Aggressive activity on young account',
|
||||
weight: 9,
|
||||
detected: false,
|
||||
evidence: []
|
||||
},
|
||||
{
|
||||
name: 'proxy-flagged',
|
||||
description: 'Proxy showing signs of blacklisting',
|
||||
weight: 7,
|
||||
detected: false,
|
||||
evidence: []
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
/**
|
||||
* Detect patterns in recent events
|
||||
*/
|
||||
private detectPatterns(events: RiskEvent[], accountAgeDays: number, totalRuns: number): void {
|
||||
// Reset detection
|
||||
for (const p of this.patterns) {
|
||||
p.detected = false
|
||||
p.evidence = []
|
||||
}
|
||||
|
||||
const captchaEvents = events.filter(e => e.type === 'captcha')
|
||||
const errorEvents = events.filter(e => e.type === 'error')
|
||||
const timeoutEvents = events.filter(e => e.type === 'timeout')
|
||||
|
||||
// Pattern 1: Rapid captcha sequence
|
||||
if (captchaEvents.length >= 3) {
|
||||
const timeSpan = (events[events.length - 1]?.timestamp || 0) - (events[0]?.timestamp || 0)
|
||||
if (timeSpan < 1800000) { // 30 min
|
||||
const p = this.patterns.find(pat => pat.name === 'rapid-captcha-sequence')
|
||||
if (p) {
|
||||
p.detected = true
|
||||
p.evidence.push(`${captchaEvents.length} captchas in ${Math.round(timeSpan / 60000)}min`)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Pattern 2: High error rate
|
||||
const errorRate = errorEvents.length / Math.max(1, events.length)
|
||||
if (errorRate > 0.5) {
|
||||
const p = this.patterns.find(pat => pat.name === 'high-error-rate')
|
||||
if (p) {
|
||||
p.detected = true
|
||||
p.evidence.push(`Error rate: ${(errorRate * 100).toFixed(1)}%`)
|
||||
}
|
||||
}
|
||||
|
||||
// Pattern 3: Timeout storm
|
||||
if (timeoutEvents.length >= 5) {
|
||||
const p = this.patterns.find(pat => pat.name === 'timeout-storm')
|
||||
if (p) {
|
||||
p.detected = true
|
||||
p.evidence.push(`${timeoutEvents.length} timeouts detected`)
|
||||
}
|
||||
}
|
||||
|
||||
// Pattern 4: Suspicious timing (all events within same hour)
|
||||
if (events.length > 5) {
|
||||
const hours = new Set(events.map(e => new Date(e.timestamp).getHours()))
|
||||
if (hours.size === 1) {
|
||||
const p = this.patterns.find(pat => pat.name === 'suspicious-timing')
|
||||
if (p) {
|
||||
p.detected = true
|
||||
p.evidence.push('All activity in same hour of day')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Pattern 5: New account aggressive
|
||||
if (accountAgeDays < 7 && totalRuns > 10) {
|
||||
const p = this.patterns.find(pat => pat.name === 'new-account-aggressive')
|
||||
if (p) {
|
||||
p.detected = true
|
||||
p.evidence.push(`Account ${accountAgeDays} days old with ${totalRuns} runs`)
|
||||
}
|
||||
}
|
||||
|
||||
// Pattern 6: Proxy flagged (heuristic: many ban hints)
|
||||
const banHints = events.filter(e => e.type === 'ban_hint')
|
||||
if (banHints.length >= 2) {
|
||||
const p = this.patterns.find(pat => pat.name === 'proxy-flagged')
|
||||
if (p) {
|
||||
p.detected = true
|
||||
p.evidence.push(`${banHints.length} ban hints detected`)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate feature-based risk score (ML-style)
|
||||
*/
|
||||
private calculateFeatureScore(events: RiskEvent[], accountAgeDays: number, totalRuns: number): number {
|
||||
let score = 0
|
||||
|
||||
// Feature 1: Event density (events per minute)
|
||||
const eventDensity = events.length / 60
|
||||
if (eventDensity > 0.5) score += 10
|
||||
else if (eventDensity > 0.2) score += 5
|
||||
|
||||
// Feature 2: Account age risk
|
||||
if (accountAgeDays < 3) score += 15
|
||||
else if (accountAgeDays < 7) score += 10
|
||||
else if (accountAgeDays < 14) score += 5
|
||||
|
||||
// Feature 3: Run frequency risk
|
||||
const runsPerDay = totalRuns / Math.max(1, accountAgeDays)
|
||||
if (runsPerDay > 3) score += 12
|
||||
else if (runsPerDay > 2) score += 6
|
||||
|
||||
// Feature 4: Severity distribution
|
||||
const highSeverityEvents = events.filter(e => e.severity >= 7)
|
||||
if (highSeverityEvents.length > 3) score += 15
|
||||
else if (highSeverityEvents.length > 1) score += 8
|
||||
|
||||
return score
|
||||
}
|
||||
|
||||
/**
|
||||
* Learn from historical data
|
||||
*/
|
||||
private getHistoricalAdjustment(email: string): number {
|
||||
const accountHistory = this.history.filter(h => h.email === email)
|
||||
if (accountHistory.length === 0) return 0
|
||||
|
||||
const bannedCount = accountHistory.filter(h => h.banned).length
|
||||
const banRate = bannedCount / accountHistory.length
|
||||
|
||||
// If this account has high ban history, increase risk
|
||||
if (banRate > 0.3) return 20
|
||||
if (banRate > 0.1) return 10
|
||||
|
||||
// If clean history, slight bonus
|
||||
if (accountHistory.length > 5 && banRate === 0) return -5
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate prediction confidence
|
||||
*/
|
||||
private calculateConfidence(eventCount: number, historyCount: number): number {
|
||||
let confidence = 0.5
|
||||
|
||||
// More events = higher confidence
|
||||
if (eventCount > 20) confidence += 0.2
|
||||
else if (eventCount > 10) confidence += 0.1
|
||||
|
||||
// More historical data = higher confidence
|
||||
if (historyCount > 50) confidence += 0.2
|
||||
else if (historyCount > 20) confidence += 0.1
|
||||
|
||||
return Math.min(1.0, confidence)
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate human-readable recommendation
|
||||
*/
|
||||
private generateRecommendation(score: number): string {
|
||||
if (score < 20) {
|
||||
return 'Safe to proceed. Risk is minimal.'
|
||||
} else if (score < 40) {
|
||||
return 'Low risk detected. Monitor for issues but safe to continue.'
|
||||
} else if (score < 60) {
|
||||
return 'Moderate risk. Consider increasing delays and reviewing patterns.'
|
||||
} else if (score < 80) {
|
||||
return 'High risk! Strongly recommend pausing automation for 24-48 hours.'
|
||||
} else {
|
||||
return 'CRITICAL RISK! Stop all automation immediately. Manual review required.'
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate actionable preventive steps
|
||||
*/
|
||||
private generatePreventiveActions(patterns: BanPattern[]): string[] {
|
||||
const actions: string[] = []
|
||||
|
||||
if (patterns.some(p => p.name === 'rapid-captcha-sequence')) {
|
||||
actions.push('Increase search delays to 3-5 minutes minimum')
|
||||
actions.push('Enable longer cool-down periods between activities')
|
||||
}
|
||||
|
||||
if (patterns.some(p => p.name === 'high-error-rate')) {
|
||||
actions.push('Check proxy connectivity and health')
|
||||
actions.push('Verify User-Agent and fingerprint configuration')
|
||||
}
|
||||
|
||||
if (patterns.some(p => p.name === 'new-account-aggressive')) {
|
||||
actions.push('Slow down activity on new accounts (max 1 run per day for first week)')
|
||||
actions.push('Allow account to age naturally before heavy automation')
|
||||
}
|
||||
|
||||
if (patterns.some(p => p.name === 'proxy-flagged')) {
|
||||
actions.push('Rotate to different proxy immediately')
|
||||
actions.push('Test proxy manually before resuming')
|
||||
}
|
||||
|
||||
if (patterns.some(p => p.name === 'suspicious-timing')) {
|
||||
actions.push('Randomize execution times across different hours')
|
||||
actions.push('Enable humanization.allowedWindows with varied schedules')
|
||||
}
|
||||
|
||||
if (actions.length === 0) {
|
||||
actions.push('Continue monitoring but no immediate action needed')
|
||||
}
|
||||
|
||||
return actions
|
||||
}
|
||||
|
||||
/**
|
||||
* Export historical data for analysis
|
||||
*/
|
||||
exportHistory(): HistoricalData[] {
|
||||
return [...this.history]
|
||||
}
|
||||
|
||||
/**
|
||||
* Import historical data (for persistence)
|
||||
*/
|
||||
importHistory(data: HistoricalData[]): void {
|
||||
this.history = data.slice(-100) // Keep last 100
|
||||
}
|
||||
}
|
||||
112
src/util/ConclusionWebhook.ts
Normal file
112
src/util/ConclusionWebhook.ts
Normal file
@@ -0,0 +1,112 @@
|
||||
import axios from 'axios'
|
||||
import { Config } from '../interface/Config'
|
||||
import { Ntfy } from './Ntfy'
|
||||
import { log } from './Logger'
|
||||
|
||||
/** A single name/value field inside a Discord embed. */
interface DiscordField {
    name: string
    value: string
    inline?: boolean
}

/** The subset of Discord's embed structure used by this bot's notifications. */
interface DiscordEmbed {
    title?: string
    description?: string
    color?: number // decimal RGB, e.g. 0x0078D4
    fields?: DiscordField[]
    timestamp?: string // ISO-8601
    thumbnail?: {
        url: string
    }
    footer?: {
        text: string
        icon_url?: string
    }
}

/** Top-level JSON payload posted to a Discord webhook URL. */
interface WebhookPayload {
    username: string
    avatar_url: string
    embeds: DiscordEmbed[]
}
|
||||
|
||||
/**
|
||||
* Send a clean, structured Discord webhook notification
|
||||
*/
|
||||
export async function ConclusionWebhook(
|
||||
config: Config,
|
||||
title: string,
|
||||
description: string,
|
||||
fields?: DiscordField[],
|
||||
color?: number
|
||||
) {
|
||||
const hasConclusion = config.conclusionWebhook?.enabled && config.conclusionWebhook.url
|
||||
const hasWebhook = config.webhook?.enabled && config.webhook.url
|
||||
|
||||
if (!hasConclusion && !hasWebhook) return
|
||||
|
||||
const embed: DiscordEmbed = {
|
||||
title,
|
||||
description,
|
||||
color: color || 0x0078D4,
|
||||
timestamp: new Date().toISOString(),
|
||||
thumbnail: {
|
||||
url: 'https://media.discordapp.net/attachments/1430643658788438144/1430644205344133290/rewi-v1.png'
|
||||
}
|
||||
}
|
||||
|
||||
if (fields && fields.length > 0) {
|
||||
embed.fields = fields
|
||||
}
|
||||
|
||||
const payload: WebhookPayload = {
|
||||
username: 'MS Rewi 🎮',
|
||||
avatar_url: 'https://media.discordapp.net/attachments/1430643658788438144/1430644205344133290/rewi-v1.png',
|
||||
embeds: [embed]
|
||||
}
|
||||
|
||||
const postWebhook = async (url: string, label: string) => {
|
||||
const maxAttempts = 3
|
||||
let lastError: unknown = null
|
||||
|
||||
for (let attempt = 1; attempt <= maxAttempts; attempt++) {
|
||||
try {
|
||||
await axios.post(url, payload, {
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
timeout: 15000
|
||||
})
|
||||
log('main', 'WEBHOOK', `${label} notification sent successfully (attempt ${attempt})`)
|
||||
return
|
||||
} catch (error) {
|
||||
lastError = error
|
||||
if (attempt < maxAttempts) {
|
||||
// Exponential backoff: 1s, 2s, 4s
|
||||
const delayMs = 1000 * Math.pow(2, attempt - 1)
|
||||
await new Promise(resolve => setTimeout(resolve, delayMs))
|
||||
}
|
||||
}
|
||||
}
|
||||
log('main', 'WEBHOOK', `${label} failed after ${maxAttempts} attempts: ${lastError instanceof Error ? lastError.message : String(lastError)}`, 'error')
|
||||
}
|
||||
|
||||
const urls = new Set<string>()
|
||||
if (hasConclusion) urls.add(config.conclusionWebhook!.url)
|
||||
if (hasWebhook) urls.add(config.webhook!.url)
|
||||
|
||||
await Promise.all(
|
||||
Array.from(urls).map((url, index) => postWebhook(url, `webhook-${index + 1}`))
|
||||
)
|
||||
|
||||
// Optional NTFY notification
|
||||
if (config.ntfy?.enabled && config.ntfy.url && config.ntfy.topic) {
|
||||
const message = `${title}\n${description}${fields ? '\n\n' + fields.map(f => `${f.name}: ${f.value}`).join('\n') : ''}`
|
||||
const ntfyType = color === 0xFF0000 ? 'error' : color === 0xFFAA00 ? 'warn' : 'log'
|
||||
|
||||
try {
|
||||
await Ntfy(message, ntfyType)
|
||||
log('main', 'NTFY', 'Notification sent successfully')
|
||||
} catch (error) {
|
||||
log('main', 'NTFY', `Failed to send notification: ${error instanceof Error ? error.message : String(error)}`, 'error')
|
||||
}
|
||||
}
|
||||
}
|
||||
532
src/util/ConfigValidator.ts
Normal file
532
src/util/ConfigValidator.ts
Normal file
@@ -0,0 +1,532 @@
|
||||
import fs from 'fs'
|
||||
import { Config } from '../interface/Config'
|
||||
import { Account } from '../interface/Account'
|
||||
|
||||
/** A single finding produced by a validation pass. */
export interface ValidationIssue {
    // 'error' marks the configuration invalid; 'warning' and 'info' are advisory only
    severity: 'error' | 'warning' | 'info'
    // Dotted path of the offending setting (e.g. 'searchSettings.searchDelay.min')
    field: string
    // Human-readable explanation of the problem
    message: string
    // Optional hint describing how to fix the issue
    suggestion?: string
}

/** Aggregate outcome of validating one or more input files. */
export interface ValidationResult {
    // True only when no issue with severity 'error' was found
    valid: boolean
    // All findings, including non-blocking warnings and informational notes
    issues: ValidationIssue[]
}
|
||||
|
||||
/**
|
||||
* ConfigValidator performs intelligent validation of config.jsonc and accounts.json
|
||||
* before execution to catch common mistakes, conflicts, and security issues.
|
||||
*/
|
||||
export class ConfigValidator {
|
||||
/**
|
||||
* Validate the main config file
|
||||
*/
|
||||
static validateConfig(config: Config): ValidationResult {
|
||||
const issues: ValidationIssue[] = []
|
||||
|
||||
// Check baseURL
|
||||
if (!config.baseURL || !config.baseURL.startsWith('https://')) {
|
||||
issues.push({
|
||||
severity: 'error',
|
||||
field: 'baseURL',
|
||||
message: 'baseURL must be a valid HTTPS URL',
|
||||
suggestion: 'Use https://rewards.bing.com'
|
||||
})
|
||||
}
|
||||
|
||||
// Check sessionPath
|
||||
if (!config.sessionPath || config.sessionPath.trim() === '') {
|
||||
issues.push({
|
||||
severity: 'error',
|
||||
field: 'sessionPath',
|
||||
message: 'sessionPath cannot be empty'
|
||||
})
|
||||
}
|
||||
|
||||
// Check clusters
|
||||
if (config.clusters < 1) {
|
||||
issues.push({
|
||||
severity: 'error',
|
||||
field: 'clusters',
|
||||
message: 'clusters must be at least 1'
|
||||
})
|
||||
}
|
||||
if (config.clusters > 10) {
|
||||
issues.push({
|
||||
severity: 'warning',
|
||||
field: 'clusters',
|
||||
message: 'High cluster count may consume excessive resources',
|
||||
suggestion: 'Consider using 2-4 clusters for optimal performance'
|
||||
})
|
||||
}
|
||||
|
||||
// Check globalTimeout
|
||||
const timeout = this.parseTimeout(config.globalTimeout)
|
||||
if (timeout < 10000) {
|
||||
issues.push({
|
||||
severity: 'warning',
|
||||
field: 'globalTimeout',
|
||||
message: 'Very short timeout may cause frequent failures',
|
||||
suggestion: 'Use at least 15s for stability'
|
||||
})
|
||||
}
|
||||
if (timeout > 120000) {
|
||||
issues.push({
|
||||
severity: 'warning',
|
||||
field: 'globalTimeout',
|
||||
message: 'Very long timeout may slow down execution',
|
||||
suggestion: 'Use 30-60s for optimal balance'
|
||||
})
|
||||
}
|
||||
|
||||
// Check search settings
|
||||
if (config.searchSettings) {
|
||||
const searchDelay = config.searchSettings.searchDelay
|
||||
const minDelay = this.parseTimeout(searchDelay.min)
|
||||
const maxDelay = this.parseTimeout(searchDelay.max)
|
||||
|
||||
if (minDelay >= maxDelay) {
|
||||
issues.push({
|
||||
severity: 'error',
|
||||
field: 'searchSettings.searchDelay',
|
||||
message: 'min delay must be less than max delay'
|
||||
})
|
||||
}
|
||||
|
||||
if (minDelay < 10000) {
|
||||
issues.push({
|
||||
severity: 'warning',
|
||||
field: 'searchSettings.searchDelay.min',
|
||||
message: 'Very short search delays increase ban risk',
|
||||
suggestion: 'Use at least 30s between searches'
|
||||
})
|
||||
}
|
||||
|
||||
if (config.searchSettings.retryMobileSearchAmount > 5) {
|
||||
issues.push({
|
||||
severity: 'warning',
|
||||
field: 'searchSettings.retryMobileSearchAmount',
|
||||
message: 'Too many retries may waste time',
|
||||
suggestion: 'Use 2-3 retries maximum'
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Check humanization
|
||||
if (config.humanization) {
|
||||
if (config.humanization.enabled === false && config.humanization.stopOnBan === true) {
|
||||
issues.push({
|
||||
severity: 'warning',
|
||||
field: 'humanization',
|
||||
message: 'stopOnBan is enabled but humanization is disabled',
|
||||
suggestion: 'Enable humanization for better ban protection'
|
||||
})
|
||||
}
|
||||
|
||||
const actionDelay = config.humanization.actionDelay
|
||||
if (actionDelay) {
|
||||
const minAction = this.parseTimeout(actionDelay.min)
|
||||
const maxAction = this.parseTimeout(actionDelay.max)
|
||||
if (minAction >= maxAction) {
|
||||
issues.push({
|
||||
severity: 'error',
|
||||
field: 'humanization.actionDelay',
|
||||
message: 'min action delay must be less than max'
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
if (config.humanization.allowedWindows && config.humanization.allowedWindows.length > 0) {
|
||||
for (const window of config.humanization.allowedWindows) {
|
||||
if (!/^\d{2}:\d{2}-\d{2}:\d{2}$/.test(window)) {
|
||||
issues.push({
|
||||
severity: 'error',
|
||||
field: 'humanization.allowedWindows',
|
||||
message: `Invalid time window format: ${window}`,
|
||||
suggestion: 'Use format HH:mm-HH:mm (e.g., 09:00-17:00)'
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check proxy config
|
||||
if (config.proxy) {
|
||||
if (config.proxy.proxyGoogleTrends === false && config.proxy.proxyBingTerms === false) {
|
||||
issues.push({
|
||||
severity: 'info',
|
||||
field: 'proxy',
|
||||
message: 'All proxy options disabled - outbound requests will use direct connection'
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Check webhooks
|
||||
if (config.webhook?.enabled && (!config.webhook.url || config.webhook.url.trim() === '')) {
|
||||
issues.push({
|
||||
severity: 'error',
|
||||
field: 'webhook.url',
|
||||
message: 'Webhook enabled but URL is empty'
|
||||
})
|
||||
}
|
||||
|
||||
if (config.conclusionWebhook?.enabled && (!config.conclusionWebhook.url || config.conclusionWebhook.url.trim() === '')) {
|
||||
issues.push({
|
||||
severity: 'error',
|
||||
field: 'conclusionWebhook.url',
|
||||
message: 'Conclusion webhook enabled but URL is empty'
|
||||
})
|
||||
}
|
||||
|
||||
// Check ntfy
|
||||
if (config.ntfy?.enabled) {
|
||||
if (!config.ntfy.url || config.ntfy.url.trim() === '') {
|
||||
issues.push({
|
||||
severity: 'error',
|
||||
field: 'ntfy.url',
|
||||
message: 'NTFY enabled but URL is empty'
|
||||
})
|
||||
}
|
||||
if (!config.ntfy.topic || config.ntfy.topic.trim() === '') {
|
||||
issues.push({
|
||||
severity: 'error',
|
||||
field: 'ntfy.topic',
|
||||
message: 'NTFY enabled but topic is empty'
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Check schedule
|
||||
if (config.schedule?.enabled) {
|
||||
if (!config.schedule.timeZone) {
|
||||
issues.push({
|
||||
severity: 'warning',
|
||||
field: 'schedule.timeZone',
|
||||
message: 'No timeZone specified, defaulting to UTC',
|
||||
suggestion: 'Set your local timezone (e.g., America/New_York)'
|
||||
})
|
||||
}
|
||||
|
||||
const useAmPm = config.schedule.useAmPm
|
||||
const time12 = (config.schedule as unknown as Record<string, unknown>)['time12']
|
||||
const time24 = (config.schedule as unknown as Record<string, unknown>)['time24']
|
||||
|
||||
if (useAmPm === true && (!time12 || (typeof time12 === 'string' && time12.trim() === ''))) {
|
||||
issues.push({
|
||||
severity: 'error',
|
||||
field: 'schedule.time12',
|
||||
message: 'useAmPm is true but time12 is empty'
|
||||
})
|
||||
}
|
||||
if (useAmPm === false && (!time24 || (typeof time24 === 'string' && time24.trim() === ''))) {
|
||||
issues.push({
|
||||
severity: 'error',
|
||||
field: 'schedule.time24',
|
||||
message: 'useAmPm is false but time24 is empty'
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Check workers
|
||||
if (config.workers) {
|
||||
const allDisabled = !config.workers.doDailySet &&
|
||||
!config.workers.doMorePromotions &&
|
||||
!config.workers.doPunchCards &&
|
||||
!config.workers.doDesktopSearch &&
|
||||
!config.workers.doMobileSearch &&
|
||||
!config.workers.doDailyCheckIn &&
|
||||
!config.workers.doReadToEarn
|
||||
|
||||
if (allDisabled) {
|
||||
issues.push({
|
||||
severity: 'warning',
|
||||
field: 'workers',
|
||||
message: 'All workers are disabled - bot will not perform any tasks',
|
||||
suggestion: 'Enable at least one worker type'
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Check diagnostics
|
||||
if (config.diagnostics?.enabled) {
|
||||
const maxPerRun = config.diagnostics.maxPerRun || 2
|
||||
if (maxPerRun > 20) {
|
||||
issues.push({
|
||||
severity: 'warning',
|
||||
field: 'diagnostics.maxPerRun',
|
||||
message: 'Very high maxPerRun may fill disk quickly'
|
||||
})
|
||||
}
|
||||
|
||||
const retention = config.diagnostics.retentionDays || 7
|
||||
if (retention > 90) {
|
||||
issues.push({
|
||||
severity: 'info',
|
||||
field: 'diagnostics.retentionDays',
|
||||
message: 'Long retention period - monitor disk usage'
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const valid = !issues.some(i => i.severity === 'error')
|
||||
return { valid, issues }
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate accounts.json
|
||||
*/
|
||||
static validateAccounts(accounts: Account[]): ValidationResult {
|
||||
const issues: ValidationIssue[] = []
|
||||
|
||||
if (accounts.length === 0) {
|
||||
issues.push({
|
||||
severity: 'error',
|
||||
field: 'accounts',
|
||||
message: 'No accounts found in accounts.json'
|
||||
})
|
||||
return { valid: false, issues }
|
||||
}
|
||||
|
||||
const seenEmails = new Set<string>()
|
||||
const seenProxies = new Map<string, string[]>() // proxy -> [emails]
|
||||
|
||||
for (let i = 0; i < accounts.length; i++) {
|
||||
const acc = accounts[i]
|
||||
const prefix = `accounts[${i}]`
|
||||
|
||||
if (!acc) continue
|
||||
|
||||
// Check email
|
||||
if (!acc.email || acc.email.trim() === '') {
|
||||
issues.push({
|
||||
severity: 'error',
|
||||
field: `${prefix}.email`,
|
||||
message: 'Account email is empty'
|
||||
})
|
||||
} else {
|
||||
if (seenEmails.has(acc.email)) {
|
||||
issues.push({
|
||||
severity: 'error',
|
||||
field: `${prefix}.email`,
|
||||
message: `Duplicate email: ${acc.email}`
|
||||
})
|
||||
}
|
||||
seenEmails.add(acc.email)
|
||||
|
||||
if (!/@/.test(acc.email)) {
|
||||
issues.push({
|
||||
severity: 'error',
|
||||
field: `${prefix}.email`,
|
||||
message: 'Invalid email format'
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Check password
|
||||
if (!acc.password || acc.password.trim() === '') {
|
||||
issues.push({
|
||||
severity: 'error',
|
||||
field: `${prefix}.password`,
|
||||
message: 'Account password is empty'
|
||||
})
|
||||
} else if (acc.password.length < 8) {
|
||||
issues.push({
|
||||
severity: 'warning',
|
||||
field: `${prefix}.password`,
|
||||
message: 'Very short password - verify it\'s correct'
|
||||
})
|
||||
}
|
||||
|
||||
// Check proxy
|
||||
if (acc.proxy) {
|
||||
const proxyUrl = acc.proxy.url
|
||||
if (proxyUrl && proxyUrl.trim() !== '') {
|
||||
if (!acc.proxy.port) {
|
||||
issues.push({
|
||||
severity: 'error',
|
||||
field: `${prefix}.proxy.port`,
|
||||
message: 'Proxy URL specified but port is missing'
|
||||
})
|
||||
}
|
||||
|
||||
// Track proxy reuse
|
||||
const proxyKey = `${proxyUrl}:${acc.proxy.port}`
|
||||
if (!seenProxies.has(proxyKey)) {
|
||||
seenProxies.set(proxyKey, [])
|
||||
}
|
||||
seenProxies.get(proxyKey)?.push(acc.email)
|
||||
}
|
||||
}
|
||||
|
||||
// Check TOTP
|
||||
if (acc.totp && acc.totp.trim() !== '') {
|
||||
if (acc.totp.length < 16) {
|
||||
issues.push({
|
||||
severity: 'warning',
|
||||
field: `${prefix}.totp`,
|
||||
message: 'TOTP secret seems too short - verify it\'s correct'
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Warn about excessive proxy reuse
|
||||
for (const [proxyKey, emails] of seenProxies) {
|
||||
if (emails.length > 3) {
|
||||
issues.push({
|
||||
severity: 'warning',
|
||||
field: 'accounts.proxy',
|
||||
message: `Proxy ${proxyKey} used by ${emails.length} accounts - may trigger rate limits`,
|
||||
suggestion: 'Use different proxies per account for better safety'
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const valid = !issues.some(i => i.severity === 'error')
|
||||
return { valid, issues }
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate both config and accounts together (cross-checks)
|
||||
*/
|
||||
static validateAll(config: Config, accounts: Account[]): ValidationResult {
|
||||
const configResult = this.validateConfig(config)
|
||||
const accountsResult = this.validateAccounts(accounts)
|
||||
|
||||
const issues = [...configResult.issues, ...accountsResult.issues]
|
||||
|
||||
// Cross-validation: clusters vs accounts
|
||||
if (accounts.length > 0 && config.clusters > accounts.length) {
|
||||
issues.push({
|
||||
severity: 'info',
|
||||
field: 'clusters',
|
||||
message: `${config.clusters} clusters configured but only ${accounts.length} account(s)`,
|
||||
suggestion: 'Reduce clusters to match account count for efficiency'
|
||||
})
|
||||
}
|
||||
|
||||
// Cross-validation: parallel mode with single account
|
||||
if (config.parallel && accounts.length === 1) {
|
||||
issues.push({
|
||||
severity: 'info',
|
||||
field: 'parallel',
|
||||
message: 'Parallel mode enabled with single account has no effect',
|
||||
suggestion: 'Disable parallel mode or add more accounts'
|
||||
})
|
||||
}
|
||||
|
||||
const valid = !issues.some(i => i.severity === 'error')
|
||||
return { valid, issues }
|
||||
}
|
||||
|
||||
/**
|
||||
* Load and validate from file paths
|
||||
*/
|
||||
static validateFromFiles(configPath: string, accountsPath: string): ValidationResult {
|
||||
try {
|
||||
if (!fs.existsSync(configPath)) {
|
||||
return {
|
||||
valid: false,
|
||||
issues: [{
|
||||
severity: 'error',
|
||||
field: 'config',
|
||||
message: `Config file not found: ${configPath}`
|
||||
}]
|
||||
}
|
||||
}
|
||||
|
||||
if (!fs.existsSync(accountsPath)) {
|
||||
return {
|
||||
valid: false,
|
||||
issues: [{
|
||||
severity: 'error',
|
||||
field: 'accounts',
|
||||
message: `Accounts file not found: ${accountsPath}`
|
||||
}]
|
||||
}
|
||||
}
|
||||
|
||||
const configRaw = fs.readFileSync(configPath, 'utf-8')
|
||||
const accountsRaw = fs.readFileSync(accountsPath, 'utf-8')
|
||||
|
||||
// Remove JSONC comments (basic approach)
|
||||
const configJson = configRaw.replace(/\/\*[\s\S]*?\*\/|\/\/.*/g, '')
|
||||
const config: Config = JSON.parse(configJson)
|
||||
const accounts: Account[] = JSON.parse(accountsRaw)
|
||||
|
||||
return this.validateAll(config, accounts)
|
||||
} catch (error) {
|
||||
return {
|
||||
valid: false,
|
||||
issues: [{
|
||||
severity: 'error',
|
||||
field: 'parse',
|
||||
message: `Failed to parse files: ${error instanceof Error ? error.message : String(error)}`
|
||||
}]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Print validation results to console with color
|
||||
* Note: This method intentionally uses console.log for CLI output formatting
|
||||
*/
|
||||
static printResults(result: ValidationResult): void {
|
||||
if (result.valid) {
|
||||
console.log('✅ Configuration validation passed\n')
|
||||
} else {
|
||||
console.log('❌ Configuration validation failed\n')
|
||||
}
|
||||
|
||||
if (result.issues.length === 0) {
|
||||
console.log('No issues found.')
|
||||
return
|
||||
}
|
||||
|
||||
const errors = result.issues.filter(i => i.severity === 'error')
|
||||
const warnings = result.issues.filter(i => i.severity === 'warning')
|
||||
const infos = result.issues.filter(i => i.severity === 'info')
|
||||
|
||||
if (errors.length > 0) {
|
||||
console.log(`\n🚫 ERRORS (${errors.length}):`)
|
||||
for (const issue of errors) {
|
||||
console.log(` ${issue.field}: ${issue.message}`)
|
||||
if (issue.suggestion) {
|
||||
console.log(` → ${issue.suggestion}`)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (warnings.length > 0) {
|
||||
console.log(`\n⚠️ WARNINGS (${warnings.length}):`)
|
||||
for (const issue of warnings) {
|
||||
console.log(` ${issue.field}: ${issue.message}`)
|
||||
if (issue.suggestion) {
|
||||
console.log(` → ${issue.suggestion}`)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (infos.length > 0) {
|
||||
console.log(`\nℹ️ INFO (${infos.length}):`)
|
||||
for (const issue of infos) {
|
||||
console.log(` ${issue.field}: ${issue.message}`)
|
||||
if (issue.suggestion) {
|
||||
console.log(` → ${issue.suggestion}`)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
console.log()
|
||||
}
|
||||
|
||||
private static parseTimeout(value: number | string): number {
|
||||
if (typeof value === 'number') return value
|
||||
const str = String(value).toLowerCase()
|
||||
if (str.endsWith('ms')) return parseInt(str, 10)
|
||||
if (str.endsWith('s')) return parseInt(str, 10) * 1000
|
||||
if (str.endsWith('min')) return parseInt(str, 10) * 60000
|
||||
return parseInt(str, 10) || 30000
|
||||
}
|
||||
}
|
||||
74
src/util/Diagnostics.ts
Normal file
74
src/util/Diagnostics.ts
Normal file
@@ -0,0 +1,74 @@
|
||||
import path from 'path'
|
||||
import fs from 'fs'
|
||||
import type { Page } from 'rebrowser-playwright'
|
||||
import type { MicrosoftRewardsBot } from '../index'
|
||||
|
||||
// 'security' captures document ban/lockout incidents and are always taken;
// 'default' captures respect the diagnostics configuration
export type DiagnosticsScope = 'default' | 'security'

/** Optional behaviour tweaks for a single diagnostics capture. */
export interface DiagnosticsOptions {
    // Capture category; defaults to 'default' when omitted
    scope?: DiagnosticsScope
    // When true, bypass the per-run capture slot accounting
    skipSlot?: boolean
    // When true, capture even if diagnostics are disabled in config
    force?: boolean
}
|
||||
|
||||
/**
 * Capture a screenshot and/or HTML snapshot of `page` for later debugging.
 *
 * Two scopes exist:
 *  - 'default': honours the diagnostics config (enabled flag, per-run slot
 *    budget, saveScreenshot/saveHtml switches) and writes time-stamped files
 *    into reports/<YYYY-MM-DD>/.
 *  - 'security': always captured regardless of config, both screenshot and
 *    HTML, into a fresh diagnostics/security-incidents/<timestamp>-<label>/
 *    directory.
 *
 * Never throws: every failure is downgraded to a warn log.
 */
export async function captureDiagnostics(bot: MicrosoftRewardsBot, page: Page, rawLabel: string, options?: DiagnosticsOptions): Promise<void> {
    try {
        const scope: DiagnosticsScope = options?.scope ?? 'default'
        const cfg = bot.config?.diagnostics ?? {}
        // Security captures bypass the enabled flag entirely
        const forceCapture = options?.force === true || scope === 'security'
        if (!forceCapture && cfg.enabled === false) return

        if (scope === 'default') {
            // Respect the per-run capture budget unless the caller opted out
            const maxPerRun = typeof cfg.maxPerRun === 'number' ? cfg.maxPerRun : 8
            if (!options?.skipSlot && !bot.tryReserveDiagSlot(maxPerRun)) return
        }

        // Security scope forces both artifacts on; otherwise config can disable each
        const saveScreenshot = scope === 'security' ? true : cfg.saveScreenshot !== false
        const saveHtml = scope === 'security' ? true : cfg.saveHtml !== false
        if (!saveScreenshot && !saveHtml) return

        // Sanitize the label for use in file names (max 64 chars)
        const safeLabel = rawLabel.replace(/[^a-z0-9-_]/gi, '_').slice(0, 64) || 'capture'
        const now = new Date()
        // HHmmss prefix used for 'default' scope file names
        const timestamp = `${String(now.getHours()).padStart(2, '0')}${String(now.getMinutes()).padStart(2, '0')}${String(now.getSeconds()).padStart(2, '0')}`

        let dir: string
        if (scope === 'security') {
            // Each security incident gets its own timestamped subdirectory
            const base = path.join(process.cwd(), 'diagnostics', 'security-incidents')
            fs.mkdirSync(base, { recursive: true })
            const sub = `${now.toISOString().replace(/[:.]/g, '-')}-${safeLabel}`
            dir = path.join(base, sub)
            fs.mkdirSync(dir, { recursive: true })
        } else {
            // Regular captures are grouped per calendar day
            const day = `${now.getFullYear()}-${String(now.getMonth() + 1).padStart(2, '0')}-${String(now.getDate()).padStart(2, '0')}`
            dir = path.join(process.cwd(), 'reports', day)
            fs.mkdirSync(dir, { recursive: true })
        }

        if (saveScreenshot) {
            const shotName = scope === 'security' ? 'page.png' : `${timestamp}_${safeLabel}.png`
            const shotPath = path.join(dir, shotName)
            // Screenshot failures are swallowed so the HTML capture can still run
            await page.screenshot({ path: shotPath }).catch(() => {})
            if (scope === 'security') {
                bot.log(bot.isMobile, 'DIAG', `Saved security screenshot to ${shotPath}`)
            } else {
                bot.log(bot.isMobile, 'DIAG', `Saved diagnostics screenshot to ${shotPath}`)
            }
        }

        if (saveHtml) {
            const htmlName = scope === 'security' ? 'page.html' : `${timestamp}_${safeLabel}.html`
            const htmlPath = path.join(dir, htmlName)
            try {
                const html = await page.content()
                await fs.promises.writeFile(htmlPath, html, 'utf-8')
                // Only security captures announce the HTML file in the log
                if (scope === 'security') {
                    bot.log(bot.isMobile, 'DIAG', `Saved security HTML to ${htmlPath}`)
                }
            } catch {
                /* ignore */
            }
        }
    } catch (error) {
        bot.log(bot.isMobile, 'DIAG', `Failed to capture diagnostics: ${error instanceof Error ? error.message : error}`, 'warn')
    }
}
|
||||
54
src/util/Humanizer.ts
Normal file
54
src/util/Humanizer.ts
Normal file
@@ -0,0 +1,54 @@
|
||||
import { Page } from 'rebrowser-playwright'
|
||||
import Util from './Utils'
|
||||
import type { ConfigHumanization } from '../interface/Config'
|
||||
|
||||
export class Humanizer {
|
||||
private util: Util
|
||||
private cfg: ConfigHumanization | undefined
|
||||
|
||||
constructor(util: Util, cfg?: ConfigHumanization) {
|
||||
this.util = util
|
||||
this.cfg = cfg
|
||||
}
|
||||
|
||||
async microGestures(page: Page): Promise<void> {
|
||||
if (this.cfg && this.cfg.enabled === false) return
|
||||
const moveProb = this.cfg?.gestureMoveProb ?? 0.4
|
||||
const scrollProb = this.cfg?.gestureScrollProb ?? 0.2
|
||||
try {
|
||||
if (Math.random() < moveProb) {
|
||||
const x = Math.floor(Math.random() * 40) + 5
|
||||
const y = Math.floor(Math.random() * 30) + 5
|
||||
await page.mouse.move(x, y, { steps: 2 }).catch(() => {})
|
||||
}
|
||||
if (Math.random() < scrollProb) {
|
||||
const dy = (Math.random() < 0.5 ? 1 : -1) * (Math.floor(Math.random() * 150) + 50)
|
||||
await page.mouse.wheel(0, dy).catch(() => {})
|
||||
}
|
||||
} catch {/* noop */}
|
||||
}
|
||||
|
||||
async actionPause(): Promise<void> {
|
||||
if (this.cfg && this.cfg.enabled === false) return
|
||||
const defMin = 150
|
||||
const defMax = 450
|
||||
let min = defMin
|
||||
let max = defMax
|
||||
if (this.cfg?.actionDelay) {
|
||||
const parse = (v: number | string) => {
|
||||
if (typeof v === 'number') return v
|
||||
try {
|
||||
const n = this.util.stringToMs(String(v))
|
||||
return Math.max(0, Math.min(n, 10_000))
|
||||
} catch { return defMin }
|
||||
}
|
||||
min = parse(this.cfg.actionDelay.min)
|
||||
max = parse(this.cfg.actionDelay.max)
|
||||
if (min > max) [min, max] = [max, min]
|
||||
max = Math.min(max, 5_000)
|
||||
}
|
||||
await this.util.wait(this.util.randomNumber(min, max))
|
||||
}
|
||||
}
|
||||
|
||||
export default Humanizer
|
||||
104
src/util/JobState.ts
Normal file
104
src/util/JobState.ts
Normal file
@@ -0,0 +1,104 @@
|
||||
import fs from 'fs'
|
||||
import path from 'path'
|
||||
import type { Config } from '../interface/Config'
|
||||
|
||||
// Metadata recorded when an account finishes its run for a given day
type AccountCompletionMeta = {
    runId?: string
    // ISO timestamp of when the account was marked complete
    completedAt: string
    totalCollected?: number
    banned?: boolean
    errors?: number
}

// Per-day progress for one account
type DayState = {
    // IDs of offers already handled that day (idempotency guard for re-runs)
    doneOfferIds: string[]
    accountCompleted?: boolean
    accountMeta?: AccountCompletionMeta
}

// Shape of the on-disk per-account state file, keyed by day identifier string
type FileState = {
    days: Record<string, DayState>
}
|
||||
|
||||
export class JobState {
|
||||
private baseDir: string
|
||||
|
||||
constructor(cfg: Config) {
|
||||
const dir = cfg.jobState?.dir || path.join(process.cwd(), cfg.sessionPath, 'job-state')
|
||||
this.baseDir = dir
|
||||
if (!fs.existsSync(this.baseDir)) fs.mkdirSync(this.baseDir, { recursive: true })
|
||||
}
|
||||
|
||||
private fileFor(email: string): string {
|
||||
const safe = email.replace(/[^a-z0-9._-]/gi, '_')
|
||||
return path.join(this.baseDir, `${safe}.json`)
|
||||
}
|
||||
|
||||
private load(email: string): FileState {
|
||||
const file = this.fileFor(email)
|
||||
if (!fs.existsSync(file)) return { days: {} }
|
||||
try {
|
||||
const raw = fs.readFileSync(file, 'utf-8')
|
||||
const parsed = JSON.parse(raw)
|
||||
return parsed && typeof parsed === 'object' && parsed.days ? parsed as FileState : { days: {} }
|
||||
} catch { return { days: {} } }
|
||||
}
|
||||
|
||||
private save(email: string, state: FileState): void {
|
||||
const file = this.fileFor(email)
|
||||
fs.writeFileSync(file, JSON.stringify(state, null, 2), 'utf-8')
|
||||
}
|
||||
|
||||
isDone(email: string, day: string, offerId: string): boolean {
|
||||
const st = this.load(email)
|
||||
const d = st.days[day]
|
||||
if (!d) return false
|
||||
return d.doneOfferIds.includes(offerId)
|
||||
}
|
||||
|
||||
markDone(email: string, day: string, offerId: string): void {
|
||||
const st = this.load(email)
|
||||
if (!st.days[day]) st.days[day] = { doneOfferIds: [] }
|
||||
const d = st.days[day]
|
||||
if (!d.doneOfferIds.includes(offerId)) d.doneOfferIds.push(offerId)
|
||||
this.save(email, st)
|
||||
}
|
||||
|
||||
isAccountComplete(email: string, day: string): boolean {
|
||||
const st = this.load(email)
|
||||
const d = st.days[day]
|
||||
return d?.accountCompleted === true
|
||||
}
|
||||
|
||||
markAccountComplete(
|
||||
email: string,
|
||||
day: string,
|
||||
meta?: { runId?: string; totalCollected?: number; banned?: boolean; errors?: number }
|
||||
): void {
|
||||
const st = this.load(email)
|
||||
if (!st.days[day]) st.days[day] = { doneOfferIds: [] }
|
||||
const d = st.days[day]
|
||||
d.accountCompleted = true
|
||||
d.accountMeta = {
|
||||
completedAt: new Date().toISOString(),
|
||||
runId: meta?.runId,
|
||||
totalCollected: meta?.totalCollected,
|
||||
banned: meta?.banned ?? false,
|
||||
errors: meta?.errors ?? 0
|
||||
}
|
||||
this.save(email, st)
|
||||
}
|
||||
|
||||
clearAccountComplete(email: string, day: string): void {
|
||||
const st = this.load(email)
|
||||
const d = st.days[day]
|
||||
if (!d) return
|
||||
if (d.accountCompleted || d.accountMeta) {
|
||||
delete d.accountCompleted
|
||||
delete d.accountMeta
|
||||
this.save(email, st)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default JobState
|
||||
436
src/util/Load.ts
Normal file
436
src/util/Load.ts
Normal file
@@ -0,0 +1,436 @@
|
||||
import { BrowserContext, Cookie } from 'rebrowser-playwright'
|
||||
import { BrowserFingerprintWithHeaders } from 'fingerprint-generator'
|
||||
import fs from 'fs'
|
||||
import path from 'path'
|
||||
|
||||
|
||||
import { Account } from '../interface/Account'
|
||||
import { Config, ConfigSaveFingerprint } from '../interface/Config'
|
||||
|
||||
// Module-level cache of the parsed config and the path it was loaded from
// (NOTE(review): presumably populated by the config loader on first read —
// confirm against the loading function in this module).
let configCache: Config
let configSourcePath = ''
|
||||
|
||||
// Basic JSON comment stripper (supports // line and /* block */ comments while preserving strings)
|
||||
function stripJsonComments(input: string): string {
|
||||
let out = ''
|
||||
let inString = false
|
||||
let stringChar = ''
|
||||
let inLine = false
|
||||
let inBlock = false
|
||||
for (let i = 0; i < input.length; i++) {
|
||||
const ch = input[i]!
|
||||
const next = input[i + 1]
|
||||
if (inLine) {
|
||||
if (ch === '\n' || ch === '\r') {
|
||||
inLine = false
|
||||
out += ch
|
||||
}
|
||||
continue
|
||||
}
|
||||
if (inBlock) {
|
||||
if (ch === '*' && next === '/') {
|
||||
inBlock = false
|
||||
i++
|
||||
}
|
||||
continue
|
||||
}
|
||||
if (inString) {
|
||||
out += ch
|
||||
if (ch === '\\') { // escape next char
|
||||
i++
|
||||
if (i < input.length) out += input[i]
|
||||
continue
|
||||
}
|
||||
if (ch === stringChar) {
|
||||
inString = false
|
||||
}
|
||||
continue
|
||||
}
|
||||
if (ch === '"' || ch === '\'') {
|
||||
inString = true
|
||||
stringChar = ch
|
||||
out += ch
|
||||
continue
|
||||
}
|
||||
if (ch === '/' && next === '/') {
|
||||
inLine = true
|
||||
i++
|
||||
continue
|
||||
}
|
||||
if (ch === '/' && next === '*') {
|
||||
inBlock = true
|
||||
i++
|
||||
continue
|
||||
}
|
||||
out += ch
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
// Normalize both legacy (flat) and new (nested) config schemas into the flat Config interface
|
||||
function normalizeConfig(raw: unknown): Config {
|
||||
// Using any here is necessary to support both legacy flat config and new nested config structures
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
const n = (raw || {}) as any
|
||||
|
||||
// Browser / execution
|
||||
const headless = n.browser?.headless ?? n.headless ?? false
|
||||
const globalTimeout = n.browser?.globalTimeout ?? n.globalTimeout ?? '30s'
|
||||
const parallel = n.execution?.parallel ?? n.parallel ?? false
|
||||
const runOnZeroPoints = n.execution?.runOnZeroPoints ?? n.runOnZeroPoints ?? false
|
||||
const clusters = n.execution?.clusters ?? n.clusters ?? 1
|
||||
const passesPerRun = n.execution?.passesPerRun ?? n.passesPerRun
|
||||
|
||||
// Search
|
||||
const useLocalQueries = n.search?.useLocalQueries ?? n.searchOnBingLocalQueries ?? false
|
||||
const searchSettingsSrc = n.search?.settings ?? n.searchSettings ?? {}
|
||||
const delaySrc = searchSettingsSrc.delay ?? searchSettingsSrc.searchDelay ?? { min: '3min', max: '5min' }
|
||||
const searchSettings = {
|
||||
useGeoLocaleQueries: !!(searchSettingsSrc.useGeoLocaleQueries ?? false),
|
||||
scrollRandomResults: !!(searchSettingsSrc.scrollRandomResults ?? false),
|
||||
clickRandomResults: !!(searchSettingsSrc.clickRandomResults ?? false),
|
||||
retryMobileSearchAmount: Number(searchSettingsSrc.retryMobileSearchAmount ?? 2),
|
||||
searchDelay: {
|
||||
min: delaySrc.min ?? '3min',
|
||||
max: delaySrc.max ?? '5min'
|
||||
},
|
||||
localFallbackCount: Number(searchSettingsSrc.localFallbackCount ?? 25),
|
||||
extraFallbackRetries: Number(searchSettingsSrc.extraFallbackRetries ?? 1)
|
||||
}
|
||||
|
||||
// Workers
|
||||
const workers = n.workers ?? {
|
||||
doDailySet: true,
|
||||
doMorePromotions: true,
|
||||
doPunchCards: true,
|
||||
doDesktopSearch: true,
|
||||
doMobileSearch: true,
|
||||
doDailyCheckIn: true,
|
||||
doReadToEarn: true,
|
||||
bundleDailySetWithSearch: false
|
||||
}
|
||||
// Ensure missing flag gets a default
|
||||
if (typeof workers.bundleDailySetWithSearch !== 'boolean') workers.bundleDailySetWithSearch = false
|
||||
|
||||
// Logging
|
||||
const logging = n.logging ?? {}
|
||||
const logExcludeFunc = Array.isArray(logging.excludeFunc) ? logging.excludeFunc : (n.logExcludeFunc ?? [])
|
||||
const webhookLogExcludeFunc = Array.isArray(logging.webhookExcludeFunc) ? logging.webhookExcludeFunc : (n.webhookLogExcludeFunc ?? [])
|
||||
|
||||
// Notifications
|
||||
const notifications = n.notifications ?? {}
|
||||
const webhook = notifications.webhook ?? n.webhook ?? { enabled: false, url: '' }
|
||||
const conclusionWebhook = notifications.conclusionWebhook ?? n.conclusionWebhook ?? { enabled: false, url: '' }
|
||||
const ntfy = notifications.ntfy ?? n.ntfy ?? { enabled: false, url: '', topic: '', authToken: '' }
|
||||
|
||||
// Buy Mode
|
||||
const buyMode = n.buyMode ?? {}
|
||||
const buyModeEnabled = typeof buyMode.enabled === 'boolean' ? buyMode.enabled : false
|
||||
const buyModeMax = typeof buyMode.maxMinutes === 'number' ? buyMode.maxMinutes : 45
|
||||
|
||||
// Fingerprinting
|
||||
const saveFingerprint = (n.fingerprinting?.saveFingerprint ?? n.saveFingerprint) ?? { mobile: false, desktop: false }
|
||||
|
||||
// Humanization defaults (single on/off)
|
||||
if (!n.humanization) n.humanization = {}
|
||||
if (typeof n.humanization.enabled !== 'boolean') n.humanization.enabled = true
|
||||
if (typeof n.humanization.stopOnBan !== 'boolean') n.humanization.stopOnBan = false
|
||||
if (typeof n.humanization.immediateBanAlert !== 'boolean') n.humanization.immediateBanAlert = true
|
||||
if (typeof n.humanization.randomOffDaysPerWeek !== 'number') {
|
||||
n.humanization.randomOffDaysPerWeek = 1
|
||||
}
|
||||
// Strong default gestures when enabled (explicit values still win)
|
||||
if (typeof n.humanization.gestureMoveProb !== 'number') {
|
||||
n.humanization.gestureMoveProb = !n.humanization.enabled ? 0 : 0.5
|
||||
}
|
||||
if (typeof n.humanization.gestureScrollProb !== 'number') {
|
||||
n.humanization.gestureScrollProb = !n.humanization.enabled ? 0 : 0.25
|
||||
}
|
||||
|
||||
// Vacation mode (monthly contiguous off-days)
|
||||
if (!n.vacation) n.vacation = {}
|
||||
if (typeof n.vacation.enabled !== 'boolean') n.vacation.enabled = false
|
||||
const vMin = Number(n.vacation.minDays)
|
||||
const vMax = Number(n.vacation.maxDays)
|
||||
n.vacation.minDays = isFinite(vMin) && vMin > 0 ? Math.floor(vMin) : 3
|
||||
n.vacation.maxDays = isFinite(vMax) && vMax > 0 ? Math.floor(vMax) : 5
|
||||
if (n.vacation.maxDays < n.vacation.minDays) {
|
||||
const t = n.vacation.minDays; n.vacation.minDays = n.vacation.maxDays; n.vacation.maxDays = t
|
||||
}
|
||||
|
||||
const riskRaw = (n.riskManagement ?? {}) as Record<string, unknown>
|
||||
const hasRiskCfg = Object.keys(riskRaw).length > 0
|
||||
const riskManagement = hasRiskCfg ? {
|
||||
enabled: riskRaw.enabled === true,
|
||||
autoAdjustDelays: riskRaw.autoAdjustDelays !== false,
|
||||
stopOnCritical: riskRaw.stopOnCritical === true,
|
||||
banPrediction: riskRaw.banPrediction === true,
|
||||
riskThreshold: typeof riskRaw.riskThreshold === 'number' ? riskRaw.riskThreshold : undefined
|
||||
} : undefined
|
||||
|
||||
const analyticsRaw = (n.analytics ?? {}) as Record<string, unknown>
|
||||
const hasAnalyticsCfg = Object.keys(analyticsRaw).length > 0
|
||||
const analytics = hasAnalyticsCfg ? {
|
||||
enabled: analyticsRaw.enabled === true,
|
||||
retentionDays: typeof analyticsRaw.retentionDays === 'number' ? analyticsRaw.retentionDays : undefined,
|
||||
exportMarkdown: analyticsRaw.exportMarkdown === true,
|
||||
webhookSummary: analyticsRaw.webhookSummary === true
|
||||
} : undefined
|
||||
|
||||
const queryDiversityRaw = (n.queryDiversity ?? {}) as Record<string, unknown>
|
||||
const hasQueryCfg = Object.keys(queryDiversityRaw).length > 0
|
||||
const queryDiversity = hasQueryCfg ? {
|
||||
enabled: queryDiversityRaw.enabled === true,
|
||||
sources: Array.isArray(queryDiversityRaw.sources) && queryDiversityRaw.sources.length
|
||||
? (queryDiversityRaw.sources.filter((s: unknown) => typeof s === 'string') as Array<'google-trends' | 'reddit' | 'news' | 'wikipedia' | 'local-fallback'>)
|
||||
: undefined,
|
||||
maxQueriesPerSource: typeof queryDiversityRaw.maxQueriesPerSource === 'number' ? queryDiversityRaw.maxQueriesPerSource : undefined,
|
||||
cacheMinutes: typeof queryDiversityRaw.cacheMinutes === 'number' ? queryDiversityRaw.cacheMinutes : undefined
|
||||
} : undefined
|
||||
|
||||
const dryRun = n.dryRun === true
|
||||
|
||||
const jobStateRaw = (n.jobState ?? {}) as Record<string, unknown>
|
||||
const jobState = {
|
||||
enabled: jobStateRaw.enabled !== false,
|
||||
dir: typeof jobStateRaw.dir === 'string' ? jobStateRaw.dir : undefined,
|
||||
skipCompletedAccounts: jobStateRaw.skipCompletedAccounts !== false
|
||||
}
|
||||
|
||||
const cfg: Config = {
|
||||
baseURL: n.baseURL ?? 'https://rewards.bing.com',
|
||||
sessionPath: n.sessionPath ?? 'sessions',
|
||||
headless,
|
||||
parallel,
|
||||
runOnZeroPoints,
|
||||
clusters,
|
||||
saveFingerprint,
|
||||
workers,
|
||||
searchOnBingLocalQueries: !!useLocalQueries,
|
||||
globalTimeout,
|
||||
searchSettings,
|
||||
humanization: n.humanization,
|
||||
retryPolicy: n.retryPolicy,
|
||||
jobState,
|
||||
logExcludeFunc,
|
||||
webhookLogExcludeFunc,
|
||||
logging, // retain full logging object for live webhook usage
|
||||
proxy: n.proxy ?? { proxyGoogleTrends: true, proxyBingTerms: true },
|
||||
webhook,
|
||||
conclusionWebhook,
|
||||
ntfy,
|
||||
diagnostics: n.diagnostics,
|
||||
update: n.update,
|
||||
schedule: n.schedule,
|
||||
passesPerRun: passesPerRun,
|
||||
vacation: n.vacation,
|
||||
buyMode: { enabled: buyModeEnabled, maxMinutes: buyModeMax },
|
||||
crashRecovery: n.crashRecovery || {},
|
||||
riskManagement,
|
||||
analytics,
|
||||
dryRun,
|
||||
queryDiversity
|
||||
}
|
||||
|
||||
return cfg
|
||||
}
|
||||
|
||||
export function loadAccounts(): Account[] {
|
||||
try {
|
||||
// 1) CLI dev override
|
||||
let file = 'accounts.json'
|
||||
if (process.argv.includes('-dev')) {
|
||||
file = 'accounts.dev.json'
|
||||
}
|
||||
|
||||
// 2) Docker-friendly env overrides
|
||||
const envJson = process.env.ACCOUNTS_JSON
|
||||
const envFile = process.env.ACCOUNTS_FILE
|
||||
|
||||
let raw: string | undefined
|
||||
if (envJson && envJson.trim().startsWith('[')) {
|
||||
raw = envJson
|
||||
} else if (envFile && envFile.trim()) {
|
||||
const full = path.isAbsolute(envFile) ? envFile : path.join(process.cwd(), envFile)
|
||||
if (!fs.existsSync(full)) {
|
||||
throw new Error(`ACCOUNTS_FILE not found: ${full}`)
|
||||
}
|
||||
raw = fs.readFileSync(full, 'utf-8')
|
||||
} else {
|
||||
// Try multiple locations to support both root mounts and dist mounts
|
||||
// Support both .json and .jsonc extensions
|
||||
const candidates = [
|
||||
path.join(__dirname, '../', file), // root/accounts.json (preferred)
|
||||
path.join(__dirname, '../', file + 'c'), // root/accounts.jsonc
|
||||
path.join(__dirname, '../src', file), // fallback: file kept inside src/
|
||||
path.join(__dirname, '../src', file + 'c'), // src/accounts.jsonc
|
||||
path.join(process.cwd(), file), // cwd override
|
||||
path.join(process.cwd(), file + 'c'), // cwd/accounts.jsonc
|
||||
path.join(process.cwd(), 'src', file), // cwd/src/accounts.json
|
||||
path.join(process.cwd(), 'src', file + 'c'), // cwd/src/accounts.jsonc
|
||||
path.join(__dirname, file), // dist/accounts.json (legacy)
|
||||
path.join(__dirname, file + 'c') // dist/accounts.jsonc
|
||||
]
|
||||
let chosen: string | null = null
|
||||
for (const p of candidates) {
|
||||
try { if (fs.existsSync(p)) { chosen = p; break } } catch { /* ignore */ }
|
||||
}
|
||||
if (!chosen) throw new Error(`accounts file not found in: ${candidates.join(' | ')}`)
|
||||
raw = fs.readFileSync(chosen, 'utf-8')
|
||||
}
|
||||
|
||||
// Support comments in accounts file (same as config)
|
||||
const cleaned = stripJsonComments(raw)
|
||||
const parsedUnknown = JSON.parse(cleaned)
|
||||
// Accept either a root array or an object with an `accounts` array, ignore `_note`
|
||||
const parsed = Array.isArray(parsedUnknown) ? parsedUnknown : (parsedUnknown && typeof parsedUnknown === 'object' && Array.isArray((parsedUnknown as { accounts?: unknown }).accounts) ? (parsedUnknown as { accounts: unknown[] }).accounts : null)
|
||||
if (!Array.isArray(parsed)) throw new Error('accounts must be an array')
|
||||
// minimal shape validation
|
||||
for (const entry of parsed) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
const a = entry as any
|
||||
if (!a || typeof a.email !== 'string' || typeof a.password !== 'string') {
|
||||
throw new Error('each account must have email and password strings')
|
||||
}
|
||||
a.email = String(a.email).trim()
|
||||
a.password = String(a.password)
|
||||
if (typeof a.recoveryEmail !== 'string') {
|
||||
throw new Error(`account ${a.email || '<unknown>'} must include a recoveryEmail string`)
|
||||
}
|
||||
a.recoveryEmail = String(a.recoveryEmail).trim()
|
||||
if (!a.recoveryEmail || !/@/.test(a.recoveryEmail)) {
|
||||
throw new Error(`account ${a.email} recoveryEmail must be a valid email address`)
|
||||
}
|
||||
}
|
||||
// Filter out disabled accounts (enabled: false)
|
||||
const allAccounts = parsed as Account[]
|
||||
const enabledAccounts = allAccounts.filter(acc => acc.enabled !== false)
|
||||
return enabledAccounts
|
||||
} catch (error) {
|
||||
throw new Error(error as string)
|
||||
}
|
||||
}
|
||||
|
||||
/** Absolute path of the config file that `loadConfig()` resolved, for diagnostics. */
export function getConfigPath(): string {
    return configSourcePath
}
|
||||
|
||||
export function loadConfig(): Config {
|
||||
try {
|
||||
if (configCache) {
|
||||
return configCache
|
||||
}
|
||||
|
||||
// Resolve configuration file from common locations (supports .jsonc and .json)
|
||||
const names = ['config.jsonc', 'config.json']
|
||||
const bases = [
|
||||
path.join(__dirname, '../'), // dist root when compiled
|
||||
path.join(__dirname, '../src'), // fallback: running dist but config still in src
|
||||
process.cwd(), // repo root
|
||||
path.join(process.cwd(), 'src'), // repo/src when running ts-node
|
||||
__dirname // dist/util
|
||||
]
|
||||
const candidates: string[] = []
|
||||
for (const base of bases) {
|
||||
for (const name of names) {
|
||||
candidates.push(path.join(base, name))
|
||||
}
|
||||
}
|
||||
let cfgPath: string | null = null
|
||||
for (const p of candidates) {
|
||||
try { if (fs.existsSync(p)) { cfgPath = p; break } } catch { /* ignore */ }
|
||||
}
|
||||
if (!cfgPath) throw new Error(`config.json not found in: ${candidates.join(' | ')}`)
|
||||
const config = fs.readFileSync(cfgPath, 'utf-8')
|
||||
const text = config.replace(/^\uFEFF/, '')
|
||||
const raw = JSON.parse(stripJsonComments(text))
|
||||
const normalized = normalizeConfig(raw)
|
||||
configCache = normalized // Set as cache
|
||||
configSourcePath = cfgPath
|
||||
|
||||
return normalized
|
||||
} catch (error) {
|
||||
throw new Error(error as string)
|
||||
}
|
||||
}
|
||||
|
||||
export async function loadSessionData(sessionPath: string, email: string, isMobile: boolean, saveFingerprint: ConfigSaveFingerprint) {
|
||||
try {
|
||||
// Fetch cookie file
|
||||
const cookieFile = path.join(__dirname, '../browser/', sessionPath, email, `${isMobile ? 'mobile_cookies' : 'desktop_cookies'}.json`)
|
||||
|
||||
let cookies: Cookie[] = []
|
||||
if (fs.existsSync(cookieFile)) {
|
||||
const cookiesData = await fs.promises.readFile(cookieFile, 'utf-8')
|
||||
cookies = JSON.parse(cookiesData)
|
||||
}
|
||||
|
||||
// Fetch fingerprint file (support both legacy typo "fingerpint" and corrected "fingerprint")
|
||||
const baseDir = path.join(__dirname, '../browser/', sessionPath, email)
|
||||
const legacyFile = path.join(baseDir, `${isMobile ? 'mobile_fingerpint' : 'desktop_fingerpint'}.json`)
|
||||
const correctFile = path.join(baseDir, `${isMobile ? 'mobile_fingerprint' : 'desktop_fingerprint'}.json`)
|
||||
|
||||
let fingerprint!: BrowserFingerprintWithHeaders
|
||||
const shouldLoad = (saveFingerprint.desktop && !isMobile) || (saveFingerprint.mobile && isMobile)
|
||||
if (shouldLoad) {
|
||||
const chosen = fs.existsSync(correctFile) ? correctFile : (fs.existsSync(legacyFile) ? legacyFile : '')
|
||||
if (chosen) {
|
||||
const fingerprintData = await fs.promises.readFile(chosen, 'utf-8')
|
||||
fingerprint = JSON.parse(fingerprintData)
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
cookies: cookies,
|
||||
fingerprint: fingerprint
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
throw new Error(error as string)
|
||||
}
|
||||
}
|
||||
|
||||
export async function saveSessionData(sessionPath: string, browser: BrowserContext, email: string, isMobile: boolean): Promise<string> {
|
||||
try {
|
||||
const cookies = await browser.cookies()
|
||||
|
||||
// Fetch path
|
||||
const sessionDir = path.join(__dirname, '../browser/', sessionPath, email)
|
||||
|
||||
// Create session dir
|
||||
if (!fs.existsSync(sessionDir)) {
|
||||
await fs.promises.mkdir(sessionDir, { recursive: true })
|
||||
}
|
||||
|
||||
// Save cookies to a file
|
||||
await fs.promises.writeFile(
|
||||
path.join(sessionDir, `${isMobile ? 'mobile_cookies' : 'desktop_cookies'}.json`),
|
||||
JSON.stringify(cookies, null, 2)
|
||||
)
|
||||
|
||||
return sessionDir
|
||||
} catch (error) {
|
||||
throw new Error(error as string)
|
||||
}
|
||||
}
|
||||
|
||||
export async function saveFingerprintData(sessionPath: string, email: string, isMobile: boolean, fingerprint: BrowserFingerprintWithHeaders): Promise<string> {
|
||||
try {
|
||||
// Fetch path
|
||||
const sessionDir = path.join(__dirname, '../browser/', sessionPath, email)
|
||||
|
||||
// Create session dir
|
||||
if (!fs.existsSync(sessionDir)) {
|
||||
await fs.promises.mkdir(sessionDir, { recursive: true })
|
||||
}
|
||||
|
||||
// Save fingerprint to files (write both legacy and corrected names for compatibility)
|
||||
const legacy = path.join(sessionDir, `${isMobile ? 'mobile_fingerpint' : 'desktop_fingerpint'}.json`)
|
||||
const correct = path.join(sessionDir, `${isMobile ? 'mobile_fingerprint' : 'desktop_fingerprint'}.json`)
|
||||
const payload = JSON.stringify(fingerprint)
|
||||
await fs.promises.writeFile(correct, payload)
|
||||
try { await fs.promises.writeFile(legacy, payload) } catch { /* ignore */ }
|
||||
|
||||
return sessionDir
|
||||
} catch (error) {
|
||||
throw new Error(error as string)
|
||||
}
|
||||
}
|
||||
252
src/util/Logger.ts
Normal file
252
src/util/Logger.ts
Normal file
@@ -0,0 +1,252 @@
|
||||
import axios from 'axios'
|
||||
import chalk from 'chalk'
|
||||
|
||||
import { Ntfy } from './Ntfy'
|
||||
import { loadConfig } from './Load'
|
||||
import { DISCORD } from '../constants'
|
||||
|
||||
const WEBHOOK_USERNAME = 'MS Rewards - Live Logs'
|
||||
|
||||
type WebhookBuffer = {
|
||||
lines: string[]
|
||||
sending: boolean
|
||||
timer?: NodeJS.Timeout
|
||||
}
|
||||
|
||||
const webhookBuffers = new Map<string, WebhookBuffer>()
|
||||
|
||||
// Periodic cleanup of old/idle webhook buffers to prevent memory leaks
|
||||
setInterval(() => {
|
||||
const now = Date.now()
|
||||
const BUFFER_MAX_AGE_MS = 3600000 // 1 hour
|
||||
|
||||
for (const [url, buf] of webhookBuffers.entries()) {
|
||||
if (!buf.sending && buf.lines.length === 0) {
|
||||
const lastActivity = (buf as unknown as { lastActivity?: number }).lastActivity || 0
|
||||
if (now - lastActivity > BUFFER_MAX_AGE_MS) {
|
||||
webhookBuffers.delete(url)
|
||||
}
|
||||
}
|
||||
}
|
||||
}, 600000) // Check every 10 minutes
|
||||
|
||||
function getBuffer(url: string): WebhookBuffer {
|
||||
let buf = webhookBuffers.get(url)
|
||||
if (!buf) {
|
||||
buf = { lines: [], sending: false }
|
||||
webhookBuffers.set(url, buf)
|
||||
}
|
||||
// Track last activity for cleanup
|
||||
(buf as unknown as { lastActivity: number }).lastActivity = Date.now()
|
||||
return buf
|
||||
}
|
||||
|
||||
async function sendBatch(url: string, buf: WebhookBuffer) {
|
||||
if (buf.sending) return
|
||||
buf.sending = true
|
||||
while (buf.lines.length > 0) {
|
||||
const chunk: string[] = []
|
||||
let currentLength = 0
|
||||
while (buf.lines.length > 0) {
|
||||
const next = buf.lines[0]!
|
||||
const projected = currentLength + next.length + (chunk.length > 0 ? 1 : 0)
|
||||
if (projected > DISCORD.MAX_EMBED_LENGTH && chunk.length > 0) break
|
||||
buf.lines.shift()
|
||||
chunk.push(next)
|
||||
currentLength = projected
|
||||
}
|
||||
|
||||
const content = chunk.join('\n').slice(0, DISCORD.MAX_EMBED_LENGTH)
|
||||
if (!content) {
|
||||
continue
|
||||
}
|
||||
|
||||
// Enhanced webhook payload with embed, username and avatar
|
||||
const payload = {
|
||||
username: WEBHOOK_USERNAME,
|
||||
avatar_url: DISCORD.AVATAR_URL,
|
||||
embeds: [{
|
||||
description: `\`\`\`\n${content}\n\`\`\``,
|
||||
color: determineColorFromContent(content),
|
||||
timestamp: new Date().toISOString()
|
||||
}]
|
||||
}
|
||||
|
||||
try {
|
||||
await axios.post(url, payload, { headers: { 'Content-Type': 'application/json' }, timeout: DISCORD.WEBHOOK_TIMEOUT })
|
||||
await new Promise(resolve => setTimeout(resolve, DISCORD.RATE_LIMIT_DELAY))
|
||||
} catch (error) {
|
||||
// Re-queue failed batch at front and exit loop
|
||||
buf.lines = chunk.concat(buf.lines)
|
||||
console.error('[Webhook] live log delivery failed:', error)
|
||||
break
|
||||
}
|
||||
}
|
||||
buf.sending = false
|
||||
}
|
||||
|
||||
function determineColorFromContent(content: string): number {
|
||||
const lower = content.toLowerCase()
|
||||
// Security/Ban alerts - Red
|
||||
if (lower.includes('[banned]') || lower.includes('[security]') || lower.includes('suspended') || lower.includes('compromised')) {
|
||||
return DISCORD.COLOR_RED
|
||||
}
|
||||
// Errors - Dark Red
|
||||
if (lower.includes('[error]') || lower.includes('✗')) {
|
||||
return DISCORD.COLOR_CRIMSON
|
||||
}
|
||||
// Warnings - Orange/Yellow
|
||||
if (lower.includes('[warn]') || lower.includes('⚠')) {
|
||||
return DISCORD.COLOR_ORANGE
|
||||
}
|
||||
// Success - Green
|
||||
if (lower.includes('[ok]') || lower.includes('✓') || lower.includes('complet')) {
|
||||
return DISCORD.COLOR_GREEN
|
||||
}
|
||||
// Info/Main - Blue
|
||||
if (lower.includes('[main]')) {
|
||||
return DISCORD.COLOR_BLUE
|
||||
}
|
||||
// Default - Gray
|
||||
return 0x95A5A6 // Gray
|
||||
}
|
||||
|
||||
function enqueueWebhookLog(url: string, line: string) {
|
||||
const buf = getBuffer(url)
|
||||
buf.lines.push(line)
|
||||
if (!buf.timer) {
|
||||
buf.timer = setTimeout(() => {
|
||||
buf.timer = undefined
|
||||
void sendBatch(url, buf)
|
||||
}, DISCORD.DEBOUNCE_DELAY)
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Synchronous logger used across the bot.
 *
 * Writes a colorized, icon-tagged line to the console, conditionally forwards
 * the message to ntfy and to a Discord "live log" webhook (both fire-and-forget),
 * and returns an Error when type === 'error' so callers can `throw log(...)` safely.
 *
 * @param isMobile true = MOBILE, false = DESKTOP, 'main' = MAIN process tag
 * @param title    log source tag; matched case-insensitively against the exclusion lists
 * @param message  human-readable message body
 * @param type     severity; 'error' makes the function return an Error
 * @param color    optional chalk color name applied to the whole console line
 */
export function log(isMobile: boolean | 'main', title: string, message: string, type: 'log' | 'warn' | 'error' = 'log', color?: keyof typeof chalk): Error | void {
    const configData = loadConfig()

    // Access logging config with fallback for backward compatibility
    const configAny = configData as unknown as Record<string, unknown>
    const logging = configAny.logging as { excludeFunc?: string[]; logExcludeFunc?: string[] } | undefined
    const logExcludeFunc = logging?.excludeFunc ?? (configData as { logExcludeFunc?: string[] }).logExcludeFunc ?? []

    // Titles on the exclusion list are dropped entirely (no console, no ntfy, no webhook)
    if (logExcludeFunc.some((x: string) => x.toLowerCase() === title.toLowerCase())) {
        return
    }

    const currentTime = new Date().toLocaleString()
    const platformText = isMobile === 'main' ? 'MAIN' : isMobile ? 'MOBILE' : 'DESKTOP'

    // Clean string for notifications (no chalk, structured)
    type LoggingCfg = { excludeFunc?: string[]; webhookExcludeFunc?: string[]; redactEmails?: boolean }
    const loggingCfg: LoggingCfg = (configAny.logging || {}) as LoggingCfg
    const shouldRedact = !!loggingCfg.redactEmails
    // Masks email addresses (keeps the first 2 chars of the local part) when redactEmails is on
    const redact = (s: string) => shouldRedact ? s.replace(/[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]{2,}/ig, (m) => {
        const [u, d] = m.split('@'); return `${(u||'').slice(0,2)}***@${d||''}`
    }) : s
    const cleanStr = redact(`[${currentTime}] [PID: ${process.pid}] [${type.toUpperCase()}] ${platformText} [${title}] ${message}`)

    // Define conditions for sending to NTFY (substring triggers per log type)
    const ntfyConditions = {
        log: [
            message.toLowerCase().includes('started tasks for account'),
            message.toLowerCase().includes('press the number'),
            message.toLowerCase().includes('no points to earn')
        ],
        error: [],
        warn: [
            message.toLowerCase().includes('aborting'),
            message.toLowerCase().includes('didn\'t gain')
        ]
    }

    // Check if the current log type and message meet the NTFY conditions
    try {
        if (type in ntfyConditions && ntfyConditions[type as keyof typeof ntfyConditions].some(condition => condition)) {
            // Fire-and-forget: ntfy failures must never break logging
            Promise.resolve(Ntfy(cleanStr, type)).catch(() => { /* ignore ntfy errors */ })
        }
    } catch { /* ignore */ }

    // Console output with better formatting and contextual icons
    const typeIndicator = type === 'error' ? '✗' : type === 'warn' ? '⚠' : '✓'
    const platformColor = isMobile === 'main' ? chalk.cyan : isMobile ? chalk.blue : chalk.magenta
    const typeColor = type === 'error' ? chalk.red : type === 'warn' ? chalk.yellow : chalk.green

    // Add contextual icon based on title/message (ASCII-safe for Windows PowerShell)
    const titleLower = title.toLowerCase()
    const msgLower = message.toLowerCase()

    // ASCII-safe icons for Windows PowerShell compatibility; order = priority
    const iconMap: Array<[RegExp, string]> = [
        [/security|compromised/i, '[SECURITY]'],
        [/ban|suspend/i, '[BANNED]'],
        [/error/i, '[ERROR]'],
        [/warn/i, '[WARN]'],
        [/success|complet/i, '[OK]'],
        [/login/i, '[LOGIN]'],
        [/point/i, '[POINTS]'],
        [/search/i, '[SEARCH]'],
        [/activity|quiz|poll/i, '[ACTIVITY]'],
        [/browser/i, '[BROWSER]'],
        [/main/i, '[MAIN]']
    ]

    // First pattern matching either the title or the message wins
    let icon = ''
    for (const [pattern, symbol] of iconMap) {
        if (pattern.test(titleLower) || pattern.test(msgLower)) {
            icon = chalk.dim(symbol)
            break
        }
    }

    const iconPart = icon ? icon + ' ' : ''

    const formattedStr = [
        chalk.gray(`[${currentTime}]`),
        chalk.gray(`[${process.pid}]`),
        typeColor(`${typeIndicator}`),
        platformColor(`[${platformText}]`),
        chalk.bold(`[${title}]`),
        iconPart + redact(message)
    ].join(' ')

    // Optional caller-supplied chalk color wraps the whole formatted line
    const applyChalk = color && typeof chalk[color] === 'function' ? chalk[color] as (msg: string) => string : null

    // Log based on the type
    switch (type) {
        case 'warn':
            applyChalk ? console.warn(applyChalk(formattedStr)) : console.warn(formattedStr)
            break

        case 'error':
            applyChalk ? console.error(applyChalk(formattedStr)) : console.error(formattedStr)
            break

        default:
            applyChalk ? console.log(applyChalk(formattedStr)) : console.log(formattedStr)
            break
    }

    // Webhook streaming (live logs): a dedicated liveWebhookUrl takes precedence,
    // otherwise the main webhook is reused when enabled
    try {
        const loggingCfg: Record<string, unknown> = (configAny.logging || {}) as Record<string, unknown>
        const webhookCfg = configData.webhook
        const liveUrlRaw = typeof loggingCfg.liveWebhookUrl === 'string' ? loggingCfg.liveWebhookUrl.trim() : ''
        const liveUrl = liveUrlRaw || (webhookCfg?.enabled && webhookCfg.url ? webhookCfg.url : '')
        const webhookExclude = Array.isArray(loggingCfg.webhookExcludeFunc) ? loggingCfg.webhookExcludeFunc : configData.webhookLogExcludeFunc || []
        const webhookExcluded = Array.isArray(webhookExclude) && webhookExclude.some((x: string) => x.toLowerCase() === title.toLowerCase())
        if (liveUrl && !webhookExcluded) {
            enqueueWebhookLog(liveUrl, cleanStr)
        }
    } catch (error) {
        console.error('[Logger] Failed to enqueue webhook log:', error)
    }

    // Return an Error when logging an error so callers can `throw log(...)`
    if (type === 'error') {
        // CommunityReporter disabled per project policy
        return new Error(cleanStr)
    }
}
|
||||
27
src/util/Ntfy.ts
Normal file
27
src/util/Ntfy.ts
Normal file
@@ -0,0 +1,27 @@
|
||||
import { loadConfig } from './Load'
|
||||
import axios from 'axios'
|
||||
|
||||
// Priority/tag presets per log level forwarded to ntfy.
// `tags` controls the notification icon — customize via https://docs.ntfy.sh/emojis/
const NOTIFICATION_TYPES = {
    error: { priority: 'max', tags: 'rotating_light' },
    warn: { priority: 'high', tags: 'warning' },
    log: { priority: 'default', tags: 'medal_sports' }
}
|
||||
|
||||
export async function Ntfy(message: string, type: keyof typeof NOTIFICATION_TYPES = 'log'): Promise<void> {
|
||||
const config = loadConfig().ntfy
|
||||
if (!config?.enabled || !config.url || !config.topic) return
|
||||
|
||||
try {
|
||||
const { priority, tags } = NOTIFICATION_TYPES[type]
|
||||
const headers = {
|
||||
Title: 'Microsoft Rewards Script',
|
||||
Priority: priority,
|
||||
Tags: tags,
|
||||
...(config.authToken && { Authorization: `Bearer ${config.authToken}` })
|
||||
}
|
||||
|
||||
await axios.post(`${config.url}/${config.topic}`, message, { headers })
|
||||
} catch (error) {
|
||||
// Silently fail - NTFY is a non-critical notification service
|
||||
}
|
||||
}
|
||||
340
src/util/QueryDiversityEngine.ts
Normal file
340
src/util/QueryDiversityEngine.ts
Normal file
@@ -0,0 +1,340 @@
|
||||
import axios from 'axios'
|
||||
|
||||
/** A pluggable provider of search queries. */
export interface QuerySource {
    name: string
    weight: number // 0-1, probability of selection
    fetchQueries: () => Promise<string[]> // returns candidate query strings
}
|
||||
|
||||
/** Settings controlling where search queries come from and how they are combined. */
export interface QueryDiversityConfig {
    sources: Array<'google-trends' | 'reddit' | 'news' | 'wikipedia' | 'local-fallback'>
    deduplicate: boolean // drop duplicate query strings gathered across sources
    mixStrategies: boolean // Mix different source types in same session
    maxQueriesPerSource: number // cap on queries taken from each source per fetch
    cacheMinutes: number // per-source cache TTL before a source is queried again
}
|
||||
|
||||
/**
|
||||
* QueryDiversityEngine fetches search queries from multiple sources to avoid patterns.
|
||||
* Supports Google Trends, Reddit, News APIs, Wikipedia, and local fallbacks.
|
||||
*/
|
||||
export class QueryDiversityEngine {
|
||||
    private config: QueryDiversityConfig // resolved settings (defaults applied in constructor)
    private cache: Map<string, { queries: string[]; expires: number }> = new Map() // per-source query cache, keyed by source name
|
||||
|
||||
constructor(config?: Partial<QueryDiversityConfig>) {
|
||||
this.config = {
|
||||
sources: config?.sources || ['google-trends', 'reddit', 'local-fallback'],
|
||||
deduplicate: config?.deduplicate !== false,
|
||||
mixStrategies: config?.mixStrategies !== false,
|
||||
maxQueriesPerSource: config?.maxQueriesPerSource || 10,
|
||||
cacheMinutes: config?.cacheMinutes || 30
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch diverse queries from configured sources
|
||||
*/
|
||||
async fetchQueries(count: number): Promise<string[]> {
|
||||
const allQueries: string[] = []
|
||||
|
||||
for (const sourceName of this.config.sources) {
|
||||
try {
|
||||
const queries = await this.getFromSource(sourceName)
|
||||
allQueries.push(...queries.slice(0, this.config.maxQueriesPerSource))
|
||||
} catch (error) {
|
||||
// Silently fail and try other sources
|
||||
}
|
||||
}
|
||||
|
||||
// Deduplicate
|
||||
let final = this.config.deduplicate ? Array.from(new Set(allQueries)) : allQueries
|
||||
|
||||
// Mix strategies: interleave queries from different sources
|
||||
if (this.config.mixStrategies && this.config.sources.length > 1) {
|
||||
final = this.interleaveQueries(final, count)
|
||||
}
|
||||
|
||||
// Shuffle and limit to requested count
|
||||
final = this.shuffleArray(final).slice(0, count)
|
||||
|
||||
return final.length > 0 ? final : this.getLocalFallback(count)
|
||||
}
|
||||
|
||||
    /**
     * Fetch queries from one named source, with a per-source TTL cache
     * (config.cacheMinutes) so remote endpoints aren't hit on every call.
     * Unknown source names yield an empty list.
     */
    private async getFromSource(source: string): Promise<string[]> {
        const cached = this.cache.get(source)
        if (cached && Date.now() < cached.expires) {
            return cached.queries
        }

        let queries: string[] = []

        switch (source) {
            case 'google-trends':
                queries = await this.fetchGoogleTrends()
                break
            case 'reddit':
                queries = await this.fetchReddit()
                break
            case 'news':
                queries = await this.fetchNews()
                break
            case 'wikipedia':
                queries = await this.fetchWikipedia()
                break
            case 'local-fallback':
                queries = this.getLocalFallback(20)
                break
            default:
                // Unknown source, skip silently
                break
        }

        // NOTE: empty results are cached too, so a failing source isn't
        // retried until the TTL expires
        this.cache.set(source, {
            queries,
            expires: Date.now() + (this.config.cacheMinutes * 60000)
        })

        return queries
    }
|
||||
|
||||
/**
|
||||
* Fetch from Google Trends (existing logic can be reused)
|
||||
*/
|
||||
private async fetchGoogleTrends(): Promise<string[]> {
|
||||
try {
|
||||
const response = await axios.get('https://trends.google.com/trends/api/dailytrends?geo=US', {
|
||||
timeout: 10000
|
||||
})
|
||||
|
||||
const data = response.data.toString().replace(')]}\',', '')
|
||||
const parsed = JSON.parse(data)
|
||||
|
||||
const queries: string[] = []
|
||||
for (const item of parsed.default.trendingSearchesDays || []) {
|
||||
for (const search of item.trendingSearches || []) {
|
||||
if (search.title?.query) {
|
||||
queries.push(search.title.query)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return queries.slice(0, 20)
|
||||
} catch {
|
||||
return []
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch from Reddit (top posts from popular subreddits)
|
||||
*/
|
||||
private async fetchReddit(): Promise<string[]> {
|
||||
try {
|
||||
const subreddits = ['news', 'worldnews', 'todayilearned', 'askreddit', 'technology']
|
||||
const randomSub = subreddits[Math.floor(Math.random() * subreddits.length)]
|
||||
|
||||
const response = await axios.get(`https://www.reddit.com/r/${randomSub}/hot.json?limit=15`, {
|
||||
timeout: 10000,
|
||||
headers: {
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
|
||||
}
|
||||
})
|
||||
|
||||
const posts = response.data.data.children || []
|
||||
const queries: string[] = []
|
||||
|
||||
for (const post of posts) {
|
||||
const title = post.data?.title
|
||||
if (title && title.length > 10 && title.length < 100) {
|
||||
queries.push(title)
|
||||
}
|
||||
}
|
||||
|
||||
return queries
|
||||
} catch {
|
||||
return []
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch from News API (requires API key - fallback to headlines scraping)
|
||||
*/
|
||||
private async fetchNews(): Promise<string[]> {
|
||||
try {
|
||||
// Using NewsAPI.org free tier (limited requests)
|
||||
const apiKey = process.env.NEWS_API_KEY
|
||||
if (!apiKey) {
|
||||
return this.fetchNewsFallback()
|
||||
}
|
||||
|
||||
const response = await axios.get('https://newsapi.org/v2/top-headlines', {
|
||||
params: {
|
||||
country: 'us',
|
||||
pageSize: 15,
|
||||
apiKey
|
||||
},
|
||||
timeout: 10000
|
||||
})
|
||||
|
||||
const articles = response.data.articles || []
|
||||
return articles.map((a: { title?: string }) => a.title).filter((t: string | undefined) => t && t.length > 10)
|
||||
} catch {
|
||||
return this.fetchNewsFallback()
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Fallback news scraper (BBC/CNN headlines)
|
||||
*/
|
||||
private async fetchNewsFallback(): Promise<string[]> {
|
||||
try {
|
||||
const response = await axios.get('https://www.bbc.com/news', {
|
||||
timeout: 10000,
|
||||
headers: {
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
|
||||
}
|
||||
})
|
||||
|
||||
const html = response.data
|
||||
const regex = /<h3[^>]*>(.*?)<\/h3>/gi
|
||||
const matches: RegExpMatchArray[] = []
|
||||
let match
|
||||
while ((match = regex.exec(html)) !== null) {
|
||||
matches.push(match)
|
||||
}
|
||||
|
||||
return matches
|
||||
.map(m => m[1]?.replace(/<[^>]+>/g, '').trim())
|
||||
.filter((t: string | undefined) => t && t.length > 10 && t.length < 100)
|
||||
.slice(0, 10) as string[]
|
||||
} catch {
|
||||
return []
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch from Wikipedia (featured articles / trending topics)
|
||||
*/
|
||||
private async fetchWikipedia(): Promise<string[]> {
|
||||
try {
|
||||
const response = await axios.get('https://en.wikipedia.org/w/api.php', {
|
||||
params: {
|
||||
action: 'query',
|
||||
list: 'random',
|
||||
rnnamespace: 0,
|
||||
rnlimit: 15,
|
||||
format: 'json'
|
||||
},
|
||||
timeout: 10000
|
||||
})
|
||||
|
||||
const pages = response.data.query?.random || []
|
||||
return pages.map((p: { title?: string }) => p.title).filter((t: string | undefined) => t && t.length > 3)
|
||||
} catch {
|
||||
return []
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Local fallback queries (curated list)
|
||||
*/
|
||||
private getLocalFallback(count: number): string[] {
|
||||
const fallback = [
|
||||
'weather forecast',
|
||||
'news today',
|
||||
'stock market',
|
||||
'sports scores',
|
||||
'movie reviews',
|
||||
'recipes',
|
||||
'travel destinations',
|
||||
'health tips',
|
||||
'technology news',
|
||||
'best restaurants near me',
|
||||
'how to cook pasta',
|
||||
'python tutorial',
|
||||
'world events',
|
||||
'climate change',
|
||||
'electric vehicles',
|
||||
'space exploration',
|
||||
'artificial intelligence',
|
||||
'cryptocurrency',
|
||||
'gaming news',
|
||||
'fashion trends',
|
||||
'fitness workout',
|
||||
'home improvement',
|
||||
'gardening tips',
|
||||
'pet care',
|
||||
'book recommendations',
|
||||
'music charts',
|
||||
'streaming shows',
|
||||
'historical events',
|
||||
'science discoveries',
|
||||
'education resources'
|
||||
]
|
||||
|
||||
return this.shuffleArray(fallback).slice(0, count)
|
||||
}
|
||||
|
||||
/**
|
||||
* Interleave queries from different sources for diversity
|
||||
*/
|
||||
private interleaveQueries(queries: string[], targetCount: number): string[] {
|
||||
const result: string[] = []
|
||||
const sourceMap = new Map<string, string[]>()
|
||||
|
||||
// Group queries by estimated source (simple heuristic)
|
||||
for (const q of queries) {
|
||||
const source = this.guessSource(q)
|
||||
if (!sourceMap.has(source)) {
|
||||
sourceMap.set(source, [])
|
||||
}
|
||||
sourceMap.get(source)?.push(q)
|
||||
}
|
||||
|
||||
const sources = Array.from(sourceMap.values())
|
||||
let index = 0
|
||||
|
||||
while (result.length < targetCount && sources.some(s => s.length > 0)) {
|
||||
const source = sources[index % sources.length]
|
||||
if (source && source.length > 0) {
|
||||
const q = source.shift()
|
||||
if (q) result.push(q)
|
||||
}
|
||||
index++
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
/**
|
||||
* Guess which source a query came from (basic heuristic)
|
||||
*/
|
||||
private guessSource(query: string): string {
|
||||
if (/^[A-Z]/.test(query) && query.includes(' ')) return 'news'
|
||||
if (query.length > 80) return 'reddit'
|
||||
if (/how to|what is|why/i.test(query)) return 'local'
|
||||
return 'trends'
|
||||
}
|
||||
|
||||
/**
|
||||
* Shuffle array (Fisher-Yates)
|
||||
*/
|
||||
private shuffleArray<T>(array: T[]): T[] {
|
||||
const shuffled = [...array]
|
||||
for (let i = shuffled.length - 1; i > 0; i--) {
|
||||
const j = Math.floor(Math.random() * (i + 1));
|
||||
[shuffled[i], shuffled[j]] = [shuffled[j]!, shuffled[i]!]
|
||||
}
|
||||
return shuffled
|
||||
}
|
||||
|
||||
/**
 * Clear cache (call between runs)
 *
 * Empties the in-memory query cache so the next run fetches fresh
 * queries instead of replaying cached results from a previous session.
 * NOTE(review): assumes `this.cache` is a Map-like store declared on
 * this class above the visible chunk — confirm against the class fields.
 */
clearCache(): void {
    this.cache.clear()
}
|
||||
}
|
||||
63
src/util/Retry.ts
Normal file
63
src/util/Retry.ts
Normal file
@@ -0,0 +1,63 @@
|
||||
import type { ConfigRetryPolicy } from '../interface/Config'
|
||||
import Util from './Utils'
|
||||
|
||||
// Fully-resolved retry policy: every field is a concrete number.
// String durations coming from user config (ConfigRetryPolicy) are
// normalized to milliseconds in Retry's constructor before storage here.
type NumericPolicy = {
    maxAttempts: number // total tries, including the first call
    baseDelay: number   // initial back-off wait in ms
    maxDelay: number    // upper cap for any single wait in ms
    multiplier: number  // exponential growth factor between attempts
    jitter: number      // +/- fractional randomization applied to each wait
}

// Any zero-argument async operation that Retry.run can (re)invoke.
export type Retryable<T> = () => Promise<T>
|
||||
|
||||
export class Retry {
|
||||
private policy: NumericPolicy
|
||||
|
||||
constructor(policy?: ConfigRetryPolicy) {
|
||||
const def: NumericPolicy = {
|
||||
maxAttempts: 3,
|
||||
baseDelay: 1000,
|
||||
maxDelay: 30000,
|
||||
multiplier: 2,
|
||||
jitter: 0.2
|
||||
}
|
||||
const merged: ConfigRetryPolicy = { ...(policy || {}) }
|
||||
// normalize string durations
|
||||
const util = new Util()
|
||||
const parse = (v: number | string) => {
|
||||
if (typeof v === 'number') return v
|
||||
try { return util.stringToMs(String(v)) } catch { return def.baseDelay }
|
||||
}
|
||||
this.policy = {
|
||||
maxAttempts: (merged.maxAttempts as number) ?? def.maxAttempts,
|
||||
baseDelay: parse(merged.baseDelay ?? def.baseDelay),
|
||||
maxDelay: parse(merged.maxDelay ?? def.maxDelay),
|
||||
multiplier: (merged.multiplier as number) ?? def.multiplier,
|
||||
jitter: (merged.jitter as number) ?? def.jitter
|
||||
}
|
||||
}
|
||||
|
||||
async run<T>(fn: Retryable<T>, isRetryable?: (e: unknown) => boolean): Promise<T> {
|
||||
let attempt = 0
|
||||
let delay = this.policy.baseDelay
|
||||
let lastErr: unknown
|
||||
while (attempt < this.policy.maxAttempts) {
|
||||
try {
|
||||
return await fn()
|
||||
} catch (e) {
|
||||
lastErr = e
|
||||
attempt += 1
|
||||
const retry = isRetryable ? isRetryable(e) : true
|
||||
if (!retry || attempt >= this.policy.maxAttempts) break
|
||||
const jitter = 1 + (Math.random() * 2 - 1) * this.policy.jitter
|
||||
const sleep = Math.min(this.policy.maxDelay, Math.max(0, Math.floor(delay * jitter)))
|
||||
await new Promise((r) => setTimeout(r, sleep))
|
||||
delay = Math.min(this.policy.maxDelay, Math.floor(delay * (this.policy.multiplier || 2)))
|
||||
}
|
||||
}
|
||||
throw lastErr instanceof Error ? lastErr : new Error(String(lastErr))
|
||||
}
|
||||
}
|
||||
|
||||
export default Retry
|
||||
177
src/util/RiskManager.ts
Normal file
177
src/util/RiskManager.ts
Normal file
@@ -0,0 +1,177 @@
|
||||
import { AdaptiveThrottler } from './AdaptiveThrottler'
|
||||
|
||||
// A single observation fed into the risk model.
export interface RiskEvent {
    type: 'captcha' | 'error' | 'timeout' | 'ban_hint' | 'success'
    timestamp: number // epoch milliseconds (Date.now()) when the event was recorded
    severity: number // 0-10, higher = worse
    context?: string // optional free-form note about where the event happened
}

// Snapshot of the computed risk state returned by RiskManager.assessRisk().
export interface RiskMetrics {
    score: number // 0-100, higher = riskier
    level: 'safe' | 'elevated' | 'high' | 'critical'
    recommendation: string // human-readable summary of the suggested action
    delayMultiplier: number // factor callers should apply to their base delays
}
|
||||
|
||||
/**
 * RiskManager monitors account activity patterns and detects early ban signals.
 * Integrates with AdaptiveThrottler to dynamically adjust delays based on risk.
 *
 * Events are kept in a bounded in-memory ring (newest 100) and scored over a
 * sliding one-hour window by assessRisk().
 */
export class RiskManager {
    private events: RiskEvent[] = []
    private readonly maxEvents = 100 // hard cap on retained events (oldest dropped)
    private readonly timeWindowMs = 3600000 // 1 hour
    private throttler: AdaptiveThrottler
    private cooldownUntil: number = 0 // epoch ms until which isInCooldown() is true

    constructor(throttler?: AdaptiveThrottler) {
        // A dedicated throttler is created when none is shared by the caller
        this.throttler = throttler || new AdaptiveThrottler()
    }

    /**
     * Record a risk event (captcha, error, success, etc.)
     *
     * Severity is clamped to 0-10. Severity >= 8 additionally arms a forced
     * cool-down (30s per severity point, capped at 5 minutes).
     */
    recordEvent(type: RiskEvent['type'], severity: number, context?: string): void {
        const event: RiskEvent = {
            type,
            timestamp: Date.now(),
            severity: Math.max(0, Math.min(10, severity)),
            context
        }

        this.events.push(event)
        if (this.events.length > this.maxEvents) {
            this.events.shift()
        }

        // Feed success/error into adaptive throttler
        if (type === 'success') {
            this.throttler.record(true)
        } else if (['error', 'captcha', 'timeout', 'ban_hint'].includes(type)) {
            this.throttler.record(false)
        }

        // Auto cool-down on critical events
        if (severity >= 8) {
            const coolMs = Math.min(300000, severity * 30000) // max 5min
            this.cooldownUntil = Date.now() + coolMs
        }
    }

    /**
     * Calculate current risk metrics based on recent events
     *
     * Score = recency/severity-weighted average of event severities (0-100
     * scale) + a frequency penalty + fixed penalties per captcha/ban hint,
     * capped at 100. The returned delayMultiplier already includes the
     * adaptive throttler's own multiplier on top of the level-based one.
     *
     * NOTE(review): 'success' events also count toward the frequency
     * penalty and (when recorded with severity > 0) toward the weighted
     * average — confirm this is intended rather than filtering them out.
     */
    assessRisk(): RiskMetrics {
        const now = Date.now()
        const recentEvents = this.events.filter(e => now - e.timestamp < this.timeWindowMs)

        if (recentEvents.length === 0) {
            return {
                score: 0,
                level: 'safe',
                recommendation: 'Normal operation',
                delayMultiplier: 1.0
            }
        }

        // Calculate base risk score (weighted by recency and severity)
        let weightedSum = 0
        let totalWeight = 0

        for (const event of recentEvents) {
            const age = now - event.timestamp
            const recencyFactor = 1 - (age / this.timeWindowMs) // newer = higher weight
            const weight = recencyFactor * (event.severity / 10)

            weightedSum += weight * event.severity
            totalWeight += weight
        }

        const baseScore = totalWeight > 0 ? (weightedSum / totalWeight) * 10 : 0

        // Penalty for rapid event frequency
        const eventRate = recentEvents.length / (this.timeWindowMs / 60000) // events per minute
        const frequencyPenalty = Math.min(30, eventRate * 5)

        // Bonus penalty for specific patterns
        const captchaCount = recentEvents.filter(e => e.type === 'captcha').length
        const banHintCount = recentEvents.filter(e => e.type === 'ban_hint').length
        const patternPenalty = (captchaCount * 15) + (banHintCount * 25)

        const finalScore = Math.min(100, baseScore + frequencyPenalty + patternPenalty)

        // Determine risk level
        let level: RiskMetrics['level']
        let recommendation: string
        let delayMultiplier: number

        if (finalScore < 20) {
            level = 'safe'
            recommendation = 'Normal operation'
            delayMultiplier = 1.0
        } else if (finalScore < 40) {
            level = 'elevated'
            recommendation = 'Minor issues detected. Increasing delays slightly.'
            delayMultiplier = 1.5
        } else if (finalScore < 70) {
            level = 'high'
            recommendation = 'Significant risk detected. Applying heavy throttling.'
            delayMultiplier = 2.5
        } else {
            level = 'critical'
            recommendation = 'CRITICAL: High ban risk. Consider stopping or manual review.'
            delayMultiplier = 4.0
        }

        // Apply adaptive throttler multiplier on top
        const adaptiveMultiplier = this.throttler.getDelayMultiplier()
        delayMultiplier *= adaptiveMultiplier

        return {
            score: Math.round(finalScore),
            level,
            recommendation,
            delayMultiplier: Number(delayMultiplier.toFixed(2))
        }
    }

    /**
     * Check if currently in forced cool-down period
     */
    isInCooldown(): boolean {
        return Date.now() < this.cooldownUntil
    }

    /**
     * Get remaining cool-down time in milliseconds (0 when not cooling down)
     */
    getCooldownRemaining(): number {
        const remaining = this.cooldownUntil - Date.now()
        return Math.max(0, remaining)
    }

    /**
     * Get the adaptive throttler instance for advanced usage
     */
    getThrottler(): AdaptiveThrottler {
        return this.throttler
    }

    /**
     * Clear all events and reset state (use between accounts)
     */
    reset(): void {
        this.events = []
        this.cooldownUntil = 0
        // Keep throttler state across resets for learning
    }

    /**
     * Export events for analytics/logging
     *
     * @param limitMinutes lookback window in minutes (default 60)
     */
    getRecentEvents(limitMinutes: number = 60): RiskEvent[] {
        const cutoff = Date.now() - (limitMinutes * 60000)
        return this.events.filter(e => e.timestamp >= cutoff)
    }
}
|
||||
728
src/util/StartupValidator.ts
Normal file
728
src/util/StartupValidator.ts
Normal file
@@ -0,0 +1,728 @@
|
||||
import fs from 'fs'
|
||||
import path from 'path'
|
||||
import chalk from 'chalk'
|
||||
import { Config } from '../interface/Config'
|
||||
import { Account } from '../interface/Account'
|
||||
|
||||
// A single validation finding collected during startup checks.
interface ValidationError {
    severity: 'error' | 'warning'
    category: string // logical area, e.g. 'accounts', 'network', 'schedule'
    message: string // what is wrong, shown to the user
    fix?: string // suggested remediation
    docsLink?: string // relative path to relevant documentation
}
|
||||
|
||||
export class StartupValidator {
|
||||
private errors: ValidationError[] = []
|
||||
private warnings: ValidationError[] = []
|
||||
|
||||
/**
|
||||
* Run all validation checks before starting the bot.
|
||||
* Always returns true - validation is informative, not blocking.
|
||||
* Displays errors and warnings but lets execution continue.
|
||||
*/
|
||||
async validate(config: Config, accounts: Account[]): Promise<boolean> {
|
||||
console.log(chalk.cyan('\n═══════════════════════════════════════════════════════════════'))
|
||||
console.log(chalk.cyan(' 🔍 STARTUP VALIDATION - Checking Configuration'))
|
||||
console.log(chalk.cyan('═══════════════════════════════════════════════════════════════\n'))
|
||||
|
||||
// Run all validation checks
|
||||
this.validateAccounts(accounts)
|
||||
this.validateConfig(config)
|
||||
this.validateEnvironment()
|
||||
this.validateFileSystem(config)
|
||||
this.validateBrowserSettings(config)
|
||||
this.validateScheduleSettings(config)
|
||||
this.validateNetworkSettings(config)
|
||||
this.validateWorkerSettings(config)
|
||||
this.validateSearchSettings(config)
|
||||
this.validateHumanizationSettings(config)
|
||||
this.validateSecuritySettings(config)
|
||||
|
||||
// Display results (await to respect the delay)
|
||||
await this.displayResults()
|
||||
|
||||
// Always return true - validation is informative only
|
||||
// Users can proceed even with errors (they might be false positives)
|
||||
return true
|
||||
}
|
||||
|
||||
private validateAccounts(accounts: Account[]): void {
|
||||
if (!accounts || accounts.length === 0) {
|
||||
this.addError(
|
||||
'accounts',
|
||||
'No accounts found in accounts.json',
|
||||
'Add at least one account to src/accounts.json or src/accounts.jsonc',
|
||||
'docs/accounts.md'
|
||||
)
|
||||
return
|
||||
}
|
||||
|
||||
accounts.forEach((account, index) => {
|
||||
const prefix = `Account ${index + 1} (${account.email || 'unknown'})`
|
||||
|
||||
// Required: email
|
||||
if (!account.email || typeof account.email !== 'string') {
|
||||
this.addError(
|
||||
'accounts',
|
||||
`${prefix}: Missing or invalid email address`,
|
||||
'Add a valid email address in the "email" field'
|
||||
)
|
||||
} else if (!/@/.test(account.email)) {
|
||||
this.addError(
|
||||
'accounts',
|
||||
`${prefix}: Email format is invalid`,
|
||||
'Email must contain @ symbol (e.g., user@example.com)'
|
||||
)
|
||||
}
|
||||
|
||||
// Required: password
|
||||
if (!account.password || typeof account.password !== 'string') {
|
||||
this.addError(
|
||||
'accounts',
|
||||
`${prefix}: Missing or invalid password`,
|
||||
'Add your Microsoft account password in the "password" field'
|
||||
)
|
||||
} else if (account.password.length < 4) {
|
||||
this.addWarning(
|
||||
'accounts',
|
||||
`${prefix}: Password seems too short (${account.password.length} characters)`,
|
||||
'Verify this is your correct Microsoft account password'
|
||||
)
|
||||
}
|
||||
|
||||
// Required: recoveryEmail (NEW - mandatory field)
|
||||
if (!account.recoveryEmail || typeof account.recoveryEmail !== 'string') {
|
||||
this.addError(
|
||||
'accounts',
|
||||
`${prefix}: Missing required field "recoveryEmail"`,
|
||||
'Add your recovery/backup email address. This is MANDATORY for security checks.\nExample: "recoveryEmail": "backup@gmail.com"',
|
||||
'docs/accounts.md'
|
||||
)
|
||||
} else if (!/@/.test(account.recoveryEmail)) {
|
||||
this.addError(
|
||||
'accounts',
|
||||
`${prefix}: Recovery email format is invalid`,
|
||||
'Recovery email must be a valid email address (e.g., backup@gmail.com)'
|
||||
)
|
||||
} else if (account.recoveryEmail.trim() === '') {
|
||||
this.addError(
|
||||
'accounts',
|
||||
`${prefix}: Recovery email cannot be empty`,
|
||||
'Provide the actual recovery email associated with this Microsoft account'
|
||||
)
|
||||
}
|
||||
|
||||
// Optional but recommended: TOTP
|
||||
if (!account.totp || account.totp.trim() === '') {
|
||||
this.addWarning(
|
||||
'accounts',
|
||||
`${prefix}: No TOTP (2FA) secret configured`,
|
||||
'Highly recommended: Set up 2FA and add your TOTP secret for automated login',
|
||||
'docs/accounts.md'
|
||||
)
|
||||
} else {
|
||||
const cleaned = account.totp.replace(/\s+/g, '')
|
||||
if (cleaned.length < 16) {
|
||||
this.addWarning(
|
||||
'accounts',
|
||||
`${prefix}: TOTP secret seems too short (${cleaned.length} chars)`,
|
||||
'Verify you copied the complete Base32 secret from Microsoft Authenticator setup'
|
||||
)
|
||||
}
|
||||
// Check if it's Base32 (A-Z, 2-7)
|
||||
if (!/^[A-Z2-7\s]+$/i.test(account.totp)) {
|
||||
this.addWarning(
|
||||
'accounts',
|
||||
`${prefix}: TOTP secret contains invalid characters`,
|
||||
'TOTP secrets should only contain letters A-Z and numbers 2-7 (Base32 format)'
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Proxy validation
|
||||
if (account.proxy) {
|
||||
if (account.proxy.url && account.proxy.url.trim() !== '') {
|
||||
if (!account.proxy.port || account.proxy.port <= 0) {
|
||||
this.addError(
|
||||
'accounts',
|
||||
`${prefix}: Proxy URL provided but port is missing or invalid`,
|
||||
'Add a valid proxy port number (e.g., 8080, 3128)'
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/**
 * Validate top-level config flags: parallelism, clustering, timeouts,
 * job-state tracking, risk management and baseline search delays.
 */
private validateConfig(config: Config): void {
    // NOTE: the FORCE_HEADLESS / config.headless mismatch used to be
    // warned about here too, duplicating the identical message emitted
    // by validateBrowserSettings(); that check now lives only there.

    // Parallel mode warning
    if (config.parallel === true) {
        this.addWarning(
            'config',
            'Parallel mode enabled (desktop + mobile run simultaneously)',
            'This uses more resources. Disable if you experience crashes or timeouts.',
            'docs/config.md'
        )
    }

    // Clusters validation
    if (config.clusters > 1) {
        this.addWarning(
            'config',
            `Clusters set to ${config.clusters} - accounts will run in parallel`,
            'Ensure your system has enough resources (RAM, CPU) for concurrent execution'
        )
    }

    // Global timeout validation (accepts number or duration string)
    const timeout = typeof config.globalTimeout === 'string'
        ? config.globalTimeout
        : `${config.globalTimeout}ms`

    if (timeout === '0' || timeout === '0ms' || timeout === '0s') {
        this.addError(
            'config',
            'Global timeout is set to 0',
            'Set a reasonable timeout value (e.g., "30s", "60s") to prevent infinite hangs'
        )
    }

    // Job state validation
    if (config.jobState?.enabled === false) {
        this.addWarning(
            'config',
            'Job state tracking is disabled',
            'The bot will not save progress. If interrupted, all tasks will restart from scratch.',
            'docs/jobstate.md'
        )
    }

    // Risk management validation
    if (config.riskManagement?.enabled === true) {
        if (config.riskManagement.stopOnCritical === true) {
            this.addWarning(
                'config',
                'Risk management will stop execution if critical risk is detected',
                'Bot will halt all accounts if risk score becomes too high'
            )
        }
    }

    // Search delays validation (accepts number or duration string)
    const minDelay = typeof config.searchSettings.searchDelay.min === 'string'
        ? config.searchSettings.searchDelay.min
        : `${config.searchSettings.searchDelay.min}ms`

    if (minDelay === '0' || minDelay === '0ms' || minDelay === '0s') {
        this.addWarning(
            'config',
            'Search delay minimum is 0 - this may look suspicious',
            'Consider setting a minimum delay (e.g., "1s", "2s") for more natural behavior'
        )
    }
}
|
||||
|
||||
private validateEnvironment(): void {
|
||||
// Node.js version check
|
||||
const nodeVersion = process.version
|
||||
const major = parseInt(nodeVersion.split('.')[0]?.replace('v', '') || '0', 10)
|
||||
|
||||
if (major < 18) {
|
||||
this.addError(
|
||||
'environment',
|
||||
`Node.js version ${nodeVersion} is too old`,
|
||||
'Install Node.js 18 or newer. Visit https://nodejs.org/',
|
||||
'docs/getting-started.md'
|
||||
)
|
||||
} else if (major < 20) {
|
||||
this.addWarning(
|
||||
'environment',
|
||||
`Node.js version ${nodeVersion} is outdated`,
|
||||
'Consider upgrading to Node.js 20+ for better performance and security'
|
||||
)
|
||||
}
|
||||
|
||||
// Docker-specific checks
|
||||
if (process.env.FORCE_HEADLESS === '1') {
|
||||
this.addWarning(
|
||||
'environment',
|
||||
'Running in Docker/containerized environment',
|
||||
'Make sure volumes are correctly mounted for sessions persistence'
|
||||
)
|
||||
}
|
||||
|
||||
// Time sync warning for TOTP users
|
||||
if (process.platform === 'linux') {
|
||||
this.addWarning(
|
||||
'environment',
|
||||
'Linux detected: Ensure system time is synchronized',
|
||||
'Run: sudo timedatectl set-ntp true (required for TOTP to work correctly)'
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
private validateFileSystem(config: Config): void {
|
||||
// Check if sessions directory exists or can be created
|
||||
const sessionPath = path.isAbsolute(config.sessionPath)
|
||||
? config.sessionPath
|
||||
: path.join(process.cwd(), config.sessionPath)
|
||||
|
||||
if (!fs.existsSync(sessionPath)) {
|
||||
try {
|
||||
fs.mkdirSync(sessionPath, { recursive: true })
|
||||
this.addWarning(
|
||||
'filesystem',
|
||||
`Created missing sessions directory: ${sessionPath}`,
|
||||
'Session data will be stored here'
|
||||
)
|
||||
} catch (error) {
|
||||
this.addError(
|
||||
'filesystem',
|
||||
`Cannot create sessions directory: ${sessionPath}`,
|
||||
`Check file permissions. Error: ${error instanceof Error ? error.message : String(error)}`
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Check job-state directory if enabled
|
||||
if (config.jobState?.enabled !== false) {
|
||||
const jobStateDir = config.jobState?.dir
|
||||
? config.jobState.dir
|
||||
: path.join(sessionPath, 'job-state')
|
||||
|
||||
if (!fs.existsSync(jobStateDir)) {
|
||||
try {
|
||||
fs.mkdirSync(jobStateDir, { recursive: true })
|
||||
} catch (error) {
|
||||
this.addWarning(
|
||||
'filesystem',
|
||||
`Cannot create job-state directory: ${jobStateDir}`,
|
||||
'Job state tracking may fail. Check file permissions.'
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check diagnostics directory if enabled
|
||||
if (config.diagnostics?.enabled === true) {
|
||||
const diagPath = path.join(process.cwd(), 'diagnostics')
|
||||
if (!fs.existsSync(diagPath)) {
|
||||
try {
|
||||
fs.mkdirSync(diagPath, { recursive: true })
|
||||
} catch (error) {
|
||||
this.addWarning(
|
||||
'filesystem',
|
||||
'Cannot create diagnostics directory',
|
||||
'Screenshots and HTML snapshots will not be saved'
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private validateBrowserSettings(config: Config): void {
|
||||
// Headless validation - only warn in Docker/containerized environments
|
||||
if (!config.headless && process.env.FORCE_HEADLESS === '1') {
|
||||
this.addWarning(
|
||||
'browser',
|
||||
'FORCE_HEADLESS=1 but config.headless is false',
|
||||
'Docker environment forces headless mode. Your config setting will be overridden.',
|
||||
'docs/docker.md'
|
||||
)
|
||||
}
|
||||
|
||||
// Fingerprinting validation
|
||||
if (config.saveFingerprint?.desktop === false && config.saveFingerprint?.mobile === false) {
|
||||
this.addWarning(
|
||||
'browser',
|
||||
'Fingerprint saving is completely disabled',
|
||||
'Each run will generate new fingerprints, which may look suspicious'
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
private validateScheduleSettings(config: Config): void {
|
||||
if (config.schedule?.enabled === true) {
|
||||
// Time format validation
|
||||
const schedRec = config.schedule as Record<string, unknown>
|
||||
const useAmPm = schedRec.useAmPm
|
||||
const time12 = typeof schedRec.time12 === 'string' ? schedRec.time12 : ''
|
||||
const time24 = typeof schedRec.time24 === 'string' ? schedRec.time24 : ''
|
||||
|
||||
if (useAmPm === true && (!time12 || time12.trim() === '')) {
|
||||
this.addError(
|
||||
'schedule',
|
||||
'Schedule enabled with useAmPm=true but time12 is missing',
|
||||
'Add time12 field (e.g., "9:00 AM") or set useAmPm=false',
|
||||
'docs/schedule.md'
|
||||
)
|
||||
}
|
||||
|
||||
if (useAmPm === false && (!time24 || time24.trim() === '')) {
|
||||
this.addError(
|
||||
'schedule',
|
||||
'Schedule enabled with useAmPm=false but time24 is missing',
|
||||
'Add time24 field (e.g., "09:00") or set useAmPm=true',
|
||||
'docs/schedule.md'
|
||||
)
|
||||
}
|
||||
|
||||
// Timezone validation
|
||||
const tz = config.schedule.timeZone || 'UTC'
|
||||
try {
|
||||
Intl.DateTimeFormat(undefined, { timeZone: tz })
|
||||
} catch {
|
||||
this.addError(
|
||||
'schedule',
|
||||
`Invalid timezone: ${tz}`,
|
||||
'Use a valid IANA timezone (e.g., "America/New_York", "Europe/Paris")',
|
||||
'docs/schedule.md'
|
||||
)
|
||||
}
|
||||
|
||||
// Vacation mode check
|
||||
if (config.vacation?.enabled === true) {
|
||||
if (config.vacation.minDays && config.vacation.maxDays) {
|
||||
if (config.vacation.minDays > config.vacation.maxDays) {
|
||||
this.addError(
|
||||
'schedule',
|
||||
`Vacation minDays (${config.vacation.minDays}) > maxDays (${config.vacation.maxDays})`,
|
||||
'Set minDays <= maxDays (e.g., minDays: 2, maxDays: 4)'
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Notification endpoint checks: Discord-style webhooks, the conclusion
 * webhook and NTFY all need a URL (and NTFY a topic) when enabled.
 */
private validateNetworkSettings(config: Config): void {
    // Webhook validation
    if (config.webhook?.enabled === true) {
        const webhookUrl = config.webhook.url
        if (!webhookUrl || webhookUrl.trim() === '') {
            this.addError(
                'network',
                'Webhook enabled but URL is missing',
                'Add webhook URL or set webhook.enabled=false',
                'docs/config.md'
            )
        } else if (!webhookUrl.startsWith('http')) {
            this.addError(
                'network',
                `Invalid webhook URL: ${webhookUrl}`,
                'Webhook URL must start with http:// or https://'
            )
        }
    }

    // Conclusion webhook validation
    if (config.conclusionWebhook?.enabled === true) {
        const conclusionUrl = config.conclusionWebhook.url
        if (!conclusionUrl || conclusionUrl.trim() === '') {
            this.addError(
                'network',
                'Conclusion webhook enabled but URL is missing',
                'Add conclusion webhook URL or disable it'
            )
        }
    }

    // NTFY validation: both server URL and topic are required
    if (config.ntfy?.enabled === true) {
        if (!config.ntfy.url || config.ntfy.url.trim() === '') {
            this.addError(
                'network',
                'NTFY enabled but URL is missing',
                'Add NTFY server URL or set ntfy.enabled=false',
                'docs/ntfy.md'
            )
        }
        if (!config.ntfy.topic || config.ntfy.topic.trim() === '') {
            this.addError(
                'network',
                'NTFY enabled but topic is missing',
                'Add NTFY topic name',
                'docs/ntfy.md'
            )
        }
    }
}
|
||||
|
||||
private validateWorkerSettings(config: Config): void {
|
||||
const workers = config.workers
|
||||
|
||||
// Check if at least one worker is enabled
|
||||
const anyEnabled = workers.doDailySet || workers.doMorePromotions || workers.doPunchCards ||
|
||||
workers.doDesktopSearch || workers.doMobileSearch || workers.doDailyCheckIn ||
|
||||
workers.doReadToEarn
|
||||
|
||||
if (!anyEnabled) {
|
||||
this.addWarning(
|
||||
'workers',
|
||||
'All workers are disabled - bot will do nothing',
|
||||
'Enable at least one worker task (doDailySet, doDesktopSearch, etc.)',
|
||||
'docs/config.md'
|
||||
)
|
||||
}
|
||||
|
||||
// Mobile + desktop search check
|
||||
if (!workers.doDesktopSearch && !workers.doMobileSearch) {
|
||||
this.addWarning(
|
||||
'workers',
|
||||
'Both desktop and mobile searches are disabled',
|
||||
'Enable at least one search type to earn search points'
|
||||
)
|
||||
}
|
||||
|
||||
// Bundle validation
|
||||
if (workers.bundleDailySetWithSearch === true && !workers.doDesktopSearch) {
|
||||
this.addWarning(
|
||||
'workers',
|
||||
'bundleDailySetWithSearch is enabled but doDesktopSearch is disabled',
|
||||
'Desktop search will not run after Daily Set. Enable doDesktopSearch or disable bundling.'
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
private validateSearchSettings(config: Config): void {
|
||||
const search = config.searchSettings
|
||||
|
||||
// Retry validation
|
||||
if (search.retryMobileSearchAmount < 0) {
|
||||
this.addWarning(
|
||||
'search',
|
||||
'retryMobileSearchAmount is negative',
|
||||
'Set to 0 or positive number (recommended: 2-3)'
|
||||
)
|
||||
}
|
||||
|
||||
if (search.retryMobileSearchAmount > 10) {
|
||||
this.addWarning(
|
||||
'search',
|
||||
`retryMobileSearchAmount is very high (${search.retryMobileSearchAmount})`,
|
||||
'High retry count may trigger detection. Recommended: 2-3'
|
||||
)
|
||||
}
|
||||
|
||||
// Fallback validation
|
||||
if (search.localFallbackCount !== undefined && search.localFallbackCount < 10) {
|
||||
this.addWarning(
|
||||
'search',
|
||||
`localFallbackCount is low (${search.localFallbackCount})`,
|
||||
'Consider at least 15-25 fallback queries for variety'
|
||||
)
|
||||
}
|
||||
|
||||
// Query diversity check
|
||||
if (config.queryDiversity?.enabled === false && !config.searchOnBingLocalQueries) {
|
||||
this.addWarning(
|
||||
'search',
|
||||
'Query diversity disabled and local queries disabled',
|
||||
'Bot will only use Google Trends. Enable one query source for better variety.',
|
||||
'docs/config.md'
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
private validateHumanizationSettings(config: Config): void {
|
||||
const human = config.humanization
|
||||
|
||||
if (!human || human.enabled === false) {
|
||||
this.addWarning(
|
||||
'humanization',
|
||||
'Humanization is completely disabled',
|
||||
'This increases detection risk. Consider enabling for safer automation.',
|
||||
'docs/config.md'
|
||||
)
|
||||
return
|
||||
}
|
||||
|
||||
// Gesture probabilities
|
||||
if (human.gestureMoveProb !== undefined) {
|
||||
if (human.gestureMoveProb < 0 || human.gestureMoveProb > 1) {
|
||||
this.addError(
|
||||
'humanization',
|
||||
`gestureMoveProb must be between 0 and 1 (got ${human.gestureMoveProb})`,
|
||||
'Set a probability value between 0.0 and 1.0'
|
||||
)
|
||||
}
|
||||
if (human.gestureMoveProb === 0) {
|
||||
this.addWarning(
|
||||
'humanization',
|
||||
'Mouse gestures disabled (gestureMoveProb=0)',
|
||||
'This may look robotic. Consider 0.3-0.7 for natural behavior.'
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
if (human.gestureScrollProb !== undefined) {
|
||||
if (human.gestureScrollProb < 0 || human.gestureScrollProb > 1) {
|
||||
this.addError(
|
||||
'humanization',
|
||||
`gestureScrollProb must be between 0 and 1 (got ${human.gestureScrollProb})`,
|
||||
'Set a probability value between 0.0 and 1.0'
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Action delays
|
||||
if (human.actionDelay) {
|
||||
const minMs = typeof human.actionDelay.min === 'string'
|
||||
? parseInt(human.actionDelay.min, 10)
|
||||
: human.actionDelay.min
|
||||
const maxMs = typeof human.actionDelay.max === 'string'
|
||||
? parseInt(human.actionDelay.max, 10)
|
||||
: human.actionDelay.max
|
||||
|
||||
if (minMs > maxMs) {
|
||||
this.addError(
|
||||
'humanization',
|
||||
'actionDelay min is greater than max',
|
||||
`Fix: min=${minMs} should be <= max=${maxMs}`
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Random off days
|
||||
if (human.randomOffDaysPerWeek !== undefined) {
|
||||
if (human.randomOffDaysPerWeek < 0 || human.randomOffDaysPerWeek > 7) {
|
||||
this.addError(
|
||||
'humanization',
|
||||
`randomOffDaysPerWeek must be 0-7 (got ${human.randomOffDaysPerWeek})`,
|
||||
'Set to a value between 0 (no off days) and 7 (always off)'
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Allowed windows validation
|
||||
if (human.allowedWindows && Array.isArray(human.allowedWindows)) {
|
||||
human.allowedWindows.forEach((window, idx) => {
|
||||
if (typeof window !== 'string') {
|
||||
this.addError(
|
||||
'humanization',
|
||||
`allowedWindows[${idx}] is not a string`,
|
||||
'Format: "HH:mm-HH:mm" (e.g., "09:00-17:00")'
|
||||
)
|
||||
} else if (!/^\d{2}:\d{2}-\d{2}:\d{2}$/.test(window)) {
|
||||
this.addWarning(
|
||||
'humanization',
|
||||
`allowedWindows[${idx}] format may be invalid: "${window}"`,
|
||||
'Expected format: "HH:mm-HH:mm" (24-hour, e.g., "09:00-17:00")'
|
||||
)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
private validateSecuritySettings(config: Config): void {
|
||||
// Check logging redaction
|
||||
const logging = config.logging as { redactEmails?: boolean } | undefined
|
||||
if (logging && logging.redactEmails === false) {
|
||||
this.addWarning(
|
||||
'security',
|
||||
'Email redaction is disabled in logs',
|
||||
'Enable redactEmails=true if you share logs publicly',
|
||||
'docs/security.md'
|
||||
)
|
||||
}
|
||||
|
||||
// Removed diagnostics warning - reports/ folder with masked emails is safe for debugging
|
||||
|
||||
// Proxy exposure check
|
||||
if (config.proxy?.proxyGoogleTrends === false && config.proxy?.proxyBingTerms === false) {
|
||||
this.addWarning(
|
||||
'security',
|
||||
'All external API calls will use your real IP',
|
||||
'Consider enabling proxy for Google Trends or Bing Terms to mask your IP'
|
||||
)
|
||||
}
|
||||
|
||||
// Crash recovery
|
||||
if (config.crashRecovery?.autoRestart === true) {
|
||||
const maxRestarts = config.crashRecovery.maxRestarts ?? 2
|
||||
if (maxRestarts > 5) {
|
||||
this.addWarning(
|
||||
'security',
|
||||
`Crash recovery maxRestarts is high (${maxRestarts})`,
|
||||
'Excessive restarts on errors may trigger rate limits or detection'
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private addError(category: string, message: string, fix?: string, docsLink?: string): void {
|
||||
this.errors.push({ severity: 'error', category, message, fix, docsLink })
|
||||
}
|
||||
|
||||
private addWarning(category: string, message: string, fix?: string, docsLink?: string): void {
|
||||
this.warnings.push({ severity: 'warning', category, message, fix, docsLink })
|
||||
}
|
||||
|
||||
/**
 * Render the collected validation results to the console: errors first,
 * then warnings, then a summary banner. When anything was found, pause
 * 5 seconds so the user can read before execution continues.
 * Never aborts the run — errors are reported but execution proceeds.
 */
private async displayResults(): Promise<void> {
    // Display errors (numbered, each with optional fix hint and docs link)
    if (this.errors.length > 0) {
        console.log(chalk.red('\n❌ VALIDATION ERRORS FOUND:\n'))
        this.errors.forEach((err, index) => {
            console.log(chalk.red(` ${index + 1}. [${err.category.toUpperCase()}] ${err.message}`))
            if (err.fix) {
                console.log(chalk.yellow(` 💡 Fix: ${err.fix}`))
            }
            if (err.docsLink) {
                console.log(chalk.cyan(` 📖 Documentation: ${err.docsLink}`))
            }
            console.log('')
        })
    }

    // Display warnings (same layout as errors, softer colors)
    if (this.warnings.length > 0) {
        console.log(chalk.yellow('\n⚠️ WARNINGS:\n'))
        this.warnings.forEach((warn, index) => {
            console.log(chalk.yellow(` ${index + 1}. [${warn.category.toUpperCase()}] ${warn.message}`))
            if (warn.fix) {
                console.log(chalk.gray(` 💡 Suggestion: ${warn.fix}`))
            }
            if (warn.docsLink) {
                console.log(chalk.cyan(` 📖 Documentation: ${warn.docsLink}`))
            }
            console.log('')
        })
    }

    // Summary banner
    console.log(chalk.cyan('═══════════════════════════════════════════════════════════════'))

    if (this.errors.length === 0 && this.warnings.length === 0) {
        // Clean run: no pause needed, proceed immediately.
        console.log(chalk.green(' ✅ All validation checks passed! Configuration looks good.'))
        console.log(chalk.gray(' → Starting bot execution...'))
    } else {
        console.log(chalk.white(` Found: ${chalk.red(`${this.errors.length} error(s)`)} | ${chalk.yellow(`${this.warnings.length} warning(s)`)}`))

        if (this.errors.length > 0) {
            // Errors are advisory only — the bot intentionally continues.
            console.log(chalk.red('\n ⚠️ CRITICAL ERRORS DETECTED'))
            console.log(chalk.white(' → Bot will continue, but these issues may cause failures'))
            console.log(chalk.white(' → Review errors above and fix them for stable operation'))
            console.log(chalk.gray(' → If you believe these are false positives, you can ignore them'))
        } else {
            console.log(chalk.yellow('\n ⚠️ Warnings detected - review recommended'))
            console.log(chalk.gray(' → Bot will continue normally'))
        }

        console.log(chalk.white('\n 📖 Full documentation: docs/index.md'))
        console.log(chalk.gray(' → Proceeding with execution in 5 seconds...'))

        // Give user time to read (5 seconds for errors, 5 seconds for warnings)
        await new Promise(resolve => setTimeout(resolve, 5000))
    }

    console.log(chalk.cyan('═══════════════════════════════════════════════════════════════\n'))
}
|
||||
}
|
||||
84
src/util/Totp.ts
Normal file
84
src/util/Totp.ts
Normal file
@@ -0,0 +1,84 @@
|
||||
import crypto from 'crypto'
|
||||
|
||||
/**
|
||||
* Decode Base32 (RFC 4648) to a Buffer.
|
||||
* Accepts lowercase/uppercase, optional padding.
|
||||
*/
|
||||
function base32Decode(input: string): Buffer {
|
||||
const alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567'
|
||||
const clean = input.toUpperCase().replace(/=+$/g, '').replace(/[^A-Z2-7]/g, '')
|
||||
let bits = 0
|
||||
let value = 0
|
||||
const bytes: number[] = []
|
||||
|
||||
for (const char of clean) {
|
||||
const idx = alphabet.indexOf(char)
|
||||
if (idx < 0) continue
|
||||
value = (value << 5) | idx
|
||||
bits += 5
|
||||
if (bits >= 8) {
|
||||
bits -= 8
|
||||
bytes.push((value >>> bits) & 0xff)
|
||||
}
|
||||
}
|
||||
return Buffer.from(bytes)
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate an HMAC using Node's crypto and return Buffer.
|
||||
*/
|
||||
function hmac(algorithm: string, key: Buffer, data: Buffer): Buffer {
|
||||
return crypto.createHmac(algorithm, key).update(data).digest()
|
||||
}
|
||||
|
||||
export type TotpOptions = { digits?: number; step?: number; algorithm?: 'SHA1' | 'SHA256' | 'SHA512' }
|
||||
|
||||
/**
|
||||
* Generate TOTP per RFC 6238.
|
||||
* @param secretBase32 - shared secret in Base32
|
||||
* @param time - Unix time in seconds (defaults to now)
|
||||
* @param options - { digits, step, algorithm }
|
||||
* @returns numeric TOTP as string (zero-padded)
|
||||
*/
|
||||
export function generateTOTP(
|
||||
secretBase32: string,
|
||||
time: number = Math.floor(Date.now() / 1000),
|
||||
options?: TotpOptions
|
||||
): string {
|
||||
const digits = options?.digits ?? 6
|
||||
const step = options?.step ?? 30
|
||||
const alg = (options?.algorithm ?? 'SHA1').toUpperCase()
|
||||
|
||||
const key = base32Decode(secretBase32)
|
||||
const counter = Math.floor(time / step)
|
||||
|
||||
// 8-byte big-endian counter
|
||||
const counterBuffer = Buffer.alloc(8)
|
||||
counterBuffer.writeBigUInt64BE(BigInt(counter), 0)
|
||||
|
||||
let hmacAlg: string
|
||||
if (alg === 'SHA1') hmacAlg = 'sha1'
|
||||
else if (alg === 'SHA256') hmacAlg = 'sha256'
|
||||
else if (alg === 'SHA512') hmacAlg = 'sha512'
|
||||
else throw new Error('Unsupported algorithm. Use SHA1, SHA256 or SHA512.')
|
||||
|
||||
const hash = hmac(hmacAlg, key, counterBuffer)
|
||||
if (!hash || hash.length < 20) {
|
||||
// Minimal sanity check; for SHA1 length is 20
|
||||
throw new Error('Invalid HMAC output for TOTP')
|
||||
}
|
||||
|
||||
// Dynamic truncation
|
||||
const offset = hash[hash.length - 1]! & 0x0f
|
||||
if (offset + 3 >= hash.length) {
|
||||
throw new Error('Invalid dynamic truncation offset')
|
||||
}
|
||||
const code =
|
||||
((hash[offset]! & 0x7f) << 24) |
|
||||
((hash[offset + 1]! & 0xff) << 16) |
|
||||
((hash[offset + 2]! & 0xff) << 8) |
|
||||
(hash[offset + 3]! & 0xff)
|
||||
|
||||
const otp = (code % 10 ** digits).toString().padStart(digits, '0')
|
||||
return otp
|
||||
}
|
||||
300
src/util/UserAgent.ts
Normal file
300
src/util/UserAgent.ts
Normal file
@@ -0,0 +1,300 @@
|
||||
import axios from 'axios'
|
||||
import { BrowserFingerprintWithHeaders } from 'fingerprint-generator'
|
||||
|
||||
import { log } from './Logger'
|
||||
import Retry from './Retry'
|
||||
|
||||
import { ChromeVersion, EdgeVersion, Architecture, Platform } from '../interface/UserAgentUtil'
|
||||
|
||||
// Placeholder brand version used in Chromium's "Not/A)Brand" UA-CH entry.
const NOT_A_BRAND_VERSION = '99'
// Microsoft's public Edge update feed (per-channel release metadata).
const EDGE_VERSION_URL = 'https://edgeupdates.microsoft.com/api/products'
// How long a fetched version set stays fresh before re-querying the API.
const EDGE_VERSION_CACHE_TTL_MS = 1000 * 60 * 60

// Static fallback versions used only when the update API is unreachable
// (updated periodically; valid as of October 2024).
const FALLBACK_EDGE_VERSIONS: EdgeVersionResult = {
    android: '130.0.2849.66',
    windows: '130.0.2849.68'
}

// Latest stable Edge version per platform; either field may be absent,
// but consumers expect at least one to be populated.
type EdgeVersionResult = {
    android?: string
    windows?: string
}

// Module-level cache of the last successful (or fallback) version lookup.
let edgeVersionCache: { data: EdgeVersionResult; expiresAt: number } | null = null
// Dedupes concurrent lookups: callers await this promise instead of re-fetching.
let edgeVersionInFlight: Promise<EdgeVersionResult> | null = null
|
||||
|
||||
export async function getUserAgent(isMobile: boolean) {
|
||||
const system = getSystemComponents(isMobile)
|
||||
const app = await getAppComponents(isMobile)
|
||||
|
||||
const uaTemplate = isMobile ?
|
||||
`Mozilla/5.0 (${system}) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/${app.chrome_reduced_version} Mobile Safari/537.36 EdgA/${app.edge_version}` :
|
||||
`Mozilla/5.0 (${system}) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/${app.chrome_reduced_version} Safari/537.36 Edg/${app.edge_version}`
|
||||
|
||||
const platformVersion = `${isMobile ? Math.floor(Math.random() * 5) + 9 : Math.floor(Math.random() * 15) + 1}.0.0`
|
||||
|
||||
const uaMetadata = {
|
||||
mobile: isMobile,
|
||||
isMobile,
|
||||
platform: isMobile ? 'Android' : 'Windows',
|
||||
fullVersionList: [
|
||||
{ brand: 'Not/A)Brand', version: `${NOT_A_BRAND_VERSION}.0.0.0` },
|
||||
{ brand: 'Microsoft Edge', version: app['edge_version'] },
|
||||
{ brand: 'Chromium', version: app['chrome_version'] }
|
||||
],
|
||||
brands: [
|
||||
{ brand: 'Not/A)Brand', version: NOT_A_BRAND_VERSION },
|
||||
{ brand: 'Microsoft Edge', version: app['edge_major_version'] },
|
||||
{ brand: 'Chromium', version: app['chrome_major_version'] }
|
||||
],
|
||||
platformVersion,
|
||||
architecture: isMobile ? '' : 'x86',
|
||||
bitness: isMobile ? '' : '64',
|
||||
model: '',
|
||||
uaFullVersion: app['chrome_version']
|
||||
}
|
||||
|
||||
return { userAgent: uaTemplate, userAgentMetadata: uaMetadata }
|
||||
}
|
||||
|
||||
export async function getChromeVersion(isMobile: boolean): Promise<string> {
|
||||
try {
|
||||
const request = {
|
||||
url: 'https://googlechromelabs.github.io/chrome-for-testing/last-known-good-versions.json',
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Content-Type': 'application/json'
|
||||
}
|
||||
}
|
||||
|
||||
const response = await axios(request)
|
||||
const data: ChromeVersion = response.data
|
||||
return data.channels.Stable.version
|
||||
|
||||
} catch (error) {
|
||||
throw log(isMobile, 'USERAGENT-CHROME-VERSION', 'An error occurred:' + error, 'error')
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Resolve the latest stable Edge versions (Android + Windows), with a
 * one-hour module cache, dedup of concurrent callers via an in-flight
 * promise, and graceful degradation: stale cache first, then static
 * fallback versions. Never rejects — a result is always produced.
 */
export async function getEdgeVersions(isMobile: boolean): Promise<EdgeVersionResult> {
    const now = Date.now()

    // Return cached version if still valid
    if (edgeVersionCache && edgeVersionCache.expiresAt > now) {
        return edgeVersionCache.data
    }

    // Wait for in-flight request if one exists (dedupes concurrent lookups)
    if (edgeVersionInFlight) {
        try {
            return await edgeVersionInFlight
        } catch (error) {
            // In-flight fetch failed: prefer any (possibly stale) cache.
            if (edgeVersionCache) {
                log(isMobile, 'USERAGENT-EDGE-VERSION', 'Using cached Edge versions after in-flight failure', 'warn')
                return edgeVersionCache.data
            }
            // Fall through to fetch attempt below
        }
    }

    // Attempt to fetch fresh versions; cache handling lives on the promise so
    // every concurrent awaiter observes the same outcome.
    const fetchPromise = fetchEdgeVersionsWithRetry(isMobile)
        .then(result => {
            edgeVersionCache = { data: result, expiresAt: Date.now() + EDGE_VERSION_CACHE_TTL_MS }
            edgeVersionInFlight = null
            return result
        })
        .catch(() => {
            edgeVersionInFlight = null

            // Try stale cache first
            if (edgeVersionCache) {
                log(isMobile, 'USERAGENT-EDGE-VERSION', 'Using stale cached Edge versions due to fetch failure', 'warn')
                return edgeVersionCache.data
            }

            // Fall back to static versions (cached so we stop hammering the API)
            log(isMobile, 'USERAGENT-EDGE-VERSION', 'Using static fallback Edge versions (API unavailable)', 'warn')
            edgeVersionCache = { data: FALLBACK_EDGE_VERSIONS, expiresAt: Date.now() + EDGE_VERSION_CACHE_TTL_MS }
            return FALLBACK_EDGE_VERSIONS
        })

    edgeVersionInFlight = fetchPromise
    return fetchPromise
}
|
||||
|
||||
export function getSystemComponents(mobile: boolean): string {
|
||||
if (mobile) {
|
||||
const androidVersion = 10 + Math.floor(Math.random() * 5)
|
||||
return `Linux; Android ${androidVersion}; K`
|
||||
}
|
||||
|
||||
return 'Windows NT 10.0; Win64; x64'
|
||||
}
|
||||
|
||||
export async function getAppComponents(isMobile: boolean) {
|
||||
const versions = await getEdgeVersions(isMobile)
|
||||
const edgeVersion = isMobile ? versions.android : versions.windows as string
|
||||
const edgeMajorVersion = edgeVersion?.split('.')[0]
|
||||
|
||||
const chromeVersion = await getChromeVersion(isMobile)
|
||||
const chromeMajorVersion = chromeVersion?.split('.')[0]
|
||||
const chromeReducedVersion = `${chromeMajorVersion}.0.0.0`
|
||||
|
||||
return {
|
||||
not_a_brand_version: `${NOT_A_BRAND_VERSION}.0.0.0`,
|
||||
not_a_brand_major_version: NOT_A_BRAND_VERSION,
|
||||
edge_version: edgeVersion as string,
|
||||
edge_major_version: edgeMajorVersion as string,
|
||||
chrome_version: chromeVersion as string,
|
||||
chrome_major_version: chromeMajorVersion as string,
|
||||
chrome_reduced_version: chromeReducedVersion as string
|
||||
}
|
||||
}
|
||||
|
||||
async function fetchEdgeVersionsWithRetry(isMobile: boolean): Promise<EdgeVersionResult> {
|
||||
const retry = new Retry()
|
||||
return retry.run(async () => {
|
||||
const versions = await fetchEdgeVersionsOnce(isMobile)
|
||||
if (!versions.android && !versions.windows) {
|
||||
throw new Error('Stable Edge releases did not include Android or Windows versions')
|
||||
}
|
||||
return versions
|
||||
}, () => true)
|
||||
}
|
||||
|
||||
/**
 * Single fetch attempt against the Edge update API, trying two transports
 * in order: axios first, then Node's native fetch. Throws (with the last
 * transport's error message) only when both fail.
 */
async function fetchEdgeVersionsOnce(isMobile: boolean): Promise<EdgeVersionResult> {
    let lastError: unknown = null

    // Try axios first
    try {
        const response = await axios<EdgeVersion[]>({
            url: EDGE_VERSION_URL,
            method: 'GET',
            headers: {
                'Content-Type': 'application/json',
                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
            },
            timeout: 10000,
            // Only a clean 200 counts as success; anything else throws.
            validateStatus: (status) => status === 200
        })

        if (!response.data || !Array.isArray(response.data)) {
            throw new Error('Invalid response format from Edge API')
        }

        return mapEdgeVersions(response.data)
    } catch (axiosError) {
        lastError = axiosError
        // Continue to fallback
    }

    // Try native fetch as fallback (returns null rather than throwing on failure)
    try {
        const fallback = await tryNativeFetchFallback()
        if (fallback) {
            log(isMobile, 'USERAGENT-EDGE-VERSION', 'Axios failed, using native fetch fallback', 'warn')
            return fallback
        }
    } catch (fetchError) {
        lastError = fetchError
    }

    // Both methods failed — surface the most recent error's message.
    const errorMsg = lastError instanceof Error ? lastError.message : String(lastError)
    throw new Error(`Failed to fetch Edge versions: ${errorMsg}`)
}
|
||||
|
||||
async function tryNativeFetchFallback(): Promise<EdgeVersionResult | null> {
|
||||
let timeoutHandle: NodeJS.Timeout | undefined
|
||||
try {
|
||||
const controller = new AbortController()
|
||||
timeoutHandle = setTimeout(() => controller.abort(), 10000)
|
||||
|
||||
const response = await fetch(EDGE_VERSION_URL, {
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
|
||||
},
|
||||
signal: controller.signal
|
||||
})
|
||||
|
||||
clearTimeout(timeoutHandle)
|
||||
timeoutHandle = undefined
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`HTTP ${response.status}`)
|
||||
}
|
||||
|
||||
const data = await response.json() as EdgeVersion[]
|
||||
|
||||
if (!Array.isArray(data)) {
|
||||
throw new Error('Invalid response format')
|
||||
}
|
||||
|
||||
return mapEdgeVersions(data)
|
||||
} catch (error) {
|
||||
if (timeoutHandle) clearTimeout(timeoutHandle)
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
function mapEdgeVersions(data: EdgeVersion[]): EdgeVersionResult {
|
||||
if (!Array.isArray(data) || data.length === 0) {
|
||||
throw new Error('Edge API returned empty or invalid data')
|
||||
}
|
||||
|
||||
const stable = data.find(entry => entry?.Product?.toLowerCase() === 'stable')
|
||||
?? data.find(entry => entry?.Product && /stable/i.test(entry.Product))
|
||||
|
||||
if (!stable || !stable.Releases || !Array.isArray(stable.Releases)) {
|
||||
throw new Error('Stable Edge channel not found or invalid format')
|
||||
}
|
||||
|
||||
const androidRelease = stable.Releases.find(release =>
|
||||
release?.Platform === Platform.Android && release?.ProductVersion
|
||||
)
|
||||
|
||||
const windowsRelease = stable.Releases.find(release =>
|
||||
release?.Platform === Platform.Windows &&
|
||||
release?.Architecture === Architecture.X64 &&
|
||||
release?.ProductVersion
|
||||
) ?? stable.Releases.find(release =>
|
||||
release?.Platform === Platform.Windows &&
|
||||
release?.ProductVersion
|
||||
)
|
||||
|
||||
const result: EdgeVersionResult = {
|
||||
android: androidRelease?.ProductVersion,
|
||||
windows: windowsRelease?.ProductVersion
|
||||
}
|
||||
|
||||
// Validate at least one version was found
|
||||
if (!result.android && !result.windows) {
|
||||
throw new Error('No valid Edge versions found in API response')
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
export async function updateFingerprintUserAgent(fingerprint: BrowserFingerprintWithHeaders, isMobile: boolean): Promise<BrowserFingerprintWithHeaders> {
|
||||
try {
|
||||
const userAgentData = await getUserAgent(isMobile)
|
||||
const componentData = await getAppComponents(isMobile)
|
||||
|
||||
fingerprint.fingerprint.navigator.userAgentData = userAgentData.userAgentMetadata
|
||||
fingerprint.fingerprint.navigator.userAgent = userAgentData.userAgent
|
||||
fingerprint.fingerprint.navigator.appVersion = userAgentData.userAgent.replace(`${fingerprint.fingerprint.navigator.appCodeName}/`, '')
|
||||
|
||||
fingerprint.headers['user-agent'] = userAgentData.userAgent
|
||||
fingerprint.headers['sec-ch-ua'] = `"Microsoft Edge";v="${componentData.edge_major_version}", "Not=A?Brand";v="${componentData.not_a_brand_major_version}", "Chromium";v="${componentData.chrome_major_version}"`
|
||||
fingerprint.headers['sec-ch-ua-full-version-list'] = `"Microsoft Edge";v="${componentData.edge_version}", "Not=A?Brand";v="${componentData.not_a_brand_version}", "Chromium";v="${componentData.chrome_version}"`
|
||||
|
||||
return fingerprint
|
||||
} catch (error) {
|
||||
const errorMsg = error instanceof Error ? error.message : String(error)
|
||||
log(isMobile, 'USER-AGENT-UPDATE', `Failed to update fingerprint: ${errorMsg}`, 'error')
|
||||
throw new Error(`User-Agent update failed: ${errorMsg}`)
|
||||
}
|
||||
}
|
||||
73
src/util/Utils.ts
Normal file
73
src/util/Utils.ts
Normal file
@@ -0,0 +1,73 @@
|
||||
import ms from 'ms'
|
||||
|
||||
export default class Util {
|
||||
|
||||
async wait(ms: number): Promise<void> {
|
||||
// Safety check: prevent extremely long or negative waits
|
||||
const MAX_WAIT_MS = 3600000 // 1 hour max
|
||||
const safeMs = Math.min(Math.max(0, ms), MAX_WAIT_MS)
|
||||
|
||||
if (ms !== safeMs) {
|
||||
console.warn(`[Utils] wait() clamped from ${ms}ms to ${safeMs}ms (max: ${MAX_WAIT_MS}ms)`)
|
||||
}
|
||||
|
||||
return new Promise<void>((resolve) => {
|
||||
setTimeout(resolve, safeMs)
|
||||
})
|
||||
}
|
||||
|
||||
async waitRandom(minMs: number, maxMs: number): Promise<void> {
|
||||
const delta = this.randomNumber(minMs, maxMs)
|
||||
return this.wait(delta)
|
||||
}
|
||||
|
||||
getFormattedDate(ms = Date.now()): string {
|
||||
const today = new Date(ms)
|
||||
const month = String(today.getMonth() + 1).padStart(2, '0') // January is 0
|
||||
const day = String(today.getDate()).padStart(2, '0')
|
||||
const year = today.getFullYear()
|
||||
|
||||
return `${month}/${day}/${year}`
|
||||
}
|
||||
|
||||
shuffleArray<T>(array: T[]): T[] {
|
||||
return array.map(value => ({ value, sort: Math.random() }))
|
||||
.sort((a, b) => a.sort - b.sort)
|
||||
.map(({ value }) => value)
|
||||
}
|
||||
|
||||
randomNumber(min: number, max: number): number {
|
||||
return Math.floor(Math.random() * (max - min + 1)) + min
|
||||
}
|
||||
|
||||
chunkArray<T>(arr: T[], numChunks: number): T[][] {
|
||||
// Validate input to prevent division by zero or invalid chunks
|
||||
if (numChunks <= 0) {
|
||||
throw new Error(`Invalid numChunks: ${numChunks}. Must be a positive integer.`)
|
||||
}
|
||||
|
||||
if (arr.length === 0) {
|
||||
return []
|
||||
}
|
||||
|
||||
const safeNumChunks = Math.max(1, Math.floor(numChunks))
|
||||
const chunkSize = Math.ceil(arr.length / safeNumChunks)
|
||||
const chunks: T[][] = []
|
||||
|
||||
for (let i = 0; i < arr.length; i += chunkSize) {
|
||||
const chunk = arr.slice(i, i + chunkSize)
|
||||
chunks.push(chunk)
|
||||
}
|
||||
|
||||
return chunks
|
||||
}
|
||||
|
||||
stringToMs(input: string | number): number {
|
||||
const milisec = ms(input.toString())
|
||||
if (!milisec) {
|
||||
throw new Error('The string provided cannot be parsed to a valid time! Use a format like "1 min", "1m" or "1 minutes"')
|
||||
}
|
||||
return milisec
|
||||
}
|
||||
|
||||
}
|
||||
75
tsconfig.json
Normal file
75
tsconfig.json
Normal file
@@ -0,0 +1,75 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
/* Visit https://aka.ms/tsconfig.json to read more about this file */
|
||||
/* Basic Options */
|
||||
"target": "ES2020", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019', 'ES2020', 'ES2021', or 'ESNEXT'. */
|
||||
"module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', 'es2020', or 'ESNext'. */
|
||||
// "lib": [], /* Specify library files to be included in the compilation. */
|
||||
// "allowJs": true, /* Allow javascript files to be compiled. */
|
||||
// "checkJs": true, /* Report errors in .js files. */
|
||||
// "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', 'react', 'react-jsx' or 'react-jsxdev'. */
|
||||
"declaration": true, /* Generates corresponding '.d.ts' file. */
|
||||
"declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */
|
||||
"sourceMap": true, /* Generates corresponding '.map' file. */
|
||||
// "outFile": "./", /* Concatenate and emit output to single file. */
|
||||
"outDir": "./dist", /* Redirect output structure to the directory. */
|
||||
"rootDir": "./src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
|
||||
// "composite": true, /* Enable project compilation */
|
||||
// "tsBuildInfoFile": "./", /* Specify file to store incremental compilation information */
|
||||
// "removeComments": true, /* Do not emit comments to output. */
|
||||
// "noEmit": true, /* Do not emit outputs. */
|
||||
// "importHelpers": true, /* Import emit helpers from 'tslib'. */
|
||||
// "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */
|
||||
// "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */
|
||||
/* Strict Type-Checking Options */
|
||||
"strict": true, /* Enable all strict type-checking options. */
|
||||
"noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */
|
||||
"strictNullChecks": true, /* Enable strict null checks. */
|
||||
"strictFunctionTypes": true, /* Enable strict checking of function types. */
|
||||
// "strictBindCallApply": true, /* Enable strict 'bind', 'call', and 'apply' methods on functions. */
|
||||
// "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */
|
||||
"noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */
|
||||
"alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */
|
||||
/* Additional Checks */
|
||||
"noUnusedLocals": true, /* Report errors on unused locals. */
|
||||
// "noUnusedParameters": true, /* Report errors on unused parameters. */
|
||||
"noImplicitReturns": true, /* Report error when not all code paths in function return a value. */
|
||||
"noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */
|
||||
"noUncheckedIndexedAccess": true, /* Include 'undefined' in index signature results */
|
||||
"noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an 'override' modifier. */
|
||||
// "noPropertyAccessFromIndexSignature": true, /* Require undeclared properties from index signatures to use element accesses. */
|
||||
/* Module Resolution Options */
|
||||
"moduleResolution":"node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
|
||||
"types": ["node"],
|
||||
// Keep the explicit "types": ["node"] entry above to ensure Node type resolution in environments that don't auto-detect @types before a full install.
|
||||
// "baseUrl": "./", /* Base directory to resolve non-absolute module names. */
|
||||
// "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */
|
||||
// "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */
|
||||
// "typeRoots": [], /* List of folders to include type definitions from. */
|
||||
// "types": [], /* Type declaration files to be included in compilation. */
|
||||
// "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */
|
||||
"esModuleInterop": true, /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
|
||||
// "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */
|
||||
// "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
|
||||
/* Source Map Options */
|
||||
// "sourceRoot": "", /* Specify the location where debugger should locate TypeScript files instead of source locations. */
|
||||
// "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
|
||||
// "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */
|
||||
// "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */
|
||||
/* Experimental Options */
|
||||
// "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */
|
||||
// "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */
|
||||
/* Advanced Options */
|
||||
"skipLibCheck": true, /* Skip type checking of declaration files. */
|
||||
"forceConsistentCasingInFileNames": true, /* Disallow inconsistently-cased references to the same file. */
|
||||
"resolveJsonModule": true
|
||||
},
|
||||
"include": [
|
||||
"src/**/*.ts",
|
||||
"src/**/*.d.ts",
|
||||
"src/functions/queries.json"
|
||||
],
|
||||
"exclude": [
|
||||
"node_modules"
|
||||
]
|
||||
}
|
||||
Reference in New Issue
Block a user