Formatting + General Improvements

- Added cookie header function
- Added fallback to HTML parsing for dashboard data
- Removed some logs
TheNetsky
2026-01-21 22:48:28 +01:00
parent 7e51bff52b
commit 175ba45543
23 changed files with 252 additions and 390 deletions
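Before the per-file diffs, a standalone TypeScript sketch of the cookie-header pattern this commit centralizes; the real helper is the `buildCookieHeader` method added to `BrowserFunc` further down, and the cookie names and values here are made up for illustration.

```ts
// Standalone sketch of the pattern (assumption: mirrors the buildCookieHeader helper added below).
type SimpleCookie = { name: string; value: string; domain?: string }

function buildCookieHeader(cookies: SimpleCookie[], allowedDomains?: string[]): string {
    const filtered = cookies.filter(c => {
        if (!allowedDomains || allowedDomains.length === 0) return true // no allow-list: keep everything
        if (typeof c.domain !== 'string') return false
        const domain = c.domain.toLowerCase()
        return allowedDomains.some(d => domain.endsWith(d.toLowerCase()))
    })
    // Dedupe by cookie name (keep the last occurrence), then join into a single Cookie header value.
    const deduped = [...new Map(filtered.map(c => [c.name, c] as const)).values()]
    return deduped.map(c => `${c.name}=${c.value}`).join('; ')
}

const cookies: SimpleCookie[] = [
    { name: '_U', value: 'abc', domain: '.bing.com' }, // made-up values
    { name: 'MSPAuth', value: 'def', domain: 'login.live.com' },
    { name: 'tracker', value: 'x', domain: 'example.com' }
]

// Workers build a header from every stored cookie; getDashboardData() scopes it to Microsoft domains.
console.log(buildCookieHeader(cookies))
console.log(buildCookieHeader(cookies, ['bing.com', 'live.com', 'microsoftonline.com']))
```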

View File

@@ -1,51 +1,51 @@
name: Build and Push Docker Image on Release

on:
  release:
    types: [published]
  workflow_dispatch:

env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}

jobs:
  build-and-push:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Log in to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract metadata
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: |
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=raw,value=latest,enable={{is_default_branch}}

      - name: Build and push Docker image
        uses: docker/build-push-action@v5
        with:
          context: .
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

README.md (143 changes)
View File

@@ -3,6 +3,7 @@
---
## Table of Contents
- [Quick Setup](#quick-setup)
- [Nix Setup](#nix-setup)
- [Configuration Options](#configuration-options)
@@ -11,20 +12,25 @@
- [Disclaimer](#disclaimer)
---
## Quick Setup
**Requirements:** Node.js >= 24 and Git
Works on Windows, Linux, macOS, and WSL.
### Get the script
```bash
git clone https://github.com/TheNetsky/Microsoft-Rewards-Script.git
cd Microsoft-Rewards-Script
```
Or, download the latest release ZIP and extract it.
### Create an accounts.json and config.json
Copy, rename, and edit your account and configuration files before deploying the script.
- Copy or rename `src/accounts.example.json` to `src/accounts.json` and add your credentials.
- Copy or rename `src/config.example.json` to `src/config.json` and customize your preferences.
@@ -33,9 +39,10 @@ Copy, rename, and edit your account and configuration files before deploying the
> Prior versions of accounts.json and config.json are not compatible with the current release.
> [!WARNING]
> You must rebuild your script after making any changes to accounts.json and config.json.
### Build and run the script (bare metal version)
```bash
npm run pre-build
npm run build
@@ -43,6 +50,7 @@ npm run start
```
### Build and run the script (docker version)
```bash
docker compose up -d
```
@@ -53,6 +61,7 @@ docker compose up -d
> [!TIP]
> When headless, monitor logs with `docker logs microsoft-rewards-script` (for example, to view passwordless codes), or enable a webhook service in `src/config.json`.
---
## Nix Setup
@@ -65,86 +74,91 @@ If using Nix: `bash scripts/nix/run.sh`
Edit `src/config.json` to customize behavior. Below are all currently available options.
> [!WARNING]
> Rebuild the script after all changes.

### Core

| Setting | Type | Default | Description |
| --- | --- | --- | --- |
| `baseURL` | string | `"https://rewards.bing.com"` | Microsoft Rewards base URL |
| `sessionPath` | string | `"sessions"` | Directory to store browser sessions |
| `headless` | boolean | `false` | Run browser invisibly |
| `runOnZeroPoints` | boolean | `false` | Run even when no points are available |
| `clusters` | number | `1` | Number of concurrent account clusters |
| `errorDiagnostics` | boolean | `false` | Enable error diagnostics |
| `searchOnBingLocalQueries` | boolean | `false` | Use local query list |
| `globalTimeout` | string | `"30sec"` | Timeout for all actions |

> [!CAUTION]
> Set `headless` to `true` when using docker
### Workers

| Setting | Type | Default | Description |
| --- | --- | --- | --- |
| `workers.doDailySet` | boolean | `true` | Complete daily set |
| `workers.doSpecialPromotions` | boolean | `true` | Complete special promotions |
| `workers.doMorePromotions` | boolean | `true` | Complete more promotions |
| `workers.doPunchCards` | boolean | `true` | Complete punchcards |
| `workers.doAppPromotions` | boolean | `true` | Complete app promotions |
| `workers.doDesktopSearch` | boolean | `true` | Perform desktop searches |
| `workers.doMobileSearch` | boolean | `true` | Perform mobile searches |
| `workers.doDailyCheckIn` | boolean | `true` | Complete daily check-in |
| `workers.doReadToEarn` | boolean | `true` | Complete Read-to-Earn |
### Search Settings

| Setting | Type | Default | Description |
| --- | --- | --- | --- |
| `searchSettings.scrollRandomResults` | boolean | `false` | Scroll randomly on results |
| `searchSettings.clickRandomResults` | boolean | `false` | Click random links |
| `searchSettings.parallelSearching` | boolean | `true` | Run searches in parallel |
| `searchSettings.queryEngines` | string[] | `["google", "wikipedia", "reddit", "local"]` | Query engines to use |
| `searchSettings.searchResultVisitTime` | string | `"10sec"` | Time to spend on each search result |
| `searchSettings.searchDelay.min` | string | `"30sec"` | Minimum delay between searches |
| `searchSettings.searchDelay.max` | string | `"1min"` | Maximum delay between searches |
| `searchSettings.readDelay.min` | string | `"30sec"` | Minimum delay for reading |
| `searchSettings.readDelay.max` | string | `"1min"` | Maximum delay for reading |
### Logging

| Setting | Type | Default | Description |
| --- | --- | --- | --- |
| `debugLogs` | boolean | `false` | Enable debug logging |
| `consoleLogFilter.enabled` | boolean | `false` | Enable console log filtering |
| `consoleLogFilter.mode` | string | `"whitelist"` | Filter mode (whitelist/blacklist) |
| `consoleLogFilter.levels` | string[] | `["error", "warn"]` | Log levels to filter |
| `consoleLogFilter.keywords` | string[] | `["starting account"]` | Keywords to filter |
| `consoleLogFilter.regexPatterns` | string[] | `[]` | Regex patterns for filtering |
### Proxy

| Setting | Type | Default | Description |
| --- | --- | --- | --- |
| `proxy.queryEngine` | boolean | `true` | Proxy query engine requests |
### Webhooks

| Setting | Type | Default | Description |
| --- | --- | --- | --- |
| `webhook.discord.enabled` | boolean | `false` | Enable Discord webhook |
| `webhook.discord.url` | string | `""` | Discord webhook URL |
| `webhook.ntfy.enabled` | boolean | `false` | Enable ntfy notifications |
| `webhook.ntfy.url` | string | `""` | ntfy server URL |
| `webhook.ntfy.topic` | string | `""` | ntfy topic |
| `webhook.ntfy.token` | string | `""` | ntfy authentication token |
| `webhook.ntfy.title` | string | `"Microsoft-Rewards-Script"` | Notification title |
| `webhook.ntfy.tags` | string[] | `["bot", "notify"]` | Notification tags |
| `webhook.ntfy.priority` | number | `3` | Notification priority (1-5) |
| `webhook.webhookLogFilter.enabled` | boolean | `false` | Enable webhook log filtering |
| `webhook.webhookLogFilter.mode` | string | `"whitelist"` | Filter mode (whitelist/blacklist) |
| `webhook.webhookLogFilter.levels` | string[] | `["error"]` | Log levels to send |
| `webhook.webhookLogFilter.keywords` | string[] | `["starting account", "select number", "collected"]` | Keywords to filter |
| `webhook.webhookLogFilter.regexPatterns` | string[] | `[]` | Regex patterns for filtering |
> [!WARNING]
> **NTFY** users should set `webhookLogFilter` to `enabled`, or you will receive push notifications for *all* logs.
> When enabled, only account start, 2FA codes, and account completion summaries are delivered as push notifications.
> Customize which notifications you receive with the `keywords` option.
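To make the whitelist behaviour concrete, here is an illustrative TypeScript sketch of how such a filter can decide whether a log line becomes a push notification. The field names mirror the table above, but this is not the project's actual implementation.

```ts
// Illustrative only: a whitelist/blacklist decision in the spirit of webhookLogFilter.
type WebhookLogFilter = {
    enabled: boolean
    mode: 'whitelist' | 'blacklist'
    levels: string[]
    keywords: string[]
}

function shouldPush(filter: WebhookLogFilter, level: string, message: string): boolean {
    if (!filter.enabled) return true // filtering off: every log becomes a push notification
    const matches =
        filter.levels.includes(level) ||
        filter.keywords.some(k => message.toLowerCase().includes(k.toLowerCase()))
    return filter.mode === 'whitelist' ? matches : !matches
}

// With the default keywords/levels above, errors and lines such as "Starting account ..." get through:
const filter: WebhookLogFilter = {
    enabled: true,
    mode: 'whitelist',
    levels: ['error'],
    keywords: ['starting account', 'select number', 'collected']
}
console.log(shouldPush(filter, 'info', 'Starting account user@example.com')) // true
console.log(shouldPush(filter, 'info', 'Fetching queries')) // false
```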
@@ -152,7 +166,7 @@ Edit `src/config.json` to customize behavior. Below are all currently available
## Account Setup
Edit `src/accounts.json`.
> [!WARNING]
> The file is a **flat array** of accounts, not `{ "accounts": [ ... ] }`.
@@ -208,10 +222,11 @@ Edit `src/accounts.json`.
> When using 2FA login, adding your `totpSecret` enables the script to automatically generate and enter the timed 6-digit code at login. To get your `totpSecret`: in your Microsoft Security settings, click 'Manage how you sign in', add an Authenticator app, and when shown the QR code select 'enter code manually'. Use this code as the `totpSecret` in `accounts.json`.
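For reference, a minimal sketch of generating such a code from a `totpSecret`, using the `otpauth` npm package (an assumption for illustration; the script may use a different TOTP library internally):

```ts
// Minimal TOTP sketch, assuming the `otpauth` package; the secret below is a placeholder.
import * as OTPAuth from 'otpauth'

const totp = new OTPAuth.TOTP({
    digits: 6, // Microsoft uses standard 6-digit codes
    period: 30, // a new code every 30 seconds
    secret: OTPAuth.Secret.fromBase32('JBSWY3DPEHPK3PXP') // replace with your totpSecret
})

console.log(totp.generate()) // prints the current 6-digit code
```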
---
## Troubleshooting
> [!TIP]
> Most login issues can be fixed by deleting your `/sessions` folder and redeploying the script.
---

View File

@@ -165,4 +165,4 @@ async function main() {
    })
}

main()

View File

@@ -1,12 +1,6 @@
import path from 'path'
import fs from 'fs'
-import {
-    getDirname,
-    getProjectRoot,
-    log,
-    loadJsonFile,
-    safeRemoveDirectory
-} from '../utils.js'
+import { getDirname, getProjectRoot, log, loadJsonFile, safeRemoveDirectory } from '../utils.js'
const __dirname = getDirname(import.meta.url)
const projectRoot = getProjectRoot(__dirname)
@@ -64,4 +58,4 @@ if (!success) {
    process.exit(1)
}

log('INFO', 'Done.')

View File

@@ -94,10 +94,7 @@ export function loadJsonFile(possiblePaths, required = true) {
export function loadConfig(projectRoot, isDev = false) {
    const possiblePaths = isDev
        ? [path.join(projectRoot, 'src', 'config.json')]
-        : [
-              path.join(projectRoot, 'dist', 'config.json'),
-              path.join(projectRoot, 'config.json')
-          ]
+        : [path.join(projectRoot, 'dist', 'config.json'), path.join(projectRoot, 'config.json')]

    const result = loadJsonFile(possiblePaths, true)
@@ -121,17 +118,20 @@ export function loadAccounts(projectRoot, isDev = false) {
    const possiblePaths = isDev
        ? [path.join(projectRoot, 'src', 'accounts.dev.json')]
        : [
              path.join(projectRoot, 'dist', 'accounts.json'),
              path.join(projectRoot, 'accounts.json'),
              path.join(projectRoot, 'accounts.example.json')
          ]

    return loadJsonFile(possiblePaths, true)
}

export function findAccountByEmail(accounts, email) {
    if (!email || typeof email !== 'string') return null
-    return accounts.find(a => a?.email && typeof a.email === 'string' && a.email.toLowerCase() === email.toLowerCase()) || null
+    return (
+        accounts.find(a => a?.email && typeof a.email === 'string' && a.email.toLowerCase() === email.toLowerCase()) ||
+        null
+    )
}

export function getRuntimeBase(projectRoot, isDev = false) {
@@ -266,4 +266,4 @@ export function safeRemoveDirectory(dirPath, projectRoot) {
        log('ERROR', `Error: ${error.message}`)
        return false
    }
}

View File

@@ -37,4 +37,4 @@
"desktop": false "desktop": false
} }
} }
] ]

View File

@@ -1,5 +1,5 @@
import type { BrowserContext, Cookie } from 'patchright'
-import type { AxiosRequestConfig, AxiosResponse } from 'axios'
+import type { AxiosRequestConfig } from 'axios'
import type { MicrosoftRewardsBot } from '../index'
import { saveSessionData } from '../util/Load'
@@ -23,45 +23,56 @@ export default class BrowserFunc {
     */
    async getDashboardData(): Promise<DashboardData> {
        try {
-            const allowedDomains = ['bing.com', 'live.com', 'microsoftonline.com'];
-            const cookieHeader = [
-                ...new Map(
-                    this.bot.cookies.mobile
-                        .filter(
-                            (c: { name: string; value: string; domain?: string }) =>
-                                typeof c.domain === 'string' &&
-                                allowedDomains.some(d =>
-                                    c.domain && c.domain.toLowerCase().endsWith(d)
-                                )
-                        )
-                        .map(c => [c.name, c]) // dedupe by name, keep last
-                ).values()
-            ]
-                .map(c => `${c.name}=${c.value}`)
-                .join('; ');
            const request: AxiosRequestConfig = {
                url: 'https://rewards.bing.com/api/getuserinfo?type=1',
                method: 'GET',
                headers: {
                    ...(this.bot.fingerprint?.headers ?? {}),
-                    Cookie: cookieHeader,
+                    Cookie: this.buildCookieHeader(this.bot.cookies.mobile, [
+                        'bing.com',
+                        'live.com',
+                        'microsoftonline.com'
+                    ]),
                    Referer: 'https://rewards.bing.com/',
                    Origin: 'https://rewards.bing.com'
                }
            }

            const response = await this.bot.axios.request(request)

-            return response.data.dashboard as DashboardData
+            if (response.data?.dashboard) {
+                return response.data.dashboard as DashboardData
+            }
+            throw new Error('Dashboard data missing from API response')
        } catch (error) {
-            this.bot.logger.info(
-                this.bot.isMobile,
-                'GET-DASHBOARD-DATA',
-                `Error fetching dashboard data: ${error instanceof Error ? error.message : String(error)}`
-            )
-            throw error
+            this.bot.logger.warn(this.bot.isMobile, 'GET-DASHBOARD-DATA', 'API failed, trying HTML fallback')
+
+            // Try using script from dashboard page
+            try {
+                const request: AxiosRequestConfig = {
+                    url: this.bot.config.baseURL,
+                    method: 'GET',
+                    headers: {
+                        ...(this.bot.fingerprint?.headers ?? {}),
+                        Cookie: this.buildCookieHeader(this.bot.cookies.mobile),
+                        Referer: 'https://rewards.bing.com/',
+                        Origin: 'https://rewards.bing.com'
+                    }
+                }
+
+                const response = await this.bot.axios.request(request)
+                const match = response.data.match(/var\s+dashboard\s*=\s*({.*?});/s)
+                if (!match?.[1]) {
+                    throw new Error('Dashboard script not found in HTML')
+                }
+
+                return JSON.parse(match[1]) as DashboardData
+            } catch (fallbackError) {
+                // If both fail
+                this.bot.logger.error(this.bot.isMobile, 'GET-DASHBOARD-DATA', 'Failed to get dashboard data')
+                throw fallbackError
+            }
        }
    }
@@ -84,7 +95,7 @@ export default class BrowserFunc {
            const response = await this.bot.axios.request(request)
            return response.data as AppDashboardData
        } catch (error) {
-            this.bot.logger.info(
+            this.bot.logger.error(
                this.bot.isMobile,
                'GET-APP-DASHBOARD-DATA',
                `Error fetching dashboard data: ${error instanceof Error ? error.message : String(error)}`
@@ -112,7 +123,7 @@ export default class BrowserFunc {
            const response = await this.bot.axios.request(request)
            return response.data as XboxDashboardData
        } catch (error) {
-            this.bot.logger.info(
+            this.bot.logger.error(
                this.bot.isMobile,
                'GET-XBOX-DASHBOARD-DATA',
                `Error fetching dashboard data: ${error instanceof Error ? error.message : String(error)}`
@@ -308,141 +319,21 @@ export default class BrowserFunc {
        }
    }

-    mergeCookies(response: AxiosResponse, currentCookieHeader: string = '', whitelist?: string[]): string {
-        const cookieMap = new Map<string, string>(
-            currentCookieHeader
-                .split(';')
-                .map(pair => pair.split('=').map(s => s.trim()))
-                .filter(([name, value]) => name && value)
-                .map(([name, value]) => [name, value] as [string, string])
-        )
-
-        const setCookieList = [response.headers['set-cookie']].flat().filter(Boolean) as string[]
-        const cookiesByName = new Map(this.bot.cookies.mobile.map(c => [c.name, c]))
-
-        for (const setCookie of setCookieList) {
-            const [nameValue, ...attributes] = setCookie.split(';').map(s => s.trim())
-            if (!nameValue) continue
-
-            const [name, value] = nameValue.split('=').map(s => s.trim())
-            if (!name) continue
-
-            if (whitelist && !whitelist?.includes(name)) {
-                continue
-            }
-
-            const attrs = this.parseAttributes(attributes)
-            const existing = cookiesByName.get(name)
-
-            if (!value) {
-                if (existing) {
-                    cookiesByName.delete(name)
-                    this.bot.cookies.mobile = this.bot.cookies.mobile.filter(c => c.name !== name)
-                }
-                cookieMap.delete(name)
-                continue
-            }
-
-            if (attrs.expires !== undefined && attrs.expires < Date.now() / 1000) {
-                if (existing) {
-                    cookiesByName.delete(name)
-                    this.bot.cookies.mobile = this.bot.cookies.mobile.filter(c => c.name !== name)
-                }
-                cookieMap.delete(name)
-                continue
-            }
-
-            cookieMap.set(name, value)
-
-            if (existing) {
-                this.updateCookie(existing, value, attrs)
-            } else {
-                this.bot.cookies.mobile.push(this.createCookie(name, value, attrs))
-            }
-        }
-
-        return Array.from(cookieMap, ([name, value]) => `${name}=${value}`).join('; ')
-    }
-
-    private parseAttributes(attributes: string[]) {
-        const attrs: {
-            domain?: string
-            path?: string
-            expires?: number
-            httpOnly?: boolean
-            secure?: boolean
-            sameSite?: Cookie['sameSite']
-        } = {}
-
-        for (const attr of attributes) {
-            const [key, val] = attr.split('=').map(s => s?.trim())
-            const lowerKey = key?.toLowerCase()
-
-            switch (lowerKey) {
-                case 'domain':
-                case 'path': {
-                    if (val) attrs[lowerKey] = val
-                    break
-                }
-                case 'expires': {
-                    if (val) {
-                        const ts = Date.parse(val)
-                        if (!isNaN(ts)) attrs.expires = Math.floor(ts / 1000)
-                    }
-                    break
-                }
-                case 'max-age': {
-                    if (val) {
-                        const maxAge = Number(val)
-                        if (!isNaN(maxAge)) attrs.expires = Math.floor(Date.now() / 1000) + maxAge
-                    }
-                    break
-                }
-                case 'httponly': {
-                    attrs.httpOnly = true
-                    break
-                }
-                case 'secure': {
-                    attrs.secure = true
-                    break
-                }
-                case 'samesite': {
-                    const normalized = val?.toLowerCase()
-                    if (normalized && ['lax', 'strict', 'none'].includes(normalized)) {
-                        attrs.sameSite = (normalized.charAt(0).toUpperCase() +
-                            normalized.slice(1)) as Cookie['sameSite']
-                    }
-                    break
-                }
-            }
-        }
-
-        return attrs
-    }
-
-    private updateCookie(cookie: Cookie, value: string, attrs: ReturnType<typeof this.parseAttributes>) {
-        cookie.value = value
-        if (attrs.domain) cookie.domain = attrs.domain
-        if (attrs.path) cookie.path = attrs.path
-        //if (attrs.expires !== undefined) cookie.expires = attrs.expires
-        //if (attrs.httpOnly) cookie.httpOnly = true
-        //if (attrs.secure) cookie.secure = true
-        //if (attrs.sameSite) cookie.sameSite = attrs.sameSite
-    }
-
-    private createCookie(name: string, value: string, attrs: ReturnType<typeof this.parseAttributes>): Cookie {
-        return {
-            name,
-            value,
-            domain: attrs.domain || '.bing.com',
-            path: attrs.path || '/'
-            /*
-            ...(attrs.expires !== undefined && { expires: attrs.expires }),
-            ...(attrs.httpOnly && { httpOnly: true }),
-            ...(attrs.secure && { secure: true }),
-            ...(attrs.sameSite && { sameSite: attrs.sameSite })
-            */
-        } as Cookie
-    }
+    buildCookieHeader(cookies: Cookie[], allowedDomains?: string[]): string {
+        return [
+            ...new Map(
+                cookies
+                    .filter(c => {
+                        if (!allowedDomains || allowedDomains.length === 0) return true
+                        return (
+                            typeof c.domain === 'string' &&
+                            allowedDomains.some(d => c.domain.toLowerCase().endsWith(d.toLowerCase()))
+                        )
+                    })
+                    .map(c => [c.name, c])
+            ).values()
+        ]
+            .map(c => `${c.name}=${c.value}`)
+            .join('; ')
+    }
}

View File

@@ -220,14 +220,14 @@ export default class BrowserUtils {
            )

            // Wait for selector to exist before clicking
-            await page.waitForSelector(selector, { timeout: 10000 })
+            await page.waitForSelector(selector, { timeout: 1000 }).catch(() => {})

            const cursor = createCursor(page as any)
            await cursor.click(selector, options)
            return true
        } catch (error) {
-            this.bot.logger.error(
+            this.bot.logger.warn(
                this.bot.isMobile,
                'GHOST-CLICK',
                `Failed for ${selector}: ${error instanceof Error ? error.message : String(error)}`

View File

@@ -78,7 +78,7 @@ export class Login {
        try {
            this.bot.logger.info(this.bot.isMobile, 'LOGIN', 'Starting login process')

-            await page.goto('https://www.bing.com/rewards/dashboard', { waitUntil: 'domcontentloaded' }).catch(() => { })
+            await page.goto('https://www.bing.com/rewards/dashboard', { waitUntil: 'domcontentloaded' }).catch(() => {})

            await this.bot.utils.wait(2000)
            await this.bot.browser.utils.reloadBadPage(page)
            await this.bot.browser.utils.disableFido(page)
@@ -154,7 +154,7 @@
    }

    private async detectCurrentState(page: Page, account?: Account): Promise<LoginState> {
-        await page.waitForLoadState('networkidle', { timeout: 5000 }).catch(() => { })
+        await page.waitForLoadState('networkidle', { timeout: 5000 }).catch(() => {})

        const url = new URL(page.url())
        this.bot.logger.debug(this.bot.isMobile, 'DETECT-STATE', `Current URL: ${url.hostname}${url.pathname}`)
@@ -190,7 +190,7 @@ export class Login {
            [this.selectors.totpInput, '2FA_TOTP'],
            [this.selectors.totpInputOld, '2FA_TOTP'],
            [this.selectors.otpCodeEntry, 'OTP_CODE_ENTRY'], // PR 450
            [this.selectors.otpInput, 'OTP_CODE_ENTRY'] // My Fix
        ]

        const results = await Promise.all(
@@ -435,7 +435,7 @@
                    waitUntil: 'domcontentloaded',
                    timeout: 10000
                })
-                .catch(() => { })
+                .catch(() => {})

            await this.bot.utils.wait(3000)
            this.bot.logger.info(this.bot.isMobile, 'LOGIN', 'Recovery navigation successful')
            return true
@@ -446,7 +446,7 @@ export class Login {
                    waitUntil: 'domcontentloaded',
                    timeout: 10000
                })
-                .catch(() => { })
+                .catch(() => {})

            await this.bot.utils.wait(3000)
            this.bot.logger.info(this.bot.isMobile, 'LOGIN', 'Fallback navigation successful')
            return true
@@ -502,7 +502,11 @@
            }

            case 'OTP_CODE_ENTRY': {
-                this.bot.logger.info(this.bot.isMobile, 'LOGIN', 'OTP code entry page detected, attempting to find password option')
+                this.bot.logger.info(
+                    this.bot.isMobile,
+                    'LOGIN',
+                    'OTP code entry page detected, attempting to find password option'
+                )

                // My Fix: Click "Use your password" footer
                const footerLink = await page
@@ -552,7 +556,7 @@ export class Login {
    private async finalizeLogin(page: Page, email: string) {
        this.bot.logger.info(this.bot.isMobile, 'LOGIN', 'Finalizing login')

-        await page.goto(this.bot.config.baseURL, { waitUntil: 'networkidle', timeout: 10000 }).catch(() => { })
+        await page.goto(this.bot.config.baseURL, { waitUntil: 'networkidle', timeout: 10000 }).catch(() => {})

        const loginRewardsSuccess = new URL(page.url()).hostname === 'rewards.bing.com'
        if (loginRewardsSuccess) {
@@ -583,7 +587,7 @@
        this.bot.logger.info(this.bot.isMobile, 'LOGIN-BING', 'Verifying Bing session')
        try {
-            await page.goto(url, { waitUntil: 'networkidle', timeout: 10000 }).catch(() => { })
+            await page.goto(url, { waitUntil: 'networkidle', timeout: 10000 }).catch(() => {})

            for (let i = 0; i < loopMax; i++) {
                if (page.isClosed()) break
@@ -605,7 +609,7 @@
            )

            if (atBingHome) {
-                await this.bot.browser.utils.tryDismissAllMessages(page).catch(() => { })
+                await this.bot.browser.utils.tryDismissAllMessages(page).catch(() => {})

                const signedIn = await page
                    .waitForSelector(this.selectors.bingProfile, { timeout: 3000 })
@@ -641,7 +645,7 @@ export class Login {
        try {
            await page
                .goto(`${this.bot.config.baseURL}?_=${Date.now()}`, { waitUntil: 'networkidle', timeout: 10000 })
-                .catch(() => { })
+                .catch(() => {})

            for (let i = 0; i < loopMax; i++) {
                if (page.isClosed()) break

View File

@@ -45,7 +45,7 @@ export function promptInput(options: PromptOptions): Promise<string | null> {
export async function getSubtitleMessage(page: Page): Promise<string | null> {
    const message = await page
-        .waitForSelector('[data-testid="subtitle"]', { state: 'visible', timeout: 1000 })
+        .waitForSelector('[data-testid="subtitle"], div#oneTimeCodeDescription', { state: 'visible', timeout: 1000 })
        .catch(() => null)

    if (!message) return null

View File

@@ -22,12 +22,7 @@
"scrollRandomResults": false, "scrollRandomResults": false,
"clickRandomResults": false, "clickRandomResults": false,
"parallelSearching": true, "parallelSearching": true,
"queryEngines": [ "queryEngines": ["google", "wikipedia", "reddit", "local"],
"google",
"wikipedia",
"reddit",
"local"
],
"searchResultVisitTime": "10sec", "searchResultVisitTime": "10sec",
"searchDelay": { "searchDelay": {
"min": "30sec", "min": "30sec",
@@ -42,13 +37,8 @@
"consoleLogFilter": { "consoleLogFilter": {
"enabled": false, "enabled": false,
"mode": "whitelist", "mode": "whitelist",
"levels": [ "levels": ["error", "warn"],
"error", "keywords": ["starting account"],
"warn"
],
"keywords": [
"starting account"
],
"regexPatterns": [] "regexPatterns": []
}, },
"proxy": { "proxy": {
@@ -65,24 +55,15 @@
"topic": "", "topic": "",
"token": "", "token": "",
"title": "Microsoft-Rewards-Script", "title": "Microsoft-Rewards-Script",
"tags": [ "tags": ["bot", "notify"],
"bot",
"notify"
],
"priority": 3 "priority": 3
}, },
"webhookLogFilter": { "webhookLogFilter": {
"enabled": false, "enabled": false,
"mode": "whitelist", "mode": "whitelist",
"levels": [ "levels": ["error"],
"error" "keywords": ["starting account", "select number", "collected"],
],
"keywords": [
"starting account",
"select number",
"collected"
],
"regexPatterns": [] "regexPatterns": []
} }
} }
} }

View File

@@ -77,7 +77,6 @@ export class QueryCore {
        const baseTopics = this.normalizeAndDedupe(topicLists.flat())

        if (!baseTopics.length) {
-            this.bot.logger.warn(this.bot.isMobile, 'QUERY-MANAGER', 'No queries')
            this.bot.logger.debug(this.bot.isMobile, 'QUERY-MANAGER', 'No base topics found (all sources empty)')
            return []
        }
@@ -115,7 +114,6 @@
            )

            if (!finalQueries.length) {
-                this.bot.logger.warn(this.bot.isMobile, 'QUERY-MANAGER', 'No queries')
                this.bot.logger.debug(this.bot.isMobile, 'QUERY-MANAGER', 'finalQueries deduped to 0')
                return []
            }
@@ -124,7 +122,6 @@
            return finalQueries
        } catch (error) {
-            this.bot.logger.warn(this.bot.isMobile, 'QUERY-MANAGER', 'No queries')
            this.bot.logger.debug(
                this.bot.isMobile,
                'QUERY-MANAGER',
@@ -216,7 +213,6 @@ export class QueryCore {
            const response = await this.bot.axios.request(request, this.bot.config.proxy.queryEngine)
            const trendsData = this.extractJsonFromResponse(response.data)

            if (!trendsData) {
-                this.bot.logger.warn(this.bot.isMobile, 'SEARCH-GOOGLE-TRENDS', 'No queries')
                this.bot.logger.debug(this.bot.isMobile, 'SEARCH-GOOGLE-TRENDS', 'No trendsData parsed from response')
                return []
            }
@@ -234,7 +230,6 @@
                })
            }
        } catch (error) {
-            this.bot.logger.warn(this.bot.isMobile, 'SEARCH-GOOGLE-TRENDS', 'No queries')
            this.bot.logger.debug(
                this.bot.isMobile,
                'SEARCH-GOOGLE-TRENDS',
@@ -277,7 +272,6 @@ export class QueryCore {
                response.data.suggestionGroups?.[0]?.searchSuggestions?.map((x: { query: any }) => x.query) ?? []

            if (!suggestions.length) {
-                this.bot.logger.warn(this.bot.isMobile, 'SEARCH-BING-SUGGESTIONS', 'No queries')
                this.bot.logger.debug(
                    this.bot.isMobile,
                    'SEARCH-BING-SUGGESTIONS',
@@ -287,7 +281,6 @@
            return suggestions
        } catch (error) {
-            this.bot.logger.warn(this.bot.isMobile, 'SEARCH-BING-SUGGESTIONS', 'No queries')
            this.bot.logger.debug(
                this.bot.isMobile,
                'SEARCH-BING-SUGGESTIONS',
@@ -314,7 +307,6 @@ export class QueryCore {
            const out = Array.isArray(related) ? related : []

            if (!out.length) {
-                this.bot.logger.warn(this.bot.isMobile, 'SEARCH-BING-RELATED', 'No queries')
                this.bot.logger.debug(
                    this.bot.isMobile,
                    'SEARCH-BING-RELATED',
@@ -324,7 +316,6 @@
            return out
        } catch (error) {
-            this.bot.logger.warn(this.bot.isMobile, 'SEARCH-BING-RELATED', 'No queries')
            this.bot.logger.debug(
                this.bot.isMobile,
                'SEARCH-BING-RELATED',
@@ -359,7 +350,6 @@ export class QueryCore {
                ) ?? []

            if (!topics.length) {
-                this.bot.logger.warn(this.bot.isMobile, 'SEARCH-BING-TRENDING', 'No queries')
                this.bot.logger.debug(
                    this.bot.isMobile,
                    'SEARCH-BING-TRENDING',
@@ -369,7 +359,6 @@
            return topics
        } catch (error) {
-            this.bot.logger.warn(this.bot.isMobile, 'SEARCH-BING-TRENDING', 'No queries')
            this.bot.logger.debug(
                this.bot.isMobile,
                'SEARCH-BING-TRENDING',
@@ -402,7 +391,6 @@ export class QueryCore {
            const out = articles.slice(0, 50).map(a => a.article.replace(/_/g, ' '))

            if (!out.length) {
-                this.bot.logger.warn(this.bot.isMobile, 'SEARCH-WIKIPEDIA-TRENDING', 'No queries')
                this.bot.logger.debug(
                    this.bot.isMobile,
                    'SEARCH-WIKIPEDIA-TRENDING',
@@ -412,7 +400,6 @@
            return out
        } catch (error) {
-            this.bot.logger.warn(this.bot.isMobile, 'SEARCH-WIKIPEDIA-TRENDING', 'No queries')
            this.bot.logger.debug(
                this.bot.isMobile,
                'SEARCH-WIKIPEDIA-TRENDING',
@@ -441,7 +428,6 @@ export class QueryCore {
            const out = posts.filter(p => !p.data.over_18).map(p => p.data.title)

            if (!out.length) {
-                this.bot.logger.warn(this.bot.isMobile, 'SEARCH-REDDIT-TRENDING', 'No queries')
                this.bot.logger.debug(
                    this.bot.isMobile,
                    'SEARCH-REDDIT-TRENDING',
@@ -451,7 +437,6 @@
            return out
        } catch (error) {
-            this.bot.logger.warn(this.bot.isMobile, 'SEARCH-REDDIT', 'No queries')
            this.bot.logger.debug(
                this.bot.isMobile,
                'SEARCH-REDDIT',
@@ -476,7 +461,6 @@ export class QueryCore {
            )

            if (!out.length) {
-                this.bot.logger.warn(this.bot.isMobile, 'SEARCH-LOCAL-QUERY-LIST', 'No queries')
                this.bot.logger.debug(
                    this.bot.isMobile,
                    'SEARCH-LOCAL-QUERY-LIST',
@@ -486,7 +470,6 @@
            return out
        } catch (error) {
-            this.bot.logger.warn(this.bot.isMobile, 'SEARCH-LOCAL-QUERY-LIST', 'No queries')
            this.bot.logger.debug(
                this.bot.isMobile,
                'SEARCH-LOCAL-QUERY-LIST',

View File

@@ -21,9 +21,9 @@ export class DoubleSearchPoints extends Workers {
            return
        }

-        this.cookieHeader = (this.bot.isMobile ? this.bot.cookies.mobile : this.bot.cookies.desktop)
-            .map((c: { name: string; value: string }) => `${c.name}=${c.value}`)
-            .join('; ')
+        this.cookieHeader = this.bot.browser.func.buildCookieHeader(
+            this.bot.isMobile ? this.bot.cookies.mobile : this.bot.cookies.desktop
+        )

        const fingerprintHeaders = { ...this.bot.fingerprint.headers }
        delete fingerprintHeaders['Cookie']

View File

@@ -25,9 +25,9 @@ export class FindClippy extends Workers {
            return
        }

-        this.cookieHeader = (this.bot.isMobile ? this.bot.cookies.mobile : this.bot.cookies.desktop)
-            .map((c: { name: string; value: string }) => `${c.name}=${c.value}`)
-            .join('; ')
+        this.cookieHeader = this.bot.browser.func.buildCookieHeader(
+            this.bot.isMobile ? this.bot.cookies.mobile : this.bot.cookies.desktop
+        )

        const fingerprintHeaders = { ...this.bot.fingerprint.headers }
        delete fingerprintHeaders['Cookie']

View File

@@ -23,9 +23,9 @@ export class Quiz extends Workers {
        )

        try {
-            this.cookieHeader = (this.bot.isMobile ? this.bot.cookies.mobile : this.bot.cookies.desktop)
-                .map((c: { name: string; value: string }) => `${c.name}=${c.value}`)
-                .join('; ')
+            this.cookieHeader = this.bot.browser.func.buildCookieHeader(
+                this.bot.isMobile ? this.bot.cookies.mobile : this.bot.cookies.desktop
+            )

            const fingerprintHeaders = { ...this.bot.fingerprint.headers }
            delete fingerprintHeaders['Cookie']

View File

@@ -30,9 +30,9 @@ export class UrlReward extends Workers {
        )

        try {
-            this.cookieHeader = (this.bot.isMobile ? this.bot.cookies.mobile : this.bot.cookies.desktop)
-                .map((c: { name: string; value: string }) => `${c.name}=${c.value}`)
-                .join('; ')
+            this.cookieHeader = this.bot.browser.func.buildCookieHeader(
+                this.bot.isMobile ? this.bot.cookies.mobile : this.bot.cookies.desktop
+            )

            const fingerprintHeaders = { ...this.bot.fingerprint.headers }
            delete fingerprintHeaders['Cookie']

View File

@@ -53,7 +53,7 @@ export class Search extends Workers {
        queries = [...new Set(queries.map(q => q.trim()).filter(Boolean))]

-        this.bot.logger.debug(isMobile, 'SEARCH-BING', `Query pool ready | count=${queries.length}`)
+        this.bot.logger.info(isMobile, 'SEARCH-BING', `Search query pool ready | count=${queries.length}`)

        // Go to bing
        const targetUrl = this.searchPageURL ? this.searchPageURL : this.bingHome
@@ -169,10 +169,10 @@
                const newPool = [...new Set(merged)]
                queries = this.bot.utils.shuffleArray(newPool)

-                this.bot.logger.debug(
+                this.bot.logger.info(
                    isMobile,
                    'SEARCH-BING-EXTRA',
-                    `New query pool generated | count=${queries.length}`
+                    `New search query pool generated | count=${queries.length}`
                )

                for (const query of queries) {

View File

@@ -32,9 +32,9 @@ export class SearchOnBing extends Workers {
        )

        try {
-            this.cookieHeader = (this.bot.isMobile ? this.bot.cookies.mobile : this.bot.cookies.desktop)
-                .map((c: { name: string; value: string }) => `${c.name}=${c.value}`)
-                .join('; ')
+            this.cookieHeader = this.bot.browser.func.buildCookieHeader(
+                this.bot.isMobile ? this.bot.cookies.mobile : this.bot.cookies.desktop
+            )

            const fingerprintHeaders = { ...this.bot.fingerprint.headers }
            delete fingerprintHeaders['Cookie']

View File

@@ -579,4 +579,4 @@
"meilleurs cafés près de chez moi" "meilleurs cafés près de chez moi"
] ]
} }
] ]

View File

@@ -579,4 +579,4 @@
"meilleurs cafés près de chez moi" "meilleurs cafés près de chez moi"
] ]
} }
] ]

View File

@@ -113,4 +113,4 @@
"how to clear browser cache", "how to clear browser cache",
"popular tech podcasts", "popular tech podcasts",
"how to stay motivated" "how to stay motivated"
] ]

View File

@@ -45,7 +45,7 @@ function formatMessage(message: string | Error): string {
}

export class Logger {
-    constructor(private bot: MicrosoftRewardsBot) { }
+    constructor(private bot: MicrosoftRewardsBot) {}

    info(isMobile: Platform, title: string, message: string, color?: ColorKey) {
        return this.baseLog('info', isMobile, title, message, color)
@@ -180,7 +180,7 @@ export class Logger {
                        isMatch = true
                        break
                    }
-                } catch { }
+                } catch {}
            }
        }

View File

@@ -40,12 +40,8 @@
// "noPropertyAccessFromIndexSignature": true, /* Require undeclared properties from index signatures to use element accesses. */ // "noPropertyAccessFromIndexSignature": true, /* Require undeclared properties from index signatures to use element accesses. */
/* Module Resolution Options */ /* Module Resolution Options */
"moduleResolution": "node" /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */, "moduleResolution": "node" /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */,
"types": [ "types": ["node"],
"node" "typeRoots": ["./node_modules/@types"],
],
"typeRoots": [
"./node_modules/@types"
],
// Keep explicit typeRoots to ensure resolution in environments that don't auto-detect before full install. // Keep explicit typeRoots to ensure resolution in environments that don't auto-detect before full install.
// "baseUrl": "./", /* Base directory to resolve non-absolute module names. */ // "baseUrl": "./", /* Base directory to resolve non-absolute module names. */
// "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */ // "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */
@@ -76,7 +72,5 @@
"src/functions/bing-search-activity-queries.json", "src/functions/bing-search-activity-queries.json",
"src/functions/search-queries.json" "src/functions/search-queries.json"
], ],
"exclude": [ "exclude": ["node_modules"]
"node_modules" }
]
}