Added proxy support #9

Open
wants to merge 8 commits into master

75 changes: 66 additions & 9 deletions 403bypasser.py
@@ -4,7 +4,6 @@

# INITIALISE COLORAMA
init()

# DISPLAY BANNER -- START
custom_fig = Figlet(font='slant')
print(Fore.BLUE + Style.BRIGHT + custom_fig.renderText('-------------') + Style.RESET_ALL)
@@ -13,6 +12,10 @@
print(Fore.LIGHTMAGENTA_EX + Style.BRIGHT + "-----> Twitter : https://twitter.com/yunem_se\n")
print(Fore.MAGENTA + Style.BRIGHT + "-----> GitHub : https://github.com/yunemse48\n")
print(Fore.MAGENTA + Style.BRIGHT + "-----> LinkedIn : https://www.linkedin.com/in/yunus-emre-sert-9102a9135/\n")
print(Fore.GREEN + Style.BRIGHT + "____________________ Proxy Contribution - Judd Rouillon ____________________\n")
print(Fore.LIGHTMAGENTA_EX + Style.BRIGHT + "-----> Twitter : https://twitter.com/judd3rm0n\n")
print(Fore.MAGENTA + Style.BRIGHT + "-----> GitHub : https://github.com/judd3rm0n\n")
print(Fore.MAGENTA + Style.BRIGHT + "-----> LinkedIn : https://www.linkedin.com/in/judd-r/\n")
print(Fore.BLUE + Style.BRIGHT + custom_fig.renderText('-------------') + Style.RESET_ALL)
# DISPLAY BANNER -- END

@@ -22,11 +25,17 @@
parser.add_argument("-U", "--urllist", type=str, help="path to list of URLs, ex: urllist.txt")
parser.add_argument("-d", "--dir", type=str, help="Single directory to scan, ex: /admin", nargs="?", const="/")
parser.add_argument("-D", "--dirlist", type=str, help="path to list of directories, ex: dirlist.txt")
parser.add_argument("-p", "--proxy", type=str, help="Send requests to proxy (may help with rate limits and burp/interceptor), ex: 127.0.0.1:8080", default="") # Added arg

args = parser.parse_args()
# HANDLE ARGUMENTS -- END


# Global proxy settings, reused for every request below
proxy = args.proxy
inProxies = {
"http": proxy,
"https": proxy,
}
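
For context, `requests` consumes a scheme-to-proxy mapping shaped like `inProxies` above. A minimal, self-contained sketch of that usage (the target URL and proxy address are illustrative only):

```python
import requests

# Illustrative mapping: one proxy entry per outgoing scheme, written here with
# an explicit http:// prefix (the diff passes the bare IP:PORT string instead).
proxies = {
    "http": "http://127.0.0.1:8080",
    "https": "http://127.0.0.1:8080",
}

# verify=False skips TLS certificate checks, which is typically needed when an
# intercepting proxy such as Burp re-signs the traffic.
r = requests.get("https://example.com/admin", proxies=proxies, verify=False)
print(r.status_code)
```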

class Arguments():
def __init__(self, url, urllist, dir, dirlist):
@@ -36,16 +45,40 @@ def __init__(self, url, urllist, dir, dirlist):
self.dirlist = dirlist
self.urls = []
self.dirs = []
print(proxy) # debug: print the supplied proxy (blank when none was given)


self.checkURL()
self.checkDir()
self.checkPro()

# Return the configured proxy - might not be required.
def return_pro(self):
return proxy

def return_urls(self):
return self.urls

def return_dirs(self):
return self.dirs

# Proxy validation - note: str.isalpha() only rejects purely alphabetic values, so this check is loose.
def checkPro(self):
if proxy:
# disable_warnings is needed to stop urllib3 warnings appearing under each result.
requests.packages.urllib3.disable_warnings()
if str.isalpha(proxy):
print("The proxy input is incorrect, it should be IP:PORT ex: 127.0.0.1:8080! Exiting...\n")
sys.exit()
# Check that ':' is included.
if ":" not in proxy:
print("Proxy input is missing ':', it should be IP:PORT ex: 127.0.0.1:8080! Exiting...\n")
sys.exit()

else:
print("Proxy not used")


def checkURL(self):
if self.url:
if not validators.url(self.url):
@@ -142,14 +175,18 @@ def createNewHeaders(self):
for element in headers_overwrite:
self.rewriteHeaders.append({element : self.path})
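
For context (the README section further down names `X-Original-URL` and `X-Rewrite-URL` as the headers involved), each entry built here pairs one override header with the requested path. A rough sketch of the resulting structure, with assumed values:

```python
# Assumed values, inferred from the README's mention of these two headers.
headers_overwrite = ["X-Original-URL", "X-Rewrite-URL"]
path = "/admin"  # illustrative

rewriteHeaders = [{header: path} for header in headers_overwrite]
# -> [{"X-Original-URL": "/admin"}, {"X-Rewrite-URL": "/admin"}]
```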


# Proxy added to __init__ as an optional argument with an empty default, so callers that don't pass a proxy are unaffected.
class Query():
def __init__(self, url, dir, dirObject):
def __init__(self, url, dir, dirObject, proxy=""):
if proxy != "":
print("hit")
self.proxy = proxy # Added proxy to this.
self.url = url
self.dir = dir # call pathrepo by this
self.dirObject = dirObject
self.domain = tldextract.extract(self.url).domain





def checkStatusCode(self, status_code):
@@ -174,7 +211,13 @@ def writeToFile(self, array):
def manipulateRequest(self):
print((" Target URL: " + self.url + "\tTarget Path: " + self.dir + " ").center(121, "="))
results = []
p = requests.post(self.url + self.dir)
# If a proxy is set, send the request through it; verify=False skips the TLS certificate checks.
if proxy != "":
print("\nProxy online \n")
p = requests.post(self.url + self.dir, proxies=inProxies, verify=False)
else:
p = requests.post(self.url + self.dir)
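
The same proxy branch is repeated in `manipulatePath` and `manipulateHeaders` below; one way to avoid that duplication, shown only as a sketch with a hypothetical helper (not part of this diff), is to build the shared keyword arguments once:

```python
# Hypothetical helper: build the shared requests keyword arguments once so each
# call site can simply do requests.get(url, **request_kwargs()).
def request_kwargs():
    if proxy:
        return {"proxies": inProxies, "verify": False}
    return {}
```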


colour = self.checkStatusCode(p.status_code)
reset = Style.RESET_ALL
@@ -199,7 +242,11 @@ def manipulatePath(self):
line_width = 100

for path in self.dirObject.newPaths:
r = requests.get(self.url + path)
# Route the request through the proxy if one was provided.
if proxy != "":
r = requests.get(self.url + path, proxies=inProxies, verify=False)
else:
r = requests.get(self.url + path)

colour = self.checkStatusCode(r.status_code)

@@ -220,7 +267,11 @@ def manipulateHeaders(self):
line_width = 100

for header in self.dirObject.newHeaders:
r = requests.get(self.url + self.dir, headers=header)
# Route the request through the proxy if one was provided.
if proxy != "":
r = requests.get(self.url + self.dir, headers=header, proxies=inProxies, verify=False)
else:
r = requests.get(self.url + self.dir, headers=header)

colour = self.checkStatusCode(r.status_code)
reset = Style.RESET_ALL
@@ -238,7 +289,11 @@ def manipulateHeaders(self):

results_2 = []
for header in self.dirObject.rewriteHeaders:
r = requests.get(self.url, headers=header)
# Route the request through the proxy if one was provided.
if proxy != "":
r = requests.get(self.url, headers=header, proxies=inProxies, verify=False)
else:
r = requests.get(self.url, headers=header)

colour = self.checkStatusCode(r.status_code)
reset = Style.RESET_ALL
@@ -260,8 +315,10 @@ class Program():
def __init__(self, urllist, dirlist):
self.urllist = urllist
self.dirlist = dirlist
self.proxy = proxy

def initialise(self):

for u in self.urllist:
for d in self.dirlist:
if d != "/":
4 changes: 3 additions & 1 deletion README.md
@@ -28,11 +28,13 @@
| -U | path to list of URLs | ./urllist.txt, ../../urllist.txt, etc. | Just provide the path where the file is located :) |
| -d | single directory to scan | admin or /admin or admin/ or /admin/ | All these example usages are interpreted in the same way |
| -D | path to list of directories | ./dirlist.txt, ../../dirlist.txt, etc. | Just provide the path where the file is located :) |
| -p | send requests via a proxy (such as Burp) | 127.0.0.1:8080 | Provides the ability to send each request through a proxy, such as Burp. |

**Usage 1:** `python3 403bypasser.py -u https://example.com -d /secret`<br>
**Usage 2:** `python3 403bypasser.py -u https://example.com -D dirlist.txt`<br>
**Usage 3:** `python3 403bypasser.py -U urllist.txt -d /secret`<br>
**Usage 4:** `python3 403bypasser.py -U urllist.txt -D dirlist.txt`<br>
**Usage 5 (with proxy):** `python3 403bypasser.py -u https://example.com -d /secret/ -p 127.0.0.1:8080`

**IMPORTANT NOTE:** All of the following are interpreted the same way, so which pattern you use is just a matter of preference.
- `python3 403bypasser.py -u https://example.com -d secret`<br>
@@ -117,7 +119,7 @@ poisoning with 1)`X-Original-URL` and 2)`X-Rewrite-URL` headers.
- `127.0.0.1:443`
- `2130706433`
- `0x7F000001`
- `0177.0000.0000.0001`
- `0177.0000.0000.0001`https://example.com
- `0`
- `127.1`
- `10.0.0.0`