
Commit

Merge pull request #78 from anouarbensaad/v2.0/dev
 #76 Add − OS | − Server.
anouarbensaad authored Apr 16, 2020
2 parents 4ba55ab + acf7be6 commit 6eeafce
Showing 15 changed files with 1,102 additions and 948 deletions.
343 changes: 184 additions & 159 deletions cli.py

Large diffs are not rendered by default.

35 changes: 18 additions & 17 deletions common/banner.py
@@ -1,8 +1,9 @@
import sys
from common.colors import bannerblue , bannerblue2 ,W ,Y ,R,end
from common.colors import bannerblue, bannerblue2, W, Y, R, end


def banner():
print("""%s
print("""%s
.:. .:,
xM; XK.
@@ -31,18 +32,18 @@ def banner():
;. :.
%s# Coded By Anouar Ben Saad -%s @anouarbensaad
%s"""
%
(bannerblue,bannerblue2,
W,bannerblue2,W,bannerblue2,W,bannerblue2,
W,bannerblue2,W,bannerblue2,W,bannerblue2,
W,bannerblue2,W,bannerblue2,
W,bannerblue2,W,bannerblue2,
W,bannerblue2,W,bannerblue2,
W,bannerblue2,W,bannerblue2,
W,bannerblue2,
W,bannerblue2,W,bannerblue2,
W,bannerblue2,
W,bannerblue2,
W,Y,end
))
%s"""
%
(bannerblue, bannerblue2,
W, bannerblue2, W, bannerblue2, W, bannerblue2,
W, bannerblue2, W, bannerblue2, W, bannerblue2,
W, bannerblue2, W, bannerblue2,
W, bannerblue2, W, bannerblue2,
W, bannerblue2, W, bannerblue2,
W, bannerblue2, W, bannerblue2,
W, bannerblue2,
W, bannerblue2, W, bannerblue2,
W, bannerblue2,
W, bannerblue2,
W, Y, end
))
48 changes: 24 additions & 24 deletions common/colors.py
@@ -9,31 +9,31 @@
# Colors shouldn't be displayed on Mac and Windows
bannerblue = bannerblue2 = yellowhead = \
W = Y = R = G = B = bg = green = \
run = good = bad = info = red = end = que = \
failexploit = vulnexploit = portopen = portclose = ''
run = good = bad = info = red = end = que = \
failexploit = vulnexploit = portopen = portclose = ''
else:
#banner Colors
bannerblue = '\033[34m'
# banner Colors
bannerblue = '\033[34m'
bannerblue2 = '\033[1;1;94m'
yellowhead = '\033[1;1;94m'
#default colors
W = '\033[97m' # white
Y = '\033[93m' # yellow
R = '\033[91m'
G = '\033[92m'
B = '\033[94m'
bg = '\033[7;91m'
green = '\033[92m'
#action colors
run = '\033[93m[~]\033[0m'
good = '\033[92m[+]\033[0m'
bad = '\033[91m[-]\033[0m'
info = '\033[93m[!]\033[0m'
red = '\033[91m'
end = '\033[0m'
que = '\033[94m[?]\033[0m'
#test colors
yellowhead = '\033[1;1;94m'
# default colors
W = '\033[97m' # white
Y = '\033[93m' # yellow
R = '\033[91m'
G = '\033[92m'
B = '\033[94m'
bg = '\033[7;91m'
green = '\033[92m'
# action colors
run = '\033[93m[~]\033[0m'
good = '\033[92m[+]\033[0m'
bad = '\033[91m[-]\033[0m'
info = '\033[93m[!]\033[0m'
red = '\033[91m'
end = '\033[0m'
que = '\033[94m[?]\033[0m'
# test colors
failexploit = '\033[91mFAIL\033[0m'
vulnexploit = '\033[92mVULN\033[0m'
portopen = '\033[92mOPEN \033[0m'
portclose = '\033[91mCLOSE\033[0m'
portopen = '\033[92mOPEN \033[0m'
portclose = '\033[91mCLOSE\033[0m'
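For context, the constants in common/colors.py are plain ANSI escape strings that the rest of the tool interpolates into its console messages (and that collapse to empty strings on Mac and Windows). A minimal sketch of that usage, not part of this commit; the message text is invented for illustration:

from common.colors import good, bad, info, que

print('%s Target is reachable' % good)              # green [+] prefix
print('%s Request timed out' % bad)                 # red   [-] prefix
print('%s Falling back to default timeout' % info)  # yellow [!] prefix
print('%s Continue scanning? [y/N]' % que)          # blue   [?] prefix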
3 changes: 2 additions & 1 deletion common/output_wr.py
@@ -2,6 +2,7 @@
import os
import sys


def writelogs(data, data_name, output_dir):
"""Write the results."""
for data, data_name in zip(data, data_name):
@@ -10,4 +11,4 @@ def writelogs(data, data_name, output_dir):
with open(filepath, 'w+') as out_file:
joined = '\n'.join(data)
out_file.write(str(joined.encode('utf-8').decode('utf-8')))
out_file.write('\n')
out_file.write('\n')
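For reference, writelogs() pairs each result list in data with the matching file name in data_name and writes one file per pair under output_dir. A hedged usage sketch, not from the diff: the file names, results, and output directory are invented, and the hunk shown does not reveal whether the directory is created automatically.

from common.output_wr import writelogs

subdomains = ['admin.example.com', 'mail.example.com']
open_ports = ['80', '443']

writelogs(
    data=[subdomains, open_ports],
    data_name=['subdomains.txt', 'ports.txt'],
    output_dir='vulnx_output',   # hypothetical output directory
)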
49 changes: 27 additions & 22 deletions common/requestUp.py
@@ -7,6 +7,7 @@
SESSION = requests.Session()
SESSION.max_redirects = 2


def random_UserAgent():
useragents_rotate = [
"Mozilla/4.0 (compatible; MSIE 6.0; MSIE 5.5; Windows NT 5.0) Opera 7.02 Bork-edition [en]",
@@ -31,42 +32,46 @@ def random_UserAgent():
useragents_random = random.choice(useragents_rotate)
return useragents_random


def getrequest(
url,
headers,
timeout=3,
):
url,
headers,
timeout=3,
):
"""GetRequest without ssl verification"""
headers = set()

def get(url):
# Selecting a random user-agent
# Selecting a random user-agent
response = SESSION.get(
url,
headers=headers,
verify=False,
timeout=timeout,
stream=True,
url,
headers=headers,
verify=False,
timeout=timeout,
stream=True,
)
return response.text
return get(url)


def sendrequest(
url,
headers=None,
data=None,
timeout=3,
):
url,
headers=None,
data=None,
timeout=3,
):
"""GetRequest without ssl verification"""
headers = set()
data = set()

def post(url):
response = SESSION.post(
url,
data=data,
headers=headers,
verify=False,
timeout=timeout,
stream=True,
url,
data=data,
headers=headers,
verify=False,
timeout=timeout,
stream=True,
)
return response.text
return post(url)
return post(url)
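Both helpers wrap the shared requests.Session (capped at two redirects) with verify=False, stream=True and a three-second default timeout. Note that, as the hunks read, each function reassigns headers (and data) to set() before sending, so caller-supplied values are effectively discarded. A hedged calling sketch, not part of the diff; the URLs and payload are placeholders:

from common.requestUp import getrequest, sendrequest, random_UserAgent

ua = {'User-Agent': random_UserAgent()}

robots = getrequest('http://example.com/robots.txt', headers=ua, timeout=5)
login = sendrequest('http://example.com/wp-login.php', headers=ua,
                    data={'log': 'admin', 'pwd': 'test'}, timeout=5)
# As committed, the ua/data arguments above are overwritten internally,
# so the requests go out without the values passed here.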
9 changes: 6 additions & 3 deletions common/threading.py
@@ -2,11 +2,14 @@

from common.colors import info


def threads(function, thread_count):
""" Threadpool Uses """
threads = concurrent.futures.ThreadPoolExecutor(
max_workers=thread_count)
max_workers=thread_count)
confuture = (threads.submit(function))
for i, _ in enumerate(concurrent.futures.as_completed(confuture)):
print('%s Progress IN : %i' % (info, i + 1), end='\r')
print('')
print('%s Progress IN : %i' % (info, i + 1), end='\r')


print('')
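threads() builds a ThreadPoolExecutor with thread_count workers, submits function, and prints a progress counter as futures complete. For context, the generic executor pattern it is modelled on looks like the sketch below; this is an illustration only, not the committed code, and the worker and URL list are invented.

import concurrent.futures

def check(url):
    return url, len(url)          # placeholder "work" per target

urls = ['http://a.example', 'http://b.example', 'http://c.example']

with concurrent.futures.ThreadPoolExecutor(max_workers=3) as pool:
    futures = [pool.submit(check, u) for u in urls]
    for i, _ in enumerate(concurrent.futures.as_completed(futures)):
        print('Progress IN : %i' % (i + 1), end='\r')
    print('')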
1 change: 1 addition & 0 deletions common/uriParser.py
@@ -1,6 +1,7 @@
import re
from urllib.parse import urlparse


def parsing_url(url):
host = urlparse(url).netloc
return host
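parsing_url() simply returns the network-location component of the URL. A quick illustration of its behaviour; the function body is repeated so the snippet stands alone, and the example URLs are arbitrary:

from urllib.parse import urlparse

def parsing_url(url):
    return urlparse(url).netloc

print(parsing_url('https://sub.example.com/wp-login.php'))   # sub.example.com
print(parsing_url('http://example.com:8080/index.php'))      # example.com:8080
print(parsing_url('example.com'))                             # '' (no scheme, no netloc)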
39 changes: 26 additions & 13 deletions modules/dnsLookup.py
@@ -2,10 +2,11 @@
import re
import base64
import json
from common.colors import red, green, bg, G, R, W, Y, G , good , bad , run , info , end , que , bannerblue
from common.colors import red, green, bg, G, R, W, Y, G, good, bad, run, info, end, que, bannerblue
from bs4 import BeautifulSoup
from common.uriParser import parsing_url as hostd


def results(table):
res = []
trs = table.findAll('tr')
@@ -35,6 +36,7 @@ def results(table):
pass
return res


def text_record(table):
res = []
for td in table.findAll('td'):
@@ -49,14 +51,17 @@ def dnsdumper(url):
soup = BeautifulSoup(response.text, 'html.parser')
# If no match is found, the return object won't have group method, so check.
try:
csrf_token = soup.findAll('input', attrs={'name': 'csrfmiddlewaretoken'})[0]['value']
csrf_token = soup.findAll(
'input', attrs={'name': 'csrfmiddlewaretoken'})[0]['value']
except AttributeError: # No match is found
csrf_token = soup.findAll('input', attrs={'name': 'csrfmiddlewaretoken'})[0]['value']
print (' %s Retrieved token: %s' % (info,csrf_token))
csrf_token = soup.findAll(
'input', attrs={'name': 'csrfmiddlewaretoken'})[0]['value']
print(' %s Retrieved token: %s' % (info, csrf_token))
cookies = {'csrftoken': csrf_token}
headers = {'Referer': 'https://dnsdumpster.com/'}
data = {'csrfmiddlewaretoken': csrf_token, 'targetip': domain }
response = requests.Session().post('https://dnsdumpster.com/',cookies=cookies, data=data, headers=headers)
data = {'csrfmiddlewaretoken': csrf_token, 'targetip': domain}
response = requests.Session().post('https://dnsdumpster.com/',
cookies=cookies, data=data, headers=headers)
image = requests.get('https://dnsdumpster.com/static/map/%s.png' % domain)
if response.status_code == 200:
soup = BeautifulSoup(response.content, 'html.parser')
@@ -68,23 +73,30 @@ def dnsdumper(url):
res['dns_records']['mx'] = results(tables[1])
print(' %s Search for DNS Servers' % que)
for entry in res['dns_records']['dns']:
print((" %s Host : {domain} \n %s IP : {ip} \n %s AS : {as} \n %s----------------%s".format(**entry)% (good,good,good,bannerblue,end)))
print((" %s Host : {domain} \n %s IP : {ip} \n %s AS : {as} \n %s----------------%s".format(
**entry) % (good, good, good, bannerblue, end)))
print(' %s Search for MX Records ' % que)
for entry in res['dns_records']['mx']:
print((" %s Host : {domain} \n %s IP : {ip} \n %s AS : {as} \n %s----------------%s".format(**entry)% (good,good,good,bannerblue,end)))
print((" %s Host : {domain} \n %s IP : {ip} \n %s AS : {as} \n %s----------------%s".format(
**entry) % (good, good, good, bannerblue, end)))


def domain_info(url):
domain = hostd(url)
dnsdumpster_url = 'https://dnsdumpster.com/'
response = requests.Session().get(dnsdumpster_url).text
# If no match is found, the return object won't have group method, so check.
try:
csrf_token = re.search(r"name='csrfmiddlewaretoken' value='(.*?)'", response).group(1)
csrf_token = re.search(
r"name='csrfmiddlewaretoken' value='(.*?)'", response).group(1)
except AttributeError: # No match is found
csrf_token = re.search(r"name='csrfmiddlewaretoken' value='(.*?)'", response)
csrf_token = re.search(
r"name='csrfmiddlewaretoken' value='(.*?)'", response)
cookies = {'csrftoken': csrf_token}
headers = {'Referer': 'https://dnsdumpster.com/'}
data = {'csrfmiddlewaretoken': csrf_token, 'targetip': domain }
response = requests.Session().post('https://dnsdumpster.com/',cookies=cookies, data=data, headers=headers)
data = {'csrfmiddlewaretoken': csrf_token, 'targetip': domain}
response = requests.Session().post('https://dnsdumpster.com/',
cookies=cookies, data=data, headers=headers)
image = requests.get('https://dnsdumpster.com/static/map/%s.png' % domain)
if response.status_code == 200:
soup = BeautifulSoup(response.content, 'html.parser')
@@ -95,4 +107,5 @@ def domain_info(url):
res['dns_records']['host'] = results(tables[3])
print(' %s SubDomains' % que)
for entry in res['dns_records']['host']:
print((" %s SubDomain : {domain} \n %s IP : {ip} \n %s----------------%s".format(**entry)% (good,good,bannerblue,end)))
print((" %s SubDomain : {domain} \n %s IP : {ip} \n %s----------------%s".format(
**entry) % (good, good, bannerblue, end)))
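Both dnsdumper() and domain_info() follow the same flow: fetch dnsdumpster.com, scrape the csrfmiddlewaretoken, POST it back as both a cookie and a form field alongside the target, then parse the returned HTML tables. A condensed sketch of that token round-trip, for context only; dnsdumpster.com's markup and endpoint behaviour may change, and example.com is a placeholder target.

import requests
from bs4 import BeautifulSoup

session = requests.Session()
home = session.get('https://dnsdumpster.com/')
token = BeautifulSoup(home.text, 'html.parser').find(
    'input', attrs={'name': 'csrfmiddlewaretoken'})['value']

resp = session.post(
    'https://dnsdumpster.com/',
    cookies={'csrftoken': token},
    headers={'Referer': 'https://dnsdumpster.com/'},
    data={'csrfmiddlewaretoken': token, 'targetip': 'example.com'},
)
print(resp.status_code)   # 200 when the lookup is accepted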