Skip to content

Commit

Permalink
Merge pull request #490 from blacklanternsecurity/dev
Browse files Browse the repository at this point in the history
Dev -> Main
  • Loading branch information
liquidsec authored Sep 16, 2024
2 parents f8068b6 + 8ee9c70 commit 4ecdf18
Show file tree
Hide file tree
Showing 11 changed files with 879 additions and 591 deletions.
38 changes: 27 additions & 11 deletions baddns/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,10 +72,12 @@ def validate_modules(arg_value, pattern=re.compile(r"^[a-zA-Z0-9_]+(,[a-zA-Z0-9_
return arg_value


async def execute_module(ModuleClass, target, custom_nameservers, signatures, silent=False):
async def execute_module(ModuleClass, target, custom_nameservers, signatures, silent=False, direct_mode=False):
findings = None
try:
module_instance = ModuleClass(target, custom_nameservers=custom_nameservers, signatures=signatures, cli=True)
module_instance = ModuleClass(
target, custom_nameservers=custom_nameservers, signatures=signatures, cli=True, direct_mode=direct_mode
)
except BadDNSSignatureException as e:
log.error(f"Error loading signatures: {e}")
raise BadDNSCLIException(f"Error loading signatures: {e}")
Expand Down Expand Up @@ -126,6 +128,7 @@ async def _main():
)

parser.add_argument("-d", "--debug", action="store_true", help="Enable debug logging")
parser.add_argument("-D", "--direct", action="store_true", help="Enable direct mode")

parser.add_argument("target", nargs="?", type=validate_target, help="subdomain to analyze")
args = parser.parse_args()
Expand Down Expand Up @@ -154,15 +157,26 @@ async def _main():
# Get all available modules
all_modules = get_all_modules()

# If the user provided the -m or --modules argument, filter the modules accordingly
if args.modules:
included_module_names = [name.strip().upper() for name in args.modules.split(",")]
modules_to_execute = [module for module in all_modules if module.name.upper() in included_module_names]
else:
modules_to_execute = all_modules # Default to all modules if -m is not provided
log.info(
f"Running with all modules [{', '.join([module.name for module in modules_to_execute])}] (-m to specify)"
direct_mode = False

# if direct mode was specified, only the CNAME module will run
if args.direct:
log.warning(
"Direct mode specified. Only the CNAME module is enabled. Positive results may not be immediately exploitable without corresponding DNS records pointing to it (e.g., CNAME), or some other external resource which may try to interact with it"
)
modules_to_execute = [module for module in all_modules if module.name.upper() == "CNAME"]
direct_mode = True

else:
# If the user provided the -m or --modules argument, filter the modules accordingly
if args.modules:
included_module_names = [name.strip().upper() for name in args.modules.split(",")]
modules_to_execute = [module for module in all_modules if module.name.upper() in included_module_names]
else:
modules_to_execute = all_modules # Default to all modules if -m is not provided
log.info(
f"Running with all modules [{', '.join([module.name for module in modules_to_execute])}] (-m to specify)"
)

custom_signatures = None
if args.custom_signatures:
Expand All @@ -177,7 +191,9 @@ async def _main():
signatures = load_signatures(signatures_dir=custom_signatures)

for ModuleClass in modules_to_execute:
await execute_module(ModuleClass, args.target, custom_nameservers, signatures, silent=silent)
await execute_module(
ModuleClass, args.target, custom_nameservers, signatures, silent=silent, direct_mode=direct_mode
)


def main():
Expand Down
1 change: 1 addition & 0 deletions baddns/lib/dnsmanager.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,6 +116,7 @@ async def do_resolve(self, target, rdatatype):
result_cname = r[0]
cname_chain.append(result_cname)
target = result_cname

try:
r = self.process_answer(await self.dns_client.resolve(target, "CNAME"), "CNAME")
if len(r) == 0:
Expand Down
16 changes: 10 additions & 6 deletions baddns/lib/whoismanager.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,17 +15,21 @@ def __init__(self, target):

async def dispatchWHOIS(self):
    """Issue a WHOIS query for the target's registered (base) domain.

    Stores the outcome in ``self.whois_result`` as either
    ``{"type": "response", "data": <whois result>}`` on success or
    ``{"type": "error", "data": <message>}`` on failure. Never raises.
    """
    ext = tldextract.extract(self.target)
    # Fall back to the raw target when tldextract cannot determine a
    # registered domain (empty string or None), e.g. bare hostnames.
    if not ext.registered_domain:
        registered_domain = self.target
    else:
        registered_domain = ext.registered_domain
    log.debug(f"Extracted base domain [{registered_domain}] from [{self.target}]")
    log.debug(f"Submitting WHOIS query for {registered_domain}")
    try:
        # The whois library is blocking; run it in a worker thread so the
        # event loop stays responsive.
        w = await asyncio.to_thread(whois.whois, registered_domain, quiet=True)
        log.debug(f"Got response to whois request for {registered_domain}")
        self.whois_result = {"type": "response", "data": w}
    except whois.parser.PywhoisError as e:
        log.debug(f"Got PywhoisError for whois request for {registered_domain}")
        self.whois_result = {"type": "error", "data": str(e)}
    except Exception as e:
        # Unknown failures are recorded, not raised — WHOIS is best-effort.
        log.debug(f"Got unknown error from whois: {str(e)}")
        self.whois_result = {"type": "error", "data": str(e)}

def analyzeWHOIS(self):
Expand Down
205 changes: 131 additions & 74 deletions baddns/modules/references.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,11 @@ class BadDNS_references(BadDNS_base):

regex_jssrc = re.compile(r'<script[^>]*src\s*=\s*[\'"]([^\'">]+)[\'"]', re.IGNORECASE)
regex_csssrc = re.compile(r'<link[^>]*href\s*=\s*[\'"]([^\'">]+)[\'"]', re.IGNORECASE)
regex_csp = re.compile(r"Content-Security-Policy: (.+?)\|", re.IGNORECASE)
regex_cors = re.compile(r"Access-Control-Allow-Origin: (.+?)\|", re.IGNORECASE)
regex_domain_url = re.compile(
r"(?:((?:https?:\/\/)?(?:\w(?:[\w-]{0,100}\w)?\.)+(?:[xX][nN]--)?[^\W_]{0,63}\.?))(?!(\/|\?))", re.IGNORECASE
)

def __init__(self, target, **kwargs):
super().__init__(target, **kwargs)
Expand All @@ -32,95 +37,149 @@ def __init__(self, target, **kwargs):
self.cname_findings_direct = None
self.reference_data = {}

def extract_domains_headers(self, header_name, regex, headers_str, description):
    """Pull candidate hijackable domains out of one response header.

    header_name: human-readable header label used in log and trigger text.
    regex: compiled pattern capturing that header's value from headers_str.
    headers_str: all response headers flattened into one "Name: value|..." string.
    description: fragment inserted into each finding's description text.
    Returns a list of finding dicts (url/domain/description/trigger),
    one per unique domain found in the header value.
    """
    log.debug(f"Searching for {header_name} in headers...")

    results = []
    match = regex.search(headers_str)
    if match:
        log.debug(f"Found {header_name} header, extracting domains...")
        header_string = match.group(1)
        log.debug(f"Extracted {header_name} content: {header_string}")

        extracted_domains = []
        domain_url_matches = re.finditer(self.regex_domain_url, header_string)

        for domain_url in domain_url_matches:
            domain_or_url = domain_url.group(1)
            if domain_or_url:
                # Normalize bare domains to a URL so urlparse yields a netloc.
                if not domain_or_url.startswith(("http://", "https://")):
                    url = f"https://{domain_or_url}"
                else:
                    url = domain_or_url
                parsed_url = urlparse(url)
                domain = parsed_url.netloc
                if domain not in extracted_domains:
                    log.debug(f"Extracted domain: {domain}")
                    extracted_domains.append(domain)
                    results.append(
                        {
                            "url": domain_or_url,
                            "domain": domain,
                            "description": f"Hijackable reference, {description} [{domain}]",
                            "trigger": f"{header_name} Header: [{domain_or_url}]",
                        }
                    )
                else:
                    log.debug(f"Duplicate domain {domain} ignored.")
            else:
                log.debug("Failed to extract domain properly from header")
        log.debug(
            f"Finished extracting domains from {header_name}. Found {len(extracted_domains)} unique domain(s)."
        )
    else:
        log.debug(f"{header_name} header not found.")

    return results

def parse_headers(self, headers):
    """Scan HTTP response headers for hijackable domain references.

    Flattens the header mapping into a single pipe-delimited string, then
    checks the CSP and CORS headers via extract_domains_headers.
    Returns the combined list of finding dicts.
    """
    log.debug("Starting to parse headers")
    flattened = "|".join(f"{name}: {value}" for name, value in headers.items())
    log.debug(f"Formatted headers string: {flattened}")

    findings = []
    # (header label, capture regex, description fragment) per header of interest.
    header_checks = (
        ("Content-Security-Policy", self.regex_csp, "CSP domain"),
        ("Access-Control-Allow-Origin", self.regex_cors, "CORS header domain"),
    )
    for header_name, regex, description in header_checks:
        findings.extend(self.extract_domains_headers(header_name, regex, flattened, description))
    log.debug(f"Completed parsing headers. Total results: {len(findings)}")
    return findings

def extract_domains_body(self, body, regex, description, source):
    """Extract referenced URLs/domains from an HTML body with one regex.

    body: response body text to scan.
    regex: compiled pattern whose group(1) is the referenced URL.
    description: fragment inserted into each finding's description text.
    source: label for the trigger text (e.g. "Javascript Source").
    Returns a list of finding dicts (url/domain/description/trigger).
    """
    results = []
    for match in regex.finditer(body):
        url = match.group(1)
        parsed_url = urlparse(url)
        # netloc is empty for relative includes; they are still reported as-is.
        domain = parsed_url.netloc
        results.append(
            {
                "url": url,
                "domain": domain,
                "description": f"Hijackable reference, {description} [{domain}]",
                "trigger": f"{source}: [{url}]",
            }
        )
    return results

def parse_body(self, body):
    """Scan an HTML body for hijackable JS and CSS include references.

    Delegates the actual extraction to extract_domains_body with the
    class-level script-src and link-href regexes.
    Returns the combined list of finding dicts.
    """
    log.debug("Starting to parse body content for JS and CSS sources...")
    collected = []

    # (regex, description fragment, trigger label, start message, log label)
    body_checks = (
        (self.regex_jssrc, "JS Include", "Javascript Source", "Looking for JS includes...", "JS includes"),
        (self.regex_csssrc, "CSS Include", "CSS Source", "Looking for CSS includes...", "CSS includes"),
    )
    for regex, description, source, start_msg, label in body_checks:
        log.debug(start_msg)
        found = self.extract_domains_body(body, regex, description, source)
        if found:
            log.debug(f"Found {len(found)} domain(s) in {label}.")
        collected.extend(found)

    log.debug(f"Completed parsing body content. Total results: {len(collected)}")
    return collected

async def process_cname_analysis(self, parsed_results):
    """Run CNAME takeover analysis against every extracted reference.

    Each candidate domain (skipping the scan target itself) is checked twice:
    once in direct mode and once in normal mode. Positive dispatches are
    collected as finding dicts carrying the originating description/trigger.
    """
    findings = []
    for parsed in parsed_results:
        if parsed["domain"] == self.target:
            log.debug(f"Found domain matches target ({self.target}), ignoring")
            continue
        log.debug(f"Initializing cname instance for target {parsed['domain']}")

        for direct_mode in (True, False):
            cname_module = BadDNS_cname(
                parsed["domain"],
                custom_nameservers=self.custom_nameservers,
                signatures=self.signatures,
                direct_mode=direct_mode,
                parent_class="references",
                http_client_class=self.http_client_class,
                dns_client=self.dns_client,
            )
            if await cname_module.dispatch():
                findings.append(
                    {
                        "finding": cname_module.analyze(),
                        "description": parsed["description"],
                        "trigger": parsed["trigger"],
                    }
                )
    return findings

async def dispatch(self):
    """Fetch the target over HTTP(S) and hunt for hijackable references.

    Collects candidate domains from response headers (CSP / CORS) and the
    response body (JS / CSS includes), then runs CNAME analysis on all of
    them, storing the findings in self.cname_findings_direct.
    Returns True once analysis completes.
    """
    log.debug("in references dispatch")
    await self.target_httpmanager.dispatchHttp()
    log.debug("HTTP dispatch complete")

    # Keep only the protocols that produced a deny-redirects result.
    live_results = [
        getattr(self.target_httpmanager, f"{protocol}_denyredirects_results")
        for protocol in ["http", "https"]
        if getattr(self.target_httpmanager, f"{protocol}_denyredirects_results")
    ]

    parsed_results = []
    for r in live_results:
        parsed_results.extend(self.parse_headers(r.headers))
        parsed_results.extend(self.parse_body(r.text))

    self.cname_findings_direct = await self.process_cname_analysis(parsed_results)
    return True

def _convert_findings(self, finding_sets):
Expand Down Expand Up @@ -149,6 +208,4 @@ def analyze(self):
log.debug("in references analyze")
if self.cname_findings_direct:
findings.extend(self._convert_findings(self.cname_findings_direct))
if self.cname_findings:
findings.extend(self._convert_findings(self.cname_findings))
return findings
5 changes: 2 additions & 3 deletions baddns/signatures/dnsreaper_simplebooklet.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,12 +7,11 @@ identifiers:
not_cnames: []
matcher_rule:
matchers:
- condition: and
- condition: or
part: body
type: word
words:
- This is a surprise. The
- you're looking for isn't here.
- The link to this Simplebooklet may have changed
matchers-condition: and
mode: http
service_name: simplebooklet.com
Expand Down
2 changes: 1 addition & 1 deletion baddns/signatures/dnsreaper_surveysparrow.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ matcher_rule:
part: body
type: word
words:
- <h5>Account not found.</h5>
- '<title>DNS resolution error '
matchers-condition: and
mode: http
service_name: survey sparrow
Expand Down
17 changes: 17 additions & 0 deletions baddns/signatures/dnsreaper_wix.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
# Signature for a dangling Wix site (imported from dnsreaper): matches on the
# listed IP and the "ConnectYourDomain" placeholder string in the HTTP body.
identifiers:
  cnames: []
  ips:
    - 23.236.62.147
  nameservers: []
  not_cnames: []
matcher_rule:
  matchers:
    - condition: or
      part: body
      type: word
      words:
        - ConnectYourDomain
  matchers-condition: and
mode: http
service_name: Wix
source: dnsreaper
3 changes: 3 additions & 0 deletions baddns/signatures/signature_history.txt
Original file line number Diff line number Diff line change
Expand Up @@ -110,3 +110,6 @@ e3a452987e0aab4f6c387dd89dc1a042590a14555274a564d5d6e5f9dfc691c9 #nucleitemplate
2fef72d2f015bade20486e291d1ccf003cc8ce827ff2bbd14eb2e0e73e005116 #nucleitemplates_flexbe-takeover.yml
a2a6a8fd35e65d7c92882ba0e751b0496b5780937cebf9a9610fa279181060a2 #nucleitemplates_ghost-takeover.yml
e155aed36a19a0437650f5d1033a64a47f39a8981de9f1b5f39e2dfe7e14996d #nucleitemplates_campaignmonitor-takeover.yml
ad913111f8c498e0e5b0ef30714e6074832914e936f61c4dd3b10dad2dd9e436 #dnsreaper_surveysparrow.yml
9075b7665514a4ed5e342152a2f80c804959bb5ee5f94c1e8dfdb50858e969bd #dnsreaper_wix.yml
d3ec2dfaec7ac79042848aee837c2391958e46e2c7824dedb6a9590939d25f44 #dnsreaper_simplebooklet.yml
Loading

0 comments on commit 4ecdf18

Please sign in to comment.