Skip to content

Commit

Permalink
Merge pull request #1382 from blacklanternsecurity/dev
Browse files Browse the repository at this point in the history
Dev --> Stable 1.1.8
  • Loading branch information
TheTechromancer authored May 29, 2024
2 parents 6e96b32 + ab9df4b commit eeae1cb
Show file tree
Hide file tree
Showing 132 changed files with 1,164 additions and 610 deletions.
25 changes: 21 additions & 4 deletions bbot/core/helpers/command.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,14 +2,15 @@
import asyncio
import logging
import traceback
from signal import SIGINT
from subprocess import CompletedProcess, CalledProcessError

from .misc import smart_decode, smart_encode

log = logging.getLogger("bbot.core.helpers.command")


async def run(self, *command, check=False, text=True, **kwargs):
async def run(self, *command, check=False, text=True, idle_timeout=None, **kwargs):
"""Runs a command asynchronously and gets its output as a string.
This method is a simple helper for executing a command and capturing its output.
Expand All @@ -20,6 +21,7 @@ async def run(self, *command, check=False, text=True, **kwargs):
check (bool, optional): If set to True, raises an error if the subprocess exits with a non-zero status.
Defaults to False.
text (bool, optional): If set to True, decodes the subprocess output to string. Defaults to True.
idle_timeout (int, optional): Sets a limit on the number of seconds the process can run before throwing a TimeoutError
**kwargs (dict): Additional keyword arguments for the subprocess.
Returns:
Expand All @@ -45,7 +47,15 @@ async def run(self, *command, check=False, text=True, **kwargs):
_input = b"\n".join(smart_encode(i) for i in _input) + b"\n"
else:
_input = smart_encode(_input)
stdout, stderr = await proc.communicate(_input)

try:
if idle_timeout is not None:
stdout, stderr = await asyncio.wait_for(proc.communicate(_input), timeout=idle_timeout)
else:
stdout, stderr = await proc.communicate(_input)
except asyncio.exceptions.TimeoutError:
proc.send_signal(SIGINT)
raise

# surface stderr
if text:
Expand All @@ -65,7 +75,7 @@ async def run(self, *command, check=False, text=True, **kwargs):
proc_tracker.remove(proc)


async def run_live(self, *command, check=False, text=True, **kwargs):
async def run_live(self, *command, check=False, text=True, idle_timeout=None, **kwargs):
"""Runs a command asynchronously and iterates through its output line by line in realtime.
This method is useful for executing a command and capturing its output on-the-fly, as it is generated.
Expand All @@ -76,6 +86,7 @@ async def run_live(self, *command, check=False, text=True, **kwargs):
check (bool, optional): If set to True, raises an error if the subprocess exits with a non-zero status.
Defaults to False.
text (bool, optional): If set to True, decodes the subprocess output to string. Defaults to True.
idle_timeout (int, optional): Sets a limit on the number of seconds the process can remain idle (no lines sent to stdout) before throwing a TimeoutError
**kwargs (dict): Additional keyword arguments for the subprocess.
Yields:
Expand All @@ -102,7 +113,13 @@ async def run_live(self, *command, check=False, text=True, **kwargs):

while 1:
try:
line = await proc.stdout.readline()
if idle_timeout is not None:
line = await asyncio.wait_for(proc.stdout.readline(), timeout=idle_timeout)
else:
line = await proc.stdout.readline()
except asyncio.exceptions.TimeoutError:
proc.send_signal(SIGINT)
raise
except ValueError as e:
command_str = " ".join([str(c) for c in command])
log.warning(f"Error executing command {command_str}: {e}")
Expand Down
37 changes: 36 additions & 1 deletion bbot/core/helpers/misc.py
Original file line number Diff line number Diff line change
Expand Up @@ -1454,7 +1454,7 @@ def search_dict_values(d, *regexes):
... ]
... }
... }
>>> url_regexes = re.compile(r'https?://[^\\s<>"]+|www\.[^\\s<>"]+')
>>> url_regexes = re.compile(r'https?://[^\\s<>"]+|www\\.[^\\s<>"]+')
>>> list(search_dict_values(dict_to_search, url_regexes))
["https://www.evilcorp.com"]
"""
Expand Down Expand Up @@ -2666,3 +2666,38 @@ async def as_completed(coros):
for task in done:
tasks.pop(task)
yield task


def truncate_filename(file_path, max_length=255):
    """
    Truncate the filename while preserving the file extension to ensure the total path length does not exceed the maximum length.

    Args:
        file_path (str): The original file path.
        max_length (int): The maximum allowed length for the total path. Default is 255.

    Returns:
        pathlib.Path: A new Path object with the truncated filename.

    Raises:
        ValueError: If the directory path is too long to accommodate any filename within the limit.

    Example:
        >>> truncate_filename('/path/to/example_long_filename.txt', 20)
        PosixPath('/path/to/example.txt')
    """
    p = Path(file_path)
    directory, stem, suffix = p.parent, p.stem, p.suffix

    # budget left for the stem: total limit minus directory, extension, and the '/' separator
    max_stem_length = max_length - len(str(directory)) - len(suffix) - 1

    if max_stem_length <= 0:
        raise ValueError("The directory path is too long to accommodate any filename within the limit.")

    # slicing is a no-op when the stem already fits, so no explicit length check is needed
    return directory / (stem[:max_stem_length] + suffix)
28 changes: 27 additions & 1 deletion bbot/core/helpers/regexes.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,9 +20,24 @@
word_num_regex = re.compile(r"[^\W_]+")
num_regex = re.compile(r"\d+")

_ipv6_regex = r"[A-F0-9:]*:[A-F0-9:]*:[A-F0-9:]*"
_ipv4_regex = r"(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(?:\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}"
ipv4_regex = re.compile(_ipv4_regex, re.I)

# IPv6 is complicated, so we have to accommodate alternative patterns,
# :(:[A-F0-9]{1,4}){1,7} == ::1, ::ffff:1
# ([A-F0-9]{1,4}:){1,7}: == 2001::, 2001:db8::, 2001:db8:0:1:2:3::
# ([A-F0-9]{1,4}:){1,6}:([A-F0-9]{1,4}) == 2001::1, 2001:db8::1, 2001:db8:0:1:2:3::1
# ([A-F0-9]{1,4}:){7,7}([A-F0-9]{1,4}) == 1:1:1:1:1:1:1:1, ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff

_ipv6_regex = r"(:(:[A-F0-9]{1,4}){1,7}|([A-F0-9]{1,4}:){1,7}:|([A-F0-9]{1,4}:){1,6}:([A-F0-9]{1,4})|([A-F0-9]{1,4}:){7,7}([A-F0-9]{1,4}))"
ipv6_regex = re.compile(_ipv6_regex, re.I)

_ip_range_regexes = (
_ipv4_regex + r"\/[0-9]{1,2}",
_ipv6_regex + r"\/[0-9]{1,3}",
)
ip_range_regexes = list(re.compile(r, re.I) for r in _ip_range_regexes)

# dns names with periods
_dns_name_regex = r"(?:\w(?:[\w-]{0,100}\w)?\.)+(?:[xX][nN]--)?[^\W_]{1,63}\.?"
dns_name_regex = re.compile(_dns_name_regex, re.I)
Expand Down Expand Up @@ -73,6 +88,17 @@
"EMAIL_ADDRESS",
(r"^" + _email_regex + r"$",),
),
(
"IP_ADDRESS",
(
r"^" + _ipv4_regex + r"$",
r"^" + _ipv6_regex + r"$",
),
),
(
"IP_RANGE",
tuple(r"^" + r + r"$" for r in _ip_range_regexes),
),
(
"OPEN_TCP_PORT",
tuple(r"^" + r + r"$" for r in _open_port_regexes),
Expand Down
2 changes: 2 additions & 0 deletions bbot/core/helpers/web.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
from httpx._models import Cookies
from socksio.exceptions import SOCKSError

from bbot.core.helpers.misc import truncate_filename
from bbot.core.errors import WordlistError, CurlError
from bbot.core.helpers.ratelimiter import RateLimiter

Expand Down Expand Up @@ -258,6 +259,7 @@ async def download(self, url, **kwargs):
"""
success = False
filename = kwargs.pop("filename", self.parent_helper.cache_filename(url))
filename = truncate_filename(filename)
follow_redirects = kwargs.pop("follow_redirects", True)
max_size = kwargs.pop("max_size", None)
warn = kwargs.pop("warn", True)
Expand Down
6 changes: 5 additions & 1 deletion bbot/modules/ajaxpro.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,11 @@ class ajaxpro(BaseModule):
watched_events = ["HTTP_RESPONSE", "URL"]
produced_events = ["VULNERABILITY", "FINDING"]
flags = ["active", "safe", "web-thorough"]
meta = {"description": "Check for potentially vulnerable Ajaxpro instances"}
meta = {
"description": "Check for potentially vulnerable Ajaxpro instances",
"created_date": "2024-01-18",
"author": "@liquidsec",
}

async def handle_event(self, event):
if event.type == "URL":
Expand Down
6 changes: 5 additions & 1 deletion bbot/modules/anubisdb.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,11 @@ class anubisdb(subdomain_enum):
flags = ["subdomain-enum", "passive", "safe"]
watched_events = ["DNS_NAME"]
produced_events = ["DNS_NAME"]
meta = {"description": "Query jldc.me's database for subdomains"}
meta = {
"description": "Query jldc.me's database for subdomains",
"created_date": "2022-10-04",
"author": "@TheTechromancer",
}
options = {"limit": 1000}
options_desc = {
"limit": "Limit the number of subdomains returned per query (increasing this may slow the scan due to garbage results from this API)"
Expand Down
6 changes: 5 additions & 1 deletion bbot/modules/azure_realm.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,11 @@ class azure_realm(BaseModule):
watched_events = ["DNS_NAME"]
produced_events = ["URL_UNVERIFIED"]
flags = ["affiliates", "subdomain-enum", "cloud-enum", "web-basic", "web-thorough", "passive", "safe"]
meta = {"description": 'Retrieves the "AuthURL" from login.microsoftonline.com/getuserrealm'}
meta = {
"description": 'Retrieves the "AuthURL" from login.microsoftonline.com/getuserrealm',
"created_date": "2023-07-12",
"author": "@TheTechromancer",
}

async def setup(self):
self.processed = set()
Expand Down
6 changes: 5 additions & 1 deletion bbot/modules/azure_tenant.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,11 @@ class azure_tenant(BaseModule):
watched_events = ["DNS_NAME"]
produced_events = ["DNS_NAME"]
flags = ["affiliates", "subdomain-enum", "cloud-enum", "passive", "safe"]
meta = {"description": "Query Azure for tenant sister domains"}
meta = {
"description": "Query Azure for tenant sister domains",
"created_date": "2024-07-04",
"author": "@TheTechromancer",
}

base_url = "https://autodiscover-s.outlook.com"
in_scope_only = True
Expand Down
8 changes: 6 additions & 2 deletions bbot/modules/baddns.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,14 +13,18 @@ class baddns(BaseModule):
watched_events = ["DNS_NAME", "DNS_NAME_UNRESOLVED"]
produced_events = ["FINDING", "VULNERABILITY"]
flags = ["active", "safe", "web-basic", "baddns", "cloud-enum", "subdomain-hijack"]
meta = {"description": "Check hosts for domain/subdomain takeovers"}
meta = {
"description": "Check hosts for domain/subdomain takeovers",
"created_date": "2024-01-18",
"author": "@liquidsec",
}
options = {"custom_nameservers": [], "only_high_confidence": False}
options_desc = {
"custom_nameservers": "Force BadDNS to use a list of custom nameservers",
"only_high_confidence": "Do not emit low-confidence or generic detections",
}
max_event_handlers = 8
deps_pip = ["baddns~=1.1.0"]
deps_pip = ["baddns~=1.1.789"]

def select_modules(self):
selected_modules = []
Expand Down
8 changes: 6 additions & 2 deletions bbot/modules/baddns_zone.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,14 +11,18 @@ class baddns_zone(baddns_module):
watched_events = ["DNS_NAME"]
produced_events = ["FINDING", "VULNERABILITY"]
flags = ["active", "safe", "subdomain-enum", "baddns", "cloud-enum"]
meta = {"description": "Check hosts for DNS zone transfers and NSEC walks"}
meta = {
"description": "Check hosts for DNS zone transfers and NSEC walks",
"created_date": "2024-01-29",
"author": "@liquidsec",
}
options = {"custom_nameservers": [], "only_high_confidence": False}
options_desc = {
"custom_nameservers": "Force BadDNS to use a list of custom nameservers",
"only_high_confidence": "Do not emit low-confidence or generic detections",
}
max_event_handlers = 8
deps_pip = ["baddns~=1.1.0"]
deps_pip = ["baddns~=1.1.789"]

def select_modules(self):
selected_modules = []
Expand Down
6 changes: 5 additions & 1 deletion bbot/modules/badsecrets.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,11 @@ class badsecrets(BaseModule):
watched_events = ["HTTP_RESPONSE"]
produced_events = ["FINDING", "VULNERABILITY", "TECHNOLOGY"]
flags = ["active", "safe", "web-basic", "web-thorough"]
meta = {"description": "Library for detecting known or weak secrets across many web frameworks"}
meta = {
"description": "Library for detecting known or weak secrets across many web frameworks",
"created_date": "2022-11-19",
"author": "@liquidsec",
}
deps_pip = ["badsecrets~=0.4.490"]

@property
Expand Down
7 changes: 6 additions & 1 deletion bbot/modules/bevigil.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,12 @@ class bevigil(subdomain_enum_apikey):
watched_events = ["DNS_NAME"]
produced_events = ["DNS_NAME", "URL_UNVERIFIED"]
flags = ["subdomain-enum", "passive", "safe"]
meta = {"description": "Retrieve OSINT data from mobile applications using BeVigil", "auth_required": True}
meta = {
"description": "Retrieve OSINT data from mobile applications using BeVigil",
"created_date": "2022-10-26",
"author": "@alt-glitch",
"auth_required": True,
}
options = {"api_key": "", "urls": False}
options_desc = {"api_key": "BeVigil OSINT API Key", "urls": "Emit URLs in addition to DNS_NAMEs"}

Expand Down
7 changes: 6 additions & 1 deletion bbot/modules/binaryedge.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,12 @@ class binaryedge(subdomain_enum_apikey):
watched_events = ["DNS_NAME"]
produced_events = ["DNS_NAME"]
flags = ["subdomain-enum", "passive", "safe"]
meta = {"description": "Query the BinaryEdge API", "auth_required": True}
meta = {
"description": "Query the BinaryEdge API",
"created_date": "2024-08-18",
"author": "@TheTechromancer",
"auth_required": True,
}
options = {"api_key": "", "max_records": 1000}
options_desc = {
"api_key": "BinaryEdge API key",
Expand Down
6 changes: 5 additions & 1 deletion bbot/modules/bucket_amazon.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,11 @@ class bucket_amazon(bucket_template):
watched_events = ["DNS_NAME", "STORAGE_BUCKET"]
produced_events = ["STORAGE_BUCKET", "FINDING"]
flags = ["active", "safe", "cloud-enum", "web-basic", "web-thorough"]
meta = {"description": "Check for S3 buckets related to target"}
meta = {
"description": "Check for S3 buckets related to target",
"created_date": "2022-11-04",
"author": "@TheTechromancer",
}
options = {"permutations": False}
options_desc = {
"permutations": "Whether to try permutations",
Expand Down
6 changes: 5 additions & 1 deletion bbot/modules/bucket_azure.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,11 @@ class bucket_azure(bucket_template):
watched_events = ["DNS_NAME", "STORAGE_BUCKET"]
produced_events = ["STORAGE_BUCKET", "FINDING"]
flags = ["active", "safe", "cloud-enum", "web-basic", "web-thorough"]
meta = {"description": "Check for Azure storage blobs related to target"}
meta = {
"description": "Check for Azure storage blobs related to target",
"created_date": "2022-11-04",
"author": "@TheTechromancer",
}
options = {"permutations": False}
options_desc = {
"permutations": "Whether to try permutations",
Expand Down
6 changes: 5 additions & 1 deletion bbot/modules/bucket_digitalocean.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,11 @@ class bucket_digitalocean(bucket_template):
watched_events = ["DNS_NAME", "STORAGE_BUCKET"]
produced_events = ["STORAGE_BUCKET", "FINDING"]
flags = ["active", "safe", "slow", "cloud-enum", "web-thorough"]
meta = {"description": "Check for DigitalOcean spaces related to target"}
meta = {
"description": "Check for DigitalOcean spaces related to target",
"created_date": "2022-11-08",
"author": "@TheTechromancer",
}
options = {"permutations": False}
options_desc = {
"permutations": "Whether to try permutations",
Expand Down
4 changes: 3 additions & 1 deletion bbot/modules/bucket_file_enum.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,9 @@ class bucket_file_enum(BaseModule):
watched_events = ["STORAGE_BUCKET"]
produced_events = ["URL_UNVERIFIED"]
meta = {
"description": "Works in conjunction with the filedownload module to download files from open storage buckets. Currently supported cloud providers: AWS"
"description": "Works in conjunction with the filedownload module to download files from open storage buckets. Currently supported cloud providers: AWS",
"created_date": "2023-11-14",
"author": "@TheTechromancer",
}
flags = ["passive", "safe", "cloud-enum"]
options = {
Expand Down
6 changes: 5 additions & 1 deletion bbot/modules/bucket_firebase.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,11 @@ class bucket_firebase(bucket_template):
watched_events = ["DNS_NAME", "STORAGE_BUCKET"]
produced_events = ["STORAGE_BUCKET", "FINDING"]
flags = ["active", "safe", "cloud-enum", "web-basic", "web-thorough"]
meta = {"description": "Check for open Firebase databases related to target"}
meta = {
"description": "Check for open Firebase databases related to target",
"created_date": "2023-03-20",
"author": "@TheTechromancer",
}
options = {"permutations": False}
options_desc = {
"permutations": "Whether to try permutations",
Expand Down
6 changes: 5 additions & 1 deletion bbot/modules/bucket_google.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,11 @@ class bucket_google(bucket_template):
watched_events = ["DNS_NAME", "STORAGE_BUCKET"]
produced_events = ["STORAGE_BUCKET", "FINDING"]
flags = ["active", "safe", "cloud-enum", "web-basic", "web-thorough"]
meta = {"description": "Check for Google object storage related to target"}
meta = {
"description": "Check for Google object storage related to target",
"created_date": "2022-11-04",
"author": "@TheTechromancer",
}
options = {"permutations": False}
options_desc = {
"permutations": "Whether to try permutations",
Expand Down
Loading

0 comments on commit eeae1cb

Please sign in to comment.