Merge branch 'master' into feature/python-3.12

This commit is contained in:
Siddharth Dushantha 2023-12-21 20:40:35 +01:00 committed by GitHub
commit fe43e9b247
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
6 changed files with 388 additions and 234 deletions

View file

@ -837,5 +837,37 @@
"url": "https://forums.gunsandammo.com/profile/{}", "url": "https://forums.gunsandammo.com/profile/{}",
"urlMain": "https://gunsandammo.com/", "urlMain": "https://gunsandammo.com/",
"username_claimed": "adam" "username_claimed": "adam"
},
"TikTok": {
"errorType": "status_code",
"url": "https://tiktok.com/@{}",
"urlMain": "https://tiktok.com/",
"username_claimed": "red"
},
"Lolchess": {
"errorMsg": "No search results",
"errorType": "message",
"url": "https://lolchess.gg/profile/na/{}",
"urlMain": "https://lolchess.gg/",
"username_claimed": "blue"
},
"Virgool": {
"errorMsg": "\u06f4\u06f0\u06f4",
"errorType": "message",
"url": "https://virgool.io/@{}",
"urlMain": "https://virgool.io/",
"username_claimed": "blue"
},
"Whonix Forum": {
"errorType": "status_code",
"url": "https://forums.whonix.org/u/{}/summary",
"urlMain": "https://forums.whonix.org/",
"username_claimed": "red"
},
"ebio.gg": {
"errorType": "status_code",
"url": "https://ebio.gg/{}",
"urlMain": "https:/ebio.gg",
"username_claimed": "dev"
} }
} }

View file

@ -1825,3 +1825,61 @@ As of 2023.08.29, GunsAndAmmo responds with 404 from time to time
"username_claimed": "adam" "username_claimed": "adam"
} }
``` ```
## TikTok
As of 2023.12.21, TikTok returns false positives. This is because the webpage returns a somewhat blank page. This prevents us from being able to check for the existence of usernames. Proxitok does not work either.
```json
"TikTok": {
"errorType": "status_code",
"url": "https://tiktok.com/@{}",
"urlMain": "https://tiktok.com/",
"username_claimed": "red"
},
```
## Lolchess
As of 2023.12.21, Lolchess returns false positives.
```json
"Lolchess": {
"errorMsg": "No search results",
"errorType": "message",
"url": "https://lolchess.gg/profile/na/{}",
"urlMain": "https://lolchess.gg/",
"username_claimed": "blue"
},
```
## Virgool
As of 2023.12.21, Virgool returns false positives.
```json
"Virgool": {
"errorMsg": "\u06f4\u06f0\u06f4",
"errorType": "message",
"url": "https://virgool.io/@{}",
"urlMain": "https://virgool.io/",
"username_claimed": "blue"
},
```
## Whonix Forum
As of 2023.12.21, Whonix Forum returns false positives.
```json
"Whonix Forum": {
"errorType": "status_code",
"url": "https://forums.whonix.org/u/{}/summary",
"urlMain": "https://forums.whonix.org/",
"username_claimed": "red"
},
```
## Ebio
As of 2023.12.21, ebio.gg returns false positives.
```json
"ebio.gg": {
"errorType": "status_code",
"url": "https://ebio.gg/{}",
"urlMain": "https:/ebio.gg",
"username_claimed": "dev"
},
```

View file

@ -502,7 +502,7 @@
"username_claimed": "blue" "username_claimed": "blue"
}, },
"Contently": { "Contently": {
"errorMsg": "We can't find that page!", "errorMsg": "<title>Leading Content Marketing Platform | Contently</title>",
"errorType": "message", "errorType": "message",
"regexCheck": "^[a-zA-Z][a-zA-Z0-9_-]*$", "regexCheck": "^[a-zA-Z][a-zA-Z0-9_-]*$",
"url": "https://{}.contently.com/", "url": "https://{}.contently.com/",
@ -1237,13 +1237,6 @@
"urlMain": "https://lobste.rs/", "urlMain": "https://lobste.rs/",
"username_claimed": "jcs" "username_claimed": "jcs"
}, },
"Lolchess": {
"errorMsg": "No search results",
"errorType": "message",
"url": "https://lolchess.gg/profile/na/{}",
"urlMain": "https://lolchess.gg/",
"username_claimed": "blue"
},
"LottieFiles": { "LottieFiles": {
"errorType": "status_code", "errorType": "status_code",
"url": "https://lottiefiles.com/{}", "url": "https://lottiefiles.com/{}",
@ -1945,12 +1938,6 @@
"urlMain": "https://themeforest.net/", "urlMain": "https://themeforest.net/",
"username_claimed": "user" "username_claimed": "user"
}, },
"TikTok": {
"errorType": "status_code",
"url": "https://tiktok.com/@{}",
"urlMain": "https://tiktok.com/",
"username_claimed": "red"
},
"TnAFlix": { "TnAFlix": {
"errorType": "status_code", "errorType": "status_code",
"isNSFW": true, "isNSFW": true,
@ -2097,13 +2084,6 @@
"urlMain": "https://vimeo.com/", "urlMain": "https://vimeo.com/",
"username_claimed": "blue" "username_claimed": "blue"
}, },
"Virgool": {
"errorMsg": "\u06f4\u06f0\u06f4",
"errorType": "message",
"url": "https://virgool.io/@{}",
"urlMain": "https://virgool.io/",
"username_claimed": "blue"
},
"VirusTotal": { "VirusTotal": {
"errorType": "status_code", "errorType": "status_code",
"request_method": "GET", "request_method": "GET",
@ -2152,12 +2132,6 @@
"urlMain": "https://weebly.com/", "urlMain": "https://weebly.com/",
"username_claimed": "blue" "username_claimed": "blue"
}, },
"Whonix Forum": {
"errorType": "status_code",
"url": "https://forums.whonix.org/u/{}/summary",
"urlMain": "https://forums.whonix.org/",
"username_claimed": "red"
},
"Wikidot": { "Wikidot": {
"errorMsg": "User does not exist.", "errorMsg": "User does not exist.",
"errorType": "message", "errorType": "message",
@ -2344,12 +2318,6 @@
"urlMain": "https://egpu.io/", "urlMain": "https://egpu.io/",
"username_claimed": "blue" "username_claimed": "blue"
}, },
"ebio.gg": {
"errorType": "status_code",
"url": "https://ebio.gg/{}",
"urlMain": "https:/ebio.gg",
"username_claimed": "dev"
},
"eintracht": { "eintracht": {
"errorType": "status_code", "errorType": "status_code",
"url": "https://community.eintracht.de/fans/{}", "url": "https://community.eintracht.de/fans/{}",

View file

@ -26,6 +26,7 @@ from result import QueryResult
from notify import QueryNotifyPrint from notify import QueryNotifyPrint
from sites import SitesInformation from sites import SitesInformation
from colorama import init from colorama import init
from argparse import ArgumentTypeError
module_name = "Sherlock: Find Usernames Across Social Networks" module_name = "Sherlock: Find Usernames Across Social Networks"
__version__ = "0.14.3" __version__ = "0.14.3"
@ -91,10 +92,9 @@ class SherlockFuturesSession(FuturesSession):
# No response hook was already defined, so install it ourselves. # No response hook was already defined, so install it ourselves.
hooks["response"] = [response_time] hooks["response"] = [response_time]
return super(SherlockFuturesSession, self).request(method, return super(SherlockFuturesSession, self).request(
url, method, url, hooks=hooks, *args, **kwargs
hooks=hooks, )
*args, **kwargs)
def get_response(request_future, error_type, social_network): def get_response(request_future, error_type, social_network):
@ -127,42 +127,43 @@ def get_response(request_future, error_type, social_network):
return response, error_context, exception_text return response, error_context, exception_text
def interpolate_string(object, username): def interpolate_string(input_object, username):
"""Insert a string into the string properties of an object recursively.""" if isinstance(input_object, str):
return input_object.replace("{}", username)
if isinstance(object, str): elif isinstance(input_object, dict):
return object.replace("{}", username) return {k: interpolate_string(v, username) for k, v in input_object.items()}
elif isinstance(object, dict): elif isinstance(input_object, list):
for key, value in object.items(): return [interpolate_string(i, username) for i in input_object]
object[key] = interpolate_string(value, username) return input_object
elif isinstance(object, list):
for i in object:
object[i] = interpolate_string(object[i], username)
return object
def CheckForParameter(username): def check_for_parameter(username):
'''checks if {?} exists in the username """checks if {?} exists in the username
if exist it means that sherlock is looking for more multiple username''' if exist it means that sherlock is looking for more multiple username"""
return ("{?}" in username) return "{?}" in username
checksymbols = [] checksymbols = []
checksymbols = ["_", "-", "."] checksymbols = ["_", "-", "."]
def MultipleUsernames(username): def multiple_usernames(username):
'''replace the parameter with with symbols and return a list of usernames''' """replace the parameter with with symbols and return a list of usernames"""
allUsernames = [] allUsernames = []
for i in checksymbols: for i in checksymbols:
allUsernames.append(username.replace("{?}", i)) allUsernames.append(username.replace("{?}", i))
return allUsernames return allUsernames
def sherlock(username, site_data, query_notify, def sherlock(
tor=False, unique_tor=False, username,
proxy=None, timeout=60): site_data,
query_notify,
tor=False,
unique_tor=False,
proxy=None,
timeout=60,
):
"""Run Sherlock Analysis. """Run Sherlock Analysis.
Checks for existence of username on various social media sites. Checks for existence of username on various social media sites.
@ -214,15 +215,15 @@ def sherlock(username, site_data, query_notify,
max_workers = len(site_data) max_workers = len(site_data)
# Create multi-threaded session for all requests. # Create multi-threaded session for all requests.
session = SherlockFuturesSession(max_workers=max_workers, session = SherlockFuturesSession(
session=underlying_session) max_workers=max_workers, session=underlying_session
)
# Results from analysis of all sites # Results from analysis of all sites
results_total = {} results_total = {}
# First create futures for all requests. This allows for the requests to run in parallel # First create futures for all requests. This allows for the requests to run in parallel
for social_network, net_info in site_data.items(): for social_network, net_info in site_data.items():
# Results from analysis of this specific site # Results from analysis of this specific site
results_site = {"url_main": net_info.get("urlMain")} results_site = {"url_main": net_info.get("urlMain")}
@ -245,10 +246,9 @@ def sherlock(username, site_data, query_notify,
regex_check = net_info.get("regexCheck") regex_check = net_info.get("regexCheck")
if regex_check and re.search(regex_check, username) is None: if regex_check and re.search(regex_check, username) is None:
# No need to do the check at the site: this username is not allowed. # No need to do the check at the site: this username is not allowed.
results_site["status"] = QueryResult(username, results_site["status"] = QueryResult(
social_network, username, social_network, url, QueryStatus.ILLEGAL
url, )
QueryStatus.ILLEGAL)
results_site["url_user"] = "" results_site["url_user"] = ""
results_site["http_status"] = "" results_site["http_status"] = ""
results_site["response_text"] = "" results_site["response_text"] = ""
@ -309,17 +309,21 @@ def sherlock(username, site_data, query_notify,
# This future starts running the request in a new thread, doesn't block the main thread # This future starts running the request in a new thread, doesn't block the main thread
if proxy is not None: if proxy is not None:
proxies = {"http": proxy, "https": proxy} proxies = {"http": proxy, "https": proxy}
future = request(url=url_probe, headers=headers, future = request(
url=url_probe,
headers=headers,
proxies=proxies, proxies=proxies,
allow_redirects=allow_redirects, allow_redirects=allow_redirects,
timeout=timeout, timeout=timeout,
json=request_payload json=request_payload,
) )
else: else:
future = request(url=url_probe, headers=headers, future = request(
url=url_probe,
headers=headers,
allow_redirects=allow_redirects, allow_redirects=allow_redirects,
timeout=timeout, timeout=timeout,
json=request_payload json=request_payload,
) )
# Store future in data for access later # Store future in data for access later
@ -335,7 +339,6 @@ def sherlock(username, site_data, query_notify,
# Open the file containing account links # Open the file containing account links
# Core logic: If tor requests, make them here. If multi-threaded requests, wait for responses # Core logic: If tor requests, make them here. If multi-threaded requests, wait for responses
for social_network, net_info in site_data.items(): for social_network, net_info in site_data.items():
# Retrieve results again # Retrieve results again
results_site = results_total.get(social_network) results_site = results_total.get(social_network)
@ -352,9 +355,9 @@ def sherlock(username, site_data, query_notify,
# Retrieve future and ensure it has finished # Retrieve future and ensure it has finished
future = net_info["request_future"] future = net_info["request_future"]
r, error_text, exception_text = get_response(request_future=future, r, error_text, exception_text = get_response(
error_type=error_type, request_future=future, error_type=error_type, social_network=social_network
social_network=social_network) )
# Get response time for response of our request. # Get response time for response of our request.
try: try:
@ -365,11 +368,11 @@ def sherlock(username, site_data, query_notify,
# Attempt to get request information # Attempt to get request information
try: try:
http_status = r.status_code http_status = r.status_code
except: except Exception:
http_status = "?" http_status = "?"
try: try:
response_text = r.text.encode(r.encoding or "UTF-8") response_text = r.text.encode(r.encoding or "UTF-8")
except: except Exception:
response_text = "" response_text = ""
query_status = QueryStatus.UNKNOWN query_status = QueryStatus.UNKNOWN
@ -424,16 +427,19 @@ def sherlock(username, site_data, query_notify,
query_status = QueryStatus.AVAILABLE query_status = QueryStatus.AVAILABLE
else: else:
# It should be impossible to ever get here... # It should be impossible to ever get here...
raise ValueError(f"Unknown Error Type '{error_type}' for " raise ValueError(
f"site '{social_network}'") f"Unknown Error Type '{error_type}' for " f"site '{social_network}'"
)
# Notify caller about results of query. # Notify caller about results of query.
result = QueryResult(username=username, result = QueryResult(
username=username,
site_name=social_network, site_name=social_network,
site_url_user=url, site_url_user=url,
status=query_status, status=query_status,
query_time=response_time, query_time=response_time,
context=error_context) context=error_context,
)
query_notify.update(result) query_notify.update(result)
# Save status of request # Save status of request
@ -463,16 +469,13 @@ def timeout_check(value):
NOTE: Will raise an exception if the timeout in invalid. NOTE: Will raise an exception if the timeout in invalid.
""" """
from argparse import ArgumentTypeError
try: if value <= 0:
timeout = float(value)
except:
raise ArgumentTypeError(f"Timeout '{value}' must be a number.")
if timeout <= 0:
raise ArgumentTypeError( raise ArgumentTypeError(
f"Timeout '{value}' must be greater than 0.0s.") f"Invalid timeout value: {value}. Timeout must be a positive number."
return timeout )
return float(value)
def handler(signal_received, frame): def handler(signal_received, frame):
@ -484,86 +487,159 @@ def handler(signal_received, frame):
def main(): def main():
version_string = f"%(prog)s {__version__}\n" + \ version_string = (
f"{requests.__description__}: {requests.__version__}\n" + \ f"%(prog)s {__version__}\n"
f"Python: {platform.python_version()}" + f"{requests.__description__}: {requests.__version__}\n"
+ f"Python: {platform.python_version()}"
)
parser = ArgumentParser(formatter_class=RawDescriptionHelpFormatter, parser = ArgumentParser(
description=f"{module_name} (Version {__version__})" formatter_class=RawDescriptionHelpFormatter,
description=f"{module_name} (Version {__version__})",
) )
parser.add_argument("--version", parser.add_argument(
action="version", version=version_string, "--version",
help="Display version information and dependencies." action="version",
version=version_string,
help="Display version information and dependencies.",
) )
parser.add_argument("--verbose", "-v", "-d", "--debug", parser.add_argument(
action="store_true", dest="verbose", default=False, "--verbose",
help="Display extra debugging information and metrics." "-v",
"-d",
"--debug",
action="store_true",
dest="verbose",
default=False,
help="Display extra debugging information and metrics.",
) )
parser.add_argument("--folderoutput", "-fo", dest="folderoutput", parser.add_argument(
help="If using multiple usernames, the output of the results will be saved to this folder." "--folderoutput",
"-fo",
dest="folderoutput",
help="If using multiple usernames, the output of the results will be saved to this folder.",
) )
parser.add_argument("--output", "-o", dest="output", parser.add_argument(
help="If using single username, the output of the result will be saved to this file." "--output",
"-o",
dest="output",
help="If using single username, the output of the result will be saved to this file.",
) )
parser.add_argument("--tor", "-t", parser.add_argument(
action="store_true", dest="tor", default=False, "--tor",
help="Make requests over Tor; increases runtime; requires Tor to be installed and in system path.") "-t",
parser.add_argument("--unique-tor", "-u", action="store_true",
action="store_true", dest="unique_tor", default=False, dest="tor",
help="Make requests over Tor with new Tor circuit after each request; increases runtime; requires Tor to be installed and in system path.") default=False,
parser.add_argument("--csv", help="Make requests over Tor; increases runtime; requires Tor to be installed and in system path.",
action="store_true", dest="csv", default=False,
help="Create Comma-Separated Values (CSV) File."
) )
parser.add_argument("--xlsx", parser.add_argument(
action="store_true", dest="xlsx", default=False, "--unique-tor",
help="Create the standard file for the modern Microsoft Excel spreadsheet (xslx)." "-u",
action="store_true",
dest="unique_tor",
default=False,
help="Make requests over Tor with new Tor circuit after each request; increases runtime; requires Tor to be installed and in system path.",
) )
parser.add_argument("--site", parser.add_argument(
action="append", metavar="SITE_NAME", "--csv",
dest="site_list", default=None, action="store_true",
help="Limit analysis to just the listed sites. Add multiple options to specify more than one site." dest="csv",
default=False,
help="Create Comma-Separated Values (CSV) File.",
) )
parser.add_argument("--proxy", "-p", metavar="PROXY_URL", parser.add_argument(
action="store", dest="proxy", default=None, "--xlsx",
help="Make requests over a proxy. e.g. socks5://127.0.0.1:1080" action="store_true",
dest="xlsx",
default=False,
help="Create the standard file for the modern Microsoft Excel spreadsheet (xslx).",
) )
parser.add_argument("--json", "-j", metavar="JSON_FILE", parser.add_argument(
dest="json_file", default=None, "--site",
help="Load data from a JSON file or an online, valid, JSON file.") action="append",
parser.add_argument("--timeout", metavar="SITE_NAME",
action="store", metavar="TIMEOUT", dest="site_list",
dest="timeout", type=timeout_check, default=60, default=None,
help="Time (in seconds) to wait for response to requests (Default: 60)" help="Limit analysis to just the listed sites. Add multiple options to specify more than one site.",
) )
parser.add_argument("--print-all", parser.add_argument(
action="store_true", dest="print_all", default=False, "--proxy",
help="Output sites where the username was not found." "-p",
) metavar="PROXY_URL",
parser.add_argument("--print-found",
action="store_true", dest="print_found", default=True,
help="Output sites where the username was found (also if exported as file)."
)
parser.add_argument("--no-color",
action="store_true", dest="no_color", default=False,
help="Don't color terminal output"
)
parser.add_argument("username",
nargs="+", metavar="USERNAMES",
action="store", action="store",
help="One or more usernames to check with social networks. Check similar usernames using {%%} (replace to '_', '-', '.')." dest="proxy",
default=None,
help="Make requests over a proxy. e.g. socks5://127.0.0.1:1080",
)
parser.add_argument(
"--json",
"-j",
metavar="JSON_FILE",
dest="json_file",
default=None,
help="Load data from a JSON file or an online, valid, JSON file.",
)
parser.add_argument(
"--timeout",
action="store",
metavar="TIMEOUT",
dest="timeout",
type=timeout_check,
default=60,
help="Time (in seconds) to wait for response to requests (Default: 60)",
)
parser.add_argument(
"--print-all",
action="store_true",
dest="print_all",
default=False,
help="Output sites where the username was not found.",
)
parser.add_argument(
"--print-found",
action="store_true",
dest="print_found",
default=True,
help="Output sites where the username was found (also if exported as file).",
)
parser.add_argument(
"--no-color",
action="store_true",
dest="no_color",
default=False,
help="Don't color terminal output",
)
parser.add_argument(
"username",
nargs="+",
metavar="USERNAMES",
action="store",
help="One or more usernames to check with social networks. Check similar usernames using {%%} (replace to '_', '-', '.').",
)
parser.add_argument(
"--browse",
"-b",
action="store_true",
dest="browse",
default=False,
help="Browse to all results on default browser.",
) )
parser.add_argument("--browse", "-b",
action="store_true", dest="browse", default=False,
help="Browse to all results on default browser.")
parser.add_argument("--local", "-l", parser.add_argument(
action="store_true", default=False, "--local",
help="Force the use of the local data.json file.") "-l",
action="store_true",
default=False,
help="Force the use of the local data.json file.",
)
parser.add_argument("--nsfw", parser.add_argument(
action="store_true", default=False, "--nsfw",
help="Include checking of NSFW sites from default list.") action="store_true",
default=False,
help="Include checking of NSFW sites from default list.",
)
args = parser.parse_args() args = parser.parse_args()
@ -573,14 +649,17 @@ def main():
# Check for newer version of Sherlock. If it exists, let the user know about it # Check for newer version of Sherlock. If it exists, let the user know about it
try: try:
r = requests.get( r = requests.get(
"https://raw.githubusercontent.com/sherlock-project/sherlock/master/sherlock/sherlock.py") "https://raw.githubusercontent.com/sherlock-project/sherlock/master/sherlock/sherlock.py"
)
remote_version = str(re.findall('__version__ = "(.*)"', r.text)[0]) remote_version = str(re.findall('__version__ = "(.*)"', r.text)[0])
local_version = __version__ local_version = __version__
if remote_version != local_version: if remote_version != local_version:
print("Update Available!\n" + print(
f"You are running version {local_version}. Version {remote_version} is available at https://github.com/sherlock-project/sherlock") "Update Available!\n"
+ f"You are running version {local_version}. Version {remote_version} is available at https://github.com/sherlock-project/sherlock"
)
except Exception as error: except Exception as error:
print(f"A problem occurred while checking for an update: {error}") print(f"A problem occurred while checking for an update: {error}")
@ -598,7 +677,8 @@ def main():
print("Using Tor to make requests") print("Using Tor to make requests")
print( print(
"Warning: some websites might refuse connecting over Tor, so note that using this option might increase connection errors.") "Warning: some websites might refuse connecting over Tor, so note that using this option might increase connection errors."
)
if args.no_color: if args.no_color:
# Disable color output. # Disable color output.
@ -620,8 +700,9 @@ def main():
# Create object with all information about sites we are aware of. # Create object with all information about sites we are aware of.
try: try:
if args.local: if args.local:
sites = SitesInformation(os.path.join( sites = SitesInformation(
os.path.dirname(__file__), "resources/data.json")) os.path.join(os.path.dirname(__file__), "resources/data.json")
)
else: else:
sites = SitesInformation(args.json_file) sites = SitesInformation(args.json_file)
except Exception as error: except Exception as error:
@ -654,35 +735,34 @@ def main():
site_missing.append(f"'{site}'") site_missing.append(f"'{site}'")
if site_missing: if site_missing:
print( print(f"Error: Desired sites not found: {', '.join(site_missing)}.")
f"Error: Desired sites not found: {', '.join(site_missing)}.")
if not site_data: if not site_data:
sys.exit(1) sys.exit(1)
# Create notify object for query results. # Create notify object for query results.
query_notify = QueryNotifyPrint(result=None, query_notify = QueryNotifyPrint(
verbose=args.verbose, result=None, verbose=args.verbose, print_all=args.print_all, browse=args.browse
print_all=args.print_all, )
browse=args.browse)
# Run report on all specified users. # Run report on all specified users.
all_usernames = [] all_usernames = []
for username in args.username: for username in args.username:
if (CheckForParameter(username)): if check_for_parameter(username):
for name in MultipleUsernames(username): for name in multiple_usernames(username):
all_usernames.append(name) all_usernames.append(name)
else: else:
all_usernames.append(username) all_usernames.append(username)
for username in all_usernames: for username in all_usernames:
results = sherlock(
results = sherlock(username, username,
site_data, site_data,
query_notify, query_notify,
tor=args.tor, tor=args.tor,
unique_tor=args.unique_tor, unique_tor=args.unique_tor,
proxy=args.proxy, proxy=args.proxy,
timeout=args.timeout) timeout=args.timeout,
)
if args.output: if args.output:
result_file = args.output result_file = args.output
@ -701,8 +781,7 @@ def main():
if dictionary.get("status").status == QueryStatus.CLAIMED: if dictionary.get("status").status == QueryStatus.CLAIMED:
exists_counter += 1 exists_counter += 1
file.write(dictionary["url_user"] + "\n") file.write(dictionary["url_user"] + "\n")
file.write( file.write(f"Total Websites Username Detected On : {exists_counter}\n")
f"Total Websites Username Detected On : {exists_counter}\n")
if args.csv: if args.csv:
result_file = f"{username}.csv" result_file = f"{username}.csv"
@ -712,31 +791,39 @@ def main():
os.makedirs(args.folderoutput, exist_ok=True) os.makedirs(args.folderoutput, exist_ok=True)
result_file = os.path.join(args.folderoutput, result_file) result_file = os.path.join(args.folderoutput, result_file)
with open(result_file, "w", newline='', encoding="utf-8") as csv_report: with open(result_file, "w", newline="", encoding="utf-8") as csv_report:
writer = csv.writer(csv_report) writer = csv.writer(csv_report)
writer.writerow(["username", writer.writerow(
[
"username",
"name", "name",
"url_main", "url_main",
"url_user", "url_user",
"exists", "exists",
"http_status", "http_status",
"response_time_s" "response_time_s",
] ]
) )
for site in results: for site in results:
if args.print_found and not args.print_all and results[site]["status"].status != QueryStatus.CLAIMED: if (
args.print_found
and not args.print_all
and results[site]["status"].status != QueryStatus.CLAIMED
):
continue continue
response_time_s = results[site]["status"].query_time response_time_s = results[site]["status"].query_time
if response_time_s is None: if response_time_s is None:
response_time_s = "" response_time_s = ""
writer.writerow([username, writer.writerow(
[
username,
site, site,
results[site]["url_main"], results[site]["url_main"],
results[site]["url_user"], results[site]["url_user"],
str(results[site]["status"].status), str(results[site]["status"].status),
results[site]["http_status"], results[site]["http_status"],
response_time_s response_time_s,
] ]
) )
if args.xlsx: if args.xlsx:
@ -749,7 +836,11 @@ def main():
response_time_s = [] response_time_s = []
for site in results: for site in results:
if args.print_found and not args.print_all and results[site]["status"].status != QueryStatus.CLAIMED: if (
args.print_found
and not args.print_all
and results[site]["status"].status != QueryStatus.CLAIMED
):
continue continue
if response_time_s is None: if response_time_s is None:
@ -763,8 +854,18 @@ def main():
exists.append(str(results[site]["status"].status)) exists.append(str(results[site]["status"].status))
http_status.append(results[site]["http_status"]) http_status.append(results[site]["http_status"])
DataFrame = pd.DataFrame({"username": usernames, "name": names, "url_main": url_main, "url_user": url_user, "exists": exists, "http_status": http_status, "response_time_s": response_time_s}) DataFrame = pd.DataFrame(
DataFrame.to_excel(f'{username}.xlsx', sheet_name='sheet1', index=False) {
"username": usernames,
"name": names,
"url_main": url_main,
"url_user": url_user,
"exists": exists,
"http_status": http_status,
"response_time_s": response_time_s,
}
)
DataFrame.to_excel(f"{username}.xlsx", sheet_name="sheet1", index=False)
print() print()
query_notify.finish() query_notify.finish()

View file

@ -1,4 +1,4 @@
import imp import importlib
import unittest import unittest
import sys import sys
sys.path.append('../') sys.path.append('../')
@ -7,9 +7,9 @@ import sherlock as sh
checksymbols = [] checksymbols = []
checksymbols = ["_", "-", "."] checksymbols = ["_", "-", "."]
"""Test for mulriple usernames. """Test for multiple usernames.
This test ensures that the function MultipleUsernames works properly. More specific, This test ensures that the function multiple_usernames works properly. More specific,
different scenarios are tested and only usernames that contain this specific sequence: {?} different scenarios are tested and only usernames that contain this specific sequence: {?}
should return positive. should return positive.
@ -23,7 +23,7 @@ class TestMultipleUsernames(unittest.TestCase):
def test_area(self): def test_area(self):
test_usernames = ["test{?}test" , "test{?feo" , "test"] test_usernames = ["test{?}test" , "test{?feo" , "test"]
for name in test_usernames: for name in test_usernames:
if(sh.CheckForParameter(name)): if(sh.check_for_parameter(name)):
self.assertAlmostEqual(sh.MultipleUsernames(name), ["test_test" , "test-test" , "test.test"]) self.assertAlmostEqual(sh.multiple_usernames(name), ["test_test" , "test-test" , "test.test"])
else: else:
self.assertAlmostEqual(name, name) self.assertAlmostEqual(name, name)

View file

@ -1,4 +1,4 @@
## List Of Supported Sites (401 Sites In Total!) ## List Of Supported Sites (396 Sites In Total!)
1. ![](https://www.google.com/s2/favicons?domain=https://2Dimensions.com/) [2Dimensions](https://2Dimensions.com/) 1. ![](https://www.google.com/s2/favicons?domain=https://2Dimensions.com/) [2Dimensions](https://2Dimensions.com/)
1. ![](https://www.google.com/s2/favicons?domain=http://forum.3dnews.ru/) [3dnews](http://forum.3dnews.ru/) 1. ![](https://www.google.com/s2/favicons?domain=http://forum.3dnews.ru/) [3dnews](http://forum.3dnews.ru/)
1. ![](https://www.google.com/s2/favicons?domain=https://www.7cups.com/) [7Cups](https://www.7cups.com/) 1. ![](https://www.google.com/s2/favicons?domain=https://www.7cups.com/) [7Cups](https://www.7cups.com/)
@ -180,7 +180,6 @@
1. ![](https://www.google.com/s2/favicons?domain=https://listed.to/) [Listed](https://listed.to/) 1. ![](https://www.google.com/s2/favicons?domain=https://listed.to/) [Listed](https://listed.to/)
1. ![](https://www.google.com/s2/favicons?domain=https://www.livejournal.com/) [LiveJournal](https://www.livejournal.com/) 1. ![](https://www.google.com/s2/favicons?domain=https://www.livejournal.com/) [LiveJournal](https://www.livejournal.com/)
1. ![](https://www.google.com/s2/favicons?domain=https://lobste.rs/) [Lobsters](https://lobste.rs/) 1. ![](https://www.google.com/s2/favicons?domain=https://lobste.rs/) [Lobsters](https://lobste.rs/)
1. ![](https://www.google.com/s2/favicons?domain=https://lolchess.gg/) [Lolchess](https://lolchess.gg/)
1. ![](https://www.google.com/s2/favicons?domain=https://lottiefiles.com/) [LottieFiles](https://lottiefiles.com/) 1. ![](https://www.google.com/s2/favicons?domain=https://lottiefiles.com/) [LottieFiles](https://lottiefiles.com/)
1. ![](https://www.google.com/s2/favicons?domain=https://www.lushstories.com/) [LushStories](https://www.lushstories.com/) **(NSFW)** 1. ![](https://www.google.com/s2/favicons?domain=https://www.lushstories.com/) [LushStories](https://www.lushstories.com/) **(NSFW)**
1. ![](https://www.google.com/s2/favicons?domain=https://forums.mmorpg.com/) [MMORPG Forum](https://forums.mmorpg.com/) 1. ![](https://www.google.com/s2/favicons?domain=https://forums.mmorpg.com/) [MMORPG Forum](https://forums.mmorpg.com/)
@@ -286,7 +285,6 @@
1. ![](https://www.google.com/s2/favicons?domain=https://tellonym.me/) [Tellonym.me](https://tellonym.me/) 1. ![](https://www.google.com/s2/favicons?domain=https://tellonym.me/) [Tellonym.me](https://tellonym.me/)
1. ![](https://www.google.com/s2/favicons?domain=https://tenor.com/) [Tenor](https://tenor.com/) 1. ![](https://www.google.com/s2/favicons?domain=https://tenor.com/) [Tenor](https://tenor.com/)
1. ![](https://www.google.com/s2/favicons?domain=https://themeforest.net/) [ThemeForest](https://themeforest.net/) 1. ![](https://www.google.com/s2/favicons?domain=https://themeforest.net/) [ThemeForest](https://themeforest.net/)
1. ![](https://www.google.com/s2/favicons?domain=https://tiktok.com/) [TikTok](https://tiktok.com/)
1. ![](https://www.google.com/s2/favicons?domain=https://www.tnaflix.com/) [TnAFlix](https://www.tnaflix.com/) **(NSFW)** 1. ![](https://www.google.com/s2/favicons?domain=https://www.tnaflix.com/) [TnAFlix](https://www.tnaflix.com/) **(NSFW)**
1. ![](https://www.google.com/s2/favicons?domain=https://www.tradingview.com/) [TradingView](https://www.tradingview.com/) 1. ![](https://www.google.com/s2/favicons?domain=https://www.tradingview.com/) [TradingView](https://www.tradingview.com/)
1. ![](https://www.google.com/s2/favicons?domain=https://www.trakt.tv/) [Trakt](https://www.trakt.tv/) 1. ![](https://www.google.com/s2/favicons?domain=https://www.trakt.tv/) [Trakt](https://www.trakt.tv/)
@@ -307,7 +305,6 @@
1. ![](https://www.google.com/s2/favicons?domain=https://venmo.com/) [Venmo](https://venmo.com/) 1. ![](https://www.google.com/s2/favicons?domain=https://venmo.com/) [Venmo](https://venmo.com/)
1. ![](https://www.google.com/s2/favicons?domain=https://vero.co/) [Vero](https://vero.co/) 1. ![](https://www.google.com/s2/favicons?domain=https://vero.co/) [Vero](https://vero.co/)
1. ![](https://www.google.com/s2/favicons?domain=https://vimeo.com/) [Vimeo](https://vimeo.com/) 1. ![](https://www.google.com/s2/favicons?domain=https://vimeo.com/) [Vimeo](https://vimeo.com/)
1. ![](https://www.google.com/s2/favicons?domain=https://virgool.io/) [Virgool](https://virgool.io/)
1. ![](https://www.google.com/s2/favicons?domain=https://www.virustotal.com/) [VirusTotal](https://www.virustotal.com/) 1. ![](https://www.google.com/s2/favicons?domain=https://www.virustotal.com/) [VirusTotal](https://www.virustotal.com/)
1. ![](https://www.google.com/s2/favicons?domain=https://discourse.wicg.io/) [WICG Forum](https://discourse.wicg.io/) 1. ![](https://www.google.com/s2/favicons?domain=https://discourse.wicg.io/) [WICG Forum](https://discourse.wicg.io/)
1. ![](https://www.google.com/s2/favicons?domain=https://www.warriorforum.com/) [Warrior Forum](https://www.warriorforum.com/) 1. ![](https://www.google.com/s2/favicons?domain=https://www.warriorforum.com/) [Warrior Forum](https://www.warriorforum.com/)
@@ -315,7 +312,6 @@
1. ![](https://www.google.com/s2/favicons?domain=https://www.webnode.cz/) [WebNode](https://www.webnode.cz/) 1. ![](https://www.google.com/s2/favicons?domain=https://www.webnode.cz/) [WebNode](https://www.webnode.cz/)
1. ![](https://www.google.com/s2/favicons?domain=https://hosted.weblate.org/) [Weblate](https://hosted.weblate.org/) 1. ![](https://www.google.com/s2/favicons?domain=https://hosted.weblate.org/) [Weblate](https://hosted.weblate.org/)
1. ![](https://www.google.com/s2/favicons?domain=https://weebly.com/) [Weebly](https://weebly.com/) 1. ![](https://www.google.com/s2/favicons?domain=https://weebly.com/) [Weebly](https://weebly.com/)
1. ![](https://www.google.com/s2/favicons?domain=https://forums.whonix.org/) [Whonix Forum](https://forums.whonix.org/)
1. ![](https://www.google.com/s2/favicons?domain=http://www.wikidot.com/) [Wikidot](http://www.wikidot.com/) 1. ![](https://www.google.com/s2/favicons?domain=http://www.wikidot.com/) [Wikidot](http://www.wikidot.com/)
1. ![](https://www.google.com/s2/favicons?domain=https://www.wikipedia.org/) [Wikipedia](https://www.wikipedia.org/) 1. ![](https://www.google.com/s2/favicons?domain=https://www.wikipedia.org/) [Wikipedia](https://www.wikipedia.org/)
1. ![](https://www.google.com/s2/favicons?domain=https://windy.com/) [Windy](https://windy.com/) 1. ![](https://www.google.com/s2/favicons?domain=https://windy.com/) [Windy](https://windy.com/)
@@ -344,7 +340,6 @@
1. ![](https://www.google.com/s2/favicons?domain=https://devrant.com/) [devRant](https://devrant.com/) 1. ![](https://www.google.com/s2/favicons?domain=https://devrant.com/) [devRant](https://devrant.com/)
1. ![](https://www.google.com/s2/favicons?domain=https://www.drive2.ru/) [drive2](https://www.drive2.ru/) 1. ![](https://www.google.com/s2/favicons?domain=https://www.drive2.ru/) [drive2](https://www.drive2.ru/)
1. ![](https://www.google.com/s2/favicons?domain=https://egpu.io/) [eGPU](https://egpu.io/) 1. ![](https://www.google.com/s2/favicons?domain=https://egpu.io/) [eGPU](https://egpu.io/)
1. ![](https://www.google.com/s2/favicons?domain=https://ebio.gg) [ebio.gg](https://ebio.gg)
1. ![](https://www.google.com/s2/favicons?domain=https://eintracht.de) [eintracht](https://eintracht.de) 1. ![](https://www.google.com/s2/favicons?domain=https://eintracht.de) [eintracht](https://eintracht.de)
1. ![](https://www.google.com/s2/favicons?domain=https://www.fixya.com) [fixya](https://www.fixya.com) 1. ![](https://www.google.com/s2/favicons?domain=https://www.fixya.com) [fixya](https://www.fixya.com)
1. ![](https://www.google.com/s2/favicons?domain=https://www.fl.ru/) [fl](https://www.fl.ru/) 1. ![](https://www.google.com/s2/favicons?domain=https://www.fl.ru/) [fl](https://www.fl.ru/)