mirror of
https://github.com/sherlock-project/sherlock
synced 2024-11-22 03:43:02 +00:00
This commit is contained in:
commit
43c5605576
6 changed files with 855 additions and 667 deletions
3
.gitignore
vendored
3
.gitignore
vendored
|
@ -22,5 +22,8 @@ src/
|
|||
# Comma-Separated Values (CSV) Reports
|
||||
*.csv
|
||||
|
||||
# Excluded sites list
|
||||
tests/.excluded_sites
|
||||
|
||||
# MacOS Folder Metadata File
|
||||
.DS_Store
|
||||
|
|
14
README.md
14
README.md
|
@ -66,10 +66,10 @@ usage: sherlock.py [-h] [--version] [--verbose] [--rank]
|
|||
[--unique-tor] [--csv] [--site SITE_NAME]
|
||||
[--proxy PROXY_URL] [--json JSON_FILE]
|
||||
[--proxy_list PROXY_LIST] [--check_proxies CHECK_PROXY]
|
||||
[--print-found]
|
||||
[--timeout TIMEOUT] [--print-found]
|
||||
USERNAMES [USERNAMES ...]
|
||||
|
||||
Sherlock: Find Usernames Across Social Networks (Version 0.9.11)
|
||||
Sherlock: Find Usernames Across Social Networks (Version 0.9.14)
|
||||
|
||||
positional arguments:
|
||||
USERNAMES One or more usernames to check with social networks.
|
||||
|
@ -110,8 +110,12 @@ optional arguments:
|
|||
file are working and anonymous. Put 0 for no limit on
|
||||
successfully checked proxies, or another number to
|
||||
institute a limit.
|
||||
--timeout TIMEOUT Time (in seconds) to wait for response to requests.
|
||||
Default timeout of 60.0s. A longer timeout will be more
|
||||
likely to get results from slow sites. On the other
|
||||
hand, this may cause a long delay to gather all
|
||||
results.
|
||||
--print-found Do not output sites where the username was not found.
|
||||
|
||||
```
|
||||
|
||||
To search for only one user:
|
||||
|
@ -197,6 +201,10 @@ Note that we do currently have 100% test coverage. Unfortunately, some of
|
|||
the sites that Sherlock checks are not always reliable, so it is common
|
||||
to get response errors.
|
||||
|
||||
If some sites are failing due to connection problems (site is down, in maintenance, etc)
|
||||
you can exclude them from tests by creating a `tests/.excluded_sites` file with a
|
||||
list of sites to ignore (one site name per line).
|
||||
|
||||
## Stargazers over time
|
||||
|
||||
[![Stargazers over time](https://starcharts.herokuapp.com/TheYahya/sherlock.svg)](https://starcharts.herokuapp.com/TheYahya/sherlock)
|
||||
|
|
75
sherlock.py
75
sherlock.py
|
@ -26,8 +26,7 @@ from torrequest import TorRequest
|
|||
from load_proxies import load_proxies_from_csv, check_proxy_list
|
||||
|
||||
module_name = "Sherlock: Find Usernames Across Social Networks"
|
||||
__version__ = "0.9.12"
|
||||
amount = 0
|
||||
__version__ = "0.9.14"
|
||||
|
||||
|
||||
global proxy_list
|
||||
|
@ -134,7 +133,8 @@ def get_response(request_future, error_type, social_network, verbose=False, retr
|
|||
return None, "", -1
|
||||
|
||||
|
||||
def sherlock(username, site_data, verbose=False, tor=False, unique_tor=False, proxy=None, print_found_only=False):
|
||||
def sherlock(username, site_data, verbose=False, tor=False, unique_tor=False,
|
||||
proxy=None, print_found_only=False, timeout=None):
|
||||
"""Run Sherlock Analysis.
|
||||
|
||||
Checks for existence of username on various social media sites.
|
||||
|
@ -147,6 +147,8 @@ def sherlock(username, site_data, verbose=False, tor=False, unique_tor=False, pr
|
|||
tor -- Boolean indicating whether to use a tor circuit for the requests.
|
||||
unique_tor -- Boolean indicating whether to use a new tor circuit for each request.
|
||||
proxy -- String indicating the proxy URL
|
||||
timeout -- Time in seconds to wait before timing out request.
|
||||
Default is no timeout.
|
||||
|
||||
Return Value:
|
||||
Dictionary containing results from report. Key of dictionary is the name
|
||||
|
@ -160,8 +162,6 @@ def sherlock(username, site_data, verbose=False, tor=False, unique_tor=False, pr
|
|||
response_text: Text that came back from request. May be None if
|
||||
there was an HTTP error when checking for existence.
|
||||
"""
|
||||
global amount
|
||||
|
||||
print_info("Checking username", username)
|
||||
|
||||
# Allow 1 thread for each external service, so `len(site_data)` threads total
|
||||
|
@ -225,11 +225,11 @@ def sherlock(username, site_data, verbose=False, tor=False, unique_tor=False, pr
|
|||
# from where the user profile normally can be found.
|
||||
url_probe = url_probe.format(username)
|
||||
|
||||
request_method = session.get
|
||||
if social_network != "GitHub":
|
||||
# If only the status_code is needed don't download the body
|
||||
if net_info["errorType"] == 'status_code':
|
||||
request_method = session.head
|
||||
#If only the status_code is needed don't download the body
|
||||
if net_info["errorType"] == 'status_code':
|
||||
request_method = session.head
|
||||
else:
|
||||
request_method = session.get
|
||||
|
||||
if net_info["errorType"] == "response_url":
|
||||
# Site forwards request to a different URL if username not
|
||||
|
@ -246,11 +246,13 @@ def sherlock(username, site_data, verbose=False, tor=False, unique_tor=False, pr
|
|||
proxies = {"http": proxy, "https": proxy}
|
||||
future = request_method(url=url_probe, headers=headers,
|
||||
proxies=proxies,
|
||||
allow_redirects=allow_redirects
|
||||
allow_redirects=allow_redirects,
|
||||
timeout=timeout
|
||||
)
|
||||
else:
|
||||
future = request_method(url=url_probe, headers=headers,
|
||||
allow_redirects=allow_redirects
|
||||
allow_redirects=allow_redirects,
|
||||
timeout=timeout
|
||||
)
|
||||
|
||||
# Store future in data for access later
|
||||
|
@ -308,7 +310,6 @@ def sherlock(username, site_data, verbose=False, tor=False, unique_tor=False, pr
|
|||
if not error in r.text:
|
||||
print_found(social_network, url, response_time, verbose)
|
||||
exists = "yes"
|
||||
amount = amount+1
|
||||
else:
|
||||
if not print_found_only:
|
||||
print_not_found(social_network, response_time, verbose)
|
||||
|
@ -319,7 +320,6 @@ def sherlock(username, site_data, verbose=False, tor=False, unique_tor=False, pr
|
|||
if not r.status_code >= 300 or r.status_code < 200:
|
||||
print_found(social_network, url, response_time, verbose)
|
||||
exists = "yes"
|
||||
amount = amount+1
|
||||
else:
|
||||
if not print_found_only:
|
||||
print_not_found(social_network, response_time, verbose)
|
||||
|
@ -335,7 +335,6 @@ def sherlock(username, site_data, verbose=False, tor=False, unique_tor=False, pr
|
|||
#
|
||||
print_found(social_network, url, response_time, verbose)
|
||||
exists = "yes"
|
||||
amount = amount+1
|
||||
else:
|
||||
if not print_found_only:
|
||||
print_not_found(social_network, response_time, verbose)
|
||||
|
@ -359,6 +358,31 @@ def sherlock(username, site_data, verbose=False, tor=False, unique_tor=False, pr
|
|||
return results_total
|
||||
|
||||
|
||||
def timeout_check(value):
|
||||
"""Check Timeout Argument.
|
||||
|
||||
Checks timeout for validity.
|
||||
|
||||
Keyword Arguments:
|
||||
value -- Time in seconds to wait before timing out request.
|
||||
|
||||
Return Value:
|
||||
Floating point number representing the time (in seconds) that should be
|
||||
used for the timeout.
|
||||
|
||||
NOTE: Will raise an exception if the timeout is invalid.
|
||||
"""
|
||||
from argparse import ArgumentTypeError
|
||||
|
||||
try:
|
||||
timeout = float(value)
|
||||
except:
|
||||
raise ArgumentTypeError(f"Timeout '{value}' must be a number.")
|
||||
if timeout <= 0:
|
||||
raise ArgumentTypeError(f"Timeout '{value}' must be greater than 0.0s.")
|
||||
return timeout
|
||||
|
||||
|
||||
def main():
|
||||
# Colorama module's initialization.
|
||||
init(autoreset=True)
|
||||
|
@ -419,6 +443,14 @@ def main():
|
|||
"The script will check if the proxies supplied in the .csv file are working and anonymous."
|
||||
"Put 0 for no limit on successfully checked proxies, or another number to institute a limit."
|
||||
)
|
||||
parser.add_argument("--timeout",
|
||||
action="store", metavar='TIMEOUT',
|
||||
dest="timeout", type=timeout_check, default=None,
|
||||
help="Time (in seconds) to wait for response to requests. "
|
||||
"Default timeout of 60.0s."
|
||||
"A longer timeout will be more likely to get results from slow sites."
|
||||
"On the other hand, this may cause a long delay to gather all results."
|
||||
)
|
||||
parser.add_argument("--print-found",
|
||||
action="store_true", dest="print_found_only", default=False,
|
||||
help="Do not output sites where the username was not found."
|
||||
|
@ -571,9 +603,14 @@ def main():
|
|||
except (NameError, IndexError):
|
||||
proxy = args.proxy
|
||||
|
||||
results = {}
|
||||
results = sherlock(username, site_data, verbose=args.verbose,
|
||||
tor=args.tor, unique_tor=args.unique_tor, proxy=args.proxy, print_found_only=args.print_found_only)
|
||||
results = sherlock(username,
|
||||
site_data,
|
||||
verbose=args.verbose,
|
||||
tor=args.tor,
|
||||
unique_tor=args.unique_tor,
|
||||
proxy=args.proxy,
|
||||
print_found_only=args.print_found_only,
|
||||
timeout=args.timeout)
|
||||
|
||||
exists_counter = 0
|
||||
for website_name in results:
|
||||
|
@ -581,7 +618,7 @@ def main():
|
|||
if dictionary.get("exists") == "yes":
|
||||
exists_counter += 1
|
||||
file.write(dictionary["url_user"] + "\n")
|
||||
file.write("Total Websites : {}".format(exists_counter))
|
||||
file.write(f"Total Websites Username Detected On : {exists_counter}")
|
||||
file.close()
|
||||
|
||||
if args.csv == True:
|
||||
|
|
453
sites.md
453
sites.md
|
@ -1,205 +1,248 @@
|
|||
## List Of Supported Sites (204 Sites In Total!)
|
||||
1. [2Dimensions](https://2Dimensions.com/)
|
||||
2. [500px](https://500px.com/)
|
||||
3. [7Cups](https://www.7cups.com/)
|
||||
4. [9GAG](https://9gag.com/)
|
||||
5. [About.me](https://about.me/)
|
||||
6. [Academia.edu](https://www.academia.edu/)
|
||||
7. [Alik.cz](https://www.alik.cz/)
|
||||
8. [Anobii](https://www.anobii.com/)
|
||||
9. [Aptoide](https://en.aptoide.com/)
|
||||
10. [Archive.org](https://archive.org)
|
||||
11. [AskFM](https://ask.fm/)
|
||||
12. [Avízo.cz](https://www.avizo.cz/)
|
||||
13. [BLIP.fm](https://blip.fm/)
|
||||
14. [Badoo](https://badoo.com/)
|
||||
15. [Bandcamp](https://www.bandcamp.com/)
|
||||
16. [Basecamp](https://basecamp.com/)
|
||||
17. [Bazar.cz](https://www.bazar.cz/)
|
||||
18. [Behance](https://www.behance.net/)
|
||||
19. [BitBucket](https://bitbucket.org/)
|
||||
20. [BitCoinForum](https://bitcoinforum.com)
|
||||
21. [Blogger](https://www.blogger.com/)
|
||||
22. [Bookcrossing](https://www.bookcrossing.com/)
|
||||
23. [Brew](https://www.brew.com/)
|
||||
24. [BuyMeACoffee](https://www.buymeacoffee.com/)
|
||||
25. [BuzzFeed](https://buzzfeed.com/)
|
||||
26. [CNET](https://www.cnet.com/)
|
||||
27. [Canva](https://www.canva.com/)
|
||||
28. [CapFriendly](https://www.capfriendly.com/)
|
||||
29. [Carbonmade](https://carbonmade.com/)
|
||||
30. [CashMe](https://cash.me/)
|
||||
31. [Cent](https://cent.co/)
|
||||
32. [Chatujme.cz](https://chatujme.cz/)
|
||||
33. [Cloob](https://www.cloob.com/)
|
||||
34. [Codecademy](https://www.codecademy.com/)
|
||||
35. [Codechef](https://www.codechef.com/)
|
||||
36. [Codementor](https://www.codementor.io/)
|
||||
37. [Coderwall](https://coderwall.com/)
|
||||
38. [Codewars](https://www.codewars.com)
|
||||
39. [ColourLovers](https://www.colourlovers.com/)
|
||||
40. [Contently](https://contently.com/)
|
||||
41. [Coroflot](https://coroflot.com/)
|
||||
42. [Cracked](https://www.cracked.com/)
|
||||
43. [CreativeMarket](https://creativemarket.com/)
|
||||
44. [Crevado](https://crevado.com/)
|
||||
45. [Crunchyroll](https://www.crunchyroll.com/)
|
||||
46. [DEV Community](https://dev.to/)
|
||||
47. [DailyMotion](https://www.dailymotion.com/)
|
||||
48. [Designspiration](https://www.designspiration.net/)
|
||||
49. [DeviantART](https://deviantart.com)
|
||||
50. [Discogs](https://www.discogs.com/)
|
||||
51. [Discuss.Elastic.co](https://discuss.elastic.co/)
|
||||
52. [Disqus](https://disqus.com/)
|
||||
53. [Docker Hub](https://hub.docker.com/)
|
||||
54. [Dribbble](https://dribbble.com/)
|
||||
55. [Ebay](https://www.ebay.com/)
|
||||
56. [Ello](https://ello.co/)
|
||||
57. [Etsy](https://www.etsy.com/)
|
||||
58. [EyeEm](https://www.eyeem.com/)
|
||||
59. [Facebook](https://www.facebook.com/)
|
||||
60. [Fandom](https://www.fandom.com/)
|
||||
61. [Filmogs](https://www.filmo.gs/)
|
||||
62. [Fiverr](https://www.fiverr.com/)
|
||||
63. [Flickr](https://www.flickr.com/)
|
||||
64. [Flightradar24](https://www.flightradar24.com/)
|
||||
65. [Flipboard](https://flipboard.com/)
|
||||
66. [FortniteTracker](https://fortnitetracker.com/challenges)
|
||||
67. [GDProfiles](https://gdprofiles.com/)
|
||||
68. [GPSies](https://www.gpsies.com/)
|
||||
69. [Gamespot](https://www.gamespot.com/)
|
||||
70. [Giphy](https://giphy.com/)
|
||||
71. [GitHub](https://www.github.com/)
|
||||
72. [GitLab](https://gitlab.com/)
|
||||
73. [Gitee](https://gitee.com/)
|
||||
74. [GoodReads](https://www.goodreads.com/)
|
||||
75. [Gravatar](http://en.gravatar.com/)
|
||||
76. [Gumroad](https://www.gumroad.com/)
|
||||
77. [GuruShots](https://gurushots.com/)
|
||||
78. [HackerNews](https://news.ycombinator.com/)
|
||||
79. [HackerOne](https://hackerone.com/)
|
||||
80. [HackerRank](https://hackerrank.com/)
|
||||
81. [House-Mixes.com](https://www.house-mixes.com/)
|
||||
82. [Houzz](https://houzz.com/)
|
||||
83. [HubPages](https://hubpages.com/)
|
||||
84. [Hubski](https://hubski.com/)
|
||||
85. [IFTTT](https://www.ifttt.com/)
|
||||
86. [ImageShack](https://imageshack.us/)
|
||||
87. [ImgUp.cz](https://imgup.cz/)
|
||||
88. [Instagram](https://www.instagram.com/)
|
||||
89. [Instructables](https://www.instructables.com/)
|
||||
90. [Investing.com](https://www.investing.com/)
|
||||
91. [Issuu](https://issuu.com/)
|
||||
92. [Itch.io](https://itch.io/)
|
||||
93. [Jimdo](https://jimdosite.com/)
|
||||
94. [Kaggle](https://www.kaggle.com/)
|
||||
95. [KanoWorld](https://world.kano.me/)
|
||||
96. [Keybase](https://keybase.io/)
|
||||
97. [Kik](http://kik.me/)
|
||||
98. [Kongregate](https://www.kongregate.com/)
|
||||
99. [Launchpad](https://launchpad.net/)
|
||||
100. [LeetCode](https://leetcode.com/)
|
||||
101. [Letterboxd](https://letterboxd.com/)
|
||||
102. [LiveJournal](https://www.livejournal.com/)
|
||||
103. [LiveLeak](https://www.liveleak.com/)
|
||||
104. [Lobsters](https://lobste.rs/)
|
||||
105. [Mastodon](https://mstdn.io/)
|
||||
106. [Medium](https://medium.com/)
|
||||
107. [MeetMe](https://www.meetme.com/)
|
||||
108. [MixCloud](https://www.mixcloud.com/)
|
||||
109. [MyAnimeList](https://myanimelist.net/)
|
||||
110. [Myspace](https://myspace.com/)
|
||||
111. [NPM](https://www.npmjs.com/)
|
||||
112. [NPM-Package](https://www.npmjs.com/)
|
||||
113. [NameMC (Minecraft.net skins)](https://namemc.com/)
|
||||
114. [NationStates Nation](https://nationstates.net)
|
||||
115. [NationStates Region](https://nationstates.net)
|
||||
116. [Newgrounds](https://newgrounds.com)
|
||||
117. [OK](https://ok.ru/)
|
||||
118. [OpenCollective](https://opencollective.com/)
|
||||
119. [OpenStreetMap](https://www.openstreetmap.org/)
|
||||
120. [PSNProfiles.com](https://psnprofiles.com/)
|
||||
121. [Packagist](https://packagist.org/)
|
||||
122. [Pastebin](https://pastebin.com/)
|
||||
123. [Patreon](https://www.patreon.com/)
|
||||
124. [PCPartPicker](https://pcpartpicker.com)
|
||||
125. [Pexels](https://www.pexels.com/)
|
||||
126. [Photobucket](https://photobucket.com/)
|
||||
127. [Pinterest](https://www.pinterest.com/)
|
||||
128. [Pixabay](https://pixabay.com/)
|
||||
129. [PlayStore](https://play.google.com/store)
|
||||
130. [Plug.DJ](https://plug.dj/)
|
||||
131. [Pokemon Showdown](https://pokemonshowdown.com)
|
||||
132. [Polygon](https://www.polygon.com/)
|
||||
133. [ProductHunt](https://www.producthunt.com/)
|
||||
134. [Quora](https://www.quora.com/)
|
||||
135. [Rajce.net](https://www.rajce.idnes.cz/)
|
||||
136. [Rate Your Music](https://rateyourmusic.com/)
|
||||
137. [Reddit](https://www.reddit.com/)
|
||||
138. [Repl.it](https://repl.it/)
|
||||
139. [ResearchGate](https://www.researchgate.net/)
|
||||
140. [ReverbNation](https://www.reverbnation.com/)
|
||||
141. [Roblox](https://www.roblox.com/)
|
||||
142. [Sbazar.cz](https://www.sbazar.cz/)
|
||||
143. [Scratch](https://scratch.mit.edu/)
|
||||
144. [Scribd](https://www.scribd.com/)
|
||||
145. [Signal](https://community.signalusers.org)
|
||||
146. [Slack](https://slack.com)
|
||||
147. [SlideShare](https://slideshare.net/)
|
||||
148. [Smashcast](https://www.smashcast.tv/)
|
||||
149. [SoundCloud](https://soundcloud.com/)
|
||||
150. [SourceForge](https://sourceforge.net/)
|
||||
151. [Speedrun.com](https://speedrun.com/)
|
||||
152. [Splits.io](https://splits.io)
|
||||
153. [Spotify](https://open.spotify.com/)
|
||||
154. [Star Citizen](https://robertsspaceindustries.com/)
|
||||
155. [Steam](https://steamcommunity.com/)
|
||||
156. [SteamGroup](https://steamcommunity.com/)
|
||||
157. [T-MobileSupport](https://support.t-mobile.com)
|
||||
158. [Taringa](https://taringa.net/)
|
||||
159. [Telegram](https://t.me/)
|
||||
160. [Tellonym.me](https://tellonym.me/)
|
||||
161. [TikTok](https://www.tiktok.com/)
|
||||
162. [Tinder](https://tinder.com/)
|
||||
163. [TrackmaniaLadder](http://en.tm-ladder.com/index.php)
|
||||
164. [TradingView](https://www.tradingview.com/)
|
||||
165. [Trakt](https://www.trakt.tv/)
|
||||
166. [Trello](https://trello.com/)
|
||||
167. [Trip](https://www.trip.skyscanner.com/)
|
||||
168. [TripAdvisor](https://tripadvisor.com/)
|
||||
169. [Twitch](https://www.twitch.tv/)
|
||||
170. [Twitter](https://www.twitter.com/)
|
||||
171. [Ultimate-Guitar](https://ultimate-guitar.com/)
|
||||
172. [Unsplash](https://unsplash.com/)
|
||||
173. [VK](https://vk.com/)
|
||||
174. [VSCO](https://vsco.co/)
|
||||
175. [Venmo](https://venmo.com/)
|
||||
176. [Viadeo](http://fr.viadeo.com/en/)
|
||||
177. [Vimeo](https://vimeo.com/)
|
||||
178. [Virgool](https://virgool.io/)
|
||||
179. [VirusTotal](https://www.virustotal.com/)
|
||||
180. [Wattpad](https://www.wattpad.com/)
|
||||
181. [We Heart It](https://weheartit.com/)
|
||||
182. [WebNode](https://www.webnode.cz/)
|
||||
183. [Wikidot](http://www.wikidot.com/)
|
||||
184. [Wikipedia](https://www.wikipedia.org/)
|
||||
185. [Wix](https://wix.com/)
|
||||
186. [WordPress](https://wordpress.com)
|
||||
187. [WordPressOrg](https://wordpress.org/)
|
||||
188. [YouNow](https://www.younow.com/)
|
||||
189. [YouPic](https://youpic.com/)
|
||||
190. [YouTube](https://www.youtube.com/)
|
||||
191. [Zhihu](https://www.zhihu.com/)
|
||||
192. [Zomato](https://www.zomato.com/)
|
||||
193. [authorSTREAM](http://www.authorstream.com/)
|
||||
194. [boingboing.net](https://boingboing.net/)
|
||||
195. [devRant](https://devrant.com/)
|
||||
196. [fanpop](http://www.fanpop.com/)
|
||||
197. [gfycat](https://gfycat.com/)
|
||||
198. [habr](https://habr.com/)
|
||||
199. [iMGSRC.RU](https://imgsrc.ru/)
|
||||
200. [last.fm](https://last.fm/)
|
||||
201. [mixer.com](https://mixer.com/)
|
||||
202. [osu!](https://osu.ppy.sh/)
|
||||
203. [pikabu](https://pikabu.ru/)
|
||||
204. [segmentfault](https://segmentfault.com/)
|
||||
## List Of Supported Sites (245 Sites In Total!)
|
||||
1. [2Dimensions](https://2Dimensions.com/)
|
||||
2. [500px](https://500px.com/)
|
||||
3. [7Cups](https://www.7cups.com/)
|
||||
4. [9GAG](https://9gag.com/)
|
||||
5. [About.me](https://about.me/)
|
||||
6. [Academia.edu](https://www.academia.edu/)
|
||||
7. [Alik.cz](https://www.alik.cz/)
|
||||
8. [Anobii](https://www.anobii.com/)
|
||||
9. [Aptoide](https://en.aptoide.com/)
|
||||
10. [Archive.org](https://archive.org)
|
||||
11. [AskFM](https://ask.fm/)
|
||||
12. [Audiojungle](https://audiojungle.net/)
|
||||
13. [Avizo](https://www.avizo.cz/)
|
||||
14. [BLIP.fm](https://blip.fm/)
|
||||
15. [Badoo](https://badoo.com/)
|
||||
16. [Bandcamp](https://www.bandcamp.com/)
|
||||
17. [Basecamp](https://basecamp.com/)
|
||||
18. [Bazar.cz](https://www.bazar.cz/)
|
||||
19. [Behance](https://www.behance.net/)
|
||||
20. [BitBucket](https://bitbucket.org/)
|
||||
21. [BitCoinForum](https://bitcoinforum.com)
|
||||
22. [Blogger](https://www.blogger.com/)
|
||||
23. [Bookcrossing](https://www.bookcrossing.com/)
|
||||
24. [Brew](https://www.brew.com/)
|
||||
25. [BuyMeACoffee](https://www.buymeacoffee.com/)
|
||||
26. [BuzzFeed](https://buzzfeed.com/)
|
||||
27. [CNET](https://www.cnet.com/)
|
||||
28. [Canva](https://www.canva.com/)
|
||||
29. [CapFriendly](https://www.capfriendly.com/)
|
||||
30. [Carbonmade](https://carbonmade.com/)
|
||||
31. [CashMe](https://cash.me/)
|
||||
32. [Cent](https://cent.co/)
|
||||
33. [Championat](https://www.championat.com/)
|
||||
34. [Chatujme.cz](https://chatujme.cz/)
|
||||
35. [Chess](https://www.chess.com/ru/)
|
||||
36. [Cloob](https://www.cloob.com/)
|
||||
37. [Codecademy](https://www.codecademy.com/)
|
||||
38. [Codechef](https://www.codechef.com/)
|
||||
39. [Codementor](https://www.codementor.io/)
|
||||
40. [Coderwall](https://coderwall.com/)
|
||||
41. [Codewars](https://www.codewars.com)
|
||||
42. [ColourLovers](https://www.colourlovers.com/)
|
||||
43. [Contently](https://contently.com/)
|
||||
44. [Coroflot](https://coroflot.com/)
|
||||
45. [Cracked](https://www.cracked.com/)
|
||||
46. [CreativeMarket](https://creativemarket.com/)
|
||||
47. [Crevado](https://crevado.com/)
|
||||
48. [Crunchyroll](https://www.crunchyroll.com/)
|
||||
49. [DEV Community](https://dev.to/)
|
||||
50. [DailyMotion](https://www.dailymotion.com/)
|
||||
51. [Designspiration](https://www.designspiration.net/)
|
||||
52. [DeviantART](https://deviantart.com)
|
||||
53. [Discogs](https://www.discogs.com/)
|
||||
54. [Discuss.Elastic.co](https://discuss.elastic.co/)
|
||||
55. [Disqus](https://disqus.com/)
|
||||
56. [Docker Hub](https://hub.docker.com/)
|
||||
57. [Dribbble](https://dribbble.com/)
|
||||
58. [Ebay](https://www.ebay.com/)
|
||||
59. [Ello](https://ello.co/)
|
||||
60. [Etsy](https://www.etsy.com/)
|
||||
61. [EyeEm](https://www.eyeem.com/)
|
||||
62. [F3.cool](https://f3.cool/)
|
||||
63. [Facebook](https://www.facebook.com/)
|
||||
64. [Facenama](https://facenama.com/)
|
||||
65. [Fandom](https://www.fandom.com/)
|
||||
66. [Filmogs](https://www.filmo.gs/)
|
||||
67. [Fiverr](https://www.fiverr.com/)
|
||||
68. [Flickr](https://www.flickr.com/)
|
||||
69. [Flightradar24](https://www.flightradar24.com/)
|
||||
70. [Flipboard](https://flipboard.com/)
|
||||
71. [Football](https://www.rusfootball.info/)
|
||||
72. [FortniteTracker](https://fortnitetracker.com/challenges)
|
||||
73. [GDProfiles](https://gdprofiles.com/)
|
||||
74. [GPSies](https://www.gpsies.com/)
|
||||
75. [Gamespot](https://www.gamespot.com/)
|
||||
76. [Giphy](https://giphy.com/)
|
||||
77. [GipsysTeam](https://site.gipsyteam.ru/)
|
||||
78. [GitHub](https://www.github.com/)
|
||||
79. [GitLab](https://gitlab.com/)
|
||||
80. [Gitee](https://gitee.com/)
|
||||
81. [GoodReads](https://www.goodreads.com/)
|
||||
82. [Gravatar](http://en.gravatar.com/)
|
||||
83. [Gumroad](https://www.gumroad.com/)
|
||||
84. [GuruShots](https://gurushots.com/)
|
||||
85. [HackerNews](https://news.ycombinator.com/)
|
||||
86. [HackerOne](https://hackerone.com/)
|
||||
87. [HackerRank](https://hackerrank.com/)
|
||||
88. [House-Mixes.com](https://www.house-mixes.com/)
|
||||
89. [Houzz](https://houzz.com/)
|
||||
90. [HubPages](https://hubpages.com/)
|
||||
91. [Hubski](https://hubski.com/)
|
||||
92. [IFTTT](https://www.ifttt.com/)
|
||||
93. [ImageShack](https://imageshack.us/)
|
||||
94. [ImgUp.cz](https://imgup.cz/)
|
||||
95. [Insanejournal](https://insanejournal.com/)
|
||||
96. [Instagram](https://www.instagram.com/)
|
||||
97. [Instructables](https://www.instructables.com/)
|
||||
98. [Investing.com](https://www.investing.com/)
|
||||
99. [Issuu](https://issuu.com/)
|
||||
100. [Itch.io](https://itch.io/)
|
||||
101. [Jimdo](https://jimdosite.com/)
|
||||
102. [Kaggle](https://www.kaggle.com/)
|
||||
103. [KanoWorld](https://world.kano.me/)
|
||||
104. [Keybase](https://keybase.io/)
|
||||
105. [Kik](http://kik.me/)
|
||||
106. [KiwiFarms](https://kiwifarms.net/)
|
||||
107. [Kongregate](https://www.kongregate.com/)
|
||||
108. [LOR](https://linux.org.ru/)
|
||||
109. [Launchpad](https://launchpad.net/)
|
||||
110. [LeetCode](https://leetcode.com/)
|
||||
111. [Letterboxd](https://letterboxd.com/)
|
||||
112. [LiveJournal](https://www.livejournal.com/)
|
||||
113. [LiveLeak](https://www.liveleak.com/)
|
||||
114. [Lobsters](https://lobste.rs/)
|
||||
115. [Mastodon](https://mstdn.io/)
|
||||
116. [Medium](https://medium.com/)
|
||||
117. [MeetMe](https://www.meetme.com/)
|
||||
118. [MixCloud](https://www.mixcloud.com/)
|
||||
119. [MyAnimeList](https://myanimelist.net/)
|
||||
120. [Myspace](https://myspace.com/)
|
||||
121. [NPM](https://www.npmjs.com/)
|
||||
122. [NPM-Package](https://www.npmjs.com/)
|
||||
123. [NameMC (Minecraft.net skins)](https://namemc.com/)
|
||||
124. [NationStates Nation](https://nationstates.net)
|
||||
125. [NationStates Region](https://nationstates.net)
|
||||
126. [Newgrounds](https://newgrounds.com)
|
||||
127. [OK](https://ok.ru/)
|
||||
128. [OpenCollective](https://opencollective.com/)
|
||||
129. [OpenStreetMap](https://www.openstreetmap.org/)
|
||||
130. [Otzovik](https://otzovik.com/)
|
||||
131. [PCPartPicker](https://pcpartpicker.com)
|
||||
132. [PSNProfiles.com](https://psnprofiles.com/)
|
||||
133. [Packagist](https://packagist.org/)
|
||||
134. [Pastebin](https://pastebin.com/)
|
||||
135. [Patreon](https://www.patreon.com/)
|
||||
136. [Periscope](https://www.periscope.tv/)
|
||||
137. [Pexels](https://www.pexels.com/)
|
||||
138. [Photobucket](https://photobucket.com/)
|
||||
139. [Pinkbike](https://www.pinkbike.com/)
|
||||
140. [Pinterest](https://www.pinterest.com/)
|
||||
141. [Pixabay](https://pixabay.com/)
|
||||
142. [PlayStore](https://play.google.com/store)
|
||||
143. [Pling](https://www.pling.com/)
|
||||
144. [Plug.DJ](https://plug.dj/)
|
||||
145. [Pokemon Showdown](https://pokemonshowdown.com)
|
||||
146. [Polygon](https://www.polygon.com/)
|
||||
147. [ProductHunt](https://www.producthunt.com/)
|
||||
148. [PromoDJ](http://promodj.com/)
|
||||
149. [Quora](https://www.quora.com/)
|
||||
150. [Rajce.net](https://www.rajce.idnes.cz/)
|
||||
151. [RamblerDating](https://dating.rambler.ru/)
|
||||
152. [Rate Your Music](https://rateyourmusic.com/)
|
||||
153. [Reddit](https://www.reddit.com/)
|
||||
154. [Redsun.tf](https://redsun.tf/)
|
||||
155. [Repl.it](https://repl.it/)
|
||||
156. [ResearchGate](https://www.researchgate.net/)
|
||||
157. [ReverbNation](https://www.reverbnation.com/)
|
||||
158. [Roblox](https://www.roblox.com/)
|
||||
159. [Sbazar.cz](https://www.sbazar.cz/)
|
||||
160. [Scratch](https://scratch.mit.edu/)
|
||||
161. [Scribd](https://www.scribd.com/)
|
||||
162. [ShitpostBot5000](https://www.shitpostbot.com/)
|
||||
163. [Signal](https://community.signalusers.org)
|
||||
164. [Slack](https://slack.com)
|
||||
165. [SlideShare](https://slideshare.net/)
|
||||
166. [Smashcast](https://www.smashcast.tv/)
|
||||
167. [Smule](https://www.smule.com/)
|
||||
168. [SoundCloud](https://soundcloud.com/)
|
||||
169. [SourceForge](https://sourceforge.net/)
|
||||
170. [Speedrun.com](https://speedrun.com/)
|
||||
171. [Splits.io](https://splits.io)
|
||||
172. [Sporcle](https://www.sporcle.com/)
|
||||
173. [SportsRU](https://www.sports.ru/)
|
||||
174. [SportsTracker](https://www.sports-tracker.com/)
|
||||
175. [Spotify](https://open.spotify.com/)
|
||||
176. [Star Citizen](https://robertsspaceindustries.com/)
|
||||
177. [Steam](https://steamcommunity.com/)
|
||||
178. [SteamGroup](https://steamcommunity.com/)
|
||||
179. [SublimeForum](https://forum.sublimetext.com/)
|
||||
180. [T-MobileSupport](https://support.t-mobile.com)
|
||||
181. [TamTam](https://tamtam.chat/)
|
||||
182. [Taringa](https://taringa.net/)
|
||||
183. [Telegram](https://t.me/)
|
||||
184. [Tellonym.me](https://tellonym.me/)
|
||||
185. [TikTok](https://www.tiktok.com/)
|
||||
186. [Tinder](https://tinder.com/)
|
||||
187. [TrackmaniaLadder](http://en.tm-ladder.com/index.php)
|
||||
188. [TradingView](https://www.tradingview.com/)
|
||||
189. [Trakt](https://www.trakt.tv/)
|
||||
190. [Trello](https://trello.com/)
|
||||
191. [Trip](https://www.trip.skyscanner.com/)
|
||||
192. [TripAdvisor](https://tripadvisor.com/)
|
||||
193. [Twitch](https://www.twitch.tv/)
|
||||
194. [Twitter](https://www.twitter.com/)
|
||||
195. [Ultimate-Guitar](https://ultimate-guitar.com/)
|
||||
196. [Unsplash](https://unsplash.com/)
|
||||
197. [VK](https://vk.com/)
|
||||
198. [VSCO](https://vsco.co/)
|
||||
199. [Venmo](https://venmo.com/)
|
||||
200. [Viadeo](http://fr.viadeo.com/en/)
|
||||
201. [Vimeo](https://vimeo.com/)
|
||||
202. [Virgool](https://virgool.io/)
|
||||
203. [VirusTotal](https://www.virustotal.com/)
|
||||
204. [Wattpad](https://www.wattpad.com/)
|
||||
205. [We Heart It](https://weheartit.com/)
|
||||
206. [WebNode](https://www.webnode.cz/)
|
||||
207. [Wikidot](http://www.wikidot.com/)
|
||||
208. [Wikipedia](https://www.wikipedia.org/)
|
||||
209. [Wix](https://wix.com/)
|
||||
210. [WordPress](https://wordpress.com)
|
||||
211. [WordPressOrg](https://wordpress.org/)
|
||||
212. [YandexCollection](https://yandex.ru/collections/)
|
||||
213. [YandexMarket](https://market.yandex.ru/)
|
||||
214. [YouNow](https://www.younow.com/)
|
||||
215. [YouPic](https://youpic.com/)
|
||||
216. [YouTube](https://www.youtube.com/)
|
||||
217. [Zhihu](https://www.zhihu.com/)
|
||||
218. [Zomato](https://www.zomato.com/)
|
||||
219. [authorSTREAM](http://www.authorstream.com/)
|
||||
220. [boingboing.net](https://boingboing.net/)
|
||||
221. [couchsurfing](https://www.couchsurfing.com/)
|
||||
222. [devRant](https://devrant.com/)
|
||||
223. [drive2](https://www.drive2.ru/)
|
||||
224. [eGPU](https://egpu.io/)
|
||||
225. [easyen](https://easyen.ru/)
|
||||
226. [fanpop](http://www.fanpop.com/)
|
||||
227. [fl](https://www.fl.ru/)
|
||||
228. [geocaching](https://www.geocaching.com/)
|
||||
229. [gfycat](https://gfycat.com/)
|
||||
230. [habr](https://habr.com/)
|
||||
231. [iMGSRC.RU](https://imgsrc.ru/)
|
||||
232. [interpals](https://www.interpals.net/)
|
||||
233. [irecommend](https://irecommend.ru/)
|
||||
234. [kwork](https://www.kwork.ru/)
|
||||
235. [last.fm](https://last.fm/)
|
||||
236. [livelib](https://www.livelib.ru/)
|
||||
237. [metacritic](https://www.metacritic.com/)
|
||||
238. [mixer.com](https://mixer.com/)
|
||||
239. [moikrug](https://moikrug.ru/)
|
||||
240. [opennet](https://www.opennet.ru/)
|
||||
241. [osu!](https://osu.ppy.sh/)
|
||||
242. [pedsovet](http://pedsovet.su/)
|
||||
243. [pikabu](https://pikabu.ru/)
|
||||
244. [segmentfault](https://segmentfault.com/)
|
||||
245. [toster](https://www.toster.ru/)
|
||||
|
||||
Alexa.com rank data fetched at (2019-12-07 23:41:36.794857 UTC)
|
||||
|
|
|
@ -32,9 +32,18 @@ class SherlockBaseTest(unittest.TestCase):
|
|||
with open(data_file_path, "r", encoding="utf-8") as raw:
|
||||
self.site_data_all = json.load(raw)
|
||||
|
||||
# Load excluded sites list, if any
|
||||
excluded_sites_path = os.path.join(os.path.dirname(os.path.realpath(sherlock.__file__)), "tests/.excluded_sites")
|
||||
try:
|
||||
with open(excluded_sites_path, "r", encoding="utf-8") as excluded_sites_file:
|
||||
self.excluded_sites = excluded_sites_file.read().splitlines()
|
||||
except FileNotFoundError:
|
||||
self.excluded_sites = []
|
||||
|
||||
self.verbose=False
|
||||
self.tor=False
|
||||
self.unique_tor=False
|
||||
self.timeout=None
|
||||
|
||||
return
|
||||
|
||||
|
@ -96,7 +105,8 @@ class SherlockBaseTest(unittest.TestCase):
|
|||
site_data,
|
||||
verbose=self.verbose,
|
||||
tor=self.tor,
|
||||
unique_tor=self.unique_tor
|
||||
unique_tor=self.unique_tor,
|
||||
timeout=self.timeout
|
||||
)
|
||||
for site, result in results.items():
|
||||
with self.subTest(f"Checking Username '{username}' "
|
||||
|
@ -134,6 +144,7 @@ class SherlockBaseTest(unittest.TestCase):
|
|||
|
||||
for site, site_data in self.site_data_all.items():
|
||||
if (
|
||||
(site in self.excluded_sites) or
|
||||
(site_data["errorType"] != detect_type) or
|
||||
(site_data.get("username_claimed") is None) or
|
||||
(site_data.get("username_unclaimed") is None)
|
||||
|
|
Loading…
Reference in a new issue