mirror of https://github.com/The-Art-of-Hacking/h4cker
synced 2024-11-24 03:43:03 +00:00

adding quick_recon to osint

This commit is contained in: parent 523b3fd208, commit 77af32a311
10 changed files with 225 additions and 2 deletions
@@ -306218,7 +306218,7 @@ gree4fly
Greddy
grecos
grecoohg
grecon
quick_recon
grecian
grebo
Greaves
@@ -101,7 +101,7 @@ The following are a collection of recently-released pen test tools. I update thi
- [Vulmap - Web Vulnerability Scanning And Verification Tools](http://feedproxy.google.com/~r/PentestTools/~3/ZY2bsPn-m08/vulmap-web-vulnerability-scanning-and.html)
- [Censys-Python - An Easy-To-Use And Lightweight API Wrapper For The Censys Search Engine](http://feedproxy.google.com/~r/PentestTools/~3/enuM2IsKXsY/censys-python-easy-to-use-and.html)
- [Swego - Swiss Army Knife Webserver In Golang](http://feedproxy.google.com/~r/PentestTools/~3/aYheVURWxao/swego-swiss-army-knife-webserver-in.html)
- [GRecon - Your Google Recon Is Now Automated](http://feedproxy.google.com/~r/PentestTools/~3/ucwiubifmO4/grecon-your-google-recon-is-now.html)
- [quick_recon - Your Google Recon Is Now Automated](http://feedproxy.google.com/~r/PentestTools/~3/ucwiubifmO4/quick_recon-your-google-recon-is-now.html)
- [Kenzer - Automated Web Assets Enumeration And Scanning](http://feedproxy.google.com/~r/PentestTools/~3/UatODvipiLw/kenzer-automated-web-assets-enumeration.html)
- [Grawler - Tool Which Comes With A Web Interface That Automates The Task Of Using Google Dorks, Scrapes The Results, And Stores Them In A File](http://feedproxy.google.com/~r/PentestTools/~3/7bBN-zmnyww/grawler-tool-which-comes-with-web.html)
- [0D1N v3.4 - Tool For Automating Customized Attacks Against Web Applications (Full Made In C Language With Pthreads, Have A Fast Performance)](http://feedproxy.google.com/~r/PentestTools/~3/FCcpoal9Cig/0d1n-v34-tool-for-automating-customized.html)
9  osint/quick_recon/alpha.txt  Normal file
@@ -0,0 +1,9 @@
Current Micro Plugins :

[>] Subdomains...[UP]
[>] Sub-Subdomains...[UP]
[>] Signup/Login pages...[UP]
[>] Dir Listing...[UP]
[>] Exposed Docs...[UP]
[>] WordPress Entries...[UP]
[>] Pasting Sites...[UP]
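For reference, a minimal sketch of the Google dorks each micro plugin above maps to, collected from quick_recon.py and plugins/pasting.py later in this commit; example.com is only a placeholder target:

# Sketch only: the dork templates behind each micro plugin (example.com is a placeholder).
key = "example.com"
DORKS = {
    "Subdomains": ["site:" + key + " -www." + key, "site:*." + key],
    "Sub-Subdomains": ["site:*.*." + key],
    "Signup/Login pages": ["inurl:login site:" + key,
                           "site:" + key + " inurl:signup | inurl:register | intitle:Signup"],
    "Dir Listing": ["site:" + key + " intitle:index of"],
    "Exposed Docs": ["site:" + key + " ext:doc | ext:docx | ext:odt | ext:pdf | ext:rtf"
                     " | ext:sxw | ext:psw | ext:ppt | ext:pptx | ext:pps | ext:csv"],
    "WordPress Entries": ["site:" + key + " inurl:wp- | inurl:wp-content | inurl:plugins"
                          " | inurl:uploads | inurl:themes | inurl:download"],
    "Pasting Sites": ["site:pastebin.com | site:hastebin.com | site:carbon.now.sh " + key],
}
for plugin, queries in DORKS.items():
    for q in queries:
        print(plugin + ": " + q)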
BIN  osint/quick_recon/plugins/__pycache__/pasting.cpython-37.pyc  Normal file
Binary file not shown.
BIN  osint/quick_recon/plugins/__pycache__/pasting.cpython-38.pyc  Normal file
Binary file not shown.
35  osint/quick_recon/plugins/pasting.py  Normal file
@@ -0,0 +1,35 @@
#!/usr/bin/env python3
# pasting.py - quick_recon plugin: perform a quick OSINT recon of pasting sites
# (Pastebin, Hastebin, carbon.now.sh) for a given domain.
# This is an example and work in progress.

import os
import random
import time

import requests
from googlesearch import search
from termcolor import colored

# Google country TLDs to rotate through when querying.
TLD = ["co.ma", "dz", "ru", "ca"]


def run():
    """Run the pasting-sites dork. Expects quick_recon.config (written by
    quick_recon.py) to hold the target domain/keyword."""
    zolo = random.choice(TLD)

    # Quick connectivity check against Google before firing the query.
    X = requests.get('https://www.google.com/webhp?ei=1')  # : /
    if X.status_code == 200:
        time.sleep(5)

    with open('quick_recon.config', 'r') as f:
        key = f.read()

    time.sleep(5)
    print(colored('[>] Looking in Pasting Sites...', 'green'))  # Pasting sites, e.g. Pastebin
    query = "site:pastebin.com | site:hastebin.com | site:carbon.now.sh " + key
    for gamma in search(query, tld=zolo, num=30, stop=60, pause=2):
        print(gamma)
    print("")

    # Clean up the cookie file left behind by googlesearch and the temporary config.
    if os.path.exists(".google-cookie"):
        os.remove(".google-cookie")
    os.remove("quick_recon.config")
    print(colored('[>] Done...Happy Hunting', 'green'))


if __name__ == "__main__":
    run()
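A minimal standalone usage sketch for this plugin, assuming the run() wrapper above, a working directory of osint/quick_recon, and example.com as a placeholder target:

# Write the target first: run() reads quick_recon.config and deletes it when done.
with open("quick_recon.config", "w") as cfg:
    cfg.write("example.com")

from plugins import pasting
pasting.run()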
3  osint/quick_recon/qrecon_update.txt  Normal file
@@ -0,0 +1,3 @@
Update Status...[NO UPDATE]
quick_recon V1.0
Resuming...
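quick_recon.py fetches this file from the repository's raw URL at startup: a 200 response prints the text above as the update status, while a 404 is treated as an update notice. A small sketch of that check, assuming the file lives on the master branch:

import requests

URL = ("https://raw.githubusercontent.com/The-Art-of-Hacking/h4cker/"
       "master/osint/quick_recon/qrecon_update.txt")
r = requests.get(URL)
if r.status_code == 200:
    print(r.text)    # e.g. "Update Status...[NO UPDATE]"
elif r.status_code == 404:
    print("Update Available - see the repository's osint directory")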
139  osint/quick_recon/quick_recon.py  Normal file
@@ -0,0 +1,139 @@
#!/usr/bin/env python3
# A script to perform a quick OSINT recon for a given domain.
# This is an example and work in progress.

import os
import random
import sys
import time

import requests
from googlesearch import search
from termcolor import colored
from http import cookiejar
from urllib.parse import urlparse

from plugins import pasting

Subdomains = []


def SubdomainFilter(URL):
    # Reduce a search hit to scheme://host/ and print each host only once.
    Parsed = urlparse(URL)
    Host = Parsed.scheme + "://" + Parsed.netloc + "/"
    if Host not in Subdomains:
        print(Host)
        Subdomains.append(Host)


if os.path.exists("alpha.txt"):
    print("")
    # Update check against the status file published in the repository (raw URL, master branch).
    Qupdate = requests.get('https://raw.githubusercontent.com/The-Art-of-Hacking/h4cker/master/osint/quick_recon/qrecon_update.txt')
    if Qupdate.status_code == 200:
        print(colored('Checking Update...', 'white'))
        print(colored(Qupdate.text, 'green'))
        time.sleep(3)
    elif Qupdate.status_code == 404:
        print(colored('Checking Update...', 'white'))
        print(colored('Update Available', 'red'))
        print(colored('See https://github.com/The-Art-of-Hacking/h4cker/tree/master/osint', 'red'))
        print(colored('Resuming...', 'red'))
        print("")
        time.sleep(3)

    with open('alpha.txt', 'r') as f:
        alpha = f.read()
    print(colored(alpha, 'yellow'))

else:
    print("")
    print(colored('Please Run the quick_recon Script in the Main Directory', 'red'))
    print(colored('First: cd quick_recon', 'red'))
    print(colored('Then : python3 quick_recon.py', 'red'))
    print(colored('Exiting...', 'red'))
    time.sleep(5)
    sys.exit()

banner1 = """
Quick OSINT Recon of a given domain
̿з=(◕_◕)=ε
"""
print(banner1)

# --------------------------------------------------------------------------------#
# Refuse all cookies so Google cannot track the session between dorks.
class BlockAll(cookiejar.CookiePolicy):
    return_ok = set_ok = domain_return_ok = path_return_ok = lambda self, *args, **kwargs: False
    netscape = True
    rfc2965 = hide_cookie2 = False


TLD = ["com", "com.tw", "co.in"]
beta = random.choice(TLD)
s = requests.Session()
s.cookies.set_policy(BlockAll())

# --------------------------------------------------------------------------------#

key = input(colored('[+] Set Target (site.com) : ', 'white'))
# Persist the target so plugins (e.g. plugins/pasting.py) can read it.
with open("quick_recon.config", "w") as file:
    file.write(key)

print("")
print(colored('[>] Looking For Subdomains...', 'green'))
query = "site:" + key + " -www." + key            # SubTech1
for gamma in search(query, tld=beta, num=30, stop=60, pause=2):
    SubdomainFilter(URL=gamma)
query = "site:*." + key                           # SubTech2
for gamma in search(query, tld=beta, num=30, stop=60, pause=2):
    SubdomainFilter(URL=gamma)
print("")

if os.path.exists(".google-cookie"):
    os.remove(".google-cookie")

print(colored('[>] Looking For Sub-Subdomains...', 'green'))
query = "site:*.*." + key
for gamma in search(query, tld=beta, num=30, stop=60, pause=2):
    SubdomainFilter(URL=gamma)
print("")

if os.path.exists(".google-cookie"):
    os.remove(".google-cookie")

print(colored('[>] Looking For Login/Signup Pages...', 'green'))
query = "inurl:login site:" + key                 # LogTech1
for gamma in search(query, tld=beta, num=30, stop=60, pause=2):
    print(gamma)
query = "site:" + key + " inurl:signup | inurl:register | intitle:Signup"   # LogTech2
for gamma in search(query, tld=beta, num=30, stop=60, pause=2):
    print(gamma)
print("")
if os.path.exists(".google-cookie"):
    os.remove(".google-cookie")

# Sleep ~20s and switch Google TLDs to avoid a temporary block.
print(colored('[!] 20s Sleep to avoid Google Block', 'yellow'))
time.sleep(21)  # ; )
print(colored('[!] Switching Google TLDs...', 'yellow'))
TLD = ["co.ma", "dz", "ru", "ca"]
zolo = random.choice(TLD)
print("")

print(colored('[>] Looking For Directory Listing...', 'green'))              # DirListing
query = "site:" + key + " intitle:index of"
for gamma in search(query, tld=zolo, num=10, stop=60, pause=2):
    print(gamma)
print("")
if os.path.exists(".google-cookie"):
    os.remove(".google-cookie")

print(colored('[>] Looking For Public Exposed Documents...', 'green'))       # Docs
query = "site:" + key + " ext:doc | ext:docx | ext:odt | ext:pdf | ext:rtf | ext:sxw | ext:psw | ext:ppt | ext:pptx | ext:pps | ext:csv"
for gamma in search(query, tld=zolo, num=30, stop=60, pause=2):
    print(gamma)
print("")
if os.path.exists(".google-cookie"):
    os.remove(".google-cookie")

print(colored('[>] Looking For WordPress Entries...', 'green'))              # WP
query = "site:" + key + " inurl:wp- | inurl:wp-content | inurl:plugins | inurl:uploads | inurl:themes | inurl:download"
for gamma in search(query, tld=zolo, num=30, stop=60, pause=2):
    print(gamma)
print("")
if os.path.exists(".google-cookie"):
    os.remove(".google-cookie")

# Run the pasting-sites plugin last; it reads and removes quick_recon.config.
pasting.run()
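The BlockAll policy above is a common way to keep a requests session from storing any cookies Google sets between dorks. A self-contained illustration, separate from the script itself:

import requests
from http import cookiejar

class BlockAll(cookiejar.CookiePolicy):
    # Reject every cookie the server tries to set.
    return_ok = set_ok = domain_return_ok = path_return_ok = lambda self, *args, **kwargs: False
    netscape = True
    rfc2965 = hide_cookie2 = False

s = requests.Session()
s.cookies.set_policy(BlockAll())
s.get("https://www.google.com/")   # any Set-Cookie headers are discarded
print(len(s.cookies))              # 0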
29  osint/quick_recon/quick_recon_cli.py  Normal file
@@ -0,0 +1,29 @@
# quick_recon_cli - run a single, arbitrary Google query/dork from the terminal.
# Coded by Adnane X Tebbaa
# GitHub : https://www.github.com/adnane-x-tebbaa/quick_recon
# Twitter: @TebbaaX

import random

from googlesearch import search
from termcolor import colored

# Google country TLDs to rotate through when querying.
TLD = ["com", "ru", "com.hk"]
beta = random.choice(TLD)

print("")
key = input(colored('[+] Set Query : ', 'white'))
print("")
print(colored('[>] Running...', 'green'))

query = key
for gamma in search(query, tld=beta, num=30, stop=90, pause=2):
    print(gamma)
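In short, the CLI is a thin wrapper around googlesearch: it prompts for an arbitrary query (any of the dorks listed earlier work, for example site:example.com ext:pdf, with example.com as a placeholder) and prints up to the first 90 results, pausing two seconds between requests.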
8  osint/quick_recon/requirements.txt  Normal file
@@ -0,0 +1,8 @@
requests
google
termcolor
txtorcon
colorama
bs4
StringGenerator
pysocks
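These are the Python dependencies for the scripts above; a typical setup would install them with pip, for example pip3 install -r requirements.txt, before running quick_recon.py.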