Mirror of https://github.com/trustedsec/social-engineer-toolkit
add a much better handler for wget_deep
commit 37044e5774 (parent 93ec005895)
3 changed files with 14 additions and 8 deletions
@@ -260,6 +260,6 @@ DNS_SERVER=OFF
 BLEEDING_EDGE=OFF
 #
 ### This will download 1 deep the website with WGET instead of the standard. This may give better results but takes longer
-WGET_DEEP=ON
+WGET_DEEP=OFF

 #######################################################################################################################################
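The option is a plain ON/OFF string; the cloning code later in this diff reads it through check_config("WGET_DEEP") and compares the lowercased value against "on". A minimal, stand-alone sketch of that kind of toggle check (not SET's actual helper, just the pattern):

def wget_deep_enabled(raw_value):
    # Only the literal string "on" (any case) enables the deep clone.
    return raw_value.strip().lower() == "on"

print(wget_deep_enabled("ON"))   # True
print(wget_deep_enabled("OFF"))  # False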
@@ -84,7 +84,8 @@ def value_type(value):
         'METASPLOIT_MODE': False,
         'HARVESTER_LOG': True,
         'STAGE_ENCODING': False,
-        'TRACK_EMAIL_ADDRESSES': False
+        'TRACK_EMAIL_ADDRESSES': False,
+        'WGET_DEEP': True
     }.get(value, "ERROR")
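value_type() resolves an option name through a dict literal, with .get() returning a sentinel for unknown keys; the hunk also adds the trailing comma after 'TRACK_EMAIL_ADDRESSES': False so the new 'WGET_DEEP' entry can be appended. A minimal sketch of the same lookup pattern, using only the entries visible above (the real table is much longer):

def value_type(value):
    # Map option names to a flag; unknown names fall through to "ERROR".
    return {
        'TRACK_EMAIL_ADDRESSES': False,
        'WGET_DEEP': True,
    }.get(value, "ERROR")

print(value_type('WGET_DEEP'))       # True
print(value_type('NO_SUCH_OPTION'))  # ERROR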
@@ -111,7 +112,8 @@ def update_config():
 #######################################################################
 CONFIG_DATE='""" + timestamp + """'\n""")
         for line in init_file:
-            if not line.startswith("#"):
+            try:
+                if not line.startswith("#"):
                     line = line.rstrip()
                     line = line.split("=")
                     setting = line[0]
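update_config() rewrites the configuration file line by line, skipping comments and splitting each remaining line on "=". The new try: means a single malformed line no longer aborts the rewrite; the matching except: pass appears in the next hunk. A rough, self-contained sketch of that loop under assumed file names (the real paths are not shown in the hunk):

# Assumed file names for illustration only.
with open("set.config") as init_file, open("set.config.new", "w") as new_config:
    for line in init_file:
        if not line.startswith("#"):
            try:
                setting, value = line.rstrip().split("=", 1)
                new_config.write(setting + "=" + value + "\n")
            except Exception:
                # A line without "=" (or otherwise malformed) is skipped
                # instead of crashing the whole rewrite.
                pass

One small difference in this sketch: limiting the split to the first "=" keeps values that themselves contain "=" intact, whereas the hunk's plain split("=") keeps only the first field after the name.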
@@ -129,6 +131,7 @@ CONFIG_DATE='""" + timestamp + """'\n""")
                         new_config.write(setting + '="' + value + '"\n')
                     else:
                         new_config.write(setting + '=' + value + '\n')
+            except: pass

         init_file.close()
         new_config.close()
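The two write() branches differ only in whether the value is wrapped in quotes; which branch runs is decided by surrounding code not shown in this hunk, presumably driven by value_type(). For example:

setting, value = "WGET_DEEP", "OFF"
print(setting + '="' + value + '"')  # WGET_DEEP="OFF"
print(setting + '=' + value)         # WGET_DEEP=OFF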
@@ -135,12 +135,14 @@ try:
     # wget is called, but output is sent to devnull to hide "wget:
     # missing URL" error
     DNULL = open(os.devnull, 'w')
-    wget = subprocess.call(
-        'wget', shell=True, stdout=DNULL, stderr=subprocess.STDOUT)
+    wget = subprocess.call('wget', shell=True, stdout=DNULL, stderr=subprocess.STDOUT)

     if wget == 1:
-        subprocess.Popen('%s;cd %s/web_clone/;wget --no-check-certificate -O index.html -c -k -U "%s" "%s";' % (
-            proxy_config, setdir, user_agent, url), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True).wait()
+        print "WE BE CLONIN"
+        if check_config("WGET_DEEP").lower() == "on":
+            subprocess.Popen('%s;wget -H -N -k -p -l 2 -nd -P %s/web_clone/ --no-check-certificate -U "%s" "%s";' % (proxy_config, setdir, user_agent, url), shell=True).wait()
+        else:
+            subprocess.Popen('%s;cd %s/web_clone/;wget --no-check-certificate -O index.html -c -k -U "%s" "%s";' % (proxy_config, setdir, user_agent, url), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True).wait()

     else:
         # if we don't have wget installed we will use python to rip,
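In the new deep-clone branch the work is handed to GNU wget: -H spans hosts, -N enables timestamping, -k converts links for local viewing, -p pulls page requisites, -l 2 caps the recursion depth at two, -nd keeps everything in one directory, -P sets the output directory, -U sets the User-Agent, and --no-check-certificate skips TLS verification. The else branch keeps the old behaviour of fetching only the single page as index.html. A sketch of the same deep-clone call in list form, with placeholder values for setdir, user_agent, and url (the proxy_config prefix from the shell string is omitted here):

import subprocess

setdir = "/root/.set"        # placeholder working directory
user_agent = "Mozilla/5.0"   # placeholder User-Agent
url = "https://example.com"  # placeholder target URL

# Equivalent of the deep-clone shell string above, expressed as an argument list.
subprocess.call([
    "wget", "-H", "-N", "-k", "-p", "-l", "2", "-nd",
    "-P", setdir + "/web_clone/",
    "--no-check-certificate",
    "-U", user_agent,
    url,
])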
@@ -162,7 +164,8 @@ try:
         filewrite.close()

 # if it failed ;(
-except:
+except Exception as e:
+    print e
     pass

 # If the website did not clone properly, exit out.
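The widened handler surfaces the underlying error before execution falls through to the existing "did not clone properly" exit path. Note that print e is Python 2 syntax, matching the rest of the file; the same change under Python 3 would be written print(e).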