fix: [faup] fix new return types (bytes to str)
Terrtia committed May 6, 2019
1 parent a4c03b4 commit 2606220
Showing 6 changed files with 87 additions and 16 deletions.
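
The fix applies the same defensive pattern in every touched module: newer pyfaup bindings can return bytes instead of str for fields such as 'domain', 'tld' and 'port', so each access is wrapped in a try/except that decodes when possible. A minimal sketch of the idea follows; the to_str helper is illustrative only and is not part of this commit, which inlines the try/except at each call site.

    def to_str(value):
        # Decode bytes returned by faup to str; leave str (or None) untouched.
        if isinstance(value, bytes):
            return value.decode()
        return value

    # Hypothetical usage, mirroring the hunks below:
    # faup.decode(url)
    # domain = to_str(faup.get()['domain'])
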
10 changes: 10 additions & 0 deletions bin/Credential.py
@@ -124,6 +124,11 @@
            for url in sites:
                faup.decode(url)
                domain = faup.get()['domain']
                ## TODO: # FIXME: remove me
                try:
                    domain = domain.decode()
                except:
                    pass
                if domain in creds_sites.keys():
                    creds_sites[domain] += 1
                else:
@@ -143,6 +148,11 @@
                maildomains = re.findall("@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,20}", cred.lower())[0]
                faup.decode(maildomains)
                tld = faup.get()['tld']
                ## TODO: # FIXME: remove me
                try:
                    tld = tld.decode()
                except:
                    pass
                server_statistics.hincrby('credential_by_tld:'+date, tld, 1)
        else:
            publisher.info(to_print)
19 changes: 16 additions & 3 deletions bin/LibInjection.py
@@ -29,8 +29,17 @@ def analyse(url, path):
    faup.decode(url)
    url_parsed = faup.get()
    pprint.pprint(url_parsed)
    resource_path = url_parsed['resource_path']
    query_string = url_parsed['query_string']
    ## TODO: # FIXME: remove me
    try:
        resource_path = url_parsed['resource_path'].encode()
    except:
        resource_path = url_parsed['resource_path']

    ## TODO: # FIXME: remove me
    try:
        query_string = url_parsed['query_string'].encode()
    except:
        query_string = url_parsed['query_string']

    result_path = {'sqli' : False}
    result_query = {'sqli' : False}
@@ -56,7 +65,11 @@ def analyse(url, path):
        p.populate_set_out(msg, 'Tags')

        #statistics
        tld = url_parsed['tld']
        ## TODO: # FIXME: remove me
        try:
            tld = url_parsed['tld'].decode()
        except:
            tld = url_parsed['tld']
        if tld is not None:
            date = datetime.datetime.now().strftime("%Y%m")
            server_statistics.hincrby('SQLInjection_by_tld:'+date, tld, 1)
4 changes: 4 additions & 0 deletions bin/Mail.py
@@ -95,6 +95,10 @@

            faup.decode(mail)
            tld = faup.get()['tld']
            try:
                tld = tld.decode()
            except:
                pass
            server_statistics.hincrby('mail_by_tld:'+date, tld, MX_values[1][mail])

    else:
19 changes: 17 additions & 2 deletions bin/SQLInjectionDetection.py
@@ -68,10 +68,20 @@ def analyse(url, path):
    result_query = 0

    if resource_path is not None:
        result_path = is_sql_injection(resource_path.decode('utf8'))
        ## TODO: # FIXME: remove me
        try:
            resource_path = resource_path.decode()
        except:
            pass
        result_path = is_sql_injection(resource_path)

    if query_string is not None:
        result_query = is_sql_injection(query_string.decode('utf8'))
        ## TODO: # FIXME: remove me
        try:
            query_string = query_string.decode()
        except:
            pass
        result_query = is_sql_injection(query_string)

    if (result_path > 0) or (result_query > 0):
        paste = Paste.Paste(path)
@@ -89,6 +99,11 @@ def analyse(url, path):
        #statistics
        tld = url_parsed['tld']
        if tld is not None:
            ## TODO: # FIXME: remove me
            try:
                tld = tld.decode()
            except:
                pass
            date = datetime.datetime.now().strftime("%Y%m")
            server_statistics.hincrby('SQLInjection_by_tld:'+date, tld, 1)

16 changes: 10 additions & 6 deletions bin/Web.py
@@ -94,18 +94,22 @@ def avoidNone(a_string):
            faup.decode(url)
            domain = faup.get_domain()
            subdomain = faup.get_subdomain()
            f1 = None

            publisher.debug('{} Published'.format(url))

            if f1 == "onion":
                print(domain)

            if subdomain is not None:
                subdomain = subdomain.decode('utf8')
                ## TODO: # FIXME: remove me
                try:
                    subdomain = subdomain.decode()
                except:
                    pass

            if domain is not None:
                domain = domain.decode('utf8')
                ## TODO: # FIXME: remove me
                try:
                    domain = domain.decode()
                except:
                    pass
                domains_list.append(domain)

            hostl = avoidNone(subdomain) + avoidNone(domain)
35 changes: 30 additions & 5 deletions var/www/modules/hiddenServices/Flask_hiddenServices.py
@@ -115,7 +115,12 @@ def get_type_domain(domain):
def get_domain_from_url(url):
    faup.decode(url)
    unpack_url = faup.get()
    domain = unpack_url['domain'].decode()
    domain = unpack_url['domain']
    ## TODO: FIXME remove me
    try:
        domain = domain.decode()
    except:
        pass
    return domain

def get_last_domains_crawled(type):
@@ -418,8 +423,19 @@ def create_spider_splash():
    # get service_type
    faup.decode(url)
    unpack_url = faup.get()
    domain = unpack_url['domain'].decode()
    if unpack_url['tld'] == b'onion':
    ## TODO: # FIXME: remove me
    try:
        domain = unpack_url['domain'].decode()
    except:
        domain = unpack_url['domain']

    ## TODO: # FIXME: remove me
    try:
        tld = unpack_url['tld'].decode()
    except:
        tld = unpack_url['tld']

    if tld == 'onion':
        service_type = 'onion'
    else:
        service_type = 'regular'
@@ -694,10 +710,19 @@ def show_domain():
    port = request.args.get('port')
    faup.decode(domain)
    unpack_url = faup.get()
    domain = unpack_url['domain'].decode()

    ## TODO: # FIXME: remove me
    try:
        domain = unpack_url['domain'].decode()
    except:
        domain = unpack_url['domain']

    if not port:
        if unpack_url['port']:
            port = unpack_url['port'].decode()
            try:
                port = unpack_url['port'].decode()
            except:
                port = unpack_url['port']
        else:
            port = 80
    try:
