get_links_ips.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import urllib2
import re
'''
Usage: script.py [ -h | -v ] url
Given a valid URL (must include the protocol), connect to it and parse the returned content for http/https links.
Connect to each link in order and determine the IP address of the connected server.
Prints results in the following format:
<ip> <url>
'''
# TODO:
# select user agent - option to randomize from list
# group IPs on the same network
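# A possible sketch for the user-agent TODO (assumed, not implemented here;
# USER_AGENTS and url are placeholder names):
#   import random
#   USER_AGENTS = ['Mozilla/5.0 (X11; Linux x86_64)', 'Mozilla/5.0 (Windows NT 10.0; Win64; x64)']
#   req = urllib2.Request(url, headers={'User-Agent': random.choice(USER_AGENTS)})
#   page = urllib2.urlopen(req).read()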
def usage():
    print >> sys.stderr, 'Usage:', sys.argv[0], '[ -h | -v ]', 'url'
    sys.exit(2)
if len(sys.argv) == 3 or len(sys.argv) == 2:
    if len(sys.argv) == 3 and sys.argv[1] == '-v':
        verbose = True
        initurl = sys.argv[2]
    elif sys.argv[1] != '-h':
        verbose = False
        initurl = sys.argv[1]
    else:
        usage()
else:
    usage()
if verbose:
print "- Trying", initurl
# get the html
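# (note: there is no error handling here, so an unreachable initurl exits with a traceback)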
page = urllib2.urlopen(initurl).read()
# find all links in the source code
links = re.findall('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+', page)
# trust only links with a . somewhere in there
# this prevents situations like random http://www in the code
# which gets your local search domain appended when resolved
validlinks = []
for link in links:
    if re.search('\.', link):
        validlinks.append(link)
if verbose:
print "-", len(validlinks), "links found on", initurl
# get the IP associated with each link
for link in validlinks:
    if verbose:
        print '- Connecting to', link
    try:
        q = urllib2.urlopen(link)
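        # The peer IP below is read through private urllib2/httplib attributes,
        # which works on CPython 2 but is not a public API. A possible alternative
        # (an assumption; it resolves the hostname via DNS rather than reporting the
        # actually connected peer, and needs `import socket, urlparse` at the top):
        #   socket.gethostbyname(urlparse.urlparse(link).hostname)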
        print q.fp._sock.fp._sock.getpeername()[0] + ' ' + link
    except Exception:
        pass
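# Example invocation (example.com used as a placeholder URL):
#   python get_links_ips.py -v http://example.com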