-
Notifications
You must be signed in to change notification settings - Fork 1
/
namescraper.py
114 lines (92 loc) · 4.46 KB
/
namescraper.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
from scrapers.viewdns import ViewDNSScraper
from scrapers.securitytrails import SecurityTrailsScraper
from scrapers.whoisxmlapi import WhoisXMLAPIScraper
from selenium.webdriver.support.ui import WebDriverWait
import undetected_chromedriver as uc
import argparse
import sys
# Maps each CLI module name to the lookup types its scraper class advertises
# (each scraper exposes a LOOKUP_MAP of supported lookup names).
SUPPORTED_LOOKUPS = {
    module_name: scraper_cls.LOOKUP_MAP
    for module_name, scraper_cls in (
        ('securitytrails', SecurityTrailsScraper),
        ('viewdns', ViewDNSScraper),
        ('whoisxmlapi', WhoisXMLAPIScraper),
    )
}
def _build_parser():
    """Build the CLI parser: global options plus one subcommand per scraper module."""
    # NOTE: the original passed 'NameScraper is...' as `prog`, which corrupted the
    # usage line; the description belongs in `description`.
    parser = argparse.ArgumentParser(
        prog='NameScraper',
        description='Scrape name/DNS lookup results from supported web services.')
    parser.add_argument('--timeout', default=10, type=int,
                        help='Default timeout to use in Selenium when looking for elements in the page.')
    parser.add_argument('--output', help='Output results to a file.')
    parser.add_argument('--queriesfile',
                        help='A file with lines to be looked up.')
    parser.add_argument('--query', help='The text to query.')
    parser.add_argument('--headless', action='store_true',
                        help='Run the webdriver in headless mode.')
    parser.add_argument('--sessionfile', default='session.txt',
                        help='File with the session cookie for the selected module.')
    subparsers = parser.add_subparsers(dest='module')
    # All modules share the same --lookup option shape; build the subparsers in a
    # loop instead of three copy-pasted stanzas.
    for module_name, help_text in (
            ('securitytrails', 'SecurityTrails scraper.'),
            ('viewdns', 'ViewDNS scraper.'),
            ('whoisxmlapi', 'WhoisXMLAPI scraper.')):
        sub = subparsers.add_parser(module_name, help=help_text)
        sub.add_argument('--lookup', default='subdomains',
                         choices=SUPPORTED_LOOKUPS[module_name],
                         help='Type of the lookup to be performed.')
    return parser


def _read_session_cookie(path):
    """Return the session cookie stored in *path*, with trailing whitespace stripped."""
    with open(path) as sessionfile:
        return sessionfile.read().rstrip()


def _select_module(args):
    """Return (display_name, scraper_class, specific_options) for the chosen module.

    Exits with status 1 when no valid subcommand was given. SecurityTrails and
    WhoisXMLAPI additionally need a session cookie read from --sessionfile.
    """
    specific_options = {}
    if args.module == 'securitytrails':
        specific_options['session_cookie'] = _read_session_cookie(args.sessionfile)
        return 'SecurityTrails', SecurityTrailsScraper, specific_options
    if args.module == 'viewdns':
        return 'ViewDNS', ViewDNSScraper, specific_options
    if args.module == 'whoisxmlapi':
        specific_options['session_cookie'] = _read_session_cookie(args.sessionfile)
        return 'WhoisXMLAPI', WhoisXMLAPIScraper, specific_options
    print('Unknown module selected. Please select one of the available modules (check --help).')
    sys.exit(1)


def _load_queries(args):
    """Return the list of query strings from --queriesfile or --query.

    Exits with status 1 when neither source was provided.
    """
    if not args.queriesfile and not args.query:
        print('[-] You must specify at least one of --queriesfile or --query to run the tool.')
        sys.exit(1)
    if args.queriesfile:
        with open(args.queriesfile) as queries_file:
            return [line.rstrip() for line in queries_file]
    return [args.query]


def main():
    """CLI entry point: parse arguments, run the selected scraper over all queries."""
    args = _build_parser().parse_args()
    module, scraper_class, specific_options = _select_module(args)
    print(f'[+] Selected module: "{module}"')
    lookup = args.lookup

    # Initialize configs
    output_file = None
    if args.output is not None:
        output_file = open(args.output, 'a+', encoding='utf-8')
        print(f'[+] Saving results to file "{args.output}"')

    driver = None
    try:
        queries = _load_queries(args)

        # Initialize Undetected Chrome with provided options
        options = uc.ChromeOptions()
        if args.headless:
            options.add_argument('--headless')
        driver = uc.Chrome(use_subprocess=True, options=options)
        driver_wait = WebDriverWait(driver, args.timeout)

        # Initialize scraper object from selected module's class
        scraper = scraper_class(
            driver, driver_wait,
            output_file=output_file,
            **specific_options
        )

        # Perform queries
        for query in queries:
            print(f'[+] Looking up "{query}" ({lookup})')
            scraper.lookup(query, lookup_type=lookup)
    finally:
        # Always release resources: the original leaked the output file on every
        # error path and never shut down the Chrome subprocess.
        if driver is not None:
            driver.quit()
        if output_file is not None:
            output_file.close()


if __name__ == '__main__':
    main()