scraper.py
__author__ = "Hryhorii Mosia (mosia.dev@gmail.com)"

import csv

import requests
from bs4 import BeautifulSoup
from tqdm import tqdm

from settings import FILE_NAME

URL = 'https://free-proxy-list.net/'

class FreeProxyScraper:
    def __init__(self, url=URL, file_name=FILE_NAME):
        self.url = url
        self.file_name = file_name

    def get_html(self, url=None, timeout=10):
        """Fetch the page and return its HTML, or None on failure."""
        try:
            response = requests.get(url or self.url, timeout=timeout)
            if response.status_code == 200:
                return response.text
        except requests.exceptions.RequestException as e:
            print(e)
        return None

    def scraping(self):
        """Parse the proxy table into a list of dicts, one per proxy."""
        print('Proxy scraping...')
        try:
            html = self.get_html()
            if html is None:
                return []
            soup = BeautifulSoup(html, 'html.parser')
            rows = soup.find('table', id='proxylisttable').find_all('tr')
            # Skip header rows, which contain <th> cells instead of <td>.
            tds = [[td.get_text() for td in row.find_all('td')]
                   for row in rows if row.find_all('td')]
            return [dict(ip_address=td[0], port=int(td[1]), code=td[2],
                         country=td[3], anonymity=td[4], google=td[5],
                         https=td[6], last_checked=td[7])
                    for td in tqdm(tds)]
        except Exception as e:
            print(e)
            return []

    def save_to_csv(self, data):
        # newline='' prevents csv.writer from emitting blank rows on Windows.
        with open(self.file_name, 'w', newline='') as csvfile:
            writer = csv.writer(csvfile)
            writer.writerow(('IP Address', 'Port', 'Code', 'Country',
                             'Anonymity', 'Google', 'Https', 'Last Checked'))
            for item in data:
                writer.writerow((
                    item['ip_address'],
                    item['port'],
                    item['code'],
                    item['country'],
                    item['anonymity'],
                    item['google'],
                    item['https'],
                    item['last_checked'],
                ))
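

# A minimal usage sketch, not part of the original file: running the module
# directly scrapes the list and writes it to FILE_NAME from settings. The
# guard assumes FreeProxyScraper is used as defined above.
if __name__ == '__main__':
    scraper = FreeProxyScraper()
    proxies = scraper.scraping()
    if proxies:
        scraper.save_to_csv(proxies)
        print(f'Saved {len(proxies)} proxies to {scraper.file_name}')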