userCrawler.py
import os
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
import re
import urllib2
import time
from bs4 import BeautifulSoup
import MySQLdb
#connect db
db = MySQLdb.connect(host="localhost", user="USER", passwd="PWD", db="sina_weibo")
cursor = db.cursor()
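#The script assumes two tables in the sina_weibo database, roughly of this shape
#(column names here are illustrative; only the column positions are implied by the code below):
#  user(id, accountID, nickName, isfetched, sex)   -- isfetched=0 marks users still to crawl
#  relation(id, follower_uid, followed_uid)        -- id is an auto-increment key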
#config
FP_COOKIE = open("cookies.txt","r")
ARGS_COOKIE = FP_COOKIE.readline()
FP_COOKIE.close()
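#ARGS_COOKIE should now hold the raw Cookie header string of a logged-in weibo.com
#session (first line of cookies.txt); fetchUrl() sends it verbatim with every request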
MY_UID = "YOUR_ID"  #uid of the cookie owner; this account is skipped when crawling
#class: User
class User:
    def __init__(self):
        self.id = 0
        self.accountID = ''
        self.nickName = ''
        self.sex = ''
#fetch html
def fetchUrl(url):
    try:
        request = urllib2.Request(url)
        request.add_header('Cookie', ARGS_COOKIE)
        response = urllib2.urlopen(request)
        return response.read()
    except Exception:
        print '[error]Fetch html failed: ' + url
        return ''
#get page num
def getPageNum(html):
    start = html.find('W_pages') - 13
    end = html.find('/div>', start) + 5
    pageDiv = html[start:end]
    #the pager markup is embedded as an escaped JS string, so un-escape it first
    pageDiv = pageDiv.replace("\\t", "\t")
    pageDiv = pageDiv.replace("\\r\\n", "")
    pageDiv = pageDiv.replace("\\/", "/")
    pageDiv = pageDiv.replace('\\"', '"')
    soup = BeautifulSoup(pageDiv)
    pageBtn = soup.findAll('a', 'S_bg1')
    if len(pageBtn) > 0:
        return pageBtn[-1].contents[0]
    else:
        return 0
#get user list from html
def getUserList(html):
    start = html.find('cnfList') - 12
    end = html.find('W_pages') - 27
    weiboUl = html[start:end]
    weiboUl = weiboUl.replace("\\t", "\t")
    weiboUl = weiboUl.replace("\\r\\n", "")
    weiboUl = weiboUl.replace("\\/", "/")
    weiboUl = weiboUl.replace('\\"', '"')
    soup = BeautifulSoup(weiboUl)
    weiboLi = soup.findAll("li")
    result = []
    for eachUser in weiboLi:
        #parse uid, nickname and sex from each list item
        reg = re.compile('^.+?uid=(?P<id>[^ ]*)&fnick=(?P<nickName>[^ ]*)&sex=(?P<sex>[^ ]*)".+?')
        regMatch = reg.match(str(eachUser))
        if regMatch is None:
            continue
        linebits = regMatch.groupdict()
        result.append(linebits)
    return result
#get accountID from html
def getAccountID(html):
    start = html.find('pftb_ul') - 12
    end = html.find('/ul>', start) + 4
    weiboTab = html[start:end]
    weiboTab = weiboTab.replace("\\t", "\t")
    weiboTab = weiboTab.replace("\\r\\n", "")
    weiboTab = weiboTab.replace("\\/", "/")
    weiboTab = weiboTab.replace('\\"', '"')
    soup = BeautifulSoup(weiboTab)
    aList = soup.findAll('a')
    if len(aList) == 0:
        return 0
    reg = re.compile('^.+?\/p\/(?P<accountID>[^ ]*)\/home.+?')
    regMatch = reg.match(str(aList[0]))
    if regMatch is None:
        return 0
    linebits = regMatch.groupdict()
    return linebits['accountID']
#read users from database
cursor.execute("select * from user")
userTable = cursor.fetchall()
unfetchedUser = []
allUser = []
newUser = []
for row in userTable:
    tmp = User()
    tmp.id = row[0]
    tmp.accountID = row[1]
    allUser.append(long(row[0]))  #all known users
    if row[3] == 0:
        unfetchedUser.append(tmp)  #users whose follow/fans lists have not been crawled yet
if len(unfetchedUser) == 0:
    print '[info]no unfetched user, exit.'
    exit()
#start fetching users: for each unfetched user, crawl its follow list and fans list,
#insert any new users and relations into the database, then mark the user as fetched
for eachUser in unfetchedUser:
    #ignore the cookie owner
    if str(eachUser.id) == str(MY_UID):
        continue
    print '[info]Start fetch: UID - ' + str(eachUser.id) + '...'
    #fetch follow list, and get accountID along the way
    print '[info]Fetch Follow Page1...'
    FOLLOW_INIT_PATH = "http://weibo.com/" + str(eachUser.id) + "/follow?refer=usercard&wvr=5&from=usercardnew"
    rawcontents = fetchUrl(FOLLOW_INIT_PATH)
    followPageNum = getPageNum(rawcontents)
    followList = getUserList(rawcontents)
    eachUser.accountID = getAccountID(rawcontents)  #get current user's accountID
    FOLLOW_PATH = "http://weibo.com/p/" + str(eachUser.accountID) + "/follow?page="
    print '[info]Found User Num: ' + str(len(followList))
    if int(followPageNum) > 1:
        for x in range(2, int(followPageNum) + 1):
            time.sleep(1.5)
            print '[info]Fetch Follow Page' + str(x) + '...'
            rawcontents = fetchUrl(FOLLOW_PATH + str(x))
            tmpList = getUserList(rawcontents)
            print '[info]Found User Num: ' + str(len(tmpList))
            if len(tmpList) == 0:
                print '[warning]fetch user failed, break'
                break
            followList.extend(tmpList)
    #update db with the follow list
    print '[info]Found Follow User Num: ' + str(len(followList))
    for follow in followList:
        if long(follow['id']) not in allUser:
            sql = "insert into user values(%s,'',%s,0,%s)"
            try:
                cursor.execute(sql, [follow['id'], follow['nickName'], follow['sex']])
            except Exception:
                print '[error]insert [uid:' + follow['id'] + '] failed.'
            allUser.append(long(follow['id']))
        sql = "insert into relation values(null,%s,%s)"
        cursor.execute(sql, [eachUser.id, follow['id']])
    db.commit()
    #fetch fans
    print '[info]Fetch Fans Page1...'
    FANS_PATH = "http://weibo.com/p/" + str(eachUser.accountID) + "/follow?relate=fans&page="
    rawcontents = fetchUrl(FANS_PATH + "1")
    fansPageNum = getPageNum(rawcontents)
    fansList = getUserList(rawcontents)
    print '[info]Found User Num: ' + str(len(fansList))
    if int(fansPageNum) > 1:
        for x in range(2, int(fansPageNum) + 1):
            time.sleep(1.5)
            print '[info]Fetch Fans Page' + str(x) + '...'
            rawcontents = fetchUrl(FANS_PATH + str(x))
            tmpList = getUserList(rawcontents)
            print '[info]Found User Num: ' + str(len(tmpList))
            if len(tmpList) == 0:
                print '[warning]fetch user failed, break'
                break
            fansList.extend(tmpList)
    #update db with the fans list
    print '[info]Found Fans User Num: ' + str(len(fansList))
    for fan in fansList:
        if long(fan['id']) not in allUser:
            sql = "insert into user values(%s,'',%s,0,%s)"
            try:
                cursor.execute(sql, [fan['id'], fan['nickName'], fan['sex']])
            except Exception:
                print '[error]insert [uid:' + fan['id'] + '] failed.'
            allUser.append(long(fan['id']))
        sql = "insert into relation values(null,%s,%s)"
        cursor.execute(sql, [fan['id'], eachUser.id])
    try:
        db.commit()
    except Exception:
        print '[error]update db failed'
        continue
    #lastly, mark the current user as fetched
    sql = "update user set accountID=%s,isfetched=1 where id=%s"
    cursor.execute(sql, [eachUser.accountID, eachUser.id])
    db.commit()
    time.sleep(5)
#close db
cursor.close()
db.close()
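#Usage sketch (assuming the table layout noted above): seed the user table with at least
#one row whose isfetched column is 0, fill in the MySQL credentials, MY_UID and
#cookies.txt, then run the script with a Python 2 interpreter, e.g.:
#  python2 userCrawler.py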