scrapereviews.py
#!/usr/bin/python
# Scrape review information from Gerrit.

import datetime
import json
import time

import feedutils
import sql
import utility


def reviews():
    cursor = feedutils.GetCursor()

    # Walk through the last seven days, one day at a time.
    day = datetime.datetime.now()
    day -= datetime.timedelta(days=7)

    while day < datetime.datetime.now():
        print('Processing %s/%s/%s' % (day.year, day.month, day.day))

        # Fetch the day's merged-review dump, which maps each reviewer's
        # username to a list of review records.
        data = utility.read_remote_file(
            'http://www.rcbops.com/gerrit/merged/%s/%s/%s_reviews.json'
            % (day.year, day.month, day.day))
        j = json.loads(data)

        for username in j:
            # Count this user's reviews per project, plus an overall total.
            summary = {}
            for review in j[username]:
                summary.setdefault(review['project'], 0)
                summary.setdefault('__total__', 0)
                summary[review['project']] += 1
                summary['__total__'] += 1

            # Replace any existing summary row for this user and day.
            # Note that values are interpolated directly into the SQL text.
            cursor.execute('delete from reviewsummary where '
                           'username="%s" and day=date(%s);'
                           % (username, day))
            cursor.execute('insert into reviewsummary'
                           '(day, username, data, epoch) '
                           'values (date(%s), "%s", \'%s\', %d);'
                           % (day, username, json.dumps(summary),
                              int(time.time())))
            cursor.execute('commit;')

        # Advance to the next day; without this the loop never terminates.
        day += datetime.timedelta(days=1)


if __name__ == '__main__':
    reviews()
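
For reference, below is a minimal, standalone sketch (not part of the original script) of the data shapes it assumes: the per-day JSON maps each reviewer's username to a list of review records carrying at least a 'project' field, and the script reduces that to a per-project count plus a '__total__' key before storing it in the reviewsummary data column. The reviewer and project names here are invented for illustration.

# Illustrative only: shows the input shape and the summary the script builds.
import json

sample = {
    'somereviewer': [
        {'project': 'openstack/nova'},
        {'project': 'openstack/nova'},
        {'project': 'openstack/glance'},
    ],
}

for username in sample:
    summary = {}
    for review in sample[username]:
        summary.setdefault(review['project'], 0)
        summary.setdefault('__total__', 0)
        summary[review['project']] += 1
        summary['__total__'] += 1
    print('%s %s' % (username, json.dumps(summary, sort_keys=True)))
    # somereviewer {"__total__": 3, "openstack/glance": 1, "openstack/nova": 2}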