-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathcontrol.py
79 lines (58 loc) · 1.93 KB
/
control.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
from scholar import csv
from scholar import url_get
from scholar import txt
import social_trends
import time
import downloads
import json
import random
import pickle
PICKLE_FILE = "control.pickle"

# Query -> JSON-string cache, persisted across runs in PICKLE_FILE.
# Start with an empty cache if the file is missing (first run) or truncated;
# the original unconditional load crashed with FileNotFoundError on first run
# and leaked the file handle by never closing it.
# NOTE(review): pickle is only safe because this file is produced locally by
# this module itself -- never load a pickle from an untrusted source.
try:
    with open(PICKLE_FILE, "rb") as _cache_file:
        cache = pickle.load(_cache_file)
except (FileNotFoundError, EOFError):
    cache = {}
def main(query):
    """Return a JSON string of papers matching *query* that show recent
    social activity (tweets or downloads), least active first.

    Results are memoized in the module-level ``cache`` dict, which is
    re-persisted to ``PICKLE_FILE`` after every cache miss.

    Parameters:
        query: search string passed to the Scholar CSV scraper.

    Returns:
        A ``json.dumps`` string: a list of paper dicts with keys
        'title', 'authors', 'urls', 'venue', 'tweets'.
    """
    if query in cache:
        return cache[query]

    # --- Fetch up to 3 papers from Scholar as pipe-delimited rows. ---
    article_list = []
    for line in csv(query, author='', count=3):
        fields = line.split('|')
        # A well-formed row needs at least 9 fields (we read indexes 0..8).
        # The original code only guarded index 3 and crashed with IndexError
        # on short/header rows; skip them instead.
        if len(fields) < 9:
            continue
        title = fields[0]
        authors = fields[7].split(', ')
        venue = fields[8].strip()
        version_urls = [fields[1]]
        # fields[3] holds the alternate-version count; fields[5] the
        # versions URL -- TODO confirm against the scholar module's format.
        if int(fields[3]) > 0:
            version_data = url_get(fields[5], author='', count=20)
            # Randomized delay so we don't hammer Scholar and get throttled.
            time.sleep(random.uniform(2, 5))
            for subline in version_data:
                version_urls.append(subline.split('|')[1])
        article_list.append((authors, title, version_urls, venue))

    # --- Score each paper; keep only papers with some social activity. ---
    toSort = []
    for authors, title, urls, venue in article_list:
        tw = social_trends.SocialTrends(authors, title, urls)
        (score, tweets) = tw.paper_tweets()
        recent_downloads = downloads.downloads(title)
        if score > 0 or recent_downloads[0] > 0:
            json_paper = {
                'title': title,
                'authors': authors,
                'urls': urls,
                'venue': venue,
                'tweets': [],
            }
            for (tid, tweet) in tweets:
                screen_name = tweet['user']['screen_name']
                json_paper['tweets'].append({
                    'author': screen_name,
                    'content': tweet['text'],
                    'url': "http://twitter.com/" + screen_name
                           + "/status/" + tweet['id_str'],
                })
            toSort.append((score, recent_downloads[0], json_paper))
        # Throttle between papers (each iteration hits external services).
        time.sleep(2)

    # Ascending combined rank: tweet score dominates, downloads break ties.
    ranked = sorted(toSort, key=lambda x: x[0] * 10000 + x[1])
    toReturn = json.dumps([entry[2] for entry in ranked])

    cache[query] = toReturn
    # 'with' guarantees the cache file is flushed and closed (the original
    # open() without a context manager leaked the handle).
    with open(PICKLE_FILE, 'wb') as cache_file:
        pickle.dump(cache, cache_file)
    return toReturn
# Script entry point: run a single hard-coded sample query when executed
# directly (result is returned and cached, not printed).
if __name__ == '__main__':
    main('Retweeting Fukushima')