Skip to content

Commit

Permalink
updates
Browse files Browse the repository at this point in the history
  • Loading branch information
Xiaoyang Feng committed Jan 19, 2016
1 parent 96e1d56 commit 5b97d27
Show file tree
Hide file tree
Showing 5 changed files with 126 additions and 0 deletions.
1 change: 1 addition & 0 deletions 24h.json

Large diffs are not rendered by default.

28 changes: 28 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
# 24 Hours
A 24-hour poem composed of 1,440 global tweets, part of the gallery show 'Clock' at Frog NY 2015

[DEMO](http://xyfeng.github.io/24hours/) ( Data is queried at New York on April 10th, 2015, it happened to be the Apple Watch Pre-Order day )

## How the data was collected
I used a Python script to crawl data from the Twitter search engine in [Iron](http://www.iron.io/) and store it in my [Parse](http://www.parse.com/) account.

## Script
IRON: `iron_worker upload iron`

### Thermal Printer
Download [Gimp-Print](http://gimp-print.sourceforge.net/)

Enable Cups Web Interfaces `cupsctl WebInterface=yes`

Go to URL `http://127.0.0.1:631/`

Add Printer `Choose 'Raw' as the make`

Check Printer Status `lpstat -p -d`

Print Txt File `lp -d Thermal_Label_Printer xxxx.txt`

### Linux (Setup using Raspberry Pi)
1. The printer uses a WinBond CDC USB chip, so Linux mounts it automatically as /dev/usb/lp0.
2. $ sudo chmod a+rw /dev/usb/lp0
3. $ echo 'Testing 1 2 3' > /dev/usb/lp0
34 changes: 34 additions & 0 deletions printLive.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
import datetime
import time
import json
import httplib
import urllib
import os

oneminuteearlier = datetime.datetime.now() - datetime.timedelta(minutes=1)
printTime = oneminuteearlier.strftime('%l:%M%p').replace(' ','')

while True:
currTime = time.strftime('%l:%M%p').replace(' ','')
if currTime != printTime:
printTime = currTime
connection = httplib.HTTPSConnection('api.parse.com', 443)
connection.connect()
params = urllib.urlencode({"where":json.dumps({
"timestamp": printTime
})})
connection.request('GET', '/1/classes/tweets?%s' % params, '', {
"X-Parse-Application-Id": "9apBIWvcBtevWLeFPifONqYg8kc1gvojPhBRPytI",
"X-Parse-REST-API-Key": "ftbwSwgPKg4gq9sVo8twze4fyDpL2McJhThQhA5Z"
})
result = json.loads(connection.getresponse().read())['results'][0]

print result['tweet']

# Save to File
text_file = open("toPrint.txt", "w")
text_file.write("%s\n\n\n" % result['tweet'].encode('utf-8'))
text_file.close()

# Print
os.system("lp -d Thermal_Label_Printer toPrint.txt")
18 changes: 18 additions & 0 deletions printTweet.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
import time
import json
import os

# Load the pre-collected tweet table: a list of 24 dicts (one per hour),
# each mapping a "%l:%M%p" timestamp string (e.g. "4:09PM") to tweet text.
data = json.load(open('24h.json'))

# Seed with the current minute so printing starts on the next minute change.
printTime = time.strftime('%l:%M%p').replace(' ', '')

while True:
    currTime = time.strftime('%l:%M%p').replace(' ', '')
    if currTime != printTime:
        printTime = currTime
        # BUG FIX: recompute the hour index every minute. The original
        # computed it once at startup, so after the wall-clock hour rolled
        # over it kept reading tweets from the stale hour bucket.
        hourIndex = int(time.strftime('%H'))
        msg = data[hourIndex].get(printTime)
        if msg is None:
            # No tweet was collected for this minute; skip instead of
            # crashing with KeyError as the original would.
            continue
        with open("toPrint.txt", "w") as text_file:
            text_file.write("%s\n\n\n" % msg.encode('utf-8'))
        os.system("lp -d Thermal_Label_Printer toPrint.txt")
    # Sleep between checks; the original busy-waited at full CPU speed.
    time.sleep(1)
45 changes: 45 additions & 0 deletions queryTimeStamp.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
from TwitterSearch import *
from datetime import datetime
import time
import json

# One dict per hour of the day; index 0 == hour 00, index 23 == hour 23.
msgList = []
# Seconds to wait between Twitter queries, to stay under rate limits.
sleep_for = 5

# Shared search order: most recent English tweet, one result per query.
tso = TwitterSearchOrder()
tso.set_language('en')
tso.set_locale('ja')
tso.set_result_type('recent')
tso.set_count(1)

# NOTE(review): API credentials are hard-coded in source; they should be
# loaded from environment variables and these keys rotated/revoked.
ts = TwitterSearch(
    consumer_key = 'F8QnORsYQqXiY2zu7FDmxfRsL',
    consumer_secret = '5v7l0GxfUb0ukigxTsvwiGungqWfZjBGv5fSygnXOUGH5JFbph',
    access_token = '17522673-AmQBBeDcHwkwJVtkwxhOW8iESiW5xoVlZeLNMOEIp',
    access_token_secret = 'hRyBq99BDmjXFaCnKTZ3YGVEUaFQz9bP0T7whvV0FFOv7'
)

# For each of the 1440 minutes of a day, search Twitter for a hashtag of
# that minute's clock face (e.g. "#4:09PM") and keep the first hit.
for h in range(0, 24):
    hourList = {}
    for m in range(0, 60):
        # Dummy date (2000-01-01) is only used to format the time of day.
        one = datetime(2000, 1, 1, h, m, 0, 0)
        # "%l:%M%p" gives e.g. " 4:09PM"; strip the leading space pad.
        timestamp = one.strftime("%l:%M%p").replace (" ", "")
        print timestamp
        search_word = '#'+timestamp
        tso.set_keywords([search_word])
        try:
            response = ts.search_tweets(tso)
            # print( "rate-limiting status: %s" % ts.get_metadata()['x-rate-limit-remaining'] )

            # Only the first (most recent) matching status is kept; if the
            # search returns nothing, this minute is simply absent from the map.
            for tweet in response['content']['statuses']:
                hourList[timestamp] = '@%s: %s' % ( tweet['user']['screen_name'].encode('utf-8'), tweet['text'].encode('utf-8') )
                break

        except TwitterSearchException as e:
            # Log and continue: a failed minute should not abort the crawl.
            print(e)

        time.sleep(sleep_for)
    msgList.append(hourList)

# Persist the full 24-hour table for printTweet.py to consume.
with open("24h.json", "w") as outfile:
    json.dump(msgList, outfile)

0 comments on commit 5b97d27

Please sign in to comment.