diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..412eeda --- /dev/null +++ b/.gitattributes @@ -0,0 +1,22 @@ +# Auto detect text files and perform LF normalization +* text=auto + +# Custom for Visual Studio +*.cs diff=csharp +*.sln merge=union +*.csproj merge=union +*.vbproj merge=union +*.fsproj merge=union +*.dbproj merge=union + +# Standard to msysgit +*.doc diff=astextplain +*.DOC diff=astextplain +*.docx diff=astextplain +*.DOCX diff=astextplain +*.dot diff=astextplain +*.DOT diff=astextplain +*.pdf diff=astextplain +*.PDF diff=astextplain +*.rtf diff=astextplain +*.RTF diff=astextplain diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..b9d6bd9 --- /dev/null +++ b/.gitignore @@ -0,0 +1,215 @@ +################# +## Eclipse +################# + +*.pydevproject +.project +.metadata +bin/ +tmp/ +*.tmp +*.bak +*.swp +*~.nib +local.properties +.classpath +.settings/ +.loadpath + +# External tool builders +.externalToolBuilders/ + +# Locally stored "Eclipse launch configurations" +*.launch + +# CDT-specific +.cproject + +# PDT-specific +.buildpath + + +################# +## Visual Studio +################# + +## Ignore Visual Studio temporary files, build results, and +## files generated by popular Visual Studio add-ons. + +# User-specific files +*.suo +*.user +*.sln.docstates + +# Build results + +[Dd]ebug/ +[Rr]elease/ +x64/ +build/ +[Bb]in/ +[Oo]bj/ + +# MSTest test Results +[Tt]est[Rr]esult*/ +[Bb]uild[Ll]og.* + +*_i.c +*_p.c +*.ilk +*.meta +*.obj +*.pch +*.pdb +*.pgc +*.pgd +*.rsp +*.sbr +*.tlb +*.tli +*.tlh +*.tmp +*.tmp_proj +*.log +*.vspscc +*.vssscc +.builds +*.pidb +*.log +*.scc + +# Visual C++ cache files +ipch/ +*.aps +*.ncb +*.opensdf +*.sdf +*.cachefile + +# Visual Studio profiler +*.psess +*.vsp +*.vspx + +# Guidance Automation Toolkit +*.gpState + +# ReSharper is a .NET coding add-in +_ReSharper*/ +*.[Rr]e[Ss]harper + +# TeamCity is a build add-in +_TeamCity* + +# DotCover is a Code Coverage Tool +*.dotCover + +# NCrunch +*.ncrunch* +.*crunch*.local.xml + +# Installshield output folder +[Ee]xpress/ + +# DocProject is a documentation generator add-in +DocProject/buildhelp/ +DocProject/Help/*.HxT +DocProject/Help/*.HxC +DocProject/Help/*.hhc +DocProject/Help/*.hhk +DocProject/Help/*.hhp +DocProject/Help/Html2 +DocProject/Help/html + +# Click-Once directory +publish/ + +# Publish Web Output +*.Publish.xml +*.pubxml + +# NuGet Packages Directory +## TODO: If you have NuGet Package Restore enabled, uncomment the next line +#packages/ + +# Windows Azure Build Output +csx +*.build.csdef + +# Windows Store app package directory +AppPackages/ + +# Others +sql/ +*.Cache +ClientBin/ +[Ss]tyle[Cc]op.* +~$* +*~ +*.dbmdl +*.[Pp]ublish.xml +*.pfx +*.publishsettings + +# RIA/Silverlight projects +Generated_Code/ + +# Backup & report files from converting an old project file to a newer +# Visual Studio version. 
Backup files are not needed, because we have git ;-)
+_UpgradeReport_Files/
+Backup*/
+UpgradeLog*.XML
+UpgradeLog*.htm
+
+# SQL Server files
+App_Data/*.mdf
+App_Data/*.ldf
+
+#############
+## Windows detritus
+#############
+
+# Windows image file caches
+Thumbs.db
+ehthumbs.db
+
+# Folder config file
+Desktop.ini
+
+# Recycle Bin used on file shares
+$RECYCLE.BIN/
+
+# Mac crap
+.DS_Store
+
+
+#############
+## Python
+#############
+
+*.py[co]
+
+# Packages
+*.egg
+*.egg-info
+dist/
+build/
+eggs/
+parts/
+var/
+sdist/
+develop-eggs/
+.installed.cfg
+
+# Installer logs
+pip-log.txt
+
+# Unit test / coverage reports
+.coverage
+.tox
+
+#Translations
+*.mo
+
+#Mr Developer
+.mr.developer.cfg
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..e9d16fd
--- /dev/null
+++ b/README.md
@@ -0,0 +1,58 @@
+Equilibrium
+==============
+Work made easy.
+--------------
+
+Equilibrium makes managing *project deadlines* within a workplace or firm more social: it gives a workplace a centralized,
+social view of its ongoing projects and helps organize them in a way that results in a more productive environment.
+
+This application was submitted to the Google Cloud Developer Challenge 2013 and was one of the [finalists](http://www.google.com/events/gcdc2013/finalists.html) for the India region.
+
+
+
+https://gcdc2013-equilibrium.appspot.com/ +
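The *Development* section below asks you to put your social-network application keys in `secrets.py`, which is not included in this commit. As a rough sketch only: `handlers.py` imports `secrets` and reads `secrets.AUTH_CONFIG[provider]`, which its docstring describes as a `(key, secret)` tuple per provider, so the file is expected to look roughly like the following. Provider names are taken from `USER_ATTRS` in `handlers.py`; every value is a placeholder you must replace, and SimpleAuth may require additional per-provider fields (such as OAuth scopes) beyond this minimal shape.

```python
# secrets.py -- NOT part of this diff; illustrative sketch only.
# handlers.py calls secrets.AUTH_CONFIG[provider] and expects a
# (key, secret) tuple for each provider it authenticates against.
AUTH_CONFIG = {
    'google':    ('your-google-client-id',     'your-google-client-secret'),
    'facebook':  ('your-facebook-app-id',      'your-facebook-app-secret'),
    'twitter':   ('your-twitter-consumer-key', 'your-twitter-consumer-secret'),
    'linkedin2': ('your-linkedin-api-key',     'your-linkedin-secret-key'),
}
```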
+ + +Tech Stack +---------- + +* Google App Engine +* Google Data Store +* Python +* Flask, Jinja2 +* Google Maps API +* Google+ Signin +* Google Custom Search API +* Wikipedia API +* Google Translate toolkit +* SimpleAuth(https://github.com/crhym3/simpleauth) + + + +Development +----------- + +* Clone this repo (`git clone git@github.com:yashrajsingh/Equilibrium-GCDC2013.git`) +* Download [Python SDK](https://developers.google.com/appengine/downloads#Google_App_Engine_SDK_for_Python "Python SDK for Google App Engine") for Google App Engine +* Unzip the downloaded SDK +* Change directory to project directory (`cd ~/path/to/Equilibrium-GCDC2013`) +* Change the application keys: + - Add your secret keys for social networks in `secrets.py`. + - Add your Google Maps Key in `templates/projects.html` and `templates/firm.html`. + - Add your Google Custom Search key and cx in `handlers.py`. + +* Run the application locally (`~/path/to/sdk/dev_appserver.py .`) +* Visit `http://localhost:8080/` using your browser to see it in action. + + + +Help and Support +-------------- + +[Homepage](https://gcdc2013-equilibrium.appspot.com/) + +[Yash Raj Singh](http://yashrajsingh.net/) \ No newline at end of file diff --git a/app.yaml b/app.yaml new file mode 100644 index 0000000..27aa305 --- /dev/null +++ b/app.yaml @@ -0,0 +1,111 @@ +# Most of the commens here are taken from: +# https://developers.google.com/appengine/docs/python/config/appconfig + +# Replace 'your-app-id-goes-here' with your application ID. +# Get it here if you don't have one: https://appengine.google.com/ +application: gcdc2013-equilibrium + +# App version +version: use-version-arg + +# Use Python 2.7 runtime here. +# See this for more info: +# http://code.google.com/appengine/docs/python/python27/using27.html +runtime: python27 +api_version: 1 + +# Use concurrent requests +threadsafe: true + +# Defaul expiration time. If omitted, the production server sets the expiration +# to 10 minutes. +default_expiration: "30d" + +# The Python 2.7 runtime includes some additional third-party modules +# and makes them available on demand. +# http://code.google.com/appengine/docs/python/tools/libraries27.html +# +# See 'Configuring Libraries' section on +# http://code.google.com/appengine/docs/python/python27/using27.html +# +# When adding jinja2 to your library configuration, you may want to +# add markupsafe and setuptools as well. markupsafe improves the performance +# of template rendering in jinja2 and setuptools is required for some template +# loading functionality. +libraries: +- name: webapp2 + version: "2.5.1" +- name: markupsafe + version: "0.15" +- name: setuptools + version: "0.6c11" +- name: jinja2 + version: "2.6" +- name: django + version: "1.5" +- name: lxml + version: '2.3' + +# The Python SDK includes a number of builtin handlers +# for common application functions. +builtins: +# See this page for more info: +# http://code.google.com/appengine/docs/python/tools/appstats.html +# - appstats: on + +# Usage article: +# http://code.google.com/appengine/articles/deferred.html +# - deferred: on + +# More info on Setting Up Remote API: +# http://code.google.com/appengine/docs/python/tools/uploadingdata.html +- remote_api: on + +# A list of URL patterns and descriptions of how they should be handled. 
+# Reserved URLs (you can't use them): +# /_ah/ +# /form +handlers: + +# static content handlers +- url: /(favicon\.ico) + mime_type: image/png + static_files: static/\1 + upload: static/favicon\.ico + +- url: /(robots\.txt) + static_files: static/\1 + upload: static/robots\.txt + +- url: /(img|css|js|fonts|files)/(.*) + static_files: static/\1/\2 + upload: static/(img|css|js|fonts)/(.*) + +- url: /_ah/mail/.+ + script: handle_incoming_email.app + login: admin + +# dynamic handlers +- url: /.* + script: main.app + secure: always + +# The skip_files element specifies which files in the application directory +# are not to be uploaded to App Engine. The value is either a regular +# expression, or a list of regular expressions. Any filename that matches +# any of the regular expression is omitted from the list of files to upload +# when the application is uploaded. +skip_files: +- ^(.*/)?#.*# +- ^(.*/)?.*~ +- ^(.*/)?.*sh +- ^(.*/)?.*\.py[co] +- ^(.*/)?.*/RCS/.* +- ^(.*/)?\..* +- ^(.*/)?.*\.bak$ +- ^(.*/)?.*\.template$ +- tmp +- tests + +inbound_services: +- mail \ No newline at end of file diff --git a/handle_incoming_email.py b/handle_incoming_email.py new file mode 100644 index 0000000..a68addb --- /dev/null +++ b/handle_incoming_email.py @@ -0,0 +1,7 @@ +import logging +import webapp2 +from google.appengine.ext.webapp.mail_handlers import InboundMailHandler + +class LogSenderHandler(InboundMailHandler): + def receive(self, mail_message): + logging.info("Received a message from: " + mail_message.sender) \ No newline at end of file diff --git a/handlers.py b/handlers.py new file mode 100644 index 0000000..d1e4147 --- /dev/null +++ b/handlers.py @@ -0,0 +1,951 @@ +# -*- coding: utf-8 -*- +import os +import json +import datetime +import logging +import secrets +import urllib +import collections + +import webapp2 +from webapp2_extras import auth, sessions, jinja2 +from jinja2.runtime import TemplateNotFound +from django.utils.html import strip_tags +from urlparse import urlparse + +from simpleauth import SimpleAuthHandler +from google.appengine.ext import ndb +from google.appengine.api import urlfetch +from google.appengine.api import mail +from google.appengine.api import users +from webapp2_extras.appengine.auth.models import User + +from google.appengine.ext import blobstore +from google.appengine.ext.webapp import blobstore_handlers +from google.appengine.api import images +from google.appengine.api import search +from urlparse import parse_qs + + +project_key = ndb.Key('Projects', 'default_projects') +firms_key = ndb.Key('Firms', 'default_firms') +comments_key = ndb.Key('Comments', 'default_comments') +user_key = ndb.Key('User', 'default_user') +notification_key = ndb.Key('Notification', 'default_notification') + +_INDEX_NAME_FIRMS = 'firms' + +custom_search_key = 'search key' +custom_search_cx = 'cx' + + +class Firms(ndb.Model): + admin = ndb.KeyProperty(kind='User') + admin_id = ndb.IntegerProperty() + members = ndb.StringProperty(repeated=True) + picture = ndb.StringProperty() + link = ndb.StringProperty() + desc = ndb.StringProperty() + logo = ndb.BlobKeyProperty() + title = ndb.StringProperty() + pos = ndb.GeoPtProperty(default=ndb.GeoPt(28.635308, 77.224960)) + privacy = ndb.IntegerProperty(default=0) + date = ndb.DateTimeProperty(auto_now_add=True) + + +class Projects(ndb.Model): + author = ndb.KeyProperty(kind='User') + author_id = ndb.IntegerProperty() + firm = ndb.KeyProperty(kind='Firms') + firm_id = ndb.IntegerProperty() + title = ndb.StringProperty() + content = 
ndb.StringProperty() + deadline = ndb.DateTimeProperty() + date = ndb.DateTimeProperty(auto_now_add=True) + + +class Comments(ndb.Model): + author = ndb.KeyProperty(kind='User') + firm = ndb.KeyProperty(kind='Firms') + project = ndb.KeyProperty(kind='Projects') + comment = ndb.StringProperty() + project_id = ndb.IntegerProperty() + date = ndb.DateTimeProperty(auto_now_add=True) + + +class Notification(ndb.Model): + author = ndb.KeyProperty(kind='User') + body_html = ndb.StringProperty() + id = ndb.IntegerProperty() + href = ndb.StringProperty() + time = ndb.DateTimeProperty(auto_now_add=True) + users = ndb.StringProperty(repeated=True) + + +class BaseRequestHandler(webapp2.RequestHandler): + def dispatch(self): + # Get a session store for this request. + self.session_store = sessions.get_store(request=self.request) + try: + # Dispatch the request. + webapp2.RequestHandler.dispatch(self) + finally: + # Save all sessions. + self.session_store.save_sessions(self.response) + + @webapp2.cached_property + def jinja2(self): + """Returns a Jinja2 renderer cached in the app registry""" + return jinja2.get_jinja2(app=self.app) + + @webapp2.cached_property + def session(self): + """Returns a session using the default cookie key""" + return self.session_store.get_session() + + @webapp2.cached_property + def auth(self): + return auth.get_auth() + + @webapp2.cached_property + def current_user(self): + """Returns currently logged in user""" + user_dict = self.auth.get_user_by_session() + return self.auth.store.user_model.get_by_id(user_dict['user_id']) + + @webapp2.cached_property + def logged_in(self): + """Returns true if a user is currently logged in, false otherwise""" + return self.auth.get_user_by_session() is not None + + def render(self, template_name, template_vars={}): + # Preset values for the template + values = { + 'url_for': self.uri_for, + 'logged_in': self.logged_in, + 'flashes': self.session.get_flashes() + } + + # Add manually supplied template values + values.update(template_vars) + + # read the template or 404.html + try: + self.response.write(self.jinja2.render_template(template_name, **values)) + except TemplateNotFound: + self.abort(404) + + def head(self, *args): + """Head is used by Twitter. If not there the tweet button shows 0""" + pass + + +class CommentAdd(BaseRequestHandler): + def post(self): + firm_id = int(self.request.get('firm_id')) + project_id = int(self.request.get('project_id')) + + firm = Firms.get_by_id(firm_id, parent=firms_key) + project = Projects.get_by_id(project_id, parent=project_key) + + comment = Comments(parent=comments_key) + comment.author = self.current_user.key + comment.firm = firm.key + comment.comment = self.request.get('comment') + comment.project = project.key + comment.project_id = project_id + comment.put() + + notification = Notification(parent=notification_key) + notification.author = self.current_user.key + notification.body_html = "" + self.current_user.name + " has commented on your project in " \ + + "" + firm.title + " .." 
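+        # Notify only the project's author: their e-mail is appended to
+        # notification.users below, and RootHandler later shows a notification
+        # to any signed-in user whose e-mail appears in that list (skipping
+        # the notification's own author).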
+ notification.href = "./projects?firm_id=" + str(firm.key.id()) + "#row" + str(project.key.id()) + notification.id = comment.key.id() + notification.users.append(project.author.get().email) + notification.put() + + if firm_id: + self.redirect('/projects?firm_id=' + str(firm_id) + '#row' + str(project_id)) + else: + self.redirect('/') + + +class ProjectAdd(BaseRequestHandler): + def post(self): + firm_id = int(self.request.get('firm_id')) + firm = Firms.get_by_id(firm_id, parent=firms_key) + + projects = Projects(parent=project_key) + projects.author = self.current_user.key + projects.author_id = int(self.current_user.key.id()) + projects.firm = firm.key + projects.firm_id = firm_id + projects.content = self.request.get('content') + projects.title = self.request.get('title') + projects.deadline = datetime.datetime.strptime(self.request.get('datetime'), '%m/%d/%Y %H:%M %p') + projects.put() + + notification = Notification(parent=notification_key) + notification.author = self.current_user.key + notification.body_html = "" + self.current_user.name + " has created a new project " \ + + self.request.get('title') + " in " \ + + "" + firm.title + " .." + notification.href = "./projects?firm_id=" + str(firm.key.id()) + notification.id = projects.key.id() + for member in firm.members: + notification.users.append(member) + notification.put() + + self.redirect('/projects?firm_id=' + str(firm_id)) + + +class firms(BaseRequestHandler): + def post(self): + if self.request.get('title'): + title = self.request.get('title') + + if title: + #google images search + title1 = urllib.quote_plus(title) + url = 'https://www.googleapis.com/customsearch/v1?key=' + custom_search_key + '&cx=' + custom_search_cx + '&searchType=image&fileType=jpg&imgSize=large&imgType=news&num=1&alt=json&q=' + title1 + result = urlfetch.fetch(url) + jsonr = json.loads(result.content) + picture = jsonr['items'][0]['link'] + link = jsonr['items'][0]['displayLink'] + desc1 = jsonr['items'][0]['title'] + desc = urllib.unquote_plus(desc1) + + + #wikipedia description + urlw = "https://en.wikipedia.org//w/api.php?action=query&prop=extracts&format=json&exsentences=1&exlimit=1&exsectionformat=plain&titles=" + title1 + resultw = urlfetch.fetch(urlw) + jsonrw = json.loads(resultw.content) + d = jsonrw['query']['pages'].keys() + x = d[0] + try: + desciption = jsonrw['query']['pages'][x]['extract'] + except: + desciption = "" + + if desciption: + desc = strip_tags(desciption) + + else: + picture = self.current_user.avatar_url + link = self.current_user.link + desc = "No data." 
+ else: + self.render('error.html', { + 'user': self.current_user, + }) + + firmss = Firms(parent=firms_key) + firmss.admin = self.current_user.key + firmss.admin_id = self.current_user.key.id() + firmss.members = [self.current_user.email] + firmss.link = link + firmss.picture = picture + firmss.title = title + firmss.desc = desc + firmss.put() + + id = firmss.key.id() + search.Index(name=_INDEX_NAME_FIRMS).put(CreateFirmDoc(str(id), self.current_user, title, "null", desc, link)) + + self.redirect('/firm?firm_id=' + str(firmss.key.id())) + + def get(self): + upload_url = blobstore.create_upload_url('/editfirm') + firm_id = 0 + try: + firm_id = int(self.request.get('firm_id')) + except: + self.abort(404) + firmed = Firms.get_by_id(firm_id, parent=firms_key) + if firmed.logo: + image_url = images.get_serving_url(firmed.logo, 75) + else: + image_url = "/img/Hinder-red.png" + + if (firm_id != 0) and self.logged_in: + self.render('firm.html', { + 'title': 'Edit Firm Settings - ' + firmed.title, + 'user': self.current_user, + 'upload_url': upload_url, + 'image_url': image_url, + 'firm': firmed, + 'firm_id': firm_id + }) + else: + self.abort(404) + + +class EditFirm(BaseRequestHandler, blobstore_handlers.BlobstoreUploadHandler): + def post(self): + desc = self.request.get('desc') + desc = (desc[:75] + '..') if len(desc) > 75 else desc + + title = self.request.get('title') + title = (title[:60] + '..') if len(title) > 75 else title + + link = self.request.get('link') + parts = urlparse(link) + if not parts.scheme or not parts.netloc: + self.abort(404) + + firm_id = int(self.request.get('firm_id')) + firm = Firms.get_by_id(firm_id, parent=firms_key) + logo = self.get_uploads('logo') + + if logo: + blob_info = logo[0] + if firm.logo: + blobstore.delete(firm.logo) + firm.logo = blob_info.key() + logo = blob_info.key() + else: + logo = firm.logo + + firm.link = link + firm.picture = self.request.get('picture') + firm.title = title + firm.privacy = int(self.request.get('type')) + firm.desc = desc + firm.pos = ndb.GeoPt(self.request.get('lat'), self.request.get('lng')) + firm.put() + + notification = Notification(parent=notification_key) + notification.author = self.current_user.key + notification.body_html = "" + self.current_user.name + " updated " \ + + "" + firm.title + " Details..." + notification.href = "./projects?firm_id=" + str(firm.key.id()) + notification.id = firm.key.id() + for member in firm.members: + notification.users.append(member) + notification.put() + + + doc_index = search.Index(name=_INDEX_NAME_FIRMS) + if doc_index.get(doc_id=str(firm.key.id())): + doc_index.delete(document_ids=str(firm.key.id())) + + search.Index(name=_INDEX_NAME_FIRMS).put(CreateFirmDoc(str(firm_id), + self.current_user, title, str(logo), desc, link)) + + self.redirect('/') + + +class add_firm(BaseRequestHandler): + def post(self): + if self.request.get('title'): + title = self.request.get('title') + else: + title = "No Title" + if self.request.get('link'): + link = self.request.get('link') + else: + link = "No link" + if self.request.get('desc'): + desc = self.request.get('desc') + else: + desc = "No Description!" 
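+        # Render the add-firm form pre-filled with whatever the caller
+        # supplied, using the placeholder text chosen above for missing fields.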
+ + if self.logged_in: + self.render('add_firm.html', { + 'user': self.current_user, + 'title': title, + 'link': link, + 'desc': desc + }) + else: + self.render('add_firm.html', { + 'title': title, + 'link': link, + 'desc': desc, + }) + + +class DeleteFirm(BaseRequestHandler): + def get(self): + firm_id = 0 + try: + firm_id = int(self.request.get('firm_id')) + except: + self.abort(404) + firmed = Firms.get_by_id(firm_id, parent=firms_key) + + if (firm_id != 0) and self.logged_in: + self.render('delete.html', { + 'title': 'Delete Firm', + 'user': self.current_user, + 'firm': firmed, + 'firm_id': firm_id + }) + else: + self.abort(404) + + + def post(self): + logging.debug('Deleting the event!: %s' % self.request.get('event_id')) + firm_id = 0 + try: + firm_id = int(self.request.get('firm_id')) + except: + self.abort(404) + firmed = Firms.get_by_id(firm_id, parent=firms_key) + projects = Projects.query(Projects.firm_id == firm_id, ancestor=project_key) + comments = Comments.query(ancestor=comments_key) + + for project in projects: + for comment in comments: + if comment.project == project.key.id(): + comment.key.delete() + project.key.delete() + + doc_index = search.Index(name=_INDEX_NAME_FIRMS) + if doc_index.get(doc_id=str(firmed.key.id())): + doc_index.delete(document_ids=str(firmed.key.id())) + + firmed.key.delete() + self.redirect('/') + + +class AddFirmMember(BaseRequestHandler): + def post(self): + firm_id = self.request.get('firm_id') + email = self.request.get('member_email') + firm_id1 = int(self.request.get('firm_id')) + firm = Firms.get_by_id(firm_id1, parent=firms_key) + + if email in firm.members: + self.abort(404) + else: + firm.members.append(email) + firm.put() + + notification = Notification(parent=notification_key) + notification.author = self.current_user.key + notification.body_html = "" + self.current_user.name + " added a new member " + \ + self.request.get('member_email') + " to " \ + + "" + firm.title + "." 
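+            # Unlike a comment notification, a membership change is broadcast
+            # to every member of the firm: the loop below copies firm.members
+            # into notification.users.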
+ notification.href = "./projects?firm_id=" + str(firm.key.id()) + notification.id = firm.key.id() + for member in firm.members: + notification.users.append(member) + notification.put() + + if firm_id: + self.redirect('/projects?firm_id=' + firm_id + '#row') + else: + self.redirect('/') + + +class RootHandler(BaseRequestHandler): + def get(self): + """Handles default langing page""" + # Projects + + if self.logged_in: + firms = Firms.query(ancestor=firms_key).order(-Firms.date) + email = self.current_user.email + foundFirm = False + firmed = [] + for firm in firms: + if email in firm.members: + firmed.append(firm) + foundFirm = True + + #projects = Projects.query(ancestor=project_key).order(-Projects.deadline) + found = False + if self.logged_in: + projectsme = Projects.query(Projects.author_id == self.current_user.key.id(), + ancestor=project_key).order( + -Projects.deadline) + for project in projectsme: + found = True + else: + projectsme = None + + comments = Comments.query(ancestor=comments_key).order(Comments.date) + notifications = Notification.query(ancestor=notification_key).order(-Notification.time).fetch(5) + notification = [] + for notify in notifications: + if email in notify.users and notify.author.get().email != self.current_user.email: + notification.append(notify) + count = len(notification) + + self.render('home.html', { + 'title': 'Equilibrium', + 'notifications': notification, + 'count': count, + 'firms': firmed, + 'user': self.current_user, + 'greetings': projectsme, + 'comments': comments, + 'found': found, + 'foundFirm': foundFirm + }) + else: + self.render('hot.html') + + +class About(BaseRequestHandler): + def get(self): + if self.logged_in: + self.render('about.html', { + 'title': 'Equilibrium - Work made easy.', + 'user': self.current_user + }) + else: + self.render('about.html', { + 'title': 'Equilibrium - Work made easy.' 
+ }) + + +class DeleteProject(BaseRequestHandler): + def post(self): + project_id = 0 + try: + project_id = int(self.request.get('project_id')) + except: + self.abort(404) + project = Projects.get_by_id(project_id, parent=project_key) + comments = Comments.query(Comments.project_id == project_id, ancestor=comments_key) + notifications = Notification.query(Notification.id == project_id, ancestor=notification_key) + + for notice in notifications: + notice.key.delete() + + for comment in comments: + comment.key.delete() + + project.key.delete() + self.redirect('/') + + +class DeleteComment(BaseRequestHandler): + def post(self): + comment_id = 0 + try: + comment_id = int(self.request.get('comment_id')) + except: + self.abort(404) + comment = Comments.get_by_id(comment_id, parent=comments_key) + + notifications = Notification.query(Notification.id == comment_id, ancestor=notification_key) + + for notice in notifications: + notice.key.delete() + + comment.key.delete() + self.redirect('/') + + +class DeleteMember(BaseRequestHandler): + def post(self): + logging.debug('Deleting the member in !: %s' % self.request.get('firm_id')) + firm_id = 0 + member = '' + try: + member = self.request.get('member') + firm_id = int(self.request.get('firm_id')) + except: + self.abort(404) + + firmed = Firms.get_by_id(firm_id, parent=firms_key) + if member in firmed.members: + firmed.members.remove(member) + + firmed.put() + self.redirect('/') + + +class Invite(BaseRequestHandler): + def get(self): + self.render('invite.html', { + 'title': 'Invite People To Equilibrium - Work made easy.', + 'user': self.current_user, + }) + + def post(self): + email = self.request.get('email') + message = mail.EmailMessage() + message.sender = self.current_user.email + message.to = email + message.subject = "Invitation to Equilibrium!" + message.body = """ + You have been invited to Equilibrium! 
+ + To accept this invitation, click the following link, + or copy and paste the URL into your browser's address + bar: + + http://gcdc2013-equilibrium.appspot.com/ + """ + + message.send() + self.redirect('/') + + +class Privacy(BaseRequestHandler): + def get(self): + if self.logged_in: + self.render('privacy.html', { + 'title': 'Privacy / TOS For Equilibrium', + 'user': self.current_user + }) + else: + self.render('privacy.html', { + 'title': 'Privacy / TOS For Equilibrium' + }) + + +class Features(BaseRequestHandler): + def get(self): + if self.logged_in: + self.render('features.html', { + 'title': 'Features of Equilibrium - Work Made Easy', + 'user': self.current_user + }) + else: + self.render('features.html', { + 'title': 'Features of Equilibrium - Work Made Easy', + }) + + +class Browse(BaseRequestHandler): + def get(self): + firms = Firms.query(ancestor=firms_key).order(-Firms.date) + + if self.logged_in: + self.render('browse.html', { + 'title': 'Browse - Equilibrium', + 'user': self.current_user, + 'images': images.get_serving_url, + 'firms': firms + }) + else: + self.abort(404) + + +def CreateFirmDoc(id, author, title, logo, desc, link): + if author: + nickname = author.name + else: + nickname = 'anonymous' + + return search.Document(doc_id=id, + fields=[search.TextField(name='author', value=nickname), + search.TextField(name='title', value=title), + search.TextField(name='logo', value=logo), + search.TextField(name='desc', value=desc), + search.TextField(name='link', value=link), + search.DateField(name='date', value=datetime.datetime.now().date())]) + + +class Search(BaseRequestHandler): + def get(self): + uri = urlparse(self.request.uri) + query = 'null' + if uri.query: + query = parse_qs(uri.query) + query = query['query'][0] + + if query is not 'null': + expr_list = [search.SortExpression( + expression='author', default_value='', + direction=search.SortExpression.DESCENDING)] + + sort_opts = search.SortOptions(expressions=expr_list) + query_options = search.QueryOptions(limit=9, sort_options=sort_opts) + query_obj = search.Query(query_string=query, options=query_options) + + results_firms = search.Index(name=_INDEX_NAME_FIRMS).search(query=query_obj) + + self.render('search.html', { + 'user': self.current_user, + 'title': "Search - Equilibrium", + 'results_firms': results_firms, + 'images': images.get_serving_url, + 'len': results_firms.number_found + }) + else: + self.render('search.html', { + 'user': self.current_user, + 'title': "Search - Equilibrium" + }) + + def get(self): + if self.logged_in: + + firms = Firms.query(ancestor=firms_key).order(-Firms.date) + email = self.current_user.email + foundFirm = False + firmed = [] + for firm in firms: + if email in firm.members: + firmed.append(firm) + foundFirm = True + + notifications = Notification.query(ancestor=notification_key).order(-Notification.time).fetch(5) + notification = [] + for notify in notifications: + if email in notify.users and notify.author.get().email != self.current_user.email: + notification.append(notify) + count = len(notification) + + #projects = Projects.query(ancestor=project_key).order(-Projects.deadline) + found = False + + if self.logged_in: + projectsme = Projects.query(Projects.author_id == self.current_user.key.id(), + ancestor=project_key).order( + -Projects.deadline) + for project in projectsme: + found = True + else: + projectsme = None + + comments = Comments.query(ancestor=comments_key).order(Comments.date) + + self.render('semantic.html', { + 'title': 'Semantic - Equilibrium', + 
'notifications': notification, + 'count': count, + 'user': self.current_user, + 'greetings': projectsme, + 'found': found, + 'foundFirm': foundFirm, + 'comments': comments, + }) + else: + self.render('semantic.html', { + 'title': 'Semantic - Equilibrium', + }) + + +class ProjectsHandler(BaseRequestHandler): + def get(self): + """Handles default langing page""" + # Projects + firm_id = 0 + try: + firm_id = int(self.request.get('firm_id')) + except: + self.abort(404) + + firmed = Firms.get_by_id(firm_id, parent=firms_key) + + projects = Projects.query(Projects.firm_id == firm_id, ancestor=project_key).order(-Projects.deadline) + + comments = Comments.query(ancestor=comments_key).order(Comments.date) + + found = False + for project in projects: + found = True + + if self.logged_in: + self.render('projects.html', { + 'title': firmed.title + ' on Equilibrium', + 'greetings': projects, + 'comments': comments, + 'user': self.current_user, + 'images': images.get_serving_url, + 'firm_id': firm_id, + 'firm': firmed, + 'found': found + }) + else: + self.render('projects.html', { + 'title': firmed.title + 'on Equilibrium', + 'greetings': projects, + 'comments': comments, + 'logo_url': images.get_serving_url(firmed.logo, 75), + 'user': None, + 'firm_id': firm_id, + 'firm': firmed, + 'found': found + }) + + +class ProfileHandler(BaseRequestHandler): + def get(self): + """Handles GET /profile""" + #projects = Projects.query(Projects.author == self.current_user.key, ancestor=project_key).order(-Projects.deadline) + + comments = Comments.query(ancestor=comments_key).order(Comments.date) + + found = False + + pro_id = 0 + try: + pro_id = int(self.request.get('pro_id')) + except: + self.abort(404) + profile = self.auth.store.user_model.get_by_id(pro_id) + + projects = Projects.query(Projects.author == profile.key, ancestor=project_key).order(-Projects.deadline) + + firms = Firms.query(ancestor=firms_key).order(-Firms.date) + email = profile.email + foundFirm = False + firmed = [] + for firm in firms: + if email in firm.members: + firmed.append(firm) + foundFirm = True + + for project in projects: + found = True + + self.render('profile.html', { + 'title': profile.name, + 'get_user': profile, + 'foundFirm': foundFirm, + 'firms': firmed, + 'user': self.current_user, + 'greetings': projects, + 'comments': comments, + 'found': found + }) + + +class AuthHandler(BaseRequestHandler, SimpleAuthHandler): + """Authentication handler for OAuth 2.0, 1.0(a) and OpenID.""" + + # Enable optional OAuth 2.0 CSRF guard + OAUTH2_CSRF_STATE = True + + USER_ATTRS = { + 'facebook': { + 'id': lambda id: ('avatar_url', + 'http://graph.facebook.com/{0}/picture?type=large'.format(id)), + 'name': 'name', + 'link': 'link' + }, + 'google': { + 'picture': 'avatar_url', + 'name': 'name', + 'profile': 'link', + 'email': 'email' + }, + 'windows_live': { + 'avatar_url': 'avatar_url', + 'name': 'name', + 'link': 'link' + }, + 'twitter': { + 'profile_image_url': 'avatar_url', + 'screen_name': 'name', + 'link': 'link' + }, + 'linkedin': { + 'picture-url': 'avatar_url', + 'first-name': 'name', + 'public-profile-url': 'link' + }, + 'linkedin2': { + 'picture-url': 'avatar_url', + 'first-name': 'name', + 'public-profile-url': 'link' + }, + 'foursquare': { + 'photo': lambda photo: ('avatar_url', photo.get('prefix') + '100x100' + photo.get('suffix')), + 'firstName': 'firstName', + 'lastName': 'lastName', + 'contact': lambda contact: ('email', contact.get('email')), + 'id': lambda id: ('link', 'http://foursquare.com/user/{0}'.format(id)) + }, + 
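+        # OpenID providers supply no avatar, so a static placeholder image is
+        # used and the e-mail address doubles as the profile link.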
'openid': { + 'id': lambda id: ('avatar_url', '/img/missing-avatar.png'), + 'nickname': 'name', + 'email': 'link' + } + } + + def _on_signin(self, data, auth_info, provider): + """Callback whenever a new or existing user is logging in. + data is a user info dictionary. + auth_info contains access token or oauth token and secret. + """ + auth_id = '%s:%s' % (provider, data['id']) + logging.info('Looking for a user with id %s', auth_id) + + user = self.auth.store.user_model.get_by_auth_id(auth_id) + _attrs = self._to_user_model_attrs(data, self.USER_ATTRS[provider]) + + if user: + logging.info('Found existing user to log in') + # Existing users might've changed their profile data so we update our + # local model anyway. This might result in quite inefficient usage + # of the Datastore, but we do this anyway for demo purposes. + # + # In a real app you could compare _attrs with user's properties fetched + # from the datastore and update local user in case something's changed. + user.populate(**_attrs) + user.put() + self.auth.set_session( + self.auth.store.user_to_dict(user)) + + else: + # check whether there's a user currently logged in + # then, create a new user if nobody's signed in, + # otherwise add this auth_id to currently logged in user. + + if self.logged_in: + logging.info('Updating currently logged in user') + + u = self.current_user + u.populate(**_attrs) + # The following will also do u.put(). Though, in a real app + # you might want to check the result, which is + # (boolean, info) tuple where boolean == True indicates success + # See webapp2_extras.appengine.auth.models.User for details. + u.add_auth_id(auth_id) + + else: + logging.info('Creating a brand new user') + ok, user = self.auth.store.user_model.create_user(auth_id, **_attrs) + if ok: + self.auth.set_session(self.auth.store.user_to_dict(user)) + + # Remember auth data during redirect, just for this demo. You wouldn't + # normally do this. + self.session.add_flash(data, 'data - from _on_signin(...)') + self.session.add_flash(auth_info, 'auth_info - from _on_signin(...)') + + # Go to the profile page + self.redirect('/') + + def logout(self): + self.auth.unset_session() + self.redirect('/') + + def handle_exception(self, exception, debug): + logging.error(exception) + self.render('error.html', {'exception': exception}) + + def _callback_uri_for(self, provider): + return self.uri_for('auth_callback', provider=provider, _full=True) + + def _get_consumer_info_for(self, provider): + """Returns a tuple (key, secret) for auth init requests.""" + return secrets.AUTH_CONFIG[provider] + + def _to_user_model_attrs(self, data, attrs_map): + """Get the needed information from the provider dataset.""" + user_attrs = {} + for k, v in attrs_map.iteritems(): + attr = (v, data.get(k)) if isinstance(v, str) else v(data.get(k)) + user_attrs.setdefault(*attr) + + return user_attrs diff --git a/index.yaml b/index.yaml new file mode 100644 index 0000000..e56e7ad --- /dev/null +++ b/index.yaml @@ -0,0 +1,60 @@ +# See this on how to configure indexes: +# http://code.google.com/appengine/docs/python/config/indexconfig.html +# +# For more information about indexes, see Queries and Indexes +# http://code.google.com/appengine/docs/python/datastore/queries.html + +indexes: + +# AUTOGENERATED + +# This index.yaml is automatically updated whenever the dev_appserver +# detects that a new type of query is run. If you want to manage the +# index.yaml file manually, remove the above marker line (the line +# saying "# AUTOGENERATED"). 
If you want to manage some indexes +# manually, move them above the marker line. The index.yaml file is +# automatically uploaded to the admin console when you next deploy +# your application using appcfg.py. + +- kind: Comments + ancestor: yes + properties: + - name: date + +- kind: Firms + ancestor: yes + properties: + - name: date + +- kind: Firms + ancestor: yes + properties: + - name: date + direction: desc + +- kind: Notification + ancestor: yes + properties: + - name: time + direction: desc + +- kind: Projects + ancestor: yes + properties: + - name: author + - name: deadline + direction: desc + +- kind: Projects + ancestor: yes + properties: + - name: author_id + - name: deadline + direction: desc + +- kind: Projects + ancestor: yes + properties: + - name: firm_id + - name: deadline + direction: desc diff --git a/lib/httplib2/__init__.py b/lib/httplib2/__init__.py new file mode 100644 index 0000000..fb74de2 --- /dev/null +++ b/lib/httplib2/__init__.py @@ -0,0 +1,1675 @@ +from __future__ import generators +""" +httplib2 + +A caching http interface that supports ETags and gzip +to conserve bandwidth. + +Requires Python 2.3 or later + +Changelog: +2007-08-18, Rick: Modified so it's able to use a socks proxy if needed. + +""" + +__author__ = "Joe Gregorio (joe@bitworking.org)" +__copyright__ = "Copyright 2006, Joe Gregorio" +__contributors__ = ["Thomas Broyer (t.broyer@ltgt.net)", + "James Antill", + "Xavier Verges Farrero", + "Jonathan Feinberg", + "Blair Zajac", + "Sam Ruby", + "Louis Nyffenegger"] +__license__ = "MIT" +__version__ = "0.7.6" + +import re +import sys +import email +import email.Utils +import email.Message +import email.FeedParser +import StringIO +import gzip +import zlib +import httplib +import urlparse +import urllib +import base64 +import os +import copy +import calendar +import time +import random +import errno +try: + from hashlib import sha1 as _sha, md5 as _md5 +except ImportError: + # prior to Python 2.5, these were separate modules + import sha + import md5 + _sha = sha.new + _md5 = md5.new +import hmac +from gettext import gettext as _ +import socket + +try: + from httplib2 import socks +except ImportError: + try: + import socks + except ImportError: + socks = None + +# Build the appropriate socket wrapper for ssl +try: + import ssl # python 2.6 + ssl_SSLError = ssl.SSLError + def _ssl_wrap_socket(sock, key_file, cert_file, + disable_validation, ca_certs): + if disable_validation: + cert_reqs = ssl.CERT_NONE + else: + cert_reqs = ssl.CERT_REQUIRED + # We should be specifying SSL version 3 or TLS v1, but the ssl module + # doesn't expose the necessary knobs. So we need to go with the default + # of SSLv23. + return ssl.wrap_socket(sock, keyfile=key_file, certfile=cert_file, + cert_reqs=cert_reqs, ca_certs=ca_certs) +except (AttributeError, ImportError): + ssl_SSLError = None + def _ssl_wrap_socket(sock, key_file, cert_file, + disable_validation, ca_certs): + if not disable_validation: + raise CertificateValidationUnsupported( + "SSL certificate validation is not supported without " + "the ssl module installed. 
To avoid this error, install " + "the ssl module, or explicity disable validation.") + ssl_sock = socket.ssl(sock, key_file, cert_file) + return httplib.FakeSocket(sock, ssl_sock) + + +if sys.version_info >= (2,3): + from iri2uri import iri2uri +else: + def iri2uri(uri): + return uri + +def has_timeout(timeout): # python 2.6 + if hasattr(socket, '_GLOBAL_DEFAULT_TIMEOUT'): + return (timeout is not None and timeout is not socket._GLOBAL_DEFAULT_TIMEOUT) + return (timeout is not None) + +__all__ = ['Http', 'Response', 'ProxyInfo', 'HttpLib2Error', + 'RedirectMissingLocation', 'RedirectLimit', 'FailedToDecompressContent', + 'UnimplementedDigestAuthOptionError', 'UnimplementedHmacDigestAuthOptionError', + 'debuglevel', 'ProxiesUnavailableError'] + + +# The httplib debug level, set to a non-zero value to get debug output +debuglevel = 0 + +# A request will be tried 'RETRIES' times if it fails at the socket/connection level. +RETRIES = 2 + +# Python 2.3 support +if sys.version_info < (2,4): + def sorted(seq): + seq.sort() + return seq + +# Python 2.3 support +def HTTPResponse__getheaders(self): + """Return list of (header, value) tuples.""" + if self.msg is None: + raise httplib.ResponseNotReady() + return self.msg.items() + +if not hasattr(httplib.HTTPResponse, 'getheaders'): + httplib.HTTPResponse.getheaders = HTTPResponse__getheaders + +# All exceptions raised here derive from HttpLib2Error +class HttpLib2Error(Exception): pass + +# Some exceptions can be caught and optionally +# be turned back into responses. +class HttpLib2ErrorWithResponse(HttpLib2Error): + def __init__(self, desc, response, content): + self.response = response + self.content = content + HttpLib2Error.__init__(self, desc) + +class RedirectMissingLocation(HttpLib2ErrorWithResponse): pass +class RedirectLimit(HttpLib2ErrorWithResponse): pass +class FailedToDecompressContent(HttpLib2ErrorWithResponse): pass +class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse): pass +class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse): pass + +class MalformedHeader(HttpLib2Error): pass +class RelativeURIError(HttpLib2Error): pass +class ServerNotFoundError(HttpLib2Error): pass +class ProxiesUnavailableError(HttpLib2Error): pass +class CertificateValidationUnsupported(HttpLib2Error): pass +class SSLHandshakeError(HttpLib2Error): pass +class NotSupportedOnThisPlatform(HttpLib2Error): pass +class CertificateHostnameMismatch(SSLHandshakeError): + def __init__(self, desc, host, cert): + HttpLib2Error.__init__(self, desc) + self.host = host + self.cert = cert + +# Open Items: +# ----------- +# Proxy support + +# Are we removing the cached content too soon on PUT (only delete on 200 Maybe?) + +# Pluggable cache storage (supports storing the cache in +# flat files by default. We need a plug-in architecture +# that can support Berkeley DB and Squid) + +# == Known Issues == +# Does not handle a resource that uses conneg and Last-Modified but no ETag as a cache validator. +# Does not handle Cache-Control: max-stale +# Does not use Age: headers when calculating cache freshness. + + +# The number of redirections to follow before giving up. +# Note that only GET redirects are automatically followed. +# Will also honor 301 requests by saving that info and never +# requesting that URI again. +DEFAULT_MAX_REDIRECTS = 5 + +# Default CA certificates file bundled with httplib2. 
+CA_CERTS = os.path.join( + os.path.dirname(os.path.abspath(__file__ )), "cacerts.txt") + +# Which headers are hop-by-hop headers by default +HOP_BY_HOP = ['connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization', 'te', 'trailers', 'transfer-encoding', 'upgrade'] + +def _get_end2end_headers(response): + hopbyhop = list(HOP_BY_HOP) + hopbyhop.extend([x.strip() for x in response.get('connection', '').split(',')]) + return [header for header in response.keys() if header not in hopbyhop] + +URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?") + +def parse_uri(uri): + """Parses a URI using the regex given in Appendix B of RFC 3986. + + (scheme, authority, path, query, fragment) = parse_uri(uri) + """ + groups = URI.match(uri).groups() + return (groups[1], groups[3], groups[4], groups[6], groups[8]) + +def urlnorm(uri): + (scheme, authority, path, query, fragment) = parse_uri(uri) + if not scheme or not authority: + raise RelativeURIError("Only absolute URIs are allowed. uri = %s" % uri) + authority = authority.lower() + scheme = scheme.lower() + if not path: + path = "/" + # Could do syntax based normalization of the URI before + # computing the digest. See Section 6.2.2 of Std 66. + request_uri = query and "?".join([path, query]) or path + scheme = scheme.lower() + defrag_uri = scheme + "://" + authority + request_uri + return scheme, authority, request_uri, defrag_uri + + +# Cache filename construction (original borrowed from Venus http://intertwingly.net/code/venus/) +re_url_scheme = re.compile(r'^\w+://') +re_slash = re.compile(r'[?/:|]+') + +def safename(filename): + """Return a filename suitable for the cache. + + Strips dangerous and common characters to create a filename we + can use to store the cache in. + """ + + try: + if re_url_scheme.match(filename): + if isinstance(filename,str): + filename = filename.decode('utf-8') + filename = filename.encode('idna') + else: + filename = filename.encode('idna') + except UnicodeError: + pass + if isinstance(filename,unicode): + filename=filename.encode('utf-8') + filemd5 = _md5(filename).hexdigest() + filename = re_url_scheme.sub("", filename) + filename = re_slash.sub(",", filename) + + # limit length of filename + if len(filename)>200: + filename=filename[:200] + return ",".join((filename, filemd5)) + +NORMALIZE_SPACE = re.compile(r'(?:\r\n)?[ \t]+') +def _normalize_headers(headers): + return dict([ (key.lower(), NORMALIZE_SPACE.sub(value, ' ').strip()) for (key, value) in headers.iteritems()]) + +def _parse_cache_control(headers): + retval = {} + if headers.has_key('cache-control'): + parts = headers['cache-control'].split(',') + parts_with_args = [tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=")] + parts_wo_args = [(name.strip().lower(), 1) for name in parts if -1 == name.find("=")] + retval = dict(parts_with_args + parts_wo_args) + return retval + +# Whether to use a strict mode to parse WWW-Authenticate headers +# Might lead to bad results in case of ill-formed header value, +# so disabled by default, falling back to relaxed parsing. +# Set to true to turn on, usefull for testing servers. +USE_WWW_AUTH_STRICT_PARSING = 0 + +# In regex below: +# [^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+ matches a "token" as defined by HTTP +# "(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?" 
matches a "quoted-string" as defined by HTTP, when LWS have already been replaced by a single space +# Actually, as an auth-param value can be either a token or a quoted-string, they are combined in a single pattern which matches both: +# \"?((?<=\")(?:[^\0-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?@,;:\\\"/[\]?={} \t]+(?!\"))\"? +WWW_AUTH_STRICT = re.compile(r"^(?:\s*(?:,\s*)?([^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+)\s*=\s*\"?((?<=\")(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?@,;:\\\"/[\]?={} \t]+(?!\"))\"?)(.*)$") +WWW_AUTH_RELAXED = re.compile(r"^(?:\s*(?:,\s*)?([^ \t\r\n=]+)\s*=\s*\"?((?<=\")(?:[^\\\"]|\\.)*?(?=\")|(? current_age: + retval = "FRESH" + return retval + +def _decompressContent(response, new_content): + content = new_content + try: + encoding = response.get('content-encoding', None) + if encoding in ['gzip', 'deflate']: + if encoding == 'gzip': + content = gzip.GzipFile(fileobj=StringIO.StringIO(new_content)).read() + if encoding == 'deflate': + content = zlib.decompress(content) + response['content-length'] = str(len(content)) + # Record the historical presence of the encoding in a way the won't interfere. + response['-content-encoding'] = response['content-encoding'] + del response['content-encoding'] + except IOError: + content = "" + raise FailedToDecompressContent(_("Content purported to be compressed with %s but failed to decompress.") % response.get('content-encoding'), response, content) + return content + +def _updateCache(request_headers, response_headers, content, cache, cachekey): + if cachekey: + cc = _parse_cache_control(request_headers) + cc_response = _parse_cache_control(response_headers) + if cc.has_key('no-store') or cc_response.has_key('no-store'): + cache.delete(cachekey) + else: + info = email.Message.Message() + for key, value in response_headers.iteritems(): + if key not in ['status','content-encoding','transfer-encoding']: + info[key] = value + + # Add annotations to the cache to indicate what headers + # are variant for this request. + vary = response_headers.get('vary', None) + if vary: + vary_headers = vary.lower().replace(' ', '').split(',') + for header in vary_headers: + key = '-varied-%s' % header + try: + info[key] = request_headers[header] + except KeyError: + pass + + status = response_headers.status + if status == 304: + status = 200 + + status_header = 'status: %d\r\n' % status + + header_str = info.as_string() + + header_str = re.sub("\r(?!\n)|(? 
0: + service = "cl" + # No point in guessing Base or Spreadsheet + #elif request_uri.find("spreadsheets") > 0: + # service = "wise" + + auth = dict(Email=credentials[0], Passwd=credentials[1], service=service, source=headers['user-agent']) + resp, content = self.http.request("https://www.google.com/accounts/ClientLogin", method="POST", body=urlencode(auth), headers={'Content-Type': 'application/x-www-form-urlencoded'}) + lines = content.split('\n') + d = dict([tuple(line.split("=", 1)) for line in lines if line]) + if resp.status == 403: + self.Auth = "" + else: + self.Auth = d['Auth'] + + def request(self, method, request_uri, headers, content): + """Modify the request headers to add the appropriate + Authorization header.""" + headers['authorization'] = 'GoogleLogin Auth=' + self.Auth + + +AUTH_SCHEME_CLASSES = { + "basic": BasicAuthentication, + "wsse": WsseAuthentication, + "digest": DigestAuthentication, + "hmacdigest": HmacDigestAuthentication, + "googlelogin": GoogleLoginAuthentication +} + +AUTH_SCHEME_ORDER = ["hmacdigest", "googlelogin", "digest", "wsse", "basic"] + +class FileCache(object): + """Uses a local directory as a store for cached files. + Not really safe to use if multiple threads or processes are going to + be running on the same cache. + """ + def __init__(self, cache, safe=safename): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior + self.cache = cache + self.safe = safe + if not os.path.exists(cache): + os.makedirs(self.cache) + + def get(self, key): + retval = None + cacheFullPath = os.path.join(self.cache, self.safe(key)) + try: + f = file(cacheFullPath, "rb") + retval = f.read() + f.close() + except IOError: + pass + return retval + + def set(self, key, value): + cacheFullPath = os.path.join(self.cache, self.safe(key)) + f = file(cacheFullPath, "wb") + f.write(value) + f.close() + + def delete(self, key): + cacheFullPath = os.path.join(self.cache, self.safe(key)) + if os.path.exists(cacheFullPath): + os.remove(cacheFullPath) + +class Credentials(object): + def __init__(self): + self.credentials = [] + + def add(self, name, password, domain=""): + self.credentials.append((domain.lower(), name, password)) + + def clear(self): + self.credentials = [] + + def iter(self, domain): + for (cdomain, name, password) in self.credentials: + if cdomain == "" or domain == cdomain: + yield (name, password) + +class KeyCerts(Credentials): + """Identical to Credentials except that + name/password are mapped to key/cert.""" + pass + +class AllHosts(object): + pass + +class ProxyInfo(object): + """Collect information required to use a proxy.""" + bypass_hosts = () + + def __init__(self, proxy_type, proxy_host, proxy_port, + proxy_rdns=None, proxy_user=None, proxy_pass=None): + """The parameter proxy_type must be set to one of socks.PROXY_TYPE_XXX + constants. For example: + + p = ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP, + proxy_host='localhost', proxy_port=8000) + """ + self.proxy_type = proxy_type + self.proxy_host = proxy_host + self.proxy_port = proxy_port + self.proxy_rdns = proxy_rdns + self.proxy_user = proxy_user + self.proxy_pass = proxy_pass + + def astuple(self): + return (self.proxy_type, self.proxy_host, self.proxy_port, + self.proxy_rdns, self.proxy_user, self.proxy_pass) + + def isgood(self): + return (self.proxy_host != None) and (self.proxy_port != None) + + @classmethod + def from_environment(cls, method='http'): + """ + Read proxy info from the environment variables. 
+ """ + if method not in ['http', 'https']: + return + + env_var = method + '_proxy' + url = os.environ.get(env_var, os.environ.get(env_var.upper())) + if not url: + return + pi = cls.from_url(url, method) + + no_proxy = os.environ.get('no_proxy', os.environ.get('NO_PROXY', '')) + bypass_hosts = [] + if no_proxy: + bypass_hosts = no_proxy.split(',') + # special case, no_proxy=* means all hosts bypassed + if no_proxy == '*': + bypass_hosts = AllHosts + + pi.bypass_hosts = bypass_hosts + return pi + + @classmethod + def from_url(cls, url, method='http'): + """ + Construct a ProxyInfo from a URL (such as http_proxy env var) + """ + url = urlparse.urlparse(url) + username = None + password = None + port = None + if '@' in url[1]: + ident, host_port = url[1].split('@', 1) + if ':' in ident: + username, password = ident.split(':', 1) + else: + password = ident + else: + host_port = url[1] + if ':' in host_port: + host, port = host_port.split(':', 1) + else: + host = host_port + + if port: + port = int(port) + else: + port = dict(https=443, http=80)[method] + + proxy_type = 3 # socks.PROXY_TYPE_HTTP + return cls( + proxy_type = proxy_type, + proxy_host = host, + proxy_port = port, + proxy_user = username or None, + proxy_pass = password or None, + ) + + def applies_to(self, hostname): + return not self.bypass_host(hostname) + + def bypass_host(self, hostname): + """Has this host been excluded from the proxy config""" + if self.bypass_hosts is AllHosts: + return True + + bypass = False + for domain in self.bypass_hosts: + if hostname.endswith(domain): + bypass = True + + return bypass + + +class HTTPConnectionWithTimeout(httplib.HTTPConnection): + """ + HTTPConnection subclass that supports timeouts + + All timeouts are in seconds. If None is passed for timeout then + Python's default timeout for sockets will be used. See for example + the docs of socket.setdefaulttimeout(): + http://docs.python.org/library/socket.html#socket.setdefaulttimeout + """ + + def __init__(self, host, port=None, strict=None, timeout=None, proxy_info=None): + httplib.HTTPConnection.__init__(self, host, port, strict) + self.timeout = timeout + self.proxy_info = proxy_info + + def connect(self): + """Connect to the host and port specified in __init__.""" + # Mostly verbatim from httplib.py. + if self.proxy_info and socks is None: + raise ProxiesUnavailableError( + 'Proxy support missing but proxy use was requested!') + msg = "getaddrinfo returns an empty list" + if self.proxy_info and self.proxy_info.isgood(): + use_proxy = True + proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass = self.proxy_info.astuple() + else: + use_proxy = False + if use_proxy and proxy_rdns: + host = proxy_host + port = proxy_port + else: + host = self.host + port = self.port + + for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM): + af, socktype, proto, canonname, sa = res + try: + if use_proxy: + self.sock = socks.socksocket(af, socktype, proto) + self.sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass) + else: + self.sock = socket.socket(af, socktype, proto) + self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + # Different from httplib: support timeouts. + if has_timeout(self.timeout): + self.sock.settimeout(self.timeout) + # End of difference from httplib. 
+ if self.debuglevel > 0: + print "connect: (%s, %s) ************" % (self.host, self.port) + if use_proxy: + print "proxy: %s ************" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)) + + self.sock.connect((self.host, self.port) + sa[2:]) + except socket.error, msg: + if self.debuglevel > 0: + print "connect fail: (%s, %s)" % (self.host, self.port) + if use_proxy: + print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)) + if self.sock: + self.sock.close() + self.sock = None + continue + break + if not self.sock: + raise socket.error, msg + +class HTTPSConnectionWithTimeout(httplib.HTTPSConnection): + """ + This class allows communication via SSL. + + All timeouts are in seconds. If None is passed for timeout then + Python's default timeout for sockets will be used. See for example + the docs of socket.setdefaulttimeout(): + http://docs.python.org/library/socket.html#socket.setdefaulttimeout + """ + def __init__(self, host, port=None, key_file=None, cert_file=None, + strict=None, timeout=None, proxy_info=None, + ca_certs=None, disable_ssl_certificate_validation=False): + httplib.HTTPSConnection.__init__(self, host, port=port, key_file=key_file, + cert_file=cert_file, strict=strict) + self.timeout = timeout + self.proxy_info = proxy_info + if ca_certs is None: + ca_certs = CA_CERTS + self.ca_certs = ca_certs + self.disable_ssl_certificate_validation = \ + disable_ssl_certificate_validation + + # The following two methods were adapted from https_wrapper.py, released + # with the Google Appengine SDK at + # http://googleappengine.googlecode.com/svn-history/r136/trunk/python/google/appengine/tools/https_wrapper.py + # under the following license: + # + # Copyright 2007 Google Inc. + # + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. + # + + def _GetValidHostsForCert(self, cert): + """Returns a list of valid host globs for an SSL certificate. + + Args: + cert: A dictionary representing an SSL certificate. + Returns: + list: A list of valid host globs. + """ + if 'subjectAltName' in cert: + return [x[1] for x in cert['subjectAltName'] + if x[0].lower() == 'dns'] + else: + return [x[0][1] for x in cert['subject'] + if x[0][0].lower() == 'commonname'] + + def _ValidateCertificateHostname(self, cert, hostname): + """Validates that a given hostname is valid for an SSL certificate. + + Args: + cert: A dictionary representing an SSL certificate. + hostname: The hostname to test. + Returns: + bool: Whether or not the hostname is valid for this certificate. + """ + hosts = self._GetValidHostsForCert(cert) + for host in hosts: + host_re = host.replace('.', '\.').replace('*', '[^.]*') + if re.search('^%s$' % (host_re,), hostname, re.I): + return True + return False + + def connect(self): + "Connect to a host on a given (SSL) port." 
+ + msg = "getaddrinfo returns an empty list" + if self.proxy_info and self.proxy_info.isgood(): + use_proxy = True + proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass = self.proxy_info.astuple() + else: + use_proxy = False + if use_proxy and proxy_rdns: + host = proxy_host + port = proxy_port + else: + host = self.host + port = self.port + + for family, socktype, proto, canonname, sockaddr in socket.getaddrinfo( + host, port, 0, socket.SOCK_STREAM): + try: + if use_proxy: + sock = socks.socksocket(family, socktype, proto) + + sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass) + else: + sock = socket.socket(family, socktype, proto) + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + + if has_timeout(self.timeout): + sock.settimeout(self.timeout) + sock.connect((self.host, self.port)) + self.sock =_ssl_wrap_socket( + sock, self.key_file, self.cert_file, + self.disable_ssl_certificate_validation, self.ca_certs) + if self.debuglevel > 0: + print "connect: (%s, %s)" % (self.host, self.port) + if use_proxy: + print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)) + if not self.disable_ssl_certificate_validation: + cert = self.sock.getpeercert() + hostname = self.host.split(':', 0)[0] + if not self._ValidateCertificateHostname(cert, hostname): + raise CertificateHostnameMismatch( + 'Server presented certificate that does not match ' + 'host %s: %s' % (hostname, cert), hostname, cert) + except ssl_SSLError, e: + if sock: + sock.close() + if self.sock: + self.sock.close() + self.sock = None + # Unfortunately the ssl module doesn't seem to provide any way + # to get at more detailed error information, in particular + # whether the error is due to certificate validation or + # something else (such as SSL protocol mismatch). + if e.errno == ssl.SSL_ERROR_SSL: + raise SSLHandshakeError(e) + else: + raise + except (socket.timeout, socket.gaierror): + raise + except socket.error, msg: + if self.debuglevel > 0: + print "connect fail: (%s, %s)" % (self.host, self.port) + if use_proxy: + print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)) + if self.sock: + self.sock.close() + self.sock = None + continue + break + if not self.sock: + raise socket.error, msg + +SCHEME_TO_CONNECTION = { + 'http': HTTPConnectionWithTimeout, + 'https': HTTPSConnectionWithTimeout + } + +# Use a different connection object for Google App Engine +try: + from google.appengine.api import apiproxy_stub_map + if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None: + raise ImportError # Bail out; we're not actually running on App Engine. + from google.appengine.api.urlfetch import fetch + from google.appengine.api.urlfetch import InvalidURLError + from google.appengine.api.urlfetch import DownloadError + from google.appengine.api.urlfetch import ResponseTooLargeError + from google.appengine.api.urlfetch import SSLCertificateError + + + class ResponseDict(dict): + """Is a dictionary that also has a read() method, so + that it can pass itself off as an httlib.HTTPResponse().""" + def read(self): + pass + + + class AppEngineHttpConnection(object): + """Emulates an httplib.HTTPConnection object, but actually uses the Google + App Engine urlfetch library. This allows the timeout to be properly used on + Google App Engine, and avoids using httplib, which on Google App Engine is + just another wrapper around urlfetch. 
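+
+    Note that key_file, cert_file, proxy_info and ca_certs are not supported
+    here (the constructor raises NotSupportedOnThisPlatform if any is given),
+    and urlfetch errors are mapped back onto the socket and httplib
+    exceptions the rest of this module expects.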
+ """ + def __init__(self, host, port=None, key_file=None, cert_file=None, + strict=None, timeout=None, proxy_info=None, ca_certs=None, + disable_ssl_certificate_validation=False): + self.host = host + self.port = port + self.timeout = timeout + if key_file or cert_file or proxy_info or ca_certs: + raise NotSupportedOnThisPlatform() + self.response = None + self.scheme = 'http' + self.validate_certificate = not disable_ssl_certificate_validation + self.sock = True + + def request(self, method, url, body, headers): + # Calculate the absolute URI, which fetch requires + netloc = self.host + if self.port: + netloc = '%s:%s' % (self.host, self.port) + absolute_uri = '%s://%s%s' % (self.scheme, netloc, url) + try: + try: # 'body' can be a stream. + body = body.read() + except AttributeError: + pass + response = fetch(absolute_uri, payload=body, method=method, + headers=headers, allow_truncated=False, follow_redirects=False, + deadline=self.timeout, + validate_certificate=self.validate_certificate) + self.response = ResponseDict(response.headers) + self.response['status'] = str(response.status_code) + self.response['reason'] = httplib.responses.get(response.status_code, 'Ok') + self.response.status = response.status_code + setattr(self.response, 'read', lambda : response.content) + + # Make sure the exceptions raised match the exceptions expected. + except InvalidURLError: + raise socket.gaierror('') + except (DownloadError, ResponseTooLargeError, SSLCertificateError): + raise httplib.HTTPException() + + def getresponse(self): + if self.response: + return self.response + else: + raise httplib.HTTPException() + + def set_debuglevel(self, level): + pass + + def connect(self): + pass + + def close(self): + pass + + + class AppEngineHttpsConnection(AppEngineHttpConnection): + """Same as AppEngineHttpConnection, but for HTTPS URIs.""" + def __init__(self, host, port=None, key_file=None, cert_file=None, + strict=None, timeout=None, proxy_info=None, ca_certs=None, + disable_ssl_certificate_validation=False): + AppEngineHttpConnection.__init__(self, host, port, key_file, cert_file, + strict, timeout, proxy_info, ca_certs, disable_ssl_certificate_validation) + self.scheme = 'https' + + # Update the connection classes to use the Googel App Engine specific ones. + SCHEME_TO_CONNECTION = { + 'http': AppEngineHttpConnection, + 'https': AppEngineHttpsConnection + } + +except ImportError: + pass + + +class Http(object): + """An HTTP client that handles: +- all methods +- caching +- ETags +- compression, +- HTTPS +- Basic +- Digest +- WSSE + +and more. + """ + def __init__(self, cache=None, timeout=None, + proxy_info=ProxyInfo.from_environment, + ca_certs=None, disable_ssl_certificate_validation=False): + """If 'cache' is a string then it is used as a directory name for + a disk cache. Otherwise it must be an object that supports the + same interface as FileCache. + + All timeouts are in seconds. If None is passed for timeout + then Python's default timeout for sockets will be used. See + for example the docs of socket.setdefaulttimeout(): + http://docs.python.org/library/socket.html#socket.setdefaulttimeout + + `proxy_info` may be: + - a callable that takes the http scheme ('http' or 'https') and + returns a ProxyInfo instance per request. By default, uses + ProxyInfo.from_environment. + - a ProxyInfo instance (static proxy config). + - None (proxy disabled). + + ca_certs is the path of a file containing root CA certificates for SSL + server certificate validation. 
By default, a CA cert file bundled with + httplib2 is used. + + If disable_ssl_certificate_validation is true, SSL cert validation will + not be performed. + """ + self.proxy_info = proxy_info + self.ca_certs = ca_certs + self.disable_ssl_certificate_validation = \ + disable_ssl_certificate_validation + + # Map domain name to an httplib connection + self.connections = {} + # The location of the cache, for now a directory + # where cached responses are held. + if cache and isinstance(cache, basestring): + self.cache = FileCache(cache) + else: + self.cache = cache + + # Name/password + self.credentials = Credentials() + + # Key/cert + self.certificates = KeyCerts() + + # authorization objects + self.authorizations = [] + + # If set to False then no redirects are followed, even safe ones. + self.follow_redirects = True + + # Which HTTP methods do we apply optimistic concurrency to, i.e. + # which methods get an "if-match:" etag header added to them. + self.optimistic_concurrency_methods = ["PUT", "PATCH"] + + # If 'follow_redirects' is True, and this is set to True then + # all redirecs are followed, including unsafe ones. + self.follow_all_redirects = False + + self.ignore_etag = False + + self.force_exception_to_status_code = False + + self.timeout = timeout + + # Keep Authorization: headers on a redirect. + self.forward_authorization_headers = False + + def _auth_from_challenge(self, host, request_uri, headers, response, content): + """A generator that creates Authorization objects + that can be applied to requests. + """ + challenges = _parse_www_authenticate(response, 'www-authenticate') + for cred in self.credentials.iter(host): + for scheme in AUTH_SCHEME_ORDER: + if challenges.has_key(scheme): + yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content, self) + + def add_credentials(self, name, password, domain=""): + """Add a name and password that will be used + any time a request requires authentication.""" + self.credentials.add(name, password, domain) + + def add_certificate(self, key, cert, domain): + """Add a key and cert that will be used + any time a request requires authentication.""" + self.certificates.add(key, cert, domain) + + def clear_credentials(self): + """Remove all the names and passwords + that are used for authentication""" + self.credentials.clear() + self.authorizations = [] + + def _conn_request(self, conn, request_uri, method, body, headers): + for i in range(RETRIES): + try: + if conn.sock is None: + conn.connect() + conn.request(method, request_uri, body, headers) + except socket.timeout: + raise + except socket.gaierror: + conn.close() + raise ServerNotFoundError("Unable to find the server at %s" % conn.host) + except ssl_SSLError: + conn.close() + raise + except socket.error, e: + err = 0 + if hasattr(e, 'args'): + err = getattr(e, 'args')[0] + else: + err = e.errno + if err == errno.ECONNREFUSED: # Connection refused + raise + except httplib.HTTPException: + # Just because the server closed the connection doesn't apparently mean + # that the server didn't send a response. 
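+                # If the socket never came back up and this was the last
+                # attempt, give up and re-raise; otherwise reconnect and
+                # retry.  If the socket is still open on the final attempt,
+                # fall through and try to read a response anyway.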
+ if conn.sock is None: + if i < RETRIES-1: + conn.close() + conn.connect() + continue + else: + conn.close() + raise + if i < RETRIES-1: + conn.close() + conn.connect() + continue + try: + response = conn.getresponse() + except (socket.error, httplib.HTTPException): + if i < RETRIES-1: + conn.close() + conn.connect() + continue + else: + raise + else: + content = "" + if method == "HEAD": + conn.close() + else: + content = response.read() + response = Response(response) + if method != "HEAD": + content = _decompressContent(response, content) + break + return (response, content) + + + def _request(self, conn, host, absolute_uri, request_uri, method, body, headers, redirections, cachekey): + """Do the actual request using the connection object + and also follow one level of redirects if necessary""" + + auths = [(auth.depth(request_uri), auth) for auth in self.authorizations if auth.inscope(host, request_uri)] + auth = auths and sorted(auths)[0][1] or None + if auth: + auth.request(method, request_uri, headers, body) + + (response, content) = self._conn_request(conn, request_uri, method, body, headers) + + if auth: + if auth.response(response, body): + auth.request(method, request_uri, headers, body) + (response, content) = self._conn_request(conn, request_uri, method, body, headers ) + response._stale_digest = 1 + + if response.status == 401: + for authorization in self._auth_from_challenge(host, request_uri, headers, response, content): + authorization.request(method, request_uri, headers, body) + (response, content) = self._conn_request(conn, request_uri, method, body, headers, ) + if response.status != 401: + self.authorizations.append(authorization) + authorization.response(response, body) + break + + if (self.follow_all_redirects or (method in ["GET", "HEAD"]) or response.status == 303): + if self.follow_redirects and response.status in [300, 301, 302, 303, 307]: + # Pick out the location header and basically start from the beginning + # remembering first to strip the ETag header and decrement our 'depth' + if redirections: + if not response.has_key('location') and response.status != 300: + raise RedirectMissingLocation( _("Redirected but the response is missing a Location: header."), response, content) + # Fix-up relative redirects (which violate an RFC 2616 MUST) + if response.has_key('location'): + location = response['location'] + (scheme, authority, path, query, fragment) = parse_uri(location) + if authority == None: + response['location'] = urlparse.urljoin(absolute_uri, location) + if response.status == 301 and method in ["GET", "HEAD"]: + response['-x-permanent-redirect-url'] = response['location'] + if not response.has_key('content-location'): + response['content-location'] = absolute_uri + _updateCache(headers, response, content, self.cache, cachekey) + if headers.has_key('if-none-match'): + del headers['if-none-match'] + if headers.has_key('if-modified-since'): + del headers['if-modified-since'] + if 'authorization' in headers and not self.forward_authorization_headers: + del headers['authorization'] + if response.has_key('location'): + location = response['location'] + old_response = copy.deepcopy(response) + if not old_response.has_key('content-location'): + old_response['content-location'] = absolute_uri + redirect_method = method + if response.status in [302, 303]: + redirect_method = "GET" + body = None + (response, content) = self.request(location, redirect_method, body=body, headers = headers, redirections = redirections - 1) + response.previous = old_response + 
else: + raise RedirectLimit("Redirected more times than rediection_limit allows.", response, content) + elif response.status in [200, 203] and method in ["GET", "HEAD"]: + # Don't cache 206's since we aren't going to handle byte range requests + if not response.has_key('content-location'): + response['content-location'] = absolute_uri + _updateCache(headers, response, content, self.cache, cachekey) + + return (response, content) + + def _normalize_headers(self, headers): + return _normalize_headers(headers) + +# Need to catch and rebrand some exceptions +# Then need to optionally turn all exceptions into status codes +# including all socket.* and httplib.* exceptions. + + + def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAULT_MAX_REDIRECTS, connection_type=None): + """ Performs a single HTTP request. +The 'uri' is the URI of the HTTP resource and can begin +with either 'http' or 'https'. The value of 'uri' must be an absolute URI. + +The 'method' is the HTTP method to perform, such as GET, POST, DELETE, etc. +There is no restriction on the methods allowed. + +The 'body' is the entity body to be sent with the request. It is a string +object. + +Any extra headers that are to be sent with the request should be provided in the +'headers' dictionary. + +The maximum number of redirect to follow before raising an +exception is 'redirections. The default is 5. + +The return value is a tuple of (response, content), the first +being and instance of the 'Response' class, the second being +a string that contains the response entity body. + """ + try: + if headers is None: + headers = {} + else: + headers = self._normalize_headers(headers) + + if not headers.has_key('user-agent'): + headers['user-agent'] = "Python-httplib2/%s (gzip)" % __version__ + + uri = iri2uri(uri) + + (scheme, authority, request_uri, defrag_uri) = urlnorm(uri) + domain_port = authority.split(":")[0:2] + if len(domain_port) == 2 and domain_port[1] == '443' and scheme == 'http': + scheme = 'https' + authority = domain_port[0] + + proxy_info = self._get_proxy_info(scheme, authority) + + conn_key = scheme+":"+authority + if conn_key in self.connections: + conn = self.connections[conn_key] + else: + if not connection_type: + connection_type = SCHEME_TO_CONNECTION[scheme] + certs = list(self.certificates.iter(authority)) + if scheme == 'https': + if certs: + conn = self.connections[conn_key] = connection_type( + authority, key_file=certs[0][0], + cert_file=certs[0][1], timeout=self.timeout, + proxy_info=proxy_info, + ca_certs=self.ca_certs, + disable_ssl_certificate_validation= + self.disable_ssl_certificate_validation) + else: + conn = self.connections[conn_key] = connection_type( + authority, timeout=self.timeout, + proxy_info=proxy_info, + ca_certs=self.ca_certs, + disable_ssl_certificate_validation= + self.disable_ssl_certificate_validation) + else: + conn = self.connections[conn_key] = connection_type( + authority, timeout=self.timeout, + proxy_info=proxy_info) + conn.set_debuglevel(debuglevel) + + if 'range' not in headers and 'accept-encoding' not in headers: + headers['accept-encoding'] = 'gzip, deflate' + + info = email.Message.Message() + cached_value = None + if self.cache: + cachekey = defrag_uri + cached_value = self.cache.get(cachekey) + if cached_value: + # info = email.message_from_string(cached_value) + # + # Need to replace the line above with the kludge below + # to fix the non-existent bug not fixed in this + # bug report: 
http://mail.python.org/pipermail/python-bugs-list/2005-September/030289.html + try: + info, content = cached_value.split('\r\n\r\n', 1) + feedparser = email.FeedParser.FeedParser() + feedparser.feed(info) + info = feedparser.close() + feedparser._parse = None + except (IndexError, ValueError): + self.cache.delete(cachekey) + cachekey = None + cached_value = None + else: + cachekey = None + + if method in self.optimistic_concurrency_methods and self.cache and info.has_key('etag') and not self.ignore_etag and 'if-match' not in headers: + # http://www.w3.org/1999/04/Editing/ + headers['if-match'] = info['etag'] + + if method not in ["GET", "HEAD"] and self.cache and cachekey: + # RFC 2616 Section 13.10 + self.cache.delete(cachekey) + + # Check the vary header in the cache to see if this request + # matches what varies in the cache. + if method in ['GET', 'HEAD'] and 'vary' in info: + vary = info['vary'] + vary_headers = vary.lower().replace(' ', '').split(',') + for header in vary_headers: + key = '-varied-%s' % header + value = info[key] + if headers.get(header, None) != value: + cached_value = None + break + + if cached_value and method in ["GET", "HEAD"] and self.cache and 'range' not in headers: + if info.has_key('-x-permanent-redirect-url'): + # Should cached permanent redirects be counted in our redirection count? For now, yes. + if redirections <= 0: + raise RedirectLimit("Redirected more times than rediection_limit allows.", {}, "") + (response, new_content) = self.request(info['-x-permanent-redirect-url'], "GET", headers = headers, redirections = redirections - 1) + response.previous = Response(info) + response.previous.fromcache = True + else: + # Determine our course of action: + # Is the cached entry fresh or stale? + # Has the client requested a non-cached response? + # + # There seems to be three possible answers: + # 1. [FRESH] Return the cache entry w/o doing a GET + # 2. [STALE] Do the GET (but add in cache validators if available) + # 3. [TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request + entry_disposition = _entry_disposition(info, headers) + + if entry_disposition == "FRESH": + if not cached_value: + info['status'] = '504' + content = "" + response = Response(info) + if cached_value: + response.fromcache = True + return (response, content) + + if entry_disposition == "STALE": + if info.has_key('etag') and not self.ignore_etag and not 'if-none-match' in headers: + headers['if-none-match'] = info['etag'] + if info.has_key('last-modified') and not 'last-modified' in headers: + headers['if-modified-since'] = info['last-modified'] + elif entry_disposition == "TRANSPARENT": + pass + + (response, new_content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey) + + if response.status == 304 and method == "GET": + # Rewrite the cache entry with the new end-to-end headers + # Take all headers that are in response + # and overwrite their values in info. + # unless they are hop-by-hop, or are listed in the connection header. 
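+                        # ("End-to-end" is RFC 2616 section 13.5.1: every
+                        # header except Connection, Keep-Alive,
+                        # Proxy-Authenticate, Proxy-Authorization, TE,
+                        # Trailers, Transfer-Encoding and Upgrade.)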
+ + for key in _get_end2end_headers(response): + info[key] = response[key] + merged_response = Response(info) + if hasattr(response, "_stale_digest"): + merged_response._stale_digest = response._stale_digest + _updateCache(headers, merged_response, content, self.cache, cachekey) + response = merged_response + response.status = 200 + response.fromcache = True + + elif response.status == 200: + content = new_content + else: + self.cache.delete(cachekey) + content = new_content + else: + cc = _parse_cache_control(headers) + if cc.has_key('only-if-cached'): + info['status'] = '504' + response = Response(info) + content = "" + else: + (response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey) + except Exception, e: + if self.force_exception_to_status_code: + if isinstance(e, HttpLib2ErrorWithResponse): + response = e.response + content = e.content + response.status = 500 + response.reason = str(e) + elif isinstance(e, socket.timeout): + content = "Request Timeout" + response = Response( { + "content-type": "text/plain", + "status": "408", + "content-length": len(content) + }) + response.reason = "Request Timeout" + else: + content = str(e) + response = Response( { + "content-type": "text/plain", + "status": "400", + "content-length": len(content) + }) + response.reason = "Bad Request" + else: + raise + + + return (response, content) + + def _get_proxy_info(self, scheme, authority): + """Return a ProxyInfo instance (or None) based on the scheme + and authority. + """ + hostname, port = urllib.splitport(authority) + proxy_info = self.proxy_info + if callable(proxy_info): + proxy_info = proxy_info(scheme) + + if (hasattr(proxy_info, 'applies_to') + and not proxy_info.applies_to(hostname)): + proxy_info = None + return proxy_info + + +class Response(dict): + """An object more like email.Message than httplib.HTTPResponse.""" + + """Is this response from our local cache""" + fromcache = False + + """HTTP protocol version used by server. 10 for HTTP/1.0, 11 for HTTP/1.1. """ + version = 11 + + "Status code returned by server. " + status = 200 + + """Reason phrase returned by server.""" + reason = "Ok" + + previous = None + + def __init__(self, info): + # info is either an email.Message or + # an httplib.HTTPResponse object. + if isinstance(info, httplib.HTTPResponse): + for key, value in info.getheaders(): + self[key.lower()] = value + self.status = info.status + self['status'] = str(self.status) + self.reason = info.reason + self.version = info.version + elif isinstance(info, email.Message.Message): + for key, value in info.items(): + self[key.lower()] = value + self.status = int(self['status']) + else: + for key, value in info.iteritems(): + self[key.lower()] = value + self.status = int(self.get('status', self.status)) + self.reason = self.get('reason', self.reason) + + + def __getattr__(self, name): + if name == 'dict': + return self + else: + raise AttributeError, name diff --git a/lib/httplib2/cacerts.txt b/lib/httplib2/cacerts.txt new file mode 100644 index 0000000..da36ed1 --- /dev/null +++ b/lib/httplib2/cacerts.txt @@ -0,0 +1,714 @@ +# Certifcate Authority certificates for validating SSL connections. 
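+# (This is the bundle used for SSL validation by default; a different file
+# can be supplied through the ca_certs argument of httplib2.Http.)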
+# +# This file contains PEM format certificates generated from +# http://mxr.mozilla.org/seamonkey/source/security/nss/lib/ckfw/builtins/certdata.txt +# +# ***** BEGIN LICENSE BLOCK ***** +# Version: MPL 1.1/GPL 2.0/LGPL 2.1 +# +# The contents of this file are subject to the Mozilla Public License Version +# 1.1 (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# http://www.mozilla.org/MPL/ +# +# Software distributed under the License is distributed on an "AS IS" basis, +# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License +# for the specific language governing rights and limitations under the +# License. +# +# The Original Code is the Netscape security libraries. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1994-2000 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# +# Alternatively, the contents of this file may be used under the terms of +# either the GNU General Public License Version 2 or later (the "GPL"), or +# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), +# in which case the provisions of the GPL or the LGPL are applicable instead +# of those above. If you wish to allow use of your version of this file only +# under the terms of either the GPL or the LGPL, and not to allow others to +# use your version of this file under the terms of the MPL, indicate your +# decision by deleting the provisions above and replace them with the notice +# and other provisions required by the GPL or the LGPL. If you do not delete +# the provisions above, a recipient may use your version of this file under +# the terms of any one of the MPL, the GPL or the LGPL. 
+# +# ***** END LICENSE BLOCK ***** + +Verisign/RSA Secure Server CA +============================= + +-----BEGIN CERTIFICATE----- +MIICNDCCAaECEAKtZn5ORf5eV288mBle3cAwDQYJKoZIhvcNAQECBQAwXzELMAkG +A1UEBhMCVVMxIDAeBgNVBAoTF1JTQSBEYXRhIFNlY3VyaXR5LCBJbmMuMS4wLAYD +VQQLEyVTZWN1cmUgU2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk0 +MTEwOTAwMDAwMFoXDTEwMDEwNzIzNTk1OVowXzELMAkGA1UEBhMCVVMxIDAeBgNV +BAoTF1JTQSBEYXRhIFNlY3VyaXR5LCBJbmMuMS4wLAYDVQQLEyVTZWN1cmUgU2Vy +dmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGbMA0GCSqGSIb3DQEBAQUAA4GJ +ADCBhQJ+AJLOesGugz5aqomDV6wlAXYMra6OLDfO6zV4ZFQD5YRAUcm/jwjiioII +0haGN1XpsSECrXZogZoFokvJSyVmIlZsiAeP94FZbYQHZXATcXY+m3dM41CJVphI +uR2nKRoTLkoRWZweFdVJVCxzOmmCsZc5nG1wZ0jl3S3WyB57AgMBAAEwDQYJKoZI +hvcNAQECBQADfgBl3X7hsuyw4jrg7HFGmhkRuNPHoLQDQCYCPgmc4RKz0Vr2N6W3 +YQO2WxZpO8ZECAyIUwxrl0nHPjXcbLm7qt9cuzovk2C2qUtN8iD3zV9/ZHuO3ABc +1/p3yjkWWW8O6tO1g39NTUJWdrTJXwT4OPjr0l91X817/OWOgHz8UA== +-----END CERTIFICATE----- + +Thawte Personal Basic CA +======================== + +-----BEGIN CERTIFICATE----- +MIIDITCCAoqgAwIBAgIBADANBgkqhkiG9w0BAQQFADCByzELMAkGA1UEBhMCWkEx +FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMRowGAYD +VQQKExFUaGF3dGUgQ29uc3VsdGluZzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBT +ZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFBlcnNvbmFsIEJhc2lj +IENBMSgwJgYJKoZIhvcNAQkBFhlwZXJzb25hbC1iYXNpY0B0aGF3dGUuY29tMB4X +DTk2MDEwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgcsxCzAJBgNVBAYTAlpBMRUw +EwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEaMBgGA1UE +ChMRVGhhd3RlIENvbnN1bHRpbmcxKDAmBgNVBAsTH0NlcnRpZmljYXRpb24gU2Vy +dmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQZXJzb25hbCBCYXNpYyBD +QTEoMCYGCSqGSIb3DQEJARYZcGVyc29uYWwtYmFzaWNAdGhhd3RlLmNvbTCBnzAN +BgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAvLyTU23AUE+CFeZIlDWmWr5vQvoPR+53 +dXLdjUmbllegeNTKP1GzaQuRdhciB5dqxFGTS+CN7zeVoQxN2jSQHReJl+A1OFdK +wPQIcOk8RHtQfmGakOMj04gRRif1CwcOu93RfyAKiLlWCy4cgNrx454p7xS9CkT7 +G1sY0b8jkyECAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQQF +AAOBgQAt4plrsD16iddZopQBHyvdEktTwq1/qqcAXJFAVyVKOKqEcLnZgA+le1z7 +c8a914phXAPjLSeoF+CEhULcXpvGt7Jtu3Sv5D/Lp7ew4F2+eIMllNLbgQ95B21P +9DkVWlIBe94y1k049hJcBlDfBVu9FEuh3ym6O0GN92NWod8isQ== +-----END CERTIFICATE----- + +Thawte Personal Premium CA +========================== + +-----BEGIN CERTIFICATE----- +MIIDKTCCApKgAwIBAgIBADANBgkqhkiG9w0BAQQFADCBzzELMAkGA1UEBhMCWkEx +FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMRowGAYD +VQQKExFUaGF3dGUgQ29uc3VsdGluZzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBT +ZXJ2aWNlcyBEaXZpc2lvbjEjMCEGA1UEAxMaVGhhd3RlIFBlcnNvbmFsIFByZW1p +dW0gQ0ExKjAoBgkqhkiG9w0BCQEWG3BlcnNvbmFsLXByZW1pdW1AdGhhd3RlLmNv +bTAeFw05NjAxMDEwMDAwMDBaFw0yMDEyMzEyMzU5NTlaMIHPMQswCQYDVQQGEwJa +QTEVMBMGA1UECBMMV2VzdGVybiBDYXBlMRIwEAYDVQQHEwlDYXBlIFRvd24xGjAY +BgNVBAoTEVRoYXd0ZSBDb25zdWx0aW5nMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9u +IFNlcnZpY2VzIERpdmlzaW9uMSMwIQYDVQQDExpUaGF3dGUgUGVyc29uYWwgUHJl +bWl1bSBDQTEqMCgGCSqGSIb3DQEJARYbcGVyc29uYWwtcHJlbWl1bUB0aGF3dGUu +Y29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDJZtn4B0TPuYwu8KHvE0Vs +Bd/eJxZRNkERbGw77f4QfRKe5ZtCmv5gMcNmt3M6SK5O0DI3lIi1DbbZ8/JE2dWI +Et12TfIa/G8jHnrx2JhFTgcQ7xZC0EN1bUre4qrJMf8fAHB8Zs8QJQi6+u4A6UYD +ZicRFTuqW/KY3TZCstqIdQIDAQABoxMwETAPBgNVHRMBAf8EBTADAQH/MA0GCSqG +SIb3DQEBBAUAA4GBAGk2ifc0KjNyL2071CKyuG+axTZmDhs8obF1Wub9NdP4qPIH +b4Vnjt4rueIXsDqg8A6iAJrf8xQVbrvIhVqYgPn/vnQdPfP+MCXRNzRn+qVxeTBh +KXLA4CxM+1bkOqhv5TJZUtt1KFBZDPgLGeSs2a+WjS9Q2wfD6h+rM+D1KzGJ +-----END CERTIFICATE----- + +Thawte Personal Freemail CA +=========================== + +-----BEGIN CERTIFICATE----- +MIIDLTCCApagAwIBAgIBADANBgkqhkiG9w0BAQQFADCB0TELMAkGA1UEBhMCWkEx 
+FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMRowGAYD +VQQKExFUaGF3dGUgQ29uc3VsdGluZzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBT +ZXJ2aWNlcyBEaXZpc2lvbjEkMCIGA1UEAxMbVGhhd3RlIFBlcnNvbmFsIEZyZWVt +YWlsIENBMSswKQYJKoZIhvcNAQkBFhxwZXJzb25hbC1mcmVlbWFpbEB0aGF3dGUu +Y29tMB4XDTk2MDEwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgdExCzAJBgNVBAYT +AlpBMRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEa +MBgGA1UEChMRVGhhd3RlIENvbnN1bHRpbmcxKDAmBgNVBAsTH0NlcnRpZmljYXRp +b24gU2VydmljZXMgRGl2aXNpb24xJDAiBgNVBAMTG1RoYXd0ZSBQZXJzb25hbCBG +cmVlbWFpbCBDQTErMCkGCSqGSIb3DQEJARYccGVyc29uYWwtZnJlZW1haWxAdGhh +d3RlLmNvbTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA1GnX1LCUZFtx6UfY +DFG26nKRsIRefS0Nj3sS34UldSh0OkIsYyeflXtL734Zhx2G6qPduc6WZBrCFG5E +rHzmj+hND3EfQDimAKOHePb5lIZererAXnbr2RSjXW56fAylS1V/Bhkpf56aJtVq +uzgkCGqYx7Hao5iR/Xnb5VrEHLkCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zAN +BgkqhkiG9w0BAQQFAAOBgQDH7JJ+Tvj1lqVnYiqk8E0RYNBvjWBYYawmu1I1XAjP +MPuoSpaKH2JCI4wXD/S6ZJwXrEcp352YXtJsYHFcoqzceePnbgBHH7UNKOgCneSa +/RP0ptl8sfjcXyMmCZGAc9AUG95DqYMl8uacLxXK/qarigd1iwzdUYRr5PjRznei +gQ== +-----END CERTIFICATE----- + +Thawte Server CA +================ + +-----BEGIN CERTIFICATE----- +MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkEx +FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD +VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv +biBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEm +MCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wHhcNOTYwODAx +MDAwMDAwWhcNMjAxMjMxMjM1OTU5WjCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgT +DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3 +dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNl +cyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3 +DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQAD +gY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl/Kj0R1HahbUgdJSGHg91 +yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF/rFrKbYvScg71CcEJRCX +L+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982OsK1ZiIS1ocNAgMBAAGj +EzARMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEEBQADgYEAB/pMaVz7lcxG +7oWDTSEwjsrZqG9JGubaUeNgcGyEYRGhGshIPllDfU+VPaGLtwtimHp1it2ITk6e +QNuozDJ0uW8NxuOzRAvZim+aKZuZGCg70eNAKJpaPNW15yAbi8qkq43pUdniTCxZ +qdq5snUb9kLy78fyGPmJvKP/iiMucEc= +-----END CERTIFICATE----- + +Thawte Premium Server CA +======================== + +-----BEGIN CERTIFICATE----- +MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkEx +FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD +VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv +biBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFByZW1pdW0gU2Vy +dmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZlckB0aGF3dGUuY29t +MB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYTAlpB +MRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsG +A1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRp +b24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNl +cnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNv +bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2aovXwlue2oFBYo847kkE +VdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIhUdib0GfQ +ug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMR +uHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG +9w0BAQQFAAOBgQAmSCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUI +hfzJATj/Tb7yFkJD57taRvvBxhEf8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JM +pAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg== +-----END CERTIFICATE----- + +Equifax Secure CA +================= + 
+-----BEGIN CERTIFICATE----- +MIIDIDCCAomgAwIBAgIENd70zzANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV +UzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2Vy +dGlmaWNhdGUgQXV0aG9yaXR5MB4XDTk4MDgyMjE2NDE1MVoXDTE4MDgyMjE2NDE1 +MVowTjELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0VxdWlmYXgxLTArBgNVBAsTJEVx +dWlmYXggU2VjdXJlIENlcnRpZmljYXRlIEF1dGhvcml0eTCBnzANBgkqhkiG9w0B +AQEFAAOBjQAwgYkCgYEAwV2xWGcIYu6gmi0fCG2RFGiYCh7+2gRvE4RiIcPRfM6f +BeC4AfBONOziipUEZKzxa1NfBbPLZ4C/QgKO/t0BCezhABRP/PvwDN1Dulsr4R+A +cJkVV5MW8Q+XarfCaCMczE1ZMKxRHjuvK9buY0V7xdlfUNLjUA86iOe/FP3gx7kC +AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEQ +MA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2VydGlm +aWNhdGUgQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTgw +ODIyMTY0MTUxWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUSOZo+SvSspXXR9gj +IBBPM5iQn9QwHQYDVR0OBBYEFEjmaPkr0rKV10fYIyAQTzOYkJ/UMAwGA1UdEwQF +MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA +A4GBAFjOKer89961zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y +7qj/WsjTVbJmcVfewCHrPSqnI0kBBIZCe/zuf6IWUrVnZ9NA2zsmWLIodz2uFHdh +1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4 +-----END CERTIFICATE----- + +Verisign Class 1 Public Primary Certification Authority +======================================================= + +-----BEGIN CERTIFICATE----- +MIICPTCCAaYCEQDNun9W8N/kvFT+IqyzcqpVMA0GCSqGSIb3DQEBAgUAMF8xCzAJ +BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE3MDUGA1UECxMuQ2xh +c3MgMSBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05 +NjAxMjkwMDAwMDBaFw0yODA4MDEyMzU5NTlaMF8xCzAJBgNVBAYTAlVTMRcwFQYD +VQQKEw5WZXJpU2lnbiwgSW5jLjE3MDUGA1UECxMuQ2xhc3MgMSBQdWJsaWMgUHJp +bWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCBnzANBgkqhkiG9w0BAQEFAAOB +jQAwgYkCgYEA5Rm/baNWYS2ZSHH2Z965jeu3noaACpEO+jglr0aIguVzqKCbJF0N +H8xlbgyw0FaEGIeaBpsQoXPftFg5a27B9hXVqKg/qhIGjTGsf7A01480Z4gJzRQR +4k5FVmkfeAKA2txHkSm7NsljXMXg1y2He6G3MrB7MLoqLzGq7qNn2tsCAwEAATAN +BgkqhkiG9w0BAQIFAAOBgQBMP7iLxmjf7kMzDl3ppssHhE16M/+SG/Q2rdiVIjZo +EWx8QszznC7EBz8UsA9P/5CSdvnivErpj82ggAr3xSnxgiJduLHdgSOjeyUVRjB5 +FvjqBUuUfx3CHMjjt/QQQDwTw18fU+hI5Ia0e6E1sHslurjTjqs/OJ0ANACY89Fx +lA== +-----END CERTIFICATE----- + +Verisign Class 2 Public Primary Certification Authority +======================================================= + +-----BEGIN CERTIFICATE----- +MIICPDCCAaUCEC0b/EoXjaOR6+f/9YtFvgswDQYJKoZIhvcNAQECBQAwXzELMAkG +A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz +cyAyIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2 +MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV +BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAyIFB1YmxpYyBQcmlt +YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN +ADCBiQKBgQC2WoujDWojg4BrzzmH9CETMwZMJaLtVRKXxaeAufqDwSCg+i8VDXyh +YGt+eSz6Bg86rvYbb7HS/y8oUl+DfUvEerf4Zh+AVPy3wo5ZShRXRtGak75BkQO7 +FYCTXOvnzAhsPz6zSvz/S2wj1VCCJkQZjiPDceoZJEcEnnW/yKYAHwIDAQABMA0G +CSqGSIb3DQEBAgUAA4GBAIobK/o5wXTXXtgZZKJYSi034DNHD6zt96rbHuSLBlxg +J8pFUs4W7z8GZOeUaHxgMxURaa+dYo2jA1Rrpr7l7gUYYAS/QoD90KioHgE796Nc +r6Pc5iaAIzy4RHT3Cq5Ji2F4zCS/iIqnDupzGUH9TQPwiNHleI2lKk/2lw0Xd8rY +-----END CERTIFICATE----- + +Verisign Class 3 Public Primary Certification Authority +======================================================= + +-----BEGIN CERTIFICATE----- +MIICPDCCAaUCEHC65B0Q2Sk0tjjKewPMur8wDQYJKoZIhvcNAQECBQAwXzELMAkG +A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz +cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2 +MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV 
+BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt +YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN +ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE +BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is +I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G +CSqGSIb3DQEBAgUAA4GBALtMEivPLCYATxQT3ab7/AoRhIzzKBxnki98tsX63/Do +lbwdj2wsqFHMc9ikwFPwTtYmwHYBV4GSXiHx0bH/59AhWM1pF+NEHJwZRDmJXNyc +AA9WjQKZ7aKQRUzkuxCkPfAyAw7xzvjoyVGM5mKf5p/AfbdynMk2OmufTqj/ZA1k +-----END CERTIFICATE----- + +Verisign Class 1 Public Primary Certification Authority - G2 +============================================================ + +-----BEGIN CERTIFICATE----- +MIIDAjCCAmsCEEzH6qqYPnHTkxD4PTqJkZIwDQYJKoZIhvcNAQEFBQAwgcExCzAJ +BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh +c3MgMSBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy +MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp +emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X +DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw +FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMSBQdWJsaWMg +UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo +YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5 +MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB +AQUAA4GNADCBiQKBgQCq0Lq+Fi24g9TK0g+8djHKlNgdk4xWArzZbxpvUjZudVYK +VdPfQ4chEWWKfo+9Id5rMj8bhDSVBZ1BNeuS65bdqlk/AVNtmU/t5eIqWpDBucSm +Fc/IReumXY6cPvBkJHalzasab7bYe1FhbqZ/h8jit+U03EGI6glAvnOSPWvndQID +AQABMA0GCSqGSIb3DQEBBQUAA4GBAKlPww3HZ74sy9mozS11534Vnjty637rXC0J +h9ZrbWB85a7FkCMMXErQr7Fd88e2CtvgFZMN3QO8x3aKtd1Pw5sTdbgBwObJW2ul +uIncrKTdcu1OofdPvAbT6shkdHvClUGcZXNY8ZCaPGqxmMnEh7zPRW1F4m4iP/68 +DzFc6PLZ +-----END CERTIFICATE----- + +Verisign Class 2 Public Primary Certification Authority - G2 +============================================================ + +-----BEGIN CERTIFICATE----- +MIIDAzCCAmwCEQC5L2DMiJ+hekYJuFtwbIqvMA0GCSqGSIb3DQEBBQUAMIHBMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xPDA6BgNVBAsTM0Ns +YXNzIDIgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH +MjE6MDgGA1UECxMxKGMpIDE5OTggVmVyaVNpZ24sIEluYy4gLSBGb3IgYXV0aG9y +aXplZCB1c2Ugb25seTEfMB0GA1UECxMWVmVyaVNpZ24gVHJ1c3QgTmV0d29yazAe +Fw05ODA1MTgwMDAwMDBaFw0yODA4MDEyMzU5NTlaMIHBMQswCQYDVQQGEwJVUzEX +MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xPDA6BgNVBAsTM0NsYXNzIDIgUHVibGlj +IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBHMjE6MDgGA1UECxMx +KGMpIDE5OTggVmVyaVNpZ24sIEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s +eTEfMB0GA1UECxMWVmVyaVNpZ24gVHJ1c3QgTmV0d29yazCBnzANBgkqhkiG9w0B +AQEFAAOBjQAwgYkCgYEAp4gBIXQs5xoD8JjhlzwPIQjxnNuX6Zr8wgQGE75fUsjM +HiwSViy4AWkszJkfrbCWrnkE8hM5wXuYuggs6MKEEyyqaekJ9MepAqRCwiNPStjw +DqL7MWzJ5m+ZJwf15vRMeJ5t60aG+rmGyVTyssSv1EYcWskVMP8NbPUtDm3Of3cC +AwEAATANBgkqhkiG9w0BAQUFAAOBgQByLvl/0fFx+8Se9sVeUYpAmLho+Jscg9ji +nb3/7aHmZuovCfTK1+qlK5X2JGCGTUQug6XELaDTrnhpb3LabK4I8GOSN+a7xDAX +rXfMSTWqz9iP0b63GJZHc2pUIjRkLbYWm1lbtFFZOrMLFPQS32eg9K0yZF6xRnIn +jBJ7xUS0rg== +-----END CERTIFICATE----- + +Verisign Class 3 Public Primary Certification Authority - G2 +============================================================ + +-----BEGIN CERTIFICATE----- +MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJ +BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh +c3MgMyBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy +MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp +emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X 
+DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw +FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMg +UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo +YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5 +MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB +AQUAA4GNADCBiQKBgQDMXtERXVxp0KvTuWpMmR9ZmDCOFoUgRm1HP9SFIIThbbP4 +pO0M8RcPO/mn+SXXwc+EY/J8Y8+iR/LGWzOOZEAEaMGAuWQcRXfH2G71lSk8UOg0 +13gfqLptQ5GVj0VXXn7F+8qkBOvqlzdUMG+7AUcyM83cV5tkaWH4mx0ciU9cZwID +AQABMA0GCSqGSIb3DQEBBQUAA4GBAFFNzb5cy5gZnBWyATl4Lk0PZ3BwmcYQWpSk +U01UbSuvDV1Ai2TT1+7eVmGSX6bEHRBhNtMsJzzoKQm5EWR0zLVznxxIqbxhAe7i +F6YM40AIOw7n60RzKprxaZLvcRTDOaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpY +oJ2daZH9 +-----END CERTIFICATE----- + +Verisign Class 4 Public Primary Certification Authority - G2 +============================================================ + +-----BEGIN CERTIFICATE----- +MIIDAjCCAmsCEDKIjprS9esTR/h/xCA3JfgwDQYJKoZIhvcNAQEFBQAwgcExCzAJ +BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh +c3MgNCBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy +MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp +emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X +DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw +FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgNCBQdWJsaWMg +UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo +YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5 +MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB +AQUAA4GNADCBiQKBgQC68OTP+cSuhVS5B1f5j8V/aBH4xBewRNzjMHPVKmIquNDM +HO0oW369atyzkSTKQWI8/AIBvxwWMZQFl3Zuoq29YRdsTjCG8FE3KlDHqGKB3FtK +qsGgtG7rL+VXxbErQHDbWk2hjh+9Ax/YA9SPTJlxvOKCzFjomDqG04Y48wApHwID +AQABMA0GCSqGSIb3DQEBBQUAA4GBAIWMEsGnuVAVess+rLhDityq3RS6iYF+ATwj +cSGIL4LcY/oCRaxFWdcqWERbt5+BO5JoPeI3JPV7bI92NZYJqFmduc4jq3TWg/0y +cyfYaT5DdPauxYma51N86Xv2S/PBZYPejYqcPIiNOVn8qj8ijaHBZlCBckztImRP +T8qAkbYp +-----END CERTIFICATE----- + +Verisign Class 1 Public Primary Certification Authority - G3 +============================================================ + +-----BEGIN CERTIFICATE----- +MIIEGjCCAwICEQCLW3VWhFSFCwDPrzhIzrGkMA0GCSqGSIb3DQEBBQUAMIHKMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl +cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu +LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT +aWduIENsYXNzIDEgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp +dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD +VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT +aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ +bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu +IENsYXNzIDEgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg +LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAN2E1Lm0+afY8wR4 +nN493GwTFtl63SRRZsDHJlkNrAYIwpTRMx/wgzUfbhvI3qpuFU5UJ+/EbRrsC+MO +8ESlV8dAWB6jRx9x7GD2bZTIGDnt/kIYVt/kTEkQeE4BdjVjEjbdZrwBBDajVWjV +ojYJrKshJlQGrT/KFOCsyq0GHZXi+J3x4GD/wn91K0zM2v6HmSHquv4+VNfSWXjb +PG7PoBMAGrgnoeS+Z5bKoMWznN3JdZ7rMJpfo83ZrngZPyPpXNspva1VyBtUjGP2 +6KbqxzcSXKMpHgLZ2x87tNcPVkeBFQRKr4Mn0cVYiMHd9qqnoxjaaKptEVHhv2Vr +n5Z20T0CAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAq2aN17O6x5q25lXQBfGfMY1a +qtmqRiYPce2lrVNWYgFHKkTp/j90CxObufRNG7LRX7K20ohcs5/Ny9Sn2WCVhDr4 +wTcdYcrnsMXlkdpUpqwxga6X3s0IrLjAl4B/bnKk52kTlWUfxJM8/XmPBNQ+T+r3 +ns7NZ3xPZQL/kYVUc8f/NveGLezQXk//EZ9yBta4GvFMDSZl4kSAHsef493oCtrs 
+pSCAaWihT37ha88HQfqDjrw43bAuEbFrskLMmrz5SCJ5ShkPshw+IHTZasO+8ih4 +E1Z5T21Q6huwtVexN2ZYI/PcD98Kh8TvhgXVOBRgmaNL3gaWcSzy27YfpO8/7g== +-----END CERTIFICATE----- + +Verisign Class 2 Public Primary Certification Authority - G3 +============================================================ + +-----BEGIN CERTIFICATE----- +MIIEGTCCAwECEGFwy0mMX5hFKeewptlQW3owDQYJKoZIhvcNAQEFBQAwgcoxCzAJ +BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjEfMB0GA1UECxMWVmVy +aVNpZ24gVHJ1c3QgTmV0d29yazE6MDgGA1UECxMxKGMpIDE5OTkgVmVyaVNpZ24s +IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTFFMEMGA1UEAxM8VmVyaVNp +Z24gQ2xhc3MgMiBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0 +eSAtIEczMB4XDTk5MTAwMTAwMDAwMFoXDTM2MDcxNjIzNTk1OVowgcoxCzAJBgNV +BAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjEfMB0GA1UECxMWVmVyaVNp +Z24gVHJ1c3QgTmV0d29yazE6MDgGA1UECxMxKGMpIDE5OTkgVmVyaVNpZ24sIElu +Yy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTFFMEMGA1UEAxM8VmVyaVNpZ24g +Q2xhc3MgMiBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAt +IEczMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArwoNwtUs22e5LeWU +J92lvuCwTY+zYVY81nzD9M0+hsuiiOLh2KRpxbXiv8GmR1BeRjmL1Za6tW8UvxDO +JxOeBUebMXoT2B/Z0wI3i60sR/COgQanDTAM6/c8DyAd3HJG7qUCyFvDyVZpTMUY +wZF7C9UTAJu878NIPkZgIIUq1ZC2zYugzDLdt/1AVbJQHFauzI13TccgTacxdu9o +koqQHgiBVrKtaaNS0MscxCM9H5n+TOgWY47GCI72MfbS+uV23bUckqNJzc0BzWjN +qWm6o+sdDZykIKbBoMXRRkwXbdKsZj+WjOCE1Db/IlnF+RFgqF8EffIa9iVCYQ/E +Srg+iQIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQA0JhU8wI1NQ0kdvekhktdmnLfe +xbjQ5F1fdiLAJvmEOjr5jLX77GDx6M4EsMjdpwOPMPOY36TmpDHf0xwLRtxyID+u +7gU8pDM/CzmscHhzS5kr3zDCVLCoO1Wh/hYozUK9dG6A2ydEp85EXdQbkJgNHkKU +sQAsBNB0owIFImNjzYO1+8FtYmtpdf1dcEG59b98377BMnMiIYtYgXsVkXq642RI +sH/7NiXaldDxJBQX3RiAa0YjOVT1jmIJBB2UkKab5iXiQkWquJCtvgiPqQtCGJTP +cjnhsUPgKM+351psE2tJs//jGHyJizNdrDPXp/naOlXJWBD5qu9ats9LS98q +-----END CERTIFICATE----- + +Verisign Class 3 Public Primary Certification Authority - G3 +============================================================ + +-----BEGIN CERTIFICATE----- +MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl +cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu +LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT +aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp +dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD +VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT +aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ +bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu +IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg +LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b +N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t +KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu +kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm +CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ +Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu +imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te +2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe +DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC +/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p +F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt +TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ== +-----END CERTIFICATE----- + +Verisign Class 4 Public Primary Certification Authority - G3 +============================================================ + 
+-----BEGIN CERTIFICATE----- +MIIEGjCCAwICEQDsoKeLbnVqAc/EfMwvlF7XMA0GCSqGSIb3DQEBBQUAMIHKMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl +cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu +LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT +aWduIENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp +dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD +VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT +aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ +bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu +IENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg +LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAK3LpRFpxlmr8Y+1 +GQ9Wzsy1HyDkniYlS+BzZYlZ3tCD5PUPtbut8XzoIfzk6AzufEUiGXaStBO3IFsJ ++mGuqPKljYXCKtbeZjbSmwL0qJJgfJxptI8kHtCGUvYynEFYHiK9zUVilQhu0Gbd +U6LM8BDcVHOLBKFGMzNcF0C5nk3T875Vg+ixiY5afJqWIpA7iCXy0lOIAgwLePLm +NxdLMEYH5IBtptiWLugs+BGzOA1mppvqySNb247i8xOOGlktqgLw7KSHZtzBP/XY +ufTsgsbSPZUd5cBPhMnZo0QoBmrXRazwa2rvTl/4EYIeOGM0ZlDUPpNz+jDDZq3/ +ky2X7wMCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAj/ola09b5KROJ1WrIhVZPMq1 +CtRK26vdoV9TxaBXOcLORyu+OshWv8LZJxA6sQU8wHcxuzrTBXttmhwwjIDLk5Mq +g6sFUYICABFna/OIYUdfA5PVWw3g8dShMjWFsjrbsIKr0csKvE+MW8VLADsfKoKm +fjaF3H48ZwC15DtS4KjrXRX5xm3wrR0OhbepmnMUWluPQSjA1egtTaRezarZ7c7c +2NU8Qh0XwRJdRTjDOPP8hS6DRkiy1yBfkjaP53kPmF6Z6PDQpLv1U70qzlmwr25/ +bLvSHgCwIe34QWKCudiyxLtGUPMxxY8BqHTr9Xgn2uf3ZkPznoM+IKrDNWCRzg== +-----END CERTIFICATE----- + +Equifax Secure Global eBusiness CA +================================== + +-----BEGIN CERTIFICATE----- +MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEc +MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBT +ZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIw +MDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0VxdWlmYXggU2Vj +dXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEdsb2JhbCBlQnVzaW5l +c3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRVPEnC +UdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc +58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/ +o5brhTMhHD4ePmBudpxnhcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAH +MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1dr +aGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUA +A4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkA +Z70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv +8qIYNMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV +-----END CERTIFICATE----- + +Equifax Secure eBusiness CA 1 +============================= + +-----BEGIN CERTIFICATE----- +MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEc +MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBT +ZWN1cmUgZUJ1c2luZXNzIENBLTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQw +MDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5j +LjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENBLTEwgZ8wDQYJ +KoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ1MRo +RvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBu +WqDZQu4aIZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKw +Env+j6YDAgMBAAGjZjBkMBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTAD +AQH/MB8GA1UdIwQYMBaAFEp4MlIR21kWNl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRK +eDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQFAAOBgQB1W6ibAxHm6VZM +zfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5lSE/9dR+ +WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN +/Bf+KpYrtWKmpj29f5JZzVoqgrI3eQ== +-----END 
CERTIFICATE----- + +Equifax Secure eBusiness CA 2 +============================= + +-----BEGIN CERTIFICATE----- +MIIDIDCCAomgAwIBAgIEN3DPtTANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV +UzEXMBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2Vj +dXJlIGVCdXNpbmVzcyBDQS0yMB4XDTk5MDYyMzEyMTQ0NVoXDTE5MDYyMzEyMTQ0 +NVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkVxdWlmYXggU2VjdXJlMSYwJAYD +VQQLEx1FcXVpZmF4IFNlY3VyZSBlQnVzaW5lc3MgQ0EtMjCBnzANBgkqhkiG9w0B +AQEFAAOBjQAwgYkCgYEA5Dk5kx5SBhsoNviyoynF7Y6yEb3+6+e0dMKP/wXn2Z0G +vxLIPw7y1tEkshHe0XMJitSxLJgJDR5QRrKDpkWNYmi7hRsgcDKqQM2mll/EcTc/ +BPO3QSQ5BxoeLmFYoBIL5aXfxavqN3HMHMg3OrmXUqesxWoklE6ce8/AatbfIb0C +AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEX +MBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2VjdXJl +IGVCdXNpbmVzcyBDQS0yMQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTkw +NjIzMTIxNDQ1WjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUUJ4L6q9euSBIplBq +y/3YIHqngnYwHQYDVR0OBBYEFFCeC+qvXrkgSKZQasv92CB6p4J2MAwGA1UdEwQF +MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA +A4GBAAyGgq3oThr1jokn4jVYPSm0B482UJW/bsGe68SQsoWou7dC4A8HOd/7npCy +0cE+U58DRLB+S/Rv5Hwf5+Kx5Lia78O9zt4LMjTZ3ijtM2vE1Nc9ElirfQkty3D1 +E4qUoSek1nDFbZS1yX2doNLGCEnZZpum0/QL3MUmV+GRMOrN +-----END CERTIFICATE----- + +Thawte Time Stamping CA +======================= + +-----BEGIN CERTIFICATE----- +MIICoTCCAgqgAwIBAgIBADANBgkqhkiG9w0BAQQFADCBizELMAkGA1UEBhMCWkEx +FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTEUMBIGA1UEBxMLRHVyYmFudmlsbGUxDzAN +BgNVBAoTBlRoYXd0ZTEdMBsGA1UECxMUVGhhd3RlIENlcnRpZmljYXRpb24xHzAd +BgNVBAMTFlRoYXd0ZSBUaW1lc3RhbXBpbmcgQ0EwHhcNOTcwMTAxMDAwMDAwWhcN +MjAxMjMxMjM1OTU5WjCBizELMAkGA1UEBhMCWkExFTATBgNVBAgTDFdlc3Rlcm4g +Q2FwZTEUMBIGA1UEBxMLRHVyYmFudmlsbGUxDzANBgNVBAoTBlRoYXd0ZTEdMBsG +A1UECxMUVGhhd3RlIENlcnRpZmljYXRpb24xHzAdBgNVBAMTFlRoYXd0ZSBUaW1l +c3RhbXBpbmcgQ0EwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANYrWHhhRYZT +6jR7UZztsOYuGA7+4F+oJ9O0yeB8WU4WDnNUYMF/9p8u6TqFJBU820cEY8OexJQa +Wt9MevPZQx08EHp5JduQ/vBR5zDWQQD9nyjfeb6Uu522FOMjhdepQeBMpHmwKxqL +8vg7ij5FrHGSALSQQZj7X+36ty6K+Ig3AgMBAAGjEzARMA8GA1UdEwEB/wQFMAMB +Af8wDQYJKoZIhvcNAQEEBQADgYEAZ9viwuaHPUCDhjc1fR/OmsMMZiCouqoEiYbC +9RAIDb/LogWK0E02PvTX72nGXuSwlG9KuefeW4i2e9vjJ+V2w/A1wcu1J5szedyQ +pgCed/r8zSeUQhac0xxo7L9c3eWpexAKMnRUEzGLhQOEkbdYATAUOK8oyvyxUBkZ +CayJSdM= +-----END CERTIFICATE----- + +thawte Primary Root CA +====================== + +-----BEGIN CERTIFICATE----- +MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB +qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf +Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw +MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV +BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw +NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j +LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG +A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl +IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs +W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta +3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk +6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6 +Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J +NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA +MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP +r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU 
+DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz +YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX +xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2 +/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/ +LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7 +jVaMaA== +-----END CERTIFICATE----- + +VeriSign Class 3 Public Primary Certification Authority - G5 +============================================================ + +-----BEGIN CERTIFICATE----- +MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB +yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL +ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp +U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW +ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL +MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW +ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln +biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp +U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y +aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1 +nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex +t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz +SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG +BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+ +rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/ +NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E +BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH +BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy +aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv +MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE +p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y +5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK +WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ +4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N +hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq +-----END CERTIFICATE----- + +Entrust.net Secure Server Certification Authority +================================================= + +-----BEGIN CERTIFICATE----- +MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC +VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u +ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc +KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u +ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05OTA1 +MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIGA1UE +ChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5j +b3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBF +bnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUg +U2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUA +A4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQaO2f55M28Qpku0f1BBc/ +I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5gXpa0zf3 +wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OC +AdcwggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHb +oIHYpIHVMIHSMQswCQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5 +BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1p +dHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1pdGVk 
+MTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRp +b24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVu +dHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0 +MFqBDzIwMTkwNTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8Bdi +E1U9s/8KAGv7UISX8+1i0BowHQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAa +MAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EABAwwChsEVjQuMAMCBJAwDQYJKoZI +hvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyNEwr75Ji174z4xRAN +95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9n9cd +2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI= +-----END CERTIFICATE----- + +Go Daddy Certification Authority Root Certificate Bundle +======================================================== + +-----BEGIN CERTIFICATE----- +MIIE3jCCA8agAwIBAgICAwEwDQYJKoZIhvcNAQEFBQAwYzELMAkGA1UEBhMCVVMx +ITAfBgNVBAoTGFRoZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28g +RGFkZHkgQ2xhc3MgMiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjExMTYw +MTU0MzdaFw0yNjExMTYwMTU0MzdaMIHKMQswCQYDVQQGEwJVUzEQMA4GA1UECBMH +QXJpem9uYTETMBEGA1UEBxMKU2NvdHRzZGFsZTEaMBgGA1UEChMRR29EYWRkeS5j +b20sIEluYy4xMzAxBgNVBAsTKmh0dHA6Ly9jZXJ0aWZpY2F0ZXMuZ29kYWRkeS5j +b20vcmVwb3NpdG9yeTEwMC4GA1UEAxMnR28gRGFkZHkgU2VjdXJlIENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5MREwDwYDVQQFEwgwNzk2OTI4NzCCASIwDQYJKoZIhvcN +AQEBBQADggEPADCCAQoCggEBAMQt1RWMnCZM7DI161+4WQFapmGBWTtwY6vj3D3H +KrjJM9N55DrtPDAjhI6zMBS2sofDPZVUBJ7fmd0LJR4h3mUpfjWoqVTr9vcyOdQm +VZWt7/v+WIbXnvQAjYwqDL1CBM6nPwT27oDyqu9SoWlm2r4arV3aLGbqGmu75RpR +SgAvSMeYddi5Kcju+GZtCpyz8/x4fKL4o/K1w/O5epHBp+YlLpyo7RJlbmr2EkRT +cDCVw5wrWCs9CHRK8r5RsL+H0EwnWGu1NcWdrxcx+AuP7q2BNgWJCJjPOq8lh8BJ +6qf9Z/dFjpfMFDniNoW1fho3/Rb2cRGadDAW/hOUoz+EDU8CAwEAAaOCATIwggEu +MB0GA1UdDgQWBBT9rGEyk2xF1uLuhV+auud2mWjM5zAfBgNVHSMEGDAWgBTSxLDS +kdRMEXGzYcs9of7dqGrU4zASBgNVHRMBAf8ECDAGAQH/AgEAMDMGCCsGAQUFBwEB +BCcwJTAjBggrBgEFBQcwAYYXaHR0cDovL29jc3AuZ29kYWRkeS5jb20wRgYDVR0f +BD8wPTA7oDmgN4Y1aHR0cDovL2NlcnRpZmljYXRlcy5nb2RhZGR5LmNvbS9yZXBv +c2l0b3J5L2dkcm9vdC5jcmwwSwYDVR0gBEQwQjBABgRVHSAAMDgwNgYIKwYBBQUH +AgEWKmh0dHA6Ly9jZXJ0aWZpY2F0ZXMuZ29kYWRkeS5jb20vcmVwb3NpdG9yeTAO +BgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBANKGwOy9+aG2Z+5mC6IG +OgRQjhVyrEp0lVPLN8tESe8HkGsz2ZbwlFalEzAFPIUyIXvJxwqoJKSQ3kbTJSMU +A2fCENZvD117esyfxVgqwcSeIaha86ykRvOe5GPLL5CkKSkB2XIsKd83ASe8T+5o +0yGPwLPk9Qnt0hCqU7S+8MxZC9Y7lhyVJEnfzuz9p0iRFEUOOjZv2kWzRaJBydTX +RE4+uXR21aITVSzGh6O1mawGhId/dQb8vxRMDsxuxN89txJx9OjxUUAiKEngHUuH +qDTMBqLdElrRhjZkAzVvb3du6/KFUJheqwNTrZEjYx8WnM25sgVjOuH0aBsXBTWV +U+4= +-----END CERTIFICATE----- +-----BEGIN CERTIFICATE----- +MIIE+zCCBGSgAwIBAgICAQ0wDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1Zh +bGlDZXJ0IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIElu +Yy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24g +QXV0aG9yaXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAe +BgkqhkiG9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTA0MDYyOTE3MDYyMFoX +DTI0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRoZSBHbyBE +YWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3MgMiBDZXJ0 +aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggENADCCAQgC +ggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCAPVYYYwhv +2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6wwdhFJ2+q +N1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXiEqITLdiO +r18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMYavx4A6lN +f4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+YihfukEH +U1jPEX44dMX4/7VpkI+EdOqXG68CAQOjggHhMIIB3TAdBgNVHQ4EFgQU0sSw0pHU +TBFxs2HLPaH+3ahq1OMwgdIGA1UdIwSByjCBx6GBwaSBvjCBuzEkMCIGA1UEBxMb 
+VmFsaUNlcnQgVmFsaWRhdGlvbiBOZXR3b3JrMRcwFQYDVQQKEw5WYWxpQ2VydCwg +SW5jLjE1MDMGA1UECxMsVmFsaUNlcnQgQ2xhc3MgMiBQb2xpY3kgVmFsaWRhdGlv +biBBdXRob3JpdHkxITAfBgNVBAMTGGh0dHA6Ly93d3cudmFsaWNlcnQuY29tLzEg +MB4GCSqGSIb3DQEJARYRaW5mb0B2YWxpY2VydC5jb22CAQEwDwYDVR0TAQH/BAUw +AwEB/zAzBggrBgEFBQcBAQQnMCUwIwYIKwYBBQUHMAGGF2h0dHA6Ly9vY3NwLmdv +ZGFkZHkuY29tMEQGA1UdHwQ9MDswOaA3oDWGM2h0dHA6Ly9jZXJ0aWZpY2F0ZXMu +Z29kYWRkeS5jb20vcmVwb3NpdG9yeS9yb290LmNybDBLBgNVHSAERDBCMEAGBFUd +IAAwODA2BggrBgEFBQcCARYqaHR0cDovL2NlcnRpZmljYXRlcy5nb2RhZGR5LmNv +bS9yZXBvc2l0b3J5MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOBgQC1 +QPmnHfbq/qQaQlpE9xXUhUaJwL6e4+PrxeNYiY+Sn1eocSxI0YGyeR+sBjUZsE4O +WBsUs5iB0QQeyAfJg594RAoYC5jcdnplDQ1tgMQLARzLrUc+cb53S8wGd9D0Vmsf +SxOaFIqII6hR8INMqzW/Rn453HWkrugp++85j09VZw== +-----END CERTIFICATE----- +-----BEGIN CERTIFICATE----- +MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 +IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz +BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y +aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG +9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMTk1NFoXDTE5MDYy +NjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y +azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs +YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw +Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl +cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDOOnHK5avIWZJV16vY +dA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVCCSRrCl6zfN1SLUzm1NZ9 +WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7RfZHM047QS +v4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9v +UJSZSWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTu +IYEZoDJJKPTEjlbVUjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwC +W/POuZ6lcg5Ktz885hZo+L7tdEy8W9ViH0Pd +-----END CERTIFICATE----- + diff --git a/lib/httplib2/iri2uri.py b/lib/httplib2/iri2uri.py new file mode 100644 index 0000000..70667ed --- /dev/null +++ b/lib/httplib2/iri2uri.py @@ -0,0 +1,110 @@ +""" +iri2uri + +Converts an IRI to a URI. + +""" +__author__ = "Joe Gregorio (joe@bitworking.org)" +__copyright__ = "Copyright 2006, Joe Gregorio" +__contributors__ = [] +__version__ = "1.0.0" +__license__ = "MIT" +__history__ = """ +""" + +import urlparse + + +# Convert an IRI to a URI following the rules in RFC 3987 +# +# The characters we need to enocde and escape are defined in the spec: +# +# iprivate = %xE000-F8FF / %xF0000-FFFFD / %x100000-10FFFD +# ucschar = %xA0-D7FF / %xF900-FDCF / %xFDF0-FFEF +# / %x10000-1FFFD / %x20000-2FFFD / %x30000-3FFFD +# / %x40000-4FFFD / %x50000-5FFFD / %x60000-6FFFD +# / %x70000-7FFFD / %x80000-8FFFD / %x90000-9FFFD +# / %xA0000-AFFFD / %xB0000-BFFFD / %xC0000-CFFFD +# / %xD0000-DFFFD / %xE1000-EFFFD + +escape_range = [ + (0xA0, 0xD7FF ), + (0xE000, 0xF8FF ), + (0xF900, 0xFDCF ), + (0xFDF0, 0xFFEF), + (0x10000, 0x1FFFD ), + (0x20000, 0x2FFFD ), + (0x30000, 0x3FFFD), + (0x40000, 0x4FFFD ), + (0x50000, 0x5FFFD ), + (0x60000, 0x6FFFD), + (0x70000, 0x7FFFD ), + (0x80000, 0x8FFFD ), + (0x90000, 0x9FFFD), + (0xA0000, 0xAFFFD ), + (0xB0000, 0xBFFFD ), + (0xC0000, 0xCFFFD), + (0xD0000, 0xDFFFD ), + (0xE1000, 0xEFFFD), + (0xF0000, 0xFFFFD ), + (0x100000, 0x10FFFD) +] + +def encode(c): + retval = c + i = ord(c) + for low, high in escape_range: + if i < low: + break + if i >= low and i <= high: + retval = "".join(["%%%2X" % ord(o) for o in c.encode('utf-8')]) + break + return retval + + +def iri2uri(uri): + """Convert an IRI to a URI. 
Note that IRIs must be + passed in a unicode strings. That is, do not utf-8 encode + the IRI before passing it into the function.""" + if isinstance(uri ,unicode): + (scheme, authority, path, query, fragment) = urlparse.urlsplit(uri) + authority = authority.encode('idna') + # For each character in 'ucschar' or 'iprivate' + # 1. encode as utf-8 + # 2. then %-encode each octet of that utf-8 + uri = urlparse.urlunsplit((scheme, authority, path, query, fragment)) + uri = "".join([encode(c) for c in uri]) + return uri + +if __name__ == "__main__": + import unittest + + class Test(unittest.TestCase): + + def test_uris(self): + """Test that URIs are invariant under the transformation.""" + invariant = [ + u"ftp://ftp.is.co.za/rfc/rfc1808.txt", + u"http://www.ietf.org/rfc/rfc2396.txt", + u"ldap://[2001:db8::7]/c=GB?objectClass?one", + u"mailto:John.Doe@example.com", + u"news:comp.infosystems.www.servers.unix", + u"tel:+1-816-555-1212", + u"telnet://192.0.2.16:80/", + u"urn:oasis:names:specification:docbook:dtd:xml:4.1.2" ] + for uri in invariant: + self.assertEqual(uri, iri2uri(uri)) + + def test_iri(self): + """ Test that the right type of escaping is done for each part of the URI.""" + self.assertEqual("http://xn--o3h.com/%E2%98%84", iri2uri(u"http://\N{COMET}.com/\N{COMET}")) + self.assertEqual("http://bitworking.org/?fred=%E2%98%84", iri2uri(u"http://bitworking.org/?fred=\N{COMET}")) + self.assertEqual("http://bitworking.org/#%E2%98%84", iri2uri(u"http://bitworking.org/#\N{COMET}")) + self.assertEqual("#%E2%98%84", iri2uri(u"#\N{COMET}")) + self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}")) + self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"))) + self.assertNotEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode('utf-8'))) + + unittest.main() + + diff --git a/lib/httplib2/socks.py b/lib/httplib2/socks.py new file mode 100644 index 0000000..0991f4c --- /dev/null +++ b/lib/httplib2/socks.py @@ -0,0 +1,438 @@ +"""SocksiPy - Python SOCKS module. +Version 1.00 + +Copyright 2006 Dan-Haim. All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. +3. Neither the name of Dan Haim nor the names of his contributors may be used + to endorse or promote products derived from this software without specific + prior written permission. + +THIS SOFTWARE IS PROVIDED BY DAN HAIM "AS IS" AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO +EVENT SHALL DAN HAIM OR HIS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA +OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT +OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMANGE. + + +This module provides a standard socket-like interface for Python +for tunneling connections through SOCKS proxies. + +""" + +""" + +Minor modifications made by Christopher Gilbert (http://motomastyle.com/) +for use in PyLoris (http://pyloris.sourceforge.net/) + +Minor modifications made by Mario Vilas (http://breakingcode.wordpress.com/) +mainly to merge bug fixes found in Sourceforge + +""" + +import base64 +import socket +import struct +import sys + +if getattr(socket, 'socket', None) is None: + raise ImportError('socket.socket missing, proxy support unusable') + +PROXY_TYPE_SOCKS4 = 1 +PROXY_TYPE_SOCKS5 = 2 +PROXY_TYPE_HTTP = 3 +PROXY_TYPE_HTTP_NO_TUNNEL = 4 + +_defaultproxy = None +_orgsocket = socket.socket + +class ProxyError(Exception): pass +class GeneralProxyError(ProxyError): pass +class Socks5AuthError(ProxyError): pass +class Socks5Error(ProxyError): pass +class Socks4Error(ProxyError): pass +class HTTPError(ProxyError): pass + +_generalerrors = ("success", + "invalid data", + "not connected", + "not available", + "bad proxy type", + "bad input") + +_socks5errors = ("succeeded", + "general SOCKS server failure", + "connection not allowed by ruleset", + "Network unreachable", + "Host unreachable", + "Connection refused", + "TTL expired", + "Command not supported", + "Address type not supported", + "Unknown error") + +_socks5autherrors = ("succeeded", + "authentication is required", + "all offered authentication methods were rejected", + "unknown username or invalid password", + "unknown error") + +_socks4errors = ("request granted", + "request rejected or failed", + "request rejected because SOCKS server cannot connect to identd on the client", + "request rejected because the client program and identd report different user-ids", + "unknown error") + +def setdefaultproxy(proxytype=None, addr=None, port=None, rdns=True, username=None, password=None): + """setdefaultproxy(proxytype, addr[, port[, rdns[, username[, password]]]]) + Sets a default proxy which all further socksocket objects will use, + unless explicitly changed. + """ + global _defaultproxy + _defaultproxy = (proxytype, addr, port, rdns, username, password) + +def wrapmodule(module): + """wrapmodule(module) + Attempts to replace a module's socket library with a SOCKS socket. Must set + a default proxy using setdefaultproxy(...) first. + This will only work on modules that import socket directly into the namespace; + most of the Python Standard Library falls into this category. + """ + if _defaultproxy != None: + module.socket.socket = socksocket + else: + raise GeneralProxyError((4, "no proxy specified")) + +class socksocket(socket.socket): + """socksocket([family[, type[, proto]]]) -> socket object + Open a SOCKS enabled socket. The parameters are the same as + those of the standard socket init. In order for SOCKS to work, + you must specify family=AF_INET, type=SOCK_STREAM and proto=0. 
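+
+    A minimal usage sketch; the proxy address, port and destination
+    host below are placeholders, while every other name is defined in
+    this module:
+
+        s = socksocket()
+        s.setproxy(PROXY_TYPE_SOCKS5, "127.0.0.1", 1080)
+        s.connect(("www.example.com", 80))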
+ """ + + def __init__(self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None): + _orgsocket.__init__(self, family, type, proto, _sock) + if _defaultproxy != None: + self.__proxy = _defaultproxy + else: + self.__proxy = (None, None, None, None, None, None) + self.__proxysockname = None + self.__proxypeername = None + self.__httptunnel = True + + def __recvall(self, count): + """__recvall(count) -> data + Receive EXACTLY the number of bytes requested from the socket. + Blocks until the required number of bytes have been received. + """ + data = self.recv(count) + while len(data) < count: + d = self.recv(count-len(data)) + if not d: raise GeneralProxyError((0, "connection closed unexpectedly")) + data = data + d + return data + + def sendall(self, content, *args): + """ override socket.socket.sendall method to rewrite the header + for non-tunneling proxies if needed + """ + if not self.__httptunnel: + content = self.__rewriteproxy(content) + return super(socksocket, self).sendall(content, *args) + + def __rewriteproxy(self, header): + """ rewrite HTTP request headers to support non-tunneling proxies + (i.e. those which do not support the CONNECT method). + This only works for HTTP (not HTTPS) since HTTPS requires tunneling. + """ + host, endpt = None, None + hdrs = header.split("\r\n") + for hdr in hdrs: + if hdr.lower().startswith("host:"): + host = hdr + elif hdr.lower().startswith("get") or hdr.lower().startswith("post"): + endpt = hdr + if host and endpt: + hdrs.remove(host) + hdrs.remove(endpt) + host = host.split(" ")[1] + endpt = endpt.split(" ") + if (self.__proxy[4] != None and self.__proxy[5] != None): + hdrs.insert(0, self.__getauthheader()) + hdrs.insert(0, "Host: %s" % host) + hdrs.insert(0, "%s http://%s%s %s" % (endpt[0], host, endpt[1], endpt[2])) + return "\r\n".join(hdrs) + + def __getauthheader(self): + auth = self.__proxy[4] + ":" + self.__proxy[5] + return "Proxy-Authorization: Basic " + base64.b64encode(auth) + + def setproxy(self, proxytype=None, addr=None, port=None, rdns=True, username=None, password=None): + """setproxy(proxytype, addr[, port[, rdns[, username[, password]]]]) + Sets the proxy to be used. + proxytype - The type of the proxy to be used. Three types + are supported: PROXY_TYPE_SOCKS4 (including socks4a), + PROXY_TYPE_SOCKS5 and PROXY_TYPE_HTTP + addr - The address of the server (IP or DNS). + port - The port of the server. Defaults to 1080 for SOCKS + servers and 8080 for HTTP proxy servers. + rdns - Should DNS queries be preformed on the remote side + (rather than the local side). The default is True. + Note: This has no effect with SOCKS4 servers. + username - Username to authenticate with to the server. + The default is no authentication. + password - Password to authenticate with to the server. + Only relevant when username is also provided. + """ + self.__proxy = (proxytype, addr, port, rdns, username, password) + + def __negotiatesocks5(self, destaddr, destport): + """__negotiatesocks5(self,destaddr,destport) + Negotiates a connection through a SOCKS5 server. + """ + # First we'll send the authentication packages we support. + if (self.__proxy[4]!=None) and (self.__proxy[5]!=None): + # The username/password details were supplied to the + # setproxy method so we support the USERNAME/PASSWORD + # authentication (in addition to the standard none). + self.sendall(struct.pack('BBBB', 0x05, 0x02, 0x00, 0x02)) + else: + # No username/password were entered, therefore we + # only support connections with no authentication. 
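+            # (The three bytes packed below follow the RFC 1928 greeting
+            # layout: protocol version 0x05, one authentication method
+            # offered, and method 0x00 = "no authentication required".)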
+ self.sendall(struct.pack('BBB', 0x05, 0x01, 0x00)) + # We'll receive the server's response to determine which + # method was selected + chosenauth = self.__recvall(2) + if chosenauth[0:1] != chr(0x05).encode(): + self.close() + raise GeneralProxyError((1, _generalerrors[1])) + # Check the chosen authentication method + if chosenauth[1:2] == chr(0x00).encode(): + # No authentication is required + pass + elif chosenauth[1:2] == chr(0x02).encode(): + # Okay, we need to perform a basic username/password + # authentication. + self.sendall(chr(0x01).encode() + chr(len(self.__proxy[4])) + self.__proxy[4] + chr(len(self.__proxy[5])) + self.__proxy[5]) + authstat = self.__recvall(2) + if authstat[0:1] != chr(0x01).encode(): + # Bad response + self.close() + raise GeneralProxyError((1, _generalerrors[1])) + if authstat[1:2] != chr(0x00).encode(): + # Authentication failed + self.close() + raise Socks5AuthError((3, _socks5autherrors[3])) + # Authentication succeeded + else: + # Reaching here is always bad + self.close() + if chosenauth[1] == chr(0xFF).encode(): + raise Socks5AuthError((2, _socks5autherrors[2])) + else: + raise GeneralProxyError((1, _generalerrors[1])) + # Now we can request the actual connection + req = struct.pack('BBB', 0x05, 0x01, 0x00) + # If the given destination address is an IP address, we'll + # use the IPv4 address request even if remote resolving was specified. + try: + ipaddr = socket.inet_aton(destaddr) + req = req + chr(0x01).encode() + ipaddr + except socket.error: + # Well it's not an IP number, so it's probably a DNS name. + if self.__proxy[3]: + # Resolve remotely + ipaddr = None + req = req + chr(0x03).encode() + chr(len(destaddr)).encode() + destaddr + else: + # Resolve locally + ipaddr = socket.inet_aton(socket.gethostbyname(destaddr)) + req = req + chr(0x01).encode() + ipaddr + req = req + struct.pack(">H", destport) + self.sendall(req) + # Get the response + resp = self.__recvall(4) + if resp[0:1] != chr(0x05).encode(): + self.close() + raise GeneralProxyError((1, _generalerrors[1])) + elif resp[1:2] != chr(0x00).encode(): + # Connection failed + self.close() + if ord(resp[1:2])<=8: + raise Socks5Error((ord(resp[1:2]), _socks5errors[ord(resp[1:2])])) + else: + raise Socks5Error((9, _socks5errors[9])) + # Get the bound address/port + elif resp[3:4] == chr(0x01).encode(): + boundaddr = self.__recvall(4) + elif resp[3:4] == chr(0x03).encode(): + resp = resp + self.recv(1) + boundaddr = self.__recvall(ord(resp[4:5])) + else: + self.close() + raise GeneralProxyError((1,_generalerrors[1])) + boundport = struct.unpack(">H", self.__recvall(2))[0] + self.__proxysockname = (boundaddr, boundport) + if ipaddr != None: + self.__proxypeername = (socket.inet_ntoa(ipaddr), destport) + else: + self.__proxypeername = (destaddr, destport) + + def getproxysockname(self): + """getsockname() -> address info + Returns the bound IP address and port number at the proxy. + """ + return self.__proxysockname + + def getproxypeername(self): + """getproxypeername() -> address info + Returns the IP and port number of the proxy. + """ + return _orgsocket.getpeername(self) + + def getpeername(self): + """getpeername() -> address info + Returns the IP address and port number of the destination + machine (note: getproxypeername returns the proxy) + """ + return self.__proxypeername + + def __negotiatesocks4(self,destaddr,destport): + """__negotiatesocks4(self,destaddr,destport) + Negotiates a connection through a SOCKS4 server. 
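+        The request built below follows the fixed SOCKS4 layout:
+        version byte 0x04, command 0x01 (CONNECT), the destination
+        port and IPv4 address, then an optional userid terminated by a
+        NUL byte; for the SOCKS4A extension the dummy address 0.0.0.1
+        is sent and the NUL-terminated hostname is appended.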
+ """ + # Check if the destination address provided is an IP address + rmtrslv = False + try: + ipaddr = socket.inet_aton(destaddr) + except socket.error: + # It's a DNS name. Check where it should be resolved. + if self.__proxy[3]: + ipaddr = struct.pack("BBBB", 0x00, 0x00, 0x00, 0x01) + rmtrslv = True + else: + ipaddr = socket.inet_aton(socket.gethostbyname(destaddr)) + # Construct the request packet + req = struct.pack(">BBH", 0x04, 0x01, destport) + ipaddr + # The username parameter is considered userid for SOCKS4 + if self.__proxy[4] != None: + req = req + self.__proxy[4] + req = req + chr(0x00).encode() + # DNS name if remote resolving is required + # NOTE: This is actually an extension to the SOCKS4 protocol + # called SOCKS4A and may not be supported in all cases. + if rmtrslv: + req = req + destaddr + chr(0x00).encode() + self.sendall(req) + # Get the response from the server + resp = self.__recvall(8) + if resp[0:1] != chr(0x00).encode(): + # Bad data + self.close() + raise GeneralProxyError((1,_generalerrors[1])) + if resp[1:2] != chr(0x5A).encode(): + # Server returned an error + self.close() + if ord(resp[1:2]) in (91, 92, 93): + self.close() + raise Socks4Error((ord(resp[1:2]), _socks4errors[ord(resp[1:2]) - 90])) + else: + raise Socks4Error((94, _socks4errors[4])) + # Get the bound address/port + self.__proxysockname = (socket.inet_ntoa(resp[4:]), struct.unpack(">H", resp[2:4])[0]) + if rmtrslv != None: + self.__proxypeername = (socket.inet_ntoa(ipaddr), destport) + else: + self.__proxypeername = (destaddr, destport) + + def __negotiatehttp(self, destaddr, destport): + """__negotiatehttp(self,destaddr,destport) + Negotiates a connection through an HTTP server. + """ + # If we need to resolve locally, we do this now + if not self.__proxy[3]: + addr = socket.gethostbyname(destaddr) + else: + addr = destaddr + headers = ["CONNECT ", addr, ":", str(destport), " HTTP/1.1\r\n"] + headers += ["Host: ", destaddr, "\r\n"] + if (self.__proxy[4] != None and self.__proxy[5] != None): + headers += [self.__getauthheader(), "\r\n"] + headers.append("\r\n") + self.sendall("".join(headers).encode()) + # We read the response until we get the string "\r\n\r\n" + resp = self.recv(1) + while resp.find("\r\n\r\n".encode()) == -1: + resp = resp + self.recv(1) + # We just need the first line to check if the connection + # was successful + statusline = resp.splitlines()[0].split(" ".encode(), 2) + if statusline[0] not in ("HTTP/1.0".encode(), "HTTP/1.1".encode()): + self.close() + raise GeneralProxyError((1, _generalerrors[1])) + try: + statuscode = int(statusline[1]) + except ValueError: + self.close() + raise GeneralProxyError((1, _generalerrors[1])) + if statuscode != 200: + self.close() + raise HTTPError((statuscode, statusline[2])) + self.__proxysockname = ("0.0.0.0", 0) + self.__proxypeername = (addr, destport) + + def connect(self, destpair): + """connect(self, despair) + Connects to the specified destination through a proxy. + destpar - A tuple of the IP/DNS address and the port number. + (identical to socket's connect). + To select the proxy server use setproxy(). 
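+
+        A module-wide alternative, sketched with a placeholder proxy
+        address (setdefaultproxy and wrapmodule are defined at the top
+        of this file, and httplib stands in for any module that does
+        "import socket"):
+
+            setdefaultproxy(PROXY_TYPE_SOCKS5, "127.0.0.1", 1080)
+            wrapmodule(httplib)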
+ """ + # Do a minimal input check first + if (not type(destpair) in (list,tuple)) or (len(destpair) < 2) or (not isinstance(destpair[0], basestring)) or (type(destpair[1]) != int): + raise GeneralProxyError((5, _generalerrors[5])) + if self.__proxy[0] == PROXY_TYPE_SOCKS5: + if self.__proxy[2] != None: + portnum = self.__proxy[2] + else: + portnum = 1080 + _orgsocket.connect(self, (self.__proxy[1], portnum)) + self.__negotiatesocks5(destpair[0], destpair[1]) + elif self.__proxy[0] == PROXY_TYPE_SOCKS4: + if self.__proxy[2] != None: + portnum = self.__proxy[2] + else: + portnum = 1080 + _orgsocket.connect(self,(self.__proxy[1], portnum)) + self.__negotiatesocks4(destpair[0], destpair[1]) + elif self.__proxy[0] == PROXY_TYPE_HTTP: + if self.__proxy[2] != None: + portnum = self.__proxy[2] + else: + portnum = 8080 + _orgsocket.connect(self,(self.__proxy[1], portnum)) + self.__negotiatehttp(destpair[0], destpair[1]) + elif self.__proxy[0] == PROXY_TYPE_HTTP_NO_TUNNEL: + if self.__proxy[2] != None: + portnum = self.__proxy[2] + else: + portnum = 8080 + _orgsocket.connect(self,(self.__proxy[1],portnum)) + if destpair[1] == 443: + self.__negotiatehttp(destpair[0],destpair[1]) + else: + self.__httptunnel = False + elif self.__proxy[0] == None: + _orgsocket.connect(self, (destpair[0], destpair[1])) + else: + raise GeneralProxyError((4, _generalerrors[4])) diff --git a/lib/oauth2/__init__.py b/lib/oauth2/__init__.py new file mode 100644 index 0000000..835270e --- /dev/null +++ b/lib/oauth2/__init__.py @@ -0,0 +1,860 @@ +""" +The MIT License + +Copyright (c) 2007-2010 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. +""" + +import base64 +import urllib +import time +import random +import urlparse +import hmac +import binascii +import httplib2 + +try: + from urlparse import parse_qs + parse_qs # placate pyflakes +except ImportError: + # fall back for Python 2.5 + from cgi import parse_qs + +try: + from hashlib import sha1 + sha = sha1 +except ImportError: + # hashlib was added in Python 2.5 + import sha + +import _version + +__version__ = _version.__version__ + +OAUTH_VERSION = '1.0' # Hi Blaine! 
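+
+# A short usage sketch with placeholder credentials and URL; Consumer,
+# Token and Client are all defined further down in this module:
+#
+#   consumer = Consumer(key="app-key", secret="app-secret")
+#   token = Token(key="access-key", secret="access-secret")
+#   client = Client(consumer, token)
+#   resp, content = client.request("https://api.example.com/resource", "GET")
+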
+HTTP_METHOD = 'GET' +SIGNATURE_METHOD = 'PLAINTEXT' + + +class Error(RuntimeError): + """Generic exception class.""" + + def __init__(self, message='OAuth error occurred.'): + self._message = message + + @property + def message(self): + """A hack to get around the deprecation errors in 2.6.""" + return self._message + + def __str__(self): + return self._message + + +class MissingSignature(Error): + pass + + +def build_authenticate_header(realm=''): + """Optional WWW-Authenticate header (401 error)""" + return {'WWW-Authenticate': 'OAuth realm="%s"' % realm} + + +def build_xoauth_string(url, consumer, token=None): + """Build an XOAUTH string for use in SMTP/IMPA authentication.""" + request = Request.from_consumer_and_token(consumer, token, + "GET", url) + + signing_method = SignatureMethod_HMAC_SHA1() + request.sign_request(signing_method, consumer, token) + + params = [] + for k, v in sorted(request.iteritems()): + if v is not None: + params.append('%s="%s"' % (k, escape(v))) + + return "%s %s %s" % ("GET", url, ','.join(params)) + + +def to_unicode(s): + """ Convert to unicode, raise exception with instructive error + message if s is not unicode, ascii, or utf-8. """ + if not isinstance(s, unicode): + if not isinstance(s, str): + raise TypeError('You are required to pass either unicode or string here, not: %r (%s)' % (type(s), s)) + try: + s = s.decode('utf-8') + except UnicodeDecodeError, le: + raise TypeError('You are required to pass either a unicode object or a utf-8 string here. You passed a Python string object which contained non-utf-8: %r. The UnicodeDecodeError that resulted from attempting to interpret it as utf-8 was: %s' % (s, le,)) + return s + +def to_utf8(s): + return to_unicode(s).encode('utf-8') + +def to_unicode_if_string(s): + if isinstance(s, basestring): + return to_unicode(s) + else: + return s + +def to_utf8_if_string(s): + if isinstance(s, basestring): + return to_utf8(s) + else: + return s + +def to_unicode_optional_iterator(x): + """ + Raise TypeError if x is a str containing non-utf8 bytes or if x is + an iterable which contains such a str. + """ + if isinstance(x, basestring): + return to_unicode(x) + + try: + l = list(x) + except TypeError, e: + assert 'is not iterable' in str(e) + return x + else: + return [ to_unicode(e) for e in l ] + +def to_utf8_optional_iterator(x): + """ + Raise TypeError if x is a str or if x is an iterable which + contains a str. + """ + if isinstance(x, basestring): + return to_utf8(x) + + try: + l = list(x) + except TypeError, e: + assert 'is not iterable' in str(e) + return x + else: + return [ to_utf8_if_string(e) for e in l ] + +def escape(s): + """Escape a URL including any /.""" + return urllib.quote(s.encode('utf-8'), safe='~') + +def generate_timestamp(): + """Get seconds since epoch (UTC).""" + return int(time.time()) + + +def generate_nonce(length=8): + """Generate pseudorandom number.""" + return ''.join([str(random.randint(0, 9)) for i in range(length)]) + + +def generate_verifier(length=8): + """Generate pseudorandom number.""" + return ''.join([str(random.randint(0, 9)) for i in range(length)]) + + +class Consumer(object): + """A consumer of OAuth-protected services. + + The OAuth consumer is a "third-party" service that wants to access + protected resources from an OAuth service provider on behalf of an end + user. It's kind of the OAuth client. + + Usually a consumer must be registered with the service provider by the + developer of the consumer software. 
As part of that process, the service + provider gives the consumer a *key* and a *secret* with which the consumer + software can identify itself to the service. The consumer will include its + key in each request to identify itself, but will use its secret only when + signing requests, to prove that the request is from that particular + registered consumer. + + Once registered, the consumer can then use its consumer credentials to ask + the service provider for a request token, kicking off the OAuth + authorization process. + """ + + key = None + secret = None + + def __init__(self, key, secret): + self.key = key + self.secret = secret + + if self.key is None or self.secret is None: + raise ValueError("Key and secret must be set.") + + def __str__(self): + data = {'oauth_consumer_key': self.key, + 'oauth_consumer_secret': self.secret} + + return urllib.urlencode(data) + + +class Token(object): + """An OAuth credential used to request authorization or a protected + resource. + + Tokens in OAuth comprise a *key* and a *secret*. The key is included in + requests to identify the token being used, but the secret is used only in + the signature, to prove that the requester is who the server gave the + token to. + + When first negotiating the authorization, the consumer asks for a *request + token* that the live user authorizes with the service provider. The + consumer then exchanges the request token for an *access token* that can + be used to access protected resources. + """ + + key = None + secret = None + callback = None + callback_confirmed = None + verifier = None + + def __init__(self, key, secret): + self.key = key + self.secret = secret + + if self.key is None or self.secret is None: + raise ValueError("Key and secret must be set.") + + def set_callback(self, callback): + self.callback = callback + self.callback_confirmed = 'true' + + def set_verifier(self, verifier=None): + if verifier is not None: + self.verifier = verifier + else: + self.verifier = generate_verifier() + + def get_callback_url(self): + if self.callback and self.verifier: + # Append the oauth_verifier. + parts = urlparse.urlparse(self.callback) + scheme, netloc, path, params, query, fragment = parts[:6] + if query: + query = '%s&oauth_verifier=%s' % (query, self.verifier) + else: + query = 'oauth_verifier=%s' % self.verifier + return urlparse.urlunparse((scheme, netloc, path, params, + query, fragment)) + return self.callback + + def to_string(self): + """Returns this token as a plain string, suitable for storage. + + The resulting string includes the token's secret, so you should never + send or store this string where a third party can read it. 
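+
+        A round-trip sketch (the key/secret values are placeholders):
+
+            s = Token('req-key', 'req-secret').to_string()
+            # e.g. 'oauth_token=req-key&oauth_token_secret=req-secret'
+            restored = Token.from_string(s)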
+ """ + + data = { + 'oauth_token': self.key, + 'oauth_token_secret': self.secret, + } + + if self.callback_confirmed is not None: + data['oauth_callback_confirmed'] = self.callback_confirmed + return urllib.urlencode(data) + + @staticmethod + def from_string(s): + """Deserializes a token from a string like one returned by + `to_string()`.""" + + if not len(s): + raise ValueError("Invalid parameter string.") + + params = parse_qs(s, keep_blank_values=False) + if not len(params): + raise ValueError("Invalid parameter string.") + + try: + key = params['oauth_token'][0] + except Exception: + raise ValueError("'oauth_token' not found in OAuth request.") + + try: + secret = params['oauth_token_secret'][0] + except Exception: + raise ValueError("'oauth_token_secret' not found in " + "OAuth request.") + + token = Token(key, secret) + try: + token.callback_confirmed = params['oauth_callback_confirmed'][0] + except KeyError: + pass # 1.0, no callback confirmed. + return token + + def __str__(self): + return self.to_string() + + +def setter(attr): + name = attr.__name__ + + def getter(self): + try: + return self.__dict__[name] + except KeyError: + raise AttributeError(name) + + def deleter(self): + del self.__dict__[name] + + return property(getter, attr, deleter) + + +class Request(dict): + + """The parameters and information for an HTTP request, suitable for + authorizing with OAuth credentials. + + When a consumer wants to access a service's protected resources, it does + so using a signed HTTP request identifying itself (the consumer) with its + key, and providing an access token authorized by the end user to access + those resources. + + """ + + version = OAUTH_VERSION + + def __init__(self, method=HTTP_METHOD, url=None, parameters=None, + body='', is_form_encoded=False): + if url is not None: + self.url = to_unicode(url) + self.method = method + if parameters is not None: + for k, v in parameters.iteritems(): + k = to_unicode(k) + v = to_unicode_optional_iterator(v) + self[k] = v + self.body = body + self.is_form_encoded = is_form_encoded + + + @setter + def url(self, value): + self.__dict__['url'] = value + if value is not None: + scheme, netloc, path, params, query, fragment = urlparse.urlparse(value) + + # Exclude default port numbers. + if scheme == 'http' and netloc[-3:] == ':80': + netloc = netloc[:-3] + elif scheme == 'https' and netloc[-4:] == ':443': + netloc = netloc[:-4] + if scheme not in ('http', 'https'): + raise ValueError("Unsupported URL %s (%s)." % (value, scheme)) + + # Normalized URL excludes params, query, and fragment. 
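+            # e.g. 'http://example.com:80/path?a=1#frag' is normalized
+            # to 'http://example.com/path'.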
+ self.normalized_url = urlparse.urlunparse((scheme, netloc, path, None, None, None)) + else: + self.normalized_url = None + self.__dict__['url'] = None + + @setter + def method(self, value): + self.__dict__['method'] = value.upper() + + def _get_timestamp_nonce(self): + return self['oauth_timestamp'], self['oauth_nonce'] + + def get_nonoauth_parameters(self): + """Get any non-OAuth parameters.""" + return dict([(k, v) for k, v in self.iteritems() + if not k.startswith('oauth_')]) + + def to_header(self, realm=''): + """Serialize as a header for an HTTPAuth request.""" + oauth_params = ((k, v) for k, v in self.items() + if k.startswith('oauth_')) + stringy_params = ((k, escape(str(v))) for k, v in oauth_params) + header_params = ('%s="%s"' % (k, v) for k, v in stringy_params) + params_header = ', '.join(header_params) + + auth_header = 'OAuth realm="%s"' % realm + if params_header: + auth_header = "%s, %s" % (auth_header, params_header) + + return {'Authorization': auth_header} + + def to_postdata(self): + """Serialize as post data for a POST request.""" + d = {} + for k, v in self.iteritems(): + d[k.encode('utf-8')] = to_utf8_optional_iterator(v) + + # tell urlencode to deal with sequence values and map them correctly + # to resulting querystring. for example self["k"] = ["v1", "v2"] will + # result in 'k=v1&k=v2' and not k=%5B%27v1%27%2C+%27v2%27%5D + return urllib.urlencode(d, True).replace('+', '%20') + + def to_url(self): + """Serialize as a URL for a GET request.""" + base_url = urlparse.urlparse(self.url) + try: + query = base_url.query + except AttributeError: + # must be python <2.5 + query = base_url[4] + query = parse_qs(query) + for k, v in self.items(): + query.setdefault(k, []).append(v) + + try: + scheme = base_url.scheme + netloc = base_url.netloc + path = base_url.path + params = base_url.params + fragment = base_url.fragment + except AttributeError: + # must be python <2.5 + scheme = base_url[0] + netloc = base_url[1] + path = base_url[2] + params = base_url[3] + fragment = base_url[5] + + url = (scheme, netloc, path, params, + urllib.urlencode(query, True), fragment) + return urlparse.urlunparse(url) + + def get_parameter(self, parameter): + ret = self.get(parameter) + if ret is None: + raise Error('Parameter not found: %s' % parameter) + + return ret + + def get_normalized_parameters(self): + """Return a string that contains the parameters that must be signed.""" + items = [] + for key, value in self.iteritems(): + if key == 'oauth_signature': + continue + # 1.0a/9.1.1 states that kvp must be sorted by key, then by value, + # so we unpack sequence values into multiple items for sorting. 
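+            # e.g. self['k'] = ['v2', 'v1'] contributes ('k', 'v1') and
+            # ('k', 'v2') once the list is sorted below.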
+ if isinstance(value, basestring): + items.append((to_utf8_if_string(key), to_utf8(value))) + else: + try: + value = list(value) + except TypeError, e: + assert 'is not iterable' in str(e) + items.append((to_utf8_if_string(key), to_utf8_if_string(value))) + else: + items.extend((to_utf8_if_string(key), to_utf8_if_string(item)) for item in value) + + # Include any query string parameters from the provided URL + query = urlparse.urlparse(self.url)[4] + + url_items = self._split_url_string(query).items() + url_items = [(to_utf8(k), to_utf8(v)) for k, v in url_items if k != 'oauth_signature' ] + items.extend(url_items) + + items.sort() + encoded_str = urllib.urlencode(items) + # Encode signature parameters per Oauth Core 1.0 protocol + # spec draft 7, section 3.6 + # (http://tools.ietf.org/html/draft-hammer-oauth-07#section-3.6) + # Spaces must be encoded with "%20" instead of "+" + return encoded_str.replace('+', '%20').replace('%7E', '~') + + def sign_request(self, signature_method, consumer, token): + """Set the signature parameter to the result of sign.""" + + if not self.is_form_encoded: + # according to + # http://oauth.googlecode.com/svn/spec/ext/body_hash/1.0/oauth-bodyhash.html + # section 4.1.1 "OAuth Consumers MUST NOT include an + # oauth_body_hash parameter on requests with form-encoded + # request bodies." + self['oauth_body_hash'] = base64.b64encode(sha(self.body).digest()) + + if 'oauth_consumer_key' not in self: + self['oauth_consumer_key'] = consumer.key + + if token and 'oauth_token' not in self: + self['oauth_token'] = token.key + + self['oauth_signature_method'] = signature_method.name + self['oauth_signature'] = signature_method.sign(self, consumer, token) + + @classmethod + def make_timestamp(cls): + """Get seconds since epoch (UTC).""" + return str(int(time.time())) + + @classmethod + def make_nonce(cls): + """Generate pseudorandom number.""" + return str(random.randint(0, 100000000)) + + @classmethod + def from_request(cls, http_method, http_url, headers=None, parameters=None, + query_string=None): + """Combines multiple parameter sources.""" + if parameters is None: + parameters = {} + + # Headers + if headers and 'Authorization' in headers: + auth_header = headers['Authorization'] + # Check that the authorization header is OAuth. + if auth_header[:6] == 'OAuth ': + auth_header = auth_header[6:] + try: + # Get the parameters from the header. + header_params = cls._split_header(auth_header) + parameters.update(header_params) + except: + raise Error('Unable to parse OAuth parameters from ' + 'Authorization header.') + + # GET or POST query string. + if query_string: + query_params = cls._split_url_string(query_string) + parameters.update(query_params) + + # URL parameters. 
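+        # (i.e. the query component of http_url itself, so parameters
+        # baked into the URL are merged in as well)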
+ param_str = urlparse.urlparse(http_url)[4] # query + url_params = cls._split_url_string(param_str) + parameters.update(url_params) + + if parameters: + return cls(http_method, http_url, parameters) + + return None + + @classmethod + def from_consumer_and_token(cls, consumer, token=None, + http_method=HTTP_METHOD, http_url=None, parameters=None, + body='', is_form_encoded=False): + if not parameters: + parameters = {} + + defaults = { + 'oauth_consumer_key': consumer.key, + 'oauth_timestamp': cls.make_timestamp(), + 'oauth_nonce': cls.make_nonce(), + 'oauth_version': cls.version, + } + + defaults.update(parameters) + parameters = defaults + + if token: + parameters['oauth_token'] = token.key + if token.verifier: + parameters['oauth_verifier'] = token.verifier + + return Request(http_method, http_url, parameters, body=body, + is_form_encoded=is_form_encoded) + + @classmethod + def from_token_and_callback(cls, token, callback=None, + http_method=HTTP_METHOD, http_url=None, parameters=None): + + if not parameters: + parameters = {} + + parameters['oauth_token'] = token.key + + if callback: + parameters['oauth_callback'] = callback + + return cls(http_method, http_url, parameters) + + @staticmethod + def _split_header(header): + """Turn Authorization: header into parameters.""" + params = {} + parts = header.split(',') + for param in parts: + # Ignore realm parameter. + if param.find('realm') > -1: + continue + # Remove whitespace. + param = param.strip() + # Split key-value. + param_parts = param.split('=', 1) + # Remove quotes and unescape the value. + params[param_parts[0]] = urllib.unquote(param_parts[1].strip('\"')) + return params + + @staticmethod + def _split_url_string(param_str): + """Turn URL string into parameters.""" + parameters = parse_qs(param_str.encode('utf-8'), keep_blank_values=True) + for k, v in parameters.iteritems(): + parameters[k] = urllib.unquote(v[0]) + return parameters + + +class Client(httplib2.Http): + """OAuthClient is a worker to attempt to execute a request.""" + + def __init__(self, consumer, token=None, cache=None, timeout=None, + proxy_info=None): + + if consumer is not None and not isinstance(consumer, Consumer): + raise ValueError("Invalid consumer.") + + if token is not None and not isinstance(token, Token): + raise ValueError("Invalid token.") + + self.consumer = consumer + self.token = token + self.method = SignatureMethod_HMAC_SHA1() + + httplib2.Http.__init__(self, cache=cache, timeout=timeout, proxy_info=proxy_info) + + def set_signature_method(self, method): + if not isinstance(method, SignatureMethod): + raise ValueError("Invalid signature method.") + + self.method = method + + def request(self, uri, method="GET", body='', headers=None, + redirections=httplib2.DEFAULT_MAX_REDIRECTS, connection_type=None): + DEFAULT_POST_CONTENT_TYPE = 'application/x-www-form-urlencoded' + + if not isinstance(headers, dict): + headers = {} + + if method == "POST": + headers['Content-Type'] = headers.get('Content-Type', + DEFAULT_POST_CONTENT_TYPE) + + is_form_encoded = \ + headers.get('Content-Type') == 'application/x-www-form-urlencoded' + + if is_form_encoded and body: + parameters = parse_qs(body) + else: + parameters = None + + req = Request.from_consumer_and_token(self.consumer, + token=self.token, http_method=method, http_url=uri, + parameters=parameters, body=body, is_form_encoded=is_form_encoded) + + req.sign_request(self.method, self.consumer, self.token) + + schema, rest = urllib.splittype(uri) + if rest.startswith('//'): + hierpart = '//' + else: + 
hierpart = '' + host, rest = urllib.splithost(rest) + + realm = schema + ':' + hierpart + host + + if is_form_encoded: + body = req.to_postdata() + elif method == "GET": + uri = req.to_url() + else: + headers.update(req.to_header(realm=realm)) + + return httplib2.Http.request(self, uri, method=method, body=body, + headers=headers, redirections=redirections, + connection_type=connection_type) + + +class Server(object): + """A skeletal implementation of a service provider, providing protected + resources to requests from authorized consumers. + + This class implements the logic to check requests for authorization. You + can use it with your web server or web framework to protect certain + resources with OAuth. + """ + + timestamp_threshold = 300 # In seconds, five minutes. + version = OAUTH_VERSION + signature_methods = None + + def __init__(self, signature_methods=None): + self.signature_methods = signature_methods or {} + + def add_signature_method(self, signature_method): + self.signature_methods[signature_method.name] = signature_method + return self.signature_methods + + def verify_request(self, request, consumer, token): + """Verifies an api call and checks all the parameters.""" + + self._check_version(request) + self._check_signature(request, consumer, token) + parameters = request.get_nonoauth_parameters() + return parameters + + def build_authenticate_header(self, realm=''): + """Optional support for the authenticate header.""" + return {'WWW-Authenticate': 'OAuth realm="%s"' % realm} + + def _check_version(self, request): + """Verify the correct version of the request for this server.""" + version = self._get_version(request) + if version and version != self.version: + raise Error('OAuth version %s not supported.' % str(version)) + + def _get_version(self, request): + """Return the version of the request for this server.""" + try: + version = request.get_parameter('oauth_version') + except: + version = OAUTH_VERSION + + return version + + def _get_signature_method(self, request): + """Figure out the signature with some defaults.""" + try: + signature_method = request.get_parameter('oauth_signature_method') + except: + signature_method = SIGNATURE_METHOD + + try: + # Get the signature method object. + signature_method = self.signature_methods[signature_method] + except: + signature_method_names = ', '.join(self.signature_methods.keys()) + raise Error('Signature method %s not supported try one of the following: %s' % (signature_method, signature_method_names)) + + return signature_method + + def _get_verifier(self, request): + return request.get_parameter('oauth_verifier') + + def _check_signature(self, request, consumer, token): + timestamp, nonce = request._get_timestamp_nonce() + self._check_timestamp(timestamp) + signature_method = self._get_signature_method(request) + + try: + signature = request.get_parameter('oauth_signature') + except: + raise MissingSignature('Missing oauth_signature.') + + # Validate the signature. + valid = signature_method.check(request, consumer, token, signature) + + if not valid: + key, base = signature_method.signing_base(request, consumer, token) + + raise Error('Invalid signature. 
Expected signature base ' + 'string: %s' % base) + + def _check_timestamp(self, timestamp): + """Verify that timestamp is recentish.""" + timestamp = int(timestamp) + now = int(time.time()) + lapsed = now - timestamp + if lapsed > self.timestamp_threshold: + raise Error('Expired timestamp: given %d and now %s has a ' + 'greater difference than threshold %d' % (timestamp, now, + self.timestamp_threshold)) + + +class SignatureMethod(object): + """A way of signing requests. + + The OAuth protocol lets consumers and service providers pick a way to sign + requests. This interface shows the methods expected by the other `oauth` + modules for signing requests. Subclass it and implement its methods to + provide a new way to sign requests. + """ + + def signing_base(self, request, consumer, token): + """Calculates the string that needs to be signed. + + This method returns a 2-tuple containing the starting key for the + signing and the message to be signed. The latter may be used in error + messages to help clients debug their software. + + """ + raise NotImplementedError + + def sign(self, request, consumer, token): + """Returns the signature for the given request, based on the consumer + and token also provided. + + You should use your implementation of `signing_base()` to build the + message to sign. Otherwise it may be less useful for debugging. + + """ + raise NotImplementedError + + def check(self, request, consumer, token, signature): + """Returns whether the given signature is the correct signature for + the given consumer and token signing the given request.""" + built = self.sign(request, consumer, token) + return built == signature + + +class SignatureMethod_HMAC_SHA1(SignatureMethod): + name = 'HMAC-SHA1' + + def signing_base(self, request, consumer, token): + if not hasattr(request, 'normalized_url') or request.normalized_url is None: + raise ValueError("Base URL for request is not set.") + + sig = ( + escape(request.method), + escape(request.normalized_url), + escape(request.get_normalized_parameters()), + ) + + key = '%s&' % escape(consumer.secret) + if token: + key += escape(token.secret) + raw = '&'.join(sig) + return key, raw + + def sign(self, request, consumer, token): + """Builds the base signature string.""" + key, raw = self.signing_base(request, consumer, token) + + hashed = hmac.new(key, raw, sha) + + # Calculate the digest base 64. + return binascii.b2a_base64(hashed.digest())[:-1] + + +class SignatureMethod_PLAINTEXT(SignatureMethod): + + name = 'PLAINTEXT' + + def signing_base(self, request, consumer, token): + """Concatenates the consumer key and secret with the token's + secret.""" + sig = '%s&' % escape(consumer.secret) + if token: + sig = sig + escape(token.secret) + return sig, sig + + def sign(self, request, consumer, token): + key, raw = self.signing_base(request, consumer, token) + return raw diff --git a/lib/oauth2/_version.py b/lib/oauth2/_version.py new file mode 100644 index 0000000..9d779ea --- /dev/null +++ b/lib/oauth2/_version.py @@ -0,0 +1,18 @@ +# This is the version of this source code. + +manual_verstr = "1.5" + + + +auto_build_num = "211" + + + +verstr = manual_verstr + "." + auto_build_num +try: + from pyutil.version_class import Version as pyutil_Version + __version__ = pyutil_Version(verstr) +except (ImportError, ValueError): + # Maybe there is no pyutil installed. 
+ from distutils.version import LooseVersion as distutils_Version + __version__ = distutils_Version(verstr) diff --git a/lib/simpleauth b/lib/simpleauth new file mode 100644 index 0000000..f620681 --- /dev/null +++ b/lib/simpleauth @@ -0,0 +1 @@ +../../simpleauth \ No newline at end of file diff --git a/main.py b/main.py new file mode 100644 index 0000000..7e9a0fe --- /dev/null +++ b/main.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +import sys +from secrets import SESSION_KEY + +from webapp2 import WSGIApplication, Route + +# inject './lib' dir in the path so that we can simply do "import ndb" +# or whatever there's in the app lib dir. +if 'lib' not in sys.path: + sys.path[0:0] = ['lib'] + +# webapp2 config +app_config = { + 'webapp2_extras.sessions': { + 'cookie_name': '_simpleauth_sess', + 'secret_key': SESSION_KEY + }, + 'webapp2_extras.auth': { + 'user_attributes': [] + } +} + +# Map URLs to handlers +routes = [ + Route('/', handler='handlers.RootHandler', name='home'), + Route('/semantic', handler='handlers.Semantic', name='Semantic'), + Route('/add', handler='handlers.ProjectAdd'), + Route('/firm', handler='handlers.firms'), + Route('/delete_firm', handler='handlers.DeleteFirm'), + Route('/delete_project', handler='handlers.DeleteProject'), + Route('/delete_comment', handler='handlers.DeleteComment'), + Route('/delete_member', handler='handlers.DeleteMember'), + Route('/about', handler='handlers.About'), + Route('/invite', handler='handlers.Invite'), + Route('/privacy', handler='handlers.Privacy'), + Route('/google0e3609a4a19b9bdc.html', handler='handlers.VerifyGoogle'), + Route('/features', handler='handlers.Features'), + Route('/browse', handler='handlers.Browse'), + Route('/search', handler='handlers.Search'), + Route('/chrome', handler='handlers.Chrome'), + Route('/editfirm', handler='handlers.EditFirm'), + Route('/addfirm', handler='handlers.firms'), + Route('/add_firm_new', handler='handlers.add_firm'), + Route('/cadd', handler='handlers.CommentAdd'), + Route('/add_member', handler='handlers.AddFirmMember'), + Route('/projects', handler='handlers.ProjectsHandler', name='projects'), + Route('/profile', handler='handlers.ProfileHandler', name='profile'), + + Route('/logout', handler='handlers.AuthHandler:logout', name='logout'), + Route('/t |