mirror of
https://github.com/kovidgoyal/calibre.git
synced 2025-06-23 15:30:45 -04:00
Start refactoring of Content Server to use Routes for URL dispatching and etree instead of genshi for templating. OPDS feeds are currently broken.
This commit is contained in:
parent
d7fa2363a8
commit
359c0cd40e
@ -123,7 +123,7 @@ function fetch_library_books(start, num, timeout, sort, order, search) {
|
||||
|
||||
current_library_request = $.ajax({
|
||||
type: "GET",
|
||||
url: "library",
|
||||
url: "xml",
|
||||
data: data,
|
||||
cache: false,
|
||||
timeout: timeout, //milliseconds
|
||||
|
@ -14,14 +14,46 @@ import cherrypy
|
||||
from calibre.constants import __appname__, __version__
|
||||
from calibre.utils.date import fromtimestamp
|
||||
from calibre.library.server import listen_on, log_access_file, log_error_file
|
||||
from calibre.library.server.utils import expose
|
||||
from calibre.utils.mdns import publish as publish_zeroconf, \
|
||||
stop_server as stop_zeroconf, get_external_ip
|
||||
from calibre.library.server.content import ContentServer
|
||||
from calibre.library.server.mobile import MobileServer
|
||||
from calibre.library.server.xml import XMLServer
|
||||
from calibre.library.server.opds import OPDSServer
|
||||
from calibre.library.server.cache import Cache
|
||||
|
||||
class LibraryServer(ContentServer, MobileServer, XMLServer, OPDSServer):
|
||||
|
||||
class DispatchController(object): # {{{
    '''
    Routes-based URL dispatcher. Each registered route is connected to a
    synthetic attribute f_0, f_1, ... on this object, which resolves to
    the exposed handler function.
    '''

    def __init__(self):
        self.dispatcher = cherrypy.dispatch.RoutesDispatcher()
        self.funcs = []     # exposed handlers, indexed by registration order
        self.seen = set()   # route names already registered, to catch duplicates

    def __call__(self, name, route, func, **kwargs):
        '''
        Register ``func`` as the handler for ``route`` under the unique
        route ``name``.

        :raises NameError: if ``name`` has already been used.
        '''
        if name in self.seen:
            raise NameError('Route name: '+ repr(name) + ' already used')
        self.seen.add(name)
        # The dispatcher's action refers back to this object's f_<n> attribute
        kwargs['action'] = 'f_%d'%len(self.funcs)
        self.dispatcher.connect(name, route, self, **kwargs)
        self.funcs.append(expose(func))

    def __getattr__(self, attr):
        # Resolve the synthetic f_<n> attributes to registered handlers.
        if not attr.startswith('f_'):
            raise AttributeError(attr + ' not found')
        num = attr.rpartition('_')[-1]
        try:
            num = int(num)
        except ValueError:
            # Was a bare ``except:`` which also swallowed SystemExit and
            # KeyboardInterrupt; only a malformed number should map to
            # AttributeError.
            raise AttributeError(attr + ' not found')
        if num < 0 or num >= len(self.funcs):
            raise AttributeError(attr + ' not found')
        return self.funcs[num]

# }}}
|
||||
|
||||
class LibraryServer(ContentServer, MobileServer, XMLServer, OPDSServer, Cache):
|
||||
|
||||
server_name = __appname__ + '/' + __version__
|
||||
|
||||
@ -88,8 +120,16 @@ class LibraryServer(ContentServer, MobileServer, XMLServer, OPDSServer):
|
||||
|
||||
def start(self):
|
||||
self.is_running = False
|
||||
d = DispatchController()
|
||||
for x in self.__class__.__bases__:
|
||||
if hasattr(x, 'add_routes'):
|
||||
x.add_routes(self, d)
|
||||
root_conf = self.config.get('/', {})
|
||||
root_conf['request.dispatch'] = d.dispatcher
|
||||
self.config['/'] = root_conf
|
||||
|
||||
self.setup_loggers()
|
||||
cherrypy.tree.mount(self, '', config=self.config)
|
||||
cherrypy.tree.mount(root=None, config=self.config)
|
||||
try:
|
||||
try:
|
||||
cherrypy.engine.start()
|
||||
|
18
src/calibre/library/server/cache.py
Normal file
18
src/calibre/library/server/cache.py
Normal file
@ -0,0 +1,18 @@
|
||||
#!/usr/bin/env python
|
||||
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
|
||||
|
||||
__license__ = 'GPL v3'
|
||||
__copyright__ = '2010, Kovid Goyal <kovid@kovidgoyal.net>'
|
||||
__docformat__ = 'restructuredtext en'
|
||||
|
||||
from calibre.utils.date import utcnow
|
||||
|
||||
class Cache(object):
    '''Mixin memoizing expensive library lookups until the database changes.'''

    @property
    def categories_cache(self):
        # The cached value is a (timestamp, categories) pair. Rebuild it
        # when it is absent or when the database has been modified since
        # the timestamp was recorded.
        cached = getattr(self, '_category_cache', None)
        stale = cached is None or cached[0] <= self.db.last_modified()
        if stale:
            self._category_cache = (utcnow(), self.db.get_categories())
        return self._category_cache[1]
|
@ -16,7 +16,7 @@ except ImportError:
|
||||
|
||||
from calibre import fit_image, guess_type
|
||||
from calibre.utils.date import fromtimestamp
|
||||
from calibre.library.server.utils import expose
|
||||
|
||||
|
||||
class ContentServer(object):
|
||||
|
||||
@ -25,6 +25,13 @@ class ContentServer(object):
|
||||
a few utility methods.
|
||||
'''
|
||||
|
||||
def add_routes(self, connect):
|
||||
connect('root', '/', self.index)
|
||||
connect('get', '/get/{what}/{id}', self.get,
|
||||
conditions=dict(method=["GET", "HEAD"]))
|
||||
connect('static', '/static/{name}', self.static,
|
||||
conditions=dict(method=["GET", "HEAD"]))
|
||||
|
||||
# Utility methods {{{
|
||||
def last_modified(self, updated):
|
||||
'''
|
||||
@ -68,8 +75,7 @@ class ContentServer(object):
|
||||
# }}}
|
||||
|
||||
|
||||
@expose
|
||||
def get(self, what, id, *args, **kwargs):
|
||||
def get(self, what, id):
|
||||
'Serves files, covers, thumbnails from the calibre database'
|
||||
try:
|
||||
id = int(id)
|
||||
@ -87,7 +93,6 @@ class ContentServer(object):
|
||||
return self.get_cover(id)
|
||||
return self.get_format(id, what)
|
||||
|
||||
@expose
|
||||
def static(self, name):
|
||||
'Serves static content'
|
||||
name = name.lower()
|
||||
@ -108,7 +113,6 @@ class ContentServer(object):
|
||||
cherrypy.response.headers['Last-Modified'] = self.last_modified(lm)
|
||||
return open(path, 'rb').read()
|
||||
|
||||
@expose
|
||||
def index(self, **kwargs):
|
||||
'The / URL'
|
||||
ua = cherrypy.request.headers.get('User-Agent', '').strip()
|
||||
|
@ -11,7 +11,7 @@ import __builtin__
|
||||
import cherrypy
|
||||
|
||||
from calibre.utils.genshi.template import MarkupTemplate
|
||||
from calibre.library.server.utils import strftime, expose
|
||||
from calibre.library.server.utils import strftime
|
||||
from calibre.ebooks.metadata import fmt_sidx
|
||||
|
||||
# Templates {{{
|
||||
@ -173,7 +173,9 @@ class MobileServer(object):
|
||||
|
||||
MOBILE_UA = re.compile('(?i)(?:iPhone|Opera Mini|NetFront|webOS|Mobile|Android|imode|DoCoMo|Minimo|Blackberry|MIDP|Symbian|HD2)')
|
||||
|
||||
@expose
|
||||
def add_routes(self, connect):
|
||||
connect('mobile', '/mobile', self.mobile)
|
||||
|
||||
def mobile(self, start='1', num='25', sort='date', search='',
|
||||
_=None, order='descending'):
|
||||
'''
|
||||
|
@ -5,15 +5,102 @@ __license__ = 'GPL v3'
|
||||
__copyright__ = '2010, Kovid Goyal <kovid@kovidgoyal.net>'
|
||||
__docformat__ = 'restructuredtext en'
|
||||
|
||||
import re
|
||||
import re, hashlib
|
||||
from itertools import repeat
|
||||
from functools import partial
|
||||
|
||||
import cherrypy
|
||||
from lxml import etree
|
||||
from lxml.builder import ElementMaker
|
||||
|
||||
from calibre.utils.genshi.template import MarkupTemplate
|
||||
from calibre.library.server.utils import strftime, expose
|
||||
from calibre.ebooks.metadata import fmt_sidx, title_sort
|
||||
from calibre import guess_type, prepare_string_for_xml
|
||||
from calibre.constants import __appname__
|
||||
|
||||
# Vocabulary for building OPDS feeds {{{
# Element factory bound to the Atom namespace; the Dublin Core and OPDS
# namespaces are also declared on generated elements.
E = ElementMaker(namespace='http://www.w3.org/2005/Atom',
        nsmap={
            None : 'http://www.w3.org/2005/Atom',
            'dc' : 'http://purl.org/dc/terms/',
            'opds' : 'http://opds-spec.org/2010/catalog',
            })


FEED = E.feed
TITLE = E.title
ID = E.id

def UPDATED(dt, *args, **kwargs):
    # Render the datetime (assumed to be UTC) in the fixed-offset Atom format.
    return E.updated(dt.strftime('%Y-%m-%dT%H:%M:%S+00:00'), *args, **kwargs)

LINK = partial(E.link, type='application/atom+xml')
NAVLINK = partial(E.link,
        type='application/atom+xml;type=feed;profile=opds-catalog')

def SEARCH(base_href, *args, **kwargs):
    # OpenSearch-style search link; {searchTerms} is substituted by clients.
    kwargs['rel'] = 'search'
    kwargs['title'] = 'Search'
    kwargs['href'] = base_href+'/?search={searchTerms}'
    return LINK(*args, **kwargs)

def AUTHOR(name, uri=None):
    # Atom <author> element with an optional <uri> child.
    args = [E.name(name)]
    if uri is not None:
        args.append(E.uri(uri))
    return E.author(*args)

SUBTITLE = E.subtitle

def NAVCATALOG_ENTRY(base_href, updated, title, description, query_data):
    # Build one navigation-catalog <entry>. query_data becomes the query
    # string of the entry's link; the entry id is derived from a SHA-1 of
    # the href so it is stable across requests.
    data = [u'%s=%s'%(key, val) for key, val in query_data.items()]
    data = '&'.join(data)
    href = base_href+'/?'+data
    id_ = 'calibre-subcatalog:'+str(hashlib.sha1(href).hexdigest())
    return E.entry(
        TITLE(title),
        ID(id_),
        UPDATED(updated),
        E.content(description, type='text'),
        NAVLINK(href=href)
        )

# }}}
|
||||
|
||||
class Feed(object):
    '''
    Base class for OPDS feeds. Subclasses must set ``self.root`` to an
    lxml element (the <feed> root).
    '''

    def __str__(self):
        # Serialize the feed as a pretty-printed, declared XML document.
        # NOTE(review): with an explicit encoding etree.tostring returns a
        # byte string -- fine under Python 2, which this code targets.
        return etree.tostring(self.root, pretty_print=True, encoding='utf-8',
                xml_declaration=True)
|
||||
|
||||
class TopLevel(Feed):
    '''
    The root (navigation) OPDS feed: one entry per sub-catalog, plus a
    search link and feed metadata.
    '''

    def __init__(self,
            updated,  # datetime object in UTC
            categories,  # iterable of (title, description, sortby-query) triples
            id_ = 'urn:calibre:main',
            base_href = '/stanza'
            ):
        self.base_href = base_href
        # Navigation-entry builder with base_href and the updated timestamp
        # already bound.
        subc = partial(NAVCATALOG_ENTRY, base_href, updated)

        # One sub-catalog entry per category, linking to ?sortby=<q>
        subcatalogs = [subc('By '+title,
            'Books sorted by '+desc, {'sortby':q}) for title, desc, q in
            categories]

        self.root = \
            FEED(
                    TITLE(__appname__ + ' ' + _('Library')),
                    ID(id_),
                    UPDATED(updated),
                    SEARCH(base_href),
                    AUTHOR(__appname__, uri='http://calibre-ebook.com'),
                    SUBTITLE(_('Books in your library')),
                    *subcatalogs
                    )
|
||||
|
||||
|
||||
|
||||
# Templates {{{
|
||||
|
||||
@ -42,6 +129,7 @@ STANZA_SUBCATALOG_ENTRY=MarkupTemplate('''\
|
||||
</entry>
|
||||
''')
|
||||
|
||||
# Feed of books
|
||||
STANZA = MarkupTemplate('''\
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<feed xmlns="http://www.w3.org/2005/Atom" xmlns:py="http://genshi.edgewall.org/">
|
||||
@ -63,62 +151,20 @@ STANZA = MarkupTemplate('''\
|
||||
</feed>
|
||||
''')
|
||||
|
||||
STANZA_MAIN = MarkupTemplate('''\
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<feed xmlns="http://www.w3.org/2005/Atom" xmlns:py="http://genshi.edgewall.org/">
|
||||
<title>calibre Library</title>
|
||||
<id>$id</id>
|
||||
<updated>${updated.strftime('%Y-%m-%dT%H:%M:%S+00:00')}</updated>
|
||||
<link rel="search" title="Search" type="application/atom+xml" href="/stanza/?search={searchTerms}"/>
|
||||
<author>
|
||||
<name>calibre</name>
|
||||
<uri>http://calibre-ebook.com</uri>
|
||||
</author>
|
||||
<subtitle>
|
||||
${subtitle}
|
||||
</subtitle>
|
||||
<entry>
|
||||
<title>By Author</title>
|
||||
<id>urn:uuid:fc000fa0-8c23-11de-a31d-0002a5d5c51b</id>
|
||||
<updated>${updated.strftime('%Y-%m-%dT%H:%M:%S+00:00')}</updated>
|
||||
<link type="application/atom+xml" href="/stanza/?sortby=byauthor" />
|
||||
<content type="text">Books sorted by Author</content>
|
||||
</entry>
|
||||
<entry>
|
||||
<title>By Title</title>
|
||||
<id>urn:uuid:1df4fe40-8c24-11de-b4c6-0002a5d5c51b</id>
|
||||
<updated>${updated.strftime('%Y-%m-%dT%H:%M:%S+00:00')}</updated>
|
||||
<link type="application/atom+xml" href="/stanza/?sortby=bytitle" />
|
||||
<content type="text">Books sorted by Title</content>
|
||||
</entry>
|
||||
<entry>
|
||||
<title>By Newest</title>
|
||||
<id>urn:uuid:3c6d4940-8c24-11de-a4d7-0002a5d5c51b</id>
|
||||
<updated>${updated.strftime('%Y-%m-%dT%H:%M:%S+00:00')}</updated>
|
||||
<link type="application/atom+xml" href="/stanza/?sortby=bynewest" />
|
||||
<content type="text">Books sorted by Date</content>
|
||||
</entry>
|
||||
<entry>
|
||||
<title>By Tag</title>
|
||||
<id>urn:uuid:824921e8-db8a-4e61-7d38-f1ce41502853</id>
|
||||
<updated>${updated.strftime('%Y-%m-%dT%H:%M:%S+00:00')}</updated>
|
||||
<link type="application/atom+xml" href="/stanza/?sortby=bytag" />
|
||||
<content type="text">Books sorted by Tags</content>
|
||||
</entry>
|
||||
<entry>
|
||||
<title>By Series</title>
|
||||
<id>urn:uuid:512a5e50-a88f-f6b8-82aa-8f129c719f61</id>
|
||||
<updated>${updated.strftime('%Y-%m-%dT%H:%M:%S+00:00')}</updated>
|
||||
<link type="application/atom+xml" href="/stanza/?sortby=byseries" />
|
||||
<content type="text">Books sorted by Series</content>
|
||||
</entry>
|
||||
</feed>
|
||||
''')
|
||||
|
||||
# }}}
|
||||
|
||||
class OPDSServer(object):
|
||||
|
||||
def build_top_level(self, updated, base_href='/stanza'):
|
||||
categories = self.categories_cache
|
||||
categories = [(x.capitalize(), x.capitalize(), x) for x in
|
||||
categories.keys()]
|
||||
categories.append(('Title', 'Title', '|title|'))
|
||||
categories.append(('Newest', 'Newest', '|newest|'))
|
||||
|
||||
return TopLevel(updated, categories, base_href=base_href)
|
||||
|
||||
def get_matches(self, location, query):
|
||||
base = self.db.data.get_matches(location, query)
|
||||
epub = self.db.data.get_matches('format', '=epub')
|
||||
@ -173,10 +219,6 @@ class OPDSServer(object):
|
||||
return STANZA.generate(subtitle=subtitle, data=entries, FM=self.db.FIELD_MAP,
|
||||
updated=updated, id='urn:calibre:main', next_link=next_link).render('xml')
|
||||
|
||||
def stanza_main(self, updated):
|
||||
return STANZA_MAIN.generate(subtitle='', data=[], FM=self.db.FIELD_MAP,
|
||||
updated=updated, id='urn:calibre:main').render('xml')
|
||||
|
||||
@expose
|
||||
def stanza(self, search=None, sortby=None, authorid=None, tagid=None,
|
||||
seriesid=None, offset=0):
|
||||
@ -186,9 +228,11 @@ class OPDSServer(object):
|
||||
offset = int(offset)
|
||||
cherrypy.response.headers['Last-Modified'] = self.last_modified(updated)
|
||||
cherrypy.response.headers['Content-Type'] = 'text/xml'
|
||||
# Main feed
|
||||
|
||||
# Top Level feed
|
||||
if not sortby and not search and not authorid and not tagid and not seriesid:
|
||||
return self.stanza_main(updated)
|
||||
return str(self.build_top_level(updated))
|
||||
|
||||
if sortby in ('byseries', 'byauthor', 'bytag'):
|
||||
return self.stanza_sortby_subcategory(updated, sortby, offset)
|
||||
|
||||
@ -296,5 +340,8 @@ class OPDSServer(object):
|
||||
next_link=next_link, updated=updated, id='urn:calibre:main').render('xml')
|
||||
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
from datetime import datetime
|
||||
f = TopLevel(datetime.utcnow())
|
||||
print f
|
||||
|
||||
|
@ -7,34 +7,33 @@ __docformat__ = 'restructuredtext en'
|
||||
|
||||
import time
|
||||
|
||||
import cherrypy
|
||||
|
||||
from calibre import strftime as _strftime, prints
|
||||
from calibre.utils.date import now as nowf
|
||||
|
||||
|
||||
def expose(func):
|
||||
import cherrypy
|
||||
|
||||
def do(self, *args, **kwargs):
|
||||
def do(*args, **kwargs):
|
||||
self = func.im_self
|
||||
if self.opts.develop:
|
||||
start = time.time()
|
||||
|
||||
dict.update(cherrypy.response.headers, {'Server':self.server_name})
|
||||
if not self.embedded:
|
||||
self.db.check_if_modified()
|
||||
return func(self, *args, **kwargs)
|
||||
|
||||
return cherrypy.expose(do)
|
||||
|
||||
def timeit(func):
|
||||
|
||||
def do(self, *args, **kwargs):
|
||||
if self.opts.develop:
|
||||
start = time.time()
|
||||
ans = func(self, *args, **kwargs)
|
||||
ans = func(*args, **kwargs)
|
||||
if self.opts.develop:
|
||||
prints('Function', func.__name__, 'called with args:', args, kwargs)
|
||||
prints('\tTime:', func.__name__, time.time()-start)
|
||||
return ans
|
||||
|
||||
do.__name__ = func.__name__
|
||||
|
||||
return do
|
||||
|
||||
|
||||
def strftime(fmt='%Y/%m/%d %H:%M:%S', dt=None):
|
||||
if not hasattr(dt, 'timetuple'):
|
||||
dt = nowf()
|
||||
|
@ -5,52 +5,26 @@ __license__ = 'GPL v3'
|
||||
__copyright__ = '2010, Kovid Goyal <kovid@kovidgoyal.net>'
|
||||
__docformat__ = 'restructuredtext en'
|
||||
|
||||
import copy, __builtin__
|
||||
import __builtin__
|
||||
|
||||
import cherrypy
|
||||
from lxml.builder import ElementMaker
|
||||
from lxml import etree
|
||||
|
||||
from calibre.utils.genshi.template import MarkupTemplate
|
||||
from calibre.library.server.utils import strftime, expose
|
||||
from calibre.library.server.utils import strftime
|
||||
from calibre.ebooks.metadata import fmt_sidx
|
||||
from calibre.constants import preferred_encoding
|
||||
from calibre import isbytestring
|
||||
|
||||
# Templates {{{
|
||||
BOOK = '''\
|
||||
<book xmlns:py="http://genshi.edgewall.org/"
|
||||
id="${r[FM['id']]}"
|
||||
title="${r[FM['title']]}"
|
||||
sort="${r[FM['sort']]}"
|
||||
author_sort="${r[FM['author_sort']]}"
|
||||
authors="${authors}"
|
||||
rating="${r[FM['rating']]}"
|
||||
timestamp="${timestamp}"
|
||||
pubdate="${pubdate}"
|
||||
size="${r[FM['size']]}"
|
||||
isbn="${r[FM['isbn']] if r[FM['isbn']] else ''}"
|
||||
formats="${r[FM['formats']] if r[FM['formats']] else ''}"
|
||||
series = "${r[FM['series']] if r[FM['series']] else ''}"
|
||||
series_index="${r[FM['series_index']]}"
|
||||
tags="${r[FM['tags']] if r[FM['tags']] else ''}"
|
||||
publisher="${r[FM['publisher']] if r[FM['publisher']] else ''}">${r[FM['comments']] if r[FM['comments']] else ''}
|
||||
</book>
|
||||
'''
|
||||
|
||||
|
||||
LIBRARY = MarkupTemplate('''\
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<library xmlns:py="http://genshi.edgewall.org/" start="$start" num="${len(books)}" total="$total" updated="${updated.strftime('%Y-%m-%dT%H:%M:%S+00:00')}">
|
||||
<py:for each="book in books">
|
||||
${Markup(book)}
|
||||
</py:for>
|
||||
</library>
|
||||
''')
|
||||
|
||||
# }}}
|
||||
E = ElementMaker()
|
||||
|
||||
class XMLServer(object):
|
||||
'Serves XML and the Ajax based HTML frontend'
|
||||
|
||||
@expose
|
||||
def library(self, start='0', num='50', sort=None, search=None,
|
||||
def add_routes(self, connect):
|
||||
connect('xml', '/xml', self.xml)
|
||||
|
||||
def xml(self, start='0', num='50', sort=None, search=None,
|
||||
_=None, order='ascending'):
|
||||
'''
|
||||
Serves metadata from the calibre database as XML.
|
||||
@ -68,30 +42,63 @@ class XMLServer(object):
|
||||
num = int(num)
|
||||
except ValueError:
|
||||
raise cherrypy.HTTPError(400, 'num: %s is not an integer'%num)
|
||||
|
||||
order = order.lower().strip() == 'ascending'
|
||||
|
||||
ids = self.db.data.parse(search) if search and search.strip() else self.db.data.universal_set()
|
||||
ids = sorted(ids)
|
||||
|
||||
FM = self.db.FIELD_MAP
|
||||
items = copy.deepcopy([r for r in iter(self.db) if r[FM['id']] in ids])
|
||||
|
||||
items = [r for r in iter(self.db) if r[FM['id']] in ids]
|
||||
if sort is not None:
|
||||
self.sort(items, sort, order)
|
||||
|
||||
book, books = MarkupTemplate(BOOK), []
|
||||
|
||||
books = []
|
||||
|
||||
def serialize(x):
|
||||
if isinstance(x, unicode):
|
||||
return x
|
||||
if isbytestring(x):
|
||||
return x.decode(preferred_encoding, 'replace')
|
||||
return unicode(x)
|
||||
|
||||
for record in items[start:start+num]:
|
||||
kwargs = {}
|
||||
aus = record[FM['authors']] if record[FM['authors']] else __builtin__._('Unknown')
|
||||
authors = '|'.join([i.replace('|', ',') for i in aus.split(',')])
|
||||
record[FM['series_index']] = \
|
||||
kwargs['authors'] = authors
|
||||
|
||||
kwargs['series_index'] = \
|
||||
fmt_sidx(float(record[FM['series_index']]))
|
||||
ts, pd = strftime('%Y/%m/%d %H:%M:%S', record[FM['timestamp']]), \
|
||||
strftime('%Y/%m/%d %H:%M:%S', record[FM['pubdate']])
|
||||
books.append(book.generate(r=record, authors=authors, timestamp=ts,
|
||||
pubdate=pd, FM=FM).render('xml').decode('utf-8'))
|
||||
|
||||
for x in ('timestamp', 'pubdate'):
|
||||
kwargs[x] = strftime('%Y/%m/%d %H:%M:%S', record[FM[x]])
|
||||
|
||||
for x in ('id', 'title', 'sort', 'author_sort', 'rating', 'size'):
|
||||
kwargs[x] = serialize(record[FM[x]])
|
||||
|
||||
for x in ('isbn', 'formats', 'series', 'tags', 'publisher',
|
||||
'comments'):
|
||||
y = record[FM[x]]
|
||||
kwargs[x] = serialize(y) if y else ''
|
||||
|
||||
c = kwargs.pop('comments')
|
||||
books.append(E.book(c, **kwargs))
|
||||
|
||||
updated = self.db.last_modified()
|
||||
kwargs = dict(
|
||||
start = str(start),
|
||||
updated=updated.strftime('%Y-%m-%dT%H:%M:%S+00:00'),
|
||||
total=str(len(ids)),
|
||||
num=str(len(books)))
|
||||
ans = E.library(*books, **kwargs)
|
||||
|
||||
cherrypy.response.headers['Content-Type'] = 'text/xml'
|
||||
cherrypy.response.headers['Last-Modified'] = self.last_modified(updated)
|
||||
return LIBRARY.generate(books=books, start=start, updated=updated,
|
||||
total=len(ids), FM=FM).render('xml')
|
||||
|
||||
return etree.tostring(ans, encoding='utf-8', pretty_print=True,
|
||||
xml_declaration=True)
|
||||
|
||||
|
||||
|
||||
|
142
src/routes/__init__.py
Normal file
142
src/routes/__init__.py
Normal file
@ -0,0 +1,142 @@
|
||||
"""Provides common classes and functions most users will want access to."""
|
||||
import threading, sys
|
||||
|
||||
class _RequestConfig(object):
|
||||
"""
|
||||
RequestConfig thread-local singleton
|
||||
|
||||
The Routes RequestConfig object is a thread-local singleton that should
|
||||
be initialized by the web framework that is utilizing Routes.
|
||||
"""
|
||||
__shared_state = threading.local()
|
||||
|
||||
def __getattr__(self, name):
|
||||
return getattr(self.__shared_state, name)
|
||||
|
||||
def __setattr__(self, name, value):
|
||||
"""
|
||||
If the name is environ, load the wsgi envion with load_wsgi_environ
|
||||
and set the environ
|
||||
"""
|
||||
if name == 'environ':
|
||||
self.load_wsgi_environ(value)
|
||||
return self.__shared_state.__setattr__(name, value)
|
||||
return self.__shared_state.__setattr__(name, value)
|
||||
|
||||
def __delattr__(self, name):
|
||||
delattr(self.__shared_state, name)
|
||||
|
||||
def load_wsgi_environ(self, environ):
|
||||
"""
|
||||
Load the protocol/server info from the environ and store it.
|
||||
Also, match the incoming URL if there's already a mapper, and
|
||||
store the resulting match dict in mapper_dict.
|
||||
"""
|
||||
if 'HTTPS' in environ or environ.get('wsgi.url_scheme') == 'https' \
|
||||
or environ.get('HTTP_X_FORWARDED_PROTO') == 'https':
|
||||
self.__shared_state.protocol = 'https'
|
||||
else:
|
||||
self.__shared_state.protocol = 'http'
|
||||
try:
|
||||
self.mapper.environ = environ
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
# Wrap in try/except as common case is that there is a mapper
|
||||
# attached to self
|
||||
try:
|
||||
if 'PATH_INFO' in environ:
|
||||
mapper = self.mapper
|
||||
path = environ['PATH_INFO']
|
||||
result = mapper.routematch(path)
|
||||
if result is not None:
|
||||
self.__shared_state.mapper_dict = result[0]
|
||||
self.__shared_state.route = result[1]
|
||||
else:
|
||||
self.__shared_state.mapper_dict = None
|
||||
self.__shared_state.route = None
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
if 'HTTP_X_FORWARDED_HOST' in environ:
|
||||
self.__shared_state.host = environ['HTTP_X_FORWARDED_HOST']
|
||||
elif 'HTTP_HOST' in environ:
|
||||
self.__shared_state.host = environ['HTTP_HOST']
|
||||
else:
|
||||
self.__shared_state.host = environ['SERVER_NAME']
|
||||
if environ['wsgi.url_scheme'] == 'https':
|
||||
if environ['SERVER_PORT'] != '443':
|
||||
self.__shared_state.host += ':' + environ['SERVER_PORT']
|
||||
else:
|
||||
if environ['SERVER_PORT'] != '80':
|
||||
self.__shared_state.host += ':' + environ['SERVER_PORT']
|
||||
|
||||
def request_config(original=False):
    """
    Returns the Routes RequestConfig object.

    To get the Routes RequestConfig:

    >>> from routes import *
    >>> config = request_config()

    The following attributes must be set on the config object every request:

    mapper
        mapper should be a Mapper instance that's ready for use
    host
        host is the hostname of the webapp
    protocol
        protocol is the protocol of the current request
    mapper_dict
        mapper_dict should be the dict returned by mapper.match()
    redirect
        redirect should be a function that issues a redirect,
        and takes a url as the sole argument
    prefix (optional)
        Set if the application is moved under a URL prefix. Prefix
        will be stripped before matching, and prepended on generation
    environ (optional)
        Set to the WSGI environ for automatic prefix support if the
        webapp is underneath a 'SCRIPT_NAME'

        Setting the environ will use information in environ to try and
        populate the host/protocol/mapper_dict options if you've already
        set a mapper.

    **Using your own request local**

    If you have your own request local object that you'd like to use instead
    of the default thread local provided by Routes, you can configure Routes
    to use it::

        from routes import request_config
        config = request_config()
        if hasattr(config, 'using_request_local'):
            config.request_local = YourLocalCallable
            config = request_config()

    Once you have configured request_config, it's advisable you retrieve it
    again to get the object you wanted. The variable you assign to
    request_local is assumed to be a callable that will get the local config
    object you wish.

    This example tests for the presence of the 'using_request_local' attribute
    which will be present if you haven't assigned it yet. This way you can
    avoid repeat assignments of the request specific callable.

    Should you want the original object, perhaps to change the callable it's
    using or stop this behavior, call request_config(original=True).
    """
    obj = _RequestConfig()
    try:
        # If a custom request-local callable has been configured, delegate
        # to it -- unless the caller explicitly asked for the original.
        if obj.request_local and original is False:
            return getattr(obj, 'request_local')()
    except AttributeError:
        # First call in this thread: mark that no custom request local is
        # in use yet.
        obj.request_local = False
        obj.using_request_local = False
    return _RequestConfig()
|
||||
|
||||
from routes.mapper import Mapper
|
||||
from routes.util import redirect_to, url_for, URLGenerator
|
||||
__all__=['Mapper', 'url_for', 'URLGenerator', 'redirect_to', 'request_config']
|
4
src/routes/base.py
Normal file
4
src/routes/base.py
Normal file
@ -0,0 +1,4 @@
|
||||
"""Route and Mapper core classes"""
|
||||
from routes import request_config
|
||||
from routes.mapper import Mapper
|
||||
from routes.route import Route
|
70
src/routes/lru.py
Normal file
70
src/routes/lru.py
Normal file
@ -0,0 +1,70 @@
|
||||
"""LRU caching class and decorator"""
|
||||
import threading
|
||||
|
||||
# Sentinel marking an empty clock slot, and "no default supplied".
_marker = object()


class LRUCache(object):
    """Pseudo-LRU (CLOCK) cache holding a fixed number of entries."""

    def __init__(self, size):
        """Implements a pseudo-LRU algorithm (CLOCK).

        :param size: number of cache slots; must be at least 1.
        :raises ValueError: if ``size`` is less than 1.
        """
        if size < 1:
            # The check accepts size == 1, so the message says '>= 1'
            # (the original message incorrectly said '>1').
            raise ValueError('size must be >= 1')
        self.clock = []
        # range() iterates identically to the original xrange() here and
        # works on both Python 2 and 3.
        for i in range(0, size):
            self.clock.append({'key':_marker, 'ref':False})
        self.size = size
        self.maxpos = size - 1
        self.hand = 0     # current position of the CLOCK hand
        self.data = {}    # key -> (clock position, value)
        self.lock = threading.Lock()

    def __contains__(self, key):
        return key in self.data

    def __getitem__(self, key, default=None):
        # NOTE: unlike a normal mapping, a miss returns ``default`` (None)
        # instead of raising KeyError.
        try:
            datum = self.data[key]
        except KeyError:
            return default
        pos, val = datum
        # Mark the slot as recently used and advance the hand past it.
        self.clock[pos]['ref'] = True
        hand = pos + 1
        if hand > self.maxpos:
            hand = 0
        self.hand = hand
        return val

    def __setitem__(self, key, val, _marker=_marker):
        hand = self.hand
        maxpos = self.maxpos
        clock = self.clock
        data = self.data
        lock = self.lock

        end = hand - 1
        if end < 0:
            end = maxpos

        # Sweep the clock: clear reference bits until an unreferenced slot
        # is found (or we come all the way around), then evict whatever
        # occupies that slot and store the new entry there.
        while 1:
            current = clock[hand]
            ref = current['ref']
            if ref is True:
                current['ref'] = False
                hand = hand + 1
                if hand > maxpos:
                    hand = 0
            elif ref is False or hand == end:
                lock.acquire()
                try:
                    # Evict the previous occupant of this slot, if any.
                    oldkey = current['key']
                    if oldkey in data:
                        del data[oldkey]
                    current['key'] = key
                    current['ref'] = True
                    data[key] = (hand, val)
                    hand += 1
                    if hand > maxpos:
                        hand = 0
                    self.hand = hand
                finally:
                    lock.release()
                break
1161
src/routes/mapper.py
Normal file
1161
src/routes/mapper.py
Normal file
File diff suppressed because it is too large
Load Diff
146
src/routes/middleware.py
Normal file
146
src/routes/middleware.py
Normal file
@ -0,0 +1,146 @@
|
||||
"""Routes WSGI Middleware"""
|
||||
import re
|
||||
import logging
|
||||
|
||||
from webob import Request
|
||||
|
||||
from routes.base import request_config
|
||||
from routes.util import URLGenerator, url_for
|
||||
|
||||
log = logging.getLogger('routes.middleware')
|
||||
|
||||
class RoutesMiddleware(object):
    """Routing middleware that handles resolving the PATH_INFO in
    addition to optionally recognizing method overriding."""
    def __init__(self, wsgi_app, mapper, use_method_override=True,
                 path_info=True, singleton=True):
        """Create a Route middleware object

        Using the use_method_override keyword will require Paste to be
        installed, and your application should use Paste's WSGIRequest
        object as it will properly handle POST issues with wsgi.input
        should Routes check it.

        If path_info is True, then should a route var contain
        path_info, the SCRIPT_NAME and PATH_INFO will be altered
        accordingly. This should be used with routes like:

        .. code-block:: python

            map.connect('blog/*path_info', controller='blog', path_info='')

        """
        self.app = wsgi_app
        self.mapper = mapper
        self.singleton = singleton
        self.use_method_override = use_method_override
        self.path_info = path_info
        # Compute the debug flag once; stored on self and also bound to a
        # local (the local binding is otherwise unused here).
        log_debug = self.log_debug = logging.DEBUG >= log.getEffectiveLevel()
        if self.log_debug:
            log.debug("Initialized with method overriding = %s, and path "
                      "info altering = %s", use_method_override, path_info)

    def __call__(self, environ, start_response):
        """Resolves the URL in PATH_INFO, and uses wsgi.routing_args
        to pass on URL resolver results."""
        # Optional _method override: a form or query parameter may replace
        # REQUEST_METHOD for the duration of route matching.
        old_method = None
        if self.use_method_override:
            req = None

            # In some odd cases, there's no query string
            try:
                qs = environ['QUERY_STRING']
            except KeyError:
                qs = ''
            if '_method' in qs:
                req = Request(environ)
                req.errors = 'ignore'
                if '_method' in req.GET:
                    old_method = environ['REQUEST_METHOD']
                    environ['REQUEST_METHOD'] = req.GET['_method'].upper()
                    if self.log_debug:
                        log.debug("_method found in QUERY_STRING, altering request"
                                  " method to %s", environ['REQUEST_METHOD'])
            elif environ['REQUEST_METHOD'] == 'POST' and is_form_post(environ):
                if req is None:
                    req = Request(environ)
                    req.errors = 'ignore'
                if '_method' in req.POST:
                    old_method = environ['REQUEST_METHOD']
                    environ['REQUEST_METHOD'] = req.POST['_method'].upper()
                    if self.log_debug:
                        log.debug("_method found in POST data, altering request "
                                  "method to %s", environ['REQUEST_METHOD'])

        # Run the actual route matching
        # -- Assignment of environ to config triggers route matching
        if self.singleton:
            config = request_config()
            config.mapper = self.mapper
            config.environ = environ
            match = config.mapper_dict
            route = config.route
        else:
            results = self.mapper.routematch(environ=environ)
            if results:
                match, route = results[0], results[1]
            else:
                match = route = None

        # Restore the real request method after matching.
        if old_method:
            environ['REQUEST_METHOD'] = old_method

        if not match:
            match = {}
            if self.log_debug:
                urlinfo = "%s %s" % (environ['REQUEST_METHOD'], environ['PATH_INFO'])
                log.debug("No route matched for %s", urlinfo)
        elif self.log_debug:
            urlinfo = "%s %s" % (environ['REQUEST_METHOD'], environ['PATH_INFO'])
            log.debug("Matched %s", urlinfo)
            log.debug("Route path: '%s', defaults: %s", route.routepath,
                      route.defaults)
            log.debug("Match dict: %s", match)

        # Publish the match results per the wsgiorg.routing_args spec.
        url = URLGenerator(self.mapper, environ)
        environ['wsgiorg.routing_args'] = ((url), match)
        environ['routes.route'] = route
        environ['routes.url'] = url

        # Redirect routes short-circuit: issue the redirect response here.
        if route and route.redirect:
            route_name = '_redirect_%s' % id(route)
            location = url(route_name, **match)
            log.debug("Using redirect route, redirect to '%s' with status"
                      "code: %s", location, route.redirect_status)
            start_response(route.redirect_status,
                           [('Content-Type', 'text/plain; charset=utf8'),
                            ('Location', location)])
            return []

        # If the route included a path_info attribute and it should be used to
        # alter the environ, we'll pull it out
        if self.path_info and 'path_info' in match:
            oldpath = environ['PATH_INFO']
            newpath = match.get('path_info') or ''
            environ['PATH_INFO'] = newpath
            if not environ['PATH_INFO'].startswith('/'):
                environ['PATH_INFO'] = '/' + environ['PATH_INFO']
            # Move the consumed prefix of the old path onto SCRIPT_NAME.
            environ['SCRIPT_NAME'] += re.sub(r'^(.*?)/' + re.escape(newpath) + '$',
                                             r'\1', oldpath)

        response = self.app(environ, start_response)

        # Wrapped in try as in rare cases the attribute will be gone already
        try:
            del self.mapper.environ
        except AttributeError:
            pass
        return response
|
||||
|
||||
def is_form_post(environ):
    """Determine whether the request is a POSTed html form"""
    # Strip any media-type parameters (e.g. "; charset=utf-8") before
    # comparing against the two standard HTML form encodings.
    media_type = environ.get('CONTENT_TYPE', '').lower().split(';', 1)[0]
    return media_type in ('application/x-www-form-urlencoded',
                          'multipart/form-data')
|
742
src/routes/route.py
Normal file
742
src/routes/route.py
Normal file
@ -0,0 +1,742 @@
|
||||
import re
|
||||
import sys
|
||||
import urllib
|
||||
|
||||
if sys.version < '2.4':
|
||||
from sets import ImmutableSet as frozenset
|
||||
|
||||
from routes.util import _url_quote as url_quote, _str_encode
|
||||
|
||||
|
||||
# NOTE(review): vendored from the Routes library; written for Python 2
# (has_key/iteritems/unicode are used throughout this class).
class Route(object):
    """The Route object holds a route recognition and generation
    routine.

    See Route.__init__ docs for usage.

    """
    # reserved keys that don't count
    reserved_keys = ['requirements']

    # special chars to indicate a natural split in the URL
    done_chars = ('/', ',', ';', '.', '#')

    def __init__(self, name, routepath, **kargs):
        """Initialize a route, with a given routepath for
        matching/generation

        The set of keyword args will be used as defaults.

        Usage::

            >>> from routes.base import Route
            >>> newroute = Route(None, ':controller/:action/:id')
            >>> sorted(newroute.defaults.items())
            [('action', 'index'), ('id', None)]
            >>> newroute = Route(None, 'date/:year/:month/:day',
            ...     controller="blog", action="view")
            >>> newroute = Route(None, 'archives/:page', controller="blog",
            ...     action="by_page", requirements = { 'page':'\d{1,2}' })
            >>> newroute.reqs
            {'page': '\\\d{1,2}'}

        .. Note::
            Route is generally not called directly, a Mapper instance
            connect method should be used to add routes.

        """
        self.routepath = routepath
        self.sub_domains = False
        self.prior = None
        self.redirect = False
        self.name = name
        self._kargs = kargs
        self.minimization = kargs.pop('_minimize', False)
        self.encoding = kargs.pop('_encoding', 'utf-8')
        self.reqs = kargs.get('requirements', {})
        self.decode_errors = 'replace'

        # Don't bother forming stuff we don't need if its a static route
        self.static = kargs.pop('_static', False)
        self.filter = kargs.pop('_filter', None)
        self.absolute = kargs.pop('_absolute', False)

        # Pull out the member/collection name if present, this applies only to
        # map.resource
        self.member_name = kargs.pop('_member_name', None)
        self.collection_name = kargs.pop('_collection_name', None)
        self.parent_resource = kargs.pop('_parent_resource', None)

        # Pull out route conditions
        self.conditions = kargs.pop('conditions', None)

        # Determine if explicit behavior should be used
        self.explicit = kargs.pop('_explicit', False)

        # Since static need to be generated exactly, treat them as
        # non-minimized
        if self.static:
            self.external = '://' in self.routepath
            self.minimization = False

        # Strip preceding '/' if present, and not minimizing
        if routepath.startswith('/') and self.minimization:
            self.routepath = routepath[1:]
        self._setup_route()

    def _setup_route(self):
        """Precompute all derived route state (routelist, key sets,
        requirement regexps, defaults) used by match/generate."""
        # Build our routelist, and the keys used in the route
        self.routelist = routelist = self._pathkeys(self.routepath)
        routekeys = frozenset([key['name'] for key in routelist
                               if isinstance(key, dict)])
        self.dotkeys = frozenset([key['name'] for key in routelist
                                  if isinstance(key, dict) and
                                  key['type'] == '.'])

        if not self.minimization:
            self.make_full_route()

        # Build a req list with all the regexp requirements for our args
        self.req_regs = {}
        for key, val in self.reqs.iteritems():
            self.req_regs[key] = re.compile('^' + val + '$')
        # Update our defaults and set new default keys if needed. defaults
        # needs to be saved
        (self.defaults, defaultkeys) = self._defaults(routekeys,
                                                      self.reserved_keys,
                                                      self._kargs.copy())
        # Save the maximum keys we could utilize
        self.maxkeys = defaultkeys | routekeys

        # Populate our minimum keys, and save a copy of our backward keys for
        # quicker generation later
        (self.minkeys, self.routebackwards) = self._minkeys(routelist[:])

        # Populate our hardcoded keys, these are ones that are set and don't
        # exist in the route
        self.hardcoded = frozenset([key for key in self.maxkeys \
            if key not in routekeys and self.defaults[key] is not None])

        # Cache our default keys
        self._default_keys = frozenset(self.defaults.keys())

    def make_full_route(self):
        """Make a full routelist string for use with non-minimized
        generation"""
        regpath = ''
        for part in self.routelist:
            if isinstance(part, dict):
                regpath += '%(' + part['name'] + ')s'
            else:
                regpath += part
        self.regpath = regpath

    def make_unicode(self, s):
        """Transform the given argument into a unicode string."""
        if isinstance(s, unicode):
            return s
        elif isinstance(s, str):
            return s.decode(self.encoding)
        elif callable(s):
            # callables are passed through untouched (used for deferred args)
            return s
        else:
            return unicode(s)

    def _pathkeys(self, routepath):
        """Utility function to walk the route, and pull out the valid
        dynamic/wildcard keys."""
        # State machine over the route string: ':' / '*' / '{' start a
        # dynamic part, done_chars (and '-' or a '('...')' group) end it.
        collecting = False
        current = ''
        done_on = ''
        var_type = ''
        just_started = False
        routelist = []
        for char in routepath:
            if char in [':', '*', '{'] and not collecting and not self.static \
               or char in ['{'] and not collecting:
                just_started = True
                collecting = True
                var_type = char
                if char == '{':
                    done_on = '}'
                    just_started = False
                if len(current) > 0:
                    routelist.append(current)
                    current = ''
            elif collecting and just_started:
                just_started = False
                if char == '(':
                    done_on = ')'
                else:
                    current = char
                    done_on = self.done_chars + ('-',)
            elif collecting and char not in done_on:
                current += char
            elif collecting:
                collecting = False
                if var_type == '{':
                    if current[0] == '.':
                        var_type = '.'
                        current = current[1:]
                    else:
                        var_type = ':'
                    opts = current.split(':')
                    if len(opts) > 1:
                        current = opts[0]
                        self.reqs[current] = opts[1]
                routelist.append(dict(type=var_type, name=current))
                if char in self.done_chars:
                    routelist.append(char)
                done_on = var_type = current = ''
            else:
                current += char
        if collecting:
            routelist.append(dict(type=var_type, name=current))
        elif current:
            routelist.append(current)
        return routelist

    def _minkeys(self, routelist):
        """Utility function to walk the route backwards

        Will also determine the minimum keys we can handle to generate
        a working route.

        routelist is a list of the '/' split route path
        defaults is a dict of all the defaults provided for the route

        """
        minkeys = []
        backcheck = routelist[:]

        # If we don't honor minimization, we need all the keys in the
        # route path
        if not self.minimization:
            for part in backcheck:
                if isinstance(part, dict):
                    minkeys.append(part['name'])
            return (frozenset(minkeys), backcheck)

        gaps = False
        backcheck.reverse()
        for part in backcheck:
            if not isinstance(part, dict) and part not in self.done_chars:
                gaps = True
                continue
            elif not isinstance(part, dict):
                continue
            key = part['name']
            if self.defaults.has_key(key) and not gaps:
                continue
            minkeys.append(key)
            gaps = True
        return (frozenset(minkeys), backcheck)

    def _defaults(self, routekeys, reserved_keys, kargs):
        """Creates default set with values stringified

        Put together our list of defaults, stringify non-None values
        and add in our action/id default if they use it and didn't
        specify it.

        defaultkeys is a list of the currently assumed default keys
        routekeys is a list of the keys found in the route path
        reserved_keys is a list of keys that are not

        """
        defaults = {}
        # Add in a controller/action default if they don't exist
        if 'controller' not in routekeys and 'controller' not in kargs \
           and not self.explicit:
            kargs['controller'] = 'content'
        if 'action' not in routekeys and 'action' not in kargs \
           and not self.explicit:
            kargs['action'] = 'index'
        defaultkeys = frozenset([key for key in kargs.keys() \
                                 if key not in reserved_keys])
        for key in defaultkeys:
            if kargs[key] is not None:
                defaults[key] = self.make_unicode(kargs[key])
            else:
                defaults[key] = None
        if 'action' in routekeys and not defaults.has_key('action') \
           and not self.explicit:
            defaults['action'] = 'index'
        if 'id' in routekeys and not defaults.has_key('id') \
           and not self.explicit:
            defaults['id'] = None
        newdefaultkeys = frozenset([key for key in defaults.keys() \
                                    if key not in reserved_keys])

        return (defaults, newdefaultkeys)

    def makeregexp(self, clist, include_names=True):
        """Create a regular expression for matching purposes

        Note: This MUST be called before match can function properly.

        clist should be a list of valid controller strings that can be
        matched, for this reason makeregexp should be called by the web
        framework after it knows all available controllers that can be
        utilized.

        include_names indicates whether this should be a match regexp
        assigned to itself using regexp grouping names, or if names
        should be excluded for use in a single larger regexp to
        determine if any routes match

        """
        if self.minimization:
            reg = self.buildnextreg(self.routelist, clist, include_names)[0]
            if not reg:
                reg = '/'
            reg = reg + '/?' + '$'

            if not reg.startswith('/'):
                reg = '/' + reg
        else:
            reg = self.buildfullreg(clist, include_names)

        reg = '^' + reg

        if not include_names:
            return reg

        self.regexp = reg
        self.regmatch = re.compile(reg)

    def buildfullreg(self, clist, include_names=True):
        """Build the regexp by iterating through the routelist and
        replacing dicts with the appropriate regexp match"""
        regparts = []
        for part in self.routelist:
            if isinstance(part, dict):
                var = part['name']
                if var == 'controller':
                    partmatch = '|'.join(map(re.escape, clist))
                elif part['type'] == ':':
                    partmatch = self.reqs.get(var) or '[^/]+?'
                elif part['type'] == '.':
                    partmatch = self.reqs.get(var) or '[^/.]+?'
                else:
                    partmatch = self.reqs.get(var) or '.+?'
                if include_names:
                    regpart = '(?P<%s>%s)' % (var, partmatch)
                else:
                    regpart = '(?:%s)' % partmatch
                if part['type'] == '.':
                    # the '.ext' part is optional, hence the lazy '??'
                    regparts.append('(?:\.%s)??' % regpart)
                else:
                    regparts.append(regpart)
            else:
                regparts.append(re.escape(part))
        regexp = ''.join(regparts) + '$'
        return regexp

    def buildnextreg(self, path, clist, include_names=True):
        """Recursively build our regexp given a path, and a controller
        list.

        Returns the regular expression string, and two booleans that
        can be ignored as they're only used internally by buildnextreg.

        """
        if path:
            part = path[0]
        else:
            part = ''
        reg = ''

        # noreqs will remember whether the remainder has either a string
        # match, or a non-defaulted regexp match on a key, allblank remembers
        # if the rest could possible be completely empty
        (rest, noreqs, allblank) = ('', True, True)
        if len(path[1:]) > 0:
            self.prior = part
            (rest, noreqs, allblank) = self.buildnextreg(path[1:], clist, include_names)

        if isinstance(part, dict) and part['type'] in (':', '.'):
            var = part['name']
            typ = part['type']
            partreg = ''

            # First we plug in the proper part matcher
            if self.reqs.has_key(var):
                if include_names:
                    partreg = '(?P<%s>%s)' % (var, self.reqs[var])
                else:
                    partreg = '(?:%s)' % self.reqs[var]
                if typ == '.':
                    partreg = '(?:\.%s)??' % partreg
            elif var == 'controller':
                if include_names:
                    partreg = '(?P<%s>%s)' % (var, '|'.join(map(re.escape, clist)))
                else:
                    partreg = '(?:%s)' % '|'.join(map(re.escape, clist))
            elif self.prior in ['/', '#']:
                if include_names:
                    partreg = '(?P<' + var + '>[^' + self.prior + ']+?)'
                else:
                    partreg = '(?:[^' + self.prior + ']+?)'
            else:
                if not rest:
                    if typ == '.':
                        exclude_chars = '/.'
                    else:
                        exclude_chars = '/'
                    if include_names:
                        partreg = '(?P<%s>[^%s]+?)' % (var, exclude_chars)
                    else:
                        partreg = '(?:[^%s]+?)' % exclude_chars
                    if typ == '.':
                        partreg = '(?:\.%s)??' % partreg
                else:
                    end = ''.join(self.done_chars)
                    rem = rest
                    if rem[0] == '\\' and len(rem) > 1:
                        rem = rem[1]
                    elif rem.startswith('(\\') and len(rem) > 2:
                        rem = rem[2]
                    else:
                        rem = end
                    rem = frozenset(rem) | frozenset(['/'])
                    if include_names:
                        partreg = '(?P<%s>[^%s]+?)' % (var, ''.join(rem))
                    else:
                        partreg = '(?:[^%s]+?)' % ''.join(rem)

            if self.reqs.has_key(var):
                noreqs = False
            if not self.defaults.has_key(var):
                allblank = False
                noreqs = False

            # Now we determine if its optional, or required. This changes
            # depending on what is in the rest of the match. If noreqs is
            # true, then its possible the entire thing is optional as there's
            # no reqs or string matches.
            if noreqs:
                # The rest is optional, but now we have an optional with a
                # regexp. Wrap to ensure that if we match anything, we match
                # our regexp first. It's still possible we could be completely
                # blank as we have a default
                if self.reqs.has_key(var) and self.defaults.has_key(var):
                    reg = '(' + partreg + rest + ')?'

                # Or we have a regexp match with no default, so now being
                # completely blank form here on out isn't possible
                elif self.reqs.has_key(var):
                    allblank = False
                    reg = partreg + rest

                # If the character before this is a special char, it has to be
                # followed by this
                elif self.defaults.has_key(var) and \
                     self.prior in (',', ';', '.'):
                    reg = partreg + rest

                # Or we have a default with no regexp, don't touch the allblank
                elif self.defaults.has_key(var):
                    reg = partreg + '?' + rest

                # Or we have a key with no default, and no reqs. Not possible
                # to be all blank from here
                else:
                    allblank = False
                    reg = partreg + rest
            # In this case, we have something dangling that might need to be
            # matched
            else:
                # If they can all be blank, and we have a default here, we know
                # its safe to make everything from here optional. Since
                # something else in the chain does have req's though, we have
                # to make the partreg here required to continue matching
                if allblank and self.defaults.has_key(var):
                    reg = '(' + partreg + rest + ')?'

                # Same as before, but they can't all be blank, so we have to
                # require it all to ensure our matches line up right
                else:
                    reg = partreg + rest
        elif isinstance(part, dict) and part['type'] == '*':
            var = part['name']
            if noreqs:
                if include_names:
                    reg = '(?P<%s>.*)' % var + rest
                else:
                    reg = '(?:.*)' + rest
                if not self.defaults.has_key(var):
                    allblank = False
                    noreqs = False
            else:
                if allblank and self.defaults.has_key(var):
                    if include_names:
                        reg = '(?P<%s>.*)' % var + rest
                    else:
                        reg = '(?:.*)' + rest
                elif self.defaults.has_key(var):
                    if include_names:
                        reg = '(?P<%s>.*)' % var + rest
                    else:
                        reg = '(?:.*)' + rest
                else:
                    if include_names:
                        reg = '(?P<%s>.*)' % var + rest
                    else:
                        reg = '(?:.*)' + rest
                    allblank = False
                    noreqs = False
        elif part and part[-1] in self.done_chars:
            if allblank:
                reg = re.escape(part[:-1]) + '(' + re.escape(part[-1]) + rest
                reg += ')?'
            else:
                allblank = False
                reg = re.escape(part) + rest

        # We have a normal string here, this is a req, and it prevents us from
        # being all blank
        else:
            noreqs = False
            allblank = False
            reg = re.escape(part) + rest

        return (reg, noreqs, allblank)

    def match(self, url, environ=None, sub_domains=False,
              sub_domains_ignore=None, domain_match=''):
        """Match a url to our regexp.

        While the regexp might match, this operation isn't
        guaranteed as there's other factors that can cause a match to
        fail even though the regexp succeeds (Default that was relied
        on wasn't given, requirement regexp doesn't pass, etc.).

        Therefore the calling function shouldn't assume this will
        return a valid dict, the other possible return is False if a
        match doesn't work out.

        """
        # Static routes don't match, they generate only
        if self.static:
            return False

        match = self.regmatch.match(url)

        if not match:
            return False

        sub_domain = None

        if sub_domains and environ and 'HTTP_HOST' in environ:
            host = environ['HTTP_HOST'].split(':')[0]
            sub_match = re.compile('^(.+?)\.%s$' % domain_match)
            subdomain = re.sub(sub_match, r'\1', host)
            if subdomain not in sub_domains_ignore and host != subdomain:
                sub_domain = subdomain

        if self.conditions:
            if 'method' in self.conditions and environ and \
                environ['REQUEST_METHOD'] not in self.conditions['method']:
                return False

            # Check sub-domains?
            use_sd = self.conditions.get('sub_domain')
            if use_sd and not sub_domain:
                return False
            elif not use_sd and 'sub_domain' in self.conditions and sub_domain:
                return False
            if isinstance(use_sd, list) and sub_domain not in use_sd:
                return False

        matchdict = match.groupdict()
        result = {}
        extras = self._default_keys - frozenset(matchdict.keys())
        for key, val in matchdict.iteritems():
            if key != 'path_info' and self.encoding:
                # change back into python unicode objects from the URL
                # representation
                try:
                    val = val and val.decode(self.encoding, self.decode_errors)
                except UnicodeDecodeError:
                    return False

            if not val and key in self.defaults and self.defaults[key]:
                result[key] = self.defaults[key]
            else:
                result[key] = val
        for key in extras:
            result[key] = self.defaults[key]

        # Add the sub-domain if there is one
        if sub_domains:
            result['sub_domain'] = sub_domain

        # If there's a function, call it with environ and expire if it
        # returns False
        if self.conditions and 'function' in self.conditions and \
            not self.conditions['function'](environ, result):
            return False

        return result

    def generate_non_minimized(self, kargs):
        """Generate a non-minimal version of the URL"""
        # Iterate through the keys that are defaults, and NOT in the route
        # path. If its not in kargs, or doesn't match, or is None, this
        # route won't work
        for k in self.maxkeys - self.minkeys:
            if k not in kargs:
                return False
            elif self.make_unicode(kargs[k]) != \
                 self.make_unicode(self.defaults[k]):
                return False

        # Ensure that all the args in the route path are present and not None
        for arg in self.minkeys:
            if arg not in kargs or kargs[arg] is None:
                if arg in self.dotkeys:
                    kargs[arg] = ''
                else:
                    return False

        # Encode all the argument that the regpath can use
        for k in kargs:
            if k in self.maxkeys:
                if k in self.dotkeys:
                    if kargs[k]:
                        kargs[k] = url_quote('.' + kargs[k], self.encoding)
                else:
                    kargs[k] = url_quote(kargs[k], self.encoding)

        return self.regpath % kargs

    def generate_minimized(self, kargs):
        """Generate a minimized version of the URL"""
        # Walk the route backwards so trailing parts that equal their
        # defaults can be dropped until a "gap" (a required part) is hit.
        routelist = self.routebackwards
        urllist = []
        gaps = False
        for part in routelist:
            if isinstance(part, dict) and part['type'] in (':', '.'):
                arg = part['name']

                # For efficiency, check these just once
                has_arg = kargs.has_key(arg)
                has_default = self.defaults.has_key(arg)

                # Determine if we can leave this part off
                # First check if the default exists and wasn't provided in the
                # call (also no gaps)
                if has_default and not has_arg and not gaps:
                    continue

                # Now check to see if there's a default and it matches the
                # incoming call arg
                if (has_default and has_arg) and self.make_unicode(kargs[arg]) == \
                   self.make_unicode(self.defaults[arg]) and not gaps:
                    continue

                # We need to pull the value to append, if the arg is None and
                # we have a default, use that
                if has_arg and kargs[arg] is None and has_default and not gaps:
                    continue

                # Otherwise if we do have an arg, use that
                elif has_arg:
                    val = kargs[arg]

                elif has_default and self.defaults[arg] is not None:
                    val = self.defaults[arg]
                # Optional format parameter?
                elif part['type'] == '.':
                    continue
                # No arg at all? This won't work
                else:
                    return False

                urllist.append(url_quote(val, self.encoding))
                if part['type'] == '.':
                    urllist.append('.')

                if has_arg:
                    del kargs[arg]
                gaps = True
            elif isinstance(part, dict) and part['type'] == '*':
                arg = part['name']
                kar = kargs.get(arg)
                if kar is not None:
                    urllist.append(url_quote(kar, self.encoding))
                    gaps = True
            elif part and part[-1] in self.done_chars:
                if not gaps and part in self.done_chars:
                    continue
                elif not gaps:
                    urllist.append(part[:-1])
                    gaps = True
                else:
                    gaps = True
                    urllist.append(part)
            else:
                gaps = True
                urllist.append(part)
        urllist.reverse()
        url = ''.join(urllist)
        return url

    def generate(self, _ignore_req_list=False, _append_slash=False, **kargs):
        """Generate a URL from ourself given a set of keyword arguments

        Toss an exception if this
        set of keywords would cause a gap in the url.

        """
        # Verify that our args pass any regexp requirements
        if not _ignore_req_list:
            for key in self.reqs.keys():
                val = kargs.get(key)
                if val and not self.req_regs[key].match(self.make_unicode(val)):
                    return False

        # Verify that if we have a method arg, its in the method accept list.
        # Also, method will be changed to _method for route generation
        meth = kargs.get('method')
        if meth:
            if self.conditions and 'method' in self.conditions \
               and meth.upper() not in self.conditions['method']:
                return False
            kargs.pop('method')

        if self.minimization:
            url = self.generate_minimized(kargs)
        else:
            url = self.generate_non_minimized(kargs)

        if url is False:
            return url

        if not url.startswith('/') and not self.static:
            url = '/' + url
        extras = frozenset(kargs.keys()) - self.maxkeys
        if extras:
            if _append_slash and not url.endswith('/'):
                url += '/'
            fragments = []
            # don't assume the 'extras' set preserves order: iterate
            # through the ordered kargs instead
            for key in kargs:
                if key not in extras:
                    continue
                if key == 'action' or key == 'controller':
                    continue
                val = kargs[key]
                if isinstance(val, (tuple, list)):
                    for value in val:
                        fragments.append((key, _str_encode(value, self.encoding)))
                else:
                    fragments.append((key, _str_encode(val, self.encoding)))
            if fragments:
                url += '?'
                url += urllib.urlencode(fragments)
        elif _append_slash and not url.endswith('/'):
            url += '/'
        return url
|
503
src/routes/util.py
Normal file
503
src/routes/util.py
Normal file
@ -0,0 +1,503 @@
|
||||
"""Utility functions for use in templates / controllers
|
||||
|
||||
*PLEASE NOTE*: Many of these functions expect an initialized RequestConfig
|
||||
object. This is expected to have been initialized for EACH REQUEST by the web
|
||||
framework.
|
||||
|
||||
"""
|
||||
import os
|
||||
import re
|
||||
import urllib
|
||||
from routes import request_config
|
||||
|
||||
|
||||
class RoutesException(Exception):
    """Base class for every error raised by the Routes package."""
|
||||
|
||||
|
||||
class MatchException(RoutesException):
    """Raised when recognizing/matching an incoming URL fails."""
|
||||
|
||||
|
||||
class GenerationException(RoutesException):
    """Raised when building a URL from route arguments fails."""
|
||||
|
||||
|
||||
def _screenargs(kargs, mapper, environ, force_explicit=False):
    """
    Private function that takes a dict, and screens it against the current
    request dict to determine what the dict should look like that is used.
    This is responsible for the requests "memory" of the current.
    """
    # NOTE(review): mutates the caller's ``kargs`` in place (value coercion
    # and None-key deletion below).
    # Coerce any unicode args with the encoding
    encoding = mapper.encoding
    for key, val in kargs.iteritems():
        if isinstance(val, unicode):
            kargs[key] = val.encode(encoding)

    # Explicit mappers keep no request memory; only sub-domain screening
    # (if enabled) is applied before returning.
    if mapper.explicit and mapper.sub_domains and not force_explicit:
        return _subdomain_check(kargs, mapper, environ)
    elif mapper.explicit and not force_explicit:
        return kargs

    controller_name = kargs.get('controller')

    if controller_name and controller_name.startswith('/'):
        # If the controller name starts with '/', ignore route memory
        kargs['controller'] = kargs['controller'][1:]
        return kargs
    elif controller_name and not kargs.has_key('action'):
        # Fill in an action if we don't have one, but have a controller
        kargs['action'] = 'index'

    # Route memory comes from the match dict of the current request
    # (second element of the wsgiorg.routing_args pair).
    route_args = environ.get('wsgiorg.routing_args')
    if route_args:
        memory_kargs = route_args[1].copy()
    else:
        memory_kargs = {}

    # Remove keys from memory and kargs if kargs has them as None
    for key in [key for key in kargs.keys() if kargs[key] is None]:
        del kargs[key]
        if memory_kargs.has_key(key):
            del memory_kargs[key]

    # Merge the new args on top of the memory args
    memory_kargs.update(kargs)

    # Setup a sub-domain if applicable
    if mapper.sub_domains:
        memory_kargs = _subdomain_check(memory_kargs, mapper, environ)
    return memory_kargs
|
||||
|
||||
|
||||
def _subdomain_check(kargs, mapper, environ):
    """Screen the kargs for a subdomain and alter it appropriately depending
    on the current subdomain or lack therof."""
    if mapper.sub_domains:
        # 'sub_domain' is consumed here; URL generation later reads the
        # '_host' override this function may set.
        subdomain = kargs.pop('sub_domain', None)
        if isinstance(subdomain, unicode):
            subdomain = str(subdomain)

        fullhost = environ.get('HTTP_HOST') or environ.get('SERVER_NAME')

        # In case environ defaulted to {}
        if not fullhost:
            return kargs

        hostmatch = fullhost.split(':')
        host = hostmatch[0]
        port = ''
        if len(hostmatch) > 1:
            port += ':' + hostmatch[1]
        # Strip the subdomain part off the host using the mapper's
        # domain_match pattern, leaving the bare domain.
        sub_match = re.compile('^.+?\.(%s)$' % mapper.domain_match)
        domain = re.sub(sub_match, r'\1', host)
        if subdomain and not host.startswith(subdomain) and \
            subdomain not in mapper.sub_domains_ignore:
            # Requested subdomain differs from the current host: redirect
            # generation to subdomain.domain[:port].
            kargs['_host'] = subdomain + '.' + domain + port
        elif (subdomain in mapper.sub_domains_ignore or \
            subdomain is None) and domain != host:
            # Currently on a subdomain but none was requested (or it is
            # ignored): generate against the bare domain.
            kargs['_host'] = domain + port
        return kargs
    else:
        return kargs
|
||||
|
||||
|
||||
def _url_quote(string, encoding):
    """A Unicode handling version of urllib.quote."""
    if not encoding:
        # No encoding configured: fall back to a plain str() conversion.
        return urllib.quote(str(string), '/')
    if isinstance(string, unicode):
        encoded = string.encode(encoding)
    elif isinstance(string, str):
        # Byte strings are assumed to already be in the right encoding.
        encoded = string
    else:
        # Anything else: stringify via unicode(), then encode.
        encoded = unicode(string).encode(encoding)
    return urllib.quote(encoded, '/')
|
||||
|
||||
|
||||
def _str_encode(string, encoding):
|
||||
if encoding:
|
||||
if isinstance(string, unicode):
|
||||
s = string.encode(encoding)
|
||||
elif isinstance(string, str):
|
||||
# assume the encoding is already correct
|
||||
s = string
|
||||
else:
|
||||
s = unicode(string).encode(encoding)
|
||||
return s
|
||||
|
||||
|
||||
def url_for(*args, **kargs):
|
||||
"""Generates a URL
|
||||
|
||||
All keys given to url_for are sent to the Routes Mapper instance for
|
||||
generation except for::
|
||||
|
||||
anchor specified the anchor name to be appened to the path
|
||||
host overrides the default (current) host if provided
|
||||
protocol overrides the default (current) protocol if provided
|
||||
qualified creates the URL with the host/port information as
|
||||
needed
|
||||
|
||||
The URL is generated based on the rest of the keys. When generating a new
|
||||
URL, values will be used from the current request's parameters (if
|
||||
present). The following rules are used to determine when and how to keep
|
||||
the current requests parameters:
|
||||
|
||||
* If the controller is present and begins with '/', no defaults are used
|
||||
* If the controller is changed, action is set to 'index' unless otherwise
|
||||
specified
|
||||
|
||||
For example, if the current request yielded a dict of
|
||||
{'controller': 'blog', 'action': 'view', 'id': 2}, with the standard
|
||||
':controller/:action/:id' route, you'd get the following results::
|
||||
|
||||
url_for(id=4) => '/blog/view/4',
|
||||
url_for(controller='/admin') => '/admin',
|
||||
url_for(controller='admin') => '/admin/view/2'
|
||||
url_for(action='edit') => '/blog/edit/2',
|
||||
url_for(action='list', id=None) => '/blog/list'
|
||||
|
||||
**Static and Named Routes**
|
||||
|
||||
If there is a string present as the first argument, a lookup is done
|
||||
against the named routes table to see if there's any matching routes. The
|
||||
keyword defaults used with static routes will be sent in as GET query
|
||||
arg's if a route matches.
|
||||
|
||||
If no route by that name is found, the string is assumed to be a raw URL.
|
||||
Should the raw URL begin with ``/`` then appropriate SCRIPT_NAME data will
|
||||
be added if present, otherwise the string will be used as the url with
|
||||
keyword args becoming GET query args.
|
||||
|
||||
"""
|
||||
anchor = kargs.get('anchor')
|
||||
host = kargs.get('host')
|
||||
protocol = kargs.get('protocol')
|
||||
qualified = kargs.pop('qualified', None)
|
||||
|
||||
# Remove special words from kargs, convert placeholders
|
||||
for key in ['anchor', 'host', 'protocol']:
|
||||
if kargs.get(key):
|
||||
del kargs[key]
|
||||
config = request_config()
|
||||
route = None
|
||||
static = False
|
||||
encoding = config.mapper.encoding
|
||||
url = ''
|
||||
if len(args) > 0:
|
||||
route = config.mapper._routenames.get(args[0])
|
||||
|
||||
# No named route found, assume the argument is a relative path
|
||||
if not route:
|
||||
static = True
|
||||
url = args[0]
|
||||
|
||||
if url.startswith('/') and hasattr(config, 'environ') \
|
||||
and config.environ.get('SCRIPT_NAME'):
|
||||
url = config.environ.get('SCRIPT_NAME') + url
|
||||
|
||||
if static:
|
||||
if kargs:
|
||||
url += '?'
|
||||
query_args = []
|
||||
for key, val in kargs.iteritems():
|
||||
if isinstance(val, (list, tuple)):
|
||||
for value in val:
|
||||
query_args.append("%s=%s" % (
|
||||
urllib.quote(unicode(key).encode(encoding)),
|
||||
urllib.quote(unicode(value).encode(encoding))))
|
||||
else:
|
||||
query_args.append("%s=%s" % (
|
||||
urllib.quote(unicode(key).encode(encoding)),
|
||||
urllib.quote(unicode(val).encode(encoding))))
|
||||
url += '&'.join(query_args)
|
||||
environ = getattr(config, 'environ', {})
|
||||
if 'wsgiorg.routing_args' not in environ:
|
||||
environ = environ.copy()
|
||||
mapper_dict = getattr(config, 'mapper_dict', None)
|
||||
if mapper_dict is not None:
|
||||
match_dict = mapper_dict.copy()
|
||||
else:
|
||||
match_dict = {}
|
||||
environ['wsgiorg.routing_args'] = ((), match_dict)
|
||||
|
||||
if not static:
|
||||
route_args = []
|
||||
if route:
|
||||
if config.mapper.hardcode_names:
|
||||
route_args.append(route)
|
||||
newargs = route.defaults.copy()
|
||||
newargs.update(kargs)
|
||||
|
||||
# If this route has a filter, apply it
|
||||
if route.filter:
|
||||
newargs = route.filter(newargs)
|
||||
|
||||
if not route.static:
|
||||
# Handle sub-domains
|
||||
newargs = _subdomain_check(newargs, config.mapper, environ)
|
||||
else:
|
||||
newargs = _screenargs(kargs, config.mapper, environ)
|
||||
anchor = newargs.pop('_anchor', None) or anchor
|
||||
host = newargs.pop('_host', None) or host
|
||||
protocol = newargs.pop('_protocol', None) or protocol
|
||||
url = config.mapper.generate(*route_args, **newargs)
|
||||
if anchor is not None:
|
||||
url += '#' + _url_quote(anchor, encoding)
|
||||
if host or protocol or qualified:
|
||||
if not host and not qualified:
|
||||
# Ensure we don't use a specific port, as changing the protocol
|
||||
# means that we most likely need a new port
|
||||
host = config.host.split(':')[0]
|
||||
elif not host:
|
||||
host = config.host
|
||||
if not protocol:
|
||||
protocol = config.protocol
|
||||
if url is not None:
|
||||
url = protocol + '://' + host + url
|
||||
|
||||
if not isinstance(url, str) and url is not None:
|
||||
raise GenerationException("url_for can only return a string, got "
|
||||
"unicode instead: %s" % url)
|
||||
if url is None:
|
||||
raise GenerationException(
|
||||
"url_for could not generate URL. Called with args: %s %s" % \
|
||||
(args, kargs))
|
||||
return url
|
||||
|
||||
|
||||
class URLGenerator(object):
    """The URL Generator generates URL's

    It is automatically instantiated by the RoutesMiddleware and put
    into the ``wsgiorg.routing_args`` tuple accessible as::

        url = environ['wsgiorg.routing_args'][0][0]

    Or via the ``routes.url`` key::

        url = environ['routes.url']

    The url object may be instantiated outside of a web context for use
    in testing, however sub_domain support and fully qualified URL's
    cannot be generated without supplying a dict that must contain the
    key ``HTTP_HOST``.

    """
    def __init__(self, mapper, environ):
        """Instantiate the URLGenerator

        ``mapper``
            The mapper object to use when generating routes.
        ``environ``
            The environment dict used in WSGI, alternately, any dict
            that contains at least an ``HTTP_HOST`` value.

        """
        self.mapper = mapper
        # Guarantee SCRIPT_NAME exists so __call__ can prefix raw URLs
        # without re-checking; note this mutates the caller's dict.
        if 'SCRIPT_NAME' not in environ:
            environ['SCRIPT_NAME'] = ''
        self.environ = environ

    def __call__(self, *args, **kargs):
        """Generates a URL

        All keys given to url_for are sent to the Routes Mapper instance for
        generation except for::

            anchor          specifies the anchor name to be appended to the path
            host            overrides the default (current) host if provided
            protocol        overrides the default (current) protocol if provided
            qualified       creates the URL with the host/port information as
                            needed

        """
        anchor = kargs.get('anchor')
        host = kargs.get('host')
        protocol = kargs.get('protocol')
        qualified = kargs.pop('qualified', None)

        # Remove special words from kargs, convert placeholders
        for key in ['anchor', 'host', 'protocol']:
            if kargs.get(key):
                del kargs[key]

        route = None
        # _use_current is set by current(): pull matched params from the
        # active request into the generated URL.
        use_current = '_use_current' in kargs and kargs.pop('_use_current')

        static = False
        encoding = self.mapper.encoding
        url = ''

        # A positional arg is either a named-route lookup or a raw path.
        more_args = len(args) > 0
        if more_args:
            route = self.mapper._routenames.get(args[0])

        # No named route found: treat args[0] as a literal URL ("static").
        if not route and more_args:
            static = True
            url = args[0]
            if url.startswith('/') and self.environ.get('SCRIPT_NAME'):
                url = self.environ.get('SCRIPT_NAME') + url

        if static:
            # Remaining kargs become a query string; list/tuple values are
            # expanded into repeated key=value pairs.
            # NOTE: Python 2-only constructs here (dict.iteritems, the
            # ``unicode`` builtin, urllib.quote).
            if kargs:
                url += '?'
                query_args = []
                for key, val in kargs.iteritems():
                    if isinstance(val, (list, tuple)):
                        for value in val:
                            query_args.append("%s=%s" % (
                                urllib.quote(unicode(key).encode(encoding)),
                                urllib.quote(unicode(value).encode(encoding))))
                    else:
                        query_args.append("%s=%s" % (
                            urllib.quote(unicode(key).encode(encoding)),
                            urllib.quote(unicode(val).encode(encoding))))
                url += '&'.join(query_args)
        if not static:
            route_args = []
            if route:
                # Named-route generation: start from the route's defaults and
                # let explicit kargs override them.
                if self.mapper.hardcode_names:
                    route_args.append(route)
                newargs = route.defaults.copy()
                newargs.update(kargs)

                # If this route has a filter, apply it
                if route.filter:
                    newargs = route.filter(newargs)
                if not route.static or (route.static and not route.external):
                    # Handle sub-domains, retain sub_domain if there is one
                    sub = newargs.get('sub_domain', None)
                    newargs = _subdomain_check(newargs, self.mapper,
                                               self.environ)
                    # If the route requires a sub-domain, and we have it, restore
                    # it
                    if 'sub_domain' in route.defaults:
                        newargs['sub_domain'] = sub

            elif use_current:
                # Merge the current request's matched params with kargs.
                newargs = _screenargs(kargs, self.mapper, self.environ, force_explicit=True)
            elif 'sub_domain' in kargs:
                newargs = _subdomain_check(kargs, self.mapper, self.environ)
            else:
                newargs = kargs

            # Route-level _anchor/_host/_protocol only apply when not already
            # given as keyword args (explicit kargs win).
            anchor = anchor or newargs.pop('_anchor', None)
            host = host or newargs.pop('_host', None)
            protocol = protocol or newargs.pop('_protocol', None)
            url = self.mapper.generate(*route_args, **newargs)
        if anchor is not None:
            url += '#' + _url_quote(anchor, encoding)
        if host or protocol or qualified:
            # Qualify the URL with scheme://host, using cached host info
            # derived from the WSGI environ.
            if 'routes.cached_hostinfo' not in self.environ:
                cache_hostinfo(self.environ)
            hostinfo = self.environ['routes.cached_hostinfo']

            if not host and not qualified:
                # Ensure we don't use a specific port, as changing the protocol
                # means that we most likely need a new port
                host = hostinfo['host'].split(':')[0]
            elif not host:
                host = hostinfo['host']
            if not protocol:
                protocol = hostinfo['protocol']
            if url is not None:
                if host[-1] != '/':
                    host += '/'
                url = protocol + '://' + host + url.lstrip('/')

        # Generation must yield a byte string (py2 ``str``); unicode leaking
        # through indicates an encoding problem upstream.
        if not isinstance(url, str) and url is not None:
            raise GenerationException("Can only return a string, got "
                                      "unicode instead: %s" % url)
        if url is None:
            raise GenerationException(
                "Could not generate URL. Called with args: %s %s" % \
                (args, kargs))
        return url

    def current(self, *args, **kwargs):
        """Generate a route that includes params used on the current
        request

        The arguments for this method are identical to ``__call__``
        except that arguments set to None will remove existing route
        matches of the same name from the set of arguments used to
        construct a URL.
        """
        return self(_use_current=True, *args, **kwargs)
|
||||
|
||||
|
||||
def redirect_to(*args, **kargs):
    """Issues a redirect based on the arguments.

    Redirect's *should* occur as a "302 Moved" header, however the web
    framework may utilize a different method.

    All arguments are passed to url_for to retrieve the appropriate URL, then
    the resulting URL it sent to the redirect function as the URL.
    """
    # Build the destination URL first, then hand it to the redirect hook
    # registered on the thread-local request config.
    destination = url_for(*args, **kargs)
    return request_config().redirect(destination)
|
||||
|
||||
|
||||
def cache_hostinfo(environ):
    """Processes the host information and stores a copy

    This work was previously done but wasn't stored in environ, nor is
    it guaranteed to be setup in the future (Routes 2 and beyond).

    cache_hostinfo processes environ keys that may be present to
    determine the proper host, protocol, and port information to use
    when generating routes.

    """
    # Any of these markers means the request arrived over TLS.
    secure = (environ.get('HTTPS')
              or environ.get('wsgi.url_scheme') == 'https'
              or environ.get('HTTP_X_FORWARDED_PROTO') == 'https')
    hostinfo = {'protocol': 'https' if secure else 'http'}

    # Prefer proxy-forwarded host, then the client-supplied Host header.
    host = environ.get('HTTP_X_FORWARDED_HOST') or environ.get('HTTP_HOST')
    if not host:
        # Fall back to the server name; append the port only when it
        # differs from the scheme's default.
        host = environ['SERVER_NAME']
        default_port = '443' if environ.get('wsgi.url_scheme') == 'https' else '80'
        if environ['SERVER_PORT'] != default_port:
            host = host + ':' + environ['SERVER_PORT']
    hostinfo['host'] = host

    # Memoize on the environ so repeated URL generation is cheap.
    environ['routes.cached_hostinfo'] = hostinfo
    return hostinfo
|
||||
|
||||
|
||||
def controller_scan(directory=None):
    """Scan a directory for python files and use them as controllers

    Returns controller names: file paths relative to ``directory`` with
    the ``.py`` extension stripped and subdirectories joined by ``/``.
    Files whose names start with an underscore are skipped.  The list is
    sorted longest name first so more specific controllers are matched
    before less specific ones.  Returns an empty list when ``directory``
    is None.
    """
    if directory is None:
        return []

    def find_controllers(dirname, prefix=''):
        """Locate controllers in a directory"""
        controllers = []
        for fname in os.listdir(dirname):
            filename = os.path.join(dirname, fname)
            # Only public python modules count; leading-underscore files
            # (e.g. __init__.py, _helpers.py) are not controllers.
            if os.path.isfile(filename) and \
                re.match(r'^[^_].*\.py$', fname):
                controllers.append(prefix + fname[:-3])
            elif os.path.isdir(filename):
                # Recurse into packages, joining path parts with '/'
                controllers.extend(find_controllers(filename,
                                                    prefix=prefix+fname+'/'))
        return controllers

    controllers = find_controllers(directory)
    # Longest first.  key= replaces the Python 2-only cmp-function sort
    # (cmp() does not exist on Python 3); reverse=True is stable, so
    # names of equal length keep their discovery order, as before.
    controllers.sort(key=len, reverse=True)
    return controllers
|
Loading…
x
Reference in New Issue
Block a user