[REM,IMP]:removed mako package from server as it is added in setup.py
bzr revid: nch@tinyerp.com-20091215052239-64nu0xf330xs6fgz
This commit is contained in:
parent
a51fb3af14
commit
e11130ec2f
|
@ -1,7 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# __init__.py
|
||||
# Copyright (C) 2006, 2007, 2008 Michael Bayer mike_mp@zzzcomputing.com
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
|
@ -1,834 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
ast
|
||||
~~~
|
||||
|
||||
The `ast` module helps Python applications to process trees of the Python
|
||||
abstract syntax grammar. The abstract syntax itself might change with
|
||||
each Python release; this module helps to find out programmatically what
|
||||
the current grammar looks like and allows modifications of it.
|
||||
|
||||
An abstract syntax tree can be generated by passing `ast.PyCF_ONLY_AST` as
|
||||
a flag to the `compile()` builtin function or by using the `parse()`
|
||||
function from this module. The result will be a tree of objects whose
|
||||
classes all inherit from `ast.AST`.
|
||||
|
||||
A modified abstract syntax tree can be compiled into a Python code object
|
||||
using the built-in `compile()` function.
|
||||
|
||||
Additionally various helper functions are provided that make working with
|
||||
the trees simpler. The main intention of the helper functions and this
|
||||
module in general is to provide an easy to use interface for libraries
|
||||
that work tightly with the python syntax (template engines for example).
|
||||
|
||||
|
||||
:copyright: Copyright 2008 by Armin Ronacher.
|
||||
:license: Python License.
|
||||
"""
|
||||
from _ast import *
|
||||
|
||||
|
||||
BOOLOP_SYMBOLS = {
|
||||
And: 'and',
|
||||
Or: 'or'
|
||||
}
|
||||
|
||||
BINOP_SYMBOLS = {
|
||||
Add: '+',
|
||||
Sub: '-',
|
||||
Mult: '*',
|
||||
Div: '/',
|
||||
FloorDiv: '//',
|
||||
Mod: '%',
|
||||
LShift: '<<',
|
||||
RShift: '>>',
|
||||
BitOr: '|',
|
||||
BitAnd: '&',
|
||||
BitXor: '^'
|
||||
}
|
||||
|
||||
CMPOP_SYMBOLS = {
|
||||
Eq: '==',
|
||||
Gt: '>',
|
||||
GtE: '>=',
|
||||
In: 'in',
|
||||
Is: 'is',
|
||||
IsNot: 'is not',
|
||||
Lt: '<',
|
||||
LtE: '<=',
|
||||
NotEq: '!=',
|
||||
NotIn: 'not in'
|
||||
}
|
||||
|
||||
UNARYOP_SYMBOLS = {
|
||||
Invert: '~',
|
||||
Not: 'not',
|
||||
UAdd: '+',
|
||||
USub: '-'
|
||||
}
|
||||
|
||||
ALL_SYMBOLS = {}
|
||||
ALL_SYMBOLS.update(BOOLOP_SYMBOLS)
|
||||
ALL_SYMBOLS.update(BINOP_SYMBOLS)
|
||||
ALL_SYMBOLS.update(CMPOP_SYMBOLS)
|
||||
ALL_SYMBOLS.update(UNARYOP_SYMBOLS)
|
||||
|
||||
|
||||
def parse(expr, filename='<unknown>', mode='exec'):
|
||||
"""Parse an expression into an AST node."""
|
||||
return compile(expr, filename, mode, PyCF_ONLY_AST)
|
||||
|
||||
|
||||
def to_source(node, indent_with=' ' * 4):
|
||||
"""
|
||||
This function can convert a node tree back into python sourcecode. This
|
||||
is useful for debugging purposes, especially if you're dealing with custom
|
||||
asts not generated by python itself.
|
||||
|
||||
It could be that the sourcecode is evaluable when the AST itself is not
|
||||
compilable / evaluable. The reason for this is that the AST contains some
|
||||
more data than regular sourcecode does, which is dropped during
|
||||
conversion.
|
||||
|
||||
Each level of indentation is replaced with `indent_with`. Per default this
|
||||
parameter is equal to four spaces as suggested by PEP 8, but it might be
|
||||
adjusted to match the application's styleguide.
|
||||
"""
|
||||
generator = SourceGenerator(indent_with)
|
||||
generator.visit(node)
|
||||
return ''.join(generator.result)
|
||||
|
||||
|
||||
def dump(node):
|
||||
"""
|
||||
A very verbose representation of the node passed. This is useful for
|
||||
debugging purposes.
|
||||
"""
|
||||
def _format(node):
|
||||
if isinstance(node, AST):
|
||||
return '%s(%s)' % (node.__class__.__name__,
|
||||
', '.join('%s=%s' % (a, _format(b))
|
||||
for a, b in iter_fields(node)))
|
||||
elif isinstance(node, list):
|
||||
return '[%s]' % ', '.join(_format(x) for x in node)
|
||||
return repr(node)
|
||||
if not isinstance(node, AST):
|
||||
raise TypeError('expected AST, got %r' % node.__class__.__name__)
|
||||
return _format(node)
|
||||
|
||||
|
||||
def copy_location(new_node, old_node):
|
||||
"""
|
||||
Copy the source location hint (`lineno` and `col_offset`) from the
|
||||
old to the new node if possible and return the new one.
|
||||
"""
|
||||
for attr in 'lineno', 'col_offset':
|
||||
if attr in old_node._attributes and attr in new_node._attributes \
|
||||
and hasattr(old_node, attr):
|
||||
setattr(new_node, attr, getattr(old_node, attr))
|
||||
return new_node
|
||||
|
||||
|
||||
def fix_missing_locations(node):
|
||||
"""
|
||||
Some nodes require a line number and the column offset. Without that
|
||||
information the compiler will abort the compilation. Because it can be
|
||||
a dull task to add appropriate line numbers and column offsets when
|
||||
adding new nodes this function can help. It copies the line number and
|
||||
column offset of the parent node to the child nodes without this
|
||||
information.
|
||||
|
||||
Unlike `copy_location` this works recursive and won't touch nodes that
|
||||
already have a location information.
|
||||
"""
|
||||
def _fix(node, lineno, col_offset):
|
||||
if 'lineno' in node._attributes:
|
||||
if not hasattr(node, 'lineno'):
|
||||
node.lineno = lineno
|
||||
else:
|
||||
lineno = node.lineno
|
||||
if 'col_offset' in node._attributes:
|
||||
if not hasattr(node, 'col_offset'):
|
||||
node.col_offset = col_offset
|
||||
else:
|
||||
col_offset = node.col_offset
|
||||
for child in iter_child_nodes(node):
|
||||
_fix(child, lineno, col_offset)
|
||||
_fix(node, 1, 0)
|
||||
return node
|
||||
|
||||
|
||||
def increment_lineno(node, n=1):
|
||||
"""
|
||||
Increment the line numbers of all nodes by `n` if they have line number
|
||||
attributes. This is useful to "move code" to a different location in a
|
||||
file.
|
||||
"""
|
||||
for node in zip((node,), walk(node)):
|
||||
if 'lineno' in node._attributes:
|
||||
node.lineno = getattr(node, 'lineno', 0) + n
|
||||
|
||||
|
||||
def iter_fields(node):
|
||||
"""Iterate over all fields of a node, only yielding existing fields."""
|
||||
# CPython 2.5 compat
|
||||
if not hasattr(node, '_fields') or not node._fields:
|
||||
return
|
||||
for field in node._fields:
|
||||
try:
|
||||
yield field, getattr(node, field)
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
|
||||
def get_fields(node):
|
||||
"""Like `iter_fiels` but returns a dict."""
|
||||
return dict(iter_fields(node))
|
||||
|
||||
|
||||
def iter_child_nodes(node):
|
||||
"""Iterate over all child nodes or a node."""
|
||||
for name, field in iter_fields(node):
|
||||
if isinstance(field, AST):
|
||||
yield field
|
||||
elif isinstance(field, list):
|
||||
for item in field:
|
||||
if isinstance(item, AST):
|
||||
yield item
|
||||
|
||||
|
||||
def get_child_nodes(node):
|
||||
"""Like `iter_child_nodes` but returns a list."""
|
||||
return list(iter_child_nodes(node))
|
||||
|
||||
|
||||
def get_compile_mode(node):
|
||||
"""
|
||||
Get the mode for `compile` of a given node. If the node is not a `mod`
|
||||
node (`Expression`, `Module` etc.) a `TypeError` is thrown.
|
||||
"""
|
||||
if not isinstance(node, mod):
|
||||
raise TypeError('expected mod node, got %r' % node.__class__.__name__)
|
||||
return {
|
||||
Expression: 'eval',
|
||||
Interactive: 'single'
|
||||
}.get(node.__class__, 'expr')
|
||||
|
||||
|
||||
def get_docstring(node):
|
||||
"""
|
||||
Return the docstring for the given node or `None` if no docstring can be
|
||||
found. If the node provided does not accept docstrings a `TypeError`
|
||||
will be raised.
|
||||
"""
|
||||
if not isinstance(node, (FunctionDef, ClassDef, Module)):
|
||||
raise TypeError("%r can't have docstrings" % node.__class__.__name__)
|
||||
if node.body and isinstance(node.body[0], Str):
|
||||
return node.body[0].s
|
||||
|
||||
|
||||
def walk(node):
|
||||
"""
|
||||
Iterate over all nodes. This is useful if you only want to modify nodes in
|
||||
place and don't care about the context or the order the nodes are returned.
|
||||
"""
|
||||
from collections import deque
|
||||
todo = deque([node])
|
||||
while todo:
|
||||
node = todo.popleft()
|
||||
todo.extend(iter_child_nodes(node))
|
||||
yield node
|
||||
|
||||
|
||||
class NodeVisitor(object):
|
||||
"""
|
||||
Walks the abstract syntax tree and call visitor functions for every node
|
||||
found. The visitor functions may return values which will be forwarded
|
||||
by the `visit` method.
|
||||
|
||||
Per default the visitor functions for the nodes are ``'visit_'`` +
|
||||
class name of the node. So a `TryFinally` node visit function would
|
||||
be `visit_TryFinally`. This behavior can be changed by overriding
|
||||
the `get_visitor` function. If no visitor function exists for a node
|
||||
(return value `None`) the `generic_visit` visitor is used instead.
|
||||
|
||||
Don't use the `NodeVisitor` if you want to apply changes to nodes during
|
||||
traversing. For this a special visitor exists (`NodeTransformer`) that
|
||||
allows modifications.
|
||||
"""
|
||||
|
||||
def get_visitor(self, node):
|
||||
"""
|
||||
Return the visitor function for this node or `None` if no visitor
|
||||
exists for this node. In that case the generic visit function is
|
||||
used instead.
|
||||
"""
|
||||
method = 'visit_' + node.__class__.__name__
|
||||
return getattr(self, method, None)
|
||||
|
||||
def visit(self, node):
|
||||
"""Visit a node."""
|
||||
f = self.get_visitor(node)
|
||||
if f is not None:
|
||||
return f(node)
|
||||
return self.generic_visit(node)
|
||||
|
||||
def generic_visit(self, node):
|
||||
"""Called if no explicit visitor function exists for a node."""
|
||||
for field, value in iter_fields(node):
|
||||
if isinstance(value, list):
|
||||
for item in value:
|
||||
if isinstance(item, AST):
|
||||
self.visit(item)
|
||||
elif isinstance(value, AST):
|
||||
self.visit(value)
|
||||
|
||||
|
||||
class NodeTransformer(NodeVisitor):
|
||||
"""
|
||||
Walks the abstract syntax tree and allows modifications of nodes.
|
||||
|
||||
The `NodeTransformer` will walk the AST and use the return value of the
|
||||
visitor functions to replace or remove the old node. If the return
|
||||
value of the visitor function is `None` the node will be removed
|
||||
from the previous location otherwise it's replaced with the return
|
||||
value. The return value may be the original node in which case no
|
||||
replacement takes place.
|
||||
|
||||
Here an example transformer that rewrites all `foo` to `data['foo']`::
|
||||
|
||||
class RewriteName(NodeTransformer):
|
||||
|
||||
def visit_Name(self, node):
|
||||
return copy_location(Subscript(
|
||||
value=Name(id='data', ctx=Load()),
|
||||
slice=Index(value=Str(s=node.id)),
|
||||
ctx=node.ctx
|
||||
), node)
|
||||
|
||||
Keep in mind that if the node you're operating on has child nodes
|
||||
you must either transform the child nodes yourself or call the generic
|
||||
visit function for the node first.
|
||||
|
||||
Nodes that were part of a collection of statements (that applies to
|
||||
all statement nodes) may also return a list of nodes rather than just
|
||||
a single node.
|
||||
|
||||
Usually you use the transformer like this::
|
||||
|
||||
node = YourTransformer().visit(node)
|
||||
"""
|
||||
|
||||
def generic_visit(self, node):
|
||||
for field, old_value in iter_fields(node):
|
||||
old_value = getattr(node, field, None)
|
||||
if isinstance(old_value, list):
|
||||
new_values = []
|
||||
for value in old_value:
|
||||
if isinstance(value, AST):
|
||||
value = self.visit(value)
|
||||
if value is None:
|
||||
continue
|
||||
elif not isinstance(value, AST):
|
||||
new_values.extend(value)
|
||||
continue
|
||||
new_values.append(value)
|
||||
old_value[:] = new_values
|
||||
elif isinstance(old_value, AST):
|
||||
new_node = self.visit(old_value)
|
||||
if new_node is None:
|
||||
delattr(node, field)
|
||||
else:
|
||||
setattr(node, field, new_node)
|
||||
return node
|
||||
|
||||
|
||||
class SourceGenerator(NodeVisitor):
|
||||
"""
|
||||
This visitor is able to transform a well formed syntax tree into python
|
||||
sourcecode. For more details have a look at the docstring of the
|
||||
`node_to_source` function.
|
||||
"""
|
||||
|
||||
def __init__(self, indent_with):
|
||||
self.result = []
|
||||
self.indent_with = indent_with
|
||||
self.indentation = 0
|
||||
self.new_lines = 0
|
||||
|
||||
def write(self, x):
|
||||
if self.new_lines:
|
||||
if self.result:
|
||||
self.result.append('\n' * self.new_lines)
|
||||
self.result.append(self.indent_with * self.indentation)
|
||||
self.new_lines = 0
|
||||
self.result.append(x)
|
||||
|
||||
def newline(self, n=1):
|
||||
self.new_lines = max(self.new_lines, n)
|
||||
|
||||
def body(self, statements):
|
||||
self.new_line = True
|
||||
self.indentation += 1
|
||||
for stmt in statements:
|
||||
self.visit(stmt)
|
||||
self.indentation -= 1
|
||||
|
||||
def body_or_else(self, node):
|
||||
self.body(node.body)
|
||||
if node.orelse:
|
||||
self.newline()
|
||||
self.write('else:')
|
||||
self.body(node.orelse)
|
||||
|
||||
def signature(self, node):
|
||||
want_comma = []
|
||||
def write_comma():
|
||||
if want_comma:
|
||||
self.write(', ')
|
||||
else:
|
||||
want_comma.append(True)
|
||||
|
||||
padding = [None] * (len(node.args) - len(node.defaults))
|
||||
for arg, default in zip(node.args, padding + node.defaults):
|
||||
write_comma()
|
||||
self.visit(arg)
|
||||
if default is not None:
|
||||
self.write('=')
|
||||
self.visit(default)
|
||||
if node.vararg is not None:
|
||||
write_comma()
|
||||
self.write('*' + node.vararg)
|
||||
if node.kwarg is not None:
|
||||
write_comma()
|
||||
self.write('**' + node.kwarg)
|
||||
|
||||
def decorators(self, node):
|
||||
for decorator in node.decorator_list:
|
||||
self.newline()
|
||||
self.write('@')
|
||||
self.visit(decorator)
|
||||
|
||||
# Statements
|
||||
|
||||
def visit_Assign(self, node):
|
||||
self.newline()
|
||||
for idx, target in enumerate(node.targets):
|
||||
if idx:
|
||||
self.write(', ')
|
||||
self.visit(target)
|
||||
self.write(' = ')
|
||||
self.visit(node.value)
|
||||
|
||||
def visit_AugAssign(self, node):
|
||||
self.newline()
|
||||
self.visit(node.target)
|
||||
self.write(BINOP_SYMBOLS[type(node.op)] + '=')
|
||||
self.visit(node.value)
|
||||
|
||||
def visit_ImportFrom(self, node):
|
||||
self.newline()
|
||||
self.write('from %s%s import ' % ('.' * node.level, node.module))
|
||||
for idx, item in enumerate(node.names):
|
||||
if idx:
|
||||
self.write(', ')
|
||||
self.write(item)
|
||||
|
||||
def visit_Import(self, node):
|
||||
self.newline()
|
||||
for item in node.names:
|
||||
self.write('import ')
|
||||
self.visit(item)
|
||||
|
||||
def visit_Expr(self, node):
|
||||
self.newline()
|
||||
self.generic_visit(node)
|
||||
|
||||
def visit_FunctionDef(self, node):
|
||||
self.newline(n=2)
|
||||
self.decorators(node)
|
||||
self.newline()
|
||||
self.write('def %s(' % node.name)
|
||||
self.signature(node.args)
|
||||
self.write('):')
|
||||
self.body(node.body)
|
||||
|
||||
def visit_ClassDef(self, node):
|
||||
have_args = []
|
||||
def paren_or_comma():
|
||||
if have_args:
|
||||
self.write(', ')
|
||||
else:
|
||||
have_args.append(True)
|
||||
self.write('(')
|
||||
|
||||
self.newline(n=3)
|
||||
self.decorators(node)
|
||||
self.newline()
|
||||
self.write('class %s' % node.name)
|
||||
for base in node.bases:
|
||||
paren_or_comma()
|
||||
self.visit(base)
|
||||
# XXX: the if here is used to keep this module compatible
|
||||
# with python 2.6.
|
||||
if hasattr(node, 'keywords'):
|
||||
for keyword in node.keywords:
|
||||
paren_or_comma()
|
||||
self.write(keyword.arg + '=')
|
||||
self.visit(keyword.value)
|
||||
if node.starargs is not None:
|
||||
paren_or_comma()
|
||||
self.write('*')
|
||||
self.visit(node.starargs)
|
||||
if node.kwargs is not None:
|
||||
paren_or_comma()
|
||||
self.write('**')
|
||||
self.visit(node.kwargs)
|
||||
self.write(have_args and '):' or ':')
|
||||
self.body(node.body)
|
||||
|
||||
def visit_If(self, node):
|
||||
self.newline()
|
||||
self.write('if ')
|
||||
self.visit(node.test)
|
||||
self.write(':')
|
||||
self.body(node.body)
|
||||
while True:
|
||||
else_ = node.orelse
|
||||
if len(else_) == 1 and isinstance(else_[0], If):
|
||||
node = else_[0]
|
||||
self.newline()
|
||||
self.write('elif ')
|
||||
self.visit(node.test)
|
||||
self.write(':')
|
||||
self.body(node.body)
|
||||
else:
|
||||
self.newline()
|
||||
self.write('else:')
|
||||
self.body(else_)
|
||||
break
|
||||
|
||||
def visit_For(self, node):
|
||||
self.newline()
|
||||
self.write('for ')
|
||||
self.visit(node.target)
|
||||
self.write(' in ')
|
||||
self.visit(node.iter)
|
||||
self.write(':')
|
||||
self.body_or_else(node)
|
||||
|
||||
def visit_While(self, node):
|
||||
self.newline()
|
||||
self.write('while ')
|
||||
self.visit(node.test)
|
||||
self.write(':')
|
||||
self.body_or_else(node)
|
||||
|
||||
def visit_With(self, node):
|
||||
self.newline()
|
||||
self.write('with ')
|
||||
self.visit(node.context_expr)
|
||||
if node.optional_vars is not None:
|
||||
self.write(' as ')
|
||||
self.visit(node.optional_vars)
|
||||
self.write(':')
|
||||
self.body(node.body)
|
||||
|
||||
def visit_Pass(self, node):
|
||||
self.newline()
|
||||
self.write('pass')
|
||||
|
||||
def visit_Print(self, node):
|
||||
# XXX: python 2.6 only
|
||||
self.newline()
|
||||
self.write('print ')
|
||||
want_comma = False
|
||||
if node.dest is not None:
|
||||
self.write(' >> ')
|
||||
self.visit(node.dest)
|
||||
want_comma = True
|
||||
for value in node.values:
|
||||
if want_comma:
|
||||
self.write(', ')
|
||||
self.visit(value)
|
||||
want_comma = True
|
||||
if not node.nl:
|
||||
self.write(',')
|
||||
|
||||
def visit_Delete(self, node):
|
||||
self.newline()
|
||||
self.write('del ')
|
||||
for idx, target in enumerate(node):
|
||||
if idx:
|
||||
self.write(', ')
|
||||
self.visit(target)
|
||||
|
||||
def visit_TryExcept(self, node):
|
||||
self.newline()
|
||||
self.write('try:')
|
||||
self.body(node.body)
|
||||
for handler in node.handlers:
|
||||
self.visit(handler)
|
||||
|
||||
def visit_TryFinally(self, node):
|
||||
self.newline()
|
||||
self.write('try:')
|
||||
self.body(node.body)
|
||||
self.newline()
|
||||
self.write('finally:')
|
||||
self.body(node.finalbody)
|
||||
|
||||
def visit_Global(self, node):
|
||||
self.newline()
|
||||
self.write('global ' + ', '.join(node.names))
|
||||
|
||||
def visit_Nonlocal(self, node):
|
||||
self.newline()
|
||||
self.write('nonlocal ' + ', '.join(node.names))
|
||||
|
||||
def visit_Return(self, node):
|
||||
self.newline()
|
||||
self.write('return ')
|
||||
self.visit(node.value)
|
||||
|
||||
def visit_Break(self, node):
|
||||
self.newline()
|
||||
self.write('break')
|
||||
|
||||
def visit_Continue(self, node):
|
||||
self.newline()
|
||||
self.write('continue')
|
||||
|
||||
def visit_Raise(self, node):
|
||||
# XXX: Python 2.6 / 3.0 compatibility
|
||||
self.newline()
|
||||
self.write('raise')
|
||||
if hasattr(node, 'exc') and node.exc is not None:
|
||||
self.write(' ')
|
||||
self.visit(node.exc)
|
||||
if node.cause is not None:
|
||||
self.write(' from ')
|
||||
self.visit(node.cause)
|
||||
elif hasattr(node, 'type') and node.type is not None:
|
||||
self.visit(node.type)
|
||||
if node.inst is not None:
|
||||
self.write(', ')
|
||||
self.visit(node.inst)
|
||||
if node.tback is not None:
|
||||
self.write(', ')
|
||||
self.visit(node.tback)
|
||||
|
||||
# Expressions
|
||||
|
||||
def visit_Attribute(self, node):
|
||||
self.visit(node.value)
|
||||
self.write('.' + node.attr)
|
||||
|
||||
def visit_Call(self, node):
|
||||
want_comma = []
|
||||
def write_comma():
|
||||
if want_comma:
|
||||
self.write(', ')
|
||||
else:
|
||||
want_comma.append(True)
|
||||
|
||||
self.visit(node.func)
|
||||
self.write('(')
|
||||
for arg in node.args:
|
||||
write_comma()
|
||||
self.visit(arg)
|
||||
for keyword in node.keywords:
|
||||
write_comma()
|
||||
self.write(keyword.arg + '=')
|
||||
self.visit(keyword.value)
|
||||
if node.starargs is not None:
|
||||
write_comma()
|
||||
self.write('*')
|
||||
self.visit(node.starargs)
|
||||
if node.kwargs is not None:
|
||||
write_comma()
|
||||
self.write('**')
|
||||
self.visit(node.kwargs)
|
||||
self.write(')')
|
||||
|
||||
def visit_Name(self, node):
|
||||
self.write(node.id)
|
||||
|
||||
def visit_Str(self, node):
|
||||
self.write(repr(node.s))
|
||||
|
||||
def visit_Bytes(self, node):
|
||||
self.write(repr(node.s))
|
||||
|
||||
def visit_Num(self, node):
|
||||
self.write(repr(node.n))
|
||||
|
||||
def visit_Tuple(self, node):
|
||||
self.write('(')
|
||||
idx = -1
|
||||
for idx, item in enumerate(node.elts):
|
||||
if idx:
|
||||
self.write(', ')
|
||||
self.visit(item)
|
||||
self.write(idx and ')' or ',)')
|
||||
|
||||
def sequence_visit(left, right):
|
||||
def visit(self, node):
|
||||
self.write(left)
|
||||
for idx, item in enumerate(node.elts):
|
||||
if idx:
|
||||
self.write(', ')
|
||||
self.visit(item)
|
||||
self.write(right)
|
||||
return visit
|
||||
|
||||
visit_List = sequence_visit('[', ']')
|
||||
visit_Set = sequence_visit('{', '}')
|
||||
del sequence_visit
|
||||
|
||||
def visit_Dict(self, node):
|
||||
self.write('{')
|
||||
for idx, (key, value) in enumerate(zip(node.keys, node.values)):
|
||||
if idx:
|
||||
self.write(', ')
|
||||
self.visit(key)
|
||||
self.write(': ')
|
||||
self.visit(value)
|
||||
self.write('}')
|
||||
|
||||
def visit_BinOp(self, node):
|
||||
self.write('(')
|
||||
self.visit(node.left)
|
||||
self.write(' %s ' % BINOP_SYMBOLS[type(node.op)])
|
||||
self.visit(node.right)
|
||||
self.write(')')
|
||||
|
||||
def visit_BoolOp(self, node):
|
||||
self.write('(')
|
||||
for idx, value in enumerate(node.values):
|
||||
if idx:
|
||||
self.write(' %s ' % BOOLOP_SYMBOLS[type(node.op)])
|
||||
self.visit(value)
|
||||
self.write(')')
|
||||
|
||||
def visit_Compare(self, node):
|
||||
self.write('(')
|
||||
self.visit(node.left)
|
||||
for op, right in zip(node.ops, node.comparators):
|
||||
self.write(' %s ' % CMPOP_SYMBOLS[type(op)])
|
||||
self.visit(right)
|
||||
self.write(')')
|
||||
|
||||
def visit_UnaryOp(self, node):
|
||||
self.write('(')
|
||||
op = UNARYOP_SYMBOLS[type(node.op)]
|
||||
self.write(op)
|
||||
if op == 'not':
|
||||
self.write(' ')
|
||||
self.visit(node.operand)
|
||||
self.write(')')
|
||||
|
||||
def visit_Subscript(self, node):
|
||||
self.visit(node.value)
|
||||
self.write('[')
|
||||
self.visit(node.slice)
|
||||
self.write(']')
|
||||
|
||||
def visit_Slice(self, node):
|
||||
if node.lower is not None:
|
||||
self.visit(node.lower)
|
||||
self.write(':')
|
||||
if node.upper is not None:
|
||||
self.visit(node.upper)
|
||||
if node.step is not None:
|
||||
self.write(':')
|
||||
if not (isinstance(node.step, Name) and node.step.id == 'None'):
|
||||
self.visit(node.step)
|
||||
|
||||
def visit_ExtSlice(self, node):
|
||||
for idx, item in node.dims:
|
||||
if idx:
|
||||
self.write(', ')
|
||||
self.visit(item)
|
||||
|
||||
def visit_Yield(self, node):
|
||||
self.write('yield ')
|
||||
self.visit(node.value)
|
||||
|
||||
def visit_Lambda(self, node):
|
||||
self.write('lambda ')
|
||||
self.signature(node.args)
|
||||
self.write(': ')
|
||||
self.visit(node.body)
|
||||
|
||||
def visit_Ellipsis(self, node):
|
||||
self.write('Ellipsis')
|
||||
|
||||
def generator_visit(left, right):
|
||||
def visit(self, node):
|
||||
self.write(left)
|
||||
self.visit(node.elt)
|
||||
for comprehension in node.generators:
|
||||
self.visit(comprehension)
|
||||
self.write(right)
|
||||
return visit
|
||||
|
||||
visit_ListComp = generator_visit('[', ']')
|
||||
visit_GeneratorExp = generator_visit('(', ')')
|
||||
visit_SetComp = generator_visit('{', '}')
|
||||
del generator_visit
|
||||
|
||||
def visit_DictComp(self, node):
|
||||
self.write('{')
|
||||
self.visit(node.key)
|
||||
self.write(': ')
|
||||
self.visit(node.value)
|
||||
for comprehension in node.generators:
|
||||
self.visit(comprehension)
|
||||
self.write('}')
|
||||
|
||||
def visit_IfExp(self, node):
|
||||
self.visit(node.body)
|
||||
self.write(' if ')
|
||||
self.visit(node.test)
|
||||
self.write(' else ')
|
||||
self.visit(node.orelse)
|
||||
|
||||
def visit_Starred(self, node):
|
||||
self.write('*')
|
||||
self.visit(node.value)
|
||||
|
||||
def visit_Repr(self, node):
|
||||
# XXX: python 2.6 only
|
||||
self.write('`')
|
||||
self.visit(node.value)
|
||||
self.write('`')
|
||||
|
||||
# Helper Nodes
|
||||
|
||||
def visit_alias(self, node):
|
||||
self.write(node.name)
|
||||
if node.asname is not None:
|
||||
self.write(' as ' + node.asname)
|
||||
|
||||
def visit_comprehension(self, node):
|
||||
self.write(' for ')
|
||||
self.visit(node.target)
|
||||
self.write(' in ')
|
||||
self.visit(node.iter)
|
||||
if node.ifs:
|
||||
for if_ in node.ifs:
|
||||
self.write(' if ')
|
||||
self.visit(if_)
|
||||
|
||||
def visit_excepthandler(self, node):
|
||||
self.newline()
|
||||
self.write('except')
|
||||
if node.type is not None:
|
||||
self.write(' ')
|
||||
self.visit(node.type)
|
||||
if node.name is not None:
|
||||
self.write(' as ')
|
||||
self.visit(node.name)
|
||||
self.write(':')
|
||||
self.body(node.body)
|
126
bin/mako/ast.py
126
bin/mako/ast.py
|
@ -1,126 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# ast.py
|
||||
# Copyright (C) 2006, 2007, 2008 Michael Bayer mike_mp@zzzcomputing.com
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""utilities for analyzing expressions and blocks of Python code, as well as generating Python from AST nodes"""
|
||||
|
||||
from mako import exceptions, pyparser, util
|
||||
import re
|
||||
|
||||
class PythonCode(object):
|
||||
"""represents information about a string containing Python code"""
|
||||
def __init__(self, code, **exception_kwargs):
|
||||
self.code = code
|
||||
|
||||
# represents all identifiers which are assigned to at some point in the code
|
||||
self.declared_identifiers = util.Set()
|
||||
|
||||
# represents all identifiers which are referenced before their assignment, if any
|
||||
self.undeclared_identifiers = util.Set()
|
||||
|
||||
# note that an identifier can be in both the undeclared and declared lists.
|
||||
|
||||
# using AST to parse instead of using code.co_varnames, code.co_names has several advantages:
|
||||
# - we can locate an identifier as "undeclared" even if its declared later in the same block of code
|
||||
# - AST is less likely to break with version changes (for example, the behavior of co_names changed a little bit
|
||||
# in python version 2.5)
|
||||
if isinstance(code, basestring):
|
||||
expr = pyparser.parse(code.lstrip(), "exec", **exception_kwargs)
|
||||
else:
|
||||
expr = code
|
||||
|
||||
f = pyparser.FindIdentifiers(self, **exception_kwargs)
|
||||
f.visit(expr)
|
||||
|
||||
class ArgumentList(object):
|
||||
"""parses a fragment of code as a comma-separated list of expressions"""
|
||||
def __init__(self, code, **exception_kwargs):
|
||||
self.codeargs = []
|
||||
self.args = []
|
||||
self.declared_identifiers = util.Set()
|
||||
self.undeclared_identifiers = util.Set()
|
||||
if isinstance(code, basestring):
|
||||
if re.match(r"\S", code) and not re.match(r",\s*$", code):
|
||||
# if theres text and no trailing comma, insure its parsed
|
||||
# as a tuple by adding a trailing comma
|
||||
code += ","
|
||||
expr = pyparser.parse(code, "exec", **exception_kwargs)
|
||||
else:
|
||||
expr = code
|
||||
|
||||
f = pyparser.FindTuple(self, PythonCode, **exception_kwargs)
|
||||
f.visit(expr)
|
||||
|
||||
class PythonFragment(PythonCode):
|
||||
"""extends PythonCode to provide identifier lookups in partial control statements
|
||||
|
||||
e.g.
|
||||
for x in 5:
|
||||
elif y==9:
|
||||
except (MyException, e):
|
||||
etc.
|
||||
"""
|
||||
def __init__(self, code, **exception_kwargs):
|
||||
m = re.match(r'^(\w+)(?:\s+(.*?))?:\s*(#|$)', code.strip(), re.S)
|
||||
if not m:
|
||||
raise exceptions.CompileException("Fragment '%s' is not a partial control statement" % code, **exception_kwargs)
|
||||
if m.group(3):
|
||||
code = code[:m.start(3)]
|
||||
(keyword, expr) = m.group(1,2)
|
||||
if keyword in ['for','if', 'while']:
|
||||
code = code + "pass"
|
||||
elif keyword == 'try':
|
||||
code = code + "pass\nexcept:pass"
|
||||
elif keyword == 'elif' or keyword == 'else':
|
||||
code = "if False:pass\n" + code + "pass"
|
||||
elif keyword == 'except':
|
||||
code = "try:pass\n" + code + "pass"
|
||||
else:
|
||||
raise exceptions.CompileException("Unsupported control keyword: '%s'" % keyword, **exception_kwargs)
|
||||
super(PythonFragment, self).__init__(code, **exception_kwargs)
|
||||
|
||||
|
||||
class FunctionDecl(object):
|
||||
"""function declaration"""
|
||||
def __init__(self, code, allow_kwargs=True, **exception_kwargs):
|
||||
self.code = code
|
||||
expr = pyparser.parse(code, "exec", **exception_kwargs)
|
||||
|
||||
f = pyparser.ParseFunc(self, **exception_kwargs)
|
||||
f.visit(expr)
|
||||
if not hasattr(self, 'funcname'):
|
||||
raise exceptions.CompileException("Code '%s' is not a function declaration" % code, **exception_kwargs)
|
||||
if not allow_kwargs and self.kwargs:
|
||||
raise exceptions.CompileException("'**%s' keyword argument not allowed here" % self.argnames[-1], **exception_kwargs)
|
||||
|
||||
def get_argument_expressions(self, include_defaults=True):
|
||||
"""return the argument declarations of this FunctionDecl as a printable list."""
|
||||
namedecls = []
|
||||
defaults = [d for d in self.defaults]
|
||||
kwargs = self.kwargs
|
||||
varargs = self.varargs
|
||||
argnames = [f for f in self.argnames]
|
||||
argnames.reverse()
|
||||
for arg in argnames:
|
||||
default = None
|
||||
if kwargs:
|
||||
arg = "**" + arg
|
||||
kwargs = False
|
||||
elif varargs:
|
||||
arg = "*" + arg
|
||||
varargs = False
|
||||
else:
|
||||
default = len(defaults) and defaults.pop() or None
|
||||
if include_defaults and default:
|
||||
namedecls.insert(0, "%s=%s" % (arg, pyparser.ExpressionGenerator(default).value()))
|
||||
else:
|
||||
namedecls.insert(0, arg)
|
||||
return namedecls
|
||||
|
||||
class FunctionArgs(FunctionDecl):
|
||||
"""the argument portion of a function declaration"""
|
||||
def __init__(self, code, **kwargs):
|
||||
super(FunctionArgs, self).__init__("def ANON(%s):pass" % code, **kwargs)
|
|
@ -1,56 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from mako import exceptions
|
||||
|
||||
try:
|
||||
from beaker import cache
|
||||
cache = cache.CacheManager()
|
||||
except ImportError:
|
||||
cache = None
|
||||
|
||||
class Cache(object):
|
||||
def __init__(self, id, starttime):
|
||||
self.id = id
|
||||
self.starttime = starttime
|
||||
self.def_regions = {}
|
||||
|
||||
def put(self, key, value, **kwargs):
|
||||
defname = kwargs.pop('defname', None)
|
||||
expiretime = kwargs.pop('expiretime', None)
|
||||
createfunc = kwargs.pop('createfunc', None)
|
||||
|
||||
self._get_cache(defname, **kwargs).put_value(key, starttime=self.starttime, expiretime=expiretime)
|
||||
|
||||
def get(self, key, **kwargs):
|
||||
defname = kwargs.pop('defname', None)
|
||||
expiretime = kwargs.pop('expiretime', None)
|
||||
createfunc = kwargs.pop('createfunc', None)
|
||||
|
||||
return self._get_cache(defname, **kwargs).get_value(key, starttime=self.starttime, expiretime=expiretime, createfunc=createfunc)
|
||||
|
||||
def invalidate(self, key, **kwargs):
|
||||
defname = kwargs.pop('defname', None)
|
||||
expiretime = kwargs.pop('expiretime', None)
|
||||
createfunc = kwargs.pop('createfunc', None)
|
||||
|
||||
self._get_cache(defname, **kwargs).remove_value(key, starttime=self.starttime, expiretime=expiretime)
|
||||
|
||||
def invalidate_body(self):
|
||||
self.invalidate('render_body', defname='render_body')
|
||||
|
||||
def invalidate_def(self, name):
|
||||
self.invalidate('render_%s' % name, defname='render_%s' % name)
|
||||
|
||||
def invalidate_closure(self, name):
|
||||
self.invalidate(name, defname=name)
|
||||
|
||||
def _get_cache(self, defname, type=None, **kw):
|
||||
if not cache:
|
||||
raise exceptions.RuntimeException("the Beaker package is required to use cache functionality.")
|
||||
if type == 'memcached':
|
||||
type = 'ext:memcached'
|
||||
if not type:
|
||||
(type, kw) = self.def_regions.get(defname, ('memory', {}))
|
||||
else:
|
||||
self.def_regions[defname] = (type, kw)
|
||||
return cache.get_cache(self.id, type=type, **kw)
|
||||
|
|
@ -1,707 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# codegen.py
|
||||
# Copyright (C) 2006, 2007, 2008 Michael Bayer mike_mp@zzzcomputing.com
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""provides functionality for rendering a parsetree constructing into module source code."""
|
||||
|
||||
import time
|
||||
import re
|
||||
from mako.pygen import PythonPrinter
|
||||
from mako import util, ast, parsetree, filters
|
||||
|
||||
MAGIC_NUMBER = 5
|
||||
|
||||
|
||||
def compile(node, uri, filename=None, default_filters=None, buffer_filters=None, imports=None, source_encoding=None, generate_unicode=True):
|
||||
"""generate module source code given a parsetree node, uri, and optional source filename"""
|
||||
|
||||
buf = util.FastEncodingBuffer(unicode=generate_unicode)
|
||||
|
||||
printer = PythonPrinter(buf)
|
||||
_GenerateRenderMethod(printer, _CompileContext(uri, filename, default_filters, buffer_filters, imports, source_encoding, generate_unicode), node)
|
||||
return buf.getvalue()
|
||||
|
||||
class _CompileContext(object):
|
||||
def __init__(self, uri, filename, default_filters, buffer_filters, imports, source_encoding, generate_unicode):
|
||||
self.uri = uri
|
||||
self.filename = filename
|
||||
self.default_filters = default_filters
|
||||
self.buffer_filters = buffer_filters
|
||||
self.imports = imports
|
||||
self.source_encoding = source_encoding
|
||||
self.generate_unicode = generate_unicode
|
||||
|
||||
class _GenerateRenderMethod(object):
|
||||
"""a template visitor object which generates the full module source for a template."""
|
||||
def __init__(self, printer, compiler, node):
|
||||
self.printer = printer
|
||||
self.last_source_line = -1
|
||||
self.compiler = compiler
|
||||
self.node = node
|
||||
self.identifier_stack = [None]
|
||||
|
||||
self.in_def = isinstance(node, parsetree.DefTag)
|
||||
|
||||
if self.in_def:
|
||||
name = "render_" + node.name
|
||||
args = node.function_decl.get_argument_expressions()
|
||||
filtered = len(node.filter_args.args) > 0
|
||||
buffered = eval(node.attributes.get('buffered', 'False'))
|
||||
cached = eval(node.attributes.get('cached', 'False'))
|
||||
defs = None
|
||||
pagetag = None
|
||||
else:
|
||||
defs = self.write_toplevel()
|
||||
pagetag = self.compiler.pagetag
|
||||
name = "render_body"
|
||||
if pagetag is not None:
|
||||
args = pagetag.body_decl.get_argument_expressions()
|
||||
if not pagetag.body_decl.kwargs:
|
||||
args += ['**pageargs']
|
||||
cached = eval(pagetag.attributes.get('cached', 'False'))
|
||||
else:
|
||||
args = ['**pageargs']
|
||||
cached = False
|
||||
buffered = filtered = False
|
||||
if args is None:
|
||||
args = ['context']
|
||||
else:
|
||||
args = [a for a in ['context'] + args]
|
||||
|
||||
self.write_render_callable(pagetag or node, name, args, buffered, filtered, cached)
|
||||
|
||||
if defs is not None:
|
||||
for node in defs:
|
||||
_GenerateRenderMethod(printer, compiler, node)
|
||||
|
||||
identifiers = property(lambda self:self.identifier_stack[-1])
|
||||
|
||||
def write_toplevel(self):
|
||||
"""traverse a template structure for module-level directives and generate the
|
||||
start of module-level code."""
|
||||
inherit = []
|
||||
namespaces = {}
|
||||
module_code = []
|
||||
encoding =[None]
|
||||
|
||||
self.compiler.pagetag = None
|
||||
|
||||
class FindTopLevel(object):
|
||||
def visitInheritTag(s, node):
|
||||
inherit.append(node)
|
||||
def visitNamespaceTag(s, node):
|
||||
namespaces[node.name] = node
|
||||
def visitPageTag(s, node):
|
||||
self.compiler.pagetag = node
|
||||
def visitCode(s, node):
|
||||
if node.ismodule:
|
||||
module_code.append(node)
|
||||
|
||||
f = FindTopLevel()
|
||||
for n in self.node.nodes:
|
||||
n.accept_visitor(f)
|
||||
|
||||
self.compiler.namespaces = namespaces
|
||||
|
||||
module_ident = util.Set()
|
||||
for n in module_code:
|
||||
module_ident = module_ident.union(n.declared_identifiers())
|
||||
|
||||
module_identifiers = _Identifiers()
|
||||
module_identifiers.declared = module_ident
|
||||
|
||||
# module-level names, python code
|
||||
if not self.compiler.generate_unicode and self.compiler.source_encoding:
|
||||
self.printer.writeline("# -*- encoding:%s -*-" % self.compiler.source_encoding)
|
||||
|
||||
self.printer.writeline("from mako import runtime, filters, cache")
|
||||
self.printer.writeline("UNDEFINED = runtime.UNDEFINED")
|
||||
self.printer.writeline("__M_dict_builtin = dict")
|
||||
self.printer.writeline("__M_locals_builtin = locals")
|
||||
self.printer.writeline("_magic_number = %s" % repr(MAGIC_NUMBER))
|
||||
self.printer.writeline("_modified_time = %s" % repr(time.time()))
|
||||
self.printer.writeline("_template_filename=%s" % repr(self.compiler.filename))
|
||||
self.printer.writeline("_template_uri=%s" % repr(self.compiler.uri))
|
||||
self.printer.writeline("_template_cache=cache.Cache(__name__, _modified_time)")
|
||||
self.printer.writeline("_source_encoding=%s" % repr(self.compiler.source_encoding))
|
||||
if self.compiler.imports:
|
||||
buf = ''
|
||||
for imp in self.compiler.imports:
|
||||
buf += imp + "\n"
|
||||
self.printer.writeline(imp)
|
||||
impcode = ast.PythonCode(buf, source='', lineno=0, pos=0, filename='template defined imports')
|
||||
else:
|
||||
impcode = None
|
||||
|
||||
main_identifiers = module_identifiers.branch(self.node)
|
||||
module_identifiers.topleveldefs = module_identifiers.topleveldefs.union(main_identifiers.topleveldefs)
|
||||
[module_identifiers.declared.add(x) for x in ["UNDEFINED"]]
|
||||
if impcode:
|
||||
[module_identifiers.declared.add(x) for x in impcode.declared_identifiers]
|
||||
|
||||
self.compiler.identifiers = module_identifiers
|
||||
self.printer.writeline("_exports = %s" % repr([n.name for n in main_identifiers.topleveldefs.values()]))
|
||||
self.printer.write("\n\n")
|
||||
|
||||
if len(module_code):
|
||||
self.write_module_code(module_code)
|
||||
|
||||
if len(inherit):
|
||||
self.write_namespaces(namespaces)
|
||||
self.write_inherit(inherit[-1])
|
||||
elif len(namespaces):
|
||||
self.write_namespaces(namespaces)
|
||||
|
||||
return main_identifiers.topleveldefs.values()
|
||||
|
||||
def write_render_callable(self, node, name, args, buffered, filtered, cached):
|
||||
"""write a top-level render callable.
|
||||
|
||||
this could be the main render() method or that of a top-level def."""
|
||||
self.printer.writelines(
|
||||
"def %s(%s):" % (name, ','.join(args)),
|
||||
"context.caller_stack._push_frame()",
|
||||
"try:"
|
||||
)
|
||||
if buffered or filtered or cached:
|
||||
self.printer.writeline("context._push_buffer()")
|
||||
|
||||
self.identifier_stack.append(self.compiler.identifiers.branch(self.node))
|
||||
if not self.in_def and '**pageargs' in args:
|
||||
self.identifier_stack[-1].argument_declared.add('pageargs')
|
||||
|
||||
if not self.in_def and (len(self.identifiers.locally_assigned) > 0 or len(self.identifiers.argument_declared)>0):
|
||||
self.printer.writeline("__M_locals = __M_dict_builtin(%s)" % ','.join(["%s=%s" % (x, x) for x in self.identifiers.argument_declared]))
|
||||
|
||||
self.write_variable_declares(self.identifiers, toplevel=True)
|
||||
|
||||
for n in self.node.nodes:
|
||||
n.accept_visitor(self)
|
||||
|
||||
self.write_def_finish(self.node, buffered, filtered, cached)
|
||||
self.printer.writeline(None)
|
||||
self.printer.write("\n\n")
|
||||
if cached:
|
||||
self.write_cache_decorator(node, name, args, buffered, self.identifiers, toplevel=True)
|
||||
|
||||
def write_module_code(self, module_code):
|
||||
"""write module-level template code, i.e. that which is enclosed in <%! %> tags
|
||||
in the template."""
|
||||
for n in module_code:
|
||||
self.write_source_comment(n)
|
||||
self.printer.write_indented_block(n.text)
|
||||
|
||||
def write_inherit(self, node):
|
||||
"""write the module-level inheritance-determination callable."""
|
||||
self.printer.writelines(
|
||||
"def _mako_inherit(template, context):",
|
||||
"_mako_generate_namespaces(context)",
|
||||
"return runtime._inherit_from(context, %s, _template_uri)" % (node.parsed_attributes['file']),
|
||||
None
|
||||
)
|
||||
|
||||
def write_namespaces(self, namespaces):
|
||||
"""write the module-level namespace-generating callable."""
|
||||
self.printer.writelines(
|
||||
"def _mako_get_namespace(context, name):",
|
||||
"try:",
|
||||
"return context.namespaces[(__name__, name)]",
|
||||
"except KeyError:",
|
||||
"_mako_generate_namespaces(context)",
|
||||
"return context.namespaces[(__name__, name)]",
|
||||
None,None
|
||||
)
|
||||
self.printer.writeline("def _mako_generate_namespaces(context):")
|
||||
for node in namespaces.values():
|
||||
if node.attributes.has_key('import'):
|
||||
self.compiler.has_ns_imports = True
|
||||
self.write_source_comment(node)
|
||||
if len(node.nodes):
|
||||
self.printer.writeline("def make_namespace():")
|
||||
export = []
|
||||
identifiers = self.compiler.identifiers.branch(node)
|
||||
class NSDefVisitor(object):
|
||||
def visitDefTag(s, node):
|
||||
self.write_inline_def(node, identifiers, nested=False)
|
||||
export.append(node.name)
|
||||
vis = NSDefVisitor()
|
||||
for n in node.nodes:
|
||||
n.accept_visitor(vis)
|
||||
self.printer.writeline("return [%s]" % (','.join(export)))
|
||||
self.printer.writeline(None)
|
||||
callable_name = "make_namespace()"
|
||||
else:
|
||||
callable_name = "None"
|
||||
self.printer.writeline("ns = runtime.Namespace(%s, context._clean_inheritance_tokens(), templateuri=%s, callables=%s, calling_uri=_template_uri, module=%s)" % (repr(node.name), node.parsed_attributes.get('file', 'None'), callable_name, node.parsed_attributes.get('module', 'None')))
|
||||
if eval(node.attributes.get('inheritable', "False")):
|
||||
self.printer.writeline("context['self'].%s = ns" % (node.name))
|
||||
self.printer.writeline("context.namespaces[(__name__, %s)] = ns" % repr(node.name))
|
||||
self.printer.write("\n")
|
||||
if not len(namespaces):
|
||||
self.printer.writeline("pass")
|
||||
self.printer.writeline(None)
|
||||
|
||||
def write_variable_declares(self, identifiers, toplevel=False, limit=None):
|
||||
"""write variable declarations at the top of a function.
|
||||
|
||||
the variable declarations are in the form of callable definitions for defs and/or
|
||||
name lookup within the function's context argument. the names declared are based on the
|
||||
names that are referenced in the function body, which don't otherwise have any explicit
|
||||
assignment operation. names that are assigned within the body are assumed to be
|
||||
locally-scoped variables and are not separately declared.
|
||||
|
||||
for def callable definitions, if the def is a top-level callable then a
|
||||
'stub' callable is generated which wraps the current Context into a closure. if the def
|
||||
is not top-level, it is fully rendered as a local closure."""
|
||||
|
||||
# collection of all defs available to us in this scope
|
||||
comp_idents = dict([(c.name, c) for c in identifiers.defs])
|
||||
to_write = util.Set()
|
||||
|
||||
# write "context.get()" for all variables we are going to need that arent in the namespace yet
|
||||
to_write = to_write.union(identifiers.undeclared)
|
||||
|
||||
# write closure functions for closures that we define right here
|
||||
to_write = to_write.union(util.Set([c.name for c in identifiers.closuredefs.values()]))
|
||||
|
||||
# remove identifiers that are declared in the argument signature of the callable
|
||||
to_write = to_write.difference(identifiers.argument_declared)
|
||||
|
||||
# remove identifiers that we are going to assign to. in this way we mimic Python's behavior,
|
||||
# i.e. assignment to a variable within a block means that variable is now a "locally declared" var,
|
||||
# which cannot be referenced beforehand.
|
||||
to_write = to_write.difference(identifiers.locally_declared)
|
||||
|
||||
# if a limiting set was sent, constraint to those items in that list
|
||||
# (this is used for the caching decorator)
|
||||
if limit is not None:
|
||||
to_write = to_write.intersection(limit)
|
||||
|
||||
if toplevel and getattr(self.compiler, 'has_ns_imports', False):
|
||||
self.printer.writeline("_import_ns = {}")
|
||||
self.compiler.has_imports = True
|
||||
for ident, ns in self.compiler.namespaces.iteritems():
|
||||
if ns.attributes.has_key('import'):
|
||||
self.printer.writeline("_mako_get_namespace(context, %s)._populate(_import_ns, %s)" % (repr(ident), repr(re.split(r'\s*,\s*', ns.attributes['import']))))
|
||||
|
||||
for ident in to_write:
|
||||
if ident in comp_idents:
|
||||
comp = comp_idents[ident]
|
||||
if comp.is_root():
|
||||
self.write_def_decl(comp, identifiers)
|
||||
else:
|
||||
self.write_inline_def(comp, identifiers, nested=True)
|
||||
elif ident in self.compiler.namespaces:
|
||||
self.printer.writeline("%s = _mako_get_namespace(context, %s)" % (ident, repr(ident)))
|
||||
else:
|
||||
if getattr(self.compiler, 'has_ns_imports', False):
|
||||
self.printer.writeline("%s = _import_ns.get(%s, context.get(%s, UNDEFINED))" % (ident, repr(ident), repr(ident)))
|
||||
else:
|
||||
self.printer.writeline("%s = context.get(%s, UNDEFINED)" % (ident, repr(ident)))
|
||||
|
||||
self.printer.writeline("__M_writer = context.writer()")
|
||||
|
||||
def write_source_comment(self, node):
|
||||
"""write a source comment containing the line number of the corresponding template line."""
|
||||
if self.last_source_line != node.lineno:
|
||||
self.printer.writeline("# SOURCE LINE %d" % node.lineno)
|
||||
self.last_source_line = node.lineno
|
||||
|
||||
def write_def_decl(self, node, identifiers):
|
||||
"""write a locally-available callable referencing a top-level def"""
|
||||
funcname = node.function_decl.funcname
|
||||
namedecls = node.function_decl.get_argument_expressions()
|
||||
nameargs = node.function_decl.get_argument_expressions(include_defaults=False)
|
||||
if not self.in_def and (len(self.identifiers.locally_assigned) > 0 or len(self.identifiers.argument_declared) > 0):
|
||||
nameargs.insert(0, 'context.locals_(__M_locals)')
|
||||
else:
|
||||
nameargs.insert(0, 'context')
|
||||
self.printer.writeline("def %s(%s):" % (funcname, ",".join(namedecls)))
|
||||
self.printer.writeline("return render_%s(%s)" % (funcname, ",".join(nameargs)))
|
||||
self.printer.writeline(None)
|
||||
|
||||
def write_inline_def(self, node, identifiers, nested):
|
||||
"""write a locally-available def callable inside an enclosing def."""
|
||||
namedecls = node.function_decl.get_argument_expressions()
|
||||
self.printer.writeline("def %s(%s):" % (node.name, ",".join(namedecls)))
|
||||
filtered = len(node.filter_args.args) > 0
|
||||
buffered = eval(node.attributes.get('buffered', 'False'))
|
||||
cached = eval(node.attributes.get('cached', 'False'))
|
||||
self.printer.writelines(
|
||||
"context.caller_stack._push_frame()",
|
||||
"try:"
|
||||
)
|
||||
if buffered or filtered or cached:
|
||||
self.printer.writelines(
|
||||
"context._push_buffer()",
|
||||
)
|
||||
|
||||
identifiers = identifiers.branch(node, nested=nested)
|
||||
|
||||
self.write_variable_declares(identifiers)
|
||||
|
||||
self.identifier_stack.append(identifiers)
|
||||
for n in node.nodes:
|
||||
n.accept_visitor(self)
|
||||
self.identifier_stack.pop()
|
||||
|
||||
self.write_def_finish(node, buffered, filtered, cached)
|
||||
self.printer.writeline(None)
|
||||
if cached:
|
||||
self.write_cache_decorator(node, node.name, namedecls, False, identifiers, inline=True, toplevel=False)
|
||||
|
||||
def write_def_finish(self, node, buffered, filtered, cached, callstack=True):
|
||||
"""write the end section of a rendering function, either outermost or inline.
|
||||
|
||||
this takes into account if the rendering function was filtered, buffered, etc.
|
||||
and closes the corresponding try: block if any, and writes code to retrieve captured content,
|
||||
apply filters, send proper return value."""
|
||||
if not buffered and not cached and not filtered:
|
||||
self.printer.writeline("return ''")
|
||||
if callstack:
|
||||
self.printer.writelines(
|
||||
"finally:",
|
||||
"context.caller_stack._pop_frame()",
|
||||
None
|
||||
)
|
||||
|
||||
if buffered or filtered or cached:
|
||||
if buffered or cached:
|
||||
# in a caching scenario, don't try to get a writer
|
||||
# from the context after popping; assume the caching
|
||||
# implemenation might be using a context with no
|
||||
# extra buffers
|
||||
self.printer.writelines(
|
||||
"finally:",
|
||||
"__M_buf = context._pop_buffer()"
|
||||
)
|
||||
else:
|
||||
self.printer.writelines(
|
||||
"finally:",
|
||||
"__M_buf, __M_writer = context._pop_buffer_and_writer()"
|
||||
)
|
||||
|
||||
if callstack:
|
||||
self.printer.writeline("context.caller_stack._pop_frame()")
|
||||
|
||||
s = "__M_buf.getvalue()"
|
||||
if filtered:
|
||||
s = self.create_filter_callable(node.filter_args.args, s, False)
|
||||
self.printer.writeline(None)
|
||||
if buffered and not cached:
|
||||
s = self.create_filter_callable(self.compiler.buffer_filters, s, False)
|
||||
if buffered or cached:
|
||||
self.printer.writeline("return %s" % s)
|
||||
else:
|
||||
self.printer.writelines(
|
||||
"__M_writer(%s)" % s,
|
||||
"return ''"
|
||||
)
|
||||
|
||||
def write_cache_decorator(self, node_or_pagetag, name, args, buffered, identifiers, inline=False, toplevel=False):
|
||||
"""write a post-function decorator to replace a rendering callable with a cached version of itself."""
|
||||
self.printer.writeline("__M_%s = %s" % (name, name))
|
||||
cachekey = node_or_pagetag.parsed_attributes.get('cache_key', repr(name))
|
||||
cacheargs = {}
|
||||
for arg in (('cache_type', 'type'), ('cache_dir', 'data_dir'), ('cache_timeout', 'expiretime'), ('cache_url', 'url')):
|
||||
val = node_or_pagetag.parsed_attributes.get(arg[0], None)
|
||||
if val is not None:
|
||||
if arg[1] == 'expiretime':
|
||||
cacheargs[arg[1]] = int(eval(val))
|
||||
else:
|
||||
cacheargs[arg[1]] = val
|
||||
else:
|
||||
if self.compiler.pagetag is not None:
|
||||
val = self.compiler.pagetag.parsed_attributes.get(arg[0], None)
|
||||
if val is not None:
|
||||
if arg[1] == 'expiretime':
|
||||
cacheargs[arg[1]] == int(eval(val))
|
||||
else:
|
||||
cacheargs[arg[1]] = val
|
||||
|
||||
self.printer.writeline("def %s(%s):" % (name, ','.join(args)))
|
||||
|
||||
# form "arg1, arg2, arg3=arg3, arg4=arg4", etc.
|
||||
pass_args = [ '=' in a and "%s=%s" % ((a.split('=')[0],)*2) or a for a in args]
|
||||
|
||||
self.write_variable_declares(identifiers, toplevel=toplevel, limit=node_or_pagetag.undeclared_identifiers())
|
||||
if buffered:
|
||||
s = "context.get('local').get_cached(%s, defname=%r, %screatefunc=lambda:__M_%s(%s))" % (cachekey, name, ''.join(["%s=%s, " % (k,v) for k, v in cacheargs.iteritems()]), name, ','.join(pass_args))
|
||||
# apply buffer_filters
|
||||
s = self.create_filter_callable(self.compiler.buffer_filters, s, False)
|
||||
self.printer.writelines("return " + s,None)
|
||||
else:
|
||||
self.printer.writelines(
|
||||
"__M_writer(context.get('local').get_cached(%s, defname=%r, %screatefunc=lambda:__M_%s(%s)))" % (cachekey, name, ''.join(["%s=%s, " % (k,v) for k, v in cacheargs.iteritems()]), name, ','.join(pass_args)),
|
||||
"return ''",
|
||||
None
|
||||
)
|
||||
|
||||
def create_filter_callable(self, args, target, is_expression):
|
||||
"""write a filter-applying expression based on the filters present in the given
|
||||
filter names, adjusting for the global 'default' filter aliases as needed."""
|
||||
def locate_encode(name):
|
||||
if re.match(r'decode\..+', name):
|
||||
return "filters." + name
|
||||
else:
|
||||
return filters.DEFAULT_ESCAPES.get(name, name)
|
||||
|
||||
if 'n' not in args:
|
||||
if is_expression:
|
||||
if self.compiler.pagetag:
|
||||
args = self.compiler.pagetag.filter_args.args + args
|
||||
if self.compiler.default_filters:
|
||||
args = self.compiler.default_filters + args
|
||||
for e in args:
|
||||
# if filter given as a function, get just the identifier portion
|
||||
if e == 'n':
|
||||
continue
|
||||
m = re.match(r'(.+?)(\(.*\))', e)
|
||||
if m:
|
||||
(ident, fargs) = m.group(1,2)
|
||||
f = locate_encode(ident)
|
||||
e = f + fargs
|
||||
else:
|
||||
x = e
|
||||
e = locate_encode(e)
|
||||
assert e is not None
|
||||
target = "%s(%s)" % (e, target)
|
||||
return target
|
||||
|
||||
def visitExpression(self, node):
|
||||
self.write_source_comment(node)
|
||||
if len(node.escapes) or (self.compiler.pagetag is not None and len(self.compiler.pagetag.filter_args.args)) or len(self.compiler.default_filters):
|
||||
s = self.create_filter_callable(node.escapes_code.args, "%s" % node.text, True)
|
||||
self.printer.writeline("__M_writer(%s)" % s)
|
||||
else:
|
||||
self.printer.writeline("__M_writer(%s)" % node.text)
|
||||
|
||||
def visitControlLine(self, node):
|
||||
if node.isend:
|
||||
self.printer.writeline(None)
|
||||
else:
|
||||
self.write_source_comment(node)
|
||||
self.printer.writeline(node.text)
|
||||
def visitText(self, node):
|
||||
self.write_source_comment(node)
|
||||
self.printer.writeline("__M_writer(%s)" % repr(node.content))
|
||||
def visitTextTag(self, node):
|
||||
filtered = len(node.filter_args.args) > 0
|
||||
if filtered:
|
||||
self.printer.writelines(
|
||||
"__M_writer = context._push_writer()",
|
||||
"try:",
|
||||
)
|
||||
for n in node.nodes:
|
||||
n.accept_visitor(self)
|
||||
if filtered:
|
||||
self.printer.writelines(
|
||||
"finally:",
|
||||
"__M_buf, __M_writer = context._pop_buffer_and_writer()",
|
||||
"__M_writer(%s)" % self.create_filter_callable(node.filter_args.args, "__M_buf.getvalue()", False),
|
||||
None
|
||||
)
|
||||
|
||||
def visitCode(self, node):
|
||||
if not node.ismodule:
|
||||
self.write_source_comment(node)
|
||||
self.printer.write_indented_block(node.text)
|
||||
|
||||
if not self.in_def and len(self.identifiers.locally_assigned) > 0:
|
||||
# if we are the "template" def, fudge locally declared/modified variables into the "__M_locals" dictionary,
|
||||
# which is used for def calls within the same template, to simulate "enclosing scope"
|
||||
self.printer.writeline('__M_locals.update(__M_dict_builtin([(__M_key, __M_locals_builtin()[__M_key]) for __M_key in [%s] if __M_key in __M_locals_builtin()]))' % ','.join([repr(x) for x in node.declared_identifiers()]))
|
||||
|
||||
def visitIncludeTag(self, node):
|
||||
self.write_source_comment(node)
|
||||
args = node.attributes.get('args')
|
||||
if args:
|
||||
self.printer.writeline("runtime._include_file(context, %s, _template_uri, %s)" % (node.parsed_attributes['file'], args))
|
||||
else:
|
||||
self.printer.writeline("runtime._include_file(context, %s, _template_uri)" % (node.parsed_attributes['file']))
|
||||
|
||||
def visitNamespaceTag(self, node):
|
||||
pass
|
||||
|
||||
def visitDefTag(self, node):
|
||||
pass
|
||||
|
||||
def visitCallNamespaceTag(self, node):
|
||||
# TODO: we can put namespace-specific checks here, such
|
||||
# as ensure the given namespace will be imported,
|
||||
# pre-import the namespace, etc.
|
||||
self.visitCallTag(node)
|
||||
|
||||
def visitCallTag(self, node):
|
||||
self.printer.writeline("def ccall(caller):")
|
||||
export = ['body']
|
||||
callable_identifiers = self.identifiers.branch(node, nested=True)
|
||||
body_identifiers = callable_identifiers.branch(node, nested=False)
|
||||
# we want the 'caller' passed to ccall to be used for the body() function,
|
||||
# but for other non-body() <%def>s within <%call> we want the current caller off the call stack (if any)
|
||||
body_identifiers.add_declared('caller')
|
||||
|
||||
self.identifier_stack.append(body_identifiers)
|
||||
class DefVisitor(object):
|
||||
def visitDefTag(s, node):
|
||||
self.write_inline_def(node, callable_identifiers, nested=False)
|
||||
export.append(node.name)
|
||||
# remove defs that are within the <%call> from the "closuredefs" defined
|
||||
# in the body, so they dont render twice
|
||||
if node.name in body_identifiers.closuredefs:
|
||||
del body_identifiers.closuredefs[node.name]
|
||||
|
||||
vis = DefVisitor()
|
||||
for n in node.nodes:
|
||||
n.accept_visitor(vis)
|
||||
self.identifier_stack.pop()
|
||||
|
||||
bodyargs = node.body_decl.get_argument_expressions()
|
||||
self.printer.writeline("def body(%s):" % ','.join(bodyargs))
|
||||
# TODO: figure out best way to specify buffering/nonbuffering (at call time would be better)
|
||||
buffered = False
|
||||
if buffered:
|
||||
self.printer.writelines(
|
||||
"context._push_buffer()",
|
||||
"try:"
|
||||
)
|
||||
self.write_variable_declares(body_identifiers)
|
||||
self.identifier_stack.append(body_identifiers)
|
||||
|
||||
for n in node.nodes:
|
||||
n.accept_visitor(self)
|
||||
self.identifier_stack.pop()
|
||||
|
||||
self.write_def_finish(node, buffered, False, False, callstack=False)
|
||||
self.printer.writelines(
|
||||
None,
|
||||
"return [%s]" % (','.join(export)),
|
||||
None
|
||||
)
|
||||
|
||||
self.printer.writelines(
|
||||
# get local reference to current caller, if any
|
||||
"caller = context.caller_stack._get_caller()",
|
||||
# push on caller for nested call
|
||||
"context.caller_stack.nextcaller = runtime.Namespace('caller', context, callables=ccall(caller))",
|
||||
"try:")
|
||||
self.write_source_comment(node)
|
||||
self.printer.writelines(
|
||||
"__M_writer(%s)" % self.create_filter_callable([], node.expression, True),
|
||||
"finally:",
|
||||
"context.caller_stack.nextcaller = None",
|
||||
None
|
||||
)
|
||||
|
||||
class _Identifiers(object):
|
||||
"""tracks the status of identifier names as template code is rendered."""
|
||||
def __init__(self, node=None, parent=None, nested=False):
|
||||
if parent is not None:
|
||||
# things that have already been declared in an enclosing namespace (i.e. names we can just use)
|
||||
self.declared = util.Set(parent.declared).union([c.name for c in parent.closuredefs.values()]).union(parent.locally_declared).union(parent.argument_declared)
|
||||
|
||||
# if these identifiers correspond to a "nested" scope, it means whatever the
|
||||
# parent identifiers had as undeclared will have been declared by that parent,
|
||||
# and therefore we have them in our scope.
|
||||
if nested:
|
||||
self.declared = self.declared.union(parent.undeclared)
|
||||
|
||||
# top level defs that are available
|
||||
self.topleveldefs = util.SetLikeDict(**parent.topleveldefs)
|
||||
else:
|
||||
self.declared = util.Set()
|
||||
self.topleveldefs = util.SetLikeDict()
|
||||
|
||||
# things within this level that are referenced before they are declared (e.g. assigned to)
|
||||
self.undeclared = util.Set()
|
||||
|
||||
# things that are declared locally. some of these things could be in the "undeclared"
|
||||
# list as well if they are referenced before declared
|
||||
self.locally_declared = util.Set()
|
||||
|
||||
# assignments made in explicit python blocks. these will be propigated to
|
||||
# the context of local def calls.
|
||||
self.locally_assigned = util.Set()
|
||||
|
||||
# things that are declared in the argument signature of the def callable
|
||||
self.argument_declared = util.Set()
|
||||
|
||||
# closure defs that are defined in this level
|
||||
self.closuredefs = util.SetLikeDict()
|
||||
|
||||
self.node = node
|
||||
|
||||
if node is not None:
|
||||
node.accept_visitor(self)
|
||||
|
||||
def branch(self, node, **kwargs):
|
||||
"""create a new Identifiers for a new Node, with this Identifiers as the parent."""
|
||||
return _Identifiers(node, self, **kwargs)
|
||||
|
||||
defs = property(lambda self:util.Set(self.topleveldefs.union(self.closuredefs).values()))
|
||||
|
||||
def __repr__(self):
|
||||
return "Identifiers(declared=%s, locally_declared=%s, undeclared=%s, topleveldefs=%s, closuredefs=%s, argumenetdeclared=%s)" % (repr(list(self.declared)), repr(list(self.locally_declared)), repr(list(self.undeclared)), repr([c.name for c in self.topleveldefs.values()]), repr([c.name for c in self.closuredefs.values()]), repr(self.argument_declared))
|
||||
|
||||
def check_declared(self, node):
|
||||
"""update the state of this Identifiers with the undeclared and declared identifiers of the given node."""
|
||||
for ident in node.undeclared_identifiers():
|
||||
if ident != 'context' and ident not in self.declared.union(self.locally_declared):
|
||||
self.undeclared.add(ident)
|
||||
for ident in node.declared_identifiers():
|
||||
self.locally_declared.add(ident)
|
||||
|
||||
def add_declared(self, ident):
|
||||
self.declared.add(ident)
|
||||
if ident in self.undeclared:
|
||||
self.undeclared.remove(ident)
|
||||
|
||||
def visitExpression(self, node):
|
||||
self.check_declared(node)
|
||||
def visitControlLine(self, node):
|
||||
self.check_declared(node)
|
||||
def visitCode(self, node):
|
||||
if not node.ismodule:
|
||||
self.check_declared(node)
|
||||
self.locally_assigned = self.locally_assigned.union(node.declared_identifiers())
|
||||
def visitDefTag(self, node):
|
||||
if node.is_root():
|
||||
self.topleveldefs[node.name] = node
|
||||
elif node is not self.node:
|
||||
self.closuredefs[node.name] = node
|
||||
for ident in node.undeclared_identifiers():
|
||||
if ident != 'context' and ident not in self.declared.union(self.locally_declared):
|
||||
self.undeclared.add(ident)
|
||||
# visit defs only one level deep
|
||||
if node is self.node:
|
||||
for ident in node.declared_identifiers():
|
||||
self.argument_declared.add(ident)
|
||||
for n in node.nodes:
|
||||
n.accept_visitor(self)
|
||||
def visitIncludeTag(self, node):
|
||||
self.check_declared(node)
|
||||
def visitPageTag(self, node):
|
||||
for ident in node.declared_identifiers():
|
||||
self.argument_declared.add(ident)
|
||||
self.check_declared(node)
|
||||
|
||||
def visitCallNamespaceTag(self, node):
|
||||
self.visitCallTag(node)
|
||||
|
||||
def visitCallTag(self, node):
|
||||
if node is self.node:
|
||||
for ident in node.undeclared_identifiers():
|
||||
if ident != 'context' and ident not in self.declared.union(self.locally_declared):
|
||||
self.undeclared.add(ident)
|
||||
for ident in node.declared_identifiers():
|
||||
self.argument_declared.add(ident)
|
||||
for n in node.nodes:
|
||||
n.accept_visitor(self)
|
||||
else:
|
||||
for ident in node.undeclared_identifiers():
|
||||
if ident != 'context' and ident not in self.declared.union(self.locally_declared):
|
||||
self.undeclared.add(ident)
|
||||
|
|
@ -1,256 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# exceptions.py
|
||||
# Copyright (C) 2006, 2007, 2008 Michael Bayer mike_mp@zzzcomputing.com
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""exception classes"""
|
||||
|
||||
import traceback, sys, re
|
||||
from mako import util
|
||||
|
||||
class MakoException(Exception):
    """Base class for all exceptions raised by Mako."""
    pass
class RuntimeException(MakoException):
    """Raised for errors occurring while a template is being rendered."""
    pass
def _format_filepos(lineno, pos, filename):
|
||||
if filename is None:
|
||||
return " at line: %d char: %d" % (lineno, pos)
|
||||
else:
|
||||
return " in file '%s' at line: %d char: %d" % (filename, lineno, pos)
|
||||
class CompileException(MakoException):
    """Raised when a template cannot be compiled into a Python module."""

    def __init__(self, message, source, lineno, pos, filename):
        # Append the human-readable "at line/char" suffix to the message.
        MakoException.__init__(self, message + _format_filepos(lineno, pos, filename))
        # Retain the failure location and the full template source so
        # tools such as RichTraceback can format an excerpt later.
        self.source = source
        self.filename = filename
        self.pos = pos
        self.lineno = lineno
class SyntaxException(MakoException):
    """Raised when template source fails to parse."""

    def __init__(self, message, source, lineno, pos, filename):
        # Location suffix mirrors CompileException's formatting.
        MakoException.__init__(self, message + _format_filepos(lineno, pos, filename))
        self.source = source
        self.filename = filename
        self.pos = pos
        self.lineno = lineno
class TemplateLookupException(MakoException):
    """Raised when a template URI cannot be located by a TemplateLookup."""
    pass
class TopLevelLookupException(TemplateLookupException):
    """Raised when the topmost requested template URI is not found."""
    pass
class RichTraceback(object):
    """pulls the current exception from the sys traceback and extracts Mako-specific
    template information.

    Usage:

    RichTraceback()

    Properties:

    error - the exception instance.
    source - source code of the file where the error occured. if the error occured within a compiled template,
    this is the template source.
    lineno - line number where the error occured. if the error occured within a compiled template, the line number
    is adjusted to that of the template source
    records - a list of 8-tuples containing the original python traceback elements, plus the
    filename, line number, source line, and full template source for the traceline mapped back to its originating source
    template, if any for that traceline (else the fields are None).
    reverse_records - the list of records in reverse
    traceback - a list of 4-tuples, in the same format as a regular python traceback, with template-corresponding
    traceback records replacing the originals
    reverse_traceback - the traceback list in reverse

    """
    def __init__(self, traceback=None):
        # Defaults used when no template source can be located at all.
        (self.source, self.lineno) = ("", 0)
        (t, self.error, self.records) = self._init(traceback)
        if self.error is None:
            self.error = t
        # Compile/syntax errors carry their own source and line number,
        # which override anything derived from the traceback records.
        if isinstance(self.error, CompileException) or isinstance(self.error, SyntaxException):
            import mako.template
            self.source = self.error.source
            self.lineno = self.error.lineno
            self._has_source = True
        self.reverse_records = [r for r in self.records]
        self.reverse_records.reverse()
    def _get_reformatted_records(self, records):
        # rec[6] (template source line) is set only for records that map
        # back to a template; those records are re-pointed at the template
        # filename/lineno instead of the generated module's.
        for rec in records:
            if rec[6] is not None:
                yield (rec[4], rec[5], rec[2], rec[6])
            else:
                yield tuple(rec[0:4])
    traceback = property(lambda self:self._get_reformatted_records(self.records), doc="""
        return a list of 4-tuple traceback records (i.e. normal python format)
        with template-corresponding lines remapped to the originating template
    """)
    reverse_traceback = property(lambda self:self._get_reformatted_records(self.reverse_records), doc="""
        return the same data as traceback, except in reverse order
    """)
    def _init(self, trcback):
        """format a traceback from sys.exc_info() into 7-item tuples, containing
        the regular four traceback tuple items, plus the original template
        filename, the line number adjusted relative to the template source, and
        code line from that line number of the template."""
        import mako.template
        # per-module cache of (module line -> template line map, template lines)
        mods = {}
        if not trcback:
            (type, value, trcback) = sys.exc_info()
        # NOTE(review): when a traceback IS passed in, `type` and `value`
        # are never bound here, yet they are returned at the bottom of
        # this method -- confirm this matches upstream Mako behavior.
        rawrecords = traceback.extract_tb(trcback)
        new_trcback = []
        for filename, lineno, function, line in rawrecords:
            try:
                (line_map, template_lines) = mods[filename]
            except KeyError:
                try:
                    info = mako.template._get_module_info(filename)
                    module_source = info.code
                    template_source = info.source
                    template_filename = info.template_filename or filename
                except KeyError:
                    # not a Mako-generated module; keep a plain record with
                    # the template-specific fields set to None
                    new_trcback.append((filename, lineno, function, line, None, None, None, None))
                    continue

                # Build a module-line -> template-line map by scanning the
                # generated module for its "# SOURCE LINE n" markers.
                template_ln = module_ln = 1
                line_map = {}
                for line in module_source.split("\n"):
                    match = re.match(r'\s*# SOURCE LINE (\d+)', line)
                    if match:
                        template_ln = int(match.group(1))
                    else:
                        template_ln += 1
                    module_ln += 1
                    line_map[module_ln] = template_ln
                template_lines = [line for line in template_source.split("\n")]
                mods[filename] = (line_map, template_lines)

            template_ln = line_map[lineno]
            if template_ln <= len(template_lines):
                template_line = template_lines[template_ln - 1]
            else:
                template_line = None
            new_trcback.append((filename, lineno, function, line, template_filename, template_ln, template_line, template_source))
        if not self.source:
            # Walk the records innermost-first looking for one that mapped
            # to a template; otherwise fall back to reading the raw .py file.
            for l in range(len(new_trcback)-1, 0, -1):
                if new_trcback[l][5]:
                    self.source = new_trcback[l][7]
                    self.lineno = new_trcback[l][5]
                    break
            else:
                try:
                    # A normal .py file (not a Template)
                    fp = open(new_trcback[-1][0])
                    encoding = util.parse_encoding(fp)
                    fp.seek(0)
                    self.source = fp.read()
                    fp.close()
                    if encoding:
                        self.source = self.source.decode(encoding)
                except IOError:
                    self.source = ''
                self.lineno = new_trcback[-1][1]
        return (type, value, new_trcback)
|
||||
def text_error_template(lookup=None):
    """provides a template that renders a stack trace in a similar format to the Python interpreter,
    substituting source template filenames, line numbers and code for that of the originating
    source template, as applicable."""
    import mako.template
    # FIX: the "File" line below previously contained a mangled
    # "$(unknown)" placeholder which was emitted literally; restored to
    # the ${filename} substitution per upstream Mako.
    return mako.template.Template(r"""
<%page args="traceback=None"/>
<%!
    from mako.exceptions import RichTraceback
%>\
<%
    tback = RichTraceback(traceback=traceback)
%>\
Traceback (most recent call last):
% for (filename, lineno, function, line) in tback.traceback:
  File "${filename}", line ${lineno}, in ${function or '?'}
    ${line | unicode.strip}
% endfor
${str(tback.error.__class__.__name__)}: ${str(tback.error)}
""")
|
||||
def html_error_template():
    """provides a template that renders a stack trace in an HTML format, providing an excerpt of
    code as well as substituting source template filenames, line numbers and code
    for that of the originating source template, as applicable.

    the template's default encoding_errors value is 'htmlentityreplace'. the template has
    two options:

    with the full option disabled, only a section of an HTML document is returned.
    with the css option disabled, the default stylesheet won't be included."""
    import mako.template
    # FIX: the stack-trace "location" div previously contained a mangled
    # "$(unknown)" placeholder which was emitted literally; restored to
    # the ${filename} substitution per upstream Mako.
    return mako.template.Template(r"""
<%!
    from mako.exceptions import RichTraceback
%>
<%page args="full=True, css=True, traceback=None"/>
% if full:
<html>
<head>
    <title>Mako Runtime Error</title>
% endif
% if css:
    <style>
        body { font-family:verdana; margin:10px 30px 10px 30px;}
        .stacktrace { margin:5px 5px 5px 5px; }
        .highlight { padding:0px 10px 0px 10px; background-color:#9F9FDF; }
        .nonhighlight { padding:0px; background-color:#DFDFDF; }
        .sample { padding:10px; margin:10px 10px 10px 10px; font-family:monospace; }
        .sampleline { padding:0px 10px 0px 10px; }
        .sourceline { margin:5px 5px 10px 5px; font-family:monospace;}
        .location { font-size:80%; }
    </style>
% endif
% if full:
</head>
<body>
% endif

<h2>Error !</h2>
<%
    tback = RichTraceback(traceback=traceback)
    src = tback.source
    line = tback.lineno
    if src:
        lines = src.split('\n')
    else:
        lines = None
%>
<h3>${str(tback.error.__class__.__name__)}: ${str(tback.error)}</h3>

% if lines:
    <div class="sample">
    <div class="nonhighlight">
% for index in range(max(0, line-4),min(len(lines), line+5)):
    % if index + 1 == line:
<div class="highlight">${index + 1} ${lines[index] | h}</div>
    % else:
<div class="sampleline">${index + 1} ${lines[index] | h}</div>
    % endif
% endfor
    </div>
    </div>
% endif

<div class="stacktrace">
% for (filename, lineno, function, line) in tback.reverse_traceback:
    <div class="location">${filename}, line ${lineno}:</div>
    <div class="sourceline">${line | h}</div>
% endfor
</div>

% if full:
</body>
</html>
% endif
""", output_encoding=sys.getdefaultencoding(), encoding_errors='htmlentityreplace')
|
@ -1,20 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
##############################################################################
|
||||
#
|
||||
# OpenERP, Open Source Management Solution
|
||||
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as
|
||||
# published by the Free Software Foundation, either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
##############################################################################
|
|
@ -1,58 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""adds autohandler functionality to Mako templates.
|
||||
|
||||
requires that the TemplateLookup class is used with templates.
|
||||
|
||||
usage:
|
||||
|
||||
<%!
|
||||
from mako.ext.autohandler import autohandler
|
||||
%>
|
||||
<%inherit file="${autohandler(template, context)}"/>
|
||||
|
||||
|
||||
or with custom autohandler filename:
|
||||
|
||||
<%!
|
||||
from mako.ext.autohandler import autohandler
|
||||
%>
|
||||
<%inherit file="${autohandler(template, context, name='somefilename')}"/>
|
||||
|
||||
"""
|
||||
|
||||
import posixpath, os, re
|
||||
|
||||
def autohandler(template, context, name='autohandler'):
    """Return the URI of the nearest enclosing autohandler template, or None.

    Walks upward from the directory of the current template URI, looking
    for a sibling named *name* at each level via the context's lookup.
    Results are memoized in lookup._uri_cache unless filesystem_checks
    is enabled.
    """
    lookup = context.lookup
    _template_uri = template.module._template_uri
    # fast path: cached answer when filesystem checks are off
    if not lookup.filesystem_checks:
        try:
            return lookup._uri_cache[(autohandler, _template_uri, name)]
        except KeyError:
            pass

    # e.g. '/a/b/page.html' -> ['a', 'b', 'autohandler']
    tokens = re.findall(r'([^/]+)', posixpath.dirname(_template_uri)) + [name]
    while len(tokens):
        path = '/' + '/'.join(tokens)
        # skip the template itself (a template is not its own autohandler)
        if path != _template_uri and _file_exists(lookup, path):
            if not lookup.filesystem_checks:
                return lookup._uri_cache.setdefault((autohandler, _template_uri, name), path)
            else:
                return path
        if len(tokens) == 1:
            break
        # drop one directory level: ['a', 'b', name] -> ['a', name]
        tokens[-2:] = [name]

    # nothing found; cache the negative result when allowed
    if not lookup.filesystem_checks:
        return lookup._uri_cache.setdefault((autohandler, _template_uri, name), None)
    else:
        return None
||||
def _file_exists(lookup, path):
|
||||
psub = re.sub(r'^/', '',path)
|
||||
for d in lookup.directories:
|
||||
if os.path.exists(d + '/' + psub):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
|
@ -1,119 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""gettext message extraction via Babel: http://babel.edgewall.org/"""
|
||||
from StringIO import StringIO
|
||||
|
||||
from babel.messages.extract import extract_python
|
||||
|
||||
from mako import lexer, parsetree
|
||||
|
||||
def extract(fileobj, keywords, comment_tags, options):
    """Extract messages from Mako templates.

    :param fileobj: the file-like object the messages should be extracted from
    :param keywords: a list of keywords (i.e. function names) that should be
    recognized as translation functions
    :param comment_tags: a list of translator tags to search for and include
    in the results
    :param options: a dictionary of additional options (optional)
    :return: an iterator over ``(lineno, funcname, message, comments)`` tuples
    :rtype: ``iterator``
    """
    # 'input_encoding' takes precedence over the generic 'encoding' option
    encoding = options.get('input_encoding', options.get('encoding', None))

    # parse the whole template, then walk the resulting node tree
    template_node = lexer.Lexer(fileobj.read(),
                                input_encoding=encoding).parse()
    for extracted in extract_nodes(template_node.get_children(),
                                   keywords, comment_tags, options):
        yield extracted
||||
def extract_nodes(nodes, keywords, comment_tags, options):
    """Extract messages from Mako's lexer node objects

    :param nodes: an iterable of Mako parsetree.Node objects to extract from
    :param keywords: a list of keywords (i.e. function names) that should be
    recognized as translation functions
    :param comment_tags: a list of translator tags to search for and include
    in the results
    :param options: a dictionary of additional options (optional)
    :return: an iterator over ``(lineno, funcname, message, comments)`` tuples
    :rtype: ``iterator``
    """
    # accumulated (lineno, text) translator-comment pairs awaiting the
    # next message they apply to
    translator_comments = []
    in_translator_comments = False

    for node in nodes:
        child_nodes = None
        if in_translator_comments and isinstance(node, parsetree.Text) and \
                not node.content.strip():
            # Ignore whitespace within translator comments
            continue

        if isinstance(node, parsetree.Comment):
            value = node.text.strip()
            if in_translator_comments:
                # continuation of a translator-comment run
                translator_comments.extend(_split_comment(node.lineno, value))
                continue
            # a comment starting with one of the tags opens a new run
            for comment_tag in comment_tags:
                if value.startswith(comment_tag):
                    in_translator_comments = True
                    translator_comments.extend(_split_comment(node.lineno,
                                                              value))
            continue

        # Determine which embedded Python code to scan for this node type.
        if isinstance(node, parsetree.DefTag):
            code = node.function_decl.code
            child_nodes = node.nodes
        elif isinstance(node, parsetree.CallTag):
            code = node.code.code
            child_nodes = node.nodes
        elif isinstance(node, parsetree.PageTag):
            code = node.body_decl.code
        elif isinstance(node, parsetree.ControlLine):
            if node.isend:
                # "% end..." lines carry no code and reset comment state
                translator_comments = []
                in_translator_comments = False
                continue
            code = node.text
        elif isinstance(node, parsetree.Code):
            # <% and <%! blocks would provide their own translator comments
            translator_comments = []
            in_translator_comments = False

            code = node.code.code
        elif isinstance(node, parsetree.Expression):
            code = node.code.code
        else:
            translator_comments = []
            in_translator_comments = False
            continue

        # Comments don't apply unless they immediately preceed the message
        if translator_comments and \
                translator_comments[-1][0] < node.lineno - 1:
            translator_comments = []
        else:
            # strip the line numbers; only the text is passed onward
            translator_comments = \
                [comment[1] for comment in translator_comments]

        # extract_python wants a byte stream (Python 2 semantics)
        if isinstance(code, unicode):
            code = code.encode('ascii', 'backslashreplace')
        code = StringIO(code)
        for lineno, funcname, messages, python_translator_comments \
                in extract_python(code, keywords, comment_tags, options):
            # re-base line numbers onto the template, not the snippet
            yield (node.lineno + (lineno - 1), funcname, messages,
                   translator_comments + python_translator_comments)

        translator_comments = []
        in_translator_comments = False

        # recurse into <%def>/<%call> bodies
        if child_nodes:
            for extracted in extract_nodes(child_nodes, keywords, comment_tags,
                                           options):
                yield extracted
||||
|
||||
def _split_comment(lineno, comment):
|
||||
"""Return the multiline comment at lineno split into a list of comment line
|
||||
numbers and the accompanying comment line"""
|
||||
return [(lineno + index, line) for index, line in
|
||||
enumerate(comment.splitlines())]
|
|
@ -1,21 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""preprocessing functions, used with the 'preprocessor' argument on Template, TemplateLookup"""
|
||||
|
||||
import re
|
||||
|
||||
def convert_comments(text):
    """preprocess old style comments.

    example:

    from mako.ext.preprocessors import convert_comments
    t = Template(..., preprocessor=convert_comments)

    Rewrites a line-starting single '#' comment into the '##' form;
    '##' comments are left untouched."""
    pattern = re.compile(r'(?<=\n)\s*#[^#]')
    return pattern.sub("##", text)
||||
# TODO
def create_tag(callable):
    """given a callable, extract the *args and **kwargs, and produce a preprocessor
    that will parse for <%<funcname> <args>> and convert to an appropriate <%call> statement.

    this allows any custom tag to be created which looks like a pure Mako-style tag."""
    # Intentionally unimplemented placeholder.
    raise NotImplementedError("Future functionality....")
|
|
@ -1,102 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
try:
|
||||
set
|
||||
except NameError:
|
||||
from sets import Set as set
|
||||
|
||||
from pygments.lexers.web import \
|
||||
HtmlLexer, XmlLexer, JavascriptLexer, CssLexer
|
||||
from pygments.lexers.agile import PythonLexer
|
||||
from pygments.lexer import Lexer, DelegatingLexer, RegexLexer, bygroups, \
|
||||
include, using, this
|
||||
from pygments.token import Error, Punctuation, \
|
||||
Text, Comment, Operator, Keyword, Name, String, Number, Other, Literal
|
||||
from pygments.util import html_doctype_matches, looks_like_xml
|
||||
|
||||
class MakoLexer(RegexLexer):
    # Pygments lexer for standalone Mako template source.
    name = 'Mako'
    aliases = ['mako']
    filenames = ['*.mao']

    tokens = {
        'root': [
            # "% endif" style control-line terminators
            (r'(\s*)(\%)(\s*end(?:\w+))(\n|\Z)',
             bygroups(Text, Comment.Preproc, Keyword, Other)),
            # "% if x:" style control lines; body highlighted as Python
            (r'(\s*)(\%)([^\n]*)(\n|\Z)',
             bygroups(Text, Comment.Preproc, using(PythonLexer), Other)),
            # "##" single-line comments
            (r'(\s*)(##[^\n]*)(\n|\Z)',
             bygroups(Text, Comment.Preproc, Other)),
            # <%doc> ... </%doc> block comments
            (r'''(?s)<%doc>.*?</%doc>''', Comment.Preproc),
            # opening and closing tags such as <%def ...> / </%def>
            (r'(<%)([\w\.\:]+)', bygroups(Comment.Preproc, Name.Builtin), 'tag'),
            (r'(</%)([\w\.\:]+)(>)', bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)),
            (r'<%(?=([\w\.\:]+))', Comment.Preproc, 'ondeftags'),
            # <% ... %> and <%! ... %> embedded python blocks
            (r'(<%(?:!?))(.*?)(%>)(?s)', bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            # ${...} substitutions
            (r'(\$\{)(.*?)(\})',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            # plain text up to (but not consuming) the next Mako construct
            # NOTE(review): whitespace layout inside this verbose (?sx)
            # pattern was reconstructed; insignificant at runtime.
            (r'''(?sx)
                (.+?)                # anything, followed by:
                (?:
                 (?<=\n)(?=%|\#\#) | # an eval or comment line
                 (?=\#\*) |          # multiline comment
                 (?=</?%) |          # a python block
                                     # call start or end
                 (?=\$\{) |          # a substitution
                 (?<=\n)(?=\s*%) |
                                     # - don't consume
                 (\\\n) |            # an escaped newline
                 \Z                  # end of string
                )
            ''', bygroups(Other, Operator)),
            (r'\s+', Text),
        ],
        'ondeftags': [
            (r'<%', Comment.Preproc),
            (r'(?<=<%)(include|inherit|namespace|page)', Name.Builtin),
            include('tag'),
        ],
        'tag': [
            # attribute="value" pairs inside a tag
            (r'((?:\w+)\s*=)\s*(".*?")',
             bygroups(Name.Attribute, String)),
            (r'/?\s*>', Comment.Preproc, '#pop'),
            (r'\s+', Text),
        ],
        'attr': [
            ('".*?"', String, '#pop'),
            ("'.*?'", String, '#pop'),
            (r'[^\s>]+', String, '#pop'),
        ],
    }
|
||||
class MakoHtmlLexer(DelegatingLexer):
    # HTML documents containing Mako constructs.
    name = 'HTML+Mako'
    aliases = ['html+mako']

    def __init__(self, **options):
        super(MakoHtmlLexer, self).__init__(HtmlLexer, MakoLexer,
                                            **options)
|
||||
class MakoXmlLexer(DelegatingLexer):
    # XML documents containing Mako constructs.
    name = 'XML+Mako'
    aliases = ['xml+mako']

    def __init__(self, **options):
        super(MakoXmlLexer, self).__init__(XmlLexer, MakoLexer,
                                           **options)
|
||||
class MakoJavascriptLexer(DelegatingLexer):
    # JavaScript sources containing Mako constructs.
    name = 'JavaScript+Mako'
    aliases = ['js+mako', 'javascript+mako']

    def __init__(self, **options):
        super(MakoJavascriptLexer, self).__init__(JavascriptLexer,
                                                  MakoLexer, **options)
|
||||
class MakoCssLexer(DelegatingLexer):
    # CSS stylesheets containing Mako constructs.
    name = 'CSS+Mako'
    aliases = ['css+mako']

    def __init__(self, **options):
        super(MakoCssLexer, self).__init__(CssLexer, MakoLexer,
                                           **options)
|
|
@ -1,51 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import re, inspect
|
||||
from mako.lookup import TemplateLookup
|
||||
from mako.template import Template
|
||||
|
||||
class TGPlugin(object):
    """TurboGears compatible Template Plugin."""

    def __init__(self, extra_vars_func=None, options=None, extension='mak'):
        # extra_vars_func: optional callable returning a dict merged into
        # every render's variables.
        self.extra_vars_func = extra_vars_func
        self.extension = extension
        if not options:
            options = {}

        # Pull the options out and initialize the lookup
        # ('mako.'-prefixed keys are unprefixed; a few known keys pass
        # straight through).  Python 2 dict API (iteritems).
        lookup_options = {}
        for k, v in options.iteritems():
            if k.startswith('mako.'):
                lookup_options[k[5:]] = v
            elif k in ['directories', 'filesystem_checks', 'module_directory']:
                lookup_options[k] = v
        self.lookup = TemplateLookup(**lookup_options)

        self.tmpl_options = {}
        # transfer lookup args to template args, based on those available
        # in getargspec
        for kw in inspect.getargspec(Template.__init__)[0]:
            if kw in lookup_options:
                self.tmpl_options[kw] = lookup_options[kw]

    def load_template(self, templatename, template_string=None):
        """Loads a template from a file or a string"""
        if template_string is not None:
            return Template(template_string, **self.tmpl_options)
        # Translate TG dot notation to normal / template path
        if '/' not in templatename:
            templatename = '/' + templatename.replace('.', '/') + '.' + self.extension

        # Lookup template
        return self.lookup.get_template(templatename)

    def render(self, info, format="html", fragment=False, template=None):
        # `format` and `fragment` are part of the TurboGears plugin
        # interface; they are accepted but unused here.
        if isinstance(template, basestring):
            template = self.load_template(template)

        # Load extra vars func if provided
        if self.extra_vars_func:
            info.update(self.extra_vars_func())

        return template.render(**info)
|
|
@ -1,171 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# filters.py
|
||||
# Copyright (C) 2006, 2007, 2008 Geoffrey T. Dairiki <dairiki@dairiki.org> and Michael Bayer <mike_mp@zzzcomputing.com>
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
|
||||
import re, cgi, urllib, htmlentitydefs, codecs
|
||||
from StringIO import StringIO
|
||||
|
||||
# Mapping used by xml_escape(): literal character -> XML entity reference.
# FIX: the entity strings had been HTML-decoded back to the raw characters
# (e.g. '&' : '&'), which made xml_escape() a no-op; restored per upstream
# Mako filters.py.
xml_escapes = {
    '&' : '&amp;',
    '>' : '&gt;',
    '<' : '&lt;',
    '"' : '&quot;', # also &#34; in html-only
    "'" : '&#39;'   # also &apos; in html-only
}
# XXX: &quot; is valid in HTML and XML
#      &apos; is not valid HTML, but is valid XML
||||
def html_escape(string):
    """Escape &, <, > and double-quote for HTML via cgi.escape(quote=True).

    Note: single quotes are NOT escaped (Python 2 cgi module)."""
    return cgi.escape(string, True)
|
||||
def xml_escape(string):
    """Replace the five XML special characters using the xml_escapes map."""
    return re.sub(r'([&<"\'>])', lambda m: xml_escapes[m.group()], string)
|
||||
def url_escape(string):
    """Quote *string* for use in a URL query string (Python 2 urllib)."""
    # convert into a list of octets
    string = string.encode("utf8")
    return urllib.quote_plus(string)
|
||||
def url_unescape(string):
    """Inverse of url_escape(); decodes to unicode when non-ASCII bytes result."""
    text = urllib.unquote_plus(string)
    if not is_ascii_str(text):
        text = text.decode("utf8")
    return text
|
||||
def trim(string):
    """Filter that strips leading and trailing whitespace."""
    stripped = string.strip()
    return stripped
||||
|
||||
class Decode(object):
    """Attribute-style decode filters: ``decode.utf8(x)`` decodes *x* from
    utf-8 (Python 2 str/unicode semantics)."""
    def __getattr__(self, key):
        # *key* is the encoding name; build a decoder function on the fly.
        def decode(x):
            if isinstance(x, unicode):
                return x
            elif not isinstance(x, str):
                # non-string objects are stringified first
                return unicode(str(x), encoding=key)
            else:
                return unicode(x, encoding=key)
        return decode
# Singleton referenced by the 'decode' entry of DEFAULT_ESCAPES.
decode = Decode()
|
||||
|
||||
# Pattern matching byte strings composed entirely of 7-bit characters.
_ASCII_re = re.compile(r'\A[\x00-\x7f]*\Z')

def is_ascii_str(text):
    """Truthy when *text* is a plain str containing only ASCII characters."""
    if not isinstance(text, str):
        return False
    return _ASCII_re.match(text)
|
||||
################################################################
|
||||
|
||||
class XMLEntityEscaper(object):
    """Escapes/unescapes characters to and from XML entity references,
    given codepoint<->name mappings (e.g. from htmlentitydefs)."""
    def __init__(self, codepoint2name, name2codepoint):
        # unicode.translate() accepts a {codepoint: replacement} mapping.
        self.codepoint2entity = dict([(c, u'&%s;' % n)
                                      for c,n in codepoint2name.iteritems()])
        self.name2codepoint = name2codepoint

    def escape_entities(self, text):
        """Replace characters with their character entity references.

        Only characters corresponding to a named entity are replaced.
        """
        return unicode(text).translate(self.codepoint2entity)

    def __escape(self, m):
        # regex callback: named entity when available, otherwise a
        # numeric character reference.
        codepoint = ord(m.group())
        try:
            return self.codepoint2entity[codepoint]
        except (KeyError, IndexError):
            return '&#x%X;' % codepoint


    # characters requiring escaping: XML specials plus all non-ASCII
    __escapable = re.compile(r'["&<>]|[^\x00-\x7f]')

    def escape(self, text):
        """Replace characters with their character references.

        Replace characters by their named entity references.
        Non-ASCII characters, if they do not have a named entity reference,
        are replaced by numerical character references.

        The return value is guaranteed to be ASCII.
        """
        return self.__escapable.sub(self.__escape, unicode(text)
                                    ).encode('ascii')

    # XXX: This regexp will not match all valid XML entity names__.
    # (It punts on details involving involving CombiningChars and Extenders.)
    #
    # .. __: http://www.w3.org/TR/2000/REC-xml-20001006#NT-EntityRef
    __characterrefs = re.compile(r'''& (?:
                                          \#(\d+)
                                        | \#x([\da-f]+)
                                        | ( (?!\d) [:\w] [-.:\w]+ )
                                        ) ;''',
                                 re.X | re.UNICODE)

    def __unescape(self, m):
        # regex callback: decimal ref, hex ref, or named entity
        dval, hval, name = m.groups()
        if dval:
            codepoint = int(dval)
        elif hval:
            codepoint = int(hval, 16)
        else:
            codepoint = self.name2codepoint.get(name, 0xfffd)
            # U+FFFD = "REPLACEMENT CHARACTER"
        if codepoint < 128:
            return chr(codepoint)
        return unichr(codepoint)

    def unescape(self, text):
        """Unescape character references.

        All character references (both entity references and numerical
        character references) are unescaped.
        """
        return self.__characterrefs.sub(self.__unescape, text)
|
||||
|
||||
# Module-level escaper bound to the full HTML entity tables.
_html_entities_escaper = XMLEntityEscaper(htmlentitydefs.codepoint2name,
                                          htmlentitydefs.name2codepoint)

# Public aliases used as template filters ('entity') and helpers.
html_entities_escape = _html_entities_escaper.escape_entities
html_entities_unescape = _html_entities_escaper.unescape
|
||||
|
||||
def htmlentityreplace_errors(ex):
    """An encoding error handler.

    This python `codecs`_ error handler replaces unencodable
    characters with HTML entities, or, if no HTML entity exists for
    the character, XML character references.

    >>> u'The cost was \u20ac12.'.encode('latin1', 'htmlentityreplace')
    'The cost was &euro;12.'
    """
    if isinstance(ex, UnicodeEncodeError):
        # Handle encoding errors: escape the offending slice and resume
        # encoding after it.
        bad_text = ex.object[ex.start:ex.end]
        text = _html_entities_escaper.escape(bad_text)
        return (unicode(text), ex.end)
    # decoding (and other) errors are not handled here
    raise ex

# Make the handler available as encode(..., 'htmlentityreplace').
codecs.register_error('htmlentityreplace', htmlentityreplace_errors)
||||
|
||||
|
||||
# TODO: options to make this dynamic per-compilation will be added in a later release
# Maps the built-in filter names usable in ${expr | name} expressions to
# the code the compiler emits for them.
DEFAULT_ESCAPES = {
    'x':'filters.xml_escape',
    'h':'filters.html_escape',
    'u':'filters.url_escape',
    'trim':'filters.trim',
    'entity':'filters.html_entities_escape',
    'unicode':'unicode',
    'decode':'decode',
    'str':'str',
    'n':'n'
}
||||
|
||||
|
|
@ -1,329 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# lexer.py
|
||||
# Copyright (C) 2006, 2007, 2008 Michael Bayer mike_mp@zzzcomputing.com
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""provides the Lexer class for parsing template strings into parse trees."""
|
||||
|
||||
import re, codecs
|
||||
from mako import parsetree, exceptions
|
||||
from mako.pygen import adjust_whitespace
|
||||
|
||||
_regexp_cache = {}
|
||||
|
||||
class Lexer(object):
|
||||
    def __init__(self, text, filename=None, disable_unicode=False, input_encoding=None, preprocessor=None):
        """Set up lexing state for *text*; parsing happens in parse()."""
        self.text = text
        self.filename = filename
        self.template = parsetree.TemplateNode(self.filename)
        # position of the most recent successful match (used for error
        # reporting via exception_kwargs)
        self.matched_lineno = 1
        self.matched_charpos = 0
        self.lineno = 1
        self.match_position = 0
        # stacks of currently-open tags and control lines
        self.tag = []
        self.control_line = []
        self.disable_unicode = disable_unicode
        self.encoding = input_encoding
        # normalize preprocessor into a list of callables
        if preprocessor is None:
            self.preprocessor = []
        elif not hasattr(preprocessor, '__iter__'):
            self.preprocessor = [preprocessor]
        else:
            self.preprocessor = preprocessor

    # keyword arguments handed to exceptions raised during lexing, so
    # they carry source/line/char/filename context
    exception_kwargs = property(lambda self:{'source':self.text, 'lineno':self.matched_lineno, 'pos':self.matched_charpos, 'filename':self.filename})
|
||||
    def match(self, regexp, flags=None):
        """match the given regular expression string and flags to the current text position.

        if a match occurs, update the current text and line position."""
        mp = self.match_position
        # compiled patterns are cached module-wide, keyed on (pattern, flags)
        try:
            reg = _regexp_cache[(regexp, flags)]
        except KeyError:
            if flags:
                reg = re.compile(regexp, flags)
            else:
                reg = re.compile(regexp)
            _regexp_cache[(regexp, flags)] = reg

        match = reg.match(self.text, self.match_position)
        if match:
            (start, end) = match.span()
            # always advance at least one character so a zero-width match
            # cannot loop forever
            if end == start:
                self.match_position = end + 1
            else:
                self.match_position = end
            self.matched_lineno = self.lineno
            lines = re.findall(r"\n", self.text[mp:self.match_position])
            # scan backwards to the previous newline to find the column.
            # NOTE(review): self.textlength is not assigned in __init__;
            # presumably it is set before matching begins (e.g. in
            # parse()) -- confirm.
            cp = mp - 1
            while (cp >= 0 and cp<self.textlength and self.text[cp] != '\n'):
                cp -=1
            self.matched_charpos = mp - cp
            self.lineno += len(lines)
            #print "MATCHED:", match.group(0), "LINE START:", self.matched_lineno, "LINE END:", self.lineno
        #print "MATCH:", regexp, "\n", self.text[mp : mp + 15], (match and "TRUE" or "FALSE")
        return match
|
||||
    def parse_until_text(self, *text):
        """Consume input until one of the *text* regexps matches, skipping
        over comments and string literals; returns the tuple
        (consumed_text, matched_token)."""
        startpos = self.match_position
        while True:
            # skip python comments
            match = self.match(r'#.*\n')
            if match:
                continue
            # skip string literals of any quoting style in one piece
            match = self.match(r'(\"\"\"|\'\'\'|\"|\')')
            if match:
                m = self.match(r'.*?%s' % match.group(1), re.S)
                if not m:
                    raise exceptions.SyntaxException("Unmatched '%s'" % match.group(1), **self.exception_kwargs)
            else:
                match = self.match(r'(%s)' % r'|'.join(text))
                if match:
                    # return text up to (but excluding) the terminator
                    return (self.text[startpos:self.match_position-len(match.group(1))], match.group(1))
                else:
                    # consume ordinary text up to the next "interesting"
                    # character (quote, comment, or terminator)
                    match = self.match(r".*?(?=\"|\'|#|%s)" % r'|'.join(text), re.S)
                    if not match:
                        raise exceptions.SyntaxException("Expected: %s" % ','.join(text), **self.exception_kwargs)
|
||||
def append_node(self, nodecls, *args, **kwargs):
    """construct a parse-tree node of class *nodecls* at the current match
    position and attach it to the tree, maintaining the open-tag stack and
    the control-line stack."""
    # default the node's location to where the last match occurred
    kwargs.setdefault('source', self.text)
    kwargs.setdefault('lineno', self.matched_lineno)
    kwargs.setdefault('pos', self.matched_charpos)
    kwargs['filename'] = self.filename
    node = nodecls(*args, **kwargs)
    if len(self.tag):
        # nested inside the innermost open tag
        self.tag[-1].nodes.append(node)
    else:
        self.template.nodes.append(node)
    if isinstance(node, parsetree.Tag):
        if len(self.tag):
            node.parent = self.tag[-1]
        # this tag becomes the innermost open tag until its close is seen
        self.tag.append(node)
    elif isinstance(node, parsetree.ControlLine):
        if node.isend:
            # "% end<kw>" closes the innermost control line
            self.control_line.pop()
        elif node.is_primary:
            # "% if/for/while/try" opens a new control block
            self.control_line.append(node)
        elif len(self.control_line) and not self.control_line[-1].is_ternary(node.keyword):
            # e.g. a "% else" whose enclosing keyword cannot take it
            raise exceptions.SyntaxException("Keyword '%s' not a legal ternary for keyword '%s'" % (node.keyword, self.control_line[-1].keyword), **self.exception_kwargs)
|
||||
|
||||
def escape_code(self, text):
    """Reduce *text* to ASCII with backslash escapes when the lexer is
    unicode-aware and has a known encoding; otherwise return it untouched."""
    if self.disable_unicode or not self.encoding:
        return text
    return text.encode('ascii', 'backslashreplace')
|
||||
|
||||
def parse(self):
    """run the lexer over the full template text and return the root
    template node."""
    # apply user-supplied preprocessors to the raw text first
    for preproc in self.preprocessor:
        self.text = preproc(self.text)
    if not isinstance(self.text, unicode) and self.text.startswith(codecs.BOM_UTF8):
        # strip a UTF-8 byte-order mark; the BOM fixes the encoding, so a
        # conflicting magic encoding comment is an error
        self.text = self.text[len(codecs.BOM_UTF8):]
        parsed_encoding = 'utf-8'
        me = self.match_encoding()
        if me is not None and me != 'utf-8':
            raise exceptions.CompileException("Found utf-8 BOM in file, with conflicting magic encoding comment of '%s'" % me, self.text.decode('utf-8', 'ignore'), 0, 0, self.filename)
    else:
        parsed_encoding = self.match_encoding()
    if parsed_encoding:
        self.encoding = parsed_encoding
    # decode byte input to unicode unless unicode handling is disabled
    if not self.disable_unicode and not isinstance(self.text, unicode):
        if self.encoding:
            try:
                self.text = self.text.decode(self.encoding)
            except UnicodeDecodeError, e:
                raise exceptions.CompileException("Unicode decode operation of encoding '%s' failed" % self.encoding, self.text.decode('utf-8', 'ignore'), 0, 0, self.filename)
        else:
            # no declared encoding: fall back to the default (ascii) codec
            try:
                self.text = self.text.decode()
            except UnicodeDecodeError, e:
                raise exceptions.CompileException("Could not read template using encoding of 'ascii'. Did you forget a magic encoding comment?", self.text.decode('utf-8', 'ignore'), 0, 0, self.filename)

    self.textlength = len(self.text)

    # main scan loop: each match_* method consumes input and returns a
    # truthy value on success; the order of attempts matters
    while (True):
        if self.match_position > self.textlength:
            break

        if self.match_end():
            break
        if self.match_expression():
            continue
        if self.match_control_line():
            continue
        if self.match_comment():
            continue
        if self.match_tag_start():
            continue
        if self.match_tag_end():
            continue
        if self.match_python_block():
            continue
        if self.match_text():
            continue

        # nothing matched and we are not at the end: internal lexer error
        if self.match_position > self.textlength:
            break
        raise exceptions.CompileException("assertion failed")

    # any tag or control keyword still open at EOF is a syntax error
    if len(self.tag):
        raise exceptions.SyntaxException("Unclosed tag: <%%%s>" % self.tag[-1].keyword, **self.exception_kwargs)
    if len(self.control_line):
        raise exceptions.SyntaxException("Unterminated control keyword: '%s'" % self.control_line[-1].keyword, self.text, self.control_line[-1].lineno, self.control_line[-1].pos, self.filename)
    return self.template
|
||||
|
||||
def match_encoding(self):
    """Consume a magic-comment encoding declaration at the current
    position and return the encoding name, or None when absent."""
    m = self.match(r'#.*coding[:=]\s*([-\w.]+).*\r?\n')
    return m.group(1) if m else None
|
||||
|
||||
def match_tag_start(self):
    """match the opening of a <%name ...> tag, parse its attribute string,
    and append a parsetree.Tag node (pushing it onto the open-tag stack
    unless the tag is self-closing)."""
    match = self.match(r'''
        \<%     # opening tag

        ([\w\.\:]+)   # keyword

        ((?:\s+\w+|=|".*?"|'.*?')*)  # attrname, = sign, string expression

        \s*     # more whitespace

        (/)?>   # closing

        ''',

         re.I | re.S | re.X)

    if match:
        (keyword, attr, isend) = (match.group(1).lower(), match.group(2), match.group(3))
        self.keyword = keyword
        attributes = {}
        if attr:
            # break the raw attribute string into key='val' / key="val" pairs
            for att in re.findall(r"\s*(\w+)\s*=\s*(?:'([^']*)'|\"([^\"]*)\")", attr):
                (key, val1, val2) = att
                # exactly one of the two quote alternatives matched
                text = val1 or val2
                # normalize windows line endings inside attribute values
                text = text.replace('\r\n', '\n')
                attributes[key] = self.escape_code(text)
        self.append_node(parsetree.Tag, keyword, attributes)
        if isend:
            # self-closing tag: pop it right back off the open-tag stack
            self.tag.pop()
        else:
            if keyword == 'text':
                # <%text> consumes its body verbatim, with no further
                # lexing until the closing tag
                match = self.match(r'(.*?)(?=\</%text>)', re.S)
                if not match:
                    raise exceptions.SyntaxException("Unclosed tag: <%%%s>" % self.tag[-1].keyword, **self.exception_kwargs)
                self.append_node(parsetree.Text, match.group(1))
                return self.match_tag_end()
        return True
    else:
        return False
|
||||
|
||||
def match_tag_end(self):
    """Consume a closing </%name> tag, validating that it closes the
    innermost open tag; pop that tag on success."""
    m = self.match(r'\</%[\t ]*(.+?)[\t ]*>')
    if not m:
        return False
    if not len(self.tag):
        raise exceptions.SyntaxException("Closing tag without opening tag: </%%%s>" % m.group(1), **self.exception_kwargs)
    if self.tag[-1].keyword != m.group(1):
        raise exceptions.SyntaxException("Closing tag </%%%s> does not match tag: <%%%s>" % (m.group(1), self.tag[-1].keyword), **self.exception_kwargs)
    self.tag.pop()
    return True
|
||||
|
||||
def match_end(self):
    """Match the end of the source text; return the matched string when
    non-empty, True for an empty terminal match, False when not at the end."""
    m = self.match(r'\Z', re.S)
    if not m:
        return False
    # preserve the historical contract: a non-empty terminal match is
    # returned as-is, an empty one collapses to True
    return m.group() or True
|
||||
|
||||
def match_text(self):
    """match a run of plain template text, stopping just before the next
    construct the lexer recognizes (expression, tag, comment, control
    line, or end of input)."""
    match = self.match(r"""
            (.*?)         # anything, followed by:
            (
             (?<=\n)(?=[ \t]*(?=%|\#\#)) # an eval or line-based comment preceded by a consumed \n and whitespace
             |
             (?=\${)      # an expression
             |
             (?=\#\*)     # multiline comment
             |
             (?=</?[%&])  # a substitution or block or call start or end
                          # - don't consume
             |
             (\\\r?\n)    # an escaped newline - throw away
             |
             \Z           # end of string
            )""", re.X | re.S)

    if match:
        # group(1) is the literal text; the terminator itself is left
        # unconsumed (lookaheads) for the next match_* method
        text = match.group(1)
        self.append_node(parsetree.Text, text)
        return True
    else:
        return False
|
||||
|
||||
def match_python_block(self):
    """Match a <% %> or <%! %> python code block and append a Code node."""
    m = self.match(r"<%(!)?")
    if not m:
        return False
    line, pos = self.matched_lineno, self.matched_charpos
    text, end = self.parse_until_text(r'%>')
    # the trailing newline helps compiler.parse() not complain about indentation
    text = adjust_whitespace(text) + "\n"
    self.append_node(parsetree.Code, self.escape_code(text), m.group(1) == '!', lineno=line, pos=pos)
    return True
|
||||
|
||||
def match_expression(self):
    """Match a ${expr} or ${expr | filters} substitution and append an
    Expression node."""
    m = self.match(r"\${")
    if not m:
        return False
    line, pos = self.matched_lineno, self.matched_charpos
    text, end = self.parse_until_text(r'\|', r'}')
    if end == '|':
        # a '|' introduces the filter list, which runs to the closing brace
        escapes, end = self.parse_until_text(r'}')
    else:
        escapes = ""
    text = text.replace('\r\n', '\n')
    self.append_node(parsetree.Expression, self.escape_code(text), escapes.strip(), lineno=line, pos=pos)
    return True
|
||||
|
||||
def match_control_line(self):
    """match a control line: either a '%'-prefixed python control statement
    (or its matching '% end<keyword>') or a '##' line comment."""
    match = self.match(r"(?<=^)[\t ]*(%|##)[\t ]*((?:(?:\\r?\n)|[^\r\n])*)(?:\r?\n|\Z)", re.M)
    if match:
        operator = match.group(1)
        text = match.group(2)
        if operator == '%':
            # split into optional "end" prefix, the keyword, and the rest
            m2 = re.match(r'(end)?(\w+)\s*(.*)', text)
            if not m2:
                raise exceptions.SyntaxException("Invalid control line: '%s'" % text, **self.exception_kwargs)
            (isend, keyword) = m2.group(1, 2)
            isend = (isend is not None)

            if isend:
                # an end keyword must close the innermost open control line
                if not len(self.control_line):
                    raise exceptions.SyntaxException("No starting keyword '%s' for '%s'" % (keyword, text), **self.exception_kwargs)
                elif self.control_line[-1].keyword != keyword:
                    raise exceptions.SyntaxException("Keyword '%s' doesn't match keyword '%s'" % (text, self.control_line[-1].keyword), **self.exception_kwargs)
            self.append_node(parsetree.ControlLine, keyword, isend, self.escape_code(text))
        else:
            # '##' introduces a single-line comment
            self.append_node(parsetree.Comment, text)
        return True
    else:
        return False
|
||||
|
||||
def match_comment(self):
    """matches the multiline version of a comment"""
    m = self.match(r"<%doc>(.*?)</%doc>", re.S)
    if not m:
        return False
    self.append_node(parsetree.Comment, m.group(1))
    return True
|
||||
|
|
@ -1,152 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# lookup.py
|
||||
# Copyright (C) 2006, 2007, 2008 Michael Bayer mike_mp@zzzcomputing.com
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
import os, stat, posixpath, re
|
||||
from mako import exceptions, util
|
||||
from mako.template import Template
|
||||
|
||||
try:
|
||||
import threading
|
||||
except:
|
||||
import dummy_threading as threading
|
||||
|
||||
class TemplateCollection(object):
    """abstract source of Template objects, addressed by uri."""

    def has_template(self, uri):
        """return True if this collection can resolve *uri* to a template."""
        try:
            self.get_template(uri)
        except exceptions.TemplateLookupException:
            return False
        else:
            return True

    def get_template(self, uri, relativeto=None):
        """resolve *uri* to a Template; must be provided by subclasses."""
        raise NotImplementedError()

    def filename_to_uri(self, uri, filename):
        """convert the given filename to a uri relative to this TemplateCollection."""
        return uri

    def adjust_uri(self, uri, filename):
        """adjust the given uri based on the calling filename.

        when this method is called from the runtime, the 'filename' parameter
        is taken directly to the 'filename' attribute of the calling
        template. Therefore a custom TemplateCollection subclass can place any string
        identifier desired in the "filename" parameter of the Template objects it constructs
        and have them come back here."""
        return uri
|
||||
|
||||
class TemplateLookup(TemplateCollection):
    """a TemplateCollection that loads templates from the filesystem,
    searching a list of directories and caching compiled Template objects."""

    def __init__(self, directories=None, module_directory=None, filesystem_checks=True, collection_size=-1, format_exceptions=False,
        error_handler=None, disable_unicode=False, output_encoding=None, encoding_errors='strict', cache_type=None, cache_dir=None, cache_url=None,
        cache_enabled=True, modulename_callable=None, default_filters=None, buffer_filters=[], imports=None, input_encoding=None, preprocessor=None):
        # NOTE(review): buffer_filters=[] is a mutable default argument; it is
        # only stored, never mutated here, so it is harmless but fragile
        # a single directory may be given as a plain string
        if isinstance(directories, basestring):
            directories = [directories]
        self.directories = [posixpath.normpath(d) for d in directories or []]
        self.module_directory = module_directory
        self.modulename_callable = modulename_callable
        self.filesystem_checks = filesystem_checks
        self.collection_size = collection_size
        # keyword arguments forwarded to every Template this lookup creates
        self.template_args = {
            'format_exceptions':format_exceptions,
            'error_handler':error_handler,
            'disable_unicode':disable_unicode,
            'output_encoding':output_encoding,
            'encoding_errors':encoding_errors,
            'input_encoding':input_encoding,
            'module_directory':module_directory,
            'cache_type':cache_type,
            'cache_dir':cache_dir or module_directory,
            'cache_url':cache_url,
            'cache_enabled':cache_enabled,
            'default_filters':default_filters,
            'buffer_filters':buffer_filters,
            'imports':imports,
            'preprocessor':preprocessor}
        # collection_size == -1 means unbounded caches; otherwise LRU-bounded
        if collection_size == -1:
            self.__collection = {}
            self._uri_cache = {}
        else:
            self.__collection = util.LRUCache(collection_size)
            self._uri_cache = util.LRUCache(collection_size)
        # serializes template compilation in __load across threads
        self._mutex = threading.Lock()

    def get_template(self, uri):
        """return the Template for *uri*, loading and caching it on first
        use; raises TopLevelLookupException when no directory has the file."""
        try:
            if self.filesystem_checks:
                # re-validate existence/mtime of the cached template
                return self.__check(uri, self.__collection[uri])
            else:
                return self.__collection[uri]
        except KeyError:
            # strip leading slashes before joining against each directory
            u = re.sub(r'^\/+', '', uri)
            for dir in self.directories:
                srcfile = posixpath.normpath(posixpath.join(dir, u))
                if os.path.exists(srcfile):
                    return self.__load(srcfile, uri)
            else:
                # for/else: no directory contained the file
                raise exceptions.TopLevelLookupException("Cant locate template for uri '%s'" % uri)

    def adjust_uri(self, uri, relativeto):
        """adjust the given uri based on the calling filename."""

        if uri[0] != '/':
            if relativeto is not None:
                # relative uri: resolve against the calling template's location
                return posixpath.join(posixpath.dirname(relativeto), uri)
            else:
                return '/' + uri
        else:
            return uri


    def filename_to_uri(self, filename):
        """convert *filename* back to a lookup uri, with memoization."""
        try:
            return self._uri_cache[filename]
        except KeyError:
            value = self.__relativeize(filename)
            self._uri_cache[filename] = value
            return value

    def __relativeize(self, filename):
        """return the portion of a filename that is 'relative' to the directories in this lookup."""
        filename = posixpath.normpath(filename)
        for dir in self.directories:
            if filename[0:len(dir)] == dir:
                return filename[len(dir):]
        else:
            # for/else: filename lives under none of our directories
            return None

    def __load(self, filename, uri):
        # serialize compilation; another thread may be loading the same uri
        self._mutex.acquire()
        try:
            try:
                # try returning from collection one more time in case concurrent thread already loaded
                return self.__collection[uri]
            except KeyError:
                pass
            try:
                self.__collection[uri] = Template(uri=uri, filename=posixpath.normpath(filename), lookup=self, module_filename=(self.modulename_callable is not None and self.modulename_callable(filename, uri) or None), **self.template_args)
                return self.__collection[uri]
            except:
                # compilation failed: drop any partial cache entry, re-raise
                self.__collection.pop(uri, None)
                raise
        finally:
            self._mutex.release()

    def __check(self, uri, template):
        """validate a cached template against the filesystem: evict it if the
        source file is gone, reload it if the source is newer than the
        compiled module, otherwise return it as-is."""
        if template.filename is None:
            return template
        if not os.path.exists(template.filename):
            self.__collection.pop(uri, None)
            raise exceptions.TemplateLookupException("Cant locate template for uri '%s'" % uri)
        elif template.module._modified_time < os.stat(template.filename)[stat.ST_MTIME]:
            # source newer than the compiled module: recompile
            self.__collection.pop(uri, None)
            return self.__load(template.filename, uri)
        else:
            return template

    def put_string(self, uri, text):
        """register a template built from a literal string under *uri*."""
        self.__collection[uri] = Template(text, lookup=self, uri=uri, **self.template_args)

    def put_template(self, uri, template):
        """register an already-constructed Template under *uri*."""
        self.__collection[uri] = template
|
||||
|
|
@ -1,422 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# parsetree.py
|
||||
# Copyright (C) 2006, 2007, 2008 Michael Bayer mike_mp@zzzcomputing.com
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""defines the parse tree components for Mako templates."""
|
||||
|
||||
from mako import exceptions, ast, util, filters
|
||||
import re
|
||||
|
||||
class Node(object):
    """base class for a Node in the parse tree."""

    def __init__(self, source, lineno, pos, filename):
        # location of this node within the original template source
        self.source = source
        self.lineno = lineno
        self.pos = pos
        self.filename = filename

    @property
    def exception_kwargs(self):
        """keyword arguments for constructing a Mako exception that points
        at this node's source location."""
        return {
            'source': self.source,
            'lineno': self.lineno,
            'pos': self.pos,
            'filename': self.filename,
        }

    def get_children(self):
        """child nodes of this node; leaf node types return an empty list."""
        return []

    def accept_visitor(self, visitor):
        """visitor-pattern dispatch: invoke visitor.visit<ClassName>(self)
        when present, otherwise recurse into the children."""
        def traverse(node):
            for child in node.get_children():
                child.accept_visitor(visitor)
        getattr(visitor, "visit" + self.__class__.__name__, traverse)(self)
|
||||
|
||||
class TemplateNode(Node):
    """a 'container' node that stores the overall collection of nodes."""

    def __init__(self, filename):
        super(TemplateNode, self).__init__('', 0, 0, filename)
        self.nodes = []            # top-level child nodes, in document order
        self.page_attributes = {}  # attributes collected from the <%page> tag

    def get_children(self):
        return self.nodes

    def __repr__(self):
        return "TemplateNode(%s, %r)" % (
            util.sorted_dict_repr(self.page_attributes),
            self.nodes,
        )
|
||||
|
||||
class ControlLine(Node):
    """defines a control line, a line-oriented python line or end tag.

    e.g.::

        % if foo:
            (markup)
        % endif

    """

    def __init__(self, keyword, isend, text, **kwargs):
        super(ControlLine, self).__init__(**kwargs)
        self.text = text
        self.keyword = keyword
        self.isend = isend
        # only these compound keywords open a block that ternary
        # keywords (else/elif/except/finally) may attach to
        self.is_primary = keyword in ['for', 'if', 'while', 'try']
        if self.isend:
            # an "end<keyword>" line declares and uses nothing
            self._declared_identifiers = []
            self._undeclared_identifiers = []
        else:
            fragment = ast.PythonFragment(text, **self.exception_kwargs)
            self._declared_identifiers = fragment.declared_identifiers
            self._undeclared_identifiers = fragment.undeclared_identifiers

    def declared_identifiers(self):
        return self._declared_identifiers

    def undeclared_identifiers(self):
        return self._undeclared_identifiers

    def is_ternary(self, keyword):
        """return true if the given keyword is a ternary keyword for this ControlLine"""
        ternaries = {
            'if': util.Set(['else', 'elif']),
            'try': util.Set(['except', 'finally']),
            'for': util.Set(['else']),
        }
        return keyword in ternaries.get(self.keyword, [])

    def __repr__(self):
        return "ControlLine(%r, %r, %r, %r)" % (
            self.keyword,
            self.text,
            self.isend,
            (self.lineno, self.pos),
        )
|
||||
|
||||
class Text(Node):
    """defines plain text in the template."""

    def __init__(self, content, **kwargs):
        super(Text, self).__init__(**kwargs)
        # the literal text, exactly as it appeared in the source
        self.content = content

    def __repr__(self):
        return "Text(%r, %r)" % (self.content, (self.lineno, self.pos))
|
||||
|
||||
class Code(Node):
    """defines a Python code block, either inline or module level.

    e.g.::

        inline:
        <%
            x = 12
        %>

        module level:
        <%!
            import logger
        %>

    """

    def __init__(self, text, ismodule, **kwargs):
        super(Code, self).__init__(**kwargs)
        self.text = text
        # True for <%! %> blocks, which execute once at module scope
        self.ismodule = ismodule
        self.code = ast.PythonCode(text, **self.exception_kwargs)

    def declared_identifiers(self):
        return self.code.declared_identifiers

    def undeclared_identifiers(self):
        return self.code.undeclared_identifiers

    def __repr__(self):
        return "Code(%r, %r, %r)" % (self.text, self.ismodule, (self.lineno, self.pos))
|
||||
|
||||
class Comment(Node):
    """defines a comment line.

    # this is a comment

    """

    def __init__(self, text, **kwargs):
        super(Comment, self).__init__(**kwargs)
        self.text = text

    def __repr__(self):
        return "Comment(%r, %r)" % (self.text, (self.lineno, self.pos))
|
||||
|
||||
class Expression(Node):
    """defines an inline expression.

    ${x+y}

    """

    def __init__(self, text, escapes, **kwargs):
        super(Expression, self).__init__(**kwargs)
        self.text = text
        # raw filter string from the "|" section, e.g. "h, trim"
        self.escapes = escapes
        self.escapes_code = ast.ArgumentList(escapes, **self.exception_kwargs)
        self.code = ast.PythonCode(text, **self.exception_kwargs)

    def declared_identifiers(self):
        return []

    def undeclared_identifiers(self):
        # TODO: make the "filter" shortcut list configurable at parse/gen time
        builtin_escapes = util.Set(filters.DEFAULT_ESCAPES.keys())
        return self.code.undeclared_identifiers.union(
            self.escapes_code.undeclared_identifiers.difference(builtin_escapes))

    def __repr__(self):
        return "Expression(%r, %r, %r)" % (
            self.text,
            self.escapes_code.args,
            (self.lineno, self.pos),
        )
|
||||
|
||||
class _TagMeta(type):
    """metaclass to allow Tag to produce a subclass according to its keyword"""

    # maps tag keyword (e.g. 'def', 'include') -> concrete Tag subclass
    _classmap = {}

    def __init__(cls, clsname, bases, dict):
        # register each concrete subclass under its declared __keyword__
        if cls.__keyword__ is not None:
            cls._classmap[cls.__keyword__] = cls
        super(_TagMeta, cls).__init__(clsname, bases, dict)

    def __call__(cls, keyword, attributes, **kwargs):
        # a "ns:defname" keyword is shorthand for calling a namespace def
        if ":" in keyword:
            ns, defname = keyword.split(':')
            return type.__call__(CallNamespaceTag, ns, defname, attributes, **kwargs)

        # otherwise dispatch to the subclass registered for this keyword
        try:
            cls = _TagMeta._classmap[keyword]
        except KeyError:
            raise exceptions.CompileException(
                "No such tag: '%s'" % keyword,
                source=kwargs['source'],
                lineno=kwargs['lineno'],
                pos=kwargs['pos'],
                filename=kwargs['filename']
            )
        return type.__call__(cls, keyword, attributes, **kwargs)
|
||||
|
||||
class Tag(Node):
    """abstract base class for tags.

    <%sometag/>

    <%someothertag>
        stuff
    </%someothertag>

    """

    __metaclass__ = _TagMeta
    __keyword__ = None

    def __init__(self, keyword, attributes, expressions, nonexpressions, required, **kwargs):
        """construct a new Tag instance.

        this constructor not called directly, and is only called by subclasses.

        keyword - the tag keyword

        attributes - raw dictionary of attribute key/value pairs

        expressions - a util.Set of identifiers that are legal attributes, which can also contain embedded expressions

        nonexpressions - a util.Set of identifiers that are legal attributes, which cannot contain embedded expressions

        required - a sequence of attribute names that must be present

        **kwargs - other arguments passed to the Node superclass (lineno, pos)

        """
        super(Tag, self).__init__(**kwargs)
        self.keyword = keyword
        self.attributes = attributes
        self._parse_attributes(expressions, nonexpressions)
        # every required attribute must have survived parsing
        missing = [r for r in required if r not in self.parsed_attributes]
        if len(missing):
            raise exceptions.CompileException(
                "Missing attribute(s): %s" % ",".join([repr(m) for m in missing]),
                **self.exception_kwargs)
        self.parent = None
        self.nodes = []

    def is_root(self):
        """return True if this tag has no parent tag."""
        return self.parent is None

    def get_children(self):
        return self.nodes

    def _parse_attributes(self, expressions, nonexpressions):
        """compile raw attribute strings into python expression source.

        attributes named in *expressions* may embed ${...} expressions, which
        are parsed and concatenated with the literal fragments; attributes
        named in *nonexpressions* must be plain literals."""
        undeclared_identifiers = util.Set()
        self.parsed_attributes = {}
        for key in self.attributes:
            if key in expressions:
                expr = []
                for x in re.split(r'(\${.+?})', self.attributes[key]):
                    m = re.match(r'^\${(.+?)}$', x)
                    if m:
                        code = ast.PythonCode(m.group(1), **self.exception_kwargs)
                        undeclared_identifiers = undeclared_identifiers.union(code.undeclared_identifiers)
                        expr.append("(%s)" % m.group(1))
                    else:
                        if x:
                            expr.append(repr(x))
                self.parsed_attributes[key] = " + ".join(expr) or repr('')
            elif key in nonexpressions:
                # BUGFIX: the '$' must be escaped; unescaped it is an
                # end-of-string anchor and the pattern could never match,
                # so embedded expressions were silently accepted here
                if re.search(r'\${.+?}', self.attributes[key]):
                    # also fixes the "Attibute" typo in the message
                    raise exceptions.CompileException(
                        "Attribute '%s' in tag '%s' does not allow embedded expressions" % (key, self.keyword),
                        **self.exception_kwargs)
                self.parsed_attributes[key] = repr(self.attributes[key])
            else:
                raise exceptions.CompileException("Invalid attribute for tag '%s': '%s'" % (self.keyword, key), **self.exception_kwargs)
        self.expression_undeclared_identifiers = undeclared_identifiers

    def declared_identifiers(self):
        return []

    def undeclared_identifiers(self):
        return self.expression_undeclared_identifiers

    def __repr__(self):
        return "%s(%r, %s, %r, %r)" % (self.__class__.__name__,
            self.keyword,
            util.sorted_dict_repr(self.attributes),
            (self.lineno, self.pos),
            [repr(x) for x in self.nodes]
        )
|
||||
|
||||
class IncludeTag(Tag):
    """<%include file="..."/> - renders another template in place."""
    __keyword__ = 'include'

    def __init__(self, keyword, attributes, **kwargs):
        super(IncludeTag, self).__init__(keyword, attributes, ('file', 'import', 'args'), (), ('file',), **kwargs)
        # wrap the args string in a dummy call so it parses as an argument list
        self.page_args = ast.PythonCode("__DUMMY(%s)" % attributes.get('args', ''), **self.exception_kwargs)

    def declared_identifiers(self):
        return []

    def undeclared_identifiers(self):
        # the synthetic __DUMMY name is an artifact, not a real identifier
        idents = self.page_args.undeclared_identifiers.difference(util.Set(["__DUMMY"]))
        return idents.union(super(IncludeTag, self).undeclared_identifiers())
|
||||
|
||||
class NamespaceTag(Tag):
    """<%namespace name="..." file="..."/> - makes defs from another
    template or module available under a name."""
    __keyword__ = 'namespace'

    def __init__(self, keyword, attributes, **kwargs):
        super(NamespaceTag, self).__init__(keyword, attributes, (), ('name','inheritable','file','import','module'), (), **kwargs)
        # anonymous namespaces get a unique generated name
        self.name = attributes.get('name', '__anon_%s' % hex(abs(id(self))))
        if 'name' not in attributes and 'import' not in attributes:
            raise exceptions.CompileException("'name' and/or 'import' attributes are required for <%namespace>", **self.exception_kwargs)

    def declared_identifiers(self):
        return []
|
||||
|
||||
class TextTag(Tag):
    """<%text filter="..."> - a block whose contents pass through the
    lexer verbatim."""
    __keyword__ = 'text'

    def __init__(self, keyword, attributes, **kwargs):
        # BUGFIX: ('filter') is just the string 'filter', so the membership
        # test accepted any substring of it (e.g. 'fil') as a legal
        # attribute name; a one-element tuple tests exact names only
        super(TextTag, self).__init__(keyword, attributes, (), ('filter',), (), **kwargs)
        self.filter_args = ast.ArgumentList(attributes.get('filter', ''), **self.exception_kwargs)
|
||||
|
||||
class DefTag(Tag):
    """<%def name="funcname(args)"> - defines a render callable."""
    __keyword__ = 'def'

    def __init__(self, keyword, attributes, **kwargs):
        super(DefTag, self).__init__(
            keyword,
            attributes,
            ('buffered', 'cached', 'cache_key', 'cache_timeout', 'cache_type', 'cache_dir', 'cache_url'),
            ('name','filter'),
            ('name',),
            **kwargs)
        name = attributes['name']
        # a bare identifier means the author forgot the argument list
        if re.match(r'^[\w_]+$',name):
            raise exceptions.CompileException("Missing parenthesis in %def", **self.exception_kwargs)
        self.function_decl = ast.FunctionDecl("def " + name + ":pass", **self.exception_kwargs)
        self.name = self.function_decl.funcname
        self.filter_args = ast.ArgumentList(attributes.get('filter', ''), **self.exception_kwargs)

    def declared_identifiers(self):
        return self.function_decl.argnames

    def undeclared_identifiers(self):
        # names referenced by argument defaults, plus non-builtin filters
        undeclared = []
        for default in self.function_decl.defaults:
            undeclared += list(ast.PythonCode(default, **self.exception_kwargs).undeclared_identifiers)
        return undeclared + list(self.filter_args.undeclared_identifiers.difference(util.Set(filters.DEFAULT_ESCAPES.keys())))
|
||||
|
||||
class CallTag(Tag):
    """<%call expr="..."> - invokes a def, supplying the tag body to it."""
    __keyword__ = 'call'

    def __init__(self, keyword, attributes, **kwargs):
        # BUGFIX: ('args') is just the string 'args', so the membership test
        # accepted any substring of it (e.g. 'ar') as a legal expression
        # attribute; a one-element tuple tests exact names only
        super(CallTag, self).__init__(keyword, attributes, ('args',), ('expr',), ('expr',), **kwargs)
        self.expression = attributes['expr']
        self.code = ast.PythonCode(self.expression, **self.exception_kwargs)
        # 'args' declares the names the call body receives
        self.body_decl = ast.FunctionArgs(attributes.get('args', ''), **self.exception_kwargs)

    def declared_identifiers(self):
        return self.code.declared_identifiers.union(self.body_decl.argnames)

    def undeclared_identifiers(self):
        return self.code.undeclared_identifiers
|
||||
|
||||
class CallNamespaceTag(Tag):
    """<%ns:defname .../> - produced by _TagMeta for the "ns:defname"
    keyword form; arbitrary attributes become keyword arguments of the
    namespace def call."""

    def __init__(self, namespace, defname, attributes, **kwargs):
        super(CallNamespaceTag, self).__init__(
            namespace + ":" + defname,
            attributes,
            # every attribute supplied is legal and may contain expressions
            tuple(attributes.keys()) + ('args', ),
            (),
            (),
            **kwargs)
        # render the call as ns.defname(attr1=..., attr2=...); 'args' is the
        # body's own argument list, so it is excluded from the kwargs
        self.expression = "%s.%s(%s)" % (namespace, defname, ",".join(["%s=%s" % (k, v) for k, v in self.parsed_attributes.iteritems() if k != 'args']))
        self.code = ast.PythonCode(self.expression, **self.exception_kwargs)
        self.body_decl = ast.FunctionArgs(attributes.get('args', ''), **self.exception_kwargs)

    def declared_identifiers(self):
        return self.code.declared_identifiers.union(self.body_decl.argnames)

    def undeclared_identifiers(self):
        return self.code.undeclared_identifiers
|
||||
|
||||
class InheritTag(Tag):
    """<%inherit file="..."/> - establishes the parent template."""
    __keyword__ = 'inherit'

    def __init__(self, keyword, attributes, **kwargs):
        # 'file' may contain expressions and is the only (required) attribute
        super(InheritTag, self).__init__(keyword, attributes, ('file',), (), ('file',), **kwargs)
|
||||
|
||||
class PageTag(Tag):
    """<%page args="..." .../> - declares template-level arguments and
    caching/filtering options."""
    __keyword__ = 'page'

    def __init__(self, keyword, attributes, **kwargs):
        super(PageTag, self).__init__(
            keyword,
            attributes,
            ('cached', 'cache_key', 'cache_timeout', 'cache_type', 'cache_dir', 'cache_url', 'args', 'expression_filter'),
            (),
            (),
            **kwargs)
        # 'args' defines the template body's argument signature
        self.body_decl = ast.FunctionArgs(attributes.get('args', ''), **self.exception_kwargs)
        # 'expression_filter' applies to every ${} expression in the template
        self.filter_args = ast.ArgumentList(attributes.get('expression_filter', ''), **self.exception_kwargs)

    def declared_identifiers(self):
        return self.body_decl.argnames
|
||||
|
||||
|
|
@ -1,268 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# pygen.py
|
||||
# Copyright (C) 2006, 2007, 2008 Michael Bayer mike_mp@zzzcomputing.com
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""utilities for generating and formatting literal Python code."""
|
||||
|
||||
import re, string
|
||||
from StringIO import StringIO
|
||||
|
||||
class PythonPrinter(object):
|
||||
def __init__(self, stream):
    """create a PythonPrinter writing to *stream* (any object exposing a
    write() method)."""
    # indentation counter
    self.indent = 0

    # a stack storing information about why we incremented
    # the indentation counter, to help us determine if we
    # should decrement it
    self.indent_detail = []

    # the string of whitespace multiplied by the indent
    # counter to produce a line
    self.indentstring = "    "

    # the stream we are writing to
    self.stream = stream

    # a list of lines that represents a buffered "block" of code,
    # which can be later printed relative to an indent level
    self.line_buffer = []

    # True while writeline() calls are being emitted directly; cleared
    # when an already-indented block is buffered instead
    self.in_indent_lines = False

    self._reset_multi_line_flags()
|
||||
|
||||
def write(self, text):
    """emit *text* to the underlying stream verbatim, with no
    indentation handling."""
    self.stream.write(text)
|
||||
|
||||
def write_indented_block(self, block):
    """buffer a line or lines of python which already contain indentation.

    The indentation of the total block of lines will be adjusted to that of
    the current indent level."""
    # leave direct-write mode; the buffered block is flushed later,
    # re-based against the current indent
    self.in_indent_lines = False
    self.line_buffer.extend(re.split(r'\r?\n', block))
|
||||
|
||||
def writelines(self, *lines):
    """print a series of lines of python."""
    for each in lines:
        self.writeline(each)
|
||||
|
||||
def writeline(self, line):
|
||||
"""print a line of python, indenting it according to the current indent level.
|
||||
|
||||
this also adjusts the indentation counter according to the content of the line."""
|
||||
|
||||
if not self.in_indent_lines:
|
||||
self._flush_adjusted_lines()
|
||||
self.in_indent_lines = True
|
||||
|
||||
decreased_indent = False
|
||||
|
||||
if (line is None or
|
||||
re.match(r"^\s*#",line) or
|
||||
re.match(r"^\s*$", line)
|
||||
):
|
||||
hastext = False
|
||||
else:
|
||||
hastext = True
|
||||
|
||||
is_comment = line and len(line) and line[0] == '#'
|
||||
|
||||
# see if this line should decrease the indentation level
|
||||
if (not decreased_indent and
|
||||
not is_comment and
|
||||
(not hastext or self._is_unindentor(line))
|
||||
):
|
||||
|
||||
if self.indent > 0:
|
||||
self.indent -=1
|
||||
# if the indent_detail stack is empty, the user
|
||||
# probably put extra closures - the resulting
|
||||
# module wont compile.
|
||||
if len(self.indent_detail) == 0:
|
||||
raise "Too many whitespace closures"
|
||||
self.indent_detail.pop()
|
||||
|
||||
if line is None:
|
||||
return
|
||||
|
||||
# write the line
|
||||
self.stream.write(self._indent_line(line) + "\n")
|
||||
|
||||
# see if this line should increase the indentation level.
|
||||
# note that a line can both decrase (before printing) and
|
||||
# then increase (after printing) the indentation level.
|
||||
|
||||
if re.search(r":[ \t]*(?:#.*)?$", line):
|
||||
# increment indentation count, and also
|
||||
# keep track of what the keyword was that indented us,
|
||||
# if it is a python compound statement keyword
|
||||
# where we might have to look for an "unindent" keyword
|
||||
match = re.match(r"^\s*(if|try|elif|while|for)", line)
|
||||
if match:
|
||||
# its a "compound" keyword, so we will check for "unindentors"
|
||||
indentor = match.group(1)
|
||||
self.indent +=1
|
||||
self.indent_detail.append(indentor)
|
||||
else:
|
||||
indentor = None
|
||||
# its not a "compound" keyword. but lets also
|
||||
# test for valid Python keywords that might be indenting us,
|
||||
# else assume its a non-indenting line
|
||||
m2 = re.match(r"^\s*(def|class|else|elif|except|finally)", line)
|
||||
if m2:
|
||||
self.indent += 1
|
||||
self.indent_detail.append(indentor)
|
||||
|
||||
def close(self):
|
||||
"""close this printer, flushing any remaining lines."""
|
||||
self._flush_adjusted_lines()
|
||||
|
||||
def _is_unindentor(self, line):
|
||||
"""return true if the given line is an 'unindentor', relative to the last 'indent' event received."""
|
||||
|
||||
# no indentation detail has been pushed on; return False
|
||||
if len(self.indent_detail) == 0:
|
||||
return False
|
||||
|
||||
indentor = self.indent_detail[-1]
|
||||
|
||||
# the last indent keyword we grabbed is not a
|
||||
# compound statement keyword; return False
|
||||
if indentor is None:
|
||||
return False
|
||||
|
||||
# if the current line doesnt have one of the "unindentor" keywords,
|
||||
# return False
|
||||
match = re.match(r"^\s*(else|elif|except|finally).*\:", line)
|
||||
if not match:
|
||||
return False
|
||||
|
||||
# whitespace matches up, we have a compound indentor,
|
||||
# and this line has an unindentor, this
|
||||
# is probably good enough
|
||||
return True
|
||||
|
||||
# should we decide that its not good enough, heres
|
||||
# more stuff to check.
|
||||
#keyword = match.group(1)
|
||||
|
||||
# match the original indent keyword
|
||||
#for crit in [
|
||||
# (r'if|elif', r'else|elif'),
|
||||
# (r'try', r'except|finally|else'),
|
||||
# (r'while|for', r'else'),
|
||||
#]:
|
||||
# if re.match(crit[0], indentor) and re.match(crit[1], keyword): return True
|
||||
|
||||
#return False
|
||||
|
||||
def _indent_line(self, line, stripspace = ''):
|
||||
"""indent the given line according to the current indent level.
|
||||
|
||||
stripspace is a string of space that will be truncated from the start of the line
|
||||
before indenting."""
|
||||
return re.sub(r"^%s" % stripspace, self.indentstring * self.indent, line)
|
||||
|
||||
def _reset_multi_line_flags(self):
|
||||
"""reset the flags which would indicate we are in a backslashed or triple-quoted section."""
|
||||
(self.backslashed, self.triplequoted) = (False, False)
|
||||
|
||||
def _in_multi_line(self, line):
|
||||
"""return true if the given line is part of a multi-line block, via backslash or triple-quote."""
|
||||
# we are only looking for explicitly joined lines here,
|
||||
# not implicit ones (i.e. brackets, braces etc.). this is just
|
||||
# to guard against the possibility of modifying the space inside
|
||||
# of a literal multiline string with unfortunately placed whitespace
|
||||
|
||||
current_state = (self.backslashed or self.triplequoted)
|
||||
|
||||
if re.search(r"\\$", line):
|
||||
self.backslashed = True
|
||||
else:
|
||||
self.backslashed = False
|
||||
|
||||
triples = len(re.findall(r"\"\"\"|\'\'\'", line))
|
||||
if triples == 1 or triples % 2 != 0:
|
||||
self.triplequoted = not self.triplequoted
|
||||
|
||||
return current_state
|
||||
|
||||
def _flush_adjusted_lines(self):
|
||||
stripspace = None
|
||||
self._reset_multi_line_flags()
|
||||
|
||||
for entry in self.line_buffer:
|
||||
if self._in_multi_line(entry):
|
||||
self.stream.write(entry + "\n")
|
||||
else:
|
||||
entry = string.expandtabs(entry)
|
||||
if stripspace is None and re.search(r"^[ \t]*[^# \t]", entry):
|
||||
stripspace = re.match(r"^([ \t]*)", entry).group(1)
|
||||
self.stream.write(self._indent_line(entry, stripspace) + "\n")
|
||||
|
||||
self.line_buffer = []
|
||||
self._reset_multi_line_flags()
|
||||
|
||||
|
||||
def adjust_whitespace(text):
|
||||
"""remove the left-whitespace margin of a block of Python code."""
|
||||
state = [False, False]
|
||||
(backslashed, triplequoted) = (0, 1)
|
||||
|
||||
def in_multi_line(line):
|
||||
start_state = (state[backslashed] or state[triplequoted])
|
||||
|
||||
if re.search(r"\\$", line):
|
||||
state[backslashed] = True
|
||||
else:
|
||||
state[backslashed] = False
|
||||
|
||||
def match(reg, t):
|
||||
m = re.match(reg, t)
|
||||
if m:
|
||||
return m, t[len(m.group(0)):]
|
||||
else:
|
||||
return None, t
|
||||
|
||||
while line:
|
||||
if state[triplequoted]:
|
||||
m, line = match(r"%s" % state[triplequoted], line)
|
||||
if m:
|
||||
state[triplequoted] = False
|
||||
else:
|
||||
m, line = match(r".*?(?=%s|$)" % state[triplequoted], line)
|
||||
else:
|
||||
m, line = match(r'#', line)
|
||||
if m:
|
||||
return start_state
|
||||
|
||||
m, line = match(r"\"\"\"|\'\'\'", line)
|
||||
if m:
|
||||
state[triplequoted] = m.group(0)
|
||||
continue
|
||||
|
||||
m, line = match(r".*?(?=\"\"\"|\'\'\'|#|$)", line)
|
||||
|
||||
return start_state
|
||||
|
||||
def _indent_line(line, stripspace = ''):
|
||||
return re.sub(r"^%s" % stripspace, '', line)
|
||||
|
||||
lines = []
|
||||
stripspace = None
|
||||
|
||||
for line in re.split(r'\r?\n', text):
|
||||
if in_multi_line(line):
|
||||
lines.append(line)
|
||||
else:
|
||||
line = string.expandtabs(line)
|
||||
if stripspace is None and re.search(r"^[ \t]*[^# \t]", line):
|
||||
stripspace = re.match(r"^([ \t]*)", line).group(1)
|
||||
lines.append(_indent_line(line, stripspace))
|
||||
return "\n".join(lines)
|
|
@ -1,372 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# ast.py
|
||||
# Copyright (C) Mako developers
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Handles parsing of Python code.
|
||||
|
||||
Parsing to AST is done via _ast on Python > 2.5, otherwise the compiler
|
||||
module is used.
|
||||
"""
|
||||
|
||||
from StringIO import StringIO
|
||||
from mako import exceptions, util
|
||||
|
||||
# words that cannot be assigned to (notably smaller than the total keys in __builtins__)
|
||||
reserved = util.Set(['True', 'False', 'None'])
|
||||
|
||||
try:
|
||||
import _ast
|
||||
util.restore__ast(_ast)
|
||||
import _ast_util
|
||||
except ImportError:
|
||||
_ast = None
|
||||
from compiler import parse as compiler_parse
|
||||
from compiler import visitor
|
||||
|
||||
|
||||
def parse(code, mode='exec', **exception_kwargs):
|
||||
"""Parse an expression into AST"""
|
||||
try:
|
||||
if _ast:
|
||||
return _ast_util.parse(code, '<unknown>', mode)
|
||||
else:
|
||||
return compiler_parse(code, mode)
|
||||
except Exception, e:
|
||||
raise exceptions.SyntaxException("(%s) %s (%s)" % (e.__class__.__name__, str(e), repr(code[0:50])), **exception_kwargs)
|
||||
|
||||
|
||||
if _ast:
|
||||
class FindIdentifiers(_ast_util.NodeVisitor):
|
||||
def __init__(self, listener, **exception_kwargs):
|
||||
self.in_function = False
|
||||
self.in_assign_targets = False
|
||||
self.local_ident_stack = {}
|
||||
self.listener = listener
|
||||
self.exception_kwargs = exception_kwargs
|
||||
def _add_declared(self, name):
|
||||
if not self.in_function:
|
||||
self.listener.declared_identifiers.add(name)
|
||||
def visit_ClassDef(self, node):
|
||||
self._add_declared(node.name)
|
||||
def visit_Assign(self, node):
|
||||
# flip around the visiting of Assign so the expression gets evaluated first,
|
||||
# in the case of a clause like "x=x+5" (x is undeclared)
|
||||
self.visit(node.value)
|
||||
in_a = self.in_assign_targets
|
||||
self.in_assign_targets = True
|
||||
for n in node.targets:
|
||||
self.visit(n)
|
||||
self.in_assign_targets = in_a
|
||||
def visit_FunctionDef(self, node):
|
||||
self._add_declared(node.name)
|
||||
# push function state onto stack. dont log any
|
||||
# more identifiers as "declared" until outside of the function,
|
||||
# but keep logging identifiers as "undeclared".
|
||||
# track argument names in each function header so they arent counted as "undeclared"
|
||||
saved = {}
|
||||
inf = self.in_function
|
||||
self.in_function = True
|
||||
for arg in node.args.args:
|
||||
if arg.id in self.local_ident_stack:
|
||||
saved[arg.id] = True
|
||||
else:
|
||||
self.local_ident_stack[arg.id] = True
|
||||
for n in node.body:
|
||||
self.visit(n)
|
||||
self.in_function = inf
|
||||
for arg in node.args.args:
|
||||
if arg.id not in saved:
|
||||
del self.local_ident_stack[arg.id]
|
||||
def visit_For(self, node):
|
||||
# flip around visit
|
||||
self.visit(node.iter)
|
||||
self.visit(node.target)
|
||||
for statement in node.body:
|
||||
self.visit(statement)
|
||||
for statement in node.orelse:
|
||||
self.visit(statement)
|
||||
def visit_Name(self, node):
|
||||
if isinstance(node.ctx, _ast.Store):
|
||||
self._add_declared(node.id)
|
||||
if node.id not in reserved and node.id not in self.listener.declared_identifiers and node.id not in self.local_ident_stack:
|
||||
self.listener.undeclared_identifiers.add(node.id)
|
||||
def visit_Import(self, node):
|
||||
for name in node.names:
|
||||
if name.asname is not None:
|
||||
self._add_declared(name.asname)
|
||||
else:
|
||||
self._add_declared(name.name.split('.')[0])
|
||||
def visit_ImportFrom(self, node):
|
||||
for name in node.names:
|
||||
if name.asname is not None:
|
||||
self._add_declared(name.asname)
|
||||
else:
|
||||
if name.name == '*':
|
||||
raise exceptions.CompileException("'import *' is not supported, since all identifier names must be explicitly declared. Please use the form 'from <modulename> import <name1>, <name2>, ...' instead.", **self.exception_kwargs)
|
||||
self._add_declared(name.name)
|
||||
|
||||
class FindTuple(_ast_util.NodeVisitor):
|
||||
def __init__(self, listener, code_factory, **exception_kwargs):
|
||||
self.listener = listener
|
||||
self.exception_kwargs = exception_kwargs
|
||||
self.code_factory = code_factory
|
||||
def visit_Tuple(self, node):
|
||||
for n in node.elts:
|
||||
p = self.code_factory(n, **self.exception_kwargs)
|
||||
self.listener.codeargs.append(p)
|
||||
self.listener.args.append(ExpressionGenerator(n).value())
|
||||
self.listener.declared_identifiers = self.listener.declared_identifiers.union(p.declared_identifiers)
|
||||
self.listener.undeclared_identifiers = self.listener.undeclared_identifiers.union(p.undeclared_identifiers)
|
||||
|
||||
class ParseFunc(_ast_util.NodeVisitor):
|
||||
def __init__(self, listener, **exception_kwargs):
|
||||
self.listener = listener
|
||||
self.exception_kwargs = exception_kwargs
|
||||
def visit_FunctionDef(self, node):
|
||||
self.listener.funcname = node.name
|
||||
argnames = [arg.id for arg in node.args.args]
|
||||
if node.args.vararg:
|
||||
argnames.append(node.args.vararg)
|
||||
if node.args.kwarg:
|
||||
argnames.append(node.args.kwarg)
|
||||
self.listener.argnames = argnames
|
||||
self.listener.defaults = node.args.defaults # ast
|
||||
self.listener.varargs = node.args.vararg
|
||||
self.listener.kwargs = node.args.kwarg
|
||||
|
||||
class ExpressionGenerator(object):
|
||||
def __init__(self, astnode):
|
||||
self.generator = _ast_util.SourceGenerator(' ' * 4)
|
||||
self.generator.visit(astnode)
|
||||
def value(self):
|
||||
return ''.join(self.generator.result)
|
||||
else:
|
||||
class FindIdentifiers(object):
|
||||
def __init__(self, listener, **exception_kwargs):
|
||||
self.in_function = False
|
||||
self.local_ident_stack = {}
|
||||
self.listener = listener
|
||||
self.exception_kwargs = exception_kwargs
|
||||
def _add_declared(self, name):
|
||||
if not self.in_function:
|
||||
self.listener.declared_identifiers.add(name)
|
||||
def visitClass(self, node, *args):
|
||||
self._add_declared(node.name)
|
||||
def visitAssName(self, node, *args):
|
||||
self._add_declared(node.name)
|
||||
def visitAssign(self, node, *args):
|
||||
# flip around the visiting of Assign so the expression gets evaluated first,
|
||||
# in the case of a clause like "x=x+5" (x is undeclared)
|
||||
self.visit(node.expr, *args)
|
||||
for n in node.nodes:
|
||||
self.visit(n, *args)
|
||||
def visitFunction(self,node, *args):
|
||||
self._add_declared(node.name)
|
||||
# push function state onto stack. dont log any
|
||||
# more identifiers as "declared" until outside of the function,
|
||||
# but keep logging identifiers as "undeclared".
|
||||
# track argument names in each function header so they arent counted as "undeclared"
|
||||
saved = {}
|
||||
inf = self.in_function
|
||||
self.in_function = True
|
||||
for arg in node.argnames:
|
||||
if arg in self.local_ident_stack:
|
||||
saved[arg] = True
|
||||
else:
|
||||
self.local_ident_stack[arg] = True
|
||||
for n in node.getChildNodes():
|
||||
self.visit(n, *args)
|
||||
self.in_function = inf
|
||||
for arg in node.argnames:
|
||||
if arg not in saved:
|
||||
del self.local_ident_stack[arg]
|
||||
def visitFor(self, node, *args):
|
||||
# flip around visit
|
||||
self.visit(node.list, *args)
|
||||
self.visit(node.assign, *args)
|
||||
self.visit(node.body, *args)
|
||||
def visitName(self, node, *args):
|
||||
if node.name not in reserved and node.name not in self.listener.declared_identifiers and node.name not in self.local_ident_stack:
|
||||
self.listener.undeclared_identifiers.add(node.name)
|
||||
def visitImport(self, node, *args):
|
||||
for (mod, alias) in node.names:
|
||||
if alias is not None:
|
||||
self._add_declared(alias)
|
||||
else:
|
||||
self._add_declared(mod.split('.')[0])
|
||||
def visitFrom(self, node, *args):
|
||||
for (mod, alias) in node.names:
|
||||
if alias is not None:
|
||||
self._add_declared(alias)
|
||||
else:
|
||||
if mod == '*':
|
||||
raise exceptions.CompileException("'import *' is not supported, since all identifier names must be explicitly declared. Please use the form 'from <modulename> import <name1>, <name2>, ...' instead.", **self.exception_kwargs)
|
||||
self._add_declared(mod)
|
||||
def visit(self, expr):
|
||||
visitor.walk(expr, self) #, walker=walker())
|
||||
|
||||
class FindTuple(object):
|
||||
def __init__(self, listener, code_factory, **exception_kwargs):
|
||||
self.listener = listener
|
||||
self.exception_kwargs = exception_kwargs
|
||||
self.code_factory = code_factory
|
||||
def visitTuple(self, node, *args):
|
||||
for n in node.nodes:
|
||||
p = self.code_factory(n, **self.exception_kwargs)
|
||||
self.listener.codeargs.append(p)
|
||||
self.listener.args.append(ExpressionGenerator(n).value())
|
||||
self.listener.declared_identifiers = self.listener.declared_identifiers.union(p.declared_identifiers)
|
||||
self.listener.undeclared_identifiers = self.listener.undeclared_identifiers.union(p.undeclared_identifiers)
|
||||
def visit(self, expr):
|
||||
visitor.walk(expr, self) #, walker=walker())
|
||||
|
||||
class ParseFunc(object):
|
||||
def __init__(self, listener, **exception_kwargs):
|
||||
self.listener = listener
|
||||
self.exception_kwargs = exception_kwargs
|
||||
def visitFunction(self, node, *args):
|
||||
self.listener.funcname = node.name
|
||||
self.listener.argnames = node.argnames
|
||||
self.listener.defaults = node.defaults
|
||||
self.listener.varargs = node.varargs
|
||||
self.listener.kwargs = node.kwargs
|
||||
def visit(self, expr):
|
||||
visitor.walk(expr, self)
|
||||
|
||||
class ExpressionGenerator(object):
|
||||
"""given an AST node, generates an equivalent literal Python expression."""
|
||||
def __init__(self, astnode):
|
||||
self.buf = StringIO()
|
||||
visitor.walk(astnode, self) #, walker=walker())
|
||||
def value(self):
|
||||
return self.buf.getvalue()
|
||||
def operator(self, op, node, *args):
|
||||
self.buf.write("(")
|
||||
self.visit(node.left, *args)
|
||||
self.buf.write(" %s " % op)
|
||||
self.visit(node.right, *args)
|
||||
self.buf.write(")")
|
||||
def booleanop(self, op, node, *args):
|
||||
self.visit(node.nodes[0])
|
||||
for n in node.nodes[1:]:
|
||||
self.buf.write(" " + op + " ")
|
||||
self.visit(n, *args)
|
||||
def visitConst(self, node, *args):
|
||||
self.buf.write(repr(node.value))
|
||||
def visitAssName(self, node, *args):
|
||||
# TODO: figure out OP_ASSIGN, other OP_s
|
||||
self.buf.write(node.name)
|
||||
def visitName(self, node, *args):
|
||||
self.buf.write(node.name)
|
||||
def visitMul(self, node, *args):
|
||||
self.operator("*", node, *args)
|
||||
def visitAnd(self, node, *args):
|
||||
self.booleanop("and", node, *args)
|
||||
def visitOr(self, node, *args):
|
||||
self.booleanop("or", node, *args)
|
||||
def visitBitand(self, node, *args):
|
||||
self.booleanop("&", node, *args)
|
||||
def visitBitor(self, node, *args):
|
||||
self.booleanop("|", node, *args)
|
||||
def visitBitxor(self, node, *args):
|
||||
self.booleanop("^", node, *args)
|
||||
def visitAdd(self, node, *args):
|
||||
self.operator("+", node, *args)
|
||||
def visitGetattr(self, node, *args):
|
||||
self.visit(node.expr, *args)
|
||||
self.buf.write(".%s" % node.attrname)
|
||||
def visitSub(self, node, *args):
|
||||
self.operator("-", node, *args)
|
||||
def visitNot(self, node, *args):
|
||||
self.buf.write("not ")
|
||||
self.visit(node.expr)
|
||||
def visitDiv(self, node, *args):
|
||||
self.operator("/", node, *args)
|
||||
def visitFloorDiv(self, node, *args):
|
||||
self.operator("//", node, *args)
|
||||
def visitSubscript(self, node, *args):
|
||||
self.visit(node.expr)
|
||||
self.buf.write("[")
|
||||
[self.visit(x) for x in node.subs]
|
||||
self.buf.write("]")
|
||||
def visitUnarySub(self, node, *args):
|
||||
self.buf.write("-")
|
||||
self.visit(node.expr)
|
||||
def visitUnaryAdd(self, node, *args):
|
||||
self.buf.write("-")
|
||||
self.visit(node.expr)
|
||||
def visitSlice(self, node, *args):
|
||||
self.visit(node.expr)
|
||||
self.buf.write("[")
|
||||
if node.lower is not None:
|
||||
self.visit(node.lower)
|
||||
self.buf.write(":")
|
||||
if node.upper is not None:
|
||||
self.visit(node.upper)
|
||||
self.buf.write("]")
|
||||
def visitDict(self, node):
|
||||
self.buf.write("{")
|
||||
c = node.getChildren()
|
||||
for i in range(0, len(c), 2):
|
||||
self.visit(c[i])
|
||||
self.buf.write(": ")
|
||||
self.visit(c[i+1])
|
||||
if i<len(c) -2:
|
||||
self.buf.write(", ")
|
||||
self.buf.write("}")
|
||||
def visitTuple(self, node):
|
||||
self.buf.write("(")
|
||||
c = node.getChildren()
|
||||
for i in range(0, len(c)):
|
||||
self.visit(c[i])
|
||||
if i<len(c) - 1:
|
||||
self.buf.write(", ")
|
||||
self.buf.write(")")
|
||||
def visitList(self, node):
|
||||
self.buf.write("[")
|
||||
c = node.getChildren()
|
||||
for i in range(0, len(c)):
|
||||
self.visit(c[i])
|
||||
if i<len(c) - 1:
|
||||
self.buf.write(", ")
|
||||
self.buf.write("]")
|
||||
def visitListComp(self, node):
|
||||
self.buf.write("[")
|
||||
self.visit(node.expr)
|
||||
self.buf.write(" ")
|
||||
for n in node.quals:
|
||||
self.visit(n)
|
||||
self.buf.write("]")
|
||||
def visitListCompFor(self, node):
|
||||
self.buf.write(" for ")
|
||||
self.visit(node.assign)
|
||||
self.buf.write(" in ")
|
||||
self.visit(node.list)
|
||||
for n in node.ifs:
|
||||
self.visit(n)
|
||||
def visitListCompIf(self, node):
|
||||
self.buf.write(" if ")
|
||||
self.visit(node.test)
|
||||
def visitCompare(self, node):
|
||||
self.visit(node.expr)
|
||||
for tup in node.ops:
|
||||
self.buf.write(tup[0])
|
||||
self.visit(tup[1])
|
||||
def visitCallFunc(self, node, *args):
|
||||
self.visit(node.node)
|
||||
self.buf.write("(")
|
||||
if len(node.args):
|
||||
self.visit(node.args[0])
|
||||
for a in node.args[1:]:
|
||||
self.buf.write(", ")
|
||||
self.visit(a)
|
||||
self.buf.write(")")
|
||||
|
||||
class walker(visitor.ASTVisitor):
|
||||
def dispatch(self, node, *args):
|
||||
print "Node:", str(node)
|
||||
#print "dir:", dir(node)
|
||||
return visitor.ASTVisitor.dispatch(self, node, *args)
|
|
@ -1,398 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# runtime.py
|
||||
# Copyright (C) 2006, 2007, 2008 Michael Bayer mike_mp@zzzcomputing.com
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""provides runtime services for templates, including Context, Namespace, and various helper functions."""
|
||||
|
||||
from mako import exceptions, util
|
||||
import __builtin__, inspect, sys
|
||||
|
||||
class Context(object):
|
||||
"""provides runtime namespace, output buffer, and various callstacks for templates."""
|
||||
def __init__(self, buffer, **data):
|
||||
self._buffer_stack = [buffer]
|
||||
self._orig = data # original data, minus the builtins
|
||||
self._data = __builtin__.__dict__.copy() # the context data which includes builtins
|
||||
self._data.update(data)
|
||||
self._kwargs = data.copy()
|
||||
self._with_template = None
|
||||
self.namespaces = {}
|
||||
|
||||
# "capture" function which proxies to the generic "capture" function
|
||||
self._data['capture'] = lambda x, *args, **kwargs: capture(self, x, *args, **kwargs)
|
||||
|
||||
# "caller" stack used by def calls with content
|
||||
self.caller_stack = self._data['caller'] = CallerStack()
|
||||
|
||||
lookup = property(lambda self:self._with_template.lookup)
|
||||
kwargs = property(lambda self:self._kwargs.copy())
|
||||
|
||||
def push_caller(self, caller):
|
||||
self.caller_stack.append(caller)
|
||||
|
||||
def pop_caller(self):
|
||||
del self.caller_stack[-1]
|
||||
|
||||
def keys(self):
|
||||
return self._data.keys()
|
||||
|
||||
def __getitem__(self, key):
|
||||
return self._data[key]
|
||||
|
||||
def _push_writer(self):
|
||||
"""push a capturing buffer onto this Context and return the new Writer function."""
|
||||
|
||||
buf = util.FastEncodingBuffer()
|
||||
self._buffer_stack.append(buf)
|
||||
return buf.write
|
||||
|
||||
def _pop_buffer_and_writer(self):
|
||||
"""pop the most recent capturing buffer from this Context
|
||||
and return the current writer after the pop.
|
||||
|
||||
"""
|
||||
|
||||
buf = self._buffer_stack.pop()
|
||||
return buf, self._buffer_stack[-1].write
|
||||
|
||||
def _push_buffer(self):
|
||||
"""push a capturing buffer onto this Context."""
|
||||
|
||||
self._push_writer()
|
||||
|
||||
def _pop_buffer(self):
|
||||
"""pop the most recent capturing buffer from this Context."""
|
||||
|
||||
return self._buffer_stack.pop()
|
||||
|
||||
def get(self, key, default=None):
|
||||
return self._data.get(key, default)
|
||||
|
||||
def write(self, string):
|
||||
"""write a string to this Context's underlying output buffer."""
|
||||
|
||||
self._buffer_stack[-1].write(string)
|
||||
|
||||
def writer(self):
|
||||
"""return the current writer function"""
|
||||
|
||||
return self._buffer_stack[-1].write
|
||||
|
||||
def _copy(self):
|
||||
c = Context.__new__(Context)
|
||||
c._buffer_stack = self._buffer_stack
|
||||
c._data = self._data.copy()
|
||||
c._orig = self._orig
|
||||
c._kwargs = self._kwargs
|
||||
c._with_template = self._with_template
|
||||
c.namespaces = self.namespaces
|
||||
c.caller_stack = self.caller_stack
|
||||
return c
|
||||
def locals_(self, d):
|
||||
"""create a new Context with a copy of this Context's current state, updated with the given dictionary."""
|
||||
if len(d) == 0:
|
||||
return self
|
||||
c = self._copy()
|
||||
c._data.update(d)
|
||||
return c
|
||||
def _clean_inheritance_tokens(self):
|
||||
"""create a new copy of this Context with tokens related to inheritance state removed."""
|
||||
c = self._copy()
|
||||
x = c._data
|
||||
x.pop('self', None)
|
||||
x.pop('parent', None)
|
||||
x.pop('next', None)
|
||||
return c
|
||||
|
||||
class CallerStack(list):
|
||||
def __init__(self):
|
||||
self.nextcaller = None
|
||||
def __nonzero__(self):
|
||||
return self._get_caller() and True or False
|
||||
def _get_caller(self):
|
||||
return self[-1]
|
||||
def __getattr__(self, key):
|
||||
return getattr(self._get_caller(), key)
|
||||
def _push_frame(self):
|
||||
self.append(self.nextcaller or None)
|
||||
self.nextcaller = None
|
||||
def _pop_frame(self):
|
||||
self.nextcaller = self.pop()
|
||||
|
||||
|
||||
class Undefined(object):
|
||||
"""represents an undefined value in a template."""
|
||||
def __str__(self):
|
||||
raise NameError("Undefined")
|
||||
def __nonzero__(self):
|
||||
return False
|
||||
|
||||
UNDEFINED = Undefined()
|
||||
|
||||
class _NSAttr(object):
|
||||
def __init__(self, parent):
|
||||
self.__parent = parent
|
||||
def __getattr__(self, key):
|
||||
ns = self.__parent
|
||||
while ns:
|
||||
if hasattr(ns.module, key):
|
||||
return getattr(ns.module, key)
|
||||
else:
|
||||
ns = ns.inherits
|
||||
raise AttributeError(key)
|
||||
|
||||
class Namespace(object):
|
||||
"""provides access to collections of rendering methods, which can be local, from other templates, or from imported modules"""
|
||||
def __init__(self, name, context, module=None, template=None, templateuri=None, callables=None, inherits=None, populate_self=True, calling_uri=None):
|
||||
self.name = name
|
||||
if module is not None:
|
||||
mod = __import__(module)
|
||||
for token in module.split('.')[1:]:
|
||||
mod = getattr(mod, token)
|
||||
self._module = mod
|
||||
else:
|
||||
self._module = None
|
||||
if templateuri is not None:
|
||||
self.template = _lookup_template(context, templateuri, calling_uri)
|
||||
self._templateuri = self.template.module._template_uri
|
||||
else:
|
||||
self.template = template
|
||||
if self.template is not None:
|
||||
self._templateuri = self.template.module._template_uri
|
||||
self.context = context
|
||||
self.inherits = inherits
|
||||
if callables is not None:
|
||||
self.callables = dict([(c.func_name, c) for c in callables])
|
||||
else:
|
||||
self.callables = None
|
||||
if populate_self and self.template is not None:
|
||||
(lclcallable, lclcontext) = _populate_self_namespace(context, self.template, self_ns=self)
|
||||
|
||||
module = property(lambda s:s._module or s.template.module)
|
||||
filename = property(lambda s:s._module and s._module.__file__ or s.template.filename)
|
||||
uri = property(lambda s:s.template.uri)
|
||||
|
||||
def attr(self):
|
||||
if not hasattr(self, '_attr'):
|
||||
self._attr = _NSAttr(self)
|
||||
return self._attr
|
||||
attr = property(attr)
|
||||
|
||||
def get_namespace(self, uri):
|
||||
"""return a namespace corresponding to the given template uri.
|
||||
|
||||
if a relative uri, it is adjusted to that of the template of this namespace"""
|
||||
key = (self, uri)
|
||||
if self.context.namespaces.has_key(key):
|
||||
return self.context.namespaces[key]
|
||||
else:
|
||||
ns = Namespace(uri, self.context._copy(), templateuri=uri, calling_uri=self._templateuri)
|
||||
self.context.namespaces[key] = ns
|
||||
return ns
|
||||
|
||||
def get_template(self, uri):
|
||||
return _lookup_template(self.context, uri, self._templateuri)
|
||||
|
||||
def get_cached(self, key, **kwargs):
|
||||
if self.template:
|
||||
if not self.template.cache_enabled:
|
||||
createfunc = kwargs.get('createfunc', None)
|
||||
if createfunc:
|
||||
return createfunc()
|
||||
else:
|
||||
return None
|
||||
|
||||
if self.template.cache_dir:
|
||||
kwargs.setdefault('data_dir', self.template.cache_dir)
|
||||
if self.template.cache_type:
|
||||
kwargs.setdefault('type', self.template.cache_type)
|
||||
if self.template.cache_url:
|
||||
kwargs.setdefault('url', self.template.cache_url)
|
||||
return self.cache.get(key, **kwargs)
|
||||
|
||||
def cache(self):
|
||||
return self.template.cache
|
||||
cache = property(cache)
|
||||
|
||||
def include_file(self, uri, **kwargs):
|
||||
"""include a file at the given uri"""
|
||||
_include_file(self.context, uri, self._templateuri, **kwargs)
|
||||
|
||||
def _populate(self, d, l):
|
||||
for ident in l:
|
||||
if ident == '*':
|
||||
for (k, v) in self._get_star():
|
||||
d[k] = v
|
||||
else:
|
||||
d[ident] = getattr(self, ident)
|
||||
|
||||
def _get_star(self):
|
||||
if self.callables:
|
||||
for key in self.callables:
|
||||
yield (key, self.callables[key])
|
||||
if self.template:
|
||||
def get(key):
|
||||
callable_ = self.template.get_def(key).callable_
|
||||
return lambda *args, **kwargs:callable_(self.context, *args, **kwargs)
|
||||
for k in self.template.module._exports:
|
||||
yield (k, get(k))
|
||||
if self._module:
|
||||
def get(key):
|
||||
callable_ = getattr(self._module, key)
|
||||
return lambda *args, **kwargs:callable_(self.context, *args, **kwargs)
|
||||
for k in dir(self._module):
|
||||
if k[0] != '_':
|
||||
yield (k, get(k))
|
||||
|
||||
def __getattr__(self, key):
|
||||
if self.callables and key in self.callables:
|
||||
return self.callables[key]
|
||||
|
||||
if self.template and self.template.has_def(key):
|
||||
callable_ = self.template.get_def(key).callable_
|
||||
return lambda *args, **kwargs:callable_(self.context, *args, **kwargs)
|
||||
|
||||
if self._module and hasattr(self._module, key):
|
||||
callable_ = getattr(self._module, key)
|
||||
return lambda *args, **kwargs:callable_(self.context, *args, **kwargs)
|
||||
|
||||
if self.inherits is not None:
|
||||
return getattr(self.inherits, key)
|
||||
raise exceptions.RuntimeException("Namespace '%s' has no member '%s'" % (self.name, key))
|
||||
|
||||
def supports_caller(func):
|
||||
"""apply a caller_stack compatibility decorator to a plain Python function."""
|
||||
def wrap_stackframe(context, *args, **kwargs):
|
||||
context.caller_stack._push_frame()
|
||||
try:
|
||||
return func(context, *args, **kwargs)
|
||||
finally:
|
||||
context.caller_stack._pop_frame()
|
||||
return wrap_stackframe
|
||||
|
||||
def capture(context, callable_, *args, **kwargs):
|
||||
"""execute the given template def, capturing the output into a buffer."""
|
||||
if not callable(callable_):
|
||||
raise exceptions.RuntimeException("capture() function expects a callable as its argument (i.e. capture(func, *args, **kwargs))")
|
||||
context._push_buffer()
|
||||
try:
|
||||
callable_(*args, **kwargs)
|
||||
finally:
|
||||
buf = context._pop_buffer()
|
||||
return buf.getvalue()
|
||||
|
||||
def _include_file(context, uri, calling_uri, **kwargs):
    """locate the template from the given uri and include it in the current output."""
    template = _lookup_template(context, uri, calling_uri)
    callable_, ctx = _populate_self_namespace(
        context._clean_inheritance_tokens(), template)
    # forward only the keyword args the target template actually declares
    kw = _kwargs_for_callable(callable_, context._orig, **kwargs)
    callable_(ctx, **kw)
|
||||
|
||||
def _inherit_from(context, uri, calling_uri):
    """called by the _inherit method in template modules to set up the inheritance chain at the start
    of a template's execution."""
    # no <%inherit> target: nothing to set up
    if uri is None:
        return None
    template = _lookup_template(context, uri, calling_uri)
    self_ns = context['self']
    # walk to the end of the already-established inheritance chain
    ih = self_ns
    while ih.inherits is not None:
        ih = ih.inherits
    lclcontext = context.locals_({'next':ih})
    # append the newly located parent template to the chain
    ih.inherits = Namespace("self:%s" % template.uri, lclcontext, template = template, populate_self=False)
    context._data['parent'] = lclcontext._data['local'] = ih.inherits
    # let the parent module extend the chain itself (recursive inheritance)
    callable_ = getattr(template.module, '_mako_inherit', None)
    if callable_ is not None:
        ret = callable_(template, lclcontext)
        if ret:
            return ret

    gen_ns = getattr(template.module, '_mako_generate_namespaces', None)
    if gen_ns is not None:
        gen_ns(context)
    return (template.callable_, lclcontext)
|
||||
|
||||
def _lookup_template(context, uri, relativeto):
    # resolve a uri (possibly relative to the calling template) into a
    # Template object via the current template's TemplateLookup
    lookup = context._with_template.lookup
    if lookup is None:
        raise exceptions.TemplateLookupException("Template '%s' has no TemplateLookup associated" % context._with_template.uri)
    uri = lookup.adjust_uri(uri, relativeto)
    try:
        return lookup.get_template(uri)
    except exceptions.TopLevelLookupException, e:
        # re-raise as a generic lookup error for the caller
        raise exceptions.TemplateLookupException(str(e))
|
||||
|
||||
def _populate_self_namespace(context, template, self_ns=None):
    """Install a 'self' Namespace for *template* into *context*, honoring the
    module's _mako_inherit hook, and return a (render callable, context) pair."""
    if self_ns is None:
        self_ns = Namespace(
            'self:%s' % template.uri, context,
            template=template, populate_self=False)
    context._data['self'] = context._data['local'] = self_ns
    # an inheriting template redirects rendering to the base of its chain
    inherit_hook = getattr(template.module, '_mako_inherit', None)
    if inherit_hook is not None:
        ret = inherit_hook(template, context)
        if ret:
            return ret
    return (template.callable_, context)
|
||||
|
||||
def _render(template, callable_, args, data, as_unicode=False):
    """create a Context and return the string output of the given template and template callable."""
    # choose an output buffer matching the requested encoding behavior
    if as_unicode:
        out = util.FastEncodingBuffer(unicode=True)
    elif template.output_encoding:
        out = util.FastEncodingBuffer(
            unicode=as_unicode,
            encoding=template.output_encoding,
            errors=template.encoding_errors)
    else:
        out = util.StringIO()
    context = Context(out, **data)
    context._with_template = template
    _render_context(
        template, callable_, context,
        *args, **_kwargs_for_callable(callable_, data))
    return context._pop_buffer().getvalue()
|
||||
|
||||
def _kwargs_for_callable(callable_, data, **kwargs):
    """Extract from *data* the keyword arguments declared by *callable_*.

    Every named argument (positional names plus the ``*args``/``**kwargs``
    names, if present) other than 'context' that appears in *data* and is
    not already in *kwargs* is copied into *kwargs*, which is returned.
    """
    # inspect.getargspec() was removed in Python 3.11; use the
    # long-available getfullargspec() when present (indexes 0..2 have the
    # same meaning: args, varargs, varkw).
    getspec = getattr(inspect, 'getfullargspec', None) or inspect.getargspec
    argspec = getspec(callable_)
    namedargs = argspec[0] + [v for v in argspec[1:3] if v is not None]
    for arg in namedargs:
        if arg != 'context' and arg in data and arg not in kwargs:
            kwargs[arg] = data[arg]
    return kwargs
|
||||
|
||||
def _render_context(tmpl, callable_, context, *args, **kwargs):
    """Create the polymorphic 'self' namespace for this template and invoke
    the appropriate rendering callable."""
    import mako.template as template
    if isinstance(tmpl, template.DefTemplate):
        # rendering an individual def: 'self' comes from the owning template
        _populate_self_namespace(context, tmpl.parent)
        _exec_template(callable_, context, args=args, kwargs=kwargs)
    else:
        # main render method: call from the base of the inheritance stack
        (inherit, lclcontext) = _populate_self_namespace(context, tmpl)
        _exec_template(inherit, lclcontext, args=args, kwargs=kwargs)
|
||||
|
||||
def _exec_template(callable_, context, args=None, kwargs=None):
    """execute a rendering callable given the callable, a Context, and optional explicit arguments

    the contextual Template will be located if it exists, and the error handling options specified
    on that Template will be interpreted here.
    """
    template = context._with_template
    if template is not None and (template.format_exceptions or template.error_handler):
        error = None
        try:
            callable_(context, *args, **kwargs)
        except Exception, e:
            error = e
        except:
            # catch non-Exception raisables (Python 2 permits them);
            # note this captures the exception *type*, not the instance
            e = sys.exc_info()[0]
            error = e
        if error:
            if template.error_handler:
                result = template.error_handler(context, error)
                if not result:
                    # handler declined to handle the error: propagate
                    raise error
            else:
                # replace the pending output with the rich HTML traceback
                error_template = exceptions.html_error_template()
                context._buffer_stack[:] = [util.FastEncodingBuffer(error_template.output_encoding, error_template.encoding_errors)]
                context._with_template = error_template
                error_template.render_context(context, error=error)
    else:
        # no error-handling options set: let exceptions propagate normally
        callable_(context, *args, **kwargs)
|
|
@ -1,277 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# template.py
|
||||
# Copyright (C) 2006, 2007, 2008 Michael Bayer mike_mp@zzzcomputing.com
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""provides the Template class, a facade for parsing, generating and executing template strings,
|
||||
as well as template runtime operations."""
|
||||
|
||||
from mako.lexer import Lexer
|
||||
from mako import codegen
|
||||
from mako import runtime, util, exceptions
|
||||
import imp, os, re, shutil, stat, sys, tempfile, time, types, weakref
|
||||
|
||||
|
||||
class Template(object):
    """a compiled template"""
    def __init__(self, text=None, filename=None, uri=None, format_exceptions=False, error_handler=None,
                 lookup=None, output_encoding=None, encoding_errors='strict', module_directory=None, cache_type=None,
                 cache_dir=None, cache_url=None, module_filename=None, input_encoding=None, disable_unicode=False, default_filters=None,
                 buffer_filters=[], imports=None, preprocessor=None, cache_enabled=True):
        """construct a new Template instance using either literal template text, or a previously loaded template module

        text - textual template source, or None if a module is to be provided

        uri - the uri of this template, or some identifying string. defaults to the
        full filename given, or "memory:(hex id of this Template)" if no filename

        filename - filename of the source template, if any

        format_exceptions - catch exceptions and format them into an error display template
        """
        # derive a module identifier and uri from whichever of uri/filename
        # is available; non-word characters are not valid in module names
        if uri:
            self.module_id = re.sub(r'\W', "_", uri)
            self.uri = uri
        elif filename:
            self.module_id = re.sub(r'\W', "_", filename)
            self.uri = filename
        else:
            self.module_id = "memory:" + hex(id(self))
            self.uri = self.module_id

        self.input_encoding = input_encoding
        self.output_encoding = output_encoding
        self.encoding_errors = encoding_errors
        self.disable_unicode = disable_unicode
        # default output filter depends on unicode mode
        if default_filters is None:
            if self.disable_unicode:
                self.default_filters = ['str']
            else:
                self.default_filters = ['unicode']
        else:
            self.default_filters = default_filters
        self.buffer_filters = buffer_filters

        self.imports = imports
        self.preprocessor = preprocessor

        # if plain text, compile code in memory only
        if text is not None:
            (code, module) = _compile_text(self, text, filename)
            self._code = code
            self._source = text
            ModuleInfo(module, None, self, filename, code, text)
        elif filename is not None:
            # if template filename and a module directory, load
            # a filesystem-based module file, generating if needed
            if module_filename is not None:
                path = module_filename
            elif module_directory is not None:
                u = self.uri
                if u[0] == '/':
                    u = u[1:]
                path = os.path.abspath(os.path.join(module_directory.replace('/', os.path.sep), u + ".py"))
            else:
                path = None
            if path is not None:
                util.verify_directory(os.path.dirname(path))
                # regenerate the module file if missing or older than the source
                filemtime = os.stat(filename)[stat.ST_MTIME]
                if not os.path.exists(path) or os.stat(path)[stat.ST_MTIME] < filemtime:
                    _compile_module_file(self, file(filename).read(), filename, path)
                module = imp.load_source(self.module_id, path, file(path))
                del sys.modules[self.module_id]
                # module generated by an incompatible mako version: regenerate
                if module._magic_number != codegen.MAGIC_NUMBER:
                    _compile_module_file(self, file(filename).read(), filename, path)
                    module = imp.load_source(self.module_id, path, file(path))
                    del sys.modules[self.module_id]
                ModuleInfo(module, path, self, filename, None, None)
            else:
                # template filename and no module directory, compile code
                # in memory
                (code, module) = _compile_text(self, file(filename).read(), filename)
                self._source = None
                self._code = code
                ModuleInfo(module, None, self, filename, code, None)
        else:
            raise exceptions.RuntimeException("Template requires text or filename")

        self.module = module
        self.filename = filename
        # entry point generated by codegen for the template body
        self.callable_ = self.module.render_body
        self.format_exceptions = format_exceptions
        self.error_handler = error_handler
        self.lookup = lookup
        self.cache_type = cache_type
        self.cache_dir = cache_dir
        self.cache_url = cache_url
        self.cache_enabled = cache_enabled

    def source(self):
        """return the template source code for this Template."""
        return _get_module_info_from_callable(self.callable_).source
    source = property(source)

    def code(self):
        """return the module source code for this Template"""
        return _get_module_info_from_callable(self.callable_).code
    code = property(code)

    def cache(self):
        # per-module template cache, created by the generated module
        return self.module._template_cache
    cache = property(cache)

    def render(self, *args, **data):
        """render the output of this template as a string.

        if the template specifies an output encoding, the string will be encoded accordingly, else the output
        is raw (raw output uses cStringIO and can't handle multibyte characters).
        a Context object is created corresponding to the given data. Arguments that are explictly
        declared by this template's internal rendering method are also pulled from the given *args, **data
        members."""
        return runtime._render(self, self.callable_, args, data)

    def render_unicode(self, *args, **data):
        """render the output of this template as a unicode object."""
        return runtime._render(self, self.callable_, args, data, as_unicode=True)

    def render_context(self, context, *args, **kwargs):
        """render this Template with the given context.

        the data is written to the context's buffer."""
        if getattr(context, '_with_template', None) is None:
            context._with_template = self
        runtime._render_context(self, self.callable_, context, *args, **kwargs)

    def has_def(self, name):
        # True when the compiled module exposes a def called *name*
        return hasattr(self.module, "render_%s" % name)

    def get_def(self, name):
        """return a def of this template as an individual Template of its own."""
        return DefTemplate(self, getattr(self.module, "render_%s" % name))
|
||||
|
||||
|
||||
class ModuleTemplate(Template):
    """A Template which is constructed given an existing Python module.

    e.g.::

        t = Template("this is a template")
        f = file("mymodule.py", "w")
        f.write(t.code)
        f.close()

        import mymodule

        t = ModuleTemplate(mymodule)
        print t.render()

    """

    def __init__(self, module,
                 module_filename=None,
                 template=None, template_filename=None,
                 module_source=None, template_source=None,
                 output_encoding=None, encoding_errors='strict', disable_unicode=False, format_exceptions=False,
                 error_handler=None, lookup=None, cache_type=None, cache_dir=None, cache_url=None, cache_enabled=True
                 ):
        # identity and input-encoding metadata come from attributes the
        # code generator embedded in the module itself
        self.module_id = re.sub(r'\W', "_", module._template_uri)
        self.uri = module._template_uri
        self.input_encoding = module._source_encoding
        self.output_encoding = output_encoding
        self.encoding_errors = encoding_errors
        self.disable_unicode = disable_unicode
        self.module = module
        self.filename = template_filename
        # register so source/code reverse lookups work for this module
        ModuleInfo(module, module_filename, self, template_filename, module_source, template_source)

        self.callable_ = self.module.render_body
        self.format_exceptions = format_exceptions
        self.error_handler = error_handler
        self.lookup = lookup
        self.cache_type = cache_type
        self.cache_dir = cache_dir
        self.cache_url = cache_url
        self.cache_enabled = cache_enabled
|
||||
|
||||
class DefTemplate(Template):
    """a Template which represents a callable def in a parent template."""

    def __init__(self, parent, callable_):
        # note: deliberately does not call Template.__init__; only the
        # attributes needed for rendering are copied from the parent
        self.parent = parent
        self.callable_ = callable_
        self.output_encoding = parent.output_encoding
        self.encoding_errors = parent.encoding_errors
        self.format_exceptions = parent.format_exceptions
        self.error_handler = parent.error_handler
        self.lookup = parent.lookup

    def get_def(self, name):
        # defs are always resolved against the owning template
        return self.parent.get_def(name)
|
||||
|
||||
class ModuleInfo(object):
    """stores information about a module currently loaded into memory,
    provides reverse lookups of template source, module source code based on
    a module's identifier."""
    # module name (and module filename, when given) -> ModuleInfo; weak
    # values so registry entries disappear once nothing else holds the info
    _modules = weakref.WeakValueDictionary()

    def __init__(self, module, module_filename, template, template_filename, module_source, template_source):
        self.module = module
        self.module_filename = module_filename
        self.template_filename = template_filename
        self.module_source = module_source
        self.template_source = template_source
        # _mmarker keeps this ModuleInfo alive as long as the template is
        self._modules[module.__name__] = template._mmarker = self
        if module_filename:
            self._modules[module_filename] = self

    def _get_code(self):
        """generated module source: in-memory string if present, else read from disk"""
        if self.module_source is not None:
            return self.module_source
        else:
            return file(self.module_filename).read()
    code = property(_get_code)

    def _get_source(self):
        """template source, decoded using the module's recorded source encoding"""
        if self.template_source is not None:
            if self.module._source_encoding and not isinstance(self.template_source, unicode):
                return self.template_source.decode(self.module._source_encoding)
            else:
                return self.template_source
        else:
            if self.module._source_encoding:
                return file(self.template_filename).read().decode(self.module._source_encoding)
            else:
                return file(self.template_filename).read()
    source = property(_get_source)
|
||||
|
||||
def _compile_text(template, text, filename):
    """lex and code-generate *text*, then exec it into a fresh in-memory
    module; returns (generated module source, module)."""
    identifier = template.module_id
    lexer = Lexer(text, filename, disable_unicode=template.disable_unicode, input_encoding=template.input_encoding, preprocessor=template.preprocessor)
    node = lexer.parse()
    source = codegen.compile(node, template.uri, filename, default_filters=template.default_filters, buffer_filters=template.buffer_filters, imports=template.imports, source_encoding=lexer.encoding, generate_unicode=not template.disable_unicode)
    #print source
    cid = identifier
    # module names must be byte strings on Python 2
    if isinstance(cid, unicode):
        cid = cid.encode()
    module = types.ModuleType(cid)
    code = compile(source, cid, 'exec')
    exec code in module.__dict__, module.__dict__
    return (source, module)
|
||||
|
||||
def _compile_module_file(template, text, filename, outputpath):
    """Generate the module source for *template* and write it to *outputpath*
    via a temp file + move, so readers never see a partial file."""
    lexer = Lexer(text, filename,
                  disable_unicode=template.disable_unicode,
                  input_encoding=template.input_encoding,
                  preprocessor=template.preprocessor)
    source = codegen.compile(lexer.parse(), template.uri, filename,
                             default_filters=template.default_filters,
                             buffer_filters=template.buffer_filters,
                             imports=template.imports,
                             source_encoding=lexer.encoding,
                             generate_unicode=not template.disable_unicode)
    (handle, tmpname) = tempfile.mkstemp()
    os.write(handle, source)
    os.close(handle)
    shutil.move(tmpname, outputpath)
|
||||
|
||||
def _get_module_info_from_callable(callable_):
    """Return the ModuleInfo for the module a render callable was defined in."""
    # func_globals is the Python 2-only spelling; __globals__ has existed
    # on functions since Python 2.6 and is the only spelling on Python 3.
    return _get_module_info(callable_.__globals__['__name__'])
|
||||
|
||||
def _get_module_info(filename):
    # reverse lookup: module name or module filename -> ModuleInfo
    return ModuleInfo._modules[filename]
|
||||
|
267
bin/mako/util.py
267
bin/mako/util.py
|
@ -1,267 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# util.py
|
||||
# Copyright (C) 2006, 2007, 2008 Michael Bayer mike_mp@zzzcomputing.com
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
import sys
|
||||
try:
|
||||
Set = set
|
||||
except:
|
||||
import sets
|
||||
Set = sets.Set
|
||||
|
||||
try:
|
||||
from cStringIO import StringIO
|
||||
except:
|
||||
from StringIO import StringIO
|
||||
|
||||
import codecs, re, weakref, os, time
|
||||
|
||||
try:
|
||||
import threading
|
||||
import thread
|
||||
except ImportError:
|
||||
import dummy_threading as threading
|
||||
import dummy_thread as thread
|
||||
|
||||
if sys.platform.startswith('win') or sys.platform.startswith('java'):
|
||||
time_func = time.clock
|
||||
else:
|
||||
time_func = time.time
|
||||
|
||||
def verify_directory(dir):
    """create and/or verify a filesystem directory.

    Retries a handful of times on failure, since another thread or process
    may be creating the same directory concurrently.
    """

    tries = 0

    while not os.path.exists(dir):
        try:
            tries += 1
            # 0o750 = owner rwx, group r-x; 0o spelling is valid on
            # Python 2.6+ and 3.x (bare 0750 is a syntax error on 3)
            os.makedirs(dir, 0o750)
        except OSError:
            # narrowed from a bare except: filesystem races raise OSError;
            # anything else (KeyboardInterrupt etc.) should propagate
            if tries > 5:
                raise
|
||||
|
||||
class SetLikeDict(dict):
    """a dictionary that has some setlike methods on it"""

    def union(self, other):
        """produce a 'union' of this dict and another (at the key level).

        values in the second dict take precedence over that of the first"""
        merged = SetLikeDict(**self)
        merged.update(other)
        return merged
|
||||
|
||||
class FastEncodingBuffer(object):
    """a very rudimentary buffer that is faster than StringIO, but doesnt crash on unicode data like cStringIO."""

    def __init__(self, encoding=None, errors='strict', unicode=False):
        self.data = []
        self.encoding = encoding
        # join delimiter type decides whether getvalue() yields unicode
        self.delim = u'' if unicode else ''
        self.unicode = unicode
        self.errors = errors
        # bind list.append directly so each write skips a method dispatch
        self.write = self.data.append

    def getvalue(self):
        joined = self.delim.join(self.data)
        if self.encoding:
            return joined.encode(self.encoding, self.errors)
        return joined
|
||||
|
||||
class LRUCache(dict):
    """A dictionary-like object that stores a limited number of items, discarding
    lesser used items periodically.

    this is a rewrite of LRUCache from Myghty to use a periodic timestamp-based
    paradigm so that synchronization is not really needed. the size management
    is inexact.
    """

    class _Item(object):
        # wraps a value with its key and last-access timestamp
        def __init__(self, key, value):
            self.key = key
            self.value = value
            self.timestamp = time_func()
        def __repr__(self):
            return repr(self.value)

    def __init__(self, capacity, threshold=.5):
        self.capacity = capacity
        self.threshold = threshold

    def __getitem__(self, key):
        item = dict.__getitem__(self, key)
        # reading refreshes the item's recency
        item.timestamp = time_func()
        return item.value

    def values(self):
        return [i.value for i in dict.values(self)]

    def setdefault(self, key, value):
        if key in self:
            return self[key]
        else:
            self[key] = value
            return value

    def __setitem__(self, key, value):
        item = dict.get(self, key)
        if item is None:
            item = self._Item(key, value)
            dict.__setitem__(self, key, item)
        else:
            item.value = value
        self._manage_size()

    def _manage_size(self):
        # allow the dict to overshoot capacity by `threshold`, then trim
        # back down to `capacity`, dropping least-recently-used items
        while len(self) > self.capacity + self.capacity * self.threshold:
            # newest first; cmp-function sorts were removed in Python 3,
            # so use an equivalent key=/reverse= sort (works on 2.4+ too)
            bytime = sorted(dict.values(self),
                            key=lambda item: item.timestamp,
                            reverse=True)
            for item in bytime[self.capacity:]:
                try:
                    del self[item.key]
                except KeyError:
                    # if we couldnt find a key, most likely some other thread broke in
                    # on us. loop around and try again
                    break
|
||||
|
||||
# Regexp to match python magic encoding line
# (e.g. "# -*- coding: utf-8 -*-"; see PEP 263). Group 1 captures the
# encoding name.
_PYTHON_MAGIC_COMMENT_re = re.compile(
    r'[ \t\f]* \# .* coding[=:][ \t]*([-\w.]+)',
    re.VERBOSE)
|
||||
|
||||
def parse_encoding(fp):
    """Deduce the encoding of a source file from magic comment.

    It does this in the same way as the `Python interpreter`__

    .. __: http://docs.python.org/ref/encodings.html

    The ``fp`` argument should be a seekable file object.
    """
    pos = fp.tell()
    fp.seek(0)
    try:
        line1 = fp.readline()
        has_bom = line1.startswith(codecs.BOM_UTF8)
        if has_bom:
            line1 = line1[len(codecs.BOM_UTF8):]

        m = _PYTHON_MAGIC_COMMENT_re.match(line1)
        if not m:
            # the magic comment may appear on line 2, but only when line 1
            # parses as a complete statement on its own
            try:
                import parser
                parser.suite(line1)
            except (ImportError, SyntaxError):
                # Either it's a real syntax error, in which case the source
                # is not valid python source, or line2 is a continuation of
                # line1, in which case we don't want to scan line2 for a magic
                # comment.
                pass
            else:
                line2 = fp.readline()
                m = _PYTHON_MAGIC_COMMENT_re.match(line2)

        if has_bom:
            if m:
                raise SyntaxError, \
                      "python refuses to compile code with both a UTF8" \
                      " byte-order-mark and a magic encoding comment"
            return 'utf_8'
        elif m:
            return m.group(1)
        else:
            return None
    finally:
        # always restore the caller's original file position
        fp.seek(pos)
|
||||
|
||||
def sorted_dict_repr(d):
    """repr() a dictionary with the keys in order.

    Used by the lexer unit test to compare parse trees based on strings.

    """
    # sorted() over the dict works on both Python 2 and 3; the original
    # d.keys()/.sort() pattern breaks on 3, where keys() is a view object
    # with no in-place sort.
    return "{" + ", ".join(["%r: %r" % (k, d[k]) for k in sorted(d)]) + "}"
|
||||
|
||||
def restore__ast(_ast):
    """Attempt to restore the required classes to the _ast module if it
    appears to be missing them
    """
    if hasattr(_ast, 'AST'):
        return
    _ast.PyCF_ONLY_AST = 2 << 9
    # compile a snippet exercising every node type mako needs, then recover
    # the classes from the types of the resulting AST nodes
    m = compile("""\
def foo(): pass
class Bar(object): pass
if False: pass
baz = 'mako'
1 + 2 - 3 * 4 / 5
6 // 7 % 8 << 9 >> 10
11 & 12 ^ 13 | 14
15 and 16 or 17
-baz + (not +18) - ~17
baz and 'foo' or 'bar'
(mako is baz == baz) is not baz != mako
mako > baz < mako >= baz <= mako
mako in baz not in mako""", '<unknown>', 'exec', _ast.PyCF_ONLY_AST)
    _ast.Module = type(m)

    # base classes come from the Module type's MRO
    for cls in _ast.Module.__mro__:
        if cls.__name__ == 'mod':
            _ast.mod = cls
        elif cls.__name__ == 'AST':
            _ast.AST = cls

    # statement node types (indexes match the statements in the snippet)
    _ast.FunctionDef = type(m.body[0])
    _ast.ClassDef = type(m.body[1])
    _ast.If = type(m.body[2])

    _ast.Name = type(m.body[3].targets[0])
    _ast.Store = type(m.body[3].targets[0].ctx)
    _ast.Str = type(m.body[3].value)

    # arithmetic operators
    _ast.Sub = type(m.body[4].value.op)
    _ast.Add = type(m.body[4].value.left.op)
    _ast.Div = type(m.body[4].value.right.op)
    _ast.Mult = type(m.body[4].value.right.left.op)

    _ast.RShift = type(m.body[5].value.op)
    _ast.LShift = type(m.body[5].value.left.op)
    _ast.Mod = type(m.body[5].value.left.left.op)
    _ast.FloorDiv = type(m.body[5].value.left.left.left.op)

    # bitwise operators
    _ast.BitOr = type(m.body[6].value.op)
    _ast.BitXor = type(m.body[6].value.left.op)
    _ast.BitAnd = type(m.body[6].value.left.left.op)

    # boolean operators
    _ast.Or = type(m.body[7].value.op)
    _ast.And = type(m.body[7].value.values[0].op)

    # unary operators
    _ast.Invert = type(m.body[8].value.right.op)
    _ast.Not = type(m.body[8].value.left.right.op)
    _ast.UAdd = type(m.body[8].value.left.right.operand.op)
    _ast.USub = type(m.body[8].value.left.left.op)

    _ast.Or = type(m.body[9].value.op)
    _ast.And = type(m.body[9].value.values[0].op)

    # comparison operators
    _ast.IsNot = type(m.body[10].value.ops[0])
    _ast.NotEq = type(m.body[10].value.ops[1])
    _ast.Is = type(m.body[10].value.left.ops[0])
    _ast.Eq = type(m.body[10].value.left.ops[1])

    _ast.Gt = type(m.body[11].value.ops[0])
    _ast.Lt = type(m.body[11].value.ops[1])
    _ast.GtE = type(m.body[11].value.ops[2])
    _ast.LtE = type(m.body[11].value.ops[3])

    _ast.In = type(m.body[12].value.ops[0])
    _ast.NotIn = type(m.body[12].value.ops[1])
|
|
@ -18,7 +18,7 @@
|
|||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
##############################################################################
|
||||
|
||||
import mako
|
||||
from lxml import etree
|
||||
from mako.template import Template
|
||||
from mako.lookup import TemplateLookup
|
||||
|
|
Loading…
Reference in New Issue