removing bundled libraries

Micah Lee 2014-05-24 20:43:58 -04:00
parent e5e25b49f9
commit d868c1cfaa
262 changed files with 0 additions and 71622 deletions

View File

@@ -1,50 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask
~~~~~
A microframework based on Werkzeug. It's extensively documented
and follows best practice patterns.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
__version__ = '0.10.1'
# utilities we import from Werkzeug and Jinja2 that are unused
# in the module but are exported as public interface.
from werkzeug.exceptions import abort
from werkzeug.utils import redirect
from jinja2 import Markup, escape
from .app import Flask, Request, Response
from .config import Config
from .helpers import url_for, flash, send_file, send_from_directory, \
get_flashed_messages, get_template_attribute, make_response, safe_join, \
stream_with_context
from .globals import current_app, g, request, session, _request_ctx_stack, \
_app_ctx_stack
from .ctx import has_request_context, has_app_context, \
after_this_request, copy_current_request_context
from .module import Module
from .blueprints import Blueprint
from .templating import render_template, render_template_string
# the signals
from .signals import signals_available, template_rendered, request_started, \
request_finished, got_request_exception, request_tearing_down, \
appcontext_tearing_down, appcontext_pushed, \
appcontext_popped, message_flashed
# We're not exposing the actual json module but a convenient wrapper around
# it.
from . import json
# This was the only thing that flask used to export at one point and it had
# a more generic name.
jsonify = json.jsonify
# backwards compat, goes away in 1.0
from .sessions import SecureCookieSession as Session
json_available = True
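
For orientation, a minimal sketch of how this re-exported public interface is typically consumed; the route and parameter names below are illustrative and not part of the removed code:

from flask import Flask, jsonify, request

app = Flask(__name__)

@app.route('/hello')
def hello():
    # `request` is the context-local proxy re-exported from flask.globals,
    # and `jsonify` is the wrapper around flask.json mentioned above.
    name = request.args.get('name', 'world')
    return jsonify(message='Hello %s!' % name)

if __name__ == '__main__':
    app.run(debug=True)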

View File

@@ -1,73 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask._compat
~~~~~~~~~~~~~
Some py2/py3 compatibility support based on a stripped down
version of six so we don't have to depend on a specific version
of it.
:copyright: (c) 2013 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import sys
PY2 = sys.version_info[0] == 2
_identity = lambda x: x
if not PY2:
text_type = str
string_types = (str,)
integer_types = (int, )
iterkeys = lambda d: iter(d.keys())
itervalues = lambda d: iter(d.values())
iteritems = lambda d: iter(d.items())
from io import StringIO
def reraise(tp, value, tb=None):
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
implements_to_string = _identity
else:
text_type = unicode
string_types = (str, unicode)
integer_types = (int, long)
iterkeys = lambda d: d.iterkeys()
itervalues = lambda d: d.itervalues()
iteritems = lambda d: d.iteritems()
from cStringIO import StringIO
exec('def reraise(tp, value, tb=None):\n raise tp, value, tb')
def implements_to_string(cls):
cls.__unicode__ = cls.__str__
cls.__str__ = lambda x: x.__unicode__().encode('utf-8')
return cls
def with_metaclass(meta, *bases):
# This requires a bit of explanation: the basic idea is to make a
# dummy metaclass for one level of class instantiation that replaces
# itself with the actual metaclass. Because of internal type checks
# we also need to make sure that we downgrade the custom metaclass
# for one level to something closer to type (that's why __call__ and
# __init__ come back from type etc.).
#
# This has the advantage over six.with_metaclass in that it does not
# introduce dummy classes into the final MRO.
class metaclass(meta):
__call__ = type.__call__
__init__ = type.__init__
def __new__(cls, name, this_bases, d):
if this_bases is None:
return type.__new__(cls, name, (), d)
return meta(name, bases, d)
return metaclass('temporary_class', None, {})
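
A minimal sketch of how with_metaclass is meant to be used, assuming Flask 0.10.x is installed as a regular dependency (which is the point of this commit); TrackingMeta and TrackedBase are hypothetical names:

from flask._compat import with_metaclass

class TrackingMeta(type):
    # hypothetical metaclass that records every class it creates
    registry = []

    def __new__(mcs, name, bases, d):
        cls = type.__new__(mcs, name, bases, d)
        TrackingMeta.registry.append(cls)
        return cls

class TrackedBase(with_metaclass(TrackingMeta, object)):
    # works the same on Python 2 and 3, and the 'temporary_class' helper
    # never shows up in TrackedBase.__mro__
    pass

assert type(TrackedBase) is TrackingMeta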

File diff suppressed because it is too large.

View File

@@ -1,401 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask.blueprints
~~~~~~~~~~~~~~~~
Blueprints are the recommended way to implement larger or more
pluggable applications in Flask 0.7 and later.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from functools import update_wrapper
from .helpers import _PackageBoundObject, _endpoint_from_view_func
class BlueprintSetupState(object):
"""Temporary holder object for registering a blueprint with the
application. An instance of this class is created by the
:meth:`~flask.Blueprint.make_setup_state` method and later passed
to all register callback functions.
"""
def __init__(self, blueprint, app, options, first_registration):
#: a reference to the current application
self.app = app
#: a reference to the blueprint that created this setup state.
self.blueprint = blueprint
#: a dictionary with all options that were passed to the
#: :meth:`~flask.Flask.register_blueprint` method.
self.options = options
#: as blueprints can be registered multiple times with the
#: application and not everything wants to be registered
#: multiple times on it, this attribute can be used to figure
#: out if the blueprint was registered in the past already.
self.first_registration = first_registration
subdomain = self.options.get('subdomain')
if subdomain is None:
subdomain = self.blueprint.subdomain
#: The subdomain that the blueprint should be active for, `None`
#: otherwise.
self.subdomain = subdomain
url_prefix = self.options.get('url_prefix')
if url_prefix is None:
url_prefix = self.blueprint.url_prefix
#: The prefix that should be used for all URLs defined on the
#: blueprint.
self.url_prefix = url_prefix
#: A dictionary with URL defaults that is added to each and every
#: URL that was defined with the blueprint.
self.url_defaults = dict(self.blueprint.url_values_defaults)
self.url_defaults.update(self.options.get('url_defaults', ()))
def add_url_rule(self, rule, endpoint=None, view_func=None, **options):
"""A helper method to register a rule (and optionally a view function)
to the application. The endpoint is automatically prefixed with the
blueprint's name.
"""
if self.url_prefix:
rule = self.url_prefix + rule
options.setdefault('subdomain', self.subdomain)
if endpoint is None:
endpoint = _endpoint_from_view_func(view_func)
defaults = self.url_defaults
if 'defaults' in options:
defaults = dict(defaults, **options.pop('defaults'))
self.app.add_url_rule(rule, '%s.%s' % (self.blueprint.name, endpoint),
view_func, defaults=defaults, **options)
class Blueprint(_PackageBoundObject):
"""Represents a blueprint. A blueprint is an object that records
functions that will be called with the
:class:`~flask.blueprint.BlueprintSetupState` later to register functions
or other things on the main application. See :ref:`blueprints` for more
information.
.. versionadded:: 0.7
"""
warn_on_modifications = False
_got_registered_once = False
def __init__(self, name, import_name, static_folder=None,
static_url_path=None, template_folder=None,
url_prefix=None, subdomain=None, url_defaults=None):
_PackageBoundObject.__init__(self, import_name, template_folder)
self.name = name
self.url_prefix = url_prefix
self.subdomain = subdomain
self.static_folder = static_folder
self.static_url_path = static_url_path
self.deferred_functions = []
self.view_functions = {}
if url_defaults is None:
url_defaults = {}
self.url_values_defaults = url_defaults
def record(self, func):
"""Registers a function that is called when the blueprint is
registered on the application. This function is called with the
state as argument as returned by the :meth:`make_setup_state`
method.
"""
if self._got_registered_once and self.warn_on_modifications:
from warnings import warn
warn(Warning('The blueprint was already registered once '
'but is getting modified now. These changes '
'will not show up.'))
self.deferred_functions.append(func)
def record_once(self, func):
"""Works like :meth:`record` but wraps the function in another
function that will ensure the function is only called once. If the
blueprint is registered a second time on the application, the
function passed is not called.
"""
def wrapper(state):
if state.first_registration:
func(state)
return self.record(update_wrapper(wrapper, func))
def make_setup_state(self, app, options, first_registration=False):
"""Creates an instance of :meth:`~flask.blueprints.BlueprintSetupState`
object that is later passed to the register callback functions.
Subclasses can override this to return a subclass of the setup state.
"""
return BlueprintSetupState(self, app, options, first_registration)
def register(self, app, options, first_registration=False):
"""Called by :meth:`Flask.register_blueprint` to register a blueprint
on the application. This can be overridden to customize the register
behavior. Keyword arguments from
:func:`~flask.Flask.register_blueprint` are directly forwarded to this
method in the `options` dictionary.
"""
self._got_registered_once = True
state = self.make_setup_state(app, options, first_registration)
if self.has_static_folder:
state.add_url_rule(self.static_url_path + '/<path:filename>',
view_func=self.send_static_file,
endpoint='static')
for deferred in self.deferred_functions:
deferred(state)
def route(self, rule, **options):
"""Like :meth:`Flask.route` but for a blueprint. The endpoint for the
:func:`url_for` function is prefixed with the name of the blueprint.
"""
def decorator(f):
endpoint = options.pop("endpoint", f.__name__)
self.add_url_rule(rule, endpoint, f, **options)
return f
return decorator
def add_url_rule(self, rule, endpoint=None, view_func=None, **options):
"""Like :meth:`Flask.add_url_rule` but for a blueprint. The endpoint for
the :func:`url_for` function is prefixed with the name of the blueprint.
"""
if endpoint:
assert '.' not in endpoint, "Blueprint endpoints should not contain dots"
self.record(lambda s:
s.add_url_rule(rule, endpoint, view_func, **options))
def endpoint(self, endpoint):
"""Like :meth:`Flask.endpoint` but for a blueprint. This does not
prefix the endpoint with the blueprint name, this has to be done
explicitly by the user of this method. If the endpoint is prefixed
with a `.` it will be registered to the current blueprint, otherwise
it's an application independent endpoint.
"""
def decorator(f):
def register_endpoint(state):
state.app.view_functions[endpoint] = f
self.record_once(register_endpoint)
return f
return decorator
def app_template_filter(self, name=None):
"""Register a custom template filter, available application wide. Like
:meth:`Flask.template_filter` but for a blueprint.
:param name: the optional name of the filter, otherwise the
function name will be used.
"""
def decorator(f):
self.add_app_template_filter(f, name=name)
return f
return decorator
def add_app_template_filter(self, f, name=None):
"""Register a custom template filter, available application wide. Like
:meth:`Flask.add_template_filter` but for a blueprint. Works exactly
like the :meth:`app_template_filter` decorator.
:param name: the optional name of the filter, otherwise the
function name will be used.
"""
def register_template(state):
state.app.jinja_env.filters[name or f.__name__] = f
self.record_once(register_template)
def app_template_test(self, name=None):
"""Register a custom template test, available application wide. Like
:meth:`Flask.template_test` but for a blueprint.
.. versionadded:: 0.10
:param name: the optional name of the test, otherwise the
function name will be used.
"""
def decorator(f):
self.add_app_template_test(f, name=name)
return f
return decorator
def add_app_template_test(self, f, name=None):
"""Register a custom template test, available application wide. Like
:meth:`Flask.add_template_test` but for a blueprint. Works exactly
like the :meth:`app_template_test` decorator.
.. versionadded:: 0.10
:param name: the optional name of the test, otherwise the
function name will be used.
"""
def register_template(state):
state.app.jinja_env.tests[name or f.__name__] = f
self.record_once(register_template)
def app_template_global(self, name=None):
"""Register a custom template global, available application wide. Like
:meth:`Flask.template_global` but for a blueprint.
.. versionadded:: 0.10
:param name: the optional name of the global, otherwise the
function name will be used.
"""
def decorator(f):
self.add_app_template_global(f, name=name)
return f
return decorator
def add_app_template_global(self, f, name=None):
"""Register a custom template global, available application wide. Like
:meth:`Flask.add_template_global` but for a blueprint. Works exactly
like the :meth:`app_template_global` decorator.
.. versionadded:: 0.10
:param name: the optional name of the global, otherwise the
function name will be used.
"""
def register_template(state):
state.app.jinja_env.globals[name or f.__name__] = f
self.record_once(register_template)
def before_request(self, f):
"""Like :meth:`Flask.before_request` but for a blueprint. This function
is only executed before each request that is handled by a function of
that blueprint.
"""
self.record_once(lambda s: s.app.before_request_funcs
.setdefault(self.name, []).append(f))
return f
def before_app_request(self, f):
"""Like :meth:`Flask.before_request`. Such a function is executed
before each request, even if outside of a blueprint.
"""
self.record_once(lambda s: s.app.before_request_funcs
.setdefault(None, []).append(f))
return f
def before_app_first_request(self, f):
"""Like :meth:`Flask.before_first_request`. Such a function is
executed before the first request to the application.
"""
self.record_once(lambda s: s.app.before_first_request_funcs.append(f))
return f
def after_request(self, f):
"""Like :meth:`Flask.after_request` but for a blueprint. This function
is only executed after each request that is handled by a function of
that blueprint.
"""
self.record_once(lambda s: s.app.after_request_funcs
.setdefault(self.name, []).append(f))
return f
def after_app_request(self, f):
"""Like :meth:`Flask.after_request` but for a blueprint. Such a function
is executed after each request, even if outside of the blueprint.
"""
self.record_once(lambda s: s.app.after_request_funcs
.setdefault(None, []).append(f))
return f
def teardown_request(self, f):
"""Like :meth:`Flask.teardown_request` but for a blueprint. This
function is only executed when tearing down requests handled by a
function of that blueprint. Teardown request functions are executed
when the request context is popped, even when no actual request was
performed.
"""
self.record_once(lambda s: s.app.teardown_request_funcs
.setdefault(self.name, []).append(f))
return f
def teardown_app_request(self, f):
"""Like :meth:`Flask.teardown_request` but for a blueprint. Such a
function is executed when tearing down each request, even if outside of
the blueprint.
"""
self.record_once(lambda s: s.app.teardown_request_funcs
.setdefault(None, []).append(f))
return f
def context_processor(self, f):
"""Like :meth:`Flask.context_processor` but for a blueprint. This
function is only executed for requests handled by a blueprint.
"""
self.record_once(lambda s: s.app.template_context_processors
.setdefault(self.name, []).append(f))
return f
def app_context_processor(self, f):
"""Like :meth:`Flask.context_processor` but for a blueprint. Such a
function is executed each request, even if outside of the blueprint.
"""
self.record_once(lambda s: s.app.template_context_processors
.setdefault(None, []).append(f))
return f
def app_errorhandler(self, code):
"""Like :meth:`Flask.errorhandler` but for a blueprint. This
handler is used for all requests, even if outside of the blueprint.
"""
def decorator(f):
self.record_once(lambda s: s.app.errorhandler(code)(f))
return f
return decorator
def url_value_preprocessor(self, f):
"""Registers a function as URL value preprocessor for this
blueprint. It's called before the view functions are called and
can modify the url values provided.
"""
self.record_once(lambda s: s.app.url_value_preprocessors
.setdefault(self.name, []).append(f))
return f
def url_defaults(self, f):
"""Callback function for URL defaults for this blueprint. It's called
with the endpoint and values and should update the values passed
in place.
"""
self.record_once(lambda s: s.app.url_default_functions
.setdefault(self.name, []).append(f))
return f
def app_url_value_preprocessor(self, f):
"""Same as :meth:`url_value_preprocessor` but application wide.
"""
self.record_once(lambda s: s.app.url_value_preprocessors
.setdefault(None, []).append(f))
return f
def app_url_defaults(self, f):
"""Same as :meth:`url_defaults` but application wide.
"""
self.record_once(lambda s: s.app.url_default_functions
.setdefault(None, []).append(f))
return f
def errorhandler(self, code_or_exception):
"""Registers an error handler that becomes active for this blueprint
only. Please be aware that routing does not happen local to a
blueprint so an error handler for 404 usually is not handled by
a blueprint unless it is caused inside a view function. Another
special case is the 500 internal server error which is always looked
up from the application.
Otherwise works as the :meth:`~flask.Flask.errorhandler` decorator
of the :class:`~flask.Flask` object.
"""
def decorator(f):
self.record_once(lambda s: s.app._register_error_handler(
self.name, code_or_exception, f))
return f
return decorator
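
As a rough usage sketch of the API above (blueprint, endpoint and return values are illustrative only):

from flask import Flask, Blueprint

admin = Blueprint('admin', __name__, url_prefix='/admin')

@admin.route('/dashboard')
def dashboard():
    # recorded as a deferred function; becomes the endpoint
    # 'admin.dashboard' once the blueprint is registered
    return 'admin dashboard'

@admin.app_errorhandler(404)
def page_not_found(error):
    # attached via record_once() to the application itself, so it also
    # fires for requests that never touch this blueprint
    return 'not found', 404

app = Flask(__name__)
app.register_blueprint(admin)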

View File

@@ -1,168 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask.config
~~~~~~~~~~~~
Implements the configuration related objects.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import imp
import os
import errno
from werkzeug.utils import import_string
from ._compat import string_types
class ConfigAttribute(object):
"""Makes an attribute forward to the config"""
def __init__(self, name, get_converter=None):
self.__name__ = name
self.get_converter = get_converter
def __get__(self, obj, type=None):
if obj is None:
return self
rv = obj.config[self.__name__]
if self.get_converter is not None:
rv = self.get_converter(rv)
return rv
def __set__(self, obj, value):
obj.config[self.__name__] = value
class Config(dict):
"""Works exactly like a dict but provides ways to fill it from files
or special dictionaries. There are two common patterns to populate the
config.
Either you can fill the config from a config file::
app.config.from_pyfile('yourconfig.cfg')
Or alternatively you can define the configuration options in the
module that calls :meth:`from_object` or provide an import path to
a module that should be loaded. It is also possible to tell it to
use the same module and with that provide the configuration values
just before the call::
DEBUG = True
SECRET_KEY = 'development key'
app.config.from_object(__name__)
In both cases (loading from any Python file or loading from modules),
only uppercase keys are added to the config. This makes it possible to use
lowercase values in the config file for temporary values that are not added
to the config or to define the config keys in the same file that implements
the application.
Probably the most interesting way to load configurations is from an
environment variable pointing to a file::
app.config.from_envvar('YOURAPPLICATION_SETTINGS')
In this case before launching the application you have to set this
environment variable to the file you want to use. On Linux and OS X
use the export statement::
export YOURAPPLICATION_SETTINGS='/path/to/config/file'
On windows use `set` instead.
:param root_path: path to which files are read relative from. When the
config object is created by the application, this is
the application's :attr:`~flask.Flask.root_path`.
:param defaults: an optional dictionary of default values
"""
def __init__(self, root_path, defaults=None):
dict.__init__(self, defaults or {})
self.root_path = root_path
def from_envvar(self, variable_name, silent=False):
"""Loads a configuration from an environment variable pointing to
a configuration file. This is basically just a shortcut with nicer
error messages for this line of code::
app.config.from_pyfile(os.environ['YOURAPPLICATION_SETTINGS'])
:param variable_name: name of the environment variable
:param silent: set to `True` if you want silent failure for missing
files.
:return: bool. `True` if able to load config, `False` otherwise.
"""
rv = os.environ.get(variable_name)
if not rv:
if silent:
return False
raise RuntimeError('The environment variable %r is not set '
'and as such configuration could not be '
'loaded. Set this variable and make it '
'point to a configuration file' %
variable_name)
return self.from_pyfile(rv, silent=silent)
def from_pyfile(self, filename, silent=False):
"""Updates the values in the config from a Python file. This function
behaves as if the file was imported as module with the
:meth:`from_object` function.
:param filename: the filename of the config. This can either be an
absolute filename or a filename relative to the
root path.
:param silent: set to `True` if you want silent failure for missing
files.
.. versionadded:: 0.7
`silent` parameter.
"""
filename = os.path.join(self.root_path, filename)
d = imp.new_module('config')
d.__file__ = filename
try:
with open(filename) as config_file:
exec(compile(config_file.read(), filename, 'exec'), d.__dict__)
except IOError as e:
if silent and e.errno in (errno.ENOENT, errno.EISDIR):
return False
e.strerror = 'Unable to load configuration file (%s)' % e.strerror
raise
self.from_object(d)
return True
def from_object(self, obj):
"""Updates the values from the given object. An object can be of one
of the following two types:
- a string: in this case the object with that name will be imported
- an actual object reference: that object is used directly
Objects are usually either modules or classes.
Just the uppercase variables in that object are stored in the config.
Example usage::
app.config.from_object('yourapplication.default_config')
from yourapplication import default_config
app.config.from_object(default_config)
You should not use this function to load the actual configuration but
rather configuration defaults. The actual config should be loaded
with :meth:`from_pyfile` and ideally from a location not within the
package because the package might be installed system wide.
:param obj: an import name or object
"""
if isinstance(obj, string_types):
obj = import_string(obj)
for key in dir(obj):
if key.isupper():
self[key] = getattr(obj, key)
def __repr__(self):
return '<%s %s>' % (self.__class__.__name__, dict.__repr__(self))
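
A compact sketch of the two loading paths the docstring above describes; DefaultConfig and YOURAPPLICATION_SETTINGS are placeholders:

from flask import Flask

class DefaultConfig(object):
    # only uppercase attributes are copied into app.config by from_object()
    DEBUG = False
    SECRET_KEY = 'development key'

app = Flask(__name__)
app.config.from_object(DefaultConfig)

# deployment overrides come from a file named in an environment variable;
# silent=True returns False instead of raising when the variable is unset
app.config.from_envvar('YOURAPPLICATION_SETTINGS', silent=True)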

View File

@@ -1,394 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask.ctx
~~~~~~~~~
Implements the objects required to keep the context.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from __future__ import with_statement
import sys
from functools import update_wrapper
from werkzeug.exceptions import HTTPException
from .globals import _request_ctx_stack, _app_ctx_stack
from .module import blueprint_is_module
from .signals import appcontext_pushed, appcontext_popped
class _AppCtxGlobals(object):
"""A plain object."""
def get(self, name, default=None):
return self.__dict__.get(name, default)
def __contains__(self, item):
return item in self.__dict__
def __iter__(self):
return iter(self.__dict__)
def __repr__(self):
top = _app_ctx_stack.top
if top is not None:
return '<flask.g of %r>' % top.app.name
return object.__repr__(self)
def after_this_request(f):
"""Executes a function after this request. This is useful to modify
response objects. The function is passed the response object and has
to return the same or a new one.
Example::
@app.route('/')
def index():
@after_this_request
def add_header(response):
response.headers['X-Foo'] = 'Parachute'
return response
return 'Hello World!'
This is more useful if a function other than the view function wants to
modify a response. For instance think of a decorator that wants to add
some headers without converting the return value into a response object.
.. versionadded:: 0.9
"""
_request_ctx_stack.top._after_request_functions.append(f)
return f
def copy_current_request_context(f):
"""A helper function that decorates a function to retain the current
request context. This is useful when working with greenlets. The moment
the function is decorated a copy of the request context is created and
then pushed when the function is called.
Example::
import gevent
from flask import copy_current_request_context
@app.route('/')
def index():
@copy_current_request_context
def do_some_work():
# do some work here, it can access flask.request like you
# would otherwise in the view function.
...
gevent.spawn(do_some_work)
return 'Regular response'
.. versionadded:: 0.10
"""
top = _request_ctx_stack.top
if top is None:
raise RuntimeError('This decorator can only be used at local scopes '
'when a request context is on the stack. For instance within '
'view functions.')
reqctx = top.copy()
def wrapper(*args, **kwargs):
with reqctx:
return f(*args, **kwargs)
return update_wrapper(wrapper, f)
def has_request_context():
"""If you have code that wants to test if a request context is there or
not this function can be used. For instance, you may want to take advantage
of request information if the request object is available, but fail
silently if it is unavailable.
::
class User(db.Model):
def __init__(self, username, remote_addr=None):
self.username = username
if remote_addr is None and has_request_context():
remote_addr = request.remote_addr
self.remote_addr = remote_addr
Alternatively you can also just test any of the context bound objects
(such as :class:`request` or :class:`g` for truthness)::
class User(db.Model):
def __init__(self, username, remote_addr=None):
self.username = username
if remote_addr is None and request:
remote_addr = request.remote_addr
self.remote_addr = remote_addr
.. versionadded:: 0.7
"""
return _request_ctx_stack.top is not None
def has_app_context():
"""Works like :func:`has_request_context` but for the application
context. You can also just do a boolean check on the
:data:`current_app` object instead.
.. versionadded:: 0.9
"""
return _app_ctx_stack.top is not None
class AppContext(object):
"""The application context binds an application object implicitly
to the current thread or greenlet, similar to how the
:class:`RequestContext` binds request information. The application
context is also implicitly created if a request context is created
but the application is not on top of the individual application
context.
"""
def __init__(self, app):
self.app = app
self.url_adapter = app.create_url_adapter(None)
self.g = app.app_ctx_globals_class()
# Like request context, app contexts can be pushed multiple times
# but there, a basic "refcount" is enough to track them.
self._refcnt = 0
def push(self):
"""Binds the app context to the current context."""
self._refcnt += 1
_app_ctx_stack.push(self)
appcontext_pushed.send(self.app)
def pop(self, exc=None):
"""Pops the app context."""
self._refcnt -= 1
if self._refcnt <= 0:
if exc is None:
exc = sys.exc_info()[1]
self.app.do_teardown_appcontext(exc)
rv = _app_ctx_stack.pop()
assert rv is self, 'Popped wrong app context. (%r instead of %r)' \
% (rv, self)
appcontext_popped.send(self.app)
def __enter__(self):
self.push()
return self
def __exit__(self, exc_type, exc_value, tb):
self.pop(exc_value)
class RequestContext(object):
"""The request context contains all request relevant information. It is
created at the beginning of the request and pushed to the
`_request_ctx_stack` and removed at the end of it. It will create the
URL adapter and request object for the WSGI environment provided.
Do not attempt to use this class directly, instead use
:meth:`~flask.Flask.test_request_context` and
:meth:`~flask.Flask.request_context` to create this object.
When the request context is popped, it will evaluate all the
functions registered on the application for teardown execution
(:meth:`~flask.Flask.teardown_request`).
The request context is automatically popped at the end of the request
for you. In debug mode the request context is kept around if
exceptions happen so that interactive debuggers have a chance to
introspect the data. With 0.4 this can also be forced for requests
that did not fail and outside of `DEBUG` mode. By setting
``'flask._preserve_context'`` to `True` on the WSGI environment the
context will not pop itself at the end of the request. This is used by
the :meth:`~flask.Flask.test_client` for example to implement the
deferred cleanup functionality.
You might find this helpful for unittests where you need the
information from the context local around for a little longer. Make
sure to properly :meth:`~werkzeug.LocalStack.pop` the stack yourself in
that situation, otherwise your unittests will leak memory.
"""
def __init__(self, app, environ, request=None):
self.app = app
if request is None:
request = app.request_class(environ)
self.request = request
self.url_adapter = app.create_url_adapter(self.request)
self.flashes = None
self.session = None
# Request contexts can be pushed multiple times and interleaved with
# other request contexts. Now only if the last level is popped we
# get rid of them. Additionally if an application context is missing
# one is created implicitly so for each level we add this information
self._implicit_app_ctx_stack = []
# indicator if the context was preserved. Next time another context
# is pushed the preserved context is popped.
self.preserved = False
# remembers the exception for pop if there is one in case the context
# preservation kicks in.
self._preserved_exc = None
# Functions that should be executed after the request on the response
# object. These will be called before the regular "after_request"
# functions.
self._after_request_functions = []
self.match_request()
# XXX: Support for deprecated functionality. This is going away with
# Flask 1.0
blueprint = self.request.blueprint
if blueprint is not None:
# better safe than sorry, we don't want to break code that
# already worked
bp = app.blueprints.get(blueprint)
if bp is not None and blueprint_is_module(bp):
self.request._is_old_module = True
def _get_g(self):
return _app_ctx_stack.top.g
def _set_g(self, value):
_app_ctx_stack.top.g = value
g = property(_get_g, _set_g)
del _get_g, _set_g
def copy(self):
"""Creates a copy of this request context with the same request object.
This can be used to move a request context to a different greenlet.
Because the actual request object is the same this cannot be used to
move a request context to a different thread unless access to the
request object is locked.
.. versionadded:: 0.10
"""
return self.__class__(self.app,
environ=self.request.environ,
request=self.request
)
def match_request(self):
"""Can be overridden by a subclass to hook into the matching
of the request.
"""
try:
url_rule, self.request.view_args = \
self.url_adapter.match(return_rule=True)
self.request.url_rule = url_rule
except HTTPException as e:
self.request.routing_exception = e
def push(self):
"""Binds the request context to the current context."""
# If an exception occurs in debug mode or if context preservation is
# activated under exception situations exactly one context stays
# on the stack. The rationale is that you want to access that
# information under debug situations. However if someone forgets to
# pop that context again we want to make sure that on the next push
# it's invalidated, otherwise we run at risk that something leaks
# memory. This is usually only a problem in testsuite since this
# functionality is not active in production environments.
top = _request_ctx_stack.top
if top is not None and top.preserved:
top.pop(top._preserved_exc)
# Before we push the request context we have to ensure that there
# is an application context.
app_ctx = _app_ctx_stack.top
if app_ctx is None or app_ctx.app != self.app:
app_ctx = self.app.app_context()
app_ctx.push()
self._implicit_app_ctx_stack.append(app_ctx)
else:
self._implicit_app_ctx_stack.append(None)
_request_ctx_stack.push(self)
# Open the session at the moment that the request context is
# available. This allows a custom open_session method to use the
# request context (e.g. code that access database information
# stored on `g` instead of the appcontext).
self.session = self.app.open_session(self.request)
if self.session is None:
self.session = self.app.make_null_session()
def pop(self, exc=None):
"""Pops the request context and unbinds it by doing that. This will
also trigger the execution of functions registered by the
:meth:`~flask.Flask.teardown_request` decorator.
.. versionchanged:: 0.9
Added the `exc` argument.
"""
app_ctx = self._implicit_app_ctx_stack.pop()
clear_request = False
if not self._implicit_app_ctx_stack:
self.preserved = False
self._preserved_exc = None
if exc is None:
exc = sys.exc_info()[1]
self.app.do_teardown_request(exc)
# If this interpreter supports clearing the exception information
# we do that now. This will only go into effect on Python 2.x,
# on 3.x it disappears automatically at the end of the exception
# stack.
if hasattr(sys, 'exc_clear'):
sys.exc_clear()
request_close = getattr(self.request, 'close', None)
if request_close is not None:
request_close()
clear_request = True
rv = _request_ctx_stack.pop()
assert rv is self, 'Popped wrong request context. (%r instead of %r)' \
% (rv, self)
# get rid of circular dependencies at the end of the request
# so that we don't require the GC to be active.
if clear_request:
rv.request.environ['werkzeug.request'] = None
# Get rid of the app as well if necessary.
if app_ctx is not None:
app_ctx.pop(exc)
def auto_pop(self, exc):
if self.request.environ.get('flask._preserve_context') or \
(exc is not None and self.app.preserve_context_on_exception):
self.preserved = True
self._preserved_exc = exc
else:
self.pop(exc)
def __enter__(self):
self.push()
return self
def __exit__(self, exc_type, exc_value, tb):
# do not pop the request stack if we are in debug mode and an
# exception happened. This will allow the debugger to still
# access the request object in the interactive shell. Furthermore
# the context can be force kept alive for the test client.
# See flask.testing for how this works.
self.auto_pop(exc_value)
def __repr__(self):
return '<%s \'%s\' [%s] of %s>' % (
self.__class__.__name__,
self.request.url,
self.request.method,
self.app.name,
)
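
A brief sketch of how these two context types are normally driven from application code, rather than instantiated directly:

from flask import Flask, current_app, request

app = Flask(__name__)

# AppContext is pushed and popped through the context manager
# returned by app_context()
with app.app_context():
    print(current_app.name)

# RequestContext is created indirectly, here via test_request_context(),
# as the docstring above advises
with app.test_request_context('/?answer=42'):
    print(request.args['answer'])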

View File

@@ -1,87 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask.debughelpers
~~~~~~~~~~~~~~~~~~
Various helpers to make the development experience better.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from ._compat import implements_to_string
class UnexpectedUnicodeError(AssertionError, UnicodeError):
"""Raised in places where we want some better error reporting for
unexpected unicode or binary data.
"""
@implements_to_string
class DebugFilesKeyError(KeyError, AssertionError):
"""Raised from request.files during debugging. The idea is that it can
provide a better error message than just a generic KeyError/BadRequest.
"""
def __init__(self, request, key):
form_matches = request.form.getlist(key)
buf = ['You tried to access the file "%s" in the request.files '
'dictionary but it does not exist. The mimetype for the request '
'is "%s" instead of "multipart/form-data" which means that no '
'file contents were transmitted. To fix this error you should '
'provide enctype="multipart/form-data" in your form.' %
(key, request.mimetype)]
if form_matches:
buf.append('\n\nThe browser instead transmitted some file names. '
'This was submitted: %s' % ', '.join('"%s"' % x
for x in form_matches))
self.msg = ''.join(buf)
def __str__(self):
return self.msg
class FormDataRoutingRedirect(AssertionError):
"""This exception is raised by Flask in debug mode if it detects a
redirect caused by the routing system when the request method is not
GET, HEAD or OPTIONS. Reasoning: form data will be dropped.
"""
def __init__(self, request):
exc = request.routing_exception
buf = ['A request was sent to this URL (%s) but a redirect was '
'issued automatically by the routing system to "%s".'
% (request.url, exc.new_url)]
# In case just a slash was appended we can be extra helpful
if request.base_url + '/' == exc.new_url.split('?')[0]:
buf.append(' The URL was defined with a trailing slash so '
'Flask will automatically redirect to the URL '
'with the trailing slash if it was accessed '
'without one.')
buf.append(' Make sure to directly send your %s-request to this URL '
'since we can\'t make browsers or HTTP clients redirect '
'with form data reliably or without user interaction.' %
request.method)
buf.append('\n\nNote: this exception is only raised in debug mode')
AssertionError.__init__(self, ''.join(buf).encode('utf-8'))
def attach_enctype_error_multidict(request):
"""Since Flask 0.8 we're monkeypatching the files object in case a
request is detected that does not use multipart form data but the files
object is accessed.
"""
oldcls = request.files.__class__
class newcls(oldcls):
def __getitem__(self, key):
try:
return oldcls.__getitem__(self, key)
except KeyError as e:
if key not in request.form:
raise
raise DebugFilesKeyError(request, key)
newcls.__name__ = oldcls.__name__
newcls.__module__ = oldcls.__module__
request.files.__class__ = newcls
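
For context, a small sketch of the situation DebugFilesKeyError is built for; the route and field names are hypothetical:

from flask import Flask, request

app = Flask(__name__)
app.debug = True

@app.route('/upload', methods=['POST'])
def upload():
    # If the submitting form lacks enctype="multipart/form-data", the field
    # arrives as plain form data rather than a file. In debug mode Flask swaps
    # in the patched files class from attach_enctype_error_multidict(), so
    # this lookup raises DebugFilesKeyError with the message assembled above
    # instead of a bare KeyError.
    uploaded = request.files['document']
    return uploaded.filename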

View File

@@ -1,29 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask.ext
~~~~~~~~~
Redirect imports for extensions. This module basically makes it possible
for us to transition from flaskext.foo to flask_foo without having to
force all extensions to upgrade at the same time.
When a user does ``from flask.ext.foo import bar`` it will attempt to
import ``from flask_foo import bar`` first and when that fails it will
try to import ``from flaskext.foo import bar``.
We're switching from namespace packages because it was just too painful for
everybody involved.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
def setup():
from ..exthook import ExtensionImporter
importer = ExtensionImporter(['flask_%s', 'flaskext.%s'], __name__)
importer.install()
setup()
del setup
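
A tiny sketch of the redirect this hook enables; flask_foo / flaskext.foo is a hypothetical extension that is probably not installed, so the import is guarded:

try:
    # resolved as flask_foo first, then as the legacy flaskext.foo
    from flask.ext import foo
except ImportError:
    foo = None  # neither distribution of the hypothetical extension exists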

View File

@@ -1,120 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask.exthook
~~~~~~~~~~~~~
Redirect imports for extensions. This module basically makes it possible
for us to transition from flaskext.foo to flask_foo without having to
force all extensions to upgrade at the same time.
When a user does ``from flask.ext.foo import bar`` it will attempt to
import ``from flask_foo import bar`` first and when that fails it will
try to import ``from flaskext.foo import bar``.
We're switching from namespace packages because it was just too painful for
everybody involved.
This is used by `flask.ext`.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import sys
import os
from ._compat import reraise
class ExtensionImporter(object):
"""This importer redirects imports from this submodule to other locations.
This makes it possible to transition from the old flaskext.name to the
newer flask_name without people having a hard time.
"""
def __init__(self, module_choices, wrapper_module):
self.module_choices = module_choices
self.wrapper_module = wrapper_module
self.prefix = wrapper_module + '.'
self.prefix_cutoff = wrapper_module.count('.') + 1
def __eq__(self, other):
return self.__class__.__module__ == other.__class__.__module__ and \
self.__class__.__name__ == other.__class__.__name__ and \
self.wrapper_module == other.wrapper_module and \
self.module_choices == other.module_choices
def __ne__(self, other):
return not self.__eq__(other)
def install(self):
sys.meta_path[:] = [x for x in sys.meta_path if self != x] + [self]
def find_module(self, fullname, path=None):
if fullname.startswith(self.prefix):
return self
def load_module(self, fullname):
if fullname in sys.modules:
return sys.modules[fullname]
modname = fullname.split('.', self.prefix_cutoff)[self.prefix_cutoff]
for path in self.module_choices:
realname = path % modname
try:
__import__(realname)
except ImportError:
exc_type, exc_value, tb = sys.exc_info()
# since we only establish the entry in sys.modules at the very
# end this seems to be redundant, but if recursive imports happen
# we will call into the same import a second time.
# On the second invocation we still don't have an entry for
# fullname in sys.modules, but we will end up with the same
# fake module name and that import will succeed since this
# one already has a temporary entry in the modules dict.
# Since this one "succeeded" temporarily that second
# invocation now will have created a fullname entry in
# sys.modules which we have to kill.
sys.modules.pop(fullname, None)
# If it's an important traceback we reraise it, otherwise
# we swallow it and try the next choice. The skipped frame
# is the one from __import__ above which we don't care about
if self.is_important_traceback(realname, tb):
reraise(exc_type, exc_value, tb.tb_next)
continue
module = sys.modules[fullname] = sys.modules[realname]
if '.' not in modname:
setattr(sys.modules[self.wrapper_module], modname, module)
return module
raise ImportError('No module named %s' % fullname)
def is_important_traceback(self, important_module, tb):
"""Walks a traceback's frames and checks if any of the frames
originated in the given important module. If that is the case then we
were able to import the module itself but apparently something went
wrong when the module was imported. (Eg: import of an import failed).
"""
while tb is not None:
if self.is_important_frame(important_module, tb):
return True
tb = tb.tb_next
return False
def is_important_frame(self, important_module, tb):
"""Checks a single frame if it's important."""
g = tb.tb_frame.f_globals
if '__name__' not in g:
return False
module_name = g['__name__']
# Python 2.7 Behavior. Modules are cleaned up late so the
# name shows up properly here. Success!
if module_name == important_module:
return True
# Some python versions will clean up modules so early that the
# module name at that point is no longer set. Try guessing from
# the filename then.
filename = os.path.abspath(tb.tb_frame.f_code.co_filename)
test_string = os.path.sep + important_module.replace('.', os.path.sep)
return test_string + '.py' in filename or \
test_string + os.path.sep + '__init__.py' in filename
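
The importer is generic enough to reuse outside Flask; a hedged sketch with entirely hypothetical package names (no redirected import is actually attempted here):

import sys
import types

from flask.exthook import ExtensionImporter

# stub out a wrapper package "myproject.ext" whose submodule imports
# should be redirected, mirroring what flask/ext/__init__.py does
pkg = types.ModuleType('myproject')
pkg.__path__ = []
sys.modules['myproject'] = pkg
ext = types.ModuleType('myproject.ext')
ext.__path__ = []
pkg.ext = ext
sys.modules['myproject.ext'] = ext

importer = ExtensionImporter(['myproject_%s', 'myprojectext.%s'], 'myproject.ext')
importer.install()

# "from myproject.ext import foo" would now try myproject_foo first, fall
# back to myprojectext.foo, and raise ImportError if neither is importable.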

View File

@@ -1,44 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask.globals
~~~~~~~~~~~~~
Defines all the global objects that are proxies to the current
active context.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from functools import partial
from werkzeug.local import LocalStack, LocalProxy
def _lookup_req_object(name):
top = _request_ctx_stack.top
if top is None:
raise RuntimeError('working outside of request context')
return getattr(top, name)
def _lookup_app_object(name):
top = _app_ctx_stack.top
if top is None:
raise RuntimeError('working outside of application context')
return getattr(top, name)
def _find_app():
top = _app_ctx_stack.top
if top is None:
raise RuntimeError('working outside of application context')
return top.app
# context locals
_request_ctx_stack = LocalStack()
_app_ctx_stack = LocalStack()
current_app = LocalProxy(_find_app)
request = LocalProxy(partial(_lookup_req_object, 'request'))
session = LocalProxy(partial(_lookup_req_object, 'session'))
g = LocalProxy(partial(_lookup_app_object, 'g'))
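
A short sketch of how these proxies behave relative to the two stacks:

from flask import Flask, current_app, g, request

app = Flask(__name__)

with app.app_context():
    # only the application context is pushed: current_app and g resolve...
    g.answer = 42
    print(current_app.name, g.answer)
    # ...but touching `request` here would raise the RuntimeError
    # 'working outside of request context' from _lookup_req_object.

with app.test_request_context('/ping'):
    # a request context implicitly pushes an application context too,
    # so all four proxies resolve inside this block
    print(request.path, current_app.name)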

View File

@@ -1,849 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask.helpers
~~~~~~~~~~~~~
Implements various helpers.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import os
import sys
import pkgutil
import posixpath
import mimetypes
from time import time
from zlib import adler32
from threading import RLock
from werkzeug.routing import BuildError
from functools import update_wrapper
try:
from werkzeug.urls import url_quote
except ImportError:
from urlparse import quote as url_quote
from werkzeug.datastructures import Headers
from werkzeug.exceptions import NotFound
# this was moved in 0.7
try:
from werkzeug.wsgi import wrap_file
except ImportError:
from werkzeug.utils import wrap_file
from jinja2 import FileSystemLoader
from .signals import message_flashed
from .globals import session, _request_ctx_stack, _app_ctx_stack, \
current_app, request
from ._compat import string_types, text_type
# sentinel
_missing = object()
# what separators does this operating system provide that are not a slash?
# this is used by the send_from_directory function to ensure that nobody is
# able to access files from outside the filesystem.
_os_alt_seps = list(sep for sep in [os.path.sep, os.path.altsep]
if sep not in (None, '/'))
def _endpoint_from_view_func(view_func):
"""Internal helper that returns the default endpoint for a given
function. This always is the function name.
"""
assert view_func is not None, 'expected view func if endpoint ' \
'is not provided.'
return view_func.__name__
def stream_with_context(generator_or_function):
"""Request contexts disappear when the response is started on the server.
This is done for efficiency reasons and to make it less likely to encounter
memory leaks with badly written WSGI middlewares. The downside is that if
you are using streamed responses, the generator cannot access request bound
information any more.
This function however can help you keep the context around for longer::
from flask import stream_with_context, request, Response
@app.route('/stream')
def streamed_response():
@stream_with_context
def generate():
yield 'Hello '
yield request.args['name']
yield '!'
return Response(generate())
Alternatively it can also be used around a specific generator::
from flask import stream_with_context, request, Response
@app.route('/stream')
def streamed_response():
def generate():
yield 'Hello '
yield request.args['name']
yield '!'
return Response(stream_with_context(generate()))
.. versionadded:: 0.9
"""
try:
gen = iter(generator_or_function)
except TypeError:
def decorator(*args, **kwargs):
gen = generator_or_function()
return stream_with_context(gen)
return update_wrapper(decorator, generator_or_function)
def generator():
ctx = _request_ctx_stack.top
if ctx is None:
raise RuntimeError('Attempted to stream with context but '
'there was no context in the first place to keep around.')
with ctx:
# Dummy sentinel. Has to be inside the context block or we're
# not actually keeping the context around.
yield None
# The try/finally is here so that if someone passes a WSGI level
# iterator in we're still running the cleanup logic. Generators
# don't need that because they are closed on their destruction
# automatically.
try:
for item in gen:
yield item
finally:
if hasattr(gen, 'close'):
gen.close()
# The trick is to start the generator. Then the code execution runs until
# the first dummy None is yielded at which point the context was already
# pushed. This item is discarded. Then when the iteration continues the
# real generator is executed.
wrapped_g = generator()
next(wrapped_g)
return wrapped_g
def make_response(*args):
"""Sometimes it is necessary to set additional headers in a view. Because
views do not have to return response objects but can return a value that
is converted into a response object by Flask itself, it becomes tricky to
add headers to it. This function can be called instead of using a return
and you will get a response object which you can use to attach headers.
If a view looked like this and you want to add a new header::
def index():
return render_template('index.html', foo=42)
You can now do something like this::
def index():
response = make_response(render_template('index.html', foo=42))
response.headers['X-Parachutes'] = 'parachutes are cool'
return response
This function accepts the very same arguments you can return from a
view function. This for example creates a response with a 404 error
code::
response = make_response(render_template('not_found.html'), 404)
The other use case of this function is to force the return value of a
view function into a response which is helpful with view
decorators::
response = make_response(view_function())
response.headers['X-Parachutes'] = 'parachutes are cool'
Internally this function does the following things:
- if no arguments are passed, it creates a new response object
- if one argument is passed, :meth:`flask.Flask.make_response`
is invoked with it.
- if more than one argument is passed, the arguments are passed
to the :meth:`flask.Flask.make_response` function as tuple.
.. versionadded:: 0.6
"""
if not args:
return current_app.response_class()
if len(args) == 1:
args = args[0]
return current_app.make_response(args)
def url_for(endpoint, **values):
"""Generates a URL to the given endpoint with the method provided.
Variable arguments that are unknown to the target endpoint are appended
to the generated URL as query arguments. If the value of a query argument
is `None`, the whole pair is skipped. In case blueprints are active
you can shortcut references to the same blueprint by prefixing the
local endpoint with a dot (``.``).
This will reference the index function local to the current blueprint::
url_for('.index')
For more information, head over to the :ref:`Quickstart <url-building>`.
To integrate applications, :class:`Flask` has a hook to intercept URL build
errors through :attr:`Flask.build_error_handler`. The `url_for` function
results in a :exc:`~werkzeug.routing.BuildError` when the current app does
not have a URL for the given endpoint and values. When it does, the
:data:`~flask.current_app` calls its :attr:`~Flask.build_error_handler` if
it is not `None`, which can return a string to use as the result of
`url_for` (instead of `url_for`'s default to raise the
:exc:`~werkzeug.routing.BuildError` exception) or re-raise the exception.
An example::
def external_url_handler(error, endpoint, **values):
"Looks up an external URL when `url_for` cannot build a URL."
# This is an example of hooking the build_error_handler.
# Here, lookup_url is some utility function you've built
# which looks up the endpoint in some external URL registry.
url = lookup_url(endpoint, **values)
if url is None:
# External lookup did not have a URL.
# Re-raise the BuildError, in context of original traceback.
exc_type, exc_value, tb = sys.exc_info()
if exc_value is error:
raise exc_type, exc_value, tb
else:
raise error
# url_for will use this result, instead of raising BuildError.
return url
app.build_error_handler = external_url_handler
Here, `error` is the instance of :exc:`~werkzeug.routing.BuildError`, and
`endpoint` and `**values` are the arguments passed into `url_for`. Note
that this is for building URLs outside the current application, and not for
handling 404 NotFound errors.
.. versionadded:: 0.10
The `_scheme` parameter was added.
.. versionadded:: 0.9
The `_anchor` and `_method` parameters were added.
.. versionadded:: 0.9
Calls :meth:`Flask.handle_build_error` on
:exc:`~werkzeug.routing.BuildError`.
:param endpoint: the endpoint of the URL (name of the function)
:param values: the variable arguments of the URL rule
:param _external: if set to `True`, an absolute URL is generated. Server
address can be changed via `SERVER_NAME` configuration variable which
defaults to `localhost`.
:param _scheme: a string specifying the desired URL scheme. The `_external`
parameter must be set to `True` or a `ValueError` is raised.
:param _anchor: if provided this is added as anchor to the URL.
:param _method: if provided this explicitly specifies an HTTP method.
"""
appctx = _app_ctx_stack.top
reqctx = _request_ctx_stack.top
if appctx is None:
raise RuntimeError('Attempted to generate a URL without the '
'application context being pushed. This has to be '
'executed when application context is available.')
# If request specific information is available we have some extra
# features that support "relative" urls.
if reqctx is not None:
url_adapter = reqctx.url_adapter
blueprint_name = request.blueprint
if not reqctx.request._is_old_module:
if endpoint[:1] == '.':
if blueprint_name is not None:
endpoint = blueprint_name + endpoint
else:
endpoint = endpoint[1:]
else:
# TODO: get rid of this deprecated functionality in 1.0
if '.' not in endpoint:
if blueprint_name is not None:
endpoint = blueprint_name + '.' + endpoint
elif endpoint.startswith('.'):
endpoint = endpoint[1:]
external = values.pop('_external', False)
# Otherwise go with the url adapter from the appctx and make
# the urls external by default.
else:
url_adapter = appctx.url_adapter
if url_adapter is None:
raise RuntimeError('Application was not able to create a URL '
'adapter for request independent URL generation. '
'You might be able to fix this by setting '
'the SERVER_NAME config variable.')
external = values.pop('_external', True)
anchor = values.pop('_anchor', None)
method = values.pop('_method', None)
scheme = values.pop('_scheme', None)
appctx.app.inject_url_defaults(endpoint, values)
if scheme is not None:
if not external:
raise ValueError('When specifying _scheme, _external must be True')
url_adapter.url_scheme = scheme
try:
rv = url_adapter.build(endpoint, values, method=method,
force_external=external)
except BuildError as error:
# We need to inject the values again so that the app callback can
# deal with that sort of stuff.
values['_external'] = external
values['_anchor'] = anchor
values['_method'] = method
return appctx.app.handle_url_build_error(error, endpoint, values)
if anchor is not None:
rv += '#' + url_quote(anchor)
return rv
def get_template_attribute(template_name, attribute):
"""Loads a macro (or variable) a template exports. This can be used to
invoke a macro from within Python code. If you for example have a
template named `_cider.html` with the following contents:
.. sourcecode:: html+jinja
{% macro hello(name) %}Hello {{ name }}!{% endmacro %}
You can access this from Python code like this::
hello = get_template_attribute('_cider.html', 'hello')
return hello('World')
.. versionadded:: 0.2
:param template_name: the name of the template
:param attribute: the name of the variable of macro to access
"""
return getattr(current_app.jinja_env.get_template(template_name).module,
attribute)
def flash(message, category='message'):
"""Flashes a message to the next request. In order to remove the
flashed message from the session and to display it to the user,
the template has to call :func:`get_flashed_messages`.
.. versionchanged:: 0.3
`category` parameter added.
:param message: the message to be flashed.
:param category: the category for the message. The following values
are recommended: ``'message'`` for any kind of message,
``'error'`` for errors, ``'info'`` for information
messages and ``'warning'`` for warnings. However any
kind of string can be used as category.
"""
# Original implementation:
#
# session.setdefault('_flashes', []).append((category, message))
#
# This assumed that changes made to mutable structures in the session are
# always in sync with the session object, which is not true for session
# implementations that use external storage for keeping their keys/values.
flashes = session.get('_flashes', [])
flashes.append((category, message))
session['_flashes'] = flashes
message_flashed.send(current_app._get_current_object(),
message=message, category=category)
def get_flashed_messages(with_categories=False, category_filter=[]):
"""Pulls all flashed messages from the session and returns them.
Further calls in the same request to the function will return
the same messages. By default just the messages are returned,
but when `with_categories` is set to `True`, the return value will
be a list of tuples in the form ``(category, message)`` instead.
Filter the flashed messages to one or more categories by providing those
categories in `category_filter`. This allows rendering categories in
separate html blocks. The `with_categories` and `category_filter`
arguments are distinct:
* `with_categories` controls whether categories are returned with message
text (`True` gives a tuple, where `False` gives just the message text).
* `category_filter` filters the messages down to only those matching the
provided categories.
See :ref:`message-flashing-pattern` for examples.
.. versionchanged:: 0.3
`with_categories` parameter added.
.. versionchanged:: 0.9
`category_filter` parameter added.
:param with_categories: set to `True` to also receive categories.
:param category_filter: whitelist of categories to limit return values
"""
flashes = _request_ctx_stack.top.flashes
if flashes is None:
_request_ctx_stack.top.flashes = flashes = session.pop('_flashes') \
if '_flashes' in session else []
if category_filter:
flashes = list(filter(lambda f: f[0] in category_filter, flashes))
if not with_categories:
return [x[1] for x in flashes]
return flashes
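# A compact sketch tying flash() and get_flashed_messages() together; the
# endpoint name and inline template are illustrative, and a secret key is
# required because flashes are stored in the session:
from flask import Flask, flash, render_template_string

sketch_app = Flask(__name__)
sketch_app.secret_key = 'development key'

@sketch_app.route('/save')
def save():
    flash('Settings saved', 'info')
    # templates see get_flashed_messages() as a Jinja global
    return render_template_string(
        '{% for cat, msg in get_flashed_messages(with_categories=true) %}'
        '[{{ cat }}] {{ msg }}{% endfor %}')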
def send_file(filename_or_fp, mimetype=None, as_attachment=False,
attachment_filename=None, add_etags=True,
cache_timeout=None, conditional=False):
"""Sends the contents of a file to the client. This will use the
most efficient method available and configured. By default it will
try to use the WSGI server's file_wrapper support. Alternatively
you can set the application's :attr:`~Flask.use_x_sendfile` attribute
to ``True`` to directly emit an `X-Sendfile` header. This however
requires support for `X-Sendfile` in the underlying webserver.
By default it will try to guess the mimetype for you, but you can
also explicitly provide one. For extra security you probably want
to send certain files as attachments (HTML for instance). The mimetype
guessing requires a `filename` or an `attachment_filename` to be
provided.
Please never pass filenames to this function from user sources without
checking them first. Something like this is usually sufficient to
avoid security problems::
if '..' in filename or filename.startswith('/'):
abort(404)
.. versionadded:: 0.2
.. versionadded:: 0.5
The `add_etags`, `cache_timeout` and `conditional` parameters were
added. The default behavior is now to attach etags.
.. versionchanged:: 0.7
mimetype guessing and etag support for file objects were
deprecated because they were unreliable. Pass a filename if you are
able to, otherwise attach an etag yourself. This functionality
will be removed in Flask 1.0
.. versionchanged:: 0.9
cache_timeout pulls its default from application config, when None.
:param filename_or_fp: the filename of the file to send. This is
relative to the :attr:`~Flask.root_path` if a
relative path is specified.
Alternatively a file object might be provided
in which case `X-Sendfile` might not work and
fall back to the traditional method. Make sure
that the file pointer is positioned at the start
of data to send before calling :func:`send_file`.
:param mimetype: the mimetype of the file if provided, otherwise
auto detection happens.
:param as_attachment: set to `True` if you want to send this file with
a ``Content-Disposition: attachment`` header.
:param attachment_filename: the filename for the attachment if it
differs from the file's filename.
:param add_etags: set to `False` to disable attaching of etags.
:param conditional: set to `True` to enable conditional responses.
:param cache_timeout: the timeout in seconds for the headers. When `None`
(default), this value is set by
:meth:`~Flask.get_send_file_max_age` of
:data:`~flask.current_app`.
"""
mtime = None
if isinstance(filename_or_fp, string_types):
filename = filename_or_fp
file = None
else:
from warnings import warn
file = filename_or_fp
filename = getattr(file, 'name', None)
# XXX: this behavior is now deprecated because it was unreliable.
# removed in Flask 1.0
if not attachment_filename and not mimetype \
and isinstance(filename, string_types):
warn(DeprecationWarning('The filename support for file objects '
'passed to send_file is now deprecated. Pass an '
'attachment_filename if you want mimetypes to be guessed.'),
stacklevel=2)
if add_etags:
warn(DeprecationWarning('In future flask releases etags will no '
'longer be generated for file objects passed to the send_file '
'function because this behavior was unreliable. Pass '
'filenames instead if possible, otherwise attach an etag '
'yourself based on another value'), stacklevel=2)
if filename is not None:
if not os.path.isabs(filename):
filename = os.path.join(current_app.root_path, filename)
if mimetype is None and (filename or attachment_filename):
mimetype = mimetypes.guess_type(filename or attachment_filename)[0]
if mimetype is None:
mimetype = 'application/octet-stream'
headers = Headers()
if as_attachment:
if attachment_filename is None:
if filename is None:
raise TypeError('filename unavailable, required for '
'sending as attachment')
attachment_filename = os.path.basename(filename)
headers.add('Content-Disposition', 'attachment',
filename=attachment_filename)
if current_app.use_x_sendfile and filename:
if file is not None:
file.close()
headers['X-Sendfile'] = filename
headers['Content-Length'] = os.path.getsize(filename)
data = None
else:
if file is None:
file = open(filename, 'rb')
mtime = os.path.getmtime(filename)
headers['Content-Length'] = os.path.getsize(filename)
data = wrap_file(request.environ, file)
rv = current_app.response_class(data, mimetype=mimetype, headers=headers,
direct_passthrough=True)
# if we know the file modification date, we can store it as
# the time of the last modification.
if mtime is not None:
rv.last_modified = int(mtime)
rv.cache_control.public = True
if cache_timeout is None:
cache_timeout = current_app.get_send_file_max_age(filename)
if cache_timeout is not None:
rv.cache_control.max_age = cache_timeout
rv.expires = int(time() + cache_timeout)
if add_etags and filename is not None:
rv.set_etag('flask-%s-%s-%s' % (
os.path.getmtime(filename),
os.path.getsize(filename),
adler32(
filename.encode('utf-8') if isinstance(filename, text_type)
else filename
) & 0xffffffff
))
if conditional:
rv = rv.make_conditional(request)
# make sure we don't send x-sendfile for servers that
# ignore the 304 status code for x-sendfile.
if rv.status_code == 304:
rv.headers.pop('x-sendfile', None)
return rv
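# Illustrative usage sketch; ``app`` and the path ``'static/report.pdf'``
# are assumptions made up for the example (the relative path is resolved
# against the application's ``root_path``):
#
#     @app.route('/report')
#     def report():
#         return send_file('static/report.pdf',
#                          mimetype='application/pdf',
#                          as_attachment=True,
#                          attachment_filename='monthly-report.pdf')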
def safe_join(directory, filename):
"""Safely join `directory` and `filename`.
Example usage::
@app.route('/wiki/<path:filename>')
def wiki_page(filename):
filename = safe_join(app.config['WIKI_FOLDER'], filename)
with open(filename, 'rb') as fd:
content = fd.read() # Read and process the file content...
:param directory: the base directory.
:param filename: the untrusted filename relative to that directory.
:raises: :class:`~werkzeug.exceptions.NotFound` if the resulting path
would fall out of `directory`.
"""
filename = posixpath.normpath(filename)
for sep in _os_alt_seps:
if sep in filename:
raise NotFound()
if os.path.isabs(filename) or \
filename == '..' or \
filename.startswith('../'):
raise NotFound()
return os.path.join(directory, filename)
def send_from_directory(directory, filename, **options):
"""Send a file from a given directory with :func:`send_file`. This
is a secure way to quickly expose static files from an upload folder
or something similar.
Example usage::
@app.route('/uploads/<path:filename>')
def download_file(filename):
return send_from_directory(app.config['UPLOAD_FOLDER'],
filename, as_attachment=True)
.. admonition:: Sending files and Performance
It is strongly recommended to activate either `X-Sendfile` support in
your webserver or (if no authentication happens) to tell the webserver
to serve files for the given path on its own without calling into the
web application for improved performance.
.. versionadded:: 0.5
:param directory: the directory where all the files are stored.
:param filename: the filename relative to that directory to
download.
:param options: optional keyword arguments that are directly
forwarded to :func:`send_file`.
"""
filename = safe_join(directory, filename)
if not os.path.isfile(filename):
raise NotFound()
options.setdefault('conditional', True)
return send_file(filename, **options)
def get_root_path(import_name):
"""Returns the path to a package or cwd if that cannot be found. This
returns the path of a package or the folder that contains a module.
Not to be confused with the package path returned by :func:`find_package`.
"""
# Module already imported and has a file attribute. Use that first.
mod = sys.modules.get(import_name)
if mod is not None and hasattr(mod, '__file__'):
return os.path.dirname(os.path.abspath(mod.__file__))
# Next attempt: check the loader.
loader = pkgutil.get_loader(import_name)
# Loader does not exist or we're referring to an unloaded main module
# or a main module without path (interactive sessions), go with the
# current working directory.
if loader is None or import_name == '__main__':
return os.getcwd()
# For .egg, zipimporter does not have get_filename until Python 2.7.
# Some other loaders might exhibit the same behavior.
if hasattr(loader, 'get_filename'):
filepath = loader.get_filename(import_name)
else:
# Fall back to imports.
__import__(import_name)
filepath = sys.modules[import_name].__file__
# filepath is import_name.py for a module, or __init__.py for a package.
return os.path.dirname(os.path.abspath(filepath))
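# Illustrative sketch of the two cases described above; the results depend
# on the local installation, so the comments are examples only:
#
#     get_root_path('flask')       # the flask package directory itself
#     get_root_path('__main__')    # os.getcwd() in an interactive session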
def find_package(import_name):
"""Finds a package and returns the prefix (or None if the package is
not installed) as well as the folder that contains the package or
module as a tuple. The package path returned is the module that would
have to be added to the pythonpath in order to make it possible to
import the module. The prefix is the path below which a UNIX like
folder structure exists (lib, share etc.).
"""
root_mod_name = import_name.split('.')[0]
loader = pkgutil.get_loader(root_mod_name)
if loader is None or import_name == '__main__':
# import name is not found, or interactive/main module
package_path = os.getcwd()
else:
# For .egg, zipimporter does not have get_filename until Python 2.7.
if hasattr(loader, 'get_filename'):
filename = loader.get_filename(root_mod_name)
elif hasattr(loader, 'archive'):
# zipimporter's loader.archive points to the .egg or .zip
# archive filename is dropped in call to dirname below.
filename = loader.archive
else:
# At least one loader is missing both get_filename and archive:
# Google App Engine's HardenedModulesHook
#
# Fall back to imports.
__import__(import_name)
filename = sys.modules[import_name].__file__
package_path = os.path.abspath(os.path.dirname(filename))
# package_path ends with __init__.py for a package
if loader.is_package(root_mod_name):
package_path = os.path.dirname(package_path)
site_parent, site_folder = os.path.split(package_path)
py_prefix = os.path.abspath(sys.prefix)
if package_path.startswith(py_prefix):
return py_prefix, package_path
elif site_folder.lower() == 'site-packages':
parent, folder = os.path.split(site_parent)
# Windows like installations
if folder.lower() == 'lib':
base_dir = parent
# UNIX like installations
elif os.path.basename(parent).lower() == 'lib':
base_dir = os.path.dirname(parent)
else:
base_dir = site_parent
return base_dir, package_path
return None, package_path
class locked_cached_property(object):
"""A decorator that converts a function into a lazy property. The
function wrapped is called the first time to retrieve the result
and then that calculated result is used the next time you access
the value. Works like the one in Werkzeug but has a lock for
thread safety.
"""
def __init__(self, func, name=None, doc=None):
self.__name__ = name or func.__name__
self.__module__ = func.__module__
self.__doc__ = doc or func.__doc__
self.func = func
self.lock = RLock()
def __get__(self, obj, type=None):
if obj is None:
return self
with self.lock:
value = obj.__dict__.get(self.__name__, _missing)
if value is _missing:
value = self.func(obj)
obj.__dict__[self.__name__] = value
return value
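# Illustrative usage sketch; ``Repository`` and ``build_index`` are made-up
# placeholders for an expensive, compute-once attribute:
#
#     class Repository(object):
#         @locked_cached_property
#         def index(self):
#             # computed on first access, then stored on the instance;
#             # the RLock keeps concurrent first accesses from racing
#             return build_index()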
class _PackageBoundObject(object):
def __init__(self, import_name, template_folder=None):
#: The name of the package or module. Do not change this once
#: it was set by the constructor.
self.import_name = import_name
#: location of the templates. `None` if templates should not be
#: exposed.
self.template_folder = template_folder
#: Where is the app root located?
self.root_path = get_root_path(self.import_name)
self._static_folder = None
self._static_url_path = None
def _get_static_folder(self):
if self._static_folder is not None:
return os.path.join(self.root_path, self._static_folder)
def _set_static_folder(self, value):
self._static_folder = value
static_folder = property(_get_static_folder, _set_static_folder)
del _get_static_folder, _set_static_folder
def _get_static_url_path(self):
if self._static_url_path is None:
if self.static_folder is None:
return None
return '/' + os.path.basename(self.static_folder)
return self._static_url_path
def _set_static_url_path(self, value):
self._static_url_path = value
static_url_path = property(_get_static_url_path, _set_static_url_path)
del _get_static_url_path, _set_static_url_path
@property
def has_static_folder(self):
"""This is `True` if the package bound object's container has a
folder named ``'static'``.
.. versionadded:: 0.5
"""
return self.static_folder is not None
@locked_cached_property
def jinja_loader(self):
"""The Jinja loader for this package bound object.
.. versionadded:: 0.5
"""
if self.template_folder is not None:
return FileSystemLoader(os.path.join(self.root_path,
self.template_folder))
def get_send_file_max_age(self, filename):
"""Provides default cache_timeout for the :func:`send_file` functions.
By default, this function returns ``SEND_FILE_MAX_AGE_DEFAULT`` from
the configuration of :data:`~flask.current_app`.
Static file functions such as :func:`send_from_directory` use this
function, and :func:`send_file` calls this function on
:data:`~flask.current_app` when the given cache_timeout is `None`. If a
cache_timeout is given in :func:`send_file`, that timeout is used;
otherwise, this method is called.
This allows subclasses to change the behavior when sending files based
on the filename. For example, to set the cache timeout for .js files
to 60 seconds::
class MyFlask(flask.Flask):
def get_send_file_max_age(self, name):
if name.lower().endswith('.js'):
return 60
return flask.Flask.get_send_file_max_age(self, name)
.. versionadded:: 0.9
"""
return current_app.config['SEND_FILE_MAX_AGE_DEFAULT']
def send_static_file(self, filename):
"""Function used internally to send static files from the static
folder to the browser.
.. versionadded:: 0.5
"""
if not self.has_static_folder:
raise RuntimeError('No static folder for this object')
# Ensure get_send_file_max_age is called in all cases, including
# when the static folder belongs to a Blueprint.
cache_timeout = self.get_send_file_max_age(filename)
return send_from_directory(self.static_folder, filename,
cache_timeout=cache_timeout)
def open_resource(self, resource, mode='rb'):
"""Opens a resource from the application's resource folder. To see
how this works, consider the following folder structure::
/myapplication.py
/schema.sql
/static
/style.css
/templates
/layout.html
/index.html
If you want to open the `schema.sql` file you would do the
following::
with app.open_resource('schema.sql') as f:
contents = f.read()
do_something_with(contents)
:param resource: the name of the resource. To access resources within
subfolders use forward slashes as separator.
:param mode: resource file opening mode, default is 'rb'.
"""
if mode not in ('r', 'rb'):
raise ValueError('Resources can only be opened for reading')
return open(os.path.join(self.root_path, resource), mode)

View File

@ -1,243 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask.jsonimpl
~~~~~~~~~~~~~~
Implementation helpers for the JSON support in Flask.
:copyright: (c) 2012 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import io
import uuid
from datetime import datetime
from .globals import current_app, request
from ._compat import text_type, PY2
from werkzeug.http import http_date
from jinja2 import Markup
# Use the same json implementation as itsdangerous on which we
# depend anyways.
try:
from itsdangerous import simplejson as _json
except ImportError:
from itsdangerous import json as _json
# figure out if simplejson escapes slashes. This behavior was changed
# from one version to another without reason.
_slash_escape = '\\/' not in _json.dumps('/')
__all__ = ['dump', 'dumps', 'load', 'loads', 'htmlsafe_dump',
'htmlsafe_dumps', 'JSONDecoder', 'JSONEncoder',
'jsonify']
def _wrap_reader_for_text(fp, encoding):
if isinstance(fp.read(0), bytes):
fp = io.TextIOWrapper(io.BufferedReader(fp), encoding)
return fp
def _wrap_writer_for_text(fp, encoding):
try:
fp.write('')
except TypeError:
fp = io.TextIOWrapper(fp, encoding)
return fp
class JSONEncoder(_json.JSONEncoder):
"""The default Flask JSON encoder. This one extends the default simplejson
encoder by also supporting ``datetime`` objects, ``UUID`` as well as
``Markup`` objects which are serialized as RFC 822 datetime strings (same
as the HTTP date format). In order to support more data types override the
:meth:`default` method.
"""
def default(self, o):
"""Implement this method in a subclass such that it returns a
serializable object for ``o``, or calls the base implementation (to
raise a ``TypeError``).
For example, to support arbitrary iterators, you could implement
default like this::
def default(self, o):
try:
iterable = iter(o)
except TypeError:
pass
else:
return list(iterable)
return JSONEncoder.default(self, o)
"""
if isinstance(o, datetime):
return http_date(o)
if isinstance(o, uuid.UUID):
return str(o)
if hasattr(o, '__html__'):
return text_type(o.__html__())
return _json.JSONEncoder.default(self, o)
class JSONDecoder(_json.JSONDecoder):
"""The default JSON decoder. This one does not change the behavior from
the default simplejson encoder. Consult the :mod:`json` documentation
for more information. This decoder is not only used for the load
functions of this module but also :attr:`~flask.Request`.
"""
def _dump_arg_defaults(kwargs):
"""Inject default arguments for dump functions."""
if current_app:
kwargs.setdefault('cls', current_app.json_encoder)
if not current_app.config['JSON_AS_ASCII']:
kwargs.setdefault('ensure_ascii', False)
kwargs.setdefault('sort_keys', current_app.config['JSON_SORT_KEYS'])
else:
kwargs.setdefault('sort_keys', True)
kwargs.setdefault('cls', JSONEncoder)
def _load_arg_defaults(kwargs):
"""Inject default arguments for load functions."""
if current_app:
kwargs.setdefault('cls', current_app.json_decoder)
else:
kwargs.setdefault('cls', JSONDecoder)
def dumps(obj, **kwargs):
"""Serialize ``obj`` to a JSON formatted ``str`` by using the application's
configured encoder (:attr:`~flask.Flask.json_encoder`) if there is an
application on the stack.
This function can return ``unicode`` strings or ascii-only bytestrings by
default which coerce into unicode strings automatically. That behavior is
controlled by the ``JSON_AS_ASCII`` configuration variable and can be
overridden by the simplejson ``ensure_ascii`` parameter.
"""
_dump_arg_defaults(kwargs)
encoding = kwargs.pop('encoding', None)
rv = _json.dumps(obj, **kwargs)
if encoding is not None and isinstance(rv, text_type):
rv = rv.encode(encoding)
return rv
def dump(obj, fp, **kwargs):
"""Like :func:`dumps` but writes into a file object."""
_dump_arg_defaults(kwargs)
encoding = kwargs.pop('encoding', None)
if encoding is not None:
fp = _wrap_writer_for_text(fp, encoding)
_json.dump(obj, fp, **kwargs)
def loads(s, **kwargs):
"""Unserialize a JSON object from a string ``s`` by using the application's
configured decoder (:attr:`~flask.Flask.json_decoder`) if there is an
application on the stack.
"""
_load_arg_defaults(kwargs)
if isinstance(s, bytes):
s = s.decode(kwargs.pop('encoding', None) or 'utf-8')
return _json.loads(s, **kwargs)
def load(fp, **kwargs):
"""Like :func:`loads` but reads from a file object.
"""
_load_arg_defaults(kwargs)
if not PY2:
fp = _wrap_reader_for_text(fp, kwargs.pop('encoding', None) or 'utf-8')
return _json.load(fp, **kwargs)
def htmlsafe_dumps(obj, **kwargs):
"""Works exactly like :func:`dumps` but is safe for use in ``<script>``
tags. It accepts the same arguments and returns a JSON string. Note that
this is available in templates through the ``|tojson`` filter which will
also mark the result as safe. Due to how this function escapes certain
characters this is safe even if used outside of ``<script>`` tags.
The following characters are escaped in strings:
- ``<``
- ``>``
- ``&``
- ``'``
This makes it safe to embed such strings in any place in HTML with the
notable exception of double quoted attributes. In that case single
quote your attributes or HTML escape it in addition.
.. versionchanged:: 0.10
This function's return value is now always safe for HTML usage, even
if outside of script tags or if used in XHTML. This rule does not
hold true when using this function in HTML attributes that are double
quoted. Always single quote attributes if you use the ``|tojson``
filter. Alternatively use ``|tojson|forceescape``.
"""
rv = dumps(obj, **kwargs) \
.replace(u'<', u'\\u003c') \
.replace(u'>', u'\\u003e') \
.replace(u'&', u'\\u0026') \
.replace(u"'", u'\\u0027')
if not _slash_escape:
rv = rv.replace('\\/', '/')
return rv
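# Illustrative sketch of the escaping guarantee described above; the
# payload is a made-up example:
#
#     rv = htmlsafe_dumps({'msg': "<b>O'Neill & sons</b>"})
#     assert '<' not in rv and '>' not in rv
#     assert '&' not in rv and "'" not in rv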
def htmlsafe_dump(obj, fp, **kwargs):
"""Like :func:`htmlsafe_dumps` but writes into a file object."""
fp.write(unicode(htmlsafe_dumps(obj, **kwargs)))
def jsonify(*args, **kwargs):
"""Creates a :class:`~flask.Response` with the JSON representation of
the given arguments with an `application/json` mimetype. The arguments
to this function are the same as to the :class:`dict` constructor.
Example usage::
from flask import jsonify
@app.route('/_get_current_user')
def get_current_user():
return jsonify(username=g.user.username,
email=g.user.email,
id=g.user.id)
This will send a JSON response like this to the browser::
{
"username": "admin",
"email": "admin@localhost",
"id": 42
}
For security reasons only objects are supported at the top level. For more
information about this, have a look at :ref:`json-security`.
This function's response will be pretty printed if it was not requested
with ``X-Requested-With: XMLHttpRequest`` (to simplify debugging), unless
the ``JSONIFY_PRETTYPRINT_REGULAR`` config parameter is set to false.
.. versionadded:: 0.2
"""
indent = None
if current_app.config['JSONIFY_PRETTYPRINT_REGULAR'] \
and not request.is_xhr:
indent = 2
return current_app.response_class(dumps(dict(*args, **kwargs),
indent=indent),
mimetype='application/json')
def tojson_filter(obj, **kwargs):
return Markup(htmlsafe_dumps(obj, **kwargs))

View File

@ -1,45 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask.logging
~~~~~~~~~~~~~
Implements the logging support for Flask.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from logging import getLogger, StreamHandler, Formatter, getLoggerClass, DEBUG
def create_logger(app):
"""Creates a logger for the given application. This logger works
similar to a regular Python logger but changes the effective logging
level based on the application's debug flag. Furthermore this
function also removes all attached handlers in case there was a
logger with the log name before.
"""
Logger = getLoggerClass()
class DebugLogger(Logger):
def getEffectiveLevel(x):
if x.level == 0 and app.debug:
return DEBUG
return Logger.getEffectiveLevel(x)
class DebugHandler(StreamHandler):
def emit(x, record):
StreamHandler.emit(x, record) if app.debug else None
handler = DebugHandler()
handler.setLevel(DEBUG)
handler.setFormatter(Formatter(app.debug_log_format))
logger = getLogger(app.logger_name)
# just in case that was not a new logger, get rid of all the handlers
# already attached to it.
del logger.handlers[:]
logger.__class__ = DebugLogger
logger.addHandler(handler)
return logger
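# Illustrative sketch; the logger built here is normally reached through
# ``app.logger`` (``app`` is assumed), and with ``app.debug`` set it emits
# DEBUG records regardless of the configured level:
#
#     app.debug = True
#     app.logger.debug('request args: %r', {'page': 2})
#     app.logger.error('something went wrong')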

View File

@ -1,42 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask.module
~~~~~~~~~~~~
Implements a class that represents module blueprints.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import os
from .blueprints import Blueprint
def blueprint_is_module(bp):
"""Used to figure out if something is actually a module"""
return isinstance(bp, Module)
class Module(Blueprint):
"""Deprecated module support. Until Flask 0.6 modules were a different
name of the concept now available as blueprints in Flask. They are
essentially doing the same but have some bad semantics for templates and
static files that were fixed with blueprints.
.. versionchanged:: 0.7
Modules were deprecated in favor of blueprints.
"""
def __init__(self, import_name, name=None, url_prefix=None,
static_path=None, subdomain=None):
if name is None:
assert '.' in import_name, 'name required if package name ' \
'does not point to a submodule'
name = import_name.rsplit('.', 1)[1]
Blueprint.__init__(self, name, import_name, url_prefix=url_prefix,
subdomain=subdomain, template_folder='templates')
if os.path.isdir(os.path.join(self.root_path, 'static')):
self._static_folder = 'static'

View File

@ -1,332 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask.sessions
~~~~~~~~~~~~~~
Implements cookie based sessions based on itsdangerous.
:copyright: (c) 2012 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import uuid
import hashlib
from base64 import b64encode, b64decode
from datetime import datetime
from werkzeug.http import http_date, parse_date
from werkzeug.datastructures import CallbackDict
from . import Markup, json
from ._compat import iteritems, text_type
from itsdangerous import URLSafeTimedSerializer, BadSignature
def total_seconds(td):
return td.days * 60 * 60 * 24 + td.seconds
class SessionMixin(object):
"""Expands a basic dictionary with an accessors that are expected
by Flask extensions and users for the session.
"""
def _get_permanent(self):
return self.get('_permanent', False)
def _set_permanent(self, value):
self['_permanent'] = bool(value)
#: this reflects the ``'_permanent'`` key in the dict.
permanent = property(_get_permanent, _set_permanent)
del _get_permanent, _set_permanent
#: some session backends can tell you if a session is new, but that is
#: not necessarily guaranteed. Use with caution. The default mixin
#: implementation just hardcodes `False` in.
new = False
#: for some backends this will always be `True`, but some backends will
#: default this to `False` and detect changes in the dictionary for as
#: long as changes do not happen on mutable structures in the session.
#: The default mixin implementation just hardcodes `True` in.
modified = True
class TaggedJSONSerializer(object):
"""A customized JSON serializer that supports a few extra types that
we take for granted when serializing (tuples, markup objects, datetime).
"""
def dumps(self, value):
def _tag(value):
if isinstance(value, tuple):
return {' t': [_tag(x) for x in value]}
elif isinstance(value, uuid.UUID):
return {' u': value.hex}
elif isinstance(value, bytes):
return {' b': b64encode(value).decode('ascii')}
elif callable(getattr(value, '__html__', None)):
return {' m': text_type(value.__html__())}
elif isinstance(value, list):
return [_tag(x) for x in value]
elif isinstance(value, datetime):
return {' d': http_date(value)}
elif isinstance(value, dict):
return dict((k, _tag(v)) for k, v in iteritems(value))
elif isinstance(value, str):
try:
return text_type(value)
except UnicodeError:
raise UnexpectedUnicodeError(u'A byte string with '
u'non-ASCII data was passed to the session system '
u'which can only store unicode strings. Consider '
u'base64 encoding your string (String was %r)' % value)
return value
return json.dumps(_tag(value), separators=(',', ':'))
def loads(self, value):
def object_hook(obj):
if len(obj) != 1:
return obj
the_key, the_value = next(iteritems(obj))
if the_key == ' t':
return tuple(the_value)
elif the_key == ' u':
return uuid.UUID(the_value)
elif the_key == ' b':
return b64decode(the_value)
elif the_key == ' m':
return Markup(the_value)
elif the_key == ' d':
return parse_date(the_value)
return obj
return json.loads(value, object_hook=object_hook)
session_json_serializer = TaggedJSONSerializer()
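# Illustrative round-trip sketch for the tagging scheme above (tuples,
# datetimes, bytes and Markup objects survive serialization); the payload
# is a made-up example:
#
#     payload = session_json_serializer.dumps(
#         {'point': (1, 2), 'when': datetime(2014, 5, 24)})
#     data = session_json_serializer.loads(payload)
#     assert isinstance(data['point'], tuple)
#     assert isinstance(data['when'], datetime)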
class SecureCookieSession(CallbackDict, SessionMixin):
"""Baseclass for sessions based on signed cookies."""
def __init__(self, initial=None):
def on_update(self):
self.modified = True
CallbackDict.__init__(self, initial, on_update)
self.modified = False
class NullSession(SecureCookieSession):
"""Class used to generate nicer error messages if sessions are not
available. Will still allow read-only access to the empty session
but fail on setting.
"""
def _fail(self, *args, **kwargs):
raise RuntimeError('the session is unavailable because no secret '
'key was set. Set the secret_key on the '
'application to something unique and secret.')
__setitem__ = __delitem__ = clear = pop = popitem = \
update = setdefault = _fail
del _fail
class SessionInterface(object):
"""The basic interface you have to implement in order to replace the
default session interface which uses werkzeug's securecookie
implementation. The only methods you have to implement are
:meth:`open_session` and :meth:`save_session`, the others have
useful defaults which you don't need to change.
The session object returned by the :meth:`open_session` method has to
provide a dictionary like interface plus the properties and methods
from the :class:`SessionMixin`. We recommend just subclassing a dict
and adding that mixin::
class Session(dict, SessionMixin):
pass
If :meth:`open_session` returns `None` Flask will call into
:meth:`make_null_session` to create a session that acts as replacement
if the session support cannot work because some requirement is not
fulfilled. The default :class:`NullSession` class that is created
will complain that the secret key was not set.
To replace the session interface on an application all you have to do
is to assign :attr:`flask.Flask.session_interface`::
app = Flask(__name__)
app.session_interface = MySessionInterface()
.. versionadded:: 0.8
"""
#: :meth:`make_null_session` will look here for the class that should
#: be created when a null session is requested. Likewise the
#: :meth:`is_null_session` method will perform a typecheck against
#: this type.
null_session_class = NullSession
#: A flag that indicates if the session interface is pickle based.
#: This can be used by flask extensions to make a decision in regards
#: to how to deal with the session object.
#:
#: .. versionadded:: 0.10
pickle_based = False
def make_null_session(self, app):
"""Creates a null session which acts as a replacement object if the
real session support could not be loaded due to a configuration
error. This mainly aids the user experience because the job of the
null session is to still support lookups without complaining, while
modifications are answered with a helpful error message about what
failed.
This creates an instance of :attr:`null_session_class` by default.
"""
return self.null_session_class()
def is_null_session(self, obj):
"""Checks if a given object is a null session. Null sessions are
not asked to be saved.
This checks if the object is an instance of :attr:`null_session_class`
by default.
"""
return isinstance(obj, self.null_session_class)
def get_cookie_domain(self, app):
"""Helpful helper method that returns the cookie domain that should
be used for the session cookie if session cookies are used.
"""
if app.config['SESSION_COOKIE_DOMAIN'] is not None:
return app.config['SESSION_COOKIE_DOMAIN']
if app.config['SERVER_NAME'] is not None:
# chop off the port, which is usually not supported by browsers
rv = '.' + app.config['SERVER_NAME'].rsplit(':', 1)[0]
# Google Chrome does not like cookies set to .localhost, so
# we just go with no domain then. Flask documents anyway that
# cross-domain cookies need a fully qualified domain name.
if rv == '.localhost':
rv = None
# If we infer the cookie domain from the server name we need
# to check if we are in a subpath. In that case we can't
# set a cross domain cookie.
if rv is not None:
path = self.get_cookie_path(app)
if path != '/':
rv = rv.lstrip('.')
return rv
def get_cookie_path(self, app):
"""Returns the path for which the cookie should be valid. The
default implementation uses the value from the ``SESSION_COOKIE_PATH``
config var if it's set, and falls back to ``APPLICATION_ROOT`` or
uses ``/`` if it's `None`.
"""
return app.config['SESSION_COOKIE_PATH'] or \
app.config['APPLICATION_ROOT'] or '/'
def get_cookie_httponly(self, app):
"""Returns True if the session cookie should be httponly. This
currently just returns the value of the ``SESSION_COOKIE_HTTPONLY``
config var.
"""
return app.config['SESSION_COOKIE_HTTPONLY']
def get_cookie_secure(self, app):
"""Returns True if the cookie should be secure. This currently
just returns the value of the ``SESSION_COOKIE_SECURE`` setting.
"""
return app.config['SESSION_COOKIE_SECURE']
def get_expiration_time(self, app, session):
"""A helper method that returns an expiration date for the session
or `None` if the session is linked to the browser session. The
default implementation returns now + the permanent session
lifetime configured on the application.
"""
if session.permanent:
return datetime.utcnow() + app.permanent_session_lifetime
def open_session(self, app, request):
"""This method has to be implemented and must either return `None`
in case the loading failed because of a configuration error or an
instance of a session object which implements a dictionary-like
interface plus the methods and attributes on :class:`SessionMixin`.
"""
raise NotImplementedError()
def save_session(self, app, session, response):
"""This is called for actual sessions returned by :meth:`open_session`
at the end of the request. This is still called during a request
context so if you absolutely need access to the request you can do
that.
"""
raise NotImplementedError()
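# Illustrative sketch of a replacement interface; the in-process ``store``
# dict is a made-up stand-in for real external storage, and only
# open_session/save_session are implemented, as the docstring above requires
# (``app`` in the last line is assumed):
#
#     class DictSessionInterface(SessionInterface):
#         store = {}
#
#         def open_session(self, app, request):
#             sid = request.cookies.get(app.session_cookie_name)
#             sess = SecureCookieSession(self.store.get(sid) or {})
#             sess.sid = sid or uuid.uuid4().hex
#             return sess
#
#         def save_session(self, app, session, response):
#             self.store[session.sid] = dict(session)
#             response.set_cookie(app.session_cookie_name, session.sid,
#                                 path=self.get_cookie_path(app),
#                                 domain=self.get_cookie_domain(app))
#
#     app.session_interface = DictSessionInterface()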
class SecureCookieSessionInterface(SessionInterface):
"""The default session interface that stores sessions in signed cookies
through the :mod:`itsdangerous` module.
"""
#: the salt that should be applied on top of the secret key for the
#: signing of cookie based sessions.
salt = 'cookie-session'
#: the hash function to use for the signature. The default is sha1
digest_method = staticmethod(hashlib.sha1)
#: the name of the itsdangerous supported key derivation. The default
#: is hmac.
key_derivation = 'hmac'
#: A python serializer for the payload. The default is a compact
#: JSON derived serializer with support for some extra Python types
#: such as datetime objects or tuples.
serializer = session_json_serializer
session_class = SecureCookieSession
def get_signing_serializer(self, app):
if not app.secret_key:
return None
signer_kwargs = dict(
key_derivation=self.key_derivation,
digest_method=self.digest_method
)
return URLSafeTimedSerializer(app.secret_key, salt=self.salt,
serializer=self.serializer,
signer_kwargs=signer_kwargs)
def open_session(self, app, request):
s = self.get_signing_serializer(app)
if s is None:
return None
val = request.cookies.get(app.session_cookie_name)
if not val:
return self.session_class()
max_age = total_seconds(app.permanent_session_lifetime)
try:
data = s.loads(val, max_age=max_age)
return self.session_class(data)
except BadSignature:
return self.session_class()
def save_session(self, app, session, response):
domain = self.get_cookie_domain(app)
path = self.get_cookie_path(app)
if not session:
if session.modified:
response.delete_cookie(app.session_cookie_name,
domain=domain, path=path)
return
httponly = self.get_cookie_httponly(app)
secure = self.get_cookie_secure(app)
expires = self.get_expiration_time(app, session)
val = self.get_signing_serializer(app).dumps(dict(session))
response.set_cookie(app.session_cookie_name, val,
expires=expires, httponly=httponly,
domain=domain, path=path, secure=secure)
from flask.debughelpers import UnexpectedUnicodeError

View File

@ -1,55 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask.signals
~~~~~~~~~~~~~
Implements signals based on blinker if available, otherwise
falls silently back to a no-op.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
signals_available = False
try:
from blinker import Namespace
signals_available = True
except ImportError:
class Namespace(object):
def signal(self, name, doc=None):
return _FakeSignal(name, doc)
class _FakeSignal(object):
"""If blinker is unavailable, create a fake class with the same
interface that allows sending of signals but will fail with an
error on anything else. Instead of doing anything on send, it
will just ignore the arguments and do nothing instead.
"""
def __init__(self, name, doc=None):
self.name = name
self.__doc__ = doc
def _fail(self, *args, **kwargs):
raise RuntimeError('signalling support is unavailable '
'because the blinker library is '
'not installed.')
send = lambda *a, **kw: None
connect = disconnect = has_receivers_for = receivers_for = \
temporarily_connected_to = connected_to = _fail
del _fail
# the namespace for code signals. If you are not flask code, do
# not put signals in here. Create your own namespace instead.
_signals = Namespace()
# core signals. For usage examples grep the sourcecode or consult
# the API documentation in docs/api.rst as well as docs/signals.rst
template_rendered = _signals.signal('template-rendered')
request_started = _signals.signal('request-started')
request_finished = _signals.signal('request-finished')
request_tearing_down = _signals.signal('request-tearing-down')
got_request_exception = _signals.signal('got-request-exception')
appcontext_tearing_down = _signals.signal('appcontext-tearing-down')
appcontext_pushed = _signals.signal('appcontext-pushed')
appcontext_popped = _signals.signal('appcontext-popped')
message_flashed = _signals.signal('message-flashed')
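# Illustrative sketch of subscribing to one of the core signals above;
# requires blinker (otherwise connect() raises the _FakeSignal error) and
# assumes an existing ``app``:
#
#     def log_rendering(sender, template, context, **extra):
#         print('rendered %s' % template.name)
#
#     template_rendered.connect(log_rendering, app)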

View File

@ -1,143 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask.templating
~~~~~~~~~~~~~~~~
Implements the bridge to Jinja2.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import posixpath
from jinja2 import BaseLoader, Environment as BaseEnvironment, \
TemplateNotFound
from .globals import _request_ctx_stack, _app_ctx_stack
from .signals import template_rendered
from .module import blueprint_is_module
from ._compat import itervalues, iteritems
def _default_template_ctx_processor():
"""Default template context processor. Injects `request`,
`session` and `g`.
"""
reqctx = _request_ctx_stack.top
appctx = _app_ctx_stack.top
rv = {}
if appctx is not None:
rv['g'] = appctx.g
if reqctx is not None:
rv['request'] = reqctx.request
rv['session'] = reqctx.session
return rv
class Environment(BaseEnvironment):
"""Works like a regular Jinja2 environment but has some additional
knowledge of how Flask's blueprint works so that it can prepend the
name of the blueprint to referenced templates if necessary.
"""
def __init__(self, app, **options):
if 'loader' not in options:
options['loader'] = app.create_global_jinja_loader()
BaseEnvironment.__init__(self, **options)
self.app = app
class DispatchingJinjaLoader(BaseLoader):
"""A loader that looks for templates in the application and all
the blueprint folders.
"""
def __init__(self, app):
self.app = app
def get_source(self, environment, template):
for loader, local_name in self._iter_loaders(template):
try:
return loader.get_source(environment, local_name)
except TemplateNotFound:
pass
raise TemplateNotFound(template)
def _iter_loaders(self, template):
loader = self.app.jinja_loader
if loader is not None:
yield loader, template
# old style module based loaders in case we are dealing with a
# blueprint that is an old style module
try:
module, local_name = posixpath.normpath(template).split('/', 1)
blueprint = self.app.blueprints[module]
if blueprint_is_module(blueprint):
loader = blueprint.jinja_loader
if loader is not None:
yield loader, local_name
except (ValueError, KeyError):
pass
for blueprint in itervalues(self.app.blueprints):
if blueprint_is_module(blueprint):
continue
loader = blueprint.jinja_loader
if loader is not None:
yield loader, template
def list_templates(self):
result = set()
loader = self.app.jinja_loader
if loader is not None:
result.update(loader.list_templates())
for name, blueprint in iteritems(self.app.blueprints):
loader = blueprint.jinja_loader
if loader is not None:
for template in loader.list_templates():
prefix = ''
if blueprint_is_module(blueprint):
prefix = name + '/'
result.add(prefix + template)
return list(result)
def _render(template, context, app):
"""Renders the template and fires the signal"""
rv = template.render(context)
template_rendered.send(app, template=template, context=context)
return rv
def render_template(template_name_or_list, **context):
"""Renders a template from the template folder with the given
context.
:param template_name_or_list: the name of the template to be
rendered, or an iterable with template names;
the first one that exists will be rendered
:param context: the variables that should be available in the
context of the template.
"""
ctx = _app_ctx_stack.top
ctx.app.update_template_context(context)
return _render(ctx.app.jinja_env.get_or_select_template(template_name_or_list),
context, ctx.app)
def render_template_string(source, **context):
"""Renders a template from the given template source string
with the given context.
:param source: the sourcecode of the template to be
rendered
:param context: the variables that should be available in the
context of the template.
"""
ctx = _app_ctx_stack.top
ctx.app.update_template_context(context)
return _render(ctx.app.jinja_env.from_string(source),
context, ctx.app)
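# Illustrative usage sketch; ``app`` and the ``hello.html`` template are
# assumptions made up for the example:
#
#     @app.route('/hello/<name>')
#     def hello(name):
#         return render_template('hello.html', name=name)
#
#     with app.test_request_context():
#         render_template_string('Hello {{ name }}!', name='World')
#         # -> u'Hello World!'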

View File

@ -1,124 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask.testing
~~~~~~~~~~~~~
Implements test support helpers. This module is lazily imported
and usually not used in production environments.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from contextlib import contextmanager
from werkzeug.test import Client, EnvironBuilder
from flask import _request_ctx_stack
try:
from werkzeug.urls import url_parse
except ImportError:
from urlparse import urlsplit as url_parse
def make_test_environ_builder(app, path='/', base_url=None, *args, **kwargs):
"""Creates a new test builder with some application defaults thrown in."""
http_host = app.config.get('SERVER_NAME')
app_root = app.config.get('APPLICATION_ROOT')
if base_url is None:
url = url_parse(path)
base_url = 'http://%s/' % (url.netloc or http_host or 'localhost')
if app_root:
base_url += app_root.lstrip('/')
if url.netloc:
path = url.path
return EnvironBuilder(path, base_url, *args, **kwargs)
class FlaskClient(Client):
"""Works like a regular Werkzeug test client but has some knowledge about
how Flask works to defer the cleanup of the request context stack to the
end of a with body when used in a with statement. For general information
about how to use this class refer to :class:`werkzeug.test.Client`.
Basic usage is outlined in the :ref:`testing` chapter.
"""
preserve_context = False
@contextmanager
def session_transaction(self, *args, **kwargs):
"""When used in combination with a with statement this opens a
session transaction. This can be used to modify the session that
the test client uses. Once the with block is left the session is
stored back.
with client.session_transaction() as session:
session['value'] = 42
Internally this is implemented by going through a temporary test
request context and since session handling could depend on
request variables this function accepts the same arguments as
:meth:`~flask.Flask.test_request_context` which are directly
passed through.
"""
if self.cookie_jar is None:
raise RuntimeError('Session transactions only make sense '
'with cookies enabled.')
app = self.application
environ_overrides = kwargs.setdefault('environ_overrides', {})
self.cookie_jar.inject_wsgi(environ_overrides)
outer_reqctx = _request_ctx_stack.top
with app.test_request_context(*args, **kwargs) as c:
sess = app.open_session(c.request)
if sess is None:
raise RuntimeError('Session backend did not open a session. '
'Check the configuration')
# Since we have to open a new request context for the session
# handling we want to make sure that we hide our own context
# from the caller. By pushing the original request context
# (or None) on top of this and popping it we get exactly that
# behavior. It's important to not use the push and pop
# methods of the actual request context object since that would
# mean that cleanup handlers are called
_request_ctx_stack.push(outer_reqctx)
try:
yield sess
finally:
_request_ctx_stack.pop()
resp = app.response_class()
if not app.session_interface.is_null_session(sess):
app.save_session(sess, resp)
headers = resp.get_wsgi_headers(c.request.environ)
self.cookie_jar.extract_wsgi(c.request.environ, headers)
def open(self, *args, **kwargs):
kwargs.setdefault('environ_overrides', {}) \
['flask._preserve_context'] = self.preserve_context
as_tuple = kwargs.pop('as_tuple', False)
buffered = kwargs.pop('buffered', False)
follow_redirects = kwargs.pop('follow_redirects', False)
builder = make_test_environ_builder(self.application, *args, **kwargs)
return Client.open(self, builder,
as_tuple=as_tuple,
buffered=buffered,
follow_redirects=follow_redirects)
def __enter__(self):
if self.preserve_context:
raise RuntimeError('Cannot nest client invocations')
self.preserve_context = True
return self
def __exit__(self, exc_type, exc_value, tb):
self.preserve_context = False
# on exit we want to clean up earlier. Normally the request context
# stays preserved until the next request in the same thread comes
# in. See RequestGlobals.push() for the general behavior.
top = _request_ctx_stack.top
if top is not None and top.preserved:
top.pop()
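# Illustrative sketch of driving an application with this client; the
# ``/hello/World`` route and the app's ``secret_key`` (needed for the
# session transaction) are assumptions made up for the example:
#
#     app.testing = True
#     with app.test_client() as c:
#         rv = c.get('/hello/World')
#         assert rv.status_code == 200
#         with c.session_transaction() as sess:
#             sess['user_id'] = 1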

View File

@ -1,246 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask.testsuite
~~~~~~~~~~~~~~~
Tests Flask itself. The majority of Flask is already tested
as part of Werkzeug.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from __future__ import print_function
import os
import sys
import flask
import warnings
import unittest
from functools import update_wrapper
from contextlib import contextmanager
from werkzeug.utils import import_string, find_modules
from flask._compat import reraise, StringIO
def add_to_path(path):
"""Adds an entry to sys.path if it's not already there. This does
not append it but moves it to the front so that we can be sure it
is loaded.
"""
if not os.path.isdir(path):
raise RuntimeError('Tried to add nonexisting path')
def _samefile(x, y):
if x == y:
return True
try:
return os.path.samefile(x, y)
except (IOError, OSError, AttributeError):
# Windows has no samefile
return False
sys.path[:] = [x for x in sys.path if not _samefile(path, x)]
sys.path.insert(0, path)
def iter_suites():
"""Yields all testsuites."""
for module in find_modules(__name__):
mod = import_string(module)
if hasattr(mod, 'suite'):
yield mod.suite()
def find_all_tests(suite):
"""Yields all the tests and their names from a given suite."""
suites = [suite]
while suites:
s = suites.pop()
try:
suites.extend(s)
except TypeError:
yield s, '%s.%s.%s' % (
s.__class__.__module__,
s.__class__.__name__,
s._testMethodName
)
@contextmanager
def catch_warnings():
"""Catch warnings in a with block in a list"""
# make sure deprecation warnings are active in tests
warnings.simplefilter('default', category=DeprecationWarning)
filters = warnings.filters
warnings.filters = filters[:]
old_showwarning = warnings.showwarning
log = []
def showwarning(message, category, filename, lineno, file=None, line=None):
log.append(locals())
try:
warnings.showwarning = showwarning
yield log
finally:
warnings.filters = filters
warnings.showwarning = old_showwarning
@contextmanager
def catch_stderr():
"""Catch stderr in a StringIO"""
old_stderr = sys.stderr
sys.stderr = rv = StringIO()
try:
yield rv
finally:
sys.stderr = old_stderr
def emits_module_deprecation_warning(f):
def new_f(self, *args, **kwargs):
with catch_warnings() as log:
f(self, *args, **kwargs)
self.assert_true(log, 'expected deprecation warning')
for entry in log:
self.assert_in('Modules are deprecated', str(entry['message']))
return update_wrapper(new_f, f)
class FlaskTestCase(unittest.TestCase):
"""Baseclass for all the tests that Flask uses. Use these methods
for testing instead of the camelcased ones in the baseclass for
consistency.
"""
def ensure_clean_request_context(self):
# make sure we're not leaking a request context since we are
# testing flask internally in debug mode in a few cases
leaks = []
while flask._request_ctx_stack.top is not None:
leaks.append(flask._request_ctx_stack.pop())
self.assert_equal(leaks, [])
def setup(self):
pass
def teardown(self):
pass
def setUp(self):
self.setup()
def tearDown(self):
unittest.TestCase.tearDown(self)
self.ensure_clean_request_context()
self.teardown()
def assert_equal(self, x, y):
return self.assertEqual(x, y)
def assert_raises(self, exc_type, callable=None, *args, **kwargs):
catcher = _ExceptionCatcher(self, exc_type)
if callable is None:
return catcher
with catcher:
callable(*args, **kwargs)
def assert_true(self, x, msg=None):
self.assertTrue(x, msg)
def assert_false(self, x, msg=None):
self.assertFalse(x, msg)
def assert_in(self, x, y):
self.assertIn(x, y)
def assert_not_in(self, x, y):
self.assertNotIn(x, y)
if sys.version_info[:2] == (2, 6):
def assertIn(self, x, y):
assert x in y, "%r unexpectedly not in %r" % (x, y)
def assertNotIn(self, x, y):
assert x not in y, "%r unexpectedly in %r" % (x, y)
class _ExceptionCatcher(object):
def __init__(self, test_case, exc_type):
self.test_case = test_case
self.exc_type = exc_type
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, tb):
exception_name = self.exc_type.__name__
if exc_type is None:
self.test_case.fail('Expected exception of type %r' %
exception_name)
elif not issubclass(exc_type, self.exc_type):
reraise(exc_type, exc_value, tb)
return True
class BetterLoader(unittest.TestLoader):
"""A nicer loader that solves two problems. First of all we are setting
up tests from different sources and we're doing this programmatically
which breaks the default loading logic so this is required anyways.
Secondly this loader has a nicer interpolation for test names than the
default one so you can just do ``run-tests.py ViewTestCase`` and it
will work.
"""
def getRootSuite(self):
return suite()
def loadTestsFromName(self, name, module=None):
root = self.getRootSuite()
if name == 'suite':
return root
all_tests = []
for testcase, testname in find_all_tests(root):
if testname == name or \
testname.endswith('.' + name) or \
('.' + name + '.') in testname or \
testname.startswith(name + '.'):
all_tests.append(testcase)
if not all_tests:
raise LookupError('could not find test case for "%s"' % name)
if len(all_tests) == 1:
return all_tests[0]
rv = unittest.TestSuite()
for test in all_tests:
rv.addTest(test)
return rv
def setup_path():
add_to_path(os.path.abspath(os.path.join(
os.path.dirname(__file__), 'test_apps')))
def suite():
"""A testsuite that has all the Flask tests. You can use this
function to integrate the Flask tests into your own testsuite
in case you want to test that monkeypatches to Flask do not
break it.
"""
setup_path()
suite = unittest.TestSuite()
for other_suite in iter_suites():
suite.addTest(other_suite)
return suite
def main():
"""Runs the testsuite as command line application."""
try:
unittest.main(testLoader=BetterLoader(), defaultTest='suite')
except Exception as e:
print('Error: %s' % e)

View File

@ -1,101 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask.testsuite.appctx
~~~~~~~~~~~~~~~~~~~~~~
Tests the application context.
:copyright: (c) 2012 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import flask
import unittest
from flask.testsuite import FlaskTestCase
class AppContextTestCase(FlaskTestCase):
def test_basic_url_generation(self):
app = flask.Flask(__name__)
app.config['SERVER_NAME'] = 'localhost'
app.config['PREFERRED_URL_SCHEME'] = 'https'
@app.route('/')
def index():
pass
with app.app_context():
rv = flask.url_for('index')
self.assert_equal(rv, 'https://localhost/')
def test_url_generation_requires_server_name(self):
app = flask.Flask(__name__)
with app.app_context():
with self.assert_raises(RuntimeError):
flask.url_for('index')
def test_url_generation_without_context_fails(self):
with self.assert_raises(RuntimeError):
flask.url_for('index')
def test_request_context_means_app_context(self):
app = flask.Flask(__name__)
with app.test_request_context():
self.assert_equal(flask.current_app._get_current_object(), app)
self.assert_equal(flask._app_ctx_stack.top, None)
def test_app_context_provides_current_app(self):
app = flask.Flask(__name__)
with app.app_context():
self.assert_equal(flask.current_app._get_current_object(), app)
self.assert_equal(flask._app_ctx_stack.top, None)
def test_app_tearing_down(self):
cleanup_stuff = []
app = flask.Flask(__name__)
@app.teardown_appcontext
def cleanup(exception):
cleanup_stuff.append(exception)
with app.app_context():
pass
self.assert_equal(cleanup_stuff, [None])
def test_custom_app_ctx_globals_class(self):
class CustomRequestGlobals(object):
def __init__(self):
self.spam = 'eggs'
app = flask.Flask(__name__)
app.app_ctx_globals_class = CustomRequestGlobals
with app.app_context():
self.assert_equal(
flask.render_template_string('{{ g.spam }}'), 'eggs')
def test_context_refcounts(self):
called = []
app = flask.Flask(__name__)
@app.teardown_request
def teardown_req(error=None):
called.append('request')
@app.teardown_appcontext
def teardown_app(error=None):
called.append('app')
@app.route('/')
def index():
with flask._app_ctx_stack.top:
with flask._request_ctx_stack.top:
pass
self.assert_true(flask._request_ctx_stack.top.request.environ
['werkzeug.request'] is not None)
return u''
c = app.test_client()
c.get('/')
self.assertEqual(called, ['request', 'app'])
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(AppContextTestCase))
return suite

File diff suppressed because it is too large

View File

@ -1,790 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask.testsuite.blueprints
~~~~~~~~~~~~~~~~~~~~~~~~~~
Blueprints (and currently modules)
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import flask
import unittest
import warnings
from flask.testsuite import FlaskTestCase, emits_module_deprecation_warning
from flask._compat import text_type
from werkzeug.exceptions import NotFound
from werkzeug.http import parse_cache_control_header
from jinja2 import TemplateNotFound
# import moduleapp here because it uses deprecated features and we don't
# want to see the warnings
warnings.simplefilter('ignore', DeprecationWarning)
from moduleapp import app as moduleapp
warnings.simplefilter('default', DeprecationWarning)
class ModuleTestCase(FlaskTestCase):
@emits_module_deprecation_warning
def test_basic_module(self):
app = flask.Flask(__name__)
admin = flask.Module(__name__, 'admin', url_prefix='/admin')
@admin.route('/')
def admin_index():
return 'admin index'
@admin.route('/login')
def admin_login():
return 'admin login'
@admin.route('/logout')
def admin_logout():
return 'admin logout'
@app.route('/')
def index():
return 'the index'
app.register_module(admin)
c = app.test_client()
self.assert_equal(c.get('/').data, b'the index')
self.assert_equal(c.get('/admin/').data, b'admin index')
self.assert_equal(c.get('/admin/login').data, b'admin login')
self.assert_equal(c.get('/admin/logout').data, b'admin logout')
@emits_module_deprecation_warning
def test_default_endpoint_name(self):
app = flask.Flask(__name__)
mod = flask.Module(__name__, 'frontend')
def index():
return 'Awesome'
mod.add_url_rule('/', view_func=index)
app.register_module(mod)
rv = app.test_client().get('/')
self.assert_equal(rv.data, b'Awesome')
with app.test_request_context():
self.assert_equal(flask.url_for('frontend.index'), '/')
@emits_module_deprecation_warning
def test_request_processing(self):
catched = []
app = flask.Flask(__name__)
admin = flask.Module(__name__, 'admin', url_prefix='/admin')
@admin.before_request
def before_admin_request():
catched.append('before-admin')
@admin.after_request
def after_admin_request(response):
catched.append('after-admin')
return response
@admin.route('/')
def admin_index():
return 'the admin'
@app.before_request
def before_request():
catched.append('before-app')
@app.after_request
def after_request(response):
catched.append('after-app')
return response
@app.route('/')
def index():
return 'the index'
app.register_module(admin)
c = app.test_client()
self.assert_equal(c.get('/').data, b'the index')
self.assert_equal(catched, ['before-app', 'after-app'])
del catched[:]
self.assert_equal(c.get('/admin/').data, b'the admin')
self.assert_equal(catched, ['before-app', 'before-admin',
'after-admin', 'after-app'])
@emits_module_deprecation_warning
def test_context_processors(self):
app = flask.Flask(__name__)
admin = flask.Module(__name__, 'admin', url_prefix='/admin')
@app.context_processor
def inject_all_regular():
return {'a': 1}
@admin.context_processor
def inject_admin():
return {'b': 2}
@admin.app_context_processor
def inject_all_module():
return {'c': 3}
@app.route('/')
def index():
return flask.render_template_string('{{ a }}{{ b }}{{ c }}')
@admin.route('/')
def admin_index():
return flask.render_template_string('{{ a }}{{ b }}{{ c }}')
app.register_module(admin)
c = app.test_client()
self.assert_equal(c.get('/').data, b'13')
self.assert_equal(c.get('/admin/').data, b'123')
@emits_module_deprecation_warning
def test_late_binding(self):
app = flask.Flask(__name__)
admin = flask.Module(__name__, 'admin')
@admin.route('/')
def index():
return '42'
app.register_module(admin, url_prefix='/admin')
self.assert_equal(app.test_client().get('/admin/').data, b'42')
@emits_module_deprecation_warning
def test_error_handling(self):
app = flask.Flask(__name__)
admin = flask.Module(__name__, 'admin')
@admin.app_errorhandler(404)
def not_found(e):
return 'not found', 404
@admin.app_errorhandler(500)
def internal_server_error(e):
return 'internal server error', 500
@admin.route('/')
def index():
flask.abort(404)
@admin.route('/error')
def error():
1 // 0
app.register_module(admin)
c = app.test_client()
rv = c.get('/')
self.assert_equal(rv.status_code, 404)
self.assert_equal(rv.data, b'not found')
rv = c.get('/error')
self.assert_equal(rv.status_code, 500)
self.assert_equal(b'internal server error', rv.data)
def test_templates_and_static(self):
app = moduleapp
app.testing = True
c = app.test_client()
rv = c.get('/')
self.assert_equal(rv.data, b'Hello from the Frontend')
rv = c.get('/admin/')
self.assert_equal(rv.data, b'Hello from the Admin')
rv = c.get('/admin/index2')
self.assert_equal(rv.data, b'Hello from the Admin')
rv = c.get('/admin/static/test.txt')
self.assert_equal(rv.data.strip(), b'Admin File')
rv.close()
rv = c.get('/admin/static/css/test.css')
self.assert_equal(rv.data.strip(), b'/* nested file */')
rv.close()
with app.test_request_context():
self.assert_equal(flask.url_for('admin.static', filename='test.txt'),
'/admin/static/test.txt')
with app.test_request_context():
try:
flask.render_template('missing.html')
except TemplateNotFound as e:
self.assert_equal(e.name, 'missing.html')
else:
self.assert_true(0, 'expected exception')
with flask.Flask(__name__).test_request_context():
self.assert_equal(flask.render_template('nested/nested.txt'), 'I\'m nested')
def test_safe_access(self):
app = moduleapp
with app.test_request_context():
f = app.view_functions['admin.static']
try:
f('/etc/passwd')
except NotFound:
pass
else:
self.assert_true(0, 'expected exception')
try:
f('../__init__.py')
except NotFound:
pass
else:
self.assert_true(0, 'expected exception')
        # Test case for a security issue that may exist on Windows systems
import os
import ntpath
old_path = os.path
os.path = ntpath
try:
try:
f('..\\__init__.py')
except NotFound:
pass
else:
self.assert_true(0, 'expected exception')
finally:
os.path = old_path
@emits_module_deprecation_warning
def test_endpoint_decorator(self):
from werkzeug.routing import Submount, Rule
from flask import Module
app = flask.Flask(__name__)
app.testing = True
app.url_map.add(Submount('/foo', [
Rule('/bar', endpoint='bar'),
Rule('/', endpoint='index')
]))
module = Module(__name__, __name__)
@module.endpoint('bar')
def bar():
return 'bar'
@module.endpoint('index')
def index():
return 'index'
app.register_module(module)
c = app.test_client()
self.assert_equal(c.get('/foo/').data, b'index')
self.assert_equal(c.get('/foo/bar').data, b'bar')
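# The Module API exercised above is deprecated in this Flask version; a
# minimal sketch of the equivalent Blueprint-based setup (the helper name is
# illustrative only; Blueprints themselves are covered by BlueprintTestCase
# below):
def _module_to_blueprint_sketch():
    import flask
    app = flask.Flask(__name__)
    admin = flask.Blueprint('admin', __name__, url_prefix='/admin')
    @admin.route('/')
    def admin_index():
        return 'admin index'
    app.register_blueprint(admin)
    return app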
class BlueprintTestCase(FlaskTestCase):
def test_blueprint_specific_error_handling(self):
frontend = flask.Blueprint('frontend', __name__)
backend = flask.Blueprint('backend', __name__)
sideend = flask.Blueprint('sideend', __name__)
@frontend.errorhandler(403)
def frontend_forbidden(e):
return 'frontend says no', 403
@frontend.route('/frontend-no')
def frontend_no():
flask.abort(403)
@backend.errorhandler(403)
def backend_forbidden(e):
return 'backend says no', 403
@backend.route('/backend-no')
def backend_no():
flask.abort(403)
@sideend.route('/what-is-a-sideend')
def sideend_no():
flask.abort(403)
app = flask.Flask(__name__)
app.register_blueprint(frontend)
app.register_blueprint(backend)
app.register_blueprint(sideend)
@app.errorhandler(403)
def app_forbidden(e):
return 'application itself says no', 403
c = app.test_client()
self.assert_equal(c.get('/frontend-no').data, b'frontend says no')
self.assert_equal(c.get('/backend-no').data, b'backend says no')
self.assert_equal(c.get('/what-is-a-sideend').data, b'application itself says no')
def test_blueprint_url_definitions(self):
bp = flask.Blueprint('test', __name__)
@bp.route('/foo', defaults={'baz': 42})
def foo(bar, baz):
return '%s/%d' % (bar, baz)
@bp.route('/bar')
def bar(bar):
return text_type(bar)
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/1', url_defaults={'bar': 23})
app.register_blueprint(bp, url_prefix='/2', url_defaults={'bar': 19})
c = app.test_client()
self.assert_equal(c.get('/1/foo').data, b'23/42')
self.assert_equal(c.get('/2/foo').data, b'19/42')
self.assert_equal(c.get('/1/bar').data, b'23')
self.assert_equal(c.get('/2/bar').data, b'19')
def test_blueprint_url_processors(self):
bp = flask.Blueprint('frontend', __name__, url_prefix='/<lang_code>')
@bp.url_defaults
def add_language_code(endpoint, values):
values.setdefault('lang_code', flask.g.lang_code)
@bp.url_value_preprocessor
def pull_lang_code(endpoint, values):
flask.g.lang_code = values.pop('lang_code')
@bp.route('/')
def index():
return flask.url_for('.about')
@bp.route('/about')
def about():
return flask.url_for('.index')
app = flask.Flask(__name__)
app.register_blueprint(bp)
c = app.test_client()
self.assert_equal(c.get('/de/').data, b'/de/about')
self.assert_equal(c.get('/de/about').data, b'/de/')
def test_templates_and_static(self):
from blueprintapp import app
c = app.test_client()
rv = c.get('/')
self.assert_equal(rv.data, b'Hello from the Frontend')
rv = c.get('/admin/')
self.assert_equal(rv.data, b'Hello from the Admin')
rv = c.get('/admin/index2')
self.assert_equal(rv.data, b'Hello from the Admin')
rv = c.get('/admin/static/test.txt')
self.assert_equal(rv.data.strip(), b'Admin File')
rv.close()
rv = c.get('/admin/static/css/test.css')
self.assert_equal(rv.data.strip(), b'/* nested file */')
rv.close()
# try/finally, in case other tests use this app for Blueprint tests.
max_age_default = app.config['SEND_FILE_MAX_AGE_DEFAULT']
try:
expected_max_age = 3600
if app.config['SEND_FILE_MAX_AGE_DEFAULT'] == expected_max_age:
expected_max_age = 7200
app.config['SEND_FILE_MAX_AGE_DEFAULT'] = expected_max_age
rv = c.get('/admin/static/css/test.css')
cc = parse_cache_control_header(rv.headers['Cache-Control'])
self.assert_equal(cc.max_age, expected_max_age)
rv.close()
finally:
app.config['SEND_FILE_MAX_AGE_DEFAULT'] = max_age_default
with app.test_request_context():
self.assert_equal(flask.url_for('admin.static', filename='test.txt'),
'/admin/static/test.txt')
with app.test_request_context():
try:
flask.render_template('missing.html')
except TemplateNotFound as e:
self.assert_equal(e.name, 'missing.html')
else:
self.assert_true(0, 'expected exception')
with flask.Flask(__name__).test_request_context():
self.assert_equal(flask.render_template('nested/nested.txt'), 'I\'m nested')
def test_default_static_cache_timeout(self):
app = flask.Flask(__name__)
class MyBlueprint(flask.Blueprint):
def get_send_file_max_age(self, filename):
return 100
blueprint = MyBlueprint('blueprint', __name__, static_folder='static')
app.register_blueprint(blueprint)
# try/finally, in case other tests use this app for Blueprint tests.
max_age_default = app.config['SEND_FILE_MAX_AGE_DEFAULT']
try:
with app.test_request_context():
unexpected_max_age = 3600
if app.config['SEND_FILE_MAX_AGE_DEFAULT'] == unexpected_max_age:
unexpected_max_age = 7200
app.config['SEND_FILE_MAX_AGE_DEFAULT'] = unexpected_max_age
rv = blueprint.send_static_file('index.html')
cc = parse_cache_control_header(rv.headers['Cache-Control'])
self.assert_equal(cc.max_age, 100)
rv.close()
finally:
app.config['SEND_FILE_MAX_AGE_DEFAULT'] = max_age_default
def test_templates_list(self):
from blueprintapp import app
templates = sorted(app.jinja_env.list_templates())
self.assert_equal(templates, ['admin/index.html',
'frontend/index.html'])
def test_dotted_names(self):
frontend = flask.Blueprint('myapp.frontend', __name__)
backend = flask.Blueprint('myapp.backend', __name__)
@frontend.route('/fe')
def frontend_index():
return flask.url_for('myapp.backend.backend_index')
@frontend.route('/fe2')
def frontend_page2():
return flask.url_for('.frontend_index')
@backend.route('/be')
def backend_index():
return flask.url_for('myapp.frontend.frontend_index')
app = flask.Flask(__name__)
app.register_blueprint(frontend)
app.register_blueprint(backend)
c = app.test_client()
self.assert_equal(c.get('/fe').data.strip(), b'/be')
self.assert_equal(c.get('/fe2').data.strip(), b'/fe')
self.assert_equal(c.get('/be').data.strip(), b'/fe')
def test_dotted_names_from_app(self):
app = flask.Flask(__name__)
app.testing = True
test = flask.Blueprint('test', __name__)
@app.route('/')
def app_index():
return flask.url_for('test.index')
@test.route('/test/')
def index():
return flask.url_for('app_index')
app.register_blueprint(test)
with app.test_client() as c:
rv = c.get('/')
self.assert_equal(rv.data, b'/test/')
def test_empty_url_defaults(self):
bp = flask.Blueprint('bp', __name__)
@bp.route('/', defaults={'page': 1})
@bp.route('/page/<int:page>')
def something(page):
return str(page)
app = flask.Flask(__name__)
app.register_blueprint(bp)
c = app.test_client()
self.assert_equal(c.get('/').data, b'1')
self.assert_equal(c.get('/page/2').data, b'2')
def test_route_decorator_custom_endpoint(self):
bp = flask.Blueprint('bp', __name__)
@bp.route('/foo')
def foo():
return flask.request.endpoint
@bp.route('/bar', endpoint='bar')
def foo_bar():
return flask.request.endpoint
@bp.route('/bar/123', endpoint='123')
def foo_bar_foo():
return flask.request.endpoint
@bp.route('/bar/foo')
def bar_foo():
return flask.request.endpoint
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
@app.route('/')
def index():
return flask.request.endpoint
c = app.test_client()
self.assertEqual(c.get('/').data, b'index')
self.assertEqual(c.get('/py/foo').data, b'bp.foo')
self.assertEqual(c.get('/py/bar').data, b'bp.bar')
self.assertEqual(c.get('/py/bar/123').data, b'bp.123')
self.assertEqual(c.get('/py/bar/foo').data, b'bp.bar_foo')
def test_route_decorator_custom_endpoint_with_dots(self):
bp = flask.Blueprint('bp', __name__)
@bp.route('/foo')
def foo():
return flask.request.endpoint
try:
@bp.route('/bar', endpoint='bar.bar')
def foo_bar():
return flask.request.endpoint
except AssertionError:
pass
else:
raise AssertionError('expected AssertionError not raised')
try:
@bp.route('/bar/123', endpoint='bar.123')
def foo_bar_foo():
return flask.request.endpoint
except AssertionError:
pass
else:
raise AssertionError('expected AssertionError not raised')
def foo_foo_foo():
pass
self.assertRaises(
AssertionError,
lambda: bp.add_url_rule(
'/bar/123', endpoint='bar.123', view_func=foo_foo_foo
)
)
self.assertRaises(
AssertionError,
bp.route('/bar/123', endpoint='bar.123'),
lambda: None
)
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
c = app.test_client()
self.assertEqual(c.get('/py/foo').data, b'bp.foo')
        # The rules didn't actually make it through
rv = c.get('/py/bar')
assert rv.status_code == 404
rv = c.get('/py/bar/123')
assert rv.status_code == 404
def test_template_filter(self):
bp = flask.Blueprint('bp', __name__)
@bp.app_template_filter()
def my_reverse(s):
return s[::-1]
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
self.assert_in('my_reverse', app.jinja_env.filters.keys())
self.assert_equal(app.jinja_env.filters['my_reverse'], my_reverse)
self.assert_equal(app.jinja_env.filters['my_reverse']('abcd'), 'dcba')
def test_add_template_filter(self):
bp = flask.Blueprint('bp', __name__)
def my_reverse(s):
return s[::-1]
bp.add_app_template_filter(my_reverse)
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
self.assert_in('my_reverse', app.jinja_env.filters.keys())
self.assert_equal(app.jinja_env.filters['my_reverse'], my_reverse)
self.assert_equal(app.jinja_env.filters['my_reverse']('abcd'), 'dcba')
def test_template_filter_with_name(self):
bp = flask.Blueprint('bp', __name__)
@bp.app_template_filter('strrev')
def my_reverse(s):
return s[::-1]
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
self.assert_in('strrev', app.jinja_env.filters.keys())
self.assert_equal(app.jinja_env.filters['strrev'], my_reverse)
self.assert_equal(app.jinja_env.filters['strrev']('abcd'), 'dcba')
def test_add_template_filter_with_name(self):
bp = flask.Blueprint('bp', __name__)
def my_reverse(s):
return s[::-1]
bp.add_app_template_filter(my_reverse, 'strrev')
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
self.assert_in('strrev', app.jinja_env.filters.keys())
self.assert_equal(app.jinja_env.filters['strrev'], my_reverse)
self.assert_equal(app.jinja_env.filters['strrev']('abcd'), 'dcba')
def test_template_filter_with_template(self):
bp = flask.Blueprint('bp', __name__)
@bp.app_template_filter()
def super_reverse(s):
return s[::-1]
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
@app.route('/')
def index():
return flask.render_template('template_filter.html', value='abcd')
rv = app.test_client().get('/')
self.assert_equal(rv.data, b'dcba')
def test_template_filter_after_route_with_template(self):
app = flask.Flask(__name__)
@app.route('/')
def index():
return flask.render_template('template_filter.html', value='abcd')
bp = flask.Blueprint('bp', __name__)
@bp.app_template_filter()
def super_reverse(s):
return s[::-1]
app.register_blueprint(bp, url_prefix='/py')
rv = app.test_client().get('/')
self.assert_equal(rv.data, b'dcba')
def test_add_template_filter_with_template(self):
bp = flask.Blueprint('bp', __name__)
def super_reverse(s):
return s[::-1]
bp.add_app_template_filter(super_reverse)
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
@app.route('/')
def index():
return flask.render_template('template_filter.html', value='abcd')
rv = app.test_client().get('/')
self.assert_equal(rv.data, b'dcba')
def test_template_filter_with_name_and_template(self):
bp = flask.Blueprint('bp', __name__)
@bp.app_template_filter('super_reverse')
def my_reverse(s):
return s[::-1]
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
@app.route('/')
def index():
return flask.render_template('template_filter.html', value='abcd')
rv = app.test_client().get('/')
self.assert_equal(rv.data, b'dcba')
def test_add_template_filter_with_name_and_template(self):
bp = flask.Blueprint('bp', __name__)
def my_reverse(s):
return s[::-1]
bp.add_app_template_filter(my_reverse, 'super_reverse')
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
@app.route('/')
def index():
return flask.render_template('template_filter.html', value='abcd')
rv = app.test_client().get('/')
self.assert_equal(rv.data, b'dcba')
def test_template_test(self):
bp = flask.Blueprint('bp', __name__)
@bp.app_template_test()
def is_boolean(value):
return isinstance(value, bool)
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
self.assert_in('is_boolean', app.jinja_env.tests.keys())
self.assert_equal(app.jinja_env.tests['is_boolean'], is_boolean)
self.assert_true(app.jinja_env.tests['is_boolean'](False))
def test_add_template_test(self):
bp = flask.Blueprint('bp', __name__)
def is_boolean(value):
return isinstance(value, bool)
bp.add_app_template_test(is_boolean)
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
self.assert_in('is_boolean', app.jinja_env.tests.keys())
self.assert_equal(app.jinja_env.tests['is_boolean'], is_boolean)
self.assert_true(app.jinja_env.tests['is_boolean'](False))
def test_template_test_with_name(self):
bp = flask.Blueprint('bp', __name__)
@bp.app_template_test('boolean')
def is_boolean(value):
return isinstance(value, bool)
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
self.assert_in('boolean', app.jinja_env.tests.keys())
self.assert_equal(app.jinja_env.tests['boolean'], is_boolean)
self.assert_true(app.jinja_env.tests['boolean'](False))
def test_add_template_test_with_name(self):
bp = flask.Blueprint('bp', __name__)
def is_boolean(value):
return isinstance(value, bool)
bp.add_app_template_test(is_boolean, 'boolean')
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
self.assert_in('boolean', app.jinja_env.tests.keys())
self.assert_equal(app.jinja_env.tests['boolean'], is_boolean)
self.assert_true(app.jinja_env.tests['boolean'](False))
def test_template_test_with_template(self):
bp = flask.Blueprint('bp', __name__)
@bp.app_template_test()
def boolean(value):
return isinstance(value, bool)
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
@app.route('/')
def index():
return flask.render_template('template_test.html', value=False)
rv = app.test_client().get('/')
self.assert_in(b'Success!', rv.data)
def test_template_test_after_route_with_template(self):
app = flask.Flask(__name__)
@app.route('/')
def index():
return flask.render_template('template_test.html', value=False)
bp = flask.Blueprint('bp', __name__)
@bp.app_template_test()
def boolean(value):
return isinstance(value, bool)
app.register_blueprint(bp, url_prefix='/py')
rv = app.test_client().get('/')
self.assert_in(b'Success!', rv.data)
def test_add_template_test_with_template(self):
bp = flask.Blueprint('bp', __name__)
def boolean(value):
return isinstance(value, bool)
bp.add_app_template_test(boolean)
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
@app.route('/')
def index():
return flask.render_template('template_test.html', value=False)
rv = app.test_client().get('/')
self.assert_in(b'Success!', rv.data)
def test_template_test_with_name_and_template(self):
bp = flask.Blueprint('bp', __name__)
@bp.app_template_test('boolean')
def is_boolean(value):
return isinstance(value, bool)
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
@app.route('/')
def index():
return flask.render_template('template_test.html', value=False)
rv = app.test_client().get('/')
self.assert_in(b'Success!', rv.data)
def test_add_template_test_with_name_and_template(self):
bp = flask.Blueprint('bp', __name__)
def is_boolean(value):
return isinstance(value, bool)
bp.add_app_template_test(is_boolean, 'boolean')
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
@app.route('/')
def index():
return flask.render_template('template_test.html', value=False)
rv = app.test_client().get('/')
self.assert_in(b'Success!', rv.data)
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(BlueprintTestCase))
suite.addTest(unittest.makeSuite(ModuleTestCase))
return suite
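# A conventional entry point (illustrative, assuming the stock unittest CLI)
# for running just this module's tests outside the full Flask test suite:
if __name__ == '__main__':
    unittest.main(defaultTest='suite')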

View File

@@ -1,299 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask.testsuite.config
~~~~~~~~~~~~~~~~~~~~~~
Configuration and instances.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import os
import sys
import flask
import pkgutil
import unittest
from contextlib import contextmanager
from flask.testsuite import FlaskTestCase
# config keys used for the ConfigTestCase
TEST_KEY = 'foo'
SECRET_KEY = 'devkey'
class ConfigTestCase(FlaskTestCase):
def common_object_test(self, app):
self.assert_equal(app.secret_key, 'devkey')
self.assert_equal(app.config['TEST_KEY'], 'foo')
self.assert_not_in('ConfigTestCase', app.config)
def test_config_from_file(self):
app = flask.Flask(__name__)
app.config.from_pyfile(__file__.rsplit('.', 1)[0] + '.py')
self.common_object_test(app)
def test_config_from_object(self):
app = flask.Flask(__name__)
app.config.from_object(__name__)
self.common_object_test(app)
def test_config_from_class(self):
class Base(object):
TEST_KEY = 'foo'
class Test(Base):
SECRET_KEY = 'devkey'
app = flask.Flask(__name__)
app.config.from_object(Test)
self.common_object_test(app)
def test_config_from_envvar(self):
env = os.environ
try:
os.environ = {}
app = flask.Flask(__name__)
try:
app.config.from_envvar('FOO_SETTINGS')
except RuntimeError as e:
self.assert_true("'FOO_SETTINGS' is not set" in str(e))
else:
self.assert_true(0, 'expected exception')
self.assert_false(app.config.from_envvar('FOO_SETTINGS', silent=True))
os.environ = {'FOO_SETTINGS': __file__.rsplit('.', 1)[0] + '.py'}
self.assert_true(app.config.from_envvar('FOO_SETTINGS'))
self.common_object_test(app)
finally:
os.environ = env
def test_config_from_envvar_missing(self):
env = os.environ
try:
os.environ = {'FOO_SETTINGS': 'missing.cfg'}
try:
app = flask.Flask(__name__)
app.config.from_envvar('FOO_SETTINGS')
except IOError as e:
msg = str(e)
self.assert_true(msg.startswith('[Errno 2] Unable to load configuration '
'file (No such file or directory):'))
self.assert_true(msg.endswith("missing.cfg'"))
else:
self.fail('expected IOError')
self.assertFalse(app.config.from_envvar('FOO_SETTINGS', silent=True))
finally:
os.environ = env
def test_config_missing(self):
app = flask.Flask(__name__)
try:
app.config.from_pyfile('missing.cfg')
except IOError as e:
msg = str(e)
self.assert_true(msg.startswith('[Errno 2] Unable to load configuration '
'file (No such file or directory):'))
self.assert_true(msg.endswith("missing.cfg'"))
else:
self.assert_true(0, 'expected config')
self.assert_false(app.config.from_pyfile('missing.cfg', silent=True))
def test_session_lifetime(self):
app = flask.Flask(__name__)
app.config['PERMANENT_SESSION_LIFETIME'] = 42
self.assert_equal(app.permanent_session_lifetime.seconds, 42)
class LimitedLoaderMockWrapper(object):
def __init__(self, loader):
self.loader = loader
def __getattr__(self, name):
if name in ('archive', 'get_filename'):
msg = 'Mocking a loader which does not have `%s.`' % name
raise AttributeError(msg)
return getattr(self.loader, name)
@contextmanager
def patch_pkgutil_get_loader(wrapper_class=LimitedLoaderMockWrapper):
"""Patch pkgutil.get_loader to give loader without get_filename or archive.
This provides for tests where a system has custom loaders, e.g. Google App
Engine's HardenedModulesHook, which have neither the `get_filename` method
nor the `archive` attribute.
"""
old_get_loader = pkgutil.get_loader
def get_loader(*args, **kwargs):
return wrapper_class(old_get_loader(*args, **kwargs))
try:
pkgutil.get_loader = get_loader
yield
finally:
pkgutil.get_loader = old_get_loader
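# A quick standalone sketch of what the patch above simulates (the demo name
# is illustrative, not part of the suite): under the patch, the loader that
# pkgutil.get_loader returns hides exactly the two attributes the docstring
# mentions, get_filename and archive.
def _limited_loader_demo():
    import pkgutil
    with patch_pkgutil_get_loader():
        loader = pkgutil.get_loader('flask')
        try:
            loader.get_filename  # attribute lookup alone triggers the mock
        except AttributeError:
            pass  # expected: the wrapper also refuses `archive`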
class InstanceTestCase(FlaskTestCase):
def test_explicit_instance_paths(self):
here = os.path.abspath(os.path.dirname(__file__))
try:
flask.Flask(__name__, instance_path='instance')
except ValueError as e:
self.assert_in('must be absolute', str(e))
else:
self.fail('Expected value error')
app = flask.Flask(__name__, instance_path=here)
self.assert_equal(app.instance_path, here)
def test_main_module_paths(self):
# Test an app with '__main__' as the import name, uses cwd.
from main_app import app
here = os.path.abspath(os.getcwd())
self.assert_equal(app.instance_path, os.path.join(here, 'instance'))
if 'main_app' in sys.modules:
del sys.modules['main_app']
def test_uninstalled_module_paths(self):
from config_module_app import app
here = os.path.abspath(os.path.dirname(__file__))
self.assert_equal(app.instance_path, os.path.join(here, 'test_apps', 'instance'))
def test_uninstalled_package_paths(self):
from config_package_app import app
here = os.path.abspath(os.path.dirname(__file__))
self.assert_equal(app.instance_path, os.path.join(here, 'test_apps', 'instance'))
def test_installed_module_paths(self):
here = os.path.abspath(os.path.dirname(__file__))
expected_prefix = os.path.join(here, 'test_apps')
real_prefix, sys.prefix = sys.prefix, expected_prefix
site_packages = os.path.join(expected_prefix, 'lib', 'python2.5', 'site-packages')
sys.path.append(site_packages)
try:
import site_app
self.assert_equal(site_app.app.instance_path,
os.path.join(expected_prefix, 'var',
'site_app-instance'))
finally:
sys.prefix = real_prefix
sys.path.remove(site_packages)
if 'site_app' in sys.modules:
del sys.modules['site_app']
def test_installed_module_paths_with_limited_loader(self):
here = os.path.abspath(os.path.dirname(__file__))
expected_prefix = os.path.join(here, 'test_apps')
real_prefix, sys.prefix = sys.prefix, expected_prefix
site_packages = os.path.join(expected_prefix, 'lib', 'python2.5', 'site-packages')
sys.path.append(site_packages)
with patch_pkgutil_get_loader():
try:
import site_app
self.assert_equal(site_app.app.instance_path,
os.path.join(expected_prefix, 'var',
'site_app-instance'))
finally:
sys.prefix = real_prefix
sys.path.remove(site_packages)
if 'site_app' in sys.modules:
del sys.modules['site_app']
def test_installed_package_paths(self):
here = os.path.abspath(os.path.dirname(__file__))
expected_prefix = os.path.join(here, 'test_apps')
real_prefix, sys.prefix = sys.prefix, expected_prefix
installed_path = os.path.join(expected_prefix, 'path')
sys.path.append(installed_path)
try:
import installed_package
self.assert_equal(installed_package.app.instance_path,
os.path.join(expected_prefix, 'var',
'installed_package-instance'))
finally:
sys.prefix = real_prefix
sys.path.remove(installed_path)
if 'installed_package' in sys.modules:
del sys.modules['installed_package']
def test_installed_package_paths_with_limited_loader(self):
here = os.path.abspath(os.path.dirname(__file__))
expected_prefix = os.path.join(here, 'test_apps')
real_prefix, sys.prefix = sys.prefix, expected_prefix
installed_path = os.path.join(expected_prefix, 'path')
sys.path.append(installed_path)
with patch_pkgutil_get_loader():
try:
import installed_package
self.assert_equal(installed_package.app.instance_path,
os.path.join(expected_prefix, 'var',
'installed_package-instance'))
finally:
sys.prefix = real_prefix
sys.path.remove(installed_path)
if 'installed_package' in sys.modules:
del sys.modules['installed_package']
def test_prefix_package_paths(self):
here = os.path.abspath(os.path.dirname(__file__))
expected_prefix = os.path.join(here, 'test_apps')
real_prefix, sys.prefix = sys.prefix, expected_prefix
site_packages = os.path.join(expected_prefix, 'lib', 'python2.5', 'site-packages')
sys.path.append(site_packages)
try:
import site_package
self.assert_equal(site_package.app.instance_path,
os.path.join(expected_prefix, 'var',
'site_package-instance'))
finally:
sys.prefix = real_prefix
sys.path.remove(site_packages)
if 'site_package' in sys.modules:
del sys.modules['site_package']
def test_prefix_package_paths_with_limited_loader(self):
here = os.path.abspath(os.path.dirname(__file__))
expected_prefix = os.path.join(here, 'test_apps')
real_prefix, sys.prefix = sys.prefix, expected_prefix
site_packages = os.path.join(expected_prefix, 'lib', 'python2.5', 'site-packages')
sys.path.append(site_packages)
with patch_pkgutil_get_loader():
try:
import site_package
self.assert_equal(site_package.app.instance_path,
os.path.join(expected_prefix, 'var',
'site_package-instance'))
finally:
sys.prefix = real_prefix
sys.path.remove(site_packages)
if 'site_package' in sys.modules:
del sys.modules['site_package']
def test_egg_installed_paths(self):
here = os.path.abspath(os.path.dirname(__file__))
expected_prefix = os.path.join(here, 'test_apps')
real_prefix, sys.prefix = sys.prefix, expected_prefix
site_packages = os.path.join(expected_prefix, 'lib', 'python2.5', 'site-packages')
egg_path = os.path.join(site_packages, 'SiteEgg.egg')
sys.path.append(site_packages)
sys.path.append(egg_path)
try:
import site_egg # in SiteEgg.egg
self.assert_equal(site_egg.app.instance_path,
os.path.join(expected_prefix, 'var',
'site_egg-instance'))
finally:
sys.prefix = real_prefix
sys.path.remove(site_packages)
sys.path.remove(egg_path)
if 'site_egg' in sys.modules:
del sys.modules['site_egg']
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(ConfigTestCase))
suite.addTest(unittest.makeSuite(InstanceTestCase))
return suite

View File

@@ -1,24 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask.testsuite.deprecations
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Tests deprecation support.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import flask
import unittest
from flask.testsuite import FlaskTestCase, catch_warnings
class DeprecationsTestCase(FlaskTestCase):
"""not used currently"""
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(DeprecationsTestCase))
return suite

View File

@@ -1,38 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask.testsuite.examples
~~~~~~~~~~~~~~~~~~~~~~~~
Tests the examples.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import os
import unittest
from flask.testsuite import add_to_path
def setup_path():
example_path = os.path.join(os.path.dirname(__file__),
os.pardir, os.pardir, 'examples')
add_to_path(os.path.join(example_path, 'flaskr'))
add_to_path(os.path.join(example_path, 'minitwit'))
def suite():
setup_path()
suite = unittest.TestSuite()
try:
from minitwit_tests import MiniTwitTestCase
except ImportError:
pass
else:
suite.addTest(unittest.makeSuite(MiniTwitTestCase))
try:
from flaskr_tests import FlaskrTestCase
except ImportError:
pass
else:
suite.addTest(unittest.makeSuite(FlaskrTestCase))
return suite

View File

@@ -1,134 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask.testsuite.ext
~~~~~~~~~~~~~~~~~~~
Tests the extension import thing.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import sys
import unittest
try:
from imp import reload as reload_module
except ImportError:
reload_module = reload
from flask.testsuite import FlaskTestCase
from flask._compat import PY2
class ExtImportHookTestCase(FlaskTestCase):
def setup(self):
# we clear this out for various reasons. The most important one is
# that a real flaskext could be in there which would disable our
# fake package. Secondly we want to make sure that the flaskext
# import hook does not break on reloading.
for entry, value in list(sys.modules.items()):
if (entry.startswith('flask.ext.') or
entry.startswith('flask_') or
entry.startswith('flaskext.') or
entry == 'flaskext') and value is not None:
sys.modules.pop(entry, None)
from flask import ext
reload_module(ext)
# reloading must not add more hooks
import_hooks = 0
for item in sys.meta_path:
cls = type(item)
if cls.__module__ == 'flask.exthook' and \
cls.__name__ == 'ExtensionImporter':
import_hooks += 1
self.assert_equal(import_hooks, 1)
def teardown(self):
from flask import ext
for key in ext.__dict__:
self.assert_not_in('.', key)
def test_flaskext_new_simple_import_normal(self):
from flask.ext.newext_simple import ext_id
self.assert_equal(ext_id, 'newext_simple')
def test_flaskext_new_simple_import_module(self):
from flask.ext import newext_simple
self.assert_equal(newext_simple.ext_id, 'newext_simple')
self.assert_equal(newext_simple.__name__, 'flask_newext_simple')
def test_flaskext_new_package_import_normal(self):
from flask.ext.newext_package import ext_id
self.assert_equal(ext_id, 'newext_package')
def test_flaskext_new_package_import_module(self):
from flask.ext import newext_package
self.assert_equal(newext_package.ext_id, 'newext_package')
self.assert_equal(newext_package.__name__, 'flask_newext_package')
def test_flaskext_new_package_import_submodule_function(self):
from flask.ext.newext_package.submodule import test_function
self.assert_equal(test_function(), 42)
def test_flaskext_new_package_import_submodule(self):
from flask.ext.newext_package import submodule
self.assert_equal(submodule.__name__, 'flask_newext_package.submodule')
self.assert_equal(submodule.test_function(), 42)
def test_flaskext_old_simple_import_normal(self):
from flask.ext.oldext_simple import ext_id
self.assert_equal(ext_id, 'oldext_simple')
def test_flaskext_old_simple_import_module(self):
from flask.ext import oldext_simple
self.assert_equal(oldext_simple.ext_id, 'oldext_simple')
self.assert_equal(oldext_simple.__name__, 'flaskext.oldext_simple')
def test_flaskext_old_package_import_normal(self):
from flask.ext.oldext_package import ext_id
self.assert_equal(ext_id, 'oldext_package')
def test_flaskext_old_package_import_module(self):
from flask.ext import oldext_package
self.assert_equal(oldext_package.ext_id, 'oldext_package')
self.assert_equal(oldext_package.__name__, 'flaskext.oldext_package')
def test_flaskext_old_package_import_submodule(self):
from flask.ext.oldext_package import submodule
self.assert_equal(submodule.__name__, 'flaskext.oldext_package.submodule')
self.assert_equal(submodule.test_function(), 42)
def test_flaskext_old_package_import_submodule_function(self):
from flask.ext.oldext_package.submodule import test_function
self.assert_equal(test_function(), 42)
def test_flaskext_broken_package_no_module_caching(self):
for x in range(2):
with self.assert_raises(ImportError):
import flask.ext.broken
def test_no_error_swallowing(self):
try:
import flask.ext.broken
except ImportError:
exc_type, exc_value, tb = sys.exc_info()
self.assert_true(exc_type is ImportError)
if PY2:
message = 'No module named missing_module'
else:
message = 'No module named \'missing_module\''
self.assert_equal(str(exc_value), message)
self.assert_true(tb.tb_frame.f_globals is globals())
# reraise() adds a second frame so we need to skip that one too.
# On PY3 we even have another one :(
next = tb.tb_next.tb_next
if not PY2:
next = next.tb_next
self.assert_in('flask_broken/__init__.py', next.tb_frame.f_code.co_filename)
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(ExtImportHookTestCase))
return suite

View File

@@ -1,593 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask.testsuite.helpers
~~~~~~~~~~~~~~~~~~~~~~~
Various helpers.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import os
import flask
import unittest
from logging import StreamHandler
from flask.testsuite import FlaskTestCase, catch_warnings, catch_stderr
from werkzeug.http import parse_cache_control_header, parse_options_header
from flask._compat import StringIO, text_type
def has_encoding(name):
try:
import codecs
codecs.lookup(name)
return True
except LookupError:
return False
class JSONTestCase(FlaskTestCase):
def test_json_bad_requests(self):
app = flask.Flask(__name__)
@app.route('/json', methods=['POST'])
def return_json():
return flask.jsonify(foo=text_type(flask.request.get_json()))
c = app.test_client()
rv = c.post('/json', data='malformed', content_type='application/json')
self.assert_equal(rv.status_code, 400)
def test_json_body_encoding(self):
app = flask.Flask(__name__)
app.testing = True
@app.route('/')
def index():
return flask.request.get_json()
c = app.test_client()
resp = c.get('/', data=u'"Hällo Wörld"'.encode('iso-8859-15'),
content_type='application/json; charset=iso-8859-15')
self.assert_equal(resp.data, u'Hällo Wörld'.encode('utf-8'))
def test_jsonify(self):
d = dict(a=23, b=42, c=[1, 2, 3])
app = flask.Flask(__name__)
@app.route('/kw')
def return_kwargs():
return flask.jsonify(**d)
@app.route('/dict')
def return_dict():
return flask.jsonify(d)
c = app.test_client()
for url in '/kw', '/dict':
rv = c.get(url)
self.assert_equal(rv.mimetype, 'application/json')
self.assert_equal(flask.json.loads(rv.data), d)
def test_json_as_unicode(self):
app = flask.Flask(__name__)
app.config['JSON_AS_ASCII'] = True
with app.app_context():
rv = flask.json.dumps(u'\N{SNOWMAN}')
self.assert_equal(rv, '"\\u2603"')
app.config['JSON_AS_ASCII'] = False
with app.app_context():
rv = flask.json.dumps(u'\N{SNOWMAN}')
self.assert_equal(rv, u'"\u2603"')
def test_json_attr(self):
app = flask.Flask(__name__)
@app.route('/add', methods=['POST'])
def add():
json = flask.request.get_json()
return text_type(json['a'] + json['b'])
c = app.test_client()
rv = c.post('/add', data=flask.json.dumps({'a': 1, 'b': 2}),
content_type='application/json')
self.assert_equal(rv.data, b'3')
def test_template_escaping(self):
app = flask.Flask(__name__)
render = flask.render_template_string
with app.test_request_context():
rv = flask.json.htmlsafe_dumps('</script>')
self.assert_equal(rv, u'"\\u003c/script\\u003e"')
self.assert_equal(type(rv), text_type)
rv = render('{{ "</script>"|tojson }}')
self.assert_equal(rv, '"\\u003c/script\\u003e"')
rv = render('{{ "<\0/script>"|tojson }}')
self.assert_equal(rv, '"\\u003c\\u0000/script\\u003e"')
rv = render('{{ "<!--<script>"|tojson }}')
self.assert_equal(rv, '"\\u003c!--\\u003cscript\\u003e"')
rv = render('{{ "&"|tojson }}')
self.assert_equal(rv, '"\\u0026"')
rv = render('{{ "\'"|tojson }}')
self.assert_equal(rv, '"\\u0027"')
rv = render("<a ng-data='{{ data|tojson }}'></a>",
data={'x': ["foo", "bar", "baz'"]})
self.assert_equal(rv,
'<a ng-data=\'{"x": ["foo", "bar", "baz\\u0027"]}\'></a>')
def test_json_customization(self):
class X(object):
def __init__(self, val):
self.val = val
class MyEncoder(flask.json.JSONEncoder):
def default(self, o):
if isinstance(o, X):
return '<%d>' % o.val
return flask.json.JSONEncoder.default(self, o)
class MyDecoder(flask.json.JSONDecoder):
def __init__(self, *args, **kwargs):
kwargs.setdefault('object_hook', self.object_hook)
flask.json.JSONDecoder.__init__(self, *args, **kwargs)
def object_hook(self, obj):
if len(obj) == 1 and '_foo' in obj:
return X(obj['_foo'])
return obj
app = flask.Flask(__name__)
app.testing = True
app.json_encoder = MyEncoder
app.json_decoder = MyDecoder
@app.route('/', methods=['POST'])
def index():
return flask.json.dumps(flask.request.get_json()['x'])
c = app.test_client()
rv = c.post('/', data=flask.json.dumps({
'x': {'_foo': 42}
}), content_type='application/json')
self.assertEqual(rv.data, b'"<42>"')
def test_modified_url_encoding(self):
class ModifiedRequest(flask.Request):
url_charset = 'euc-kr'
app = flask.Flask(__name__)
app.testing = True
app.request_class = ModifiedRequest
app.url_map.charset = 'euc-kr'
@app.route('/')
def index():
return flask.request.args['foo']
rv = app.test_client().get(u'/?foo=정상처리'.encode('euc-kr'))
self.assert_equal(rv.status_code, 200)
self.assert_equal(rv.data, u'정상처리'.encode('utf-8'))
if not has_encoding('euc-kr'):
test_modified_url_encoding = None
def test_json_key_sorting(self):
app = flask.Flask(__name__)
app.testing = True
self.assert_equal(app.config['JSON_SORT_KEYS'], True)
d = dict.fromkeys(range(20), 'foo')
@app.route('/')
def index():
return flask.jsonify(values=d)
c = app.test_client()
rv = c.get('/')
lines = [x.strip() for x in rv.data.strip().decode('utf-8').splitlines()]
self.assert_equal(lines, [
'{',
'"values": {',
'"0": "foo",',
'"1": "foo",',
'"2": "foo",',
'"3": "foo",',
'"4": "foo",',
'"5": "foo",',
'"6": "foo",',
'"7": "foo",',
'"8": "foo",',
'"9": "foo",',
'"10": "foo",',
'"11": "foo",',
'"12": "foo",',
'"13": "foo",',
'"14": "foo",',
'"15": "foo",',
'"16": "foo",',
'"17": "foo",',
'"18": "foo",',
'"19": "foo"',
'}',
'}'
])
class SendfileTestCase(FlaskTestCase):
def test_send_file_regular(self):
app = flask.Flask(__name__)
with app.test_request_context():
rv = flask.send_file('static/index.html')
self.assert_true(rv.direct_passthrough)
self.assert_equal(rv.mimetype, 'text/html')
with app.open_resource('static/index.html') as f:
rv.direct_passthrough = False
self.assert_equal(rv.data, f.read())
rv.close()
def test_send_file_xsendfile(self):
app = flask.Flask(__name__)
app.use_x_sendfile = True
with app.test_request_context():
rv = flask.send_file('static/index.html')
self.assert_true(rv.direct_passthrough)
self.assert_in('x-sendfile', rv.headers)
self.assert_equal(rv.headers['x-sendfile'],
os.path.join(app.root_path, 'static/index.html'))
self.assert_equal(rv.mimetype, 'text/html')
rv.close()
def test_send_file_object(self):
app = flask.Flask(__name__)
with catch_warnings() as captured:
with app.test_request_context():
f = open(os.path.join(app.root_path, 'static/index.html'))
rv = flask.send_file(f)
rv.direct_passthrough = False
with app.open_resource('static/index.html') as f:
self.assert_equal(rv.data, f.read())
self.assert_equal(rv.mimetype, 'text/html')
rv.close()
# mimetypes + etag
self.assert_equal(len(captured), 2)
app.use_x_sendfile = True
with catch_warnings() as captured:
with app.test_request_context():
f = open(os.path.join(app.root_path, 'static/index.html'))
rv = flask.send_file(f)
self.assert_equal(rv.mimetype, 'text/html')
self.assert_in('x-sendfile', rv.headers)
self.assert_equal(rv.headers['x-sendfile'],
os.path.join(app.root_path, 'static/index.html'))
rv.close()
# mimetypes + etag
self.assert_equal(len(captured), 2)
app.use_x_sendfile = False
with app.test_request_context():
with catch_warnings() as captured:
f = StringIO('Test')
rv = flask.send_file(f)
rv.direct_passthrough = False
self.assert_equal(rv.data, b'Test')
self.assert_equal(rv.mimetype, 'application/octet-stream')
rv.close()
# etags
self.assert_equal(len(captured), 1)
with catch_warnings() as captured:
f = StringIO('Test')
rv = flask.send_file(f, mimetype='text/plain')
rv.direct_passthrough = False
self.assert_equal(rv.data, b'Test')
self.assert_equal(rv.mimetype, 'text/plain')
rv.close()
# etags
self.assert_equal(len(captured), 1)
app.use_x_sendfile = True
with catch_warnings() as captured:
with app.test_request_context():
f = StringIO('Test')
rv = flask.send_file(f)
self.assert_not_in('x-sendfile', rv.headers)
rv.close()
# etags
self.assert_equal(len(captured), 1)
def test_attachment(self):
app = flask.Flask(__name__)
with catch_warnings() as captured:
with app.test_request_context():
f = open(os.path.join(app.root_path, 'static/index.html'))
rv = flask.send_file(f, as_attachment=True)
value, options = parse_options_header(rv.headers['Content-Disposition'])
self.assert_equal(value, 'attachment')
rv.close()
# mimetypes + etag
self.assert_equal(len(captured), 2)
with app.test_request_context():
self.assert_equal(options['filename'], 'index.html')
rv = flask.send_file('static/index.html', as_attachment=True)
value, options = parse_options_header(rv.headers['Content-Disposition'])
self.assert_equal(value, 'attachment')
self.assert_equal(options['filename'], 'index.html')
rv.close()
with app.test_request_context():
rv = flask.send_file(StringIO('Test'), as_attachment=True,
attachment_filename='index.txt',
add_etags=False)
self.assert_equal(rv.mimetype, 'text/plain')
value, options = parse_options_header(rv.headers['Content-Disposition'])
self.assert_equal(value, 'attachment')
self.assert_equal(options['filename'], 'index.txt')
rv.close()
def test_static_file(self):
app = flask.Flask(__name__)
# default cache timeout is 12 hours
with app.test_request_context():
# Test with static file handler.
rv = app.send_static_file('index.html')
cc = parse_cache_control_header(rv.headers['Cache-Control'])
self.assert_equal(cc.max_age, 12 * 60 * 60)
rv.close()
# Test again with direct use of send_file utility.
rv = flask.send_file('static/index.html')
cc = parse_cache_control_header(rv.headers['Cache-Control'])
self.assert_equal(cc.max_age, 12 * 60 * 60)
rv.close()
app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 3600
with app.test_request_context():
# Test with static file handler.
rv = app.send_static_file('index.html')
cc = parse_cache_control_header(rv.headers['Cache-Control'])
self.assert_equal(cc.max_age, 3600)
rv.close()
# Test again with direct use of send_file utility.
rv = flask.send_file('static/index.html')
cc = parse_cache_control_header(rv.headers['Cache-Control'])
self.assert_equal(cc.max_age, 3600)
rv.close()
class StaticFileApp(flask.Flask):
def get_send_file_max_age(self, filename):
return 10
app = StaticFileApp(__name__)
with app.test_request_context():
# Test with static file handler.
rv = app.send_static_file('index.html')
cc = parse_cache_control_header(rv.headers['Cache-Control'])
self.assert_equal(cc.max_age, 10)
rv.close()
# Test again with direct use of send_file utility.
rv = flask.send_file('static/index.html')
cc = parse_cache_control_header(rv.headers['Cache-Control'])
self.assert_equal(cc.max_age, 10)
rv.close()
class LoggingTestCase(FlaskTestCase):
def test_logger_cache(self):
app = flask.Flask(__name__)
logger1 = app.logger
self.assert_true(app.logger is logger1)
self.assert_equal(logger1.name, __name__)
app.logger_name = __name__ + '/test_logger_cache'
self.assert_true(app.logger is not logger1)
def test_debug_log(self):
app = flask.Flask(__name__)
app.debug = True
@app.route('/')
def index():
app.logger.warning('the standard library is dead')
app.logger.debug('this is a debug statement')
return ''
@app.route('/exc')
def exc():
1 // 0
with app.test_client() as c:
with catch_stderr() as err:
c.get('/')
out = err.getvalue()
self.assert_in('WARNING in helpers [', out)
self.assert_in(os.path.basename(__file__.rsplit('.', 1)[0] + '.py'), out)
self.assert_in('the standard library is dead', out)
self.assert_in('this is a debug statement', out)
with catch_stderr() as err:
try:
c.get('/exc')
except ZeroDivisionError:
pass
else:
self.assert_true(False, 'debug log ate the exception')
def test_debug_log_override(self):
app = flask.Flask(__name__)
app.debug = True
app.logger_name = 'flask_tests/test_debug_log_override'
app.logger.level = 10
self.assert_equal(app.logger.level, 10)
def test_exception_logging(self):
out = StringIO()
app = flask.Flask(__name__)
app.logger_name = 'flask_tests/test_exception_logging'
app.logger.addHandler(StreamHandler(out))
@app.route('/')
def index():
1 // 0
rv = app.test_client().get('/')
self.assert_equal(rv.status_code, 500)
self.assert_in(b'Internal Server Error', rv.data)
err = out.getvalue()
self.assert_in('Exception on / [GET]', err)
self.assert_in('Traceback (most recent call last):', err)
self.assert_in('1 // 0', err)
self.assert_in('ZeroDivisionError:', err)
def test_processor_exceptions(self):
app = flask.Flask(__name__)
@app.before_request
def before_request():
if trigger == 'before':
1 // 0
@app.after_request
def after_request(response):
if trigger == 'after':
1 // 0
return response
@app.route('/')
def index():
return 'Foo'
@app.errorhandler(500)
def internal_server_error(e):
return 'Hello Server Error', 500
for trigger in 'before', 'after':
rv = app.test_client().get('/')
self.assert_equal(rv.status_code, 500)
self.assert_equal(rv.data, b'Hello Server Error')
def test_url_for_with_anchor(self):
app = flask.Flask(__name__)
@app.route('/')
def index():
return '42'
with app.test_request_context():
self.assert_equal(flask.url_for('index', _anchor='x y'),
'/#x%20y')
def test_url_for_with_scheme(self):
app = flask.Flask(__name__)
@app.route('/')
def index():
return '42'
with app.test_request_context():
self.assert_equal(flask.url_for('index',
_external=True,
_scheme='https'),
'https://localhost/')
def test_url_for_with_scheme_not_external(self):
app = flask.Flask(__name__)
@app.route('/')
def index():
return '42'
with app.test_request_context():
self.assert_raises(ValueError,
flask.url_for,
'index',
_scheme='https')
def test_url_with_method(self):
from flask.views import MethodView
app = flask.Flask(__name__)
class MyView(MethodView):
def get(self, id=None):
if id is None:
return 'List'
return 'Get %d' % id
def post(self):
return 'Create'
myview = MyView.as_view('myview')
app.add_url_rule('/myview/', methods=['GET'],
view_func=myview)
app.add_url_rule('/myview/<int:id>', methods=['GET'],
view_func=myview)
app.add_url_rule('/myview/create', methods=['POST'],
view_func=myview)
with app.test_request_context():
self.assert_equal(flask.url_for('myview', _method='GET'),
'/myview/')
self.assert_equal(flask.url_for('myview', id=42, _method='GET'),
'/myview/42')
self.assert_equal(flask.url_for('myview', _method='POST'),
'/myview/create')
class NoImportsTestCase(FlaskTestCase):
"""Test Flasks are created without import.
Avoiding ``__import__`` helps create Flask instances where there are errors
at import time. Those runtime errors will be apparent to the user soon
enough, but tools which build Flask instances meta-programmatically benefit
from a Flask which does not ``__import__``. Instead of importing to
retrieve file paths or metadata on a module or package, use the pkgutil and
imp modules in the Python standard library.
"""
def test_name_with_import_error(self):
try:
flask.Flask('importerror')
except NotImplementedError:
self.fail('Flask(import_name) is importing import_name.')
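# A rough sketch (helper name illustrative) of the pkgutil-based lookup the
# docstring above recommends: resolve a module's file path without executing
# the module itself.
def _path_without_import(name):
    import pkgutil
    loader = pkgutil.get_loader(name)
    # Not every loader exposes get_filename (see LimitedLoaderMockWrapper in
    # the config tests), so treat it as optional.
    if loader is not None and hasattr(loader, 'get_filename'):
        return loader.get_filename(name)
    return None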
class StreamingTestCase(FlaskTestCase):
def test_streaming_with_context(self):
app = flask.Flask(__name__)
app.testing = True
@app.route('/')
def index():
def generate():
yield 'Hello '
yield flask.request.args['name']
yield '!'
return flask.Response(flask.stream_with_context(generate()))
c = app.test_client()
rv = c.get('/?name=World')
self.assertEqual(rv.data, b'Hello World!')
def test_streaming_with_context_as_decorator(self):
app = flask.Flask(__name__)
app.testing = True
@app.route('/')
def index():
@flask.stream_with_context
def generate():
yield 'Hello '
yield flask.request.args['name']
yield '!'
return flask.Response(generate())
c = app.test_client()
rv = c.get('/?name=World')
self.assertEqual(rv.data, b'Hello World!')
def test_streaming_with_context_and_custom_close(self):
app = flask.Flask(__name__)
app.testing = True
called = []
class Wrapper(object):
def __init__(self, gen):
self._gen = gen
def __iter__(self):
return self
def close(self):
called.append(42)
def __next__(self):
return next(self._gen)
next = __next__
@app.route('/')
def index():
def generate():
yield 'Hello '
yield flask.request.args['name']
yield '!'
return flask.Response(flask.stream_with_context(
Wrapper(generate())))
c = app.test_client()
rv = c.get('/?name=World')
self.assertEqual(rv.data, b'Hello World!')
self.assertEqual(called, [42])
def suite():
suite = unittest.TestSuite()
if flask.json_available:
suite.addTest(unittest.makeSuite(JSONTestCase))
suite.addTest(unittest.makeSuite(SendfileTestCase))
suite.addTest(unittest.makeSuite(LoggingTestCase))
suite.addTest(unittest.makeSuite(NoImportsTestCase))
suite.addTest(unittest.makeSuite(StreamingTestCase))
return suite

View File

@@ -1,116 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask.testsuite.regression
~~~~~~~~~~~~~~~~~~~~~~~~~~
Tests regressions.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import os
import gc
import sys
import flask
import threading
import unittest
from werkzeug.exceptions import NotFound
from flask.testsuite import FlaskTestCase
_gc_lock = threading.Lock()
class _NoLeakAsserter(object):
def __init__(self, testcase):
self.testcase = testcase
def __enter__(self):
gc.disable()
_gc_lock.acquire()
loc = flask._request_ctx_stack._local
# Force Python to track this dictionary at all times.
# This is necessary since Python only starts tracking
# dicts if they contain mutable objects. It's a horrible,
# horrible hack but makes this kinda testable.
loc.__storage__['FOOO'] = [1, 2, 3]
gc.collect()
self.old_objects = len(gc.get_objects())
def __exit__(self, exc_type, exc_value, tb):
if not hasattr(sys, 'getrefcount'):
gc.collect()
new_objects = len(gc.get_objects())
if new_objects > self.old_objects:
self.testcase.fail('Example code leaked')
_gc_lock.release()
gc.enable()
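# A small standalone illustration (demo name illustrative) of the CPython
# detail the __enter__ comment above relies on: dicts holding only atomic
# values stay untracked by the cyclic GC until a container is stored in them.
def _dict_tracking_demo():
    import gc
    d = {'answer': 42}
    assert not gc.is_tracked(d)   # only atomic values: not GC-tracked
    d['payload'] = [1, 2, 3]
    assert gc.is_tracked(d)       # a mutable container makes it tracked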
class MemoryTestCase(FlaskTestCase):
def assert_no_leak(self):
return _NoLeakAsserter(self)
def test_memory_consumption(self):
app = flask.Flask(__name__)
@app.route('/')
def index():
return flask.render_template('simple_template.html', whiskey=42)
def fire():
with app.test_client() as c:
rv = c.get('/')
self.assert_equal(rv.status_code, 200)
self.assert_equal(rv.data, b'<h1>42</h1>')
# Trigger caches
fire()
        # This test only works on CPython 2.7 and newer (not PyPy).
if sys.version_info >= (2, 7) and \
not hasattr(sys, 'pypy_translation_info'):
with self.assert_no_leak():
for x in range(10):
fire()
def test_safe_join_toplevel_pardir(self):
from flask.helpers import safe_join
with self.assert_raises(NotFound):
safe_join('/foo', '..')
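# A hedged sketch of the safe_join behaviour that regression test pins down
# (helper name and sample paths are illustrative only):
def _safe_join_sketch():
    from flask.helpers import safe_join
    from werkzeug.exceptions import NotFound
    # A well-behaved relative path is joined onto the directory as usual.
    assert safe_join('/var/www', 'static/logo.png').startswith('/var/www')
    # Anything that would escape the directory raises NotFound, which is the
    # case the test above checks for a bare os.pardir.
    try:
        safe_join('/var/www', '..')
    except NotFound:
        pass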
class ExceptionTestCase(FlaskTestCase):
def test_aborting(self):
class Foo(Exception):
whatever = 42
app = flask.Flask(__name__)
app.testing = True
@app.errorhandler(Foo)
def handle_foo(e):
return str(e.whatever)
@app.route('/')
def index():
raise flask.abort(flask.redirect(flask.url_for('test')))
@app.route('/test')
def test():
raise Foo()
with app.test_client() as c:
rv = c.get('/')
self.assertEqual(rv.headers['Location'], 'http://localhost/test')
rv = c.get('/test')
self.assertEqual(rv.data, b'42')
def suite():
suite = unittest.TestSuite()
if os.environ.get('RUN_FLASK_MEMORY_TESTS') == '1':
suite.addTest(unittest.makeSuite(MemoryTestCase))
suite.addTest(unittest.makeSuite(ExceptionTestCase))
return suite

View File

@@ -1,185 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask.testsuite.reqctx
~~~~~~~~~~~~~~~~~~~~~~
Tests the request context.
:copyright: (c) 2012 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import flask
import unittest
try:
from greenlet import greenlet
except ImportError:
greenlet = None
from flask.testsuite import FlaskTestCase
class RequestContextTestCase(FlaskTestCase):
def test_teardown_on_pop(self):
buffer = []
app = flask.Flask(__name__)
@app.teardown_request
def end_of_request(exception):
buffer.append(exception)
ctx = app.test_request_context()
ctx.push()
self.assert_equal(buffer, [])
ctx.pop()
self.assert_equal(buffer, [None])
def test_proper_test_request_context(self):
app = flask.Flask(__name__)
app.config.update(
SERVER_NAME='localhost.localdomain:5000'
)
@app.route('/')
def index():
return None
@app.route('/', subdomain='foo')
def sub():
return None
with app.test_request_context('/'):
self.assert_equal(flask.url_for('index', _external=True), 'http://localhost.localdomain:5000/')
with app.test_request_context('/'):
self.assert_equal(flask.url_for('sub', _external=True), 'http://foo.localhost.localdomain:5000/')
try:
with app.test_request_context('/', environ_overrides={'HTTP_HOST': 'localhost'}):
pass
except Exception as e:
self.assert_true(isinstance(e, ValueError))
self.assert_equal(str(e), "the server name provided " +
"('localhost.localdomain:5000') does not match the " + \
"server name from the WSGI environment ('localhost')")
try:
app.config.update(SERVER_NAME='localhost')
with app.test_request_context('/', environ_overrides={'SERVER_NAME': 'localhost'}):
pass
except ValueError as e:
raise ValueError(
"No ValueError exception should have been raised \"%s\"" % e
)
try:
app.config.update(SERVER_NAME='localhost:80')
with app.test_request_context('/', environ_overrides={'SERVER_NAME': 'localhost:80'}):
pass
except ValueError as e:
raise ValueError(
"No ValueError exception should have been raised \"%s\"" % e
)
def test_context_binding(self):
app = flask.Flask(__name__)
@app.route('/')
def index():
return 'Hello %s!' % flask.request.args['name']
@app.route('/meh')
def meh():
return flask.request.url
with app.test_request_context('/?name=World'):
self.assert_equal(index(), 'Hello World!')
with app.test_request_context('/meh'):
self.assert_equal(meh(), 'http://localhost/meh')
self.assert_true(flask._request_ctx_stack.top is None)
def test_context_test(self):
app = flask.Flask(__name__)
self.assert_false(flask.request)
self.assert_false(flask.has_request_context())
ctx = app.test_request_context()
ctx.push()
try:
self.assert_true(flask.request)
self.assert_true(flask.has_request_context())
finally:
ctx.pop()
def test_manual_context_binding(self):
app = flask.Flask(__name__)
@app.route('/')
def index():
return 'Hello %s!' % flask.request.args['name']
ctx = app.test_request_context('/?name=World')
ctx.push()
self.assert_equal(index(), 'Hello World!')
ctx.pop()
try:
index()
except RuntimeError:
pass
else:
self.assert_true(0, 'expected runtime error')
def test_greenlet_context_copying(self):
app = flask.Flask(__name__)
greenlets = []
@app.route('/')
def index():
reqctx = flask._request_ctx_stack.top.copy()
def g():
self.assert_false(flask.request)
self.assert_false(flask.current_app)
with reqctx:
self.assert_true(flask.request)
self.assert_equal(flask.current_app, app)
self.assert_equal(flask.request.path, '/')
self.assert_equal(flask.request.args['foo'], 'bar')
self.assert_false(flask.request)
return 42
greenlets.append(greenlet(g))
return 'Hello World!'
rv = app.test_client().get('/?foo=bar')
self.assert_equal(rv.data, b'Hello World!')
result = greenlets[0].run()
self.assert_equal(result, 42)
def test_greenlet_context_copying_api(self):
app = flask.Flask(__name__)
greenlets = []
@app.route('/')
def index():
reqctx = flask._request_ctx_stack.top.copy()
@flask.copy_current_request_context
def g():
self.assert_true(flask.request)
self.assert_equal(flask.current_app, app)
self.assert_equal(flask.request.path, '/')
self.assert_equal(flask.request.args['foo'], 'bar')
return 42
greenlets.append(greenlet(g))
return 'Hello World!'
rv = app.test_client().get('/?foo=bar')
self.assert_equal(rv.data, b'Hello World!')
result = greenlets[0].run()
self.assert_equal(result, 42)
# Disable test if we don't have greenlets available
if greenlet is None:
test_greenlet_context_copying = None
test_greenlet_context_copying_api = None
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(RequestContextTestCase))
return suite

View File

@@ -1,153 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask.testsuite.signals
~~~~~~~~~~~~~~~~~~~~~~~
Signalling.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import flask
import unittest
from flask.testsuite import FlaskTestCase
class SignalsTestCase(FlaskTestCase):
def test_template_rendered(self):
app = flask.Flask(__name__)
@app.route('/')
def index():
return flask.render_template('simple_template.html', whiskey=42)
recorded = []
def record(sender, template, context):
recorded.append((template, context))
flask.template_rendered.connect(record, app)
try:
app.test_client().get('/')
self.assert_equal(len(recorded), 1)
template, context = recorded[0]
self.assert_equal(template.name, 'simple_template.html')
self.assert_equal(context['whiskey'], 42)
finally:
flask.template_rendered.disconnect(record, app)
def test_request_signals(self):
app = flask.Flask(__name__)
calls = []
def before_request_signal(sender):
calls.append('before-signal')
def after_request_signal(sender, response):
self.assert_equal(response.data, b'stuff')
calls.append('after-signal')
@app.before_request
def before_request_handler():
calls.append('before-handler')
@app.after_request
def after_request_handler(response):
calls.append('after-handler')
response.data = 'stuff'
return response
@app.route('/')
def index():
calls.append('handler')
return 'ignored anyway'
flask.request_started.connect(before_request_signal, app)
flask.request_finished.connect(after_request_signal, app)
try:
rv = app.test_client().get('/')
self.assert_equal(rv.data, b'stuff')
self.assert_equal(calls, ['before-signal', 'before-handler',
'handler', 'after-handler',
'after-signal'])
finally:
flask.request_started.disconnect(before_request_signal, app)
flask.request_finished.disconnect(after_request_signal, app)
def test_request_exception_signal(self):
app = flask.Flask(__name__)
recorded = []
@app.route('/')
def index():
1 // 0
def record(sender, exception):
recorded.append(exception)
flask.got_request_exception.connect(record, app)
try:
self.assert_equal(app.test_client().get('/').status_code, 500)
self.assert_equal(len(recorded), 1)
self.assert_true(isinstance(recorded[0], ZeroDivisionError))
finally:
flask.got_request_exception.disconnect(record, app)
def test_appcontext_signals(self):
app = flask.Flask(__name__)
recorded = []
def record_push(sender, **kwargs):
recorded.append('push')
def record_pop(sender, **kwargs):
recorded.append('pop')
@app.route('/')
def index():
return 'Hello'
flask.appcontext_pushed.connect(record_push, app)
flask.appcontext_popped.connect(record_pop, app)
try:
with app.test_client() as c:
rv = c.get('/')
self.assert_equal(rv.data, b'Hello')
self.assert_equal(recorded, ['push'])
self.assert_equal(recorded, ['push', 'pop'])
finally:
flask.appcontext_pushed.disconnect(record_push, app)
flask.appcontext_popped.disconnect(record_pop, app)
def test_flash_signal(self):
app = flask.Flask(__name__)
app.config['SECRET_KEY'] = 'secret'
@app.route('/')
def index():
flask.flash('This is a flash message', category='notice')
return flask.redirect('/other')
recorded = []
def record(sender, message, category):
recorded.append((message, category))
flask.message_flashed.connect(record, app)
try:
client = app.test_client()
with client.session_transaction():
client.get('/')
self.assert_equal(len(recorded), 1)
message, category = recorded[0]
self.assert_equal(message, 'This is a flash message')
self.assert_equal(category, 'notice')
finally:
flask.message_flashed.disconnect(record, app)
def suite():
suite = unittest.TestSuite()
if flask.signals_available:
suite.addTest(unittest.makeSuite(SignalsTestCase))
return suite
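A minimal standalone sketch of the signal wiring tested above, assuming blinker is installed (i.e. flask.signals_available is True); record() and captured are illustrative names.

import flask

app = flask.Flask(__name__)

@app.route('/')
def index():
    return flask.render_template_string('Hello {{ name }}!', name='World')

captured = []

def record(sender, template, context, **extra):
    # template.name is None for string templates; the context is still passed
    captured.append(context['name'])

flask.template_rendered.connect(record, app)
try:
    app.test_client().get('/')
    assert captured == ['World']
finally:
    flask.template_rendered.disconnect(record, app)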

View File

@ -1 +0,0 @@
<h1>Hello World!</h1>

View File

@ -1,46 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask.testsuite.subclassing
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Test that certain behavior of flask can be customized by
subclasses.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import flask
import unittest
from logging import StreamHandler
from flask.testsuite import FlaskTestCase
from flask._compat import StringIO
class FlaskSubclassingTestCase(FlaskTestCase):
def test_suppressed_exception_logging(self):
class SuppressedFlask(flask.Flask):
def log_exception(self, exc_info):
pass
out = StringIO()
app = SuppressedFlask(__name__)
app.logger_name = 'flask_tests/test_suppressed_exception_logging'
app.logger.addHandler(StreamHandler(out))
@app.route('/')
def index():
1 // 0
rv = app.test_client().get('/')
self.assert_equal(rv.status_code, 500)
self.assert_in(b'Internal Server Error', rv.data)
err = out.getvalue()
self.assert_equal(err, '')
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(FlaskSubclassingTestCase))
return suite

View File

@ -1 +0,0 @@
{% macro hello(name) %}Hello {{ name }}!{% endmacro %}

View File

@ -1 +0,0 @@
<p>{{ value }}|{{ injected_value }}

View File

@ -1,6 +0,0 @@
{{ text }}
{{ html }}
{% autoescape false %}{{ text }}
{{ html }}{% endautoescape %}
{% autoescape true %}{{ text }}
{{ html }}{% endautoescape %}

View File

@ -1 +0,0 @@
{{ foo}} Mail

View File

@ -1 +0,0 @@
I'm nested

View File

@ -1 +0,0 @@
<h1>{{ whiskey }}</h1>

View File

@ -1 +0,0 @@
{{ value|super_reverse }}

View File

@ -1,3 +0,0 @@
{% if value is boolean %}
Success!
{% endif %}

View File

@ -1,302 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask.testsuite.templating
~~~~~~~~~~~~~~~~~~~~~~~~~~
Template functionality
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import flask
import unittest
from flask.testsuite import FlaskTestCase
class TemplatingTestCase(FlaskTestCase):
def test_context_processing(self):
app = flask.Flask(__name__)
@app.context_processor
def context_processor():
return {'injected_value': 42}
@app.route('/')
def index():
return flask.render_template('context_template.html', value=23)
rv = app.test_client().get('/')
self.assert_equal(rv.data, b'<p>23|42')
def test_original_win(self):
app = flask.Flask(__name__)
@app.route('/')
def index():
return flask.render_template_string('{{ config }}', config=42)
rv = app.test_client().get('/')
self.assert_equal(rv.data, b'42')
def test_request_less_rendering(self):
app = flask.Flask(__name__)
app.config['WORLD_NAME'] = 'Special World'
@app.context_processor
def context_processor():
return dict(foo=42)
with app.app_context():
rv = flask.render_template_string('Hello {{ config.WORLD_NAME }} '
'{{ foo }}')
self.assert_equal(rv, 'Hello Special World 42')
def test_standard_context(self):
app = flask.Flask(__name__)
app.secret_key = 'development key'
@app.route('/')
def index():
flask.g.foo = 23
flask.session['test'] = 'aha'
return flask.render_template_string('''
{{ request.args.foo }}
{{ g.foo }}
{{ config.DEBUG }}
{{ session.test }}
''')
rv = app.test_client().get('/?foo=42')
self.assert_equal(rv.data.split(), [b'42', b'23', b'False', b'aha'])
def test_escaping(self):
text = '<p>Hello World!'
app = flask.Flask(__name__)
@app.route('/')
def index():
return flask.render_template('escaping_template.html', text=text,
html=flask.Markup(text))
lines = app.test_client().get('/').data.splitlines()
self.assert_equal(lines, [
b'&lt;p&gt;Hello World!',
b'<p>Hello World!',
b'<p>Hello World!',
b'<p>Hello World!',
b'&lt;p&gt;Hello World!',
b'<p>Hello World!'
])
def test_no_escaping(self):
app = flask.Flask(__name__)
with app.test_request_context():
self.assert_equal(flask.render_template_string('{{ foo }}',
foo='<test>'), '<test>')
self.assert_equal(flask.render_template('mail.txt', foo='<test>'),
'<test> Mail')
def test_macros(self):
app = flask.Flask(__name__)
with app.test_request_context():
macro = flask.get_template_attribute('_macro.html', 'hello')
self.assert_equal(macro('World'), 'Hello World!')
def test_template_filter(self):
app = flask.Flask(__name__)
@app.template_filter()
def my_reverse(s):
return s[::-1]
self.assert_in('my_reverse', app.jinja_env.filters.keys())
self.assert_equal(app.jinja_env.filters['my_reverse'], my_reverse)
self.assert_equal(app.jinja_env.filters['my_reverse']('abcd'), 'dcba')
def test_add_template_filter(self):
app = flask.Flask(__name__)
def my_reverse(s):
return s[::-1]
app.add_template_filter(my_reverse)
self.assert_in('my_reverse', app.jinja_env.filters.keys())
self.assert_equal(app.jinja_env.filters['my_reverse'], my_reverse)
self.assert_equal(app.jinja_env.filters['my_reverse']('abcd'), 'dcba')
def test_template_filter_with_name(self):
app = flask.Flask(__name__)
@app.template_filter('strrev')
def my_reverse(s):
return s[::-1]
self.assert_in('strrev', app.jinja_env.filters.keys())
self.assert_equal(app.jinja_env.filters['strrev'], my_reverse)
self.assert_equal(app.jinja_env.filters['strrev']('abcd'), 'dcba')
def test_add_template_filter_with_name(self):
app = flask.Flask(__name__)
def my_reverse(s):
return s[::-1]
app.add_template_filter(my_reverse, 'strrev')
self.assert_in('strrev', app.jinja_env.filters.keys())
self.assert_equal(app.jinja_env.filters['strrev'], my_reverse)
self.assert_equal(app.jinja_env.filters['strrev']('abcd'), 'dcba')
def test_template_filter_with_template(self):
app = flask.Flask(__name__)
@app.template_filter()
def super_reverse(s):
return s[::-1]
@app.route('/')
def index():
return flask.render_template('template_filter.html', value='abcd')
rv = app.test_client().get('/')
self.assert_equal(rv.data, b'dcba')
def test_add_template_filter_with_template(self):
app = flask.Flask(__name__)
def super_reverse(s):
return s[::-1]
app.add_template_filter(super_reverse)
@app.route('/')
def index():
return flask.render_template('template_filter.html', value='abcd')
rv = app.test_client().get('/')
self.assert_equal(rv.data, b'dcba')
def test_template_filter_with_name_and_template(self):
app = flask.Flask(__name__)
@app.template_filter('super_reverse')
def my_reverse(s):
return s[::-1]
@app.route('/')
def index():
return flask.render_template('template_filter.html', value='abcd')
rv = app.test_client().get('/')
self.assert_equal(rv.data, b'dcba')
def test_add_template_filter_with_name_and_template(self):
app = flask.Flask(__name__)
def my_reverse(s):
return s[::-1]
app.add_template_filter(my_reverse, 'super_reverse')
@app.route('/')
def index():
return flask.render_template('template_filter.html', value='abcd')
rv = app.test_client().get('/')
self.assert_equal(rv.data, b'dcba')
def test_template_test(self):
app = flask.Flask(__name__)
@app.template_test()
def boolean(value):
return isinstance(value, bool)
self.assert_in('boolean', app.jinja_env.tests.keys())
self.assert_equal(app.jinja_env.tests['boolean'], boolean)
self.assert_true(app.jinja_env.tests['boolean'](False))
def test_add_template_test(self):
app = flask.Flask(__name__)
def boolean(value):
return isinstance(value, bool)
app.add_template_test(boolean)
self.assert_in('boolean', app.jinja_env.tests.keys())
self.assert_equal(app.jinja_env.tests['boolean'], boolean)
self.assert_true(app.jinja_env.tests['boolean'](False))
def test_template_test_with_name(self):
app = flask.Flask(__name__)
@app.template_test('boolean')
def is_boolean(value):
return isinstance(value, bool)
self.assert_in('boolean', app.jinja_env.tests.keys())
self.assert_equal(app.jinja_env.tests['boolean'], is_boolean)
self.assert_true(app.jinja_env.tests['boolean'](False))
def test_add_template_test_with_name(self):
app = flask.Flask(__name__)
def is_boolean(value):
return isinstance(value, bool)
app.add_template_test(is_boolean, 'boolean')
self.assert_in('boolean', app.jinja_env.tests.keys())
self.assert_equal(app.jinja_env.tests['boolean'], is_boolean)
self.assert_true(app.jinja_env.tests['boolean'](False))
def test_template_test_with_template(self):
app = flask.Flask(__name__)
@app.template_test()
def boolean(value):
return isinstance(value, bool)
@app.route('/')
def index():
return flask.render_template('template_test.html', value=False)
rv = app.test_client().get('/')
self.assert_in(b'Success!', rv.data)
def test_add_template_test_with_template(self):
app = flask.Flask(__name__)
def boolean(value):
return isinstance(value, bool)
app.add_template_test(boolean)
@app.route('/')
def index():
return flask.render_template('template_test.html', value=False)
rv = app.test_client().get('/')
self.assert_in(b'Success!', rv.data)
def test_template_test_with_name_and_template(self):
app = flask.Flask(__name__)
@app.template_test('boolean')
def is_boolean(value):
return isinstance(value, bool)
@app.route('/')
def index():
return flask.render_template('template_test.html', value=False)
rv = app.test_client().get('/')
self.assert_in(b'Success!', rv.data)
def test_add_template_test_with_name_and_template(self):
app = flask.Flask(__name__)
def is_boolean(value):
return isinstance(value, bool)
app.add_template_test(is_boolean, 'boolean')
@app.route('/')
def index():
return flask.render_template('template_test.html', value=False)
rv = app.test_client().get('/')
self.assert_in(b'Success!', rv.data)
def test_add_template_global(self):
app = flask.Flask(__name__)
@app.template_global()
def get_stuff():
return 42
self.assert_in('get_stuff', app.jinja_env.globals.keys())
self.assert_equal(app.jinja_env.globals['get_stuff'], get_stuff)
self.assert_equal(app.jinja_env.globals['get_stuff'](), 42)
with app.app_context():
rv = flask.render_template_string('{{ get_stuff() }}')
self.assert_equal(rv, '42')
def test_custom_template_loader(self):
class MyFlask(flask.Flask):
def create_global_jinja_loader(self):
from jinja2 import DictLoader
return DictLoader({'index.html': 'Hello Custom World!'})
app = MyFlask(__name__)
@app.route('/')
def index():
return flask.render_template('index.html')
c = app.test_client()
rv = c.get('/')
self.assert_equal(rv.data, b'Hello Custom World!')
def test_iterable_loader(self):
app = flask.Flask(__name__)
@app.context_processor
def context_processor():
return {'whiskey': 'Jameson'}
@app.route('/')
def index():
return flask.render_template(
['no_template.xml', # should skip this one
'simple_template.html', # should render this
'context_template.html'],
value=23)
rv = app.test_client().get('/')
self.assert_equal(rv.data, b'<h1>Jameson</h1>')
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TemplatingTestCase))
return suite
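A minimal standalone sketch of the registration APIs exercised above; the shout filter and positive test are illustrative names, and rendering with only an app context mirrors test_request_less_rendering.

import flask

app = flask.Flask(__name__)

@app.template_filter('shout')
def shout(s):
    return s.upper() + '!'

@app.template_test('positive')
def is_positive(n):
    return n > 0

with app.app_context():
    out = flask.render_template_string(
        "{{ 'hi'|shout }} {% if 4 is positive %}yes{% endif %}")
    assert out == 'HI! yes'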

View File

@ -1,7 +0,0 @@
from flask import Flask
app = Flask(__name__)
from blueprintapp.apps.admin import admin
from blueprintapp.apps.frontend import frontend
app.register_blueprint(admin)
app.register_blueprint(frontend)

View File

@ -1,15 +0,0 @@
from flask import Blueprint, render_template
admin = Blueprint('admin', __name__, url_prefix='/admin',
template_folder='templates',
static_folder='static')
@admin.route('/')
def index():
return render_template('admin/index.html')
@admin.route('/index2')
def index2():
return render_template('./admin/index.html')

View File

@ -1,8 +0,0 @@
from flask import Blueprint, render_template
frontend = Blueprint('frontend', __name__, template_folder='templates')
@frontend.route('/')
def index():
return render_template('frontend/index.html')

View File

@ -1,4 +0,0 @@
import os
import flask
here = os.path.abspath(os.path.dirname(__file__))
app = flask.Flask(__name__)

View File

@ -1,4 +0,0 @@
import os
import flask
here = os.path.abspath(os.path.dirname(__file__))
app = flask.Flask(__name__)

View File

@ -1,2 +0,0 @@
import flask.ext.broken.b
import missing_module

View File

@ -1 +0,0 @@
ext_id = 'newext_package'

View File

@ -1,2 +0,0 @@
def test_function():
return 42

View File

@ -1 +0,0 @@
ext_id = 'newext_simple'

View File

@ -1 +0,0 @@
ext_id = 'oldext_package'

View File

@ -1,2 +0,0 @@
def test_function():
return 42

View File

@ -1 +0,0 @@
ext_id = 'oldext_simple'

View File

@ -1,2 +0,0 @@
# NoImportsTestCase
raise NotImplementedError

View File

@ -1,3 +0,0 @@
import flask
app = flask.Flask(__name__)

View File

@ -1,3 +0,0 @@
import flask
app = flask.Flask(__name__)

View File

@ -1,4 +0,0 @@
import flask
# Test Flask initialization with main module.
app = flask.Flask('__main__')

View File

@ -1,7 +0,0 @@
from flask import Flask
app = Flask(__name__)
from moduleapp.apps.admin import admin
from moduleapp.apps.frontend import frontend
app.register_module(admin)
app.register_module(frontend)

View File

@ -1,14 +0,0 @@
from flask import Module, render_template
admin = Module(__name__, url_prefix='/admin')
@admin.route('/')
def index():
return render_template('admin/index.html')
@admin.route('/index2')
def index2():
return render_template('./admin/index.html')

View File

@ -1 +0,0 @@
Hello from the Admin

View File

@ -1,9 +0,0 @@
from flask import Module, render_template
frontend = Module(__name__)
@frontend.route('/')
def index():
return render_template('frontend/index.html')

View File

@ -1 +0,0 @@
Hello from the Frontend

View File

@ -1,3 +0,0 @@
import flask
app = flask.Flask(__name__)

View File

@ -1,4 +0,0 @@
from flask import Module
mod = Module(__name__, 'foo', subdomain='foo')

View File

@ -1 +0,0 @@
Hello Subdomain

View File

@ -1,242 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask.testsuite.testing
~~~~~~~~~~~~~~~~~~~~~~~
Test client and more.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import flask
import unittest
from flask.testsuite import FlaskTestCase
from flask._compat import text_type
class TestToolsTestCase(FlaskTestCase):
def test_environ_defaults_from_config(self):
app = flask.Flask(__name__)
app.testing = True
app.config['SERVER_NAME'] = 'example.com:1234'
app.config['APPLICATION_ROOT'] = '/foo'
@app.route('/')
def index():
return flask.request.url
ctx = app.test_request_context()
self.assert_equal(ctx.request.url, 'http://example.com:1234/foo/')
with app.test_client() as c:
rv = c.get('/')
self.assert_equal(rv.data, b'http://example.com:1234/foo/')
def test_environ_defaults(self):
app = flask.Flask(__name__)
app.testing = True
@app.route('/')
def index():
return flask.request.url
ctx = app.test_request_context()
self.assert_equal(ctx.request.url, 'http://localhost/')
with app.test_client() as c:
rv = c.get('/')
self.assert_equal(rv.data, b'http://localhost/')
def test_redirect_keep_session(self):
app = flask.Flask(__name__)
app.secret_key = 'testing'
@app.route('/', methods=['GET', 'POST'])
def index():
if flask.request.method == 'POST':
return flask.redirect('/getsession')
flask.session['data'] = 'foo'
return 'index'
@app.route('/getsession')
def get_session():
return flask.session.get('data', '<missing>')
with app.test_client() as c:
rv = c.get('/getsession')
assert rv.data == b'<missing>'
rv = c.get('/')
assert rv.data == b'index'
assert flask.session.get('data') == 'foo'
rv = c.post('/', data={}, follow_redirects=True)
assert rv.data == b'foo'
# This support requires a new Werkzeug version
if not hasattr(c, 'redirect_client'):
assert flask.session.get('data') == 'foo'
rv = c.get('/getsession')
assert rv.data == b'foo'
def test_session_transactions(self):
app = flask.Flask(__name__)
app.testing = True
app.secret_key = 'testing'
@app.route('/')
def index():
return text_type(flask.session['foo'])
with app.test_client() as c:
with c.session_transaction() as sess:
self.assert_equal(len(sess), 0)
sess['foo'] = [42]
self.assert_equal(len(sess), 1)
rv = c.get('/')
self.assert_equal(rv.data, b'[42]')
with c.session_transaction() as sess:
self.assert_equal(len(sess), 1)
self.assert_equal(sess['foo'], [42])
def test_session_transactions_no_null_sessions(self):
app = flask.Flask(__name__)
app.testing = True
with app.test_client() as c:
try:
with c.session_transaction() as sess:
pass
except RuntimeError as e:
self.assert_in('Session backend did not open a session', str(e))
else:
self.fail('Expected runtime error')
def test_session_transactions_keep_context(self):
app = flask.Flask(__name__)
app.testing = True
app.secret_key = 'testing'
with app.test_client() as c:
rv = c.get('/')
req = flask.request._get_current_object()
self.assert_true(req is not None)
with c.session_transaction():
self.assert_true(req is flask.request._get_current_object())
def test_session_transaction_needs_cookies(self):
app = flask.Flask(__name__)
app.testing = True
c = app.test_client(use_cookies=False)
try:
with c.session_transaction() as s:
pass
except RuntimeError as e:
self.assert_in('cookies', str(e))
else:
self.fail('Expected runtime error')
def test_test_client_context_binding(self):
app = flask.Flask(__name__)
@app.route('/')
def index():
flask.g.value = 42
return 'Hello World!'
@app.route('/other')
def other():
1 // 0
with app.test_client() as c:
resp = c.get('/')
self.assert_equal(flask.g.value, 42)
self.assert_equal(resp.data, b'Hello World!')
self.assert_equal(resp.status_code, 200)
resp = c.get('/other')
self.assert_false(hasattr(flask.g, 'value'))
self.assert_in(b'Internal Server Error', resp.data)
self.assert_equal(resp.status_code, 500)
flask.g.value = 23
try:
flask.g.value
except (AttributeError, RuntimeError):
pass
else:
raise AssertionError('some kind of exception expected')
def test_reuse_client(self):
app = flask.Flask(__name__)
c = app.test_client()
with c:
self.assert_equal(c.get('/').status_code, 404)
with c:
self.assert_equal(c.get('/').status_code, 404)
def test_test_client_calls_teardown_handlers(self):
app = flask.Flask(__name__)
called = []
@app.teardown_request
def remember(error):
called.append(error)
with app.test_client() as c:
self.assert_equal(called, [])
c.get('/')
self.assert_equal(called, [])
self.assert_equal(called, [None])
del called[:]
with app.test_client() as c:
self.assert_equal(called, [])
c.get('/')
self.assert_equal(called, [])
c.get('/')
self.assert_equal(called, [None])
self.assert_equal(called, [None, None])
class SubdomainTestCase(FlaskTestCase):
def setUp(self):
self.app = flask.Flask(__name__)
self.app.config['SERVER_NAME'] = 'example.com'
self.client = self.app.test_client()
self._ctx = self.app.test_request_context()
self._ctx.push()
def tearDown(self):
if self._ctx is not None:
self._ctx.pop()
def test_subdomain(self):
@self.app.route('/', subdomain='<company_id>')
def view(company_id):
return company_id
url = flask.url_for('view', company_id='xxx')
response = self.client.get(url)
self.assert_equal(200, response.status_code)
self.assert_equal(b'xxx', response.data)
def test_nosubdomain(self):
@self.app.route('/<company_id>')
def view(company_id):
return company_id
url = flask.url_for('view', company_id='xxx')
response = self.client.get(url)
self.assert_equal(200, response.status_code)
self.assert_equal(b'xxx', response.data)
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestToolsTestCase))
suite.addTest(unittest.makeSuite(SubdomainTestCase))
return suite
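A minimal sketch of client.session_transaction(), which several of the tests above rely on to read and write the session cookie outside a request; the 'user' key and its values are illustrative.

import flask

app = flask.Flask(__name__)
app.secret_key = 'testing'   # any secret key enables the session cookie

@app.route('/')
def index():
    return flask.session.get('user', 'anonymous')

with app.test_client() as c:
    with c.session_transaction() as sess:
        sess['user'] = 'alice'       # written into the client's cookie jar
    assert c.get('/').data == b'alice'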

View File

@ -1,169 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask.testsuite.views
~~~~~~~~~~~~~~~~~~~~~
Pluggable views.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import flask
import flask.views
import unittest
from flask.testsuite import FlaskTestCase
from werkzeug.http import parse_set_header
class ViewTestCase(FlaskTestCase):
def common_test(self, app):
c = app.test_client()
self.assert_equal(c.get('/').data, b'GET')
self.assert_equal(c.post('/').data, b'POST')
self.assert_equal(c.put('/').status_code, 405)
meths = parse_set_header(c.open('/', method='OPTIONS').headers['Allow'])
self.assert_equal(sorted(meths), ['GET', 'HEAD', 'OPTIONS', 'POST'])
def test_basic_view(self):
app = flask.Flask(__name__)
class Index(flask.views.View):
methods = ['GET', 'POST']
def dispatch_request(self):
return flask.request.method
app.add_url_rule('/', view_func=Index.as_view('index'))
self.common_test(app)
def test_method_based_view(self):
app = flask.Flask(__name__)
class Index(flask.views.MethodView):
def get(self):
return 'GET'
def post(self):
return 'POST'
app.add_url_rule('/', view_func=Index.as_view('index'))
self.common_test(app)
def test_view_patching(self):
app = flask.Flask(__name__)
class Index(flask.views.MethodView):
def get(self):
1 // 0
def post(self):
1 // 0
class Other(Index):
def get(self):
return 'GET'
def post(self):
return 'POST'
view = Index.as_view('index')
view.view_class = Other
app.add_url_rule('/', view_func=view)
self.common_test(app)
def test_view_inheritance(self):
app = flask.Flask(__name__)
class Index(flask.views.MethodView):
def get(self):
return 'GET'
def post(self):
return 'POST'
class BetterIndex(Index):
def delete(self):
return 'DELETE'
app.add_url_rule('/', view_func=BetterIndex.as_view('index'))
c = app.test_client()
meths = parse_set_header(c.open('/', method='OPTIONS').headers['Allow'])
self.assert_equal(sorted(meths), ['DELETE', 'GET', 'HEAD', 'OPTIONS', 'POST'])
def test_view_decorators(self):
app = flask.Flask(__name__)
def add_x_parachute(f):
def new_function(*args, **kwargs):
resp = flask.make_response(f(*args, **kwargs))
resp.headers['X-Parachute'] = 'awesome'
return resp
return new_function
class Index(flask.views.View):
decorators = [add_x_parachute]
def dispatch_request(self):
return 'Awesome'
app.add_url_rule('/', view_func=Index.as_view('index'))
c = app.test_client()
rv = c.get('/')
self.assert_equal(rv.headers['X-Parachute'], 'awesome')
self.assert_equal(rv.data, b'Awesome')
def test_implicit_head(self):
app = flask.Flask(__name__)
class Index(flask.views.MethodView):
def get(self):
return flask.Response('Blub', headers={
'X-Method': flask.request.method
})
app.add_url_rule('/', view_func=Index.as_view('index'))
c = app.test_client()
rv = c.get('/')
self.assert_equal(rv.data, b'Blub')
self.assert_equal(rv.headers['X-Method'], 'GET')
rv = c.head('/')
self.assert_equal(rv.data, b'')
self.assert_equal(rv.headers['X-Method'], 'HEAD')
def test_explicit_head(self):
app = flask.Flask(__name__)
class Index(flask.views.MethodView):
def get(self):
return 'GET'
def head(self):
return flask.Response('', headers={'X-Method': 'HEAD'})
app.add_url_rule('/', view_func=Index.as_view('index'))
c = app.test_client()
rv = c.get('/')
self.assert_equal(rv.data, b'GET')
rv = c.head('/')
self.assert_equal(rv.data, b'')
self.assert_equal(rv.headers['X-Method'], 'HEAD')
def test_endpoint_override(self):
app = flask.Flask(__name__)
app.debug = True
class Index(flask.views.View):
methods = ['GET', 'POST']
def dispatch_request(self):
return flask.request.method
app.add_url_rule('/', view_func=Index.as_view('index'))
with self.assert_raises(AssertionError):
app.add_url_rule('/', view_func=Index.as_view('index'))
# But these tests should still pass. We just log a warning.
self.common_test(app)
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(ViewTestCase))
return suite

View File

@ -1,149 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask.views
~~~~~~~~~~~
This module provides class-based views inspired by the ones in Django.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from .globals import request
from ._compat import with_metaclass
http_method_funcs = frozenset(['get', 'post', 'head', 'options',
'delete', 'put', 'trace', 'patch'])
class View(object):
"""Alternative way to use view functions. A subclass has to implement
:meth:`dispatch_request` which is called with the view arguments from
the URL routing system. If :attr:`methods` is provided the methods
do not have to be passed to the :meth:`~flask.Flask.add_url_rule`
method explicitly::
class MyView(View):
methods = ['GET']
def dispatch_request(self, name):
return 'Hello %s!' % name
app.add_url_rule('/hello/<name>', view_func=MyView.as_view('myview'))
When you want to decorate a pluggable view you will have to either do that
when the view function is created (by wrapping the return value of
:meth:`as_view`) or you can use the :attr:`decorators` attribute::
class SecretView(View):
methods = ['GET']
decorators = [superuser_required]
def dispatch_request(self):
...
The decorators stored in the decorators list are applied one after another
when the view function is created. Note that you can *not* use the class
based decorators since those would decorate the view class and not the
generated view function!
"""
#: A list of methods this pluggable view can handle.
methods = None
#: The canonical way to decorate class-based views is to decorate the
#: return value of as_view(). However, this moves parts of the
#: logic from the class declaration to the place where the view is
#: hooked into the routing system.
#:
#: You can place one or more decorators in this list and whenever the
#: view function is created the result is automatically decorated.
#:
#: .. versionadded:: 0.8
decorators = []
def dispatch_request(self):
"""Subclasses have to override this method to implement the
actual view function code. This method is called with all
the arguments from the URL rule.
"""
raise NotImplementedError()
@classmethod
def as_view(cls, name, *class_args, **class_kwargs):
"""Converts the class into an actual view function that can be used
with the routing system. Internally this generates a function on the
fly which will instantiate the :class:`View` on each request and call
the :meth:`dispatch_request` method on it.
The arguments passed to :meth:`as_view` are forwarded to the
constructor of the class.
"""
def view(*args, **kwargs):
self = view.view_class(*class_args, **class_kwargs)
return self.dispatch_request(*args, **kwargs)
if cls.decorators:
view.__name__ = name
view.__module__ = cls.__module__
for decorator in cls.decorators:
view = decorator(view)
# we attach the view class to the view function for two reasons:
# first of all it allows us to easily figure out what class-based
# view this thing came from, secondly it's also used for instantiating
# the view class so you can actually replace it with something else
# for testing purposes and debugging.
view.view_class = cls
view.__name__ = name
view.__doc__ = cls.__doc__
view.__module__ = cls.__module__
view.methods = cls.methods
return view
class MethodViewType(type):
def __new__(cls, name, bases, d):
rv = type.__new__(cls, name, bases, d)
if 'methods' not in d:
methods = set(rv.methods or [])
for key in d:
if key in http_method_funcs:
methods.add(key.upper())
# if we have no method at all in there we don't want to
# add a method list. (This is for instance the case for
# the baseclass or another subclass of a base method view
# that does not introduce new methods).
if methods:
rv.methods = sorted(methods)
return rv
class MethodView(with_metaclass(MethodViewType, View)):
"""Like a regular class-based view but that dispatches requests to
particular methods. For instance if you implement a method called
:meth:`get` it means you will response to ``'GET'`` requests and
the :meth:`dispatch_request` implementation will automatically
forward your request to that. Also :attr:`options` is set for you
automatically::
class CounterAPI(MethodView):
def get(self):
return session.get('counter', 0)
def post(self):
session['counter'] = session.get('counter', 0) + 1
return 'OK'
app.add_url_rule('/counter', view_func=CounterAPI.as_view('counter'))
"""
def dispatch_request(self, *args, **kwargs):
meth = getattr(self, request.method.lower(), None)
# if the request method is HEAD and we don't have a handler for it
# retry with GET
if meth is None and request.method == 'HEAD':
meth = getattr(self, 'get', None)
assert meth is not None, 'Unimplemented method %r' % request.method
return meth(*args, **kwargs)
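A minimal sketch of the pluggable-view API documented above: a MethodView whose handlers are collected by MethodViewType, plus a plain function decorator applied through the decorators list; ItemAPI and add_header are illustrative names.

import flask
import flask.views

app = flask.Flask(__name__)

def add_header(f):
    def wrapper(*args, **kwargs):
        resp = flask.make_response(f(*args, **kwargs))
        resp.headers['X-Example'] = 'yes'
        return resp
    return wrapper

class ItemAPI(flask.views.MethodView):
    decorators = [add_header]
    def get(self):
        return 'listing items'
    def post(self):
        return 'created'

app.add_url_rule('/items', view_func=ItemAPI.as_view('items'))
assert ItemAPI.methods == ['GET', 'POST']
assert app.test_client().get('/items').headers['X-Example'] == 'yes'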

View File

@ -1,184 +0,0 @@
# -*- coding: utf-8 -*-
"""
flask.wrappers
~~~~~~~~~~~~~~
Implements the WSGI wrappers (request and response).
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from werkzeug.wrappers import Request as RequestBase, Response as ResponseBase
from werkzeug.exceptions import BadRequest
from .debughelpers import attach_enctype_error_multidict
from . import json
from .globals import _request_ctx_stack
_missing = object()
def _get_data(req, cache):
getter = getattr(req, 'get_data', None)
if getter is not None:
return getter(cache=cache)
return req.data
class Request(RequestBase):
"""The request object used by default in Flask. Remembers the
matched endpoint and view arguments.
It is what ends up as :class:`~flask.request`. If you want to replace
the request object used you can subclass this and set
:attr:`~flask.Flask.request_class` to your subclass.
The request object is a :class:`~werkzeug.wrappers.Request` subclass and
provides all of the attributes Werkzeug defines plus a few Flask
specific ones.
"""
#: the internal URL rule that matched the request. This can be
#: useful to inspect which methods are allowed for the URL from
#: a before/after handler (``request.url_rule.methods``) etc.
#:
#: .. versionadded:: 0.6
url_rule = None
#: a dict of view arguments that matched the request. If an exception
#: happened when matching, this will be `None`.
view_args = None
#: if matching the URL failed, this is the exception that will be
#: raised / was raised as part of the request handling. This is
#: usually a :exc:`~werkzeug.exceptions.NotFound` exception or
#: something similar.
routing_exception = None
# switched by the request context until 1.0 to opt in to deprecated
# module functionality
_is_old_module = False
@property
def max_content_length(self):
"""Read-only view of the `MAX_CONTENT_LENGTH` config key."""
ctx = _request_ctx_stack.top
if ctx is not None:
return ctx.app.config['MAX_CONTENT_LENGTH']
@property
def endpoint(self):
"""The endpoint that matched the request. This in combination with
:attr:`view_args` can be used to reconstruct the same or a
modified URL. If an exception happened when matching, this will
be `None`.
"""
if self.url_rule is not None:
return self.url_rule.endpoint
@property
def module(self):
"""The name of the current module if the request was dispatched
to an actual module. This is deprecated functionality, use blueprints
instead.
"""
from warnings import warn
warn(DeprecationWarning('modules were deprecated in favor of '
'blueprints. Use request.blueprint '
'instead.'), stacklevel=2)
if self._is_old_module:
return self.blueprint
@property
def blueprint(self):
"""The name of the current blueprint"""
if self.url_rule and '.' in self.url_rule.endpoint:
return self.url_rule.endpoint.rsplit('.', 1)[0]
@property
def json(self):
"""If the mimetype is `application/json` this will contain the
parsed JSON data. Otherwise this will be `None`.
The :meth:`get_json` method should be used instead.
"""
# XXX: deprecate property
return self.get_json()
def get_json(self, force=False, silent=False, cache=True):
"""Parses the incoming JSON request data and returns it. If
parsing fails the :meth:`on_json_loading_failed` method on the
request object will be invoked. By default this function will
only load the json data if the mimetype is ``application/json``
but this can be overridden by the `force` parameter.
:param force: if set to `True` the mimetype is ignored.
:param silent: if set to `True` this method will fail silently
and return `None`.
:param cache: if set to `True` the parsed JSON data is remembered
on the request.
"""
rv = getattr(self, '_cached_json', _missing)
if rv is not _missing:
return rv
if self.mimetype != 'application/json' and not force:
return None
# We accept a request charset against the specification as
# certain clients have been using this in the past. This
# fits our general approach of being nice in what we accept
# and strict in what we send out.
request_charset = self.mimetype_params.get('charset')
try:
data = _get_data(self, cache)
if request_charset is not None:
rv = json.loads(data, encoding=request_charset)
else:
rv = json.loads(data)
except ValueError as e:
if silent:
rv = None
else:
rv = self.on_json_loading_failed(e)
if cache:
self._cached_json = rv
return rv
def on_json_loading_failed(self, e):
"""Called if decoding of the JSON data failed. The return value of
this method is used by :meth:`get_json` when an error occurred. The
default implementation just raises a :class:`BadRequest` exception.
.. versionchanged:: 0.10
Removed buggy previous behavior of generating a random JSON
response. If you want that behavior back you can trivially
add it by subclassing.
.. versionadded:: 0.8
"""
raise BadRequest()
def _load_form_data(self):
RequestBase._load_form_data(self)
# in debug mode we're replacing the files multidict with an ad-hoc
# subclass that raises a different error for key errors.
ctx = _request_ctx_stack.top
if ctx is not None and ctx.app.debug and \
self.mimetype != 'multipart/form-data' and not self.files:
attach_enctype_error_multidict(self)
class Response(ResponseBase):
"""The response object that is used by default in Flask. Works like the
response object from Werkzeug but is set to have an HTML mimetype by
default. Quite often you don't have to create this object yourself because
:meth:`~flask.Flask.make_response` will take care of that for you.
If you want to replace the response object used you can subclass this and
set :attr:`~flask.Flask.response_class` to your subclass.
"""
default_mimetype = 'text/html'
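A minimal sketch of the get_json() behavior described above: with silent=True a parse failure yields None instead of the BadRequest raised by on_json_loading_failed; the /echo route is illustrative.

import flask

app = flask.Flask(__name__)

@app.route('/echo', methods=['POST'])
def echo():
    payload = flask.request.get_json(silent=True)
    if payload is None:
        return 'expected JSON', 400
    return flask.jsonify(payload)

c = app.test_client()
assert c.post('/echo', data='{"a": 1}', content_type='application/json').status_code == 200
assert c.post('/echo', data='not json', content_type='application/json').status_code == 400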

View File

@ -1,872 +0,0 @@
# -*- coding: utf-8 -*-
"""
itsdangerous
~~~~~~~~~~~~
A module that implements various functions to deal with untrusted
sources. Mainly useful for web applications.
:copyright: (c) 2014 by Armin Ronacher and the Django Software Foundation.
:license: BSD, see LICENSE for more details.
"""
import sys
import hmac
import zlib
import time
import base64
import hashlib
import operator
from datetime import datetime
PY2 = sys.version_info[0] == 2
if PY2:
from itertools import izip
text_type = unicode
int_to_byte = chr
number_types = (int, long, float)
else:
from functools import reduce
izip = zip
text_type = str
int_to_byte = operator.methodcaller('to_bytes', 1, 'big')
number_types = (int, float)
try:
import simplejson as json
except ImportError:
import json
class _CompactJSON(object):
"""Wrapper around simplejson that strips whitespace.
"""
def loads(self, payload):
return json.loads(payload)
def dumps(self, obj):
return json.dumps(obj, separators=(',', ':'))
compact_json = _CompactJSON()
# 2011/01/01 in UTC
EPOCH = 1293840000
def want_bytes(s, encoding='utf-8', errors='strict'):
if isinstance(s, text_type):
s = s.encode(encoding, errors)
return s
def is_text_serializer(serializer):
"""Checks wheather a serializer generates text or binary."""
return isinstance(serializer.dumps({}), text_type)
# Starting with 3.3 the standard library has a c-implementation for
# constant time string compares.
_builtin_constant_time_compare = getattr(hmac, 'compare_digest', None)
def constant_time_compare(val1, val2):
"""Returns True if the two strings are equal, False otherwise.
The time taken is independent of the number of characters that match. Do
not use this function for anything other than comparison with known
length targets.
This should be implemented in C in order to get it completely right.
"""
if _builtin_constant_time_compare is not None:
return _builtin_constant_time_compare(val1, val2)
len_eq = len(val1) == len(val2)
if len_eq:
result = 0
left = val1
else:
result = 1
left = val2
for x, y in izip(bytearray(left), bytearray(val2)):
result |= x ^ y
return result == 0
class BadData(Exception):
"""Raised if bad data of any sort was encountered. This is the
base for all exceptions that itsdangerous is currently using.
.. versionadded:: 0.15
"""
message = None
def __init__(self, message):
Exception.__init__(self, message)
self.message = message
def __str__(self):
return text_type(self.message)
if PY2:
__unicode__ = __str__
def __str__(self):
return self.__unicode__().encode('utf-8')
class BadPayload(BadData):
"""This error is raised in situations when payload is loaded without
checking the signature first and an exception happend as a result of
that. The original exception that caused that will be stored on the
exception as :attr:`original_error`.
This can also happen with a :class:`JSONWebSignatureSerializer` that
is subclassed and uses a different serializer for the payload than
the expected one.
.. versionadded:: 0.15
"""
def __init__(self, message, original_error=None):
BadData.__init__(self, message)
#: If available, the error that indicates why the payload
#: was not valid. This might be `None`.
self.original_error = original_error
class BadSignature(BadData):
"""This error is raised if a signature does not match. As of
itsdangerous 0.14 there are helpful attributes on the exception
instances. You can also catch down the baseclass :exc:`BadData`.
"""
def __init__(self, message, payload=None):
BadData.__init__(self, message)
#: The payload that failed the signature test. In some
#: situations you might still want to inspect this, even if
#: you know it was tampered with.
#:
#: .. versionadded:: 0.14
self.payload = payload
class BadTimeSignature(BadSignature):
"""Raised for time based signatures that fail. This is a subclass
of :class:`BadSignature` so you can catch those down as well.
"""
def __init__(self, message, payload=None, date_signed=None):
BadSignature.__init__(self, message, payload)
#: If the signature expired this exposes the date of when the
#: signature was created. This can be helpful in order to
#: tell the user how long ago the link went stale.
#:
#: .. versionadded:: 0.14
self.date_signed = date_signed
class BadHeader(BadSignature):
"""Raised if a signed header is invalid in some form. This only
happens for serializers that have a header that goes with the
signature.
.. versionadded:: 0.24
"""
def __init__(self, message, payload=None, header=None,
original_error=None):
BadSignature.__init__(self, message, payload)
#: If the header is actually available but just malformed it
#: might be stored here.
self.header = header
#: If available, the error that indicates why the payload
#: was not valid. This might be `None`.
self.original_error = original_error
class SignatureExpired(BadTimeSignature):
"""Signature timestamp is older than required max_age. This is a
subclass of :exc:`BadTimeSignature` so you can use the baseclass for
catching the error.
"""
def base64_encode(string):
"""base64 encodes a single bytestring (and is tolerant to getting
called with a unicode string).
The resulting bytestring is safe for putting into URLs.
"""
string = want_bytes(string)
return base64.urlsafe_b64encode(string).strip(b'=')
def base64_decode(string):
"""base64 decodes a single bytestring (and is tolerant to getting
called with a unicode string).
The result is also a bytestring.
"""
string = want_bytes(string, encoding='ascii', errors='ignore')
return base64.urlsafe_b64decode(string + b'=' * (-len(string) % 4))
def int_to_bytes(num):
assert num >= 0
rv = []
while num:
rv.append(int_to_byte(num & 0xff))
num >>= 8
return b''.join(reversed(rv))
def bytes_to_int(bytestr):
return reduce(lambda a, b: a << 8 | b, bytearray(bytestr), 0)
class SigningAlgorithm(object):
"""Subclasses of `SigningAlgorithm` have to implement `get_signature` to
provide signature generation functionality.
"""
def get_signature(self, key, value):
"""Returns the signature for the given key and value"""
raise NotImplementedError()
def verify_signature(self, key, value, sig):
"""Verifies the given signature matches the expected signature"""
return constant_time_compare(sig, self.get_signature(key, value))
class NoneAlgorithm(SigningAlgorithm):
"""This class provides a algorithm that does not perform any signing and
returns an empty signature.
"""
def get_signature(self, key, value):
return b''
class HMACAlgorithm(SigningAlgorithm):
"""This class provides signature generation using HMACs."""
#: The digest method to use with the MAC algorithm. This defaults to sha1
#: but can be changed for any other function in the hashlib module.
default_digest_method = staticmethod(hashlib.sha1)
def __init__(self, digest_method=None):
if digest_method is None:
digest_method = self.default_digest_method
self.digest_method = digest_method
def get_signature(self, key, value):
mac = hmac.new(key, msg=value, digestmod=self.digest_method)
return mac.digest()
class Signer(object):
"""This class can sign bytes and unsign it and validate the signature
provided.
Salt can be used to namespace the hash, so that a signed string is only
valid for a given namespace. Leaving this at the default value or re-using
a salt value across different parts of your application where the same
signed value in one part can mean something different in another part
is a security risk.
See :ref:`the-salt` for an example of what the salt is doing and how you
can utilize it.
.. versionadded:: 0.14
`key_derivation` and `digest_method` were added as arguments to the
class constructor.
.. versionadded:: 0.18
`algorithm` was added as an argument to the class constructor.
"""
#: The digest method to use for the signer. This defaults to sha1 but can
#: be changed for any other function in the hashlib module.
#:
#: .. versionchanged:: 0.14
default_digest_method = staticmethod(hashlib.sha1)
#: Controls how the key is derived. The default is Django style
#: concatenation. Possible values are ``concat``, ``django-concat``
#: and ``hmac``. This is used for deriving a key from the secret key
#: with an added salt.
#:
#: .. versionadded:: 0.14
default_key_derivation = 'django-concat'
def __init__(self, secret_key, salt=None, sep='.', key_derivation=None,
digest_method=None, algorithm=None):
self.secret_key = want_bytes(secret_key)
self.sep = sep
self.salt = 'itsdangerous.Signer' if salt is None else salt
if key_derivation is None:
key_derivation = self.default_key_derivation
self.key_derivation = key_derivation
if digest_method is None:
digest_method = self.default_digest_method
self.digest_method = digest_method
if algorithm is None:
algorithm = HMACAlgorithm(self.digest_method)
self.algorithm = algorithm
def derive_key(self):
"""This method is called to derive the key. If you're unhappy with
the default key derivation choices you can override them here.
Keep in mind that the key derivation in itsdangerous is not intended
to be used as a security method to make a complex key out of a short
password. Instead you should use large random secret keys.
"""
salt = want_bytes(self.salt)
if self.key_derivation == 'concat':
return self.digest_method(salt + self.secret_key).digest()
elif self.key_derivation == 'django-concat':
return self.digest_method(salt + b'signer' +
self.secret_key).digest()
elif self.key_derivation == 'hmac':
mac = hmac.new(self.secret_key, digestmod=self.digest_method)
mac.update(salt)
return mac.digest()
elif self.key_derivation == 'none':
return self.secret_key
else:
raise TypeError('Unknown key derivation method')
def get_signature(self, value):
"""Returns the signature for the given value"""
value = want_bytes(value)
key = self.derive_key()
sig = self.algorithm.get_signature(key, value)
return base64_encode(sig)
def sign(self, value):
"""Signs the given string."""
return value + want_bytes(self.sep) + self.get_signature(value)
def verify_signature(self, value, sig):
"""Verifies the signature for the given value."""
key = self.derive_key()
try:
sig = base64_decode(sig)
except Exception:
return False
return self.algorithm.verify_signature(key, value, sig)
def unsign(self, signed_value):
"""Unsigns the given string."""
signed_value = want_bytes(signed_value)
sep = want_bytes(self.sep)
if sep not in signed_value:
raise BadSignature('No %r found in value' % self.sep)
value, sig = signed_value.rsplit(sep, 1)
if self.verify_signature(value, sig):
return value
raise BadSignature('Signature %r does not match' % sig,
payload=value)
def validate(self, signed_value):
"""Just validates the given signed value. Returns `True` if the
signature exists and is valid, `False` otherwise."""
try:
self.unsign(signed_value)
return True
except BadSignature:
return False
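# usage sketch (not from the original module): the Signer defined above in
# action; the secret key, salt and value are placeholders chosen for
# illustration.
_demo_signer = Signer('my-secret-key', salt='activate-account')
_demo_token = _demo_signer.sign(b'user-42')   # b'user-42.<urlsafe b64 signature>'
assert _demo_signer.unsign(_demo_token) == b'user-42'
assert not _demo_signer.validate(b'user-42.forged-signature')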
class TimestampSigner(Signer):
"""Works like the regular :class:`Signer` but also records the time
of the signing and can be used to expire signatures. The unsign
method can raise a :exc:`SignatureExpired` exception if the unsigning
failed because the signature is expired. This exception is a subclass
of :exc:`BadSignature`.
"""
def get_timestamp(self):
"""Returns the current timestamp. This implementation returns the
seconds since 1/1/2011. The function must return an integer.
"""
return int(time.time() - EPOCH)
def timestamp_to_datetime(self, ts):
"""Used to convert the timestamp from `get_timestamp` into a
datetime object.
"""
return datetime.utcfromtimestamp(ts + EPOCH)
def sign(self, value):
"""Signs the given string and also attaches a time information."""
value = want_bytes(value)
timestamp = base64_encode(int_to_bytes(self.get_timestamp()))
sep = want_bytes(self.sep)
value = value + sep + timestamp
return value + sep + self.get_signature(value)
def unsign(self, value, max_age=None, return_timestamp=False):
"""Works like the regular :meth:`~Signer.unsign` but can also
validate the time. See the base docstring of the class for
the general behavior. If `return_timestamp` is set to `True`
the timestamp of the signature will be returned as naive
:class:`datetime.datetime` object in UTC.
"""
try:
result = Signer.unsign(self, value)
sig_error = None
except BadSignature as e:
sig_error = e
result = e.payload or b''
sep = want_bytes(self.sep)
# If there is no timestamp in the result there is something
# seriously wrong. In case there was a signature error, we raise
# that one directly; otherwise we are in a situation we should not have
# reached unless someone used a time-based serializer on non-timestamp
# data, so catch that.
if sep not in result:
if sig_error:
raise sig_error
raise BadTimeSignature('timestamp missing', payload=result)
value, timestamp = result.rsplit(sep, 1)
try:
timestamp = bytes_to_int(base64_decode(timestamp))
except Exception:
timestamp = None
# Signature is *not* okay. Raise a proper error now that we have
# split the value and the timestamp.
if sig_error is not None:
raise BadTimeSignature(text_type(sig_error), payload=value,
date_signed=timestamp)
# Signature was okay but the timestamp is actually not there or
# malformed. Should not happen, but well. We handle it nonetheless
if timestamp is None:
raise BadTimeSignature('Malformed timestamp', payload=value)
# Check timestamp is not older than max_age
if max_age is not None:
age = self.get_timestamp() - timestamp
if age > max_age:
raise SignatureExpired(
'Signature age %s > %s seconds' % (age, max_age),
payload=value,
date_signed=self.timestamp_to_datetime(timestamp))
if return_timestamp:
return value, self.timestamp_to_datetime(timestamp)
return value
def validate(self, signed_value, max_age=None):
"""Just validates the given signed value. Returns `True` if the
signature exists and is valid, `False` otherwise."""
try:
self.unsign(signed_value, max_age=max_age)
return True
except BadSignature:
return False
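# usage sketch (not from the original module): the TimestampSigner defined
# above embeds a timestamp so unsign() can enforce max_age; key and payload
# are placeholder values.
_demo_ts = TimestampSigner('my-secret-key')
_demo_ts_token = _demo_ts.sign(b'reset-password')
assert _demo_ts.unsign(_demo_ts_token, max_age=3600) == b'reset-password'
# once the signature is older than max_age seconds, unsign() raises
# SignatureExpired instead of returning the value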
class Serializer(object):
"""This class provides a serialization interface on top of the
signer. It provides a similar API to json/pickle and other modules but is
slightly differently structured internally. If you want to change the
underlying implementation for parsing and loading you have to override the
:meth:`load_payload` and :meth:`dump_payload` functions.
This implementation uses simplejson if available for dumping and loading
and will fall back to the standard library's json module if it's not
available.
Starting with 0.14 you do not need to subclass this class in order to
switch out or customize the :class:`Signer`. You can instead also pass a
different class to the constructor as well as keyword arguments as
dictionary that should be forwarded::
s = Serializer(signer_kwargs={'key_derivation': 'hmac'})
.. versionchanged:: 0.14:
The `signer` and `signer_kwargs` parameters were added to the
constructor.
"""
#: If a serializer module or class is not passed to the constructor
#: this one is picked up. This currently defaults to :mod:`json`.
default_serializer = json
#: The default :class:`Signer` class that is being used by this
#: serializer.
#:
#: .. versionadded:: 0.14
default_signer = Signer
def __init__(self, secret_key, salt=b'itsdangerous', serializer=None,
signer=None, signer_kwargs=None):
self.secret_key = want_bytes(secret_key)
self.salt = want_bytes(salt)
if serializer is None:
serializer = self.default_serializer
self.serializer = serializer
self.is_text_serializer = is_text_serializer(serializer)
if signer is None:
signer = self.default_signer
self.signer = signer
self.signer_kwargs = signer_kwargs or {}
def load_payload(self, payload, serializer=None):
"""Loads the encoded object. This function raises :class:`BadPayload`
if the payload is not valid. The `serializer` parameter can be used to
override the serializer stored on the class. The encoded payload is
always byte based.
"""
if serializer is None:
serializer = self.serializer
is_text = self.is_text_serializer
else:
is_text = is_text_serializer(serializer)
try:
if is_text:
payload = payload.decode('utf-8')
return serializer.loads(payload)
except Exception as e:
raise BadPayload('Could not load the payload because an '
'exception occurred on unserializing the data',
original_error=e)
def dump_payload(self, obj):
"""Dumps the encoded object. The return value is always a
bytestring. If the internal serializer is text based the value
will automatically be encoded to utf-8.
"""
return want_bytes(self.serializer.dumps(obj))
def make_signer(self, salt=None):
"""A method that creates a new instance of the signer to be used.
The default implementation uses the :class:`Signer` baseclass.
"""
if salt is None:
salt = self.salt
return self.signer(self.secret_key, salt=salt, **self.signer_kwargs)
def dumps(self, obj, salt=None):
"""Returns a signed string serialized with the internal serializer.
The return value can be either a byte or unicode string depending
on the format of the internal serializer.
"""
payload = want_bytes(self.dump_payload(obj))
rv = self.make_signer(salt).sign(payload)
if self.is_text_serializer:
rv = rv.decode('utf-8')
return rv
def dump(self, obj, f, salt=None):
"""Like :meth:`dumps` but dumps into a file. The file handle has
to be compatible with what the internal serializer expects.
"""
f.write(self.dumps(obj, salt))
def loads(self, s, salt=None):
"""Reverse of :meth:`dumps`, raises :exc:`BadSignature` if the
signature validation fails.
"""
s = want_bytes(s)
return self.load_payload(self.make_signer(salt).unsign(s))
def load(self, f, salt=None):
"""Like :meth:`loads` but loads from a file."""
return self.loads(f.read(), salt)
def loads_unsafe(self, s, salt=None):
"""Like :meth:`loads` but without verifying the signature. This is
potentially very dangerous to use depending on how your serializer
works. The return value is ``(signature_okay, payload)`` instead of
just the payload. The first item will be a boolean that indicates
if the signature is okay (``True``) or if it failed. This function
never fails.
Use it for debugging only and if you know that your serializer module
is not exploitable (eg: do not use it with a pickle serializer).
.. versionadded:: 0.15
"""
return self._loads_unsafe_impl(s, salt)
def _loads_unsafe_impl(self, s, salt, load_kwargs=None,
load_payload_kwargs=None):
"""Lowlevel helper function to implement :meth:`loads_unsafe` in
serializer subclasses.
"""
try:
return True, self.loads(s, salt=salt, **(load_kwargs or {}))
except BadSignature as e:
if e.payload is None:
return False, None
try:
return False, self.load_payload(e.payload,
**(load_payload_kwargs or {}))
except BadPayload:
return False, None
def load_unsafe(self, f, *args, **kwargs):
"""Like :meth:`loads_unsafe` but loads from a file.
.. versionadded:: 0.15
"""
return self.loads_unsafe(f.read(), *args, **kwargs)
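# usage sketch (not from the original module): the Serializer defined above
# signs a JSON payload; loads() verifies it, loads_unsafe() reports tampering
# without raising. Key, salt and payload are placeholder values.
_demo_s = Serializer('my-secret-key', salt='cookie')
_demo_blob = _demo_s.dumps({'user_id': 42})
assert _demo_s.loads(_demo_blob) == {'user_id': 42}
_demo_ok, _demo_payload = _demo_s.loads_unsafe(_demo_blob.replace('42', '43'))
assert _demo_ok is False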
class TimedSerializer(Serializer):
"""Uses the :class:`TimestampSigner` instead of the default
:class:`Signer`.
"""
default_signer = TimestampSigner
def loads(self, s, max_age=None, return_timestamp=False, salt=None):
"""Reverse of :meth:`dumps`, raises :exc:`BadSignature` if the
signature validation fails. If a `max_age` is provided it will
ensure the signature is not older than that time in seconds. In
case the signature is outdated, :exc:`SignatureExpired` is raised
which is a subclass of :exc:`BadSignature`. All arguments are
forwarded to the signer's :meth:`~TimestampSigner.unsign` method.
"""
base64d, timestamp = self.make_signer(salt) \
.unsign(s, max_age, return_timestamp=True)
payload = self.load_payload(base64d)
if return_timestamp:
return payload, timestamp
return payload
def loads_unsafe(self, s, max_age=None, salt=None):
load_kwargs = {'max_age': max_age}
load_payload_kwargs = {}
return self._loads_unsafe_impl(s, salt, load_kwargs, load_payload_kwargs)
class JSONWebSignatureSerializer(Serializer):
"""This serializer implements JSON Web Signature (JWS) support. Only
supports the JWS Compact Serialization.
"""
jws_algorithms = {
'HS256': HMACAlgorithm(hashlib.sha256),
'HS384': HMACAlgorithm(hashlib.sha384),
'HS512': HMACAlgorithm(hashlib.sha512),
'none': NoneAlgorithm(),
}
#: The default algorithm to use for signature generation
default_algorithm = 'HS256'
default_serializer = compact_json
def __init__(self, secret_key, salt=None, serializer=None,
signer=None, signer_kwargs=None, algorithm_name=None):
Serializer.__init__(self, secret_key, salt, serializer,
signer, signer_kwargs)
if algorithm_name is None:
algorithm_name = self.default_algorithm
self.algorithm_name = algorithm_name
self.algorithm = self.make_algorithm(algorithm_name)
def load_payload(self, payload, return_header=False):
payload = want_bytes(payload)
if b'.' not in payload:
raise BadPayload('No "." found in value')
base64d_header, base64d_payload = payload.split(b'.', 1)
try:
json_header = base64_decode(base64d_header)
except Exception as e:
raise BadHeader('Could not base64 decode the header because of '
'an exception', original_error=e)
try:
json_payload = base64_decode(base64d_payload)
except Exception as e:
raise BadPayload('Could not base64 decode the payload because of '
'an exception', original_error=e)
try:
header = Serializer.load_payload(self, json_header,
serializer=json)
except BadData as e:
raise BadHeader('Could not unserialize header because it was '
'malformed', original_error=e)
if not isinstance(header, dict):
raise BadHeader('Header payload is not a JSON object',
header=header)
payload = Serializer.load_payload(self, json_payload)
if return_header:
return payload, header
return payload
def dump_payload(self, header, obj):
base64d_header = base64_encode(self.serializer.dumps(header))
base64d_payload = base64_encode(self.serializer.dumps(obj))
return base64d_header + b'.' + base64d_payload
def make_algorithm(self, algorithm_name):
try:
return self.jws_algorithms[algorithm_name]
except KeyError:
raise NotImplementedError('Algorithm not supported')
def make_signer(self, salt=None, algorithm=None):
if salt is None:
salt = self.salt
key_derivation = 'none' if salt is None else None
if algorithm is None:
algorithm = self.algorithm
return self.signer(self.secret_key, salt=salt, sep='.',
key_derivation=key_derivation, algorithm=algorithm)
def make_header(self, header_fields):
header = header_fields.copy() if header_fields else {}
header['alg'] = self.algorithm_name
return header
def dumps(self, obj, salt=None, header_fields=None):
"""Like :meth:`~Serializer.dumps` but creates a JSON Web Signature. It
also allows for specifying additional fields to be included in the JWS
Header.
"""
header = self.make_header(header_fields)
signer = self.make_signer(salt, self.algorithm)
return signer.sign(self.dump_payload(header, obj))
def loads(self, s, salt=None, return_header=False):
"""Reverse of :meth:`dumps`. If requested via `return_header` it will
return a tuple of payload and header.
"""
payload, header = self.load_payload(
self.make_signer(salt, self.algorithm).unsign(want_bytes(s)),
return_header=True)
if header.get('alg') != self.algorithm_name:
raise BadHeader('Algorithm mismatch', header=header,
payload=payload)
if return_header:
return payload, header
return payload
def loads_unsafe(self, s, salt=None, return_header=False):
kwargs = {'return_header': return_header}
return self._loads_unsafe_impl(s, salt, kwargs, kwargs)
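# Illustrative sketch (not part of the library): JWS compact tokens with a
# non-default algorithm and extra header fields. Key and claims are made up.
_jws = JSONWebSignatureSerializer('secret-key', algorithm_name='HS512')
_token = _jws.dumps({'user': 'alice'}, header_fields={'v': 1})
_payload, _header = _jws.loads(_token, return_header=True)
# _header includes {'v': 1, 'alg': 'HS512'}; a tampered token raises BadSignature.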
class TimedJSONWebSignatureSerializer(JSONWebSignatureSerializer):
"""Works like the regular :class:`JSONWebSignatureSerializer` but also
records the time of the signing and can be used to expire signatures.
JWS currently does not specify this behavior but it mentions a possible
extension like this in the spec. The expiry date is encoded into the header
similarly to what is specified in `draft-ietf-oauth-json-web-token
<http://self-issued.info/docs/draft-ietf-oauth-json-web-token.html#expDef>`_.
The unsign method can raise a :exc:`SignatureExpired` exception if the
unsigning failed because the signature is expired. This exception is a
subclass of :exc:`BadSignature`.
"""
DEFAULT_EXPIRES_IN = 3600
def __init__(self, secret_key, expires_in=None, **kwargs):
JSONWebSignatureSerializer.__init__(self, secret_key, **kwargs)
if expires_in is None:
expires_in = self.DEFAULT_EXPIRES_IN
self.expires_in = expires_in
def make_header(self, header_fields):
header = JSONWebSignatureSerializer.make_header(self, header_fields)
iat = self.now()
exp = iat + self.expires_in
header['iat'] = iat
header['exp'] = exp
return header
def loads(self, s, salt=None, return_header=False):
payload, header = JSONWebSignatureSerializer.loads(
self, s, salt, return_header=True)
if 'exp' not in header:
raise BadSignature('Missing expiry date', payload=payload)
if not (isinstance(header['exp'], number_types)
and header['exp'] > 0):
raise BadSignature('expiry date is not an IntDate',
payload=payload)
if header['exp'] < self.now():
raise SignatureExpired('Signature expired', payload=payload,
date_signed=self.get_issue_date(header))
if return_header:
return payload, header
return payload
def get_issue_date(self, header):
rv = header.get('iat')
if isinstance(rv, number_types):
return datetime.utcfromtimestamp(int(rv))
def now(self):
return int(time.time())
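# Illustrative sketch (not part of the library): tokens signed with this
# serializer expire on their own via the 'exp' header claim.
_tjws = TimedJSONWebSignatureSerializer('secret-key', expires_in=60)
_token = _tjws.dumps({'user': 'alice'})
_payload, _header = _tjws.loads(_token, return_header=True)
# _header carries integer 'iat'/'exp' claims; once 'exp' has passed, loads()
# raises SignatureExpired (a BadSignature subclass) with date_signed set.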
class URLSafeSerializerMixin(object):
"""Mixed in with a regular serializer it will attempt to zlib compress
the string to make it shorter if necessary. It will also base64 encode
the string so that it can safely be placed in a URL.
"""
def load_payload(self, payload):
decompress = False
if payload.startswith(b'.'):
payload = payload[1:]
decompress = True
try:
json = base64_decode(payload)
except Exception as e:
raise BadPayload('Could not base64 decode the payload because of '
'an exception', original_error=e)
if decompress:
try:
json = zlib.decompress(json)
except Exception as e:
raise BadPayload('Could not zlib decompress the payload before '
'decoding the payload', original_error=e)
return super(URLSafeSerializerMixin, self).load_payload(json)
def dump_payload(self, obj):
json = super(URLSafeSerializerMixin, self).dump_payload(obj)
is_compressed = False
compressed = zlib.compress(json)
if len(compressed) < (len(json) - 1):
json = compressed
is_compressed = True
base64d = base64_encode(json)
if is_compressed:
base64d = b'.' + base64d
return base64d
class URLSafeSerializer(URLSafeSerializerMixin, Serializer):
"""Works like :class:`Serializer` but dumps and loads into a URL
safe string consisting of the upper and lowercase characters of the
alphabet as well as ``'_'``, ``'-'`` and ``'.'``.
"""
default_serializer = compact_json
class URLSafeTimedSerializer(URLSafeSerializerMixin, TimedSerializer):
"""Works like :class:`TimedSerializer` but dumps and loads into a URL
safe string consisting of the upper and lowercase characters of the
alphabet as well as ``'_'``, ``'-'`` and ``'.'``.
"""
default_serializer = compact_json
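# Illustrative sketch (not part of the library): the URL-safe timed serializer
# is the usual choice for e-mail confirmation links and signed cookie values.
_uss = URLSafeTimedSerializer('secret-key', salt='activate')
_link_token = _uss.dumps('user@example.com')      # compact, URL-safe string
_email = _uss.loads(_link_token, max_age=86400)   # reject links older than a day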

View File

@@ -1,69 +0,0 @@
# -*- coding: utf-8 -*-
"""
jinja2
~~~~~~
Jinja2 is a template engine written in pure Python. It provides a
Django inspired non-XML syntax but supports inline expressions and
an optional sandboxed environment.
Nutshell
--------
Here a small example of a Jinja2 template::
{% extends 'base.html' %}
{% block title %}Memberlist{% endblock %}
{% block content %}
<ul>
{% for user in users %}
<li><a href="{{ user.url }}">{{ user.username }}</a></li>
{% endfor %}
</ul>
{% endblock %}
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
__docformat__ = 'restructuredtext en'
__version__ = '2.7.2'
# high level interface
from jinja2.environment import Environment, Template
# loaders
from jinja2.loaders import BaseLoader, FileSystemLoader, PackageLoader, \
DictLoader, FunctionLoader, PrefixLoader, ChoiceLoader, \
ModuleLoader
# bytecode caches
from jinja2.bccache import BytecodeCache, FileSystemBytecodeCache, \
MemcachedBytecodeCache
# undefined types
from jinja2.runtime import Undefined, DebugUndefined, StrictUndefined
# exceptions
from jinja2.exceptions import TemplateError, UndefinedError, \
TemplateNotFound, TemplatesNotFound, TemplateSyntaxError, \
TemplateAssertionError
# decorators and public utilities
from jinja2.filters import environmentfilter, contextfilter, \
evalcontextfilter
from jinja2.utils import Markup, escape, clear_caches, \
environmentfunction, evalcontextfunction, contextfunction, \
is_undefined
__all__ = [
'Environment', 'Template', 'BaseLoader', 'FileSystemLoader',
'PackageLoader', 'DictLoader', 'FunctionLoader', 'PrefixLoader',
'ChoiceLoader', 'BytecodeCache', 'FileSystemBytecodeCache',
'MemcachedBytecodeCache', 'Undefined', 'DebugUndefined',
'StrictUndefined', 'TemplateError', 'UndefinedError', 'TemplateNotFound',
'TemplatesNotFound', 'TemplateSyntaxError', 'TemplateAssertionError',
'ModuleLoader', 'environmentfilter', 'contextfilter', 'Markup', 'escape',
'environmentfunction', 'contextfunction', 'clear_caches', 'is_undefined',
'evalcontextfilter', 'evalcontextfunction'
]
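# Illustrative quick-start sketch (not part of the module): rendering a
# template through the public API imported above. The loader contents are made up.
_env = Environment(loader=DictLoader({'hello.html': u'Hello {{ name }}!'}))
print(_env.get_template('hello.html').render(name='World'))   # -> Hello World!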

View File

@@ -1,150 +0,0 @@
# -*- coding: utf-8 -*-
"""
jinja2._compat
~~~~~~~~~~~~~~
Some py2/py3 compatibility support based on a stripped down
version of six so we don't have to depend on a specific version
of it.
:copyright: Copyright 2013 by the Jinja team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import sys
PY2 = sys.version_info[0] == 2
PYPY = hasattr(sys, 'pypy_translation_info')
_identity = lambda x: x
if not PY2:
unichr = chr
range_type = range
text_type = str
string_types = (str,)
iterkeys = lambda d: iter(d.keys())
itervalues = lambda d: iter(d.values())
iteritems = lambda d: iter(d.items())
import pickle
from io import BytesIO, StringIO
NativeStringIO = StringIO
def reraise(tp, value, tb=None):
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
ifilter = filter
imap = map
izip = zip
intern = sys.intern
implements_iterator = _identity
implements_to_string = _identity
encode_filename = _identity
get_next = lambda x: x.__next__
else:
unichr = unichr
text_type = unicode
range_type = xrange
string_types = (str, unicode)
iterkeys = lambda d: d.iterkeys()
itervalues = lambda d: d.itervalues()
iteritems = lambda d: d.iteritems()
import cPickle as pickle
from cStringIO import StringIO as BytesIO, StringIO
NativeStringIO = BytesIO
exec('def reraise(tp, value, tb=None):\n raise tp, value, tb')
from itertools import imap, izip, ifilter
intern = intern
def implements_iterator(cls):
cls.next = cls.__next__
del cls.__next__
return cls
def implements_to_string(cls):
cls.__unicode__ = cls.__str__
cls.__str__ = lambda x: x.__unicode__().encode('utf-8')
return cls
get_next = lambda x: x.next
def encode_filename(filename):
if isinstance(filename, unicode):
return filename.encode('utf-8')
return filename
try:
next = next
except NameError:
def next(it):
return it.next()
def with_metaclass(meta, *bases):
# This requires a bit of explanation: the basic idea is to make a
# dummy metaclass for one level of class instantiation that replaces
# itself with the actual metaclass. Because of internal type checks
# we also need to make sure that we downgrade the custom metaclass
# for one level to something closer to type (that's why __call__ and
# __init__ comes back from type etc.).
#
# This has the advantage over six.with_metaclass in that it does not
# introduce dummy classes into the final MRO.
class metaclass(meta):
__call__ = type.__call__
__init__ = type.__init__
def __new__(cls, name, this_bases, d):
if this_bases is None:
return type.__new__(cls, name, (), d)
return meta(name, bases, d)
return metaclass('temporary_class', None, {})
try:
from collections import Mapping as mapping_types
except ImportError:
import UserDict
mapping_types = (UserDict.UserDict, UserDict.DictMixin, dict)
# common types. These do exist in the special types module too which however
# does not exist in IronPython out of the box. Also that way we don't have
# to deal with implementation specific stuff here
class _C(object):
def method(self): pass
def _func():
yield None
function_type = type(_func)
generator_type = type(_func())
method_type = type(_C().method)
code_type = type(_C.method.__code__)
try:
raise TypeError()
except TypeError:
_tb = sys.exc_info()[2]
traceback_type = type(_tb)
frame_type = type(_tb.tb_frame)
try:
from urllib.parse import quote_from_bytes as url_quote
except ImportError:
from urllib import quote as url_quote
try:
from thread import allocate_lock
except ImportError:
try:
from threading import Lock as allocate_lock
except ImportError:
from dummy_thread import allocate_lock

File diff suppressed because one or more lines are too long

View File

@@ -1,337 +0,0 @@
# -*- coding: utf-8 -*-
"""
jinja2.bccache
~~~~~~~~~~~~~~
This module implements the bytecode cache system Jinja is optionally
using. This is useful if you have very complex template situations and
the compilation of all those templates slows down your application too
much.
Situations where this is useful are often forking web applications that
are initialized on the first request.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD.
"""
from os import path, listdir
import os
import sys
import errno
import marshal
import tempfile
import fnmatch
from hashlib import sha1
from jinja2.utils import open_if_exists
from jinja2._compat import BytesIO, pickle, PY2, text_type
# marshal works better on 3.x, one hack less required
if not PY2:
marshal_dump = marshal.dump
marshal_load = marshal.load
else:
def marshal_dump(code, f):
if isinstance(f, file):
marshal.dump(code, f)
else:
f.write(marshal.dumps(code))
def marshal_load(f):
if isinstance(f, file):
return marshal.load(f)
return marshal.loads(f.read())
bc_version = 2
# magic version used to only change with new jinja versions. With 2.6
# we change this to also take Python version changes into account. The
# reason for this is that Python tends to segfault if fed earlier bytecode
# versions because someone thought it would be a good idea to reuse opcodes
# or make Python incompatible with earlier versions.
bc_magic = 'j2'.encode('ascii') + \
pickle.dumps(bc_version, 2) + \
pickle.dumps((sys.version_info[0] << 24) | sys.version_info[1])
class Bucket(object):
"""Buckets are used to store the bytecode for one template. It's created
and initialized by the bytecode cache and passed to the loading functions.
The buckets get an internal checksum from the cache assigned and use this
to automatically reject outdated cache material. Individual bytecode
cache subclasses don't have to care about cache invalidation.
"""
def __init__(self, environment, key, checksum):
self.environment = environment
self.key = key
self.checksum = checksum
self.reset()
def reset(self):
"""Resets the bucket (unloads the bytecode)."""
self.code = None
def load_bytecode(self, f):
"""Loads bytecode from a file or file like object."""
# make sure the magic header is correct
magic = f.read(len(bc_magic))
if magic != bc_magic:
self.reset()
return
# the source code of the file changed, we need to reload
checksum = pickle.load(f)
if self.checksum != checksum:
self.reset()
return
self.code = marshal_load(f)
def write_bytecode(self, f):
"""Dump the bytecode into the file or file like object passed."""
if self.code is None:
raise TypeError('can\'t write empty bucket')
f.write(bc_magic)
pickle.dump(self.checksum, f, 2)
marshal_dump(self.code, f)
def bytecode_from_string(self, string):
"""Load bytecode from a string."""
self.load_bytecode(BytesIO(string))
def bytecode_to_string(self):
"""Return the bytecode as string."""
out = BytesIO()
self.write_bytecode(out)
return out.getvalue()
class BytecodeCache(object):
"""To implement your own bytecode cache you have to subclass this class
and override :meth:`load_bytecode` and :meth:`dump_bytecode`. Both of
these methods are passed a :class:`~jinja2.bccache.Bucket`.
A very basic bytecode cache that saves the bytecode on the file system::
from os import path
class MyCache(BytecodeCache):
def __init__(self, directory):
self.directory = directory
def load_bytecode(self, bucket):
filename = path.join(self.directory, bucket.key)
if path.exists(filename):
with open(filename, 'rb') as f:
bucket.load_bytecode(f)
def dump_bytecode(self, bucket):
filename = path.join(self.directory, bucket.key)
with open(filename, 'wb') as f:
bucket.write_bytecode(f)
A more advanced version of a filesystem based bytecode cache is part of
Jinja2.
"""
def load_bytecode(self, bucket):
"""Subclasses have to override this method to load bytecode into a
bucket. If it is not able to find code in the cache for the
bucket, it must not do anything.
"""
raise NotImplementedError()
def dump_bytecode(self, bucket):
"""Subclasses have to override this method to write the bytecode
from a bucket back to the cache. If it is unable to do so it must not
fail silently but raise an exception.
"""
raise NotImplementedError()
def clear(self):
"""Clears the cache. This method is not used by Jinja2 but should be
implemented to allow applications to clear the bytecode cache used
by a particular environment.
"""
def get_cache_key(self, name, filename=None):
"""Returns the unique hash key for this template name."""
hash = sha1(name.encode('utf-8'))
if filename is not None:
filename = '|' + filename
if isinstance(filename, text_type):
filename = filename.encode('utf-8')
hash.update(filename)
return hash.hexdigest()
def get_source_checksum(self, source):
"""Returns a checksum for the source."""
return sha1(source.encode('utf-8')).hexdigest()
def get_bucket(self, environment, name, filename, source):
"""Return a cache bucket for the given template. All arguments are
mandatory but filename may be `None`.
"""
key = self.get_cache_key(name, filename)
checksum = self.get_source_checksum(source)
bucket = Bucket(environment, key, checksum)
self.load_bytecode(bucket)
return bucket
def set_bucket(self, bucket):
"""Put the bucket into the cache."""
self.dump_bytecode(bucket)
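# A minimal in-memory cache following the contract above (an illustrative
# sketch, not shipped with Jinja2; mainly useful for tests):
class DictBytecodeCache(BytecodeCache):

    def __init__(self):
        self.cache = {}

    def load_bytecode(self, bucket):
        code = self.cache.get(bucket.key)
        if code is not None:
            bucket.bytecode_from_string(code)

    def dump_bytecode(self, bucket):
        self.cache[bucket.key] = bucket.bytecode_to_string()

    def clear(self):
        self.cache.clear()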
class FileSystemBytecodeCache(BytecodeCache):
"""A bytecode cache that stores bytecode on the filesystem. It accepts
two arguments: The directory where the cache items are stored and a
pattern string that is used to build the filename.
If no directory is specified a default cache directory is selected. On
Windows the user's temp directory is used, on UNIX systems a directory
is created for the user in the system temp directory.
The pattern can be used to have multiple separate caches operate on the
same directory. The default pattern is ``'__jinja2_%s.cache'``. ``%s``
is replaced with the cache key.
>>> bcc = FileSystemBytecodeCache('/tmp/jinja_cache', '%s.cache')
This bytecode cache supports clearing of the cache using the clear method.
"""
def __init__(self, directory=None, pattern='__jinja2_%s.cache'):
if directory is None:
directory = self._get_default_cache_dir()
self.directory = directory
self.pattern = pattern
def _get_default_cache_dir(self):
tmpdir = tempfile.gettempdir()
# On Windows the temporary directory is already user-specific unless
# explicitly forced otherwise, so we can just use it.
if os.name == 'nt':
return tmpdir
if not hasattr(os, 'getuid'):
raise RuntimeError('Cannot determine safe temp directory. You '
'need to explicitly provide one.')
dirname = '_jinja2-cache-%d' % os.getuid()
actual_dir = os.path.join(tmpdir, dirname)
try:
# 448 == 0700
os.mkdir(actual_dir, 448)
except OSError as e:
if e.errno != errno.EEXIST:
raise
return actual_dir
def _get_cache_filename(self, bucket):
return path.join(self.directory, self.pattern % bucket.key)
def load_bytecode(self, bucket):
f = open_if_exists(self._get_cache_filename(bucket), 'rb')
if f is not None:
try:
bucket.load_bytecode(f)
finally:
f.close()
def dump_bytecode(self, bucket):
f = open(self._get_cache_filename(bucket), 'wb')
try:
bucket.write_bytecode(f)
finally:
f.close()
def clear(self):
# imported lazily here because google app-engine doesn't support
# write access on the file system and the function does not exist
# normally.
from os import remove
files = fnmatch.filter(listdir(self.directory), self.pattern % '*')
for filename in files:
try:
remove(path.join(self.directory, filename))
except OSError:
pass
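# Usage sketch (illustrative): an application wires the cache into an
# environment via the ``bytecode_cache`` argument; paths are made up::
#
#     from jinja2 import Environment, FileSystemLoader
#     env = Environment(loader=FileSystemLoader('templates'),
#                       bytecode_cache=FileSystemBytecodeCache('/tmp/jinja_cache'))
#
# Compiled templates then land in /tmp/jinja_cache/__jinja2_<key>.cache and are
# reused by later processes instead of being recompiled.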
class MemcachedBytecodeCache(BytecodeCache):
"""This class implements a bytecode cache that uses a memcache cache for
storing the information. It does not enforce a specific memcache library
(tummy's memcache or cmemcache) but will accept any class that provides
the minimal interface required.
Libraries compatible with this class:
- `werkzeug <http://werkzeug.pocoo.org/>`_.contrib.cache
- `python-memcached <http://www.tummy.com/Community/software/python-memcached/>`_
- `cmemcache <http://gijsbert.org/cmemcache/>`_
(Unfortunately the django cache interface is not compatible because it
does not support storing binary data, only unicode. You can however pass
the underlying cache client to the bytecode cache which is available
as `django.core.cache.cache._client`.)
The minimal interface for the client passed to the constructor is this:
.. class:: MinimalClientInterface
.. method:: set(key, value[, timeout])
Stores the bytecode in the cache. `value` is a string and
`timeout` the timeout of the key. If timeout is not provided
a default timeout or no timeout should be assumed, if it's
provided it's an integer with the number of seconds the cache
item should exist.
.. method:: get(key)
Returns the value for the cache key. If the item does not
exist in the cache the return value must be `None`.
The other arguments to the constructor are the prefix for all keys that
is added before the actual cache key and the timeout for the bytecode in
the cache system. We recommend a high (or no) timeout.
This bytecode cache does not support clearing of used items in the cache.
The clear method is a no-operation function.
.. versionadded:: 2.7
Added support for ignoring memcache errors through the
`ignore_memcache_errors` parameter.
"""
def __init__(self, client, prefix='jinja2/bytecode/', timeout=None,
ignore_memcache_errors=True):
self.client = client
self.prefix = prefix
self.timeout = timeout
self.ignore_memcache_errors = ignore_memcache_errors
def load_bytecode(self, bucket):
try:
code = self.client.get(self.prefix + bucket.key)
except Exception:
if not self.ignore_memcache_errors:
raise
code = None
if code is not None:
bucket.bytecode_from_string(code)
def dump_bytecode(self, bucket):
args = (self.prefix + bucket.key, bucket.bytecode_to_string())
if self.timeout is not None:
args += (self.timeout,)
try:
self.client.set(*args)
except Exception:
if not self.ignore_memcache_errors:
raise
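# A minimal client satisfying the interface documented above (an illustrative
# sketch, not a real memcache client; handy for tests):
class _DictMemcacheClient(object):

    def __init__(self):
        self._data = {}

    def get(self, key):
        return self._data.get(key)

    def set(self, key, value, timeout=None):
        self._data[key] = value

# bcc = MemcachedBytecodeCache(_DictMemcacheClient(), prefix='jinja2/bytecode/')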

File diff suppressed because it is too large

View File

@@ -1,32 +0,0 @@
# -*- coding: utf-8 -*-
"""
jinja2.constants
~~~~~~~~~~~~~~~~
Various constants.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
#: list of lorem ipsum words used by the lipsum() helper function
LOREM_IPSUM_WORDS = u'''\
a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at
auctor augue bibendum blandit class commodo condimentum congue consectetuer
consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus
diam dictum dictumst dignissim dis dolor donec dui duis egestas eget eleifend
elementum elit enim erat eros est et etiam eu euismod facilisi facilisis fames
faucibus felis fermentum feugiat fringilla fusce gravida habitant habitasse hac
hendrerit hymenaeos iaculis id imperdiet in inceptos integer interdum ipsum
justo lacinia lacus laoreet lectus leo libero ligula litora lobortis lorem
luctus maecenas magna magnis malesuada massa mattis mauris metus mi molestie
mollis montes morbi mus nam nascetur natoque nec neque netus nibh nisi nisl non
nonummy nostra nulla nullam nunc odio orci ornare parturient pede pellentesque
penatibus per pharetra phasellus placerat platea porta porttitor posuere
potenti praesent pretium primis proin pulvinar purus quam quis quisque rhoncus
ridiculus risus rutrum sagittis sapien scelerisque sed sem semper senectus sit
sociis sociosqu sodales sollicitudin suscipit suspendisse taciti tellus tempor
tempus tincidunt torquent tortor tristique turpis ullamcorper ultrices
ultricies urna ut varius vehicula vel velit venenatis vestibulum vitae vivamus
viverra volutpat vulputate'''

View File

@@ -1,337 +0,0 @@
# -*- coding: utf-8 -*-
"""
jinja2.debug
~~~~~~~~~~~~
Implements the debug interface for Jinja. This module does some pretty
ugly stuff with the Python traceback system in order to achieve tracebacks
with correct line numbers, locals and contents.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import sys
import traceback
from types import TracebackType
from jinja2.utils import missing, internal_code
from jinja2.exceptions import TemplateSyntaxError
from jinja2._compat import iteritems, reraise, code_type
# on pypy we can take advantage of transparent proxies
try:
from __pypy__ import tproxy
except ImportError:
tproxy = None
# what does the raise helper look like?
try:
exec("raise TypeError, 'foo'")
except SyntaxError:
raise_helper = 'raise __jinja_exception__[1]'
except TypeError:
raise_helper = 'raise __jinja_exception__[0], __jinja_exception__[1]'
class TracebackFrameProxy(object):
"""Proxies a traceback frame."""
def __init__(self, tb):
self.tb = tb
self._tb_next = None
@property
def tb_next(self):
return self._tb_next
def set_next(self, next):
if tb_set_next is not None:
try:
tb_set_next(self.tb, next and next.tb or None)
except Exception:
# this function can fail due to all the hackery it does
# on various python implementations. We just catch errors
# down and ignore them if necessary.
pass
self._tb_next = next
@property
def is_jinja_frame(self):
return '__jinja_template__' in self.tb.tb_frame.f_globals
def __getattr__(self, name):
return getattr(self.tb, name)
def make_frame_proxy(frame):
proxy = TracebackFrameProxy(frame)
if tproxy is None:
return proxy
def operation_handler(operation, *args, **kwargs):
if operation in ('__getattribute__', '__getattr__'):
return getattr(proxy, args[0])
elif operation == '__setattr__':
proxy.__setattr__(*args, **kwargs)
else:
return getattr(proxy, operation)(*args, **kwargs)
return tproxy(TracebackType, operation_handler)
class ProcessedTraceback(object):
"""Holds a Jinja preprocessed traceback for printing or reraising."""
def __init__(self, exc_type, exc_value, frames):
assert frames, 'no frames for this traceback?'
self.exc_type = exc_type
self.exc_value = exc_value
self.frames = frames
# newly concatenate the frames (which are proxies)
prev_tb = None
for tb in self.frames:
if prev_tb is not None:
prev_tb.set_next(tb)
prev_tb = tb
prev_tb.set_next(None)
def render_as_text(self, limit=None):
"""Return a string with the traceback."""
lines = traceback.format_exception(self.exc_type, self.exc_value,
self.frames[0], limit=limit)
return ''.join(lines).rstrip()
def render_as_html(self, full=False):
"""Return a unicode string with the traceback as rendered HTML."""
from jinja2.debugrenderer import render_traceback
return u'%s\n\n<!--\n%s\n-->' % (
render_traceback(self, full=full),
self.render_as_text().decode('utf-8', 'replace')
)
@property
def is_template_syntax_error(self):
"""`True` if this is a template syntax error."""
return isinstance(self.exc_value, TemplateSyntaxError)
@property
def exc_info(self):
"""Exception info tuple with a proxy around the frame objects."""
return self.exc_type, self.exc_value, self.frames[0]
@property
def standard_exc_info(self):
"""Standard python exc_info for re-raising"""
tb = self.frames[0]
# the frame will be an actual traceback (or transparent proxy) if
# we are on pypy or a python implementation with support for tproxy
if type(tb) is not TracebackType:
tb = tb.tb
return self.exc_type, self.exc_value, tb
def make_traceback(exc_info, source_hint=None):
"""Creates a processed traceback object from the exc_info."""
exc_type, exc_value, tb = exc_info
if isinstance(exc_value, TemplateSyntaxError):
exc_info = translate_syntax_error(exc_value, source_hint)
initial_skip = 0
else:
initial_skip = 1
return translate_exception(exc_info, initial_skip)
def translate_syntax_error(error, source=None):
"""Rewrites a syntax error to please traceback systems."""
error.source = source
error.translated = True
exc_info = (error.__class__, error, None)
filename = error.filename
if filename is None:
filename = '<unknown>'
return fake_exc_info(exc_info, filename, error.lineno)
def translate_exception(exc_info, initial_skip=0):
"""If passed an exc_info it will automatically rewrite the exceptions
all the way down to the correct line numbers and frames.
"""
tb = exc_info[2]
frames = []
# skip some internal frames if wanted
for x in range(initial_skip):
if tb is not None:
tb = tb.tb_next
initial_tb = tb
while tb is not None:
# skip frames decorated with @internalcode. These are internal
# calls we can't avoid and that are useless in template debugging
# output.
if tb.tb_frame.f_code in internal_code:
tb = tb.tb_next
continue
# save a reference to the next frame if we override the current
# one with a faked one.
next = tb.tb_next
# fake template exceptions
template = tb.tb_frame.f_globals.get('__jinja_template__')
if template is not None:
lineno = template.get_corresponding_lineno(tb.tb_lineno)
tb = fake_exc_info(exc_info[:2] + (tb,), template.filename,
lineno)[2]
frames.append(make_frame_proxy(tb))
tb = next
# if we don't have any exceptions in the frames left, we have to
# reraise it unchanged.
# XXX: can we backup here? when could this happen?
if not frames:
reraise(exc_info[0], exc_info[1], exc_info[2])
return ProcessedTraceback(exc_info[0], exc_info[1], frames)
def fake_exc_info(exc_info, filename, lineno):
"""Helper for `translate_exception`."""
exc_type, exc_value, tb = exc_info
# figure the real context out
if tb is not None:
real_locals = tb.tb_frame.f_locals.copy()
ctx = real_locals.get('context')
if ctx:
locals = ctx.get_all()
else:
locals = {}
for name, value in iteritems(real_locals):
if name.startswith('l_') and value is not missing:
locals[name[2:]] = value
# if there is a local called __jinja_exception__, we get
# rid of it to not break the debug functionality.
locals.pop('__jinja_exception__', None)
else:
locals = {}
# assemble the fake globals we need
globals = {
'__name__': filename,
'__file__': filename,
'__jinja_exception__': exc_info[:2],
# we don't want to keep the reference to the template around
# to not cause circular dependencies, but we mark it as Jinja
# frame for the ProcessedTraceback
'__jinja_template__': None
}
# and fake the exception
code = compile('\n' * (lineno - 1) + raise_helper, filename, 'exec')
# if it's possible, change the name of the code. This won't work
# on some python environments such as google appengine
try:
if tb is None:
location = 'template'
else:
function = tb.tb_frame.f_code.co_name
if function == 'root':
location = 'top-level template code'
elif function.startswith('block_'):
location = 'block "%s"' % function[6:]
else:
location = 'template'
code = code_type(0, code.co_nlocals, code.co_stacksize,
code.co_flags, code.co_code, code.co_consts,
code.co_names, code.co_varnames, filename,
location, code.co_firstlineno,
code.co_lnotab, (), ())
except:
pass
# execute the code and catch the new traceback
try:
exec(code, globals, locals)
except:
exc_info = sys.exc_info()
new_tb = exc_info[2].tb_next
# return without this frame
return exc_info[:2] + (new_tb,)
def _init_ugly_crap():
"""This function implements a few ugly things so that we can patch the
traceback objects. The function returned allows resetting `tb_next` on
any Python traceback object. Do not attempt to use this on non-CPython
interpreters.
"""
import ctypes
from types import TracebackType
# figure out the size of _Py_ssize_t
if hasattr(ctypes.pythonapi, 'Py_InitModule4_64'):
_Py_ssize_t = ctypes.c_int64
else:
_Py_ssize_t = ctypes.c_int
# regular python
class _PyObject(ctypes.Structure):
pass
_PyObject._fields_ = [
('ob_refcnt', _Py_ssize_t),
('ob_type', ctypes.POINTER(_PyObject))
]
# python with trace
if hasattr(sys, 'getobjects'):
class _PyObject(ctypes.Structure):
pass
_PyObject._fields_ = [
('_ob_next', ctypes.POINTER(_PyObject)),
('_ob_prev', ctypes.POINTER(_PyObject)),
('ob_refcnt', _Py_ssize_t),
('ob_type', ctypes.POINTER(_PyObject))
]
class _Traceback(_PyObject):
pass
_Traceback._fields_ = [
('tb_next', ctypes.POINTER(_Traceback)),
('tb_frame', ctypes.POINTER(_PyObject)),
('tb_lasti', ctypes.c_int),
('tb_lineno', ctypes.c_int)
]
def tb_set_next(tb, next):
"""Set the tb_next attribute of a traceback object."""
if not (isinstance(tb, TracebackType) and
(next is None or isinstance(next, TracebackType))):
raise TypeError('tb_set_next arguments must be traceback objects')
obj = _Traceback.from_address(id(tb))
if tb.tb_next is not None:
old = _Traceback.from_address(id(tb.tb_next))
old.ob_refcnt -= 1
if next is None:
obj.tb_next = ctypes.POINTER(_Traceback)()
else:
next = _Traceback.from_address(id(next))
next.ob_refcnt += 1
obj.tb_next = ctypes.pointer(next)
return tb_set_next
# try to get a tb_set_next implementation if we don't have transparent
# proxies.
tb_set_next = None
if tproxy is None:
try:
tb_set_next = _init_ugly_crap()
except:
pass
del _init_ugly_crap

View File

@@ -1,43 +0,0 @@
# -*- coding: utf-8 -*-
"""
jinja2.defaults
~~~~~~~~~~~~~~~
Jinja default filters and tags.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
from jinja2._compat import range_type
from jinja2.utils import generate_lorem_ipsum, Cycler, Joiner
# defaults for the parser / lexer
BLOCK_START_STRING = '{%'
BLOCK_END_STRING = '%}'
VARIABLE_START_STRING = '{{'
VARIABLE_END_STRING = '}}'
COMMENT_START_STRING = '{#'
COMMENT_END_STRING = '#}'
LINE_STATEMENT_PREFIX = None
LINE_COMMENT_PREFIX = None
TRIM_BLOCKS = False
LSTRIP_BLOCKS = False
NEWLINE_SEQUENCE = '\n'
KEEP_TRAILING_NEWLINE = False
# default filters, tests and namespace
from jinja2.filters import FILTERS as DEFAULT_FILTERS
from jinja2.tests import TESTS as DEFAULT_TESTS
DEFAULT_NAMESPACE = {
'range': range_type,
'dict': lambda **kw: kw,
'lipsum': generate_lorem_ipsum,
'cycler': Cycler,
'joiner': Joiner
}
# export all constants
__all__ = tuple(x for x in locals().keys() if x.isupper())
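# Usage sketch (illustrative, not part of this module): every default above can
# be overridden per environment, e.g. to avoid clashing with client-side
# template syntax::
#
#     env = Environment(block_start_string='<%', block_end_string='%>',
#                       variable_start_string='<<', variable_end_string='>>')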

File diff suppressed because it is too large

View File

@@ -1,146 +0,0 @@
# -*- coding: utf-8 -*-
"""
jinja2.exceptions
~~~~~~~~~~~~~~~~~
Jinja exceptions.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
from jinja2._compat import imap, text_type, PY2, implements_to_string
class TemplateError(Exception):
"""Baseclass for all template errors."""
if PY2:
def __init__(self, message=None):
if message is not None:
message = text_type(message).encode('utf-8')
Exception.__init__(self, message)
@property
def message(self):
if self.args:
message = self.args[0]
if message is not None:
return message.decode('utf-8', 'replace')
def __unicode__(self):
return self.message or u''
else:
def __init__(self, message=None):
Exception.__init__(self, message)
@property
def message(self):
if self.args:
message = self.args[0]
if message is not None:
return message
@implements_to_string
class TemplateNotFound(IOError, LookupError, TemplateError):
"""Raised if a template does not exist."""
# looks weird, but removes the warning descriptor that just
# bogusly warns us about message being deprecated
message = None
def __init__(self, name, message=None):
IOError.__init__(self)
if message is None:
message = name
self.message = message
self.name = name
self.templates = [name]
def __str__(self):
return self.message
class TemplatesNotFound(TemplateNotFound):
"""Like :class:`TemplateNotFound` but raised if multiple templates
are selected. This is a subclass of :class:`TemplateNotFound`
exception, so just catching the base exception will catch both.
.. versionadded:: 2.2
"""
def __init__(self, names=(), message=None):
if message is None:
message = u'none of the templates given were found: ' + \
u', '.join(imap(text_type, names))
TemplateNotFound.__init__(self, names and names[-1] or None, message)
self.templates = list(names)
@implements_to_string
class TemplateSyntaxError(TemplateError):
"""Raised to tell the user that there is a problem with the template."""
def __init__(self, message, lineno, name=None, filename=None):
TemplateError.__init__(self, message)
self.lineno = lineno
self.name = name
self.filename = filename
self.source = None
# this is set to True if the debug.translate_syntax_error
# function translated the syntax error into a new traceback
self.translated = False
def __str__(self):
# for translated errors we only return the message
if self.translated:
return self.message
# otherwise attach some stuff
location = 'line %d' % self.lineno
name = self.filename or self.name
if name:
location = 'File "%s", %s' % (name, location)
lines = [self.message, ' ' + location]
# if the source is set, add the line to the output
if self.source is not None:
try:
line = self.source.splitlines()[self.lineno - 1]
except IndexError:
line = None
if line:
lines.append(' ' + line.strip())
return u'\n'.join(lines)
class TemplateAssertionError(TemplateSyntaxError):
"""Like a template syntax error, but covers cases where something in the
template caused an error at compile time that wasn't necessarily caused
by a syntax error. However it's a direct subclass of
:exc:`TemplateSyntaxError` and has the same attributes.
"""
class TemplateRuntimeError(TemplateError):
"""A generic runtime error in the template engine. Under some situations
Jinja may raise this exception.
"""
class UndefinedError(TemplateRuntimeError):
"""Raised if a template tries to operate on :class:`Undefined`."""
class SecurityError(TemplateRuntimeError):
"""Raised if a template tries to do something insecure if the
sandbox is enabled.
"""
class FilterArgumentError(TemplateRuntimeError):
"""This error is raised if a filter was called with inappropriate
arguments
"""

View File

@@ -1,636 +0,0 @@
# -*- coding: utf-8 -*-
"""
jinja2.ext
~~~~~~~~~~
Jinja extensions allow adding custom tags, similar to the way Django custom
tags work. By default two example extensions exist: an i18n and a cache
extension.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD.
"""
from jinja2 import nodes
from jinja2.defaults import BLOCK_START_STRING, \
BLOCK_END_STRING, VARIABLE_START_STRING, VARIABLE_END_STRING, \
COMMENT_START_STRING, COMMENT_END_STRING, LINE_STATEMENT_PREFIX, \
LINE_COMMENT_PREFIX, TRIM_BLOCKS, NEWLINE_SEQUENCE, \
KEEP_TRAILING_NEWLINE, LSTRIP_BLOCKS
from jinja2.environment import Environment
from jinja2.runtime import concat
from jinja2.exceptions import TemplateAssertionError, TemplateSyntaxError
from jinja2.utils import contextfunction, import_string, Markup
from jinja2._compat import next, with_metaclass, string_types, iteritems
# the only real useful gettext functions for a Jinja template. Note
# that ugettext must be assigned to gettext as Jinja doesn't support
# non unicode strings.
GETTEXT_FUNCTIONS = ('_', 'gettext', 'ngettext')
class ExtensionRegistry(type):
"""Gives the extension a unique identifier."""
def __new__(cls, name, bases, d):
rv = type.__new__(cls, name, bases, d)
rv.identifier = rv.__module__ + '.' + rv.__name__
return rv
class Extension(with_metaclass(ExtensionRegistry, object)):
"""Extensions can be used to add extra functionality to the Jinja template
system at the parser level. Custom extensions are bound to an environment
but may not store environment specific data on `self`. The reason for
this is that an extension can be bound to another environment (for
overlays) by creating a copy and reassigning the `environment` attribute.
As extensions are created by the environment they cannot accept any
arguments for configuration. One may want to work around that by using
a factory function, but that is not possible as extensions are identified
by their import name. The correct way to configure the extension is
storing the configuration values on the environment. Because this way the
environment ends up acting as central configuration storage the
attributes may clash which is why extensions have to ensure that the names
they choose for configuration are not too generic. ``prefix`` for example
is a terrible name, ``fragment_cache_prefix`` on the other hand is a good
name as it includes the name of the extension (fragment cache).
"""
#: if this extension parses this is the list of tags it's listening to.
tags = set()
#: the priority of that extension. This is especially useful for
#: extensions that preprocess values. A lower value means higher
#: priority.
#:
#: .. versionadded:: 2.4
priority = 100
def __init__(self, environment):
self.environment = environment
def bind(self, environment):
"""Create a copy of this extension bound to another environment."""
rv = object.__new__(self.__class__)
rv.__dict__.update(self.__dict__)
rv.environment = environment
return rv
def preprocess(self, source, name, filename=None):
"""This method is called before the actual lexing and can be used to
preprocess the source. The `filename` is optional. The return value
must be the preprocessed source.
"""
return source
def filter_stream(self, stream):
"""It's passed a :class:`~jinja2.lexer.TokenStream` that can be used
to filter tokens returned. This method has to return an iterable of
:class:`~jinja2.lexer.Token`\s, but it doesn't have to return a
:class:`~jinja2.lexer.TokenStream`.
In the `ext` folder of the Jinja2 source distribution there is a file
called `inlinegettext.py` which implements a filter that utilizes this
method.
"""
return stream
def parse(self, parser):
"""If any of the :attr:`tags` matched this method is called with the
parser as first argument. The token the parser stream is pointing at
is the name token that matched. This method has to return one or a
list of multiple nodes.
"""
raise NotImplementedError()
def attr(self, name, lineno=None):
"""Return an attribute node for the current extension. This is useful
to pass constants on extensions to generated template code.
::
self.attr('_my_attribute', lineno=lineno)
"""
return nodes.ExtensionAttribute(self.identifier, name, lineno=lineno)
def call_method(self, name, args=None, kwargs=None, dyn_args=None,
dyn_kwargs=None, lineno=None):
"""Call a method of the extension. This is a shortcut for
:meth:`attr` + :class:`jinja2.nodes.Call`.
"""
if args is None:
args = []
if kwargs is None:
kwargs = []
return nodes.Call(self.attr(name, lineno=lineno), args, kwargs,
dyn_args, dyn_kwargs, lineno=lineno)
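# A minimal extension following the contract above (an illustrative sketch,
# not shipped with Jinja2): it adds an ``{% uppercase %}...{% enduppercase %}``
# block that upper-cases its rendered body.
class UppercaseExtension(Extension):
    tags = set(['uppercase'])

    def parse(self, parser):
        lineno = next(parser.stream).lineno
        body = parser.parse_statements(('name:enduppercase',),
                                       drop_needle=True)
        return nodes.CallBlock(self.call_method('_uppercase'),
                               [], [], body).set_lineno(lineno)

    def _uppercase(self, caller):
        return caller().upper()

# Enabled with Environment(extensions=[UppercaseExtension]).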
@contextfunction
def _gettext_alias(__context, *args, **kwargs):
return __context.call(__context.resolve('gettext'), *args, **kwargs)
def _make_new_gettext(func):
@contextfunction
def gettext(__context, __string, **variables):
rv = __context.call(func, __string)
if __context.eval_ctx.autoescape:
rv = Markup(rv)
return rv % variables
return gettext
def _make_new_ngettext(func):
@contextfunction
def ngettext(__context, __singular, __plural, __num, **variables):
variables.setdefault('num', __num)
rv = __context.call(func, __singular, __plural, __num)
if __context.eval_ctx.autoescape:
rv = Markup(rv)
return rv % variables
return ngettext
class InternationalizationExtension(Extension):
"""This extension adds gettext support to Jinja2."""
tags = set(['trans'])
# TODO: the i18n extension is currently reevaluating values in a few
# situations. Take this example:
# {% trans count=something() %}{{ count }} foo{% pluralize
# %}{{ count }} fooss{% endtrans %}
# something is called twice here. One time for the gettext value and
# the other time for the n-parameter of the ngettext function.
def __init__(self, environment):
Extension.__init__(self, environment)
environment.globals['_'] = _gettext_alias
environment.extend(
install_gettext_translations=self._install,
install_null_translations=self._install_null,
install_gettext_callables=self._install_callables,
uninstall_gettext_translations=self._uninstall,
extract_translations=self._extract,
newstyle_gettext=False
)
def _install(self, translations, newstyle=None):
gettext = getattr(translations, 'ugettext', None)
if gettext is None:
gettext = translations.gettext
ngettext = getattr(translations, 'ungettext', None)
if ngettext is None:
ngettext = translations.ngettext
self._install_callables(gettext, ngettext, newstyle)
def _install_null(self, newstyle=None):
self._install_callables(
lambda x: x,
lambda s, p, n: (n != 1 and (p,) or (s,))[0],
newstyle
)
def _install_callables(self, gettext, ngettext, newstyle=None):
if newstyle is not None:
self.environment.newstyle_gettext = newstyle
if self.environment.newstyle_gettext:
gettext = _make_new_gettext(gettext)
ngettext = _make_new_ngettext(ngettext)
self.environment.globals.update(
gettext=gettext,
ngettext=ngettext
)
def _uninstall(self, translations):
for key in 'gettext', 'ngettext':
self.environment.globals.pop(key, None)
def _extract(self, source, gettext_functions=GETTEXT_FUNCTIONS):
if isinstance(source, string_types):
source = self.environment.parse(source)
return extract_from_ast(source, gettext_functions)
def parse(self, parser):
"""Parse a translatable tag."""
lineno = next(parser.stream).lineno
num_called_num = False
# find all the variables referenced. Additionally a variable can be
# defined in the body of the trans block too, but this is checked at
# a later state.
plural_expr = None
plural_expr_assignment = None
variables = {}
while parser.stream.current.type != 'block_end':
if variables:
parser.stream.expect('comma')
# skip colon for python compatibility
if parser.stream.skip_if('colon'):
break
name = parser.stream.expect('name')
if name.value in variables:
parser.fail('translatable variable %r defined twice.' %
name.value, name.lineno,
exc=TemplateAssertionError)
# expressions
if parser.stream.current.type == 'assign':
next(parser.stream)
variables[name.value] = var = parser.parse_expression()
else:
variables[name.value] = var = nodes.Name(name.value, 'load')
if plural_expr is None:
if isinstance(var, nodes.Call):
plural_expr = nodes.Name('_trans', 'load')
variables[name.value] = plural_expr
plural_expr_assignment = nodes.Assign(
nodes.Name('_trans', 'store'), var)
else:
plural_expr = var
num_called_num = name.value == 'num'
parser.stream.expect('block_end')
plural = plural_names = None
have_plural = False
referenced = set()
# now parse until endtrans or pluralize
singular_names, singular = self._parse_block(parser, True)
if singular_names:
referenced.update(singular_names)
if plural_expr is None:
plural_expr = nodes.Name(singular_names[0], 'load')
num_called_num = singular_names[0] == 'num'
# if we have a pluralize block, we parse that too
if parser.stream.current.test('name:pluralize'):
have_plural = True
next(parser.stream)
if parser.stream.current.type != 'block_end':
name = parser.stream.expect('name')
if name.value not in variables:
parser.fail('unknown variable %r for pluralization' %
name.value, name.lineno,
exc=TemplateAssertionError)
plural_expr = variables[name.value]
num_called_num = name.value == 'num'
parser.stream.expect('block_end')
plural_names, plural = self._parse_block(parser, False)
next(parser.stream)
referenced.update(plural_names)
else:
next(parser.stream)
# register free names as simple name expressions
for var in referenced:
if var not in variables:
variables[var] = nodes.Name(var, 'load')
if not have_plural:
plural_expr = None
elif plural_expr is None:
parser.fail('pluralize without variables', lineno)
node = self._make_node(singular, plural, variables, plural_expr,
bool(referenced),
num_called_num and have_plural)
node.set_lineno(lineno)
if plural_expr_assignment is not None:
return [plural_expr_assignment, node]
else:
return node
def _parse_block(self, parser, allow_pluralize):
"""Parse until the next block tag with a given name."""
referenced = []
buf = []
while 1:
if parser.stream.current.type == 'data':
buf.append(parser.stream.current.value.replace('%', '%%'))
next(parser.stream)
elif parser.stream.current.type == 'variable_begin':
next(parser.stream)
name = parser.stream.expect('name').value
referenced.append(name)
buf.append('%%(%s)s' % name)
parser.stream.expect('variable_end')
elif parser.stream.current.type == 'block_begin':
next(parser.stream)
if parser.stream.current.test('name:endtrans'):
break
elif parser.stream.current.test('name:pluralize'):
if allow_pluralize:
break
parser.fail('a translatable section can have only one '
'pluralize section')
parser.fail('control structures in translatable sections are '
'not allowed')
elif parser.stream.eos:
parser.fail('unclosed translation block')
else:
assert False, 'internal parser error'
return referenced, concat(buf)
def _make_node(self, singular, plural, variables, plural_expr,
vars_referenced, num_called_num):
"""Generates a useful node from the data provided."""
# no variables referenced? no need to escape for old style
# gettext invocations only if there are vars.
if not vars_referenced and not self.environment.newstyle_gettext:
singular = singular.replace('%%', '%')
if plural:
plural = plural.replace('%%', '%')
# singular only:
if plural_expr is None:
gettext = nodes.Name('gettext', 'load')
node = nodes.Call(gettext, [nodes.Const(singular)],
[], None, None)
# singular and plural
else:
ngettext = nodes.Name('ngettext', 'load')
node = nodes.Call(ngettext, [
nodes.Const(singular),
nodes.Const(plural),
plural_expr
], [], None, None)
# in case newstyle gettext is used, the method is powerful
# enough to handle the variable expansion and autoescape
# handling itself
if self.environment.newstyle_gettext:
for key, value in iteritems(variables):
# the function adds that later anyways in case num was
# called num, so just skip it.
if num_called_num and key == 'num':
continue
node.kwargs.append(nodes.Keyword(key, value))
# otherwise do that here
else:
# mark the return value as safe if we are in an
# environment with autoescaping turned on
node = nodes.MarkSafeIfAutoescape(node)
if variables:
node = nodes.Mod(node, nodes.Dict([
nodes.Pair(nodes.Const(key), value)
for key, value in variables.items()
]))
return nodes.Output([node])
class ExprStmtExtension(Extension):
"""Adds a `do` tag to Jinja2 that works like the print statement except
that it doesn't print the return value.
"""
tags = set(['do'])
def parse(self, parser):
node = nodes.ExprStmt(lineno=next(parser.stream).lineno)
node.node = parser.parse_tuple()
return node
class LoopControlExtension(Extension):
"""Adds break and continue to the template engine."""
tags = set(['break', 'continue'])
def parse(self, parser):
token = next(parser.stream)
if token.value == 'break':
return nodes.Break(lineno=token.lineno)
return nodes.Continue(lineno=token.lineno)
class WithExtension(Extension):
"""Adds support for a django-like with block."""
tags = set(['with'])
def parse(self, parser):
node = nodes.Scope(lineno=next(parser.stream).lineno)
assignments = []
while parser.stream.current.type != 'block_end':
lineno = parser.stream.current.lineno
if assignments:
parser.stream.expect('comma')
target = parser.parse_assign_target()
parser.stream.expect('assign')
expr = parser.parse_expression()
assignments.append(nodes.Assign(target, expr, lineno=lineno))
node.body = assignments + \
list(parser.parse_statements(('name:endwith',),
drop_needle=True))
return node
class AutoEscapeExtension(Extension):
"""Changes auto escape rules for a scope."""
tags = set(['autoescape'])
def parse(self, parser):
node = nodes.ScopedEvalContextModifier(lineno=next(parser.stream).lineno)
node.options = [
nodes.Keyword('autoescape', parser.parse_expression())
]
node.body = parser.parse_statements(('name:endautoescape',),
drop_needle=True)
return nodes.Scope([node])
def extract_from_ast(node, gettext_functions=GETTEXT_FUNCTIONS,
babel_style=True):
"""Extract localizable strings from the given template node. Per
default this function returns matches in babel style that means non string
parameters as well as keyword arguments are returned as `None`. This
allows Babel to figure out what you really meant if you are using
gettext functions that allow keyword arguments for placeholder expansion.
If you don't want that behavior set the `babel_style` parameter to `False`
which causes only strings to be returned and parameters are always stored
in tuples. As a consequence invalid gettext calls (calls without a single
string parameter or string parameters after non-string parameters) are
skipped.
This example explains the behavior:
>>> from jinja2 import Environment
>>> env = Environment()
>>> node = env.parse('{{ (_("foo"), _(), ngettext("foo", "bar", 42)) }}')
>>> list(extract_from_ast(node))
[(1, '_', 'foo'), (1, '_', ()), (1, 'ngettext', ('foo', 'bar', None))]
>>> list(extract_from_ast(node, babel_style=False))
[(1, '_', ('foo',)), (1, 'ngettext', ('foo', 'bar'))]
For every string found this function yields a ``(lineno, function,
message)`` tuple, where:
* ``lineno`` is the number of the line on which the string was found,
* ``function`` is the name of the ``gettext`` function used (if the
string was extracted from embedded Python code), and
* ``message`` is the string itself (a ``unicode`` object, or a tuple
of ``unicode`` objects for functions with multiple string arguments).
This extraction function operates on the AST and is therefore unable
to extract any comments. For comment support you have to use the babel
extraction interface or extract comments yourself.
"""
for node in node.find_all(nodes.Call):
if not isinstance(node.node, nodes.Name) or \
node.node.name not in gettext_functions:
continue
strings = []
for arg in node.args:
if isinstance(arg, nodes.Const) and \
isinstance(arg.value, string_types):
strings.append(arg.value)
else:
strings.append(None)
for arg in node.kwargs:
strings.append(None)
if node.dyn_args is not None:
strings.append(None)
if node.dyn_kwargs is not None:
strings.append(None)
if not babel_style:
strings = tuple(x for x in strings if x is not None)
if not strings:
continue
else:
if len(strings) == 1:
strings = strings[0]
else:
strings = tuple(strings)
yield node.lineno, node.node.name, strings
class _CommentFinder(object):
"""Helper class to find comments in a token stream. Can only
find comments for gettext calls forwards. Once the comment
from line 4 is found, a comment for line 1 will not return a
usable value.
"""
def __init__(self, tokens, comment_tags):
self.tokens = tokens
self.comment_tags = comment_tags
self.offset = 0
self.last_lineno = 0
def find_backwards(self, offset):
try:
for _, token_type, token_value in \
reversed(self.tokens[self.offset:offset]):
if token_type in ('comment', 'linecomment'):
try:
prefix, comment = token_value.split(None, 1)
except ValueError:
continue
if prefix in self.comment_tags:
return [comment.rstrip()]
return []
finally:
self.offset = offset
def find_comments(self, lineno):
if not self.comment_tags or self.last_lineno > lineno:
return []
for idx, (token_lineno, _, _) in enumerate(self.tokens[self.offset:]):
if token_lineno > lineno:
return self.find_backwards(self.offset + idx)
return self.find_backwards(len(self.tokens))
def babel_extract(fileobj, keywords, comment_tags, options):
"""Babel extraction method for Jinja templates.
.. versionchanged:: 2.3
Basic support for translation comments was added. If `comment_tags`
is now set to a list of keywords for extraction, the extractor will
try to find the best preceding comment that begins with one of the
keywords. For best results, make sure to not have more than one
gettext call in one line of code and the matching comment in the
same line or the line before.
.. versionchanged:: 2.5.1
The `newstyle_gettext` flag can be set to `True` to enable newstyle
gettext calls.
.. versionchanged:: 2.7
A `silent` option can now be provided. If set to `False` template
syntax errors are propagated instead of being ignored.
:param fileobj: the file-like object the messages should be extracted from
:param keywords: a list of keywords (i.e. function names) that should be
recognized as translation functions
:param comment_tags: a list of translator tags to search for and include
in the results.
:param options: a dictionary of additional options (optional)
:return: an iterator over ``(lineno, funcname, message, comments)`` tuples.
(comments will be empty currently)
"""
extensions = set()
for extension in options.get('extensions', '').split(','):
extension = extension.strip()
if not extension:
continue
extensions.add(import_string(extension))
if InternationalizationExtension not in extensions:
extensions.add(InternationalizationExtension)
def getbool(options, key, default=False):
return options.get(key, str(default)).lower() in \
('1', 'on', 'yes', 'true')
silent = getbool(options, 'silent', True)
environment = Environment(
options.get('block_start_string', BLOCK_START_STRING),
options.get('block_end_string', BLOCK_END_STRING),
options.get('variable_start_string', VARIABLE_START_STRING),
options.get('variable_end_string', VARIABLE_END_STRING),
options.get('comment_start_string', COMMENT_START_STRING),
options.get('comment_end_string', COMMENT_END_STRING),
options.get('line_statement_prefix') or LINE_STATEMENT_PREFIX,
options.get('line_comment_prefix') or LINE_COMMENT_PREFIX,
getbool(options, 'trim_blocks', TRIM_BLOCKS),
getbool(options, 'lstrip_blocks', LSTRIP_BLOCKS),
NEWLINE_SEQUENCE,
getbool(options, 'keep_trailing_newline', KEEP_TRAILING_NEWLINE),
frozenset(extensions),
cache_size=0,
auto_reload=False
)
if getbool(options, 'newstyle_gettext'):
environment.newstyle_gettext = True
source = fileobj.read().decode(options.get('encoding', 'utf-8'))
try:
node = environment.parse(source)
tokens = list(environment.lex(environment.preprocess(source)))
except TemplateSyntaxError as e:
if not silent:
raise
# skip templates with syntax errors
return
finder = _CommentFinder(tokens, comment_tags)
for lineno, func, message in extract_from_ast(node, keywords):
yield lineno, func, message, finder.find_comments(lineno)
#: nicer import names
i18n = InternationalizationExtension
do = ExprStmtExtension
loopcontrols = LoopControlExtension
with_ = WithExtension
autoescape = AutoEscapeExtension
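# Usage sketch for the aliases above (illustrative): extensions are passed to
# the environment by import name or class::
#
#     env = Environment(extensions=['jinja2.ext.i18n', 'jinja2.ext.do',
#                                   'jinja2.ext.loopcontrols'])
#     env.install_null_translations(newstyle=True)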

View File

@@ -1,987 +0,0 @@
# -*- coding: utf-8 -*-
"""
jinja2.filters
~~~~~~~~~~~~~~
Bundled jinja filters.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import re
import math
from random import choice
from operator import itemgetter
from itertools import groupby
from jinja2.utils import Markup, escape, pformat, urlize, soft_unicode, \
unicode_urlencode
from jinja2.runtime import Undefined
from jinja2.exceptions import FilterArgumentError
from jinja2._compat import next, imap, string_types, text_type, iteritems
_word_re = re.compile(r'\w+(?u)')
def contextfilter(f):
"""Decorator for marking context dependent filters. The current
:class:`Context` will be passed as first argument.
"""
f.contextfilter = True
return f
def evalcontextfilter(f):
"""Decorator for marking eval-context dependent filters. An eval
context object is passed as first argument. For more information
about the eval context, see :ref:`eval-context`.
.. versionadded:: 2.4
"""
f.evalcontextfilter = True
return f
def environmentfilter(f):
"""Decorator for marking evironment dependent filters. The current
:class:`Environment` is passed to the filter as first argument.
"""
f.environmentfilter = True
return f
def make_attrgetter(environment, attribute):
"""Returns a callable that looks up the given attribute from a
passed object with the rules of the environment. Dots are allowed
to access attributes of attributes. Integer parts in paths are
looked up as integers.
"""
if not isinstance(attribute, string_types) \
or ('.' not in attribute and not attribute.isdigit()):
return lambda x: environment.getitem(x, attribute)
attribute = attribute.split('.')
def attrgetter(item):
for part in attribute:
if part.isdigit():
part = int(part)
item = environment.getitem(item, part)
return item
return attrgetter
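# A minimal sketch (not from the original file) of how make_attrgetter
# resolves dotted paths through environment.getitem; the sample data is an
# assumption.
def _example_make_attrgetter():
    from jinja2 import Environment
    getter = make_attrgetter(Environment(), 'user.name')
    return getter({'user': {'name': 'alice'}})  # -> 'alice'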
def do_forceescape(value):
"""Enforce HTML escaping. This will probably double escape variables."""
if hasattr(value, '__html__'):
value = value.__html__()
return escape(text_type(value))
def do_urlencode(value):
"""Escape strings for use in URLs (uses UTF-8 encoding). It accepts both
dictionaries and regular strings as well as pairwise iterables.
.. versionadded:: 2.7
"""
itemiter = None
if isinstance(value, dict):
itemiter = iteritems(value)
elif not isinstance(value, string_types):
try:
itemiter = iter(value)
except TypeError:
pass
if itemiter is None:
return unicode_urlencode(value)
return u'&'.join(unicode_urlencode(k) + '=' +
unicode_urlencode(v) for k, v in itemiter)
@evalcontextfilter
def do_replace(eval_ctx, s, old, new, count=None):
"""Return a copy of the value with all occurrences of a substring
replaced with a new one. The first argument is the substring
that should be replaced, the second is the replacement string.
If the optional third argument ``count`` is given, only the first
``count`` occurrences are replaced:
.. sourcecode:: jinja
{{ "Hello World"|replace("Hello", "Goodbye") }}
-> Goodbye World
{{ "aaaaargh"|replace("a", "d'oh, ", 2) }}
-> d'oh, d'oh, aaargh
"""
if count is None:
count = -1
if not eval_ctx.autoescape:
return text_type(s).replace(text_type(old), text_type(new), count)
if hasattr(old, '__html__') or hasattr(new, '__html__') and \
not hasattr(s, '__html__'):
s = escape(s)
else:
s = soft_unicode(s)
return s.replace(soft_unicode(old), soft_unicode(new), count)
def do_upper(s):
"""Convert a value to uppercase."""
return soft_unicode(s).upper()
def do_lower(s):
"""Convert a value to lowercase."""
return soft_unicode(s).lower()
@evalcontextfilter
def do_xmlattr(_eval_ctx, d, autospace=True):
"""Create an SGML/XML attribute string based on the items in a dict.
All values that are neither `none` nor `undefined` are automatically
escaped:
.. sourcecode:: html+jinja
<ul{{ {'class': 'my_list', 'missing': none,
'id': 'list-%d'|format(variable)}|xmlattr }}>
...
</ul>
Results in something like this:
.. sourcecode:: html
<ul class="my_list" id="list-42">
...
</ul>
As you can see, it automatically prepends a space in front of the output
if the filter returned something, unless the second parameter is false.
"""
rv = u' '.join(
u'%s="%s"' % (escape(key), escape(value))
for key, value in iteritems(d)
if value is not None and not isinstance(value, Undefined)
)
if autospace and rv:
rv = u' ' + rv
if _eval_ctx.autoescape:
rv = Markup(rv)
return rv
def do_capitalize(s):
"""Capitalize a value. The first character will be uppercase, all others
lowercase.
"""
return soft_unicode(s).capitalize()
def do_title(s):
"""Return a titlecased version of the value. I.e. words will start with
uppercase letters, all remaining characters are lowercase.
"""
rv = []
for item in re.compile(r'([-\s]+)(?u)').split(s):
if not item:
continue
rv.append(item[0].upper() + item[1:].lower())
return ''.join(rv)
def do_dictsort(value, case_sensitive=False, by='key'):
"""Sort a dict and yield (key, value) pairs. Because python dicts are
unsorted you may want to use this function to order them by either
key or value:
.. sourcecode:: jinja
{% for item in mydict|dictsort %}
sort the dict by key, case insensitive
{% for item in mydict|dictsort(true) %}
sort the dict by key, case sensitive
{% for item in mydict|dictsort(false, 'value') %}
sort the dict by value, case insensitive.
"""
if by == 'key':
pos = 0
elif by == 'value':
pos = 1
else:
raise FilterArgumentError('You can only sort by either '
'"key" or "value"')
def sort_func(item):
value = item[pos]
if isinstance(value, string_types) and not case_sensitive:
value = value.lower()
return value
return sorted(value.items(), key=sort_func)
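# A minimal sketch (not from the original file) of the dictsort filter used
# from a template; the sample dict is an assumption.
def _example_dictsort():
    from jinja2 import Environment
    tmpl = Environment().from_string("{{ d|dictsort }}")
    return tmpl.render(d={'b': 1, 'a': 2})  # -> "[('a', 2), ('b', 1)]"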
@environmentfilter
def do_sort(environment, value, reverse=False, case_sensitive=False,
attribute=None):
"""Sort an iterable. Per default it sorts ascending, if you pass it
true as first argument it will reverse the sorting.
If the iterable is made of strings the third parameter can be used to
control the case sensitiveness of the comparison which is disabled by
default.
.. sourcecode:: jinja
{% for item in iterable|sort %}
...
{% endfor %}
It is also possible to sort by an attribute (for example to sort
by the date of an object) by specifying the `attribute` parameter:
.. sourcecode:: jinja
{% for item in iterable|sort(attribute='date') %}
...
{% endfor %}
.. versionchanged:: 2.6
The `attribute` parameter was added.
"""
if not case_sensitive:
def sort_func(item):
if isinstance(item, string_types):
item = item.lower()
return item
else:
sort_func = None
if attribute is not None:
getter = make_attrgetter(environment, attribute)
def sort_func(item, processor=sort_func or (lambda x: x)):
return processor(getter(item))
return sorted(value, key=sort_func, reverse=reverse)
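# A minimal sketch (not from the original file) of sorting by an attribute
# and joining the result; the user records are assumptions.
def _example_sort_by_attribute():
    from jinja2 import Environment
    tmpl = Environment().from_string(
        "{{ users|sort(attribute='name')|join(', ', attribute='name') }}")
    return tmpl.render(users=[{'name': 'bob'}, {'name': 'alice'}])
    # -> 'alice, bob'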
def do_default(value, default_value=u'', boolean=False):
"""If the value is undefined it will return the passed default value,
otherwise the value of the variable:
.. sourcecode:: jinja
{{ my_variable|default('my_variable is not defined') }}
This will output the value of ``my_variable`` if the variable was
defined, otherwise ``'my_variable is not defined'``. If you want
to use default with variables that evaluate to false you have to
set the second parameter to `true`:
.. sourcecode:: jinja
{{ ''|default('the string was empty', true) }}
"""
if isinstance(value, Undefined) or (boolean and not value):
return default_value
return value
@evalcontextfilter
def do_join(eval_ctx, value, d=u'', attribute=None):
"""Return a string which is the concatenation of the strings in the
sequence. The separator between elements is an empty string per
default, you can define it with the optional parameter:
.. sourcecode:: jinja
{{ [1, 2, 3]|join('|') }}
-> 1|2|3
{{ [1, 2, 3]|join }}
-> 123
It is also possible to join certain attributes of an object:
.. sourcecode:: jinja
{{ users|join(', ', attribute='username') }}
.. versionadded:: 2.6
The `attribute` parameter was added.
"""
if attribute is not None:
value = imap(make_attrgetter(eval_ctx.environment, attribute), value)
# no automatic escaping? joining is a lot easier then
if not eval_ctx.autoescape:
return text_type(d).join(imap(text_type, value))
# if the delimiter doesn't have an html representation we check
# if any of the items has. If yes we do a coercion to Markup
if not hasattr(d, '__html__'):
value = list(value)
do_escape = False
for idx, item in enumerate(value):
if hasattr(item, '__html__'):
do_escape = True
else:
value[idx] = text_type(item)
if do_escape:
d = escape(d)
else:
d = text_type(d)
return d.join(value)
# no html involved, do normal joining
return soft_unicode(d).join(imap(soft_unicode, value))
def do_center(value, width=80):
"""Centers the value in a field of a given width."""
return text_type(value).center(width)
@environmentfilter
def do_first(environment, seq):
"""Return the first item of a sequence."""
try:
return next(iter(seq))
except StopIteration:
return environment.undefined('No first item, sequence was empty.')
@environmentfilter
def do_last(environment, seq):
"""Return the last item of a sequence."""
try:
return next(iter(reversed(seq)))
except StopIteration:
return environment.undefined('No last item, sequence was empty.')
@environmentfilter
def do_random(environment, seq):
"""Return a random item from the sequence."""
try:
return choice(seq)
except IndexError:
return environment.undefined('No random item, sequence was empty.')
def do_filesizeformat(value, binary=False):
"""Format the value like a 'human-readable' file size (i.e. 13 kB,
4.1 MB, 102 Bytes, etc). Per default decimal prefixes are used (Mega,
Giga, etc.), if the second parameter is set to `True` the binary
prefixes are used (Mebi, Gibi).
"""
bytes = float(value)
base = binary and 1024 or 1000
prefixes = [
(binary and 'KiB' or 'kB'),
(binary and 'MiB' or 'MB'),
(binary and 'GiB' or 'GB'),
(binary and 'TiB' or 'TB'),
(binary and 'PiB' or 'PB'),
(binary and 'EiB' or 'EB'),
(binary and 'ZiB' or 'ZB'),
(binary and 'YiB' or 'YB')
]
if bytes == 1:
return '1 Byte'
elif bytes < base:
return '%d Bytes' % bytes
else:
for i, prefix in enumerate(prefixes):
unit = base ** (i + 2)
if bytes < unit:
return '%.1f %s' % ((base * bytes / unit), prefix)
return '%.1f %s' % ((base * bytes / unit), prefix)
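# A minimal sketch (not from the original file) showing decimal versus
# binary prefixes for comparable byte counts.
def _example_filesizeformat():
    return do_filesizeformat(1000000), do_filesizeformat(1048576, binary=True)
    # -> ('1.0 MB', '1.0 MiB')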
def do_pprint(value, verbose=False):
"""Pretty print a variable. Useful for debugging.
From Jinja 1.2 onwards you can pass it a parameter.  If this parameter
is truthy the output will be more verbose (this requires `pretty`).
"""
return pformat(value, verbose=verbose)
@evalcontextfilter
def do_urlize(eval_ctx, value, trim_url_limit=None, nofollow=False):
"""Converts URLs in plain text into clickable links.
If you pass the filter an additional integer it will shorten the urls
to that many characters.  A third argument exists that makes the urls
"nofollow":
.. sourcecode:: jinja
{{ mytext|urlize(40, true) }}
links are shortened to 40 chars and defined with rel="nofollow"
"""
rv = urlize(value, trim_url_limit, nofollow)
if eval_ctx.autoescape:
rv = Markup(rv)
return rv
def do_indent(s, width=4, indentfirst=False):
"""Return a copy of the passed string, each line indented by
4 spaces. The first line is not indented. If you want to
change the number of spaces or indent the first line too
you can pass additional parameters to the filter:
.. sourcecode:: jinja
{{ mytext|indent(2, true) }}
indent by two spaces and indent the first line too.
"""
indention = u' ' * width
rv = (u'\n' + indention).join(s.splitlines())
if indentfirst:
rv = indention + rv
return rv
def do_truncate(s, length=255, killwords=False, end='...'):
"""Return a truncated copy of the string. The length is specified
with the first parameter which defaults to ``255``. If the second
parameter is ``true`` the filter will cut the text at length. Otherwise
it will discard the last word. If the text was in fact
truncated it will append an ellipsis sign (``"..."``). If you want a
different ellipsis sign than ``"..."`` you can specify it using the
third parameter.
.. sourcecode:: jinja
{{ "foo bar"|truncate(5) }}
-> "foo ..."
{{ "foo bar"|truncate(5, True) }}
-> "foo b..."
"""
if len(s) <= length:
return s
elif killwords:
return s[:length] + end
words = s.split(' ')
result = []
m = 0
for word in words:
m += len(word) + 1
if m > length:
break
result.append(word)
result.append(end)
return u' '.join(result)
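# A minimal sketch (not from the original file) of truncation with and
# without killwords.
def _example_truncate():
    return do_truncate('foo bar baz', 9), do_truncate('foo bar baz', 9, True)
    # -> ('foo bar ...', 'foo bar b...')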
@environmentfilter
def do_wordwrap(environment, s, width=79, break_long_words=True,
wrapstring=None):
"""
Return a copy of the string passed to the filter wrapped after
``79`` characters. You can override this default using the first
parameter. If you set the second parameter to `false` Jinja will not
split words apart if they are longer than `width`. By default, the newlines
will be the default newlines for the environment, but this can be changed
using the wrapstring keyword argument.
.. versionadded:: 2.7
Added support for the `wrapstring` parameter.
"""
if not wrapstring:
wrapstring = environment.newline_sequence
import textwrap
return wrapstring.join(textwrap.wrap(s, width=width, expand_tabs=False,
replace_whitespace=False,
break_long_words=break_long_words))
def do_wordcount(s):
"""Count the words in that string."""
return len(_word_re.findall(s))
def do_int(value, default=0):
"""Convert the value into an integer. If the
conversion doesn't work it will return ``0``. You can
override this default using the first parameter.
"""
try:
return int(value)
except (TypeError, ValueError):
# this quirk is necessary so that "42.23"|int gives 42.
try:
return int(float(value))
except (TypeError, ValueError):
return default
def do_float(value, default=0.0):
"""Convert the value into a floating point number. If the
conversion doesn't work it will return ``0.0``. You can
override this default using the first parameter.
"""
try:
return float(value)
except (TypeError, ValueError):
return default
def do_format(value, *args, **kwargs):
"""
Apply python string formatting on an object:
.. sourcecode:: jinja
{{ "%s - %s"|format("Hello?", "Foo!") }}
-> Hello? - Foo!
"""
if args and kwargs:
raise FilterArgumentError('can\'t handle positional and keyword '
'arguments at the same time')
return soft_unicode(value) % (kwargs or args)
def do_trim(value):
"""Strip leading and trailing whitespace."""
return soft_unicode(value).strip()
def do_striptags(value):
"""Strip SGML/XML tags and replace adjacent whitespace by one space.
"""
if hasattr(value, '__html__'):
value = value.__html__()
return Markup(text_type(value)).striptags()
def do_slice(value, slices, fill_with=None):
"""Slice an iterator and return a list of lists containing
those items. Useful if you want to create a div containing
three ul tags that represent columns:
.. sourcecode:: html+jinja
<div class="columwrapper">
{%- for column in items|slice(3) %}
<ul class="column-{{ loop.index }}">
{%- for item in column %}
<li>{{ item }}</li>
{%- endfor %}
</ul>
{%- endfor %}
</div>
If you pass it a second argument it's used to fill missing
values on the last iteration.
"""
seq = list(value)
length = len(seq)
items_per_slice = length // slices
slices_with_extra = length % slices
offset = 0
for slice_number in range(slices):
start = offset + slice_number * items_per_slice
if slice_number < slices_with_extra:
offset += 1
end = offset + (slice_number + 1) * items_per_slice
tmp = seq[start:end]
if fill_with is not None and slice_number >= slices_with_extra:
tmp.append(fill_with)
yield tmp
def do_batch(value, linecount, fill_with=None):
"""
A filter that batches items. It works pretty much like `slice`
just the other way round. It returns a list of lists with the
given number of items. If you provide a second parameter this
is used to fill up missing items. See this example:
.. sourcecode:: html+jinja
<table>
{%- for row in items|batch(3, '&nbsp;') %}
<tr>
{%- for column in row %}
<td>{{ column }}</td>
{%- endfor %}
</tr>
{%- endfor %}
</table>
"""
result = []
tmp = []
for item in value:
if len(tmp) == linecount:
yield tmp
tmp = []
tmp.append(item)
if tmp:
if fill_with is not None and len(tmp) < linecount:
tmp += [fill_with] * (linecount - len(tmp))
yield tmp
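# A minimal sketch (not from the original file) contrasting slice (a fixed
# number of columns) with batch (rows of a fixed size).
def _example_slice_and_batch():
    items = list(range(7))
    return list(do_slice(items, 3)), list(do_batch(items, 3))
    # -> ([[0, 1, 2], [3, 4], [5, 6]], [[0, 1, 2], [3, 4, 5], [6]])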
def do_round(value, precision=0, method='common'):
"""Round the number to a given precision. The first
parameter specifies the precision (default is ``0``), the
second the rounding method:
- ``'common'`` rounds either up or down
- ``'ceil'`` always rounds up
- ``'floor'`` always rounds down
If you don't specify a method ``'common'`` is used.
.. sourcecode:: jinja
{{ 42.55|round }}
-> 43.0
{{ 42.55|round(1, 'floor') }}
-> 42.5
Note that even if rounded to 0 precision, a float is returned. If
you need a real integer, pipe it through `int`:
.. sourcecode:: jinja
{{ 42.55|round|int }}
-> 43
"""
if not method in ('common', 'ceil', 'floor'):
raise FilterArgumentError('method must be common, ceil or floor')
if method == 'common':
return round(value, precision)
func = getattr(math, method)
return func(value * (10 ** precision)) / (10 ** precision)
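# A minimal sketch (not from the original file) of the 'common' and 'floor'
# rounding methods shown in the docstring above.
def _example_round():
    return do_round(42.55), do_round(42.55, 1, 'floor')
    # -> (43.0, 42.5)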
@environmentfilter
def do_groupby(environment, value, attribute):
"""Group a sequence of objects by a common attribute.
If you for example have a list of dicts or objects that represent persons
with `gender`, `first_name` and `last_name` attributes and you want to
group all users by genders you can do something like the following
snippet:
.. sourcecode:: html+jinja
<ul>
{% for group in persons|groupby('gender') %}
<li>{{ group.grouper }}<ul>
{% for person in group.list %}
<li>{{ person.first_name }} {{ person.last_name }}</li>
{% endfor %}</ul></li>
{% endfor %}
</ul>
Additionally it's possible to use tuple unpacking for the grouper and
list:
.. sourcecode:: html+jinja
<ul>
{% for grouper, list in persons|groupby('gender') %}
...
{% endfor %}
</ul>
As you can see the item we're grouping by is stored in the `grouper`
attribute and the `list` contains all the objects that have this grouper
in common.
.. versionchanged:: 2.6
It's now possible to use dotted notation to group by the child
attribute of another attribute.
"""
expr = make_attrgetter(environment, attribute)
return sorted(map(_GroupTuple, groupby(sorted(value, key=expr), expr)))
class _GroupTuple(tuple):
__slots__ = ()
grouper = property(itemgetter(0))
list = property(itemgetter(1))
def __new__(cls, xxx_todo_changeme):
(key, value) = xxx_todo_changeme
return tuple.__new__(cls, (key, list(value)))
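# A minimal sketch (not from the original file) of the groupby filter; the
# person records and the 'city' key are assumptions.
def _example_groupby():
    from jinja2 import Environment
    tmpl = Environment().from_string(
        "{% for g in people|groupby('city') %}"
        "{{ g.grouper }}:{{ g.list|length }} {% endfor %}")
    return tmpl.render(people=[{'city': 'NYC'},
                               {'city': 'LA'},
                               {'city': 'NYC'}])
    # -> u'LA:1 NYC:2 '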
@environmentfilter
def do_sum(environment, iterable, attribute=None, start=0):
"""Returns the sum of a sequence of numbers plus the value of parameter
'start' (which defaults to 0). When the sequence is empty it returns
start.
It is also possible to sum up only certain attributes:
.. sourcecode:: jinja
Total: {{ items|sum(attribute='price') }}
.. versionchanged:: 2.6
The `attribute` parameter was added to allow summing up over
attributes. Also the `start` parameter was moved on to the right.
"""
if attribute is not None:
iterable = imap(make_attrgetter(environment, attribute), iterable)
return sum(iterable, start)
def do_list(value):
"""Convert the value into a list. If it was a string the returned list
will be a list of characters.
"""
return list(value)
def do_mark_safe(value):
"""Mark the value as safe which means that in an environment with automatic
escaping enabled this variable will not be escaped.
"""
return Markup(value)
def do_mark_unsafe(value):
"""Mark a value as unsafe. This is the reverse operation for :func:`safe`."""
return text_type(value)
def do_reverse(value):
"""Reverse the object or return an iterator the iterates over it the other
way round.
"""
if isinstance(value, string_types):
return value[::-1]
try:
return reversed(value)
except TypeError:
try:
rv = list(value)
rv.reverse()
return rv
except TypeError:
raise FilterArgumentError('argument must be iterable')
@environmentfilter
def do_attr(environment, obj, name):
"""Get an attribute of an object. ``foo|attr("bar")`` works like
``foo["bar"]`` just that always an attribute is returned and items are not
looked up.
See :ref:`Notes on subscriptions <notes-on-subscriptions>` for more details.
"""
try:
name = str(name)
except UnicodeError:
pass
else:
try:
value = getattr(obj, name)
except AttributeError:
pass
else:
if environment.sandboxed and not \
environment.is_safe_attribute(obj, name, value):
return environment.unsafe_undefined(obj, name)
return value
return environment.undefined(obj=obj, name=name)
@contextfilter
def do_map(*args, **kwargs):
"""Applies a filter on a sequence of objects or looks up an attribute.
This is useful when dealing with lists of objects but you are really
only interested in a certain value of it.
The basic usage is mapping on an attribute. Imagine you have a list
of users but you are only interested in a list of usernames:
.. sourcecode:: jinja
Users on this page: {{ users|map(attribute='username')|join(', ') }}
Alternatively you can let it invoke a filter by passing the name of the
filter and the arguments afterwards. A good example would be applying a
text conversion filter on a sequence:
.. sourcecode:: jinja
Users on this page: {{ titles|map('lower')|join(', ') }}
.. versionadded:: 2.7
"""
context = args[0]
seq = args[1]
if len(args) == 2 and 'attribute' in kwargs:
attribute = kwargs.pop('attribute')
if kwargs:
raise FilterArgumentError('Unexpected keyword argument %r' %
next(iter(kwargs)))
func = make_attrgetter(context.environment, attribute)
else:
try:
name = args[2]
args = args[3:]
except LookupError:
raise FilterArgumentError('map requires a filter argument')
func = lambda item: context.environment.call_filter(
name, item, args, kwargs, context=context)
if seq:
for item in seq:
yield func(item)
@contextfilter
def do_select(*args, **kwargs):
"""Filters a sequence of objects by appying a test to either the object
or the attribute and only selecting the ones with the test succeeding.
Example usage:
.. sourcecode:: jinja
{{ numbers|select("odd") }}
.. versionadded:: 2.7
"""
return _select_or_reject(args, kwargs, lambda x: x, False)
@contextfilter
def do_reject(*args, **kwargs):
"""Filters a sequence of objects by appying a test to either the object
or the attribute and rejecting the ones with the test succeeding.
Example usage:
.. sourcecode:: jinja
{{ numbers|reject("odd") }}
.. versionadded:: 2.7
"""
return _select_or_reject(args, kwargs, lambda x: not x, False)
@contextfilter
def do_selectattr(*args, **kwargs):
"""Filters a sequence of objects by appying a test to either the object
or the attribute and only selecting the ones with the test succeeding.
Example usage:
.. sourcecode:: jinja
{{ users|selectattr("is_active") }}
{{ users|selectattr("email", "none") }}
.. versionadded:: 2.7
"""
return _select_or_reject(args, kwargs, lambda x: x, True)
@contextfilter
def do_rejectattr(*args, **kwargs):
"""Filters a sequence of objects by appying a test to either the object
or the attribute and rejecting the ones with the test succeeding.
.. sourcecode:: jinja
{{ users|rejectattr("is_active") }}
{{ users|rejectattr("email", "none") }}
.. versionadded:: 2.7
"""
return _select_or_reject(args, kwargs, lambda x: not x, True)
def _select_or_reject(args, kwargs, modfunc, lookup_attr):
context = args[0]
seq = args[1]
if lookup_attr:
try:
attr = args[2]
except LookupError:
raise FilterArgumentError('Missing parameter for attribute name')
transfunc = make_attrgetter(context.environment, attr)
off = 1
else:
off = 0
transfunc = lambda x: x
try:
name = args[2 + off]
args = args[3 + off:]
func = lambda item: context.environment.call_test(
name, item, args, kwargs)
except LookupError:
func = bool
if seq:
for item in seq:
if modfunc(func(transfunc(item))):
yield item
FILTERS = {
'attr': do_attr,
'replace': do_replace,
'upper': do_upper,
'lower': do_lower,
'escape': escape,
'e': escape,
'forceescape': do_forceescape,
'capitalize': do_capitalize,
'title': do_title,
'default': do_default,
'd': do_default,
'join': do_join,
'count': len,
'dictsort': do_dictsort,
'sort': do_sort,
'length': len,
'reverse': do_reverse,
'center': do_center,
'indent': do_indent,
'first': do_first,
'last': do_last,
'map': do_map,
'random': do_random,
'reject': do_reject,
'rejectattr': do_rejectattr,
'filesizeformat': do_filesizeformat,
'pprint': do_pprint,
'truncate': do_truncate,
'wordwrap': do_wordwrap,
'wordcount': do_wordcount,
'int': do_int,
'float': do_float,
'string': soft_unicode,
'list': do_list,
'urlize': do_urlize,
'format': do_format,
'trim': do_trim,
'striptags': do_striptags,
'select': do_select,
'selectattr': do_selectattr,
'slice': do_slice,
'batch': do_batch,
'sum': do_sum,
'abs': abs,
'round': do_round,
'groupby': do_groupby,
'safe': do_mark_safe,
'xmlattr': do_xmlattr,
'urlencode': do_urlencode
}

View File

@ -1,733 +0,0 @@
# -*- coding: utf-8 -*-
"""
jinja2.lexer
~~~~~~~~~~~~
This module implements a Jinja / Python combination lexer. The
`Lexer` class provided by this module is used to do some preprocessing
for Jinja.
On the one hand it filters out invalid operators like the bitshift
operators we don't allow in templates. On the other hand it separates
template code and python code in expressions.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import re
from operator import itemgetter
from collections import deque
from jinja2.exceptions import TemplateSyntaxError
from jinja2.utils import LRUCache
from jinja2._compat import next, iteritems, implements_iterator, text_type, \
intern
# cache for the lexers. Exists in order to be able to have multiple
# environments with the same lexer
_lexer_cache = LRUCache(50)
# static regular expressions
whitespace_re = re.compile(r'\s+', re.U)
string_re = re.compile(r"('([^'\\]*(?:\\.[^'\\]*)*)'"
r'|"([^"\\]*(?:\\.[^"\\]*)*)")', re.S)
integer_re = re.compile(r'\d+')
# we use the unicode identifier rule if this python version is able
# to handle unicode identifiers, otherwise the standard ASCII one.
try:
compile('föö', '<unknown>', 'eval')
except SyntaxError:
name_re = re.compile(r'\b[a-zA-Z_][a-zA-Z0-9_]*\b')
else:
from jinja2 import _stringdefs
name_re = re.compile(r'[%s][%s]*' % (_stringdefs.xid_start,
_stringdefs.xid_continue))
float_re = re.compile(r'(?<!\.)\d+\.\d+')
newline_re = re.compile(r'(\r\n|\r|\n)')
# intern the tokens and keep references to them
TOKEN_ADD = intern('add')
TOKEN_ASSIGN = intern('assign')
TOKEN_COLON = intern('colon')
TOKEN_COMMA = intern('comma')
TOKEN_DIV = intern('div')
TOKEN_DOT = intern('dot')
TOKEN_EQ = intern('eq')
TOKEN_FLOORDIV = intern('floordiv')
TOKEN_GT = intern('gt')
TOKEN_GTEQ = intern('gteq')
TOKEN_LBRACE = intern('lbrace')
TOKEN_LBRACKET = intern('lbracket')
TOKEN_LPAREN = intern('lparen')
TOKEN_LT = intern('lt')
TOKEN_LTEQ = intern('lteq')
TOKEN_MOD = intern('mod')
TOKEN_MUL = intern('mul')
TOKEN_NE = intern('ne')
TOKEN_PIPE = intern('pipe')
TOKEN_POW = intern('pow')
TOKEN_RBRACE = intern('rbrace')
TOKEN_RBRACKET = intern('rbracket')
TOKEN_RPAREN = intern('rparen')
TOKEN_SEMICOLON = intern('semicolon')
TOKEN_SUB = intern('sub')
TOKEN_TILDE = intern('tilde')
TOKEN_WHITESPACE = intern('whitespace')
TOKEN_FLOAT = intern('float')
TOKEN_INTEGER = intern('integer')
TOKEN_NAME = intern('name')
TOKEN_STRING = intern('string')
TOKEN_OPERATOR = intern('operator')
TOKEN_BLOCK_BEGIN = intern('block_begin')
TOKEN_BLOCK_END = intern('block_end')
TOKEN_VARIABLE_BEGIN = intern('variable_begin')
TOKEN_VARIABLE_END = intern('variable_end')
TOKEN_RAW_BEGIN = intern('raw_begin')
TOKEN_RAW_END = intern('raw_end')
TOKEN_COMMENT_BEGIN = intern('comment_begin')
TOKEN_COMMENT_END = intern('comment_end')
TOKEN_COMMENT = intern('comment')
TOKEN_LINESTATEMENT_BEGIN = intern('linestatement_begin')
TOKEN_LINESTATEMENT_END = intern('linestatement_end')
TOKEN_LINECOMMENT_BEGIN = intern('linecomment_begin')
TOKEN_LINECOMMENT_END = intern('linecomment_end')
TOKEN_LINECOMMENT = intern('linecomment')
TOKEN_DATA = intern('data')
TOKEN_INITIAL = intern('initial')
TOKEN_EOF = intern('eof')
# bind operators to token types
operators = {
'+': TOKEN_ADD,
'-': TOKEN_SUB,
'/': TOKEN_DIV,
'//': TOKEN_FLOORDIV,
'*': TOKEN_MUL,
'%': TOKEN_MOD,
'**': TOKEN_POW,
'~': TOKEN_TILDE,
'[': TOKEN_LBRACKET,
']': TOKEN_RBRACKET,
'(': TOKEN_LPAREN,
')': TOKEN_RPAREN,
'{': TOKEN_LBRACE,
'}': TOKEN_RBRACE,
'==': TOKEN_EQ,
'!=': TOKEN_NE,
'>': TOKEN_GT,
'>=': TOKEN_GTEQ,
'<': TOKEN_LT,
'<=': TOKEN_LTEQ,
'=': TOKEN_ASSIGN,
'.': TOKEN_DOT,
':': TOKEN_COLON,
'|': TOKEN_PIPE,
',': TOKEN_COMMA,
';': TOKEN_SEMICOLON
}
reverse_operators = dict([(v, k) for k, v in iteritems(operators)])
assert len(operators) == len(reverse_operators), 'operators dropped'
operator_re = re.compile('(%s)' % '|'.join(re.escape(x) for x in
sorted(operators, key=lambda x: -len(x))))
ignored_tokens = frozenset([TOKEN_COMMENT_BEGIN, TOKEN_COMMENT,
TOKEN_COMMENT_END, TOKEN_WHITESPACE,
TOKEN_LINECOMMENT_BEGIN,
TOKEN_LINECOMMENT_END, TOKEN_LINECOMMENT])
ignore_if_empty = frozenset([TOKEN_WHITESPACE, TOKEN_DATA,
TOKEN_COMMENT, TOKEN_LINECOMMENT])
def _describe_token_type(token_type):
if token_type in reverse_operators:
return reverse_operators[token_type]
return {
TOKEN_COMMENT_BEGIN: 'begin of comment',
TOKEN_COMMENT_END: 'end of comment',
TOKEN_COMMENT: 'comment',
TOKEN_LINECOMMENT: 'comment',
TOKEN_BLOCK_BEGIN: 'begin of statement block',
TOKEN_BLOCK_END: 'end of statement block',
TOKEN_VARIABLE_BEGIN: 'begin of print statement',
TOKEN_VARIABLE_END: 'end of print statement',
TOKEN_LINESTATEMENT_BEGIN: 'begin of line statement',
TOKEN_LINESTATEMENT_END: 'end of line statement',
TOKEN_DATA: 'template data / text',
TOKEN_EOF: 'end of template'
}.get(token_type, token_type)
def describe_token(token):
"""Returns a description of the token."""
if token.type == 'name':
return token.value
return _describe_token_type(token.type)
def describe_token_expr(expr):
"""Like `describe_token` but for token expressions."""
if ':' in expr:
type, value = expr.split(':', 1)
if type == 'name':
return value
else:
type = expr
return _describe_token_type(type)
def count_newlines(value):
"""Count the number of newline characters in the string. This is
useful for extensions that filter a stream.
"""
return len(newline_re.findall(value))
def compile_rules(environment):
"""Compiles all the rules from the environment into a list of rules."""
e = re.escape
rules = [
(len(environment.comment_start_string), 'comment',
e(environment.comment_start_string)),
(len(environment.block_start_string), 'block',
e(environment.block_start_string)),
(len(environment.variable_start_string), 'variable',
e(environment.variable_start_string))
]
if environment.line_statement_prefix is not None:
rules.append((len(environment.line_statement_prefix), 'linestatement',
r'^[ \t\v]*' + e(environment.line_statement_prefix)))
if environment.line_comment_prefix is not None:
rules.append((len(environment.line_comment_prefix), 'linecomment',
r'(?:^|(?<=\S))[^\S\r\n]*' +
e(environment.line_comment_prefix)))
return [x[1:] for x in sorted(rules, reverse=True)]
class Failure(object):
"""Class that raises a `TemplateSyntaxError` if called.
Used by the `Lexer` to specify known errors.
"""
def __init__(self, message, cls=TemplateSyntaxError):
self.message = message
self.error_class = cls
def __call__(self, lineno, filename):
raise self.error_class(self.message, lineno, filename)
class Token(tuple):
"""Token class."""
__slots__ = ()
lineno, type, value = (property(itemgetter(x)) for x in range(3))
def __new__(cls, lineno, type, value):
return tuple.__new__(cls, (lineno, intern(str(type)), value))
def __str__(self):
if self.type in reverse_operators:
return reverse_operators[self.type]
elif self.type == 'name':
return self.value
return self.type
def test(self, expr):
"""Test a token against a token expression. This can either be a
token type or ``'token_type:token_value'``. This can only test
against string values and types.
"""
# here we do a regular string equality check as test_any is usually
# passed an iterable of non-interned strings.
if self.type == expr:
return True
elif ':' in expr:
return expr.split(':', 1) == [self.type, self.value]
return False
def test_any(self, *iterable):
"""Test against multiple token expressions."""
for expr in iterable:
if self.test(expr):
return True
return False
def __repr__(self):
return 'Token(%r, %r, %r)' % (
self.lineno,
self.type,
self.value
)
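# A minimal sketch (not from the original file) of token expression tests,
# which accept either a bare type or 'type:value'.
def _example_token_test():
    tok = Token(1, 'name', 'foo')
    return tok.test('name'), tok.test('name:foo'), tok.test('name:bar')
    # -> (True, True, False)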
@implements_iterator
class TokenStreamIterator(object):
"""The iterator for tokenstreams. Iterate over the stream
until the eof token is reached.
"""
def __init__(self, stream):
self.stream = stream
def __iter__(self):
return self
def __next__(self):
token = self.stream.current
if token.type is TOKEN_EOF:
self.stream.close()
raise StopIteration()
next(self.stream)
return token
@implements_iterator
class TokenStream(object):
"""A token stream is an iterable that yields :class:`Token`\s. The
parser however does not iterate over it but calls :meth:`next` to go
one token ahead. The current active token is stored as :attr:`current`.
"""
def __init__(self, generator, name, filename):
self._iter = iter(generator)
self._pushed = deque()
self.name = name
self.filename = filename
self.closed = False
self.current = Token(1, TOKEN_INITIAL, '')
next(self)
def __iter__(self):
return TokenStreamIterator(self)
def __bool__(self):
return bool(self._pushed) or self.current.type is not TOKEN_EOF
__nonzero__ = __bool__ # py2
eos = property(lambda x: not x, doc="Are we at the end of the stream?")
def push(self, token):
"""Push a token back to the stream."""
self._pushed.append(token)
def look(self):
"""Look at the next token."""
old_token = next(self)
result = self.current
self.push(result)
self.current = old_token
return result
def skip(self, n=1):
"""Got n tokens ahead."""
for x in range(n):
next(self)
def next_if(self, expr):
"""Perform the token test and return the token if it matched.
Otherwise the return value is `None`.
"""
if self.current.test(expr):
return next(self)
def skip_if(self, expr):
"""Like :meth:`next_if` but only returns `True` or `False`."""
return self.next_if(expr) is not None
def __next__(self):
"""Go one token ahead and return the old one"""
rv = self.current
if self._pushed:
self.current = self._pushed.popleft()
elif self.current.type is not TOKEN_EOF:
try:
self.current = next(self._iter)
except StopIteration:
self.close()
return rv
def close(self):
"""Close the stream."""
self.current = Token(self.current.lineno, TOKEN_EOF, '')
self._iter = None
self.closed = True
def expect(self, expr):
"""Expect a given token type and return it. This accepts the same
argument as :meth:`jinja2.lexer.Token.test`.
"""
if not self.current.test(expr):
expr = describe_token_expr(expr)
if self.current.type is TOKEN_EOF:
raise TemplateSyntaxError('unexpected end of template, '
'expected %r.' % expr,
self.current.lineno,
self.name, self.filename)
raise TemplateSyntaxError("expected token %r, got %r" %
(expr, describe_token(self.current)),
self.current.lineno,
self.name, self.filename)
try:
return self.current
finally:
next(self)
def get_lexer(environment):
"""Return a lexer which is probably cached."""
key = (environment.block_start_string,
environment.block_end_string,
environment.variable_start_string,
environment.variable_end_string,
environment.comment_start_string,
environment.comment_end_string,
environment.line_statement_prefix,
environment.line_comment_prefix,
environment.trim_blocks,
environment.lstrip_blocks,
environment.newline_sequence,
environment.keep_trailing_newline)
lexer = _lexer_cache.get(key)
if lexer is None:
lexer = Lexer(environment)
_lexer_cache[key] = lexer
return lexer
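# A minimal sketch (not from the original file) showing that Environment.lex
# drives this lexer and yields raw (lineno, token_type, value) tuples before
# they are wrapped into Token instances.
def _example_lex():
    from jinja2 import Environment
    stream = list(Environment().lex(u'Hello {{ name }}!'))
    # e.g. the stream starts with (1, 'data', u'Hello ') followed by
    # (1, 'variable_begin', u'{{')
    return stream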
class Lexer(object):
"""Class that implements a lexer for a given environment. Automatically
created by the environment class, usually you don't have to do that.
Note that the lexer is not automatically bound to an environment.
Multiple environments can share the same lexer.
"""
def __init__(self, environment):
# shortcuts
c = lambda x: re.compile(x, re.M | re.S)
e = re.escape
# lexing rules for tags
tag_rules = [
(whitespace_re, TOKEN_WHITESPACE, None),
(float_re, TOKEN_FLOAT, None),
(integer_re, TOKEN_INTEGER, None),
(name_re, TOKEN_NAME, None),
(string_re, TOKEN_STRING, None),
(operator_re, TOKEN_OPERATOR, None)
]
# assemble the root lexing rule. because "|" is ungreedy
# we have to sort by length so that the lexer continues working
# as expected when we have parsing rules like <% for block and
# <%= for variables. (if someone wants asp like syntax)
# variables are just part of the rules if variable processing
# is required.
root_tag_rules = compile_rules(environment)
# block suffix if trimming is enabled
block_suffix_re = environment.trim_blocks and '\\n?' or ''
# strip leading spaces if lstrip_blocks is enabled
prefix_re = {}
if environment.lstrip_blocks:
# use '{%+' to manually disable lstrip_blocks behavior
no_lstrip_re = e('+')
# detect overlap between block and variable or comment strings
block_diff = c(r'^%s(.*)' % e(environment.block_start_string))
# make sure we don't mistake a block for a variable or a comment
m = block_diff.match(environment.comment_start_string)
no_lstrip_re += m and r'|%s' % e(m.group(1)) or ''
m = block_diff.match(environment.variable_start_string)
no_lstrip_re += m and r'|%s' % e(m.group(1)) or ''
# detect overlap between comment and variable strings
comment_diff = c(r'^%s(.*)' % e(environment.comment_start_string))
m = comment_diff.match(environment.variable_start_string)
no_variable_re = m and r'(?!%s)' % e(m.group(1)) or ''
lstrip_re = r'^[ \t]*'
block_prefix_re = r'%s%s(?!%s)|%s\+?' % (
lstrip_re,
e(environment.block_start_string),
no_lstrip_re,
e(environment.block_start_string),
)
comment_prefix_re = r'%s%s%s|%s\+?' % (
lstrip_re,
e(environment.comment_start_string),
no_variable_re,
e(environment.comment_start_string),
)
prefix_re['block'] = block_prefix_re
prefix_re['comment'] = comment_prefix_re
else:
block_prefix_re = '%s' % e(environment.block_start_string)
self.newline_sequence = environment.newline_sequence
self.keep_trailing_newline = environment.keep_trailing_newline
# global lexing rules
self.rules = {
'root': [
# directives
(c('(.*?)(?:%s)' % '|'.join(
[r'(?P<raw_begin>(?:\s*%s\-|%s)\s*raw\s*(?:\-%s\s*|%s))' % (
e(environment.block_start_string),
block_prefix_re,
e(environment.block_end_string),
e(environment.block_end_string)
)] + [
r'(?P<%s_begin>\s*%s\-|%s)' % (n, r, prefix_re.get(n,r))
for n, r in root_tag_rules
])), (TOKEN_DATA, '#bygroup'), '#bygroup'),
# data
(c('.+'), TOKEN_DATA, None)
],
# comments
TOKEN_COMMENT_BEGIN: [
(c(r'(.*?)((?:\-%s\s*|%s)%s)' % (
e(environment.comment_end_string),
e(environment.comment_end_string),
block_suffix_re
)), (TOKEN_COMMENT, TOKEN_COMMENT_END), '#pop'),
(c('(.)'), (Failure('Missing end of comment tag'),), None)
],
# blocks
TOKEN_BLOCK_BEGIN: [
(c('(?:\-%s\s*|%s)%s' % (
e(environment.block_end_string),
e(environment.block_end_string),
block_suffix_re
)), TOKEN_BLOCK_END, '#pop'),
] + tag_rules,
# variables
TOKEN_VARIABLE_BEGIN: [
(c('\-%s\s*|%s' % (
e(environment.variable_end_string),
e(environment.variable_end_string)
)), TOKEN_VARIABLE_END, '#pop')
] + tag_rules,
# raw block
TOKEN_RAW_BEGIN: [
(c('(.*?)((?:\s*%s\-|%s)\s*endraw\s*(?:\-%s\s*|%s%s))' % (
e(environment.block_start_string),
block_prefix_re,
e(environment.block_end_string),
e(environment.block_end_string),
block_suffix_re
)), (TOKEN_DATA, TOKEN_RAW_END), '#pop'),
(c('(.)'), (Failure('Missing end of raw directive'),), None)
],
# line statements
TOKEN_LINESTATEMENT_BEGIN: [
(c(r'\s*(\n|$)'), TOKEN_LINESTATEMENT_END, '#pop')
] + tag_rules,
# line comments
TOKEN_LINECOMMENT_BEGIN: [
(c(r'(.*?)()(?=\n|$)'), (TOKEN_LINECOMMENT,
TOKEN_LINECOMMENT_END), '#pop')
]
}
def _normalize_newlines(self, value):
"""Called for strings and template data to normalize it to unicode."""
return newline_re.sub(self.newline_sequence, value)
def tokenize(self, source, name=None, filename=None, state=None):
"""Calls tokeniter + tokenize and wraps it in a token stream.
"""
stream = self.tokeniter(source, name, filename, state)
return TokenStream(self.wrap(stream, name, filename), name, filename)
def wrap(self, stream, name=None, filename=None):
"""This is called with the stream as returned by `tokenize` and wraps
every token in a :class:`Token` and converts the value.
"""
for lineno, token, value in stream:
if token in ignored_tokens:
continue
elif token == 'linestatement_begin':
token = 'block_begin'
elif token == 'linestatement_end':
token = 'block_end'
# we are not interested in those tokens in the parser
elif token in ('raw_begin', 'raw_end'):
continue
elif token == 'data':
value = self._normalize_newlines(value)
elif token == 'keyword':
token = value
elif token == 'name':
value = str(value)
elif token == 'string':
# try to unescape string
try:
value = self._normalize_newlines(value[1:-1]) \
.encode('ascii', 'backslashreplace') \
.decode('unicode-escape')
except Exception as e:
msg = str(e).split(':')[-1].strip()
raise TemplateSyntaxError(msg, lineno, name, filename)
# if we can express it as bytestring (ascii only)
# we do that for support of semi broken APIs
# as datetime.datetime.strftime. On python 3 this
# call becomes a noop thanks to 2to3
try:
value = str(value)
except UnicodeError:
pass
elif token == 'integer':
value = int(value)
elif token == 'float':
value = float(value)
elif token == 'operator':
token = operators[value]
yield Token(lineno, token, value)
def tokeniter(self, source, name, filename=None, state=None):
"""This method tokenizes the text and returns the tokens in a
generator. Use this method if you just want to tokenize a template.
"""
source = text_type(source)
lines = source.splitlines()
if self.keep_trailing_newline and source:
for newline in ('\r\n', '\r', '\n'):
if source.endswith(newline):
lines.append('')
break
source = '\n'.join(lines)
pos = 0
lineno = 1
stack = ['root']
if state is not None and state != 'root':
assert state in ('variable', 'block'), 'invalid state'
stack.append(state + '_begin')
else:
state = 'root'
statetokens = self.rules[stack[-1]]
source_length = len(source)
balancing_stack = []
while 1:
# tokenizer loop
for regex, tokens, new_state in statetokens:
m = regex.match(source, pos)
# if no match we try again with the next rule
if m is None:
continue
# we only match blocks and variables if braces / parentheses
# are balanced. continue parsing with the lower rule which
# is the operator rule. do this only if the end tags look
# like operators
if balancing_stack and \
tokens in ('variable_end', 'block_end',
'linestatement_end'):
continue
# tuples support more options
if isinstance(tokens, tuple):
for idx, token in enumerate(tokens):
# failure group
if token.__class__ is Failure:
raise token(lineno, filename)
# bygroup is a bit more complex, in that case we
# yield for the current token the first named
# group that matched
elif token == '#bygroup':
for key, value in iteritems(m.groupdict()):
if value is not None:
yield lineno, key, value
lineno += value.count('\n')
break
else:
raise RuntimeError('%r wanted to resolve '
'the token dynamically'
' but no group matched'
% regex)
# normal group
else:
data = m.group(idx + 1)
if data or token not in ignore_if_empty:
yield lineno, token, data
lineno += data.count('\n')
# strings as token are just yielded as-is.
else:
data = m.group()
# update brace/parentheses balance
if tokens == 'operator':
if data == '{':
balancing_stack.append('}')
elif data == '(':
balancing_stack.append(')')
elif data == '[':
balancing_stack.append(']')
elif data in ('}', ')', ']'):
if not balancing_stack:
raise TemplateSyntaxError('unexpected \'%s\'' %
data, lineno, name,
filename)
expected_op = balancing_stack.pop()
if expected_op != data:
raise TemplateSyntaxError('unexpected \'%s\', '
'expected \'%s\'' %
(data, expected_op),
lineno, name,
filename)
# yield items
if data or tokens not in ignore_if_empty:
yield lineno, tokens, data
lineno += data.count('\n')
# fetch new position into new variable so that we can check
# if there is an internal parsing error which would result
# in an infinite loop
pos2 = m.end()
# handle state changes
if new_state is not None:
# remove the uppermost state
if new_state == '#pop':
stack.pop()
# resolve the new state by group checking
elif new_state == '#bygroup':
for key, value in iteritems(m.groupdict()):
if value is not None:
stack.append(key)
break
else:
raise RuntimeError('%r wanted to resolve the '
'new state dynamically but'
' no group matched' %
regex)
# direct state name given
else:
stack.append(new_state)
statetokens = self.rules[stack[-1]]
# we are still at the same position and no stack change.
# this means a loop without break condition, avoid that and
# raise error
elif pos2 == pos:
raise RuntimeError('%r yielded empty string without '
'stack change' % regex)
# publish the new position and start again
pos = pos2
break
# if loop terminated without break we haven't found a single match
# either we are at the end of the file or we have a problem
else:
# end of text
if pos >= source_length:
return
# something went wrong
raise TemplateSyntaxError('unexpected char %r at %d' %
(source[pos], pos), lineno,
name, filename)

View File

@ -1,471 +0,0 @@
# -*- coding: utf-8 -*-
"""
jinja2.loaders
~~~~~~~~~~~~~~
Jinja loader classes.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import os
import sys
import weakref
from types import ModuleType
from os import path
from hashlib import sha1
from jinja2.exceptions import TemplateNotFound
from jinja2.utils import open_if_exists, internalcode
from jinja2._compat import string_types, iteritems
def split_template_path(template):
"""Split a path into segments and perform a sanity check. If it detects
'..' in the path it will raise a `TemplateNotFound` error.
"""
pieces = []
for piece in template.split('/'):
if path.sep in piece \
or (path.altsep and path.altsep in piece) or \
piece == path.pardir:
raise TemplateNotFound(template)
elif piece and piece != '.':
pieces.append(piece)
return pieces
class BaseLoader(object):
"""Baseclass for all loaders. Subclass this and override `get_source` to
implement a custom loading mechanism. The environment provides a
`get_template` method that calls the loader's `load` method to get the
:class:`Template` object.
A very basic example for a loader that looks up templates on the file
system could look like this::
from jinja2 import BaseLoader, TemplateNotFound
from os.path import join, exists, getmtime
class MyLoader(BaseLoader):
def __init__(self, path):
self.path = path
def get_source(self, environment, template):
path = join(self.path, template)
if not exists(path):
raise TemplateNotFound(template)
mtime = getmtime(path)
with file(path) as f:
source = f.read().decode('utf-8')
return source, path, lambda: mtime == getmtime(path)
"""
#: if set to `False` it indicates that the loader cannot provide access
#: to the source of templates.
#:
#: .. versionadded:: 2.4
has_source_access = True
def get_source(self, environment, template):
"""Get the template source, filename and reload helper for a template.
It's passed the environment and template name and has to return a
tuple in the form ``(source, filename, uptodate)`` or raise a
`TemplateNotFound` error if it can't locate the template.
The source part of the returned tuple must be the source of the
template as a unicode string or an ASCII bytestring. The filename should
be the name of the file on the filesystem if it was loaded from there,
otherwise `None`. The filename is used by python for the tracebacks
if no loader extension is used.
The last item in the tuple is the `uptodate` function. If auto
reloading is enabled it's always called to check if the template
changed. No arguments are passed so the function must store the
old state somewhere (for example in a closure). If it returns `False`
the template will be reloaded.
"""
if not self.has_source_access:
raise RuntimeError('%s cannot provide access to the source' %
self.__class__.__name__)
raise TemplateNotFound(template)
def list_templates(self):
"""Iterates over all templates. If the loader does not support that
it should raise a :exc:`TypeError` which is the default behavior.
"""
raise TypeError('this loader cannot iterate over all templates')
@internalcode
def load(self, environment, name, globals=None):
"""Loads a template. This method looks up the template in the cache
or loads one by calling :meth:`get_source`. Subclasses should not
override this method as loaders working on collections of other
loaders (such as :class:`PrefixLoader` or :class:`ChoiceLoader`)
will not call this method but `get_source` directly.
"""
code = None
if globals is None:
globals = {}
# first we try to get the source for this template together
# with the filename and the uptodate function.
source, filename, uptodate = self.get_source(environment, name)
# try to load the code from the bytecode cache if there is a
# bytecode cache configured.
bcc = environment.bytecode_cache
if bcc is not None:
bucket = bcc.get_bucket(environment, name, filename, source)
code = bucket.code
# if we don't have code so far (not cached, no longer up to
# date) etc. we compile the template
if code is None:
code = environment.compile(source, name, filename)
# if the bytecode cache is available and the bucket doesn't
# have a code so far, we give the bucket the new code and put
# it back to the bytecode cache.
if bcc is not None and bucket.code is None:
bucket.code = code
bcc.set_bucket(bucket)
return environment.template_class.from_code(environment, code,
globals, uptodate)
class FileSystemLoader(BaseLoader):
"""Loads templates from the file system. This loader can find templates
in folders on the file system and is the preferred way to load them.
The loader takes the path to the templates as string, or if multiple
locations are wanted a list of them which is then looked up in the
given order:
>>> loader = FileSystemLoader('/path/to/templates')
>>> loader = FileSystemLoader(['/path/to/templates', '/other/path'])
Per default the template encoding is ``'utf-8'`` which can be changed
by setting the `encoding` parameter to something else.
"""
def __init__(self, searchpath, encoding='utf-8'):
if isinstance(searchpath, string_types):
searchpath = [searchpath]
self.searchpath = list(searchpath)
self.encoding = encoding
def get_source(self, environment, template):
pieces = split_template_path(template)
for searchpath in self.searchpath:
filename = path.join(searchpath, *pieces)
f = open_if_exists(filename)
if f is None:
continue
try:
contents = f.read().decode(self.encoding)
finally:
f.close()
mtime = path.getmtime(filename)
def uptodate():
try:
return path.getmtime(filename) == mtime
except OSError:
return False
return contents, filename, uptodate
raise TemplateNotFound(template)
def list_templates(self):
found = set()
for searchpath in self.searchpath:
for dirpath, dirnames, filenames in os.walk(searchpath):
for filename in filenames:
template = os.path.join(dirpath, filename) \
[len(searchpath):].strip(os.path.sep) \
.replace(os.path.sep, '/')
if template[:2] == './':
template = template[2:]
if template not in found:
found.add(template)
return sorted(found)
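# A minimal sketch (not from the original file) of wiring a FileSystemLoader
# into an environment; the search path is an assumption.
def _example_filesystem_loader():
    from jinja2 import Environment
    env = Environment(loader=FileSystemLoader('/path/to/templates'))
    return env.loader.list_templates()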
class PackageLoader(BaseLoader):
"""Load templates from python eggs or packages. It is constructed with
the name of the python package and the path to the templates in that
package::
loader = PackageLoader('mypackage', 'views')
If the package path is not given, ``'templates'`` is assumed.
Per default the template encoding is ``'utf-8'`` which can be changed
by setting the `encoding` parameter to something else. Due to the nature
of eggs it's only possible to reload templates if the package was loaded
from the file system and not a zip file.
"""
def __init__(self, package_name, package_path='templates',
encoding='utf-8'):
from pkg_resources import DefaultProvider, ResourceManager, \
get_provider
provider = get_provider(package_name)
self.encoding = encoding
self.manager = ResourceManager()
self.filesystem_bound = isinstance(provider, DefaultProvider)
self.provider = provider
self.package_path = package_path
def get_source(self, environment, template):
pieces = split_template_path(template)
p = '/'.join((self.package_path,) + tuple(pieces))
if not self.provider.has_resource(p):
raise TemplateNotFound(template)
filename = uptodate = None
if self.filesystem_bound:
filename = self.provider.get_resource_filename(self.manager, p)
mtime = path.getmtime(filename)
def uptodate():
try:
return path.getmtime(filename) == mtime
except OSError:
return False
source = self.provider.get_resource_string(self.manager, p)
return source.decode(self.encoding), filename, uptodate
def list_templates(self):
path = self.package_path
if path[:2] == './':
path = path[2:]
elif path == '.':
path = ''
offset = len(path)
results = []
def _walk(path):
for filename in self.provider.resource_listdir(path):
fullname = path + '/' + filename
if self.provider.resource_isdir(fullname):
_walk(fullname)
else:
results.append(fullname[offset:].lstrip('/'))
_walk(path)
results.sort()
return results
class DictLoader(BaseLoader):
"""Loads a template from a python dict. It's passed a dict of unicode
strings bound to template names. This loader is useful for unittesting:
>>> loader = DictLoader({'index.html': 'source here'})
Because auto reloading is rarely useful this is disabled per default.
"""
def __init__(self, mapping):
self.mapping = mapping
def get_source(self, environment, template):
if template in self.mapping:
source = self.mapping[template]
return source, None, lambda: source == self.mapping.get(template)
raise TemplateNotFound(template)
def list_templates(self):
return sorted(self.mapping)
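# A minimal sketch (not from the original file) of the unit-testing style
# setup the docstring above describes.
def _example_dict_loader():
    from jinja2 import Environment
    env = Environment(loader=DictLoader({'index.html': 'Hello {{ name }}!'}))
    return env.get_template('index.html').render(name='World')
    # -> u'Hello World!'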
class FunctionLoader(BaseLoader):
"""A loader that is passed a function which does the loading. The
function receives the name of the template and has to return either
a unicode string with the template source, a tuple in the form ``(source,
filename, uptodatefunc)`` or `None` if the template does not exist.
>>> def load_template(name):
... if name == 'index.html':
... return '...'
...
>>> loader = FunctionLoader(load_template)
The `uptodatefunc` is a function that is called if autoreload is enabled
and has to return `True` if the template is still up to date. For more
details have a look at :meth:`BaseLoader.get_source` which has the same
return value.
"""
def __init__(self, load_func):
self.load_func = load_func
def get_source(self, environment, template):
rv = self.load_func(template)
if rv is None:
raise TemplateNotFound(template)
elif isinstance(rv, string_types):
return rv, None, None
return rv
class PrefixLoader(BaseLoader):
"""A loader that is passed a dict of loaders where each loader is bound
to a prefix. The prefix is delimited from the template by a slash per
default, which can be changed by setting the `delimiter` argument to
something else::
loader = PrefixLoader({
'app1': PackageLoader('mypackage.app1'),
'app2': PackageLoader('mypackage.app2')
})
By loading ``'app1/index.html'`` the file from the app1 package is loaded,
by loading ``'app2/index.html'`` the file from the second.
"""
def __init__(self, mapping, delimiter='/'):
self.mapping = mapping
self.delimiter = delimiter
def get_loader(self, template):
try:
prefix, name = template.split(self.delimiter, 1)
loader = self.mapping[prefix]
except (ValueError, KeyError):
raise TemplateNotFound(template)
return loader, name
def get_source(self, environment, template):
loader, name = self.get_loader(template)
try:
return loader.get_source(environment, name)
except TemplateNotFound:
# re-raise the exception with the correct filename here.
# (the one that includes the prefix)
raise TemplateNotFound(template)
@internalcode
def load(self, environment, name, globals=None):
loader, local_name = self.get_loader(name)
try:
return loader.load(environment, local_name, globals)
except TemplateNotFound:
# re-raise the exception with the correct filename here.
# (the one that includes the prefix)
raise TemplateNotFound(name)
def list_templates(self):
result = []
for prefix, loader in iteritems(self.mapping):
for template in loader.list_templates():
result.append(prefix + self.delimiter + template)
return result
class ChoiceLoader(BaseLoader):
"""This loader works like the `PrefixLoader` just that no prefix is
specified. If a template could not be found by one loader the next one
is tried.
>>> loader = ChoiceLoader([
... FileSystemLoader('/path/to/user/templates'),
... FileSystemLoader('/path/to/system/templates')
... ])
This is useful if you want to allow users to override builtin templates
from a different location.
"""
def __init__(self, loaders):
self.loaders = loaders
def get_source(self, environment, template):
for loader in self.loaders:
try:
return loader.get_source(environment, template)
except TemplateNotFound:
pass
raise TemplateNotFound(template)
@internalcode
def load(self, environment, name, globals=None):
for loader in self.loaders:
try:
return loader.load(environment, name, globals)
except TemplateNotFound:
pass
raise TemplateNotFound(name)
def list_templates(self):
found = set()
for loader in self.loaders:
found.update(loader.list_templates())
return sorted(found)
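# A minimal sketch (not from the original file) of letting user templates
# override builtin ones; both search paths are assumptions.
def _example_choice_loader():
    loader = ChoiceLoader([
        FileSystemLoader('/path/to/user/templates'),
        FileSystemLoader('/path/to/system/templates'),
    ])
    return loader.list_templates()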
class _TemplateModule(ModuleType):
"""Like a normal module but with support for weak references"""
class ModuleLoader(BaseLoader):
"""This loader loads templates from precompiled templates.
Example usage:
>>> loader = ChoiceLoader([
... ModuleLoader('/path/to/compiled/templates'),
... FileSystemLoader('/path/to/templates')
... ])
Templates can be precompiled with :meth:`Environment.compile_templates`.
"""
has_source_access = False
def __init__(self, path):
package_name = '_jinja2_module_templates_%x' % id(self)
# create a fake module that looks for the templates in the
# path given.
mod = _TemplateModule(package_name)
if isinstance(path, string_types):
path = [path]
else:
path = list(path)
mod.__path__ = path
sys.modules[package_name] = weakref.proxy(mod,
lambda x: sys.modules.pop(package_name, None))
# the only strong reference, the sys.modules entry is weak
# so that the garbage collector can remove it once the
# loader that created it goes out of business.
self.module = mod
self.package_name = package_name
@staticmethod
def get_template_key(name):
return 'tmpl_' + sha1(name.encode('utf-8')).hexdigest()
@staticmethod
def get_module_filename(name):
return ModuleLoader.get_template_key(name) + '.py'
@internalcode
def load(self, environment, name, globals=None):
key = self.get_template_key(name)
module = '%s.%s' % (self.package_name, key)
mod = getattr(self.module, module, None)
if mod is None:
try:
mod = __import__(module, None, None, ['root'])
except ImportError:
raise TemplateNotFound(name)
# remove the entry from sys.modules, we only want the attribute
# on the module object we have stored on the loader.
sys.modules.pop(module, None)
return environment.template_class.from_module_dict(
environment, mod.__dict__, globals)
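# Illustrative standalone sketch (paths are assumptions): precompile the
# templates once, then serve them through ModuleLoader with a plain
# FileSystemLoader fallback, mirroring the ChoiceLoader example above.
from jinja2 import Environment, FileSystemLoader, ChoiceLoader, ModuleLoader
source_env = Environment(loader=FileSystemLoader('/path/to/templates'))
# zip=None writes plain .py modules into the target directory
source_env.compile_templates('/path/to/compiled/templates', zip=None)
env = Environment(loader=ChoiceLoader([
    ModuleLoader('/path/to/compiled/templates'),
    FileSystemLoader('/path/to/templates'),
]))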

View File

@ -1,103 +0,0 @@
# -*- coding: utf-8 -*-
"""
jinja2.meta
~~~~~~~~~~~
    This module implements various functions that expose information about
templates that might be interesting for various kinds of applications.
:copyright: (c) 2010 by the Jinja Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from jinja2 import nodes
from jinja2.compiler import CodeGenerator
from jinja2._compat import string_types
class TrackingCodeGenerator(CodeGenerator):
"""We abuse the code generator for introspection."""
def __init__(self, environment):
CodeGenerator.__init__(self, environment, '<introspection>',
'<introspection>')
self.undeclared_identifiers = set()
def write(self, x):
"""Don't write."""
def pull_locals(self, frame):
"""Remember all undeclared identifiers."""
self.undeclared_identifiers.update(frame.identifiers.undeclared)
def find_undeclared_variables(ast):
"""Returns a set of all variables in the AST that will be looked up from
the context at runtime. Because at compile time it's not known which
variables will be used depending on the path the execution takes at
runtime, all variables are returned.
>>> from jinja2 import Environment, meta
>>> env = Environment()
>>> ast = env.parse('{% set foo = 42 %}{{ bar + foo }}')
>>> meta.find_undeclared_variables(ast)
set(['bar'])
.. admonition:: Implementation
Internally the code generator is used for finding undeclared variables.
This is good to know because the code generator might raise a
:exc:`TemplateAssertionError` during compilation and as a matter of
fact this function can currently raise that exception as well.
"""
codegen = TrackingCodeGenerator(ast.environment)
codegen.visit(ast)
return codegen.undeclared_identifiers
def find_referenced_templates(ast):
"""Finds all the referenced templates from the AST. This will return an
iterator over all the hardcoded template extensions, inclusions and
imports. If dynamic inheritance or inclusion is used, `None` will be
yielded.
>>> from jinja2 import Environment, meta
>>> env = Environment()
>>> ast = env.parse('{% extends "layout.html" %}{% include helper %}')
>>> list(meta.find_referenced_templates(ast))
['layout.html', None]
This function is useful for dependency tracking. For example if you want
to rebuild parts of the website after a layout template has changed.
"""
for node in ast.find_all((nodes.Extends, nodes.FromImport, nodes.Import,
nodes.Include)):
if not isinstance(node.template, nodes.Const):
# a tuple with some non consts in there
if isinstance(node.template, (nodes.Tuple, nodes.List)):
for template_name in node.template.items:
# something const, only yield the strings and ignore
# non-string consts that really just make no sense
if isinstance(template_name, nodes.Const):
if isinstance(template_name.value, string_types):
yield template_name.value
# something dynamic in there
else:
yield None
# something dynamic we don't know about here
else:
yield None
continue
# constant is a basestring, direct template name
if isinstance(node.template.value, string_types):
yield node.template.value
        # a tuple or list (the latter *should* not happen) made of consts,
# yield the consts that are strings. We could warn here for
# non string values
elif isinstance(node, nodes.Include) and \
isinstance(node.template.value, (tuple, list)):
for template_name in node.template.value:
if isinstance(template_name, string_types):
yield template_name
# something else we don't care about, we could warn here
else:
yield None
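# Illustrative standalone sketch: using find_referenced_templates for the
# dependency tracking mentioned in its docstring.  The environment and the
# template sources are assumptions; dynamic references (None) are dropped.
from jinja2 import Environment, DictLoader
from jinja2.meta import find_referenced_templates
env = Environment(loader=DictLoader({
    'layout.html': u'...',
    'page.html': u'{% extends "layout.html" %}{% include helper %}',
}))
dependencies = {}
for name in env.loader.list_templates():
    source = env.loader.get_source(env, name)[0]
    refs = set(find_referenced_templates(env.parse(source)))
    dependencies[name] = refs - set([None])
# dependencies['page.html'] == set(['layout.html'])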

View File

@ -1,914 +0,0 @@
# -*- coding: utf-8 -*-
"""
jinja2.nodes
~~~~~~~~~~~~
This module implements additional nodes derived from the ast base node.
It also provides some node tree helper functions like `in_lineno` and
`get_nodes` used by the parser and translator in order to normalize
python and jinja nodes.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import operator
from collections import deque
from jinja2.utils import Markup
from jinja2._compat import next, izip, with_metaclass, text_type, \
method_type, function_type
#: the types we support for context functions
_context_function_types = (function_type, method_type)
_binop_to_func = {
'*': operator.mul,
'/': operator.truediv,
'//': operator.floordiv,
'**': operator.pow,
'%': operator.mod,
'+': operator.add,
'-': operator.sub
}
_uaop_to_func = {
'not': operator.not_,
'+': operator.pos,
'-': operator.neg
}
_cmpop_to_func = {
'eq': operator.eq,
'ne': operator.ne,
'gt': operator.gt,
'gteq': operator.ge,
'lt': operator.lt,
'lteq': operator.le,
'in': lambda a, b: a in b,
'notin': lambda a, b: a not in b
}
class Impossible(Exception):
"""Raised if the node could not perform a requested action."""
class NodeType(type):
"""A metaclass for nodes that handles the field and attribute
inheritance. fields and attributes from the parent class are
automatically forwarded to the child."""
def __new__(cls, name, bases, d):
for attr in 'fields', 'attributes':
storage = []
storage.extend(getattr(bases[0], attr, ()))
storage.extend(d.get(attr, ()))
assert len(bases) == 1, 'multiple inheritance not allowed'
assert len(storage) == len(set(storage)), 'layout conflict'
d[attr] = tuple(storage)
d.setdefault('abstract', False)
return type.__new__(cls, name, bases, d)
class EvalContext(object):
"""Holds evaluation time information. Custom attributes can be attached
to it in extensions.
"""
def __init__(self, environment, template_name=None):
self.environment = environment
if callable(environment.autoescape):
self.autoescape = environment.autoescape(template_name)
else:
self.autoescape = environment.autoescape
self.volatile = False
def save(self):
return self.__dict__.copy()
def revert(self, old):
self.__dict__.clear()
self.__dict__.update(old)
def get_eval_context(node, ctx):
if ctx is None:
if node.environment is None:
raise RuntimeError('if no eval context is passed, the '
'node must have an attached '
'environment.')
return EvalContext(node.environment)
return ctx
class Node(with_metaclass(NodeType, object)):
"""Baseclass for all Jinja2 nodes. There are a number of nodes available
of different types. There are four major types:
- :class:`Stmt`: statements
- :class:`Expr`: expressions
- :class:`Helper`: helper nodes
- :class:`Template`: the outermost wrapper node
All nodes have fields and attributes. Fields may be other nodes, lists,
or arbitrary values. Fields are passed to the constructor as regular
positional arguments, attributes as keyword arguments. Each node has
two attributes: `lineno` (the line number of the node) and `environment`.
The `environment` attribute is set at the end of the parsing process for
all nodes automatically.
"""
fields = ()
attributes = ('lineno', 'environment')
abstract = True
def __init__(self, *fields, **attributes):
if self.abstract:
            raise TypeError('abstract nodes are not instantiable')
if fields:
if len(fields) != len(self.fields):
if not self.fields:
raise TypeError('%r takes 0 arguments' %
self.__class__.__name__)
raise TypeError('%r takes 0 or %d argument%s' % (
self.__class__.__name__,
len(self.fields),
len(self.fields) != 1 and 's' or ''
))
for name, arg in izip(self.fields, fields):
setattr(self, name, arg)
for attr in self.attributes:
setattr(self, attr, attributes.pop(attr, None))
if attributes:
raise TypeError('unknown attribute %r' %
next(iter(attributes)))
def iter_fields(self, exclude=None, only=None):
"""This method iterates over all fields that are defined and yields
``(key, value)`` tuples. Per default all fields are returned, but
it's possible to limit that to some fields by providing the `only`
parameter or to exclude some using the `exclude` parameter. Both
should be sets or tuples of field names.
"""
for name in self.fields:
if (exclude is only is None) or \
(exclude is not None and name not in exclude) or \
(only is not None and name in only):
try:
yield name, getattr(self, name)
except AttributeError:
pass
def iter_child_nodes(self, exclude=None, only=None):
"""Iterates over all direct child nodes of the node. This iterates
        over all fields and yields the values if they are nodes.  If the value
of a field is a list all the nodes in that list are returned.
"""
for field, item in self.iter_fields(exclude, only):
if isinstance(item, list):
for n in item:
if isinstance(n, Node):
yield n
elif isinstance(item, Node):
yield item
def find(self, node_type):
"""Find the first node of a given type. If no such node exists the
return value is `None`.
"""
for result in self.find_all(node_type):
return result
def find_all(self, node_type):
"""Find all the nodes of a given type. If the type is a tuple,
the check is performed for any of the tuple items.
"""
for child in self.iter_child_nodes():
if isinstance(child, node_type):
yield child
for result in child.find_all(node_type):
yield result
def set_ctx(self, ctx):
"""Reset the context of a node and all child nodes. Per default the
        parser will generate all nodes with a 'load' context as it's the
most common one. This method is used in the parser to set assignment
targets and other nodes to a store context.
"""
todo = deque([self])
while todo:
node = todo.popleft()
if 'ctx' in node.fields:
node.ctx = ctx
todo.extend(node.iter_child_nodes())
return self
def set_lineno(self, lineno, override=False):
"""Set the line numbers of the node and children."""
todo = deque([self])
while todo:
node = todo.popleft()
if 'lineno' in node.attributes:
if node.lineno is None or override:
node.lineno = lineno
todo.extend(node.iter_child_nodes())
return self
def set_environment(self, environment):
"""Set the environment for all nodes."""
todo = deque([self])
while todo:
node = todo.popleft()
node.environment = environment
todo.extend(node.iter_child_nodes())
return self
def __eq__(self, other):
return type(self) is type(other) and \
tuple(self.iter_fields()) == tuple(other.iter_fields())
def __ne__(self, other):
return not self.__eq__(other)
# Restore Python 2 hashing behavior on Python 3
__hash__ = object.__hash__
def __repr__(self):
return '%s(%s)' % (
self.__class__.__name__,
', '.join('%s=%r' % (arg, getattr(self, arg, None)) for
arg in self.fields)
)
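# Illustrative standalone sketch (not part of the original file): nodes can
# be built by hand, e.g. from extensions.  The environment is attached only
# so that as_const() can create an eval context.
from jinja2 import Environment, nodes
env = Environment()
expr = nodes.Add(nodes.Const(40), nodes.Const(2), lineno=1)
expr.set_environment(env)
assert expr.as_const() == 42
assert [type(child) for child in expr.iter_child_nodes()] == [nodes.Const, nodes.Const]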
class Stmt(Node):
"""Base node for all statements."""
abstract = True
class Helper(Node):
"""Nodes that exist in a specific context only."""
abstract = True
class Template(Node):
"""Node that represents a template. This must be the outermost node that
is passed to the compiler.
"""
fields = ('body',)
class Output(Stmt):
"""A node that holds multiple expressions which are then printed out.
This is used both for the `print` statement and the regular template data.
"""
fields = ('nodes',)
class Extends(Stmt):
"""Represents an extends statement."""
fields = ('template',)
class For(Stmt):
"""The for loop. `target` is the target for the iteration (usually a
:class:`Name` or :class:`Tuple`), `iter` the iterable. `body` is a list
of nodes that are used as loop-body, and `else_` a list of nodes for the
`else` block. If no else node exists it has to be an empty list.
For filtered nodes an expression can be stored as `test`, otherwise `None`.
"""
fields = ('target', 'iter', 'body', 'else_', 'test', 'recursive')
class If(Stmt):
"""If `test` is true, `body` is rendered, else `else_`."""
fields = ('test', 'body', 'else_')
class Macro(Stmt):
"""A macro definition. `name` is the name of the macro, `args` a list of
arguments and `defaults` a list of defaults if there are any. `body` is
a list of nodes for the macro body.
"""
fields = ('name', 'args', 'defaults', 'body')
class CallBlock(Stmt):
"""Like a macro without a name but a call instead. `call` is called with
the unnamed macro as `caller` argument this node holds.
"""
fields = ('call', 'args', 'defaults', 'body')
class FilterBlock(Stmt):
"""Node for filter sections."""
fields = ('body', 'filter')
class Block(Stmt):
"""A node that represents a block."""
fields = ('name', 'body', 'scoped')
class Include(Stmt):
"""A node that represents the include tag."""
fields = ('template', 'with_context', 'ignore_missing')
class Import(Stmt):
"""A node that represents the import tag."""
fields = ('template', 'target', 'with_context')
class FromImport(Stmt):
"""A node that represents the from import tag. It's important to not
pass unsafe names to the name attribute. The compiler translates the
attribute lookups directly into getattr calls and does *not* use the
subscript callback of the interface. As exported variables may not
start with double underscores (which the parser asserts) this is not a
problem for regular Jinja code, but if this node is used in an extension
extra care must be taken.
The list of names may contain tuples if aliases are wanted.
"""
fields = ('template', 'names', 'with_context')
class ExprStmt(Stmt):
"""A statement that evaluates an expression and discards the result."""
fields = ('node',)
class Assign(Stmt):
"""Assigns an expression to a target."""
fields = ('target', 'node')
class Expr(Node):
"""Baseclass for all expressions."""
abstract = True
def as_const(self, eval_ctx=None):
"""Return the value of the expression as constant or raise
:exc:`Impossible` if this was not possible.
An :class:`EvalContext` can be provided, if none is given
a default context is created which requires the nodes to have
an attached environment.
.. versionchanged:: 2.4
the `eval_ctx` parameter was added.
"""
raise Impossible()
def can_assign(self):
"""Check if it's possible to assign something to this node."""
return False
class BinExpr(Expr):
"""Baseclass for all binary expressions."""
fields = ('left', 'right')
operator = None
abstract = True
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
# intercepted operators cannot be folded at compile time
if self.environment.sandboxed and \
self.operator in self.environment.intercepted_binops:
raise Impossible()
f = _binop_to_func[self.operator]
try:
return f(self.left.as_const(eval_ctx), self.right.as_const(eval_ctx))
except Exception:
raise Impossible()
class UnaryExpr(Expr):
"""Baseclass for all unary expressions."""
fields = ('node',)
operator = None
abstract = True
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
# intercepted operators cannot be folded at compile time
if self.environment.sandboxed and \
self.operator in self.environment.intercepted_unops:
raise Impossible()
f = _uaop_to_func[self.operator]
try:
return f(self.node.as_const(eval_ctx))
except Exception:
raise Impossible()
class Name(Expr):
"""Looks up a name or stores a value in a name.
The `ctx` of the node can be one of the following values:
- `store`: store a value in the name
- `load`: load that name
- `param`: like `store` but if the name was defined as function parameter.
"""
fields = ('name', 'ctx')
def can_assign(self):
return self.name not in ('true', 'false', 'none',
'True', 'False', 'None')
class Literal(Expr):
"""Baseclass for literals."""
abstract = True
class Const(Literal):
"""All constant values. The parser will return this node for simple
constants such as ``42`` or ``"foo"`` but it can be used to store more
    complex values such as lists too.  Only constants with a safe
    representation (objects where ``eval(repr(x)) == x`` is true) can be
    stored this way.
"""
fields = ('value',)
def as_const(self, eval_ctx=None):
return self.value
@classmethod
def from_untrusted(cls, value, lineno=None, environment=None):
"""Return a const object if the value is representable as
constant value in the generated code, otherwise it will raise
an `Impossible` exception.
"""
from .compiler import has_safe_repr
if not has_safe_repr(value):
raise Impossible()
return cls(value, lineno=lineno, environment=environment)
class TemplateData(Literal):
"""A constant template string."""
fields = ('data',)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if eval_ctx.volatile:
raise Impossible()
if eval_ctx.autoescape:
return Markup(self.data)
return self.data
class Tuple(Literal):
"""For loop unpacking and some other things like multiple arguments
for subscripts. Like for :class:`Name` `ctx` specifies if the tuple
is used for loading the names or storing.
"""
fields = ('items', 'ctx')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return tuple(x.as_const(eval_ctx) for x in self.items)
def can_assign(self):
for item in self.items:
if not item.can_assign():
return False
return True
class List(Literal):
"""Any list literal such as ``[1, 2, 3]``"""
fields = ('items',)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return [x.as_const(eval_ctx) for x in self.items]
class Dict(Literal):
"""Any dict literal such as ``{1: 2, 3: 4}``. The items must be a list of
:class:`Pair` nodes.
"""
fields = ('items',)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return dict(x.as_const(eval_ctx) for x in self.items)
class Pair(Helper):
"""A key, value pair for dicts."""
fields = ('key', 'value')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return self.key.as_const(eval_ctx), self.value.as_const(eval_ctx)
class Keyword(Helper):
"""A key, value pair for keyword arguments where key is a string."""
fields = ('key', 'value')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return self.key, self.value.as_const(eval_ctx)
class CondExpr(Expr):
"""A conditional expression (inline if expression). (``{{
foo if bar else baz }}``)
"""
fields = ('test', 'expr1', 'expr2')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if self.test.as_const(eval_ctx):
return self.expr1.as_const(eval_ctx)
# if we evaluate to an undefined object, we better do that at runtime
if self.expr2 is None:
raise Impossible()
return self.expr2.as_const(eval_ctx)
class Filter(Expr):
"""This node applies a filter on an expression. `name` is the name of
the filter, the rest of the fields are the same as for :class:`Call`.
If the `node` of a filter is `None` the contents of the last buffer are
filtered. Buffers are created by macros and filter blocks.
"""
fields = ('node', 'name', 'args', 'kwargs', 'dyn_args', 'dyn_kwargs')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if eval_ctx.volatile or self.node is None:
raise Impossible()
# we have to be careful here because we call filter_ below.
        # if this variable were called filter, 2to3 would wrap the
        # call in a list because it is assuming we are talking about the
# builtin filter function here which no longer returns a list in
# python 3. because of that, do not rename filter_ to filter!
filter_ = self.environment.filters.get(self.name)
if filter_ is None or getattr(filter_, 'contextfilter', False):
raise Impossible()
obj = self.node.as_const(eval_ctx)
args = [x.as_const(eval_ctx) for x in self.args]
if getattr(filter_, 'evalcontextfilter', False):
args.insert(0, eval_ctx)
elif getattr(filter_, 'environmentfilter', False):
args.insert(0, self.environment)
kwargs = dict(x.as_const(eval_ctx) for x in self.kwargs)
if self.dyn_args is not None:
try:
args.extend(self.dyn_args.as_const(eval_ctx))
except Exception:
raise Impossible()
if self.dyn_kwargs is not None:
try:
kwargs.update(self.dyn_kwargs.as_const(eval_ctx))
except Exception:
raise Impossible()
try:
return filter_(obj, *args, **kwargs)
except Exception:
raise Impossible()
class Test(Expr):
"""Applies a test on an expression. `name` is the name of the test, the
rest of the fields are the same as for :class:`Call`.
"""
fields = ('node', 'name', 'args', 'kwargs', 'dyn_args', 'dyn_kwargs')
class Call(Expr):
"""Calls an expression. `args` is a list of arguments, `kwargs` a list
of keyword arguments (list of :class:`Keyword` nodes), and `dyn_args`
and `dyn_kwargs` has to be either `None` or a node that is used as
node for dynamic positional (``*args``) or keyword (``**kwargs``)
arguments.
"""
fields = ('node', 'args', 'kwargs', 'dyn_args', 'dyn_kwargs')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if eval_ctx.volatile:
raise Impossible()
obj = self.node.as_const(eval_ctx)
# don't evaluate context functions
args = [x.as_const(eval_ctx) for x in self.args]
if isinstance(obj, _context_function_types):
if getattr(obj, 'contextfunction', False):
raise Impossible()
elif getattr(obj, 'evalcontextfunction', False):
args.insert(0, eval_ctx)
elif getattr(obj, 'environmentfunction', False):
args.insert(0, self.environment)
kwargs = dict(x.as_const(eval_ctx) for x in self.kwargs)
if self.dyn_args is not None:
try:
args.extend(self.dyn_args.as_const(eval_ctx))
except Exception:
raise Impossible()
if self.dyn_kwargs is not None:
try:
kwargs.update(self.dyn_kwargs.as_const(eval_ctx))
except Exception:
raise Impossible()
try:
return obj(*args, **kwargs)
except Exception:
raise Impossible()
class Getitem(Expr):
"""Get an attribute or item from an expression and prefer the item."""
fields = ('node', 'arg', 'ctx')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if self.ctx != 'load':
raise Impossible()
try:
return self.environment.getitem(self.node.as_const(eval_ctx),
self.arg.as_const(eval_ctx))
except Exception:
raise Impossible()
def can_assign(self):
return False
class Getattr(Expr):
"""Get an attribute or item from an expression that is a ascii-only
bytestring and prefer the attribute.
"""
fields = ('node', 'attr', 'ctx')
def as_const(self, eval_ctx=None):
if self.ctx != 'load':
raise Impossible()
try:
eval_ctx = get_eval_context(self, eval_ctx)
return self.environment.getattr(self.node.as_const(eval_ctx),
self.attr)
except Exception:
raise Impossible()
def can_assign(self):
return False
class Slice(Expr):
"""Represents a slice object. This must only be used as argument for
:class:`Subscript`.
"""
fields = ('start', 'stop', 'step')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
def const(obj):
if obj is None:
return None
return obj.as_const(eval_ctx)
return slice(const(self.start), const(self.stop), const(self.step))
class Concat(Expr):
"""Concatenates the list of expressions provided after converting them to
unicode.
"""
fields = ('nodes',)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return ''.join(text_type(x.as_const(eval_ctx)) for x in self.nodes)
class Compare(Expr):
"""Compares an expression with some other expressions. `ops` must be a
list of :class:`Operand`\s.
"""
fields = ('expr', 'ops')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
result = value = self.expr.as_const(eval_ctx)
try:
for op in self.ops:
new_value = op.expr.as_const(eval_ctx)
result = _cmpop_to_func[op.op](value, new_value)
value = new_value
except Exception:
raise Impossible()
return result
class Operand(Helper):
"""Holds an operator and an expression."""
fields = ('op', 'expr')
if __debug__:
Operand.__doc__ += '\nThe following operators are available: ' + \
', '.join(sorted('``%s``' % x for x in set(_binop_to_func) |
set(_uaop_to_func) | set(_cmpop_to_func)))
class Mul(BinExpr):
"""Multiplies the left with the right node."""
operator = '*'
class Div(BinExpr):
"""Divides the left by the right node."""
operator = '/'
class FloorDiv(BinExpr):
"""Divides the left by the right node and truncates conver the
result into an integer by truncating.
"""
operator = '//'
class Add(BinExpr):
"""Add the left to the right node."""
operator = '+'
class Sub(BinExpr):
"""Substract the right from the left node."""
operator = '-'
class Mod(BinExpr):
"""Left modulo right."""
operator = '%'
class Pow(BinExpr):
"""Left to the power of right."""
operator = '**'
class And(BinExpr):
"""Short circuited AND."""
operator = 'and'
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return self.left.as_const(eval_ctx) and self.right.as_const(eval_ctx)
class Or(BinExpr):
"""Short circuited OR."""
operator = 'or'
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return self.left.as_const(eval_ctx) or self.right.as_const(eval_ctx)
class Not(UnaryExpr):
"""Negate the expression."""
operator = 'not'
class Neg(UnaryExpr):
"""Make the expression negative."""
operator = '-'
class Pos(UnaryExpr):
"""Make the expression positive (noop for most expressions)"""
operator = '+'
# Helpers for extensions
class EnvironmentAttribute(Expr):
"""Loads an attribute from the environment object. This is useful for
extensions that want to call a callback stored on the environment.
"""
fields = ('name',)
class ExtensionAttribute(Expr):
"""Returns the attribute of an extension bound to the environment.
The identifier is the identifier of the :class:`Extension`.
This node is usually constructed by calling the
:meth:`~jinja2.ext.Extension.attr` method on an extension.
"""
fields = ('identifier', 'name')
class ImportedName(Expr):
"""If created with an import name the import name is returned on node
access. For example ``ImportedName('cgi.escape')`` returns the `escape`
function from the cgi module on evaluation. Imports are optimized by the
compiler so there is no need to assign them to local variables.
"""
fields = ('importname',)
class InternalName(Expr):
"""An internal name in the compiler. You cannot create these nodes
yourself but the parser provides a
:meth:`~jinja2.parser.Parser.free_identifier` method that creates
a new identifier for you. This identifier is not available from the
    template and is not treated specially by the compiler.
"""
fields = ('name',)
def __init__(self):
raise TypeError('Can\'t create internal names. Use the '
'`free_identifier` method on a parser.')
class MarkSafe(Expr):
"""Mark the wrapped expression as safe (wrap it as `Markup`)."""
fields = ('expr',)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return Markup(self.expr.as_const(eval_ctx))
class MarkSafeIfAutoescape(Expr):
"""Mark the wrapped expression as safe (wrap it as `Markup`) but
only if autoescaping is active.
.. versionadded:: 2.5
"""
fields = ('expr',)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if eval_ctx.volatile:
raise Impossible()
expr = self.expr.as_const(eval_ctx)
if eval_ctx.autoescape:
return Markup(expr)
return expr
class ContextReference(Expr):
"""Returns the current template context. It can be used like a
:class:`Name` node, with a ``'load'`` ctx and will return the
current :class:`~jinja2.runtime.Context` object.
Here an example that assigns the current template name to a
variable named `foo`::
Assign(Name('foo', ctx='store'),
Getattr(ContextReference(), 'name'))
"""
class Continue(Stmt):
"""Continue a loop."""
class Break(Stmt):
"""Break a loop."""
class Scope(Stmt):
"""An artificial scope."""
fields = ('body',)
class EvalContextModifier(Stmt):
"""Modifies the eval context. For each option that should be modified,
a :class:`Keyword` has to be added to the :attr:`options` list.
Example to change the `autoescape` setting::
EvalContextModifier(options=[Keyword('autoescape', Const(True))])
"""
fields = ('options',)
class ScopedEvalContextModifier(EvalContextModifier):
"""Modifies the eval context and reverts it later. Works exactly like
:class:`EvalContextModifier` but will only modify the
:class:`~jinja2.nodes.EvalContext` for nodes in the :attr:`body`.
"""
fields = ('body',)
# make sure nobody creates custom nodes
def _failing_new(*args, **kwargs):
raise TypeError('can\'t create custom node types')
NodeType.__new__ = staticmethod(_failing_new); del _failing_new
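# Illustrative standalone sketch: the guard above makes any attempt to define
# a custom node class fail loudly at class-definition time.
from jinja2 import nodes
try:
    class MyStmt(nodes.Stmt):
        fields = ('x',)
except TypeError as exc:
    print(exc)  # -> can't create custom node types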

View File

@ -1,68 +0,0 @@
# -*- coding: utf-8 -*-
"""
jinja2.optimizer
~~~~~~~~~~~~~~~~
The jinja optimizer is currently trying to constant fold a few expressions
and modify the AST in place so that it should be easier to evaluate it.
Because the AST does not contain all the scoping information and the
compiler has to find that out, we cannot do all the optimizations we
want. For example loop unrolling doesn't work because unrolled loops would
have a different scoping.
The solution would be a second syntax tree that has the scoping rules stored.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD.
"""
from jinja2 import nodes
from jinja2.visitor import NodeTransformer
def optimize(node, environment):
"""The context hint can be used to perform an static optimization
based on the context given."""
optimizer = Optimizer(environment)
return optimizer.visit(node)
class Optimizer(NodeTransformer):
def __init__(self, environment):
self.environment = environment
def visit_If(self, node):
"""Eliminate dead code."""
# do not optimize ifs that have a block inside so that it doesn't
# break super().
if node.find(nodes.Block) is not None:
return self.generic_visit(node)
try:
val = self.visit(node.test).as_const()
except nodes.Impossible:
return self.generic_visit(node)
if val:
body = node.body
else:
body = node.else_
result = []
for node in body:
result.extend(self.visit_list(node))
return result
def fold(self, node):
"""Do constant folding."""
node = self.generic_visit(node)
try:
return nodes.Const.from_untrusted(node.as_const(),
lineno=node.lineno,
environment=self.environment)
except nodes.Impossible:
return node
visit_Add = visit_Sub = visit_Mul = visit_Div = visit_FloorDiv = \
visit_Pow = visit_Mod = visit_And = visit_Or = visit_Pos = visit_Neg = \
visit_Not = visit_Compare = visit_Getitem = visit_Getattr = visit_Call = \
visit_Filter = visit_Test = visit_CondExpr = fold
del fold
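# Illustrative standalone sketch: constant folding in action; the template
# source and the environment are assumptions.
from jinja2 import Environment, nodes
from jinja2.optimizer import optimize
env = Environment()
ast = optimize(env.parse(u'{{ 2 ** 10 }}'), env)
folded = ast.body[0].nodes[0]
assert isinstance(folded, nodes.Const) and folded.value == 1024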

View File

@ -1,895 +0,0 @@
# -*- coding: utf-8 -*-
"""
jinja2.parser
~~~~~~~~~~~~~
Implements the template parser.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
from jinja2 import nodes
from jinja2.exceptions import TemplateSyntaxError, TemplateAssertionError
from jinja2.lexer import describe_token, describe_token_expr
from jinja2._compat import next, imap
#: statements that call into a dedicated parse_<name> method on the parser
_statement_keywords = frozenset(['for', 'if', 'block', 'extends', 'print',
'macro', 'include', 'from', 'import',
'set'])
_compare_operators = frozenset(['eq', 'ne', 'lt', 'lteq', 'gt', 'gteq'])
class Parser(object):
"""This is the central parsing class Jinja2 uses. It's passed to
extensions and can be used to parse expressions or statements.
"""
def __init__(self, environment, source, name=None, filename=None,
state=None):
self.environment = environment
self.stream = environment._tokenize(source, name, filename, state)
self.name = name
self.filename = filename
self.closed = False
self.extensions = {}
for extension in environment.iter_extensions():
for tag in extension.tags:
self.extensions[tag] = extension.parse
self._last_identifier = 0
self._tag_stack = []
self._end_token_stack = []
def fail(self, msg, lineno=None, exc=TemplateSyntaxError):
"""Convenience method that raises `exc` with the message, passed
line number or last line number as well as the current name and
filename.
"""
if lineno is None:
lineno = self.stream.current.lineno
raise exc(msg, lineno, self.name, self.filename)
def _fail_ut_eof(self, name, end_token_stack, lineno):
expected = []
for exprs in end_token_stack:
expected.extend(imap(describe_token_expr, exprs))
if end_token_stack:
currently_looking = ' or '.join(
"'%s'" % describe_token_expr(expr)
for expr in end_token_stack[-1])
else:
currently_looking = None
if name is None:
message = ['Unexpected end of template.']
else:
message = ['Encountered unknown tag \'%s\'.' % name]
if currently_looking:
if name is not None and name in expected:
message.append('You probably made a nesting mistake. Jinja '
'is expecting this tag, but currently looking '
'for %s.' % currently_looking)
else:
message.append('Jinja was looking for the following tags: '
'%s.' % currently_looking)
if self._tag_stack:
message.append('The innermost block that needs to be '
'closed is \'%s\'.' % self._tag_stack[-1])
self.fail(' '.join(message), lineno)
def fail_unknown_tag(self, name, lineno=None):
"""Called if the parser encounters an unknown tag. Tries to fail
with a human readable error message that could help to identify
the problem.
"""
return self._fail_ut_eof(name, self._end_token_stack, lineno)
def fail_eof(self, end_tokens=None, lineno=None):
"""Like fail_unknown_tag but for end of template situations."""
stack = list(self._end_token_stack)
if end_tokens is not None:
stack.append(end_tokens)
return self._fail_ut_eof(None, stack, lineno)
def is_tuple_end(self, extra_end_rules=None):
"""Are we at the end of a tuple?"""
if self.stream.current.type in ('variable_end', 'block_end', 'rparen'):
return True
elif extra_end_rules is not None:
return self.stream.current.test_any(extra_end_rules)
return False
def free_identifier(self, lineno=None):
"""Return a new free identifier as :class:`~jinja2.nodes.InternalName`."""
self._last_identifier += 1
rv = object.__new__(nodes.InternalName)
nodes.Node.__init__(rv, 'fi%d' % self._last_identifier, lineno=lineno)
return rv
def parse_statement(self):
"""Parse a single statement."""
token = self.stream.current
if token.type != 'name':
self.fail('tag name expected', token.lineno)
self._tag_stack.append(token.value)
pop_tag = True
try:
if token.value in _statement_keywords:
return getattr(self, 'parse_' + self.stream.current.value)()
if token.value == 'call':
return self.parse_call_block()
if token.value == 'filter':
return self.parse_filter_block()
ext = self.extensions.get(token.value)
if ext is not None:
return ext(self)
# did not work out, remove the token we pushed by accident
# from the stack so that the unknown tag fail function can
# produce a proper error message.
self._tag_stack.pop()
pop_tag = False
self.fail_unknown_tag(token.value, token.lineno)
finally:
if pop_tag:
self._tag_stack.pop()
def parse_statements(self, end_tokens, drop_needle=False):
"""Parse multiple statements into a list until one of the end tokens
is reached. This is used to parse the body of statements as it also
parses template data if appropriate. The parser checks first if the
current token is a colon and skips it if there is one. Then it checks
        for the block end and parses until one of the `end_tokens` is
reached. Per default the active token in the stream at the end of
the call is the matched end token. If this is not wanted `drop_needle`
can be set to `True` and the end token is removed.
"""
# the first token may be a colon for python compatibility
self.stream.skip_if('colon')
# in the future it would be possible to add whole code sections
# by adding some sort of end of statement token and parsing those here.
self.stream.expect('block_end')
result = self.subparse(end_tokens)
# we reached the end of the template too early, the subparser
# does not check for this, so we do that now
if self.stream.current.type == 'eof':
self.fail_eof(end_tokens)
if drop_needle:
next(self.stream)
return result
def parse_set(self):
"""Parse an assign statement."""
lineno = next(self.stream).lineno
target = self.parse_assign_target()
self.stream.expect('assign')
expr = self.parse_tuple()
return nodes.Assign(target, expr, lineno=lineno)
def parse_for(self):
"""Parse a for loop."""
lineno = self.stream.expect('name:for').lineno
target = self.parse_assign_target(extra_end_rules=('name:in',))
self.stream.expect('name:in')
iter = self.parse_tuple(with_condexpr=False,
extra_end_rules=('name:recursive',))
test = None
if self.stream.skip_if('name:if'):
test = self.parse_expression()
recursive = self.stream.skip_if('name:recursive')
body = self.parse_statements(('name:endfor', 'name:else'))
if next(self.stream).value == 'endfor':
else_ = []
else:
else_ = self.parse_statements(('name:endfor',), drop_needle=True)
return nodes.For(target, iter, body, else_, test,
recursive, lineno=lineno)
def parse_if(self):
"""Parse an if construct."""
node = result = nodes.If(lineno=self.stream.expect('name:if').lineno)
while 1:
node.test = self.parse_tuple(with_condexpr=False)
node.body = self.parse_statements(('name:elif', 'name:else',
'name:endif'))
token = next(self.stream)
if token.test('name:elif'):
new_node = nodes.If(lineno=self.stream.current.lineno)
node.else_ = [new_node]
node = new_node
continue
elif token.test('name:else'):
node.else_ = self.parse_statements(('name:endif',),
drop_needle=True)
else:
node.else_ = []
break
return result
def parse_block(self):
node = nodes.Block(lineno=next(self.stream).lineno)
node.name = self.stream.expect('name').value
node.scoped = self.stream.skip_if('name:scoped')
# common problem people encounter when switching from django
# to jinja. we do not support hyphens in block names, so let's
# raise a nicer error message in that case.
if self.stream.current.type == 'sub':
self.fail('Block names in Jinja have to be valid Python '
'identifiers and may not contain hyphens, use an '
'underscore instead.')
node.body = self.parse_statements(('name:endblock',), drop_needle=True)
self.stream.skip_if('name:' + node.name)
return node
def parse_extends(self):
node = nodes.Extends(lineno=next(self.stream).lineno)
node.template = self.parse_expression()
return node
def parse_import_context(self, node, default):
if self.stream.current.test_any('name:with', 'name:without') and \
self.stream.look().test('name:context'):
node.with_context = next(self.stream).value == 'with'
self.stream.skip()
else:
node.with_context = default
return node
def parse_include(self):
node = nodes.Include(lineno=next(self.stream).lineno)
node.template = self.parse_expression()
if self.stream.current.test('name:ignore') and \
self.stream.look().test('name:missing'):
node.ignore_missing = True
self.stream.skip(2)
else:
node.ignore_missing = False
return self.parse_import_context(node, True)
def parse_import(self):
node = nodes.Import(lineno=next(self.stream).lineno)
node.template = self.parse_expression()
self.stream.expect('name:as')
node.target = self.parse_assign_target(name_only=True).name
return self.parse_import_context(node, False)
def parse_from(self):
node = nodes.FromImport(lineno=next(self.stream).lineno)
node.template = self.parse_expression()
self.stream.expect('name:import')
node.names = []
def parse_context():
if self.stream.current.value in ('with', 'without') and \
self.stream.look().test('name:context'):
node.with_context = next(self.stream).value == 'with'
self.stream.skip()
return True
return False
while 1:
if node.names:
self.stream.expect('comma')
if self.stream.current.type == 'name':
if parse_context():
break
target = self.parse_assign_target(name_only=True)
if target.name.startswith('_'):
                    self.fail('names starting with an underscore cannot '
                              'be imported', target.lineno,
exc=TemplateAssertionError)
if self.stream.skip_if('name:as'):
alias = self.parse_assign_target(name_only=True)
node.names.append((target.name, alias.name))
else:
node.names.append(target.name)
if parse_context() or self.stream.current.type != 'comma':
break
else:
break
if not hasattr(node, 'with_context'):
node.with_context = False
self.stream.skip_if('comma')
return node
def parse_signature(self, node):
node.args = args = []
node.defaults = defaults = []
self.stream.expect('lparen')
while self.stream.current.type != 'rparen':
if args:
self.stream.expect('comma')
arg = self.parse_assign_target(name_only=True)
arg.set_ctx('param')
if self.stream.skip_if('assign'):
defaults.append(self.parse_expression())
args.append(arg)
self.stream.expect('rparen')
def parse_call_block(self):
node = nodes.CallBlock(lineno=next(self.stream).lineno)
if self.stream.current.type == 'lparen':
self.parse_signature(node)
else:
node.args = []
node.defaults = []
node.call = self.parse_expression()
if not isinstance(node.call, nodes.Call):
self.fail('expected call', node.lineno)
node.body = self.parse_statements(('name:endcall',), drop_needle=True)
return node
def parse_filter_block(self):
node = nodes.FilterBlock(lineno=next(self.stream).lineno)
node.filter = self.parse_filter(None, start_inline=True)
node.body = self.parse_statements(('name:endfilter',),
drop_needle=True)
return node
def parse_macro(self):
node = nodes.Macro(lineno=next(self.stream).lineno)
node.name = self.parse_assign_target(name_only=True).name
self.parse_signature(node)
node.body = self.parse_statements(('name:endmacro',),
drop_needle=True)
return node
def parse_print(self):
node = nodes.Output(lineno=next(self.stream).lineno)
node.nodes = []
while self.stream.current.type != 'block_end':
if node.nodes:
self.stream.expect('comma')
node.nodes.append(self.parse_expression())
return node
def parse_assign_target(self, with_tuple=True, name_only=False,
extra_end_rules=None):
"""Parse an assignment target. As Jinja2 allows assignments to
tuples, this function can parse all allowed assignment targets. Per
        default assignments to tuples are parsed; that can be disabled, however,
by setting `with_tuple` to `False`. If only assignments to names are
wanted `name_only` can be set to `True`. The `extra_end_rules`
parameter is forwarded to the tuple parsing function.
"""
if name_only:
token = self.stream.expect('name')
target = nodes.Name(token.value, 'store', lineno=token.lineno)
else:
if with_tuple:
target = self.parse_tuple(simplified=True,
extra_end_rules=extra_end_rules)
else:
target = self.parse_primary()
target.set_ctx('store')
if not target.can_assign():
self.fail('can\'t assign to %r' % target.__class__.
__name__.lower(), target.lineno)
return target
def parse_expression(self, with_condexpr=True):
"""Parse an expression. Per default all expressions are parsed, if
the optional `with_condexpr` parameter is set to `False` conditional
expressions are not parsed.
"""
if with_condexpr:
return self.parse_condexpr()
return self.parse_or()
def parse_condexpr(self):
lineno = self.stream.current.lineno
expr1 = self.parse_or()
while self.stream.skip_if('name:if'):
expr2 = self.parse_or()
if self.stream.skip_if('name:else'):
expr3 = self.parse_condexpr()
else:
expr3 = None
expr1 = nodes.CondExpr(expr2, expr1, expr3, lineno=lineno)
lineno = self.stream.current.lineno
return expr1
def parse_or(self):
lineno = self.stream.current.lineno
left = self.parse_and()
while self.stream.skip_if('name:or'):
right = self.parse_and()
left = nodes.Or(left, right, lineno=lineno)
lineno = self.stream.current.lineno
return left
def parse_and(self):
lineno = self.stream.current.lineno
left = self.parse_not()
while self.stream.skip_if('name:and'):
right = self.parse_not()
left = nodes.And(left, right, lineno=lineno)
lineno = self.stream.current.lineno
return left
def parse_not(self):
if self.stream.current.test('name:not'):
lineno = next(self.stream).lineno
return nodes.Not(self.parse_not(), lineno=lineno)
return self.parse_compare()
def parse_compare(self):
lineno = self.stream.current.lineno
expr = self.parse_add()
ops = []
while 1:
token_type = self.stream.current.type
if token_type in _compare_operators:
next(self.stream)
ops.append(nodes.Operand(token_type, self.parse_add()))
elif self.stream.skip_if('name:in'):
ops.append(nodes.Operand('in', self.parse_add()))
elif self.stream.current.test('name:not') and \
self.stream.look().test('name:in'):
self.stream.skip(2)
ops.append(nodes.Operand('notin', self.parse_add()))
else:
break
lineno = self.stream.current.lineno
if not ops:
return expr
return nodes.Compare(expr, ops, lineno=lineno)
def parse_add(self):
lineno = self.stream.current.lineno
left = self.parse_sub()
while self.stream.current.type == 'add':
next(self.stream)
right = self.parse_sub()
left = nodes.Add(left, right, lineno=lineno)
lineno = self.stream.current.lineno
return left
def parse_sub(self):
lineno = self.stream.current.lineno
left = self.parse_concat()
while self.stream.current.type == 'sub':
next(self.stream)
right = self.parse_concat()
left = nodes.Sub(left, right, lineno=lineno)
lineno = self.stream.current.lineno
return left
def parse_concat(self):
lineno = self.stream.current.lineno
args = [self.parse_mul()]
while self.stream.current.type == 'tilde':
next(self.stream)
args.append(self.parse_mul())
if len(args) == 1:
return args[0]
return nodes.Concat(args, lineno=lineno)
def parse_mul(self):
lineno = self.stream.current.lineno
left = self.parse_div()
while self.stream.current.type == 'mul':
next(self.stream)
right = self.parse_div()
left = nodes.Mul(left, right, lineno=lineno)
lineno = self.stream.current.lineno
return left
def parse_div(self):
lineno = self.stream.current.lineno
left = self.parse_floordiv()
while self.stream.current.type == 'div':
next(self.stream)
right = self.parse_floordiv()
left = nodes.Div(left, right, lineno=lineno)
lineno = self.stream.current.lineno
return left
def parse_floordiv(self):
lineno = self.stream.current.lineno
left = self.parse_mod()
while self.stream.current.type == 'floordiv':
next(self.stream)
right = self.parse_mod()
left = nodes.FloorDiv(left, right, lineno=lineno)
lineno = self.stream.current.lineno
return left
def parse_mod(self):
lineno = self.stream.current.lineno
left = self.parse_pow()
while self.stream.current.type == 'mod':
next(self.stream)
right = self.parse_pow()
left = nodes.Mod(left, right, lineno=lineno)
lineno = self.stream.current.lineno
return left
def parse_pow(self):
lineno = self.stream.current.lineno
left = self.parse_unary()
while self.stream.current.type == 'pow':
next(self.stream)
right = self.parse_unary()
left = nodes.Pow(left, right, lineno=lineno)
lineno = self.stream.current.lineno
return left
def parse_unary(self, with_filter=True):
token_type = self.stream.current.type
lineno = self.stream.current.lineno
if token_type == 'sub':
next(self.stream)
node = nodes.Neg(self.parse_unary(False), lineno=lineno)
elif token_type == 'add':
next(self.stream)
node = nodes.Pos(self.parse_unary(False), lineno=lineno)
else:
node = self.parse_primary()
node = self.parse_postfix(node)
if with_filter:
node = self.parse_filter_expr(node)
return node
def parse_primary(self):
token = self.stream.current
if token.type == 'name':
if token.value in ('true', 'false', 'True', 'False'):
node = nodes.Const(token.value in ('true', 'True'),
lineno=token.lineno)
elif token.value in ('none', 'None'):
node = nodes.Const(None, lineno=token.lineno)
else:
node = nodes.Name(token.value, 'load', lineno=token.lineno)
next(self.stream)
elif token.type == 'string':
next(self.stream)
buf = [token.value]
lineno = token.lineno
while self.stream.current.type == 'string':
buf.append(self.stream.current.value)
next(self.stream)
node = nodes.Const(''.join(buf), lineno=lineno)
elif token.type in ('integer', 'float'):
next(self.stream)
node = nodes.Const(token.value, lineno=token.lineno)
elif token.type == 'lparen':
next(self.stream)
node = self.parse_tuple(explicit_parentheses=True)
self.stream.expect('rparen')
elif token.type == 'lbracket':
node = self.parse_list()
elif token.type == 'lbrace':
node = self.parse_dict()
else:
self.fail("unexpected '%s'" % describe_token(token), token.lineno)
return node
def parse_tuple(self, simplified=False, with_condexpr=True,
extra_end_rules=None, explicit_parentheses=False):
"""Works like `parse_expression` but if multiple expressions are
delimited by a comma a :class:`~jinja2.nodes.Tuple` node is created.
This method could also return a regular expression instead of a tuple
        if no commas were found.
The default parsing mode is a full tuple. If `simplified` is `True`
        only names and literals are parsed.  The `with_condexpr` parameter is
forwarded to :meth:`parse_expression`.
Because tuples do not require delimiters and may end in a bogus comma
an extra hint is needed that marks the end of a tuple. For example
for loops support tuples between `for` and `in`. In that case the
`extra_end_rules` is set to ``['name:in']``.
`explicit_parentheses` is true if the parsing was triggered by an
expression in parentheses. This is used to figure out if an empty
tuple is a valid expression or not.
"""
lineno = self.stream.current.lineno
if simplified:
parse = self.parse_primary
elif with_condexpr:
parse = self.parse_expression
else:
parse = lambda: self.parse_expression(with_condexpr=False)
args = []
is_tuple = False
while 1:
if args:
self.stream.expect('comma')
if self.is_tuple_end(extra_end_rules):
break
args.append(parse())
if self.stream.current.type == 'comma':
is_tuple = True
else:
break
lineno = self.stream.current.lineno
if not is_tuple:
if args:
return args[0]
# if we don't have explicit parentheses, an empty tuple is
# not a valid expression. This would mean nothing (literally
# nothing) in the spot of an expression would be an empty
# tuple.
if not explicit_parentheses:
self.fail('Expected an expression, got \'%s\'' %
describe_token(self.stream.current))
return nodes.Tuple(args, 'load', lineno=lineno)
def parse_list(self):
token = self.stream.expect('lbracket')
items = []
while self.stream.current.type != 'rbracket':
if items:
self.stream.expect('comma')
if self.stream.current.type == 'rbracket':
break
items.append(self.parse_expression())
self.stream.expect('rbracket')
return nodes.List(items, lineno=token.lineno)
def parse_dict(self):
token = self.stream.expect('lbrace')
items = []
while self.stream.current.type != 'rbrace':
if items:
self.stream.expect('comma')
if self.stream.current.type == 'rbrace':
break
key = self.parse_expression()
self.stream.expect('colon')
value = self.parse_expression()
items.append(nodes.Pair(key, value, lineno=key.lineno))
self.stream.expect('rbrace')
return nodes.Dict(items, lineno=token.lineno)
def parse_postfix(self, node):
while 1:
token_type = self.stream.current.type
if token_type == 'dot' or token_type == 'lbracket':
node = self.parse_subscript(node)
# calls are valid both after postfix expressions (getattr
# and getitem) as well as filters and tests
elif token_type == 'lparen':
node = self.parse_call(node)
else:
break
return node
def parse_filter_expr(self, node):
while 1:
token_type = self.stream.current.type
if token_type == 'pipe':
node = self.parse_filter(node)
elif token_type == 'name' and self.stream.current.value == 'is':
node = self.parse_test(node)
# calls are valid both after postfix expressions (getattr
# and getitem) as well as filters and tests
elif token_type == 'lparen':
node = self.parse_call(node)
else:
break
return node
def parse_subscript(self, node):
token = next(self.stream)
if token.type == 'dot':
attr_token = self.stream.current
next(self.stream)
if attr_token.type == 'name':
return nodes.Getattr(node, attr_token.value, 'load',
lineno=token.lineno)
elif attr_token.type != 'integer':
self.fail('expected name or number', attr_token.lineno)
arg = nodes.Const(attr_token.value, lineno=attr_token.lineno)
return nodes.Getitem(node, arg, 'load', lineno=token.lineno)
if token.type == 'lbracket':
args = []
while self.stream.current.type != 'rbracket':
if args:
self.stream.expect('comma')
args.append(self.parse_subscribed())
self.stream.expect('rbracket')
if len(args) == 1:
arg = args[0]
else:
arg = nodes.Tuple(args, 'load', lineno=token.lineno)
return nodes.Getitem(node, arg, 'load', lineno=token.lineno)
        self.fail('expected subscript expression', token.lineno)
def parse_subscribed(self):
lineno = self.stream.current.lineno
if self.stream.current.type == 'colon':
next(self.stream)
args = [None]
else:
node = self.parse_expression()
if self.stream.current.type != 'colon':
return node
next(self.stream)
args = [node]
if self.stream.current.type == 'colon':
args.append(None)
elif self.stream.current.type not in ('rbracket', 'comma'):
args.append(self.parse_expression())
else:
args.append(None)
if self.stream.current.type == 'colon':
next(self.stream)
if self.stream.current.type not in ('rbracket', 'comma'):
args.append(self.parse_expression())
else:
args.append(None)
else:
args.append(None)
return nodes.Slice(lineno=lineno, *args)
def parse_call(self, node):
token = self.stream.expect('lparen')
args = []
kwargs = []
dyn_args = dyn_kwargs = None
require_comma = False
def ensure(expr):
if not expr:
self.fail('invalid syntax for function call expression',
token.lineno)
while self.stream.current.type != 'rparen':
if require_comma:
self.stream.expect('comma')
# support for trailing comma
if self.stream.current.type == 'rparen':
break
if self.stream.current.type == 'mul':
ensure(dyn_args is None and dyn_kwargs is None)
next(self.stream)
dyn_args = self.parse_expression()
elif self.stream.current.type == 'pow':
ensure(dyn_kwargs is None)
next(self.stream)
dyn_kwargs = self.parse_expression()
else:
ensure(dyn_args is None and dyn_kwargs is None)
if self.stream.current.type == 'name' and \
self.stream.look().type == 'assign':
key = self.stream.current.value
self.stream.skip(2)
value = self.parse_expression()
kwargs.append(nodes.Keyword(key, value,
lineno=value.lineno))
else:
ensure(not kwargs)
args.append(self.parse_expression())
require_comma = True
self.stream.expect('rparen')
if node is None:
return args, kwargs, dyn_args, dyn_kwargs
return nodes.Call(node, args, kwargs, dyn_args, dyn_kwargs,
lineno=token.lineno)
def parse_filter(self, node, start_inline=False):
while self.stream.current.type == 'pipe' or start_inline:
if not start_inline:
next(self.stream)
token = self.stream.expect('name')
name = token.value
while self.stream.current.type == 'dot':
next(self.stream)
name += '.' + self.stream.expect('name').value
if self.stream.current.type == 'lparen':
args, kwargs, dyn_args, dyn_kwargs = self.parse_call(None)
else:
args = []
kwargs = []
dyn_args = dyn_kwargs = None
node = nodes.Filter(node, name, args, kwargs, dyn_args,
dyn_kwargs, lineno=token.lineno)
start_inline = False
return node
def parse_test(self, node):
token = next(self.stream)
if self.stream.current.test('name:not'):
next(self.stream)
negated = True
else:
negated = False
name = self.stream.expect('name').value
while self.stream.current.type == 'dot':
next(self.stream)
name += '.' + self.stream.expect('name').value
dyn_args = dyn_kwargs = None
kwargs = []
if self.stream.current.type == 'lparen':
args, kwargs, dyn_args, dyn_kwargs = self.parse_call(None)
elif self.stream.current.type in ('name', 'string', 'integer',
'float', 'lparen', 'lbracket',
'lbrace') and not \
self.stream.current.test_any('name:else', 'name:or',
'name:and'):
if self.stream.current.test('name:is'):
self.fail('You cannot chain multiple tests with is')
args = [self.parse_expression()]
else:
args = []
node = nodes.Test(node, name, args, kwargs, dyn_args,
dyn_kwargs, lineno=token.lineno)
if negated:
node = nodes.Not(node, lineno=token.lineno)
return node
def subparse(self, end_tokens=None):
body = []
data_buffer = []
add_data = data_buffer.append
if end_tokens is not None:
self._end_token_stack.append(end_tokens)
def flush_data():
if data_buffer:
lineno = data_buffer[0].lineno
body.append(nodes.Output(data_buffer[:], lineno=lineno))
del data_buffer[:]
try:
while self.stream:
token = self.stream.current
if token.type == 'data':
if token.value:
add_data(nodes.TemplateData(token.value,
lineno=token.lineno))
next(self.stream)
elif token.type == 'variable_begin':
next(self.stream)
add_data(self.parse_tuple(with_condexpr=True))
self.stream.expect('variable_end')
elif token.type == 'block_begin':
flush_data()
next(self.stream)
if end_tokens is not None and \
self.stream.current.test_any(*end_tokens):
return body
rv = self.parse_statement()
if isinstance(rv, list):
body.extend(rv)
else:
body.append(rv)
self.stream.expect('block_end')
else:
raise AssertionError('internal parsing error')
flush_data()
finally:
if end_tokens is not None:
self._end_token_stack.pop()
return body
def parse(self):
"""Parse the whole template into a `Template` node."""
result = nodes.Template(self.subparse(), lineno=1)
result.set_environment(self.environment)
return result
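# Illustrative standalone sketch: Environment.parse drives this Parser and
# returns the Template node produced by parse() above; the template source
# is made up.
from jinja2 import Environment, nodes
env = Environment()
ast = env.parse(u'{% for item in seq %}{{ item }}{% endfor %}')
loop = ast.find(nodes.For)
assert loop.target.name == 'item' and loop.iter.name == 'seq'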

View File

@ -1,581 +0,0 @@
# -*- coding: utf-8 -*-
"""
jinja2.runtime
~~~~~~~~~~~~~~
Runtime helpers.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD.
"""
from itertools import chain
from jinja2.nodes import EvalContext, _context_function_types
from jinja2.utils import Markup, soft_unicode, escape, missing, concat, \
internalcode, object_type_repr
from jinja2.exceptions import UndefinedError, TemplateRuntimeError, \
TemplateNotFound
from jinja2._compat import next, imap, text_type, iteritems, \
implements_iterator, implements_to_string, string_types, PY2
# these variables are exported to the template runtime
__all__ = ['LoopContext', 'TemplateReference', 'Macro', 'Markup',
'TemplateRuntimeError', 'missing', 'concat', 'escape',
'markup_join', 'unicode_join', 'to_string', 'identity',
'TemplateNotFound']
#: the name of the function that is used to convert something into
#: a string. We can just use the text type here.
to_string = text_type
#: the identity function. Useful for certain things in the environment
identity = lambda x: x
_last_iteration = object()
def markup_join(seq):
"""Concatenation that escapes if necessary and converts to unicode."""
buf = []
iterator = imap(soft_unicode, seq)
for arg in iterator:
buf.append(arg)
if hasattr(arg, '__html__'):
return Markup(u'').join(chain(buf, iterator))
return concat(buf)
def unicode_join(seq):
"""Simple args to unicode conversion and concatenation."""
return concat(imap(text_type, seq))
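# Editor's note: an illustrative sketch, not part of the original module.
# It shows what markup_join is for: as soon as one element is markup-aware
# (defines __html__), joining switches to Markup so the plain pieces get
# escaped; the values are made up.
def _example_markup_join():
    result = markup_join(['<b>', Markup(u'<i>safe</i>')])
    # -> Markup(u'&lt;b&gt;<i>safe</i>'): the raw string is escaped, the
    #    Markup element is passed through unchanged.
    return result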
def new_context(environment, template_name, blocks, vars=None,
shared=None, globals=None, locals=None):
"""Internal helper to for context creation."""
if vars is None:
vars = {}
if shared:
parent = vars
else:
parent = dict(globals or (), **vars)
if locals:
# if the parent is shared a copy should be created because
# we don't want to modify the dict passed
if shared:
parent = dict(parent)
for key, value in iteritems(locals):
if key[:2] == 'l_' and value is not missing:
parent[key[2:]] = value
return Context(environment, parent, template_name, blocks)
class TemplateReference(object):
"""The `self` in templates."""
def __init__(self, context):
self.__context = context
def __getitem__(self, name):
blocks = self.__context.blocks[name]
return BlockReference(name, self.__context, blocks, 0)
def __repr__(self):
return '<%s %r>' % (
self.__class__.__name__,
self.__context.name
)
class Context(object):
"""The template context holds the variables of a template. It stores the
values passed to the template and also the names the template exports.
Creating instances is neither supported nor useful as it's created
automatically at various stages of the template evaluation and should not
be created by hand.
The context is immutable. Modifications on :attr:`parent` **must not**
happen and modifications on :attr:`vars` are allowed from generated
template code only. Template filters and global functions marked as
:func:`contextfunction`\s get the active context passed as first argument
and are allowed to access the context read-only.
The template context supports read only dict operations (`get`,
`keys`, `values`, `items`, `iterkeys`, `itervalues`, `iteritems`,
`__getitem__`, `__contains__`). Additionally there is a :meth:`resolve`
method that doesn't fail with a `KeyError` but returns an
:class:`Undefined` object for missing variables.
"""
__slots__ = ('parent', 'vars', 'environment', 'eval_ctx', 'exported_vars',
'name', 'blocks', '__weakref__')
def __init__(self, environment, parent, name, blocks):
self.parent = parent
self.vars = {}
self.environment = environment
self.eval_ctx = EvalContext(self.environment, name)
self.exported_vars = set()
self.name = name
# create the initial mapping of blocks. Whenever template inheritance
# takes place the runtime will update this mapping with the new blocks
# from the template.
self.blocks = dict((k, [v]) for k, v in iteritems(blocks))
def super(self, name, current):
"""Render a parent block."""
try:
blocks = self.blocks[name]
index = blocks.index(current) + 1
blocks[index]
except LookupError:
return self.environment.undefined('there is no parent block '
'called %r.' % name,
name='super')
return BlockReference(name, self, blocks, index)
def get(self, key, default=None):
"""Returns an item from the template context, if it doesn't exist
`default` is returned.
"""
try:
return self[key]
except KeyError:
return default
def resolve(self, key):
"""Looks up a variable like `__getitem__` or `get` but returns an
:class:`Undefined` object with the name of the variable looked up.
"""
if key in self.vars:
return self.vars[key]
if key in self.parent:
return self.parent[key]
return self.environment.undefined(name=key)
def get_exported(self):
"""Get a new dict with the exported variables."""
return dict((k, self.vars[k]) for k in self.exported_vars)
def get_all(self):
"""Return a copy of the complete context as dict including the
exported variables.
"""
return dict(self.parent, **self.vars)
@internalcode
def call(__self, __obj, *args, **kwargs):
"""Call the callable with the arguments and keyword arguments
provided but inject the active context or environment as first
argument if the callable is a :func:`contextfunction` or
:func:`environmentfunction`.
"""
if __debug__:
__traceback_hide__ = True
# Allow callable classes to take a context
fn = __obj.__call__
for fn_type in ('contextfunction',
'evalcontextfunction',
'environmentfunction'):
if hasattr(fn, fn_type):
__obj = fn
break
if isinstance(__obj, _context_function_types):
if getattr(__obj, 'contextfunction', 0):
args = (__self,) + args
elif getattr(__obj, 'evalcontextfunction', 0):
args = (__self.eval_ctx,) + args
elif getattr(__obj, 'environmentfunction', 0):
args = (__self.environment,) + args
try:
return __obj(*args, **kwargs)
except StopIteration:
return __self.environment.undefined('value was undefined because '
'a callable raised a '
'StopIteration exception')
def derived(self, locals=None):
"""Internal helper function to create a derived context."""
context = new_context(self.environment, self.name, {},
self.parent, True, None, locals)
context.vars.update(self.vars)
context.eval_ctx = self.eval_ctx
context.blocks.update((k, list(v)) for k, v in iteritems(self.blocks))
return context
def _all(meth):
proxy = lambda self: getattr(self.get_all(), meth)()
proxy.__doc__ = getattr(dict, meth).__doc__
proxy.__name__ = meth
return proxy
keys = _all('keys')
values = _all('values')
items = _all('items')
# not available on python 3
if PY2:
iterkeys = _all('iterkeys')
itervalues = _all('itervalues')
iteritems = _all('iteritems')
del _all
def __contains__(self, name):
return name in self.vars or name in self.parent
def __getitem__(self, key):
"""Lookup a variable or raise `KeyError` if the variable is
undefined.
"""
item = self.resolve(key)
if isinstance(item, Undefined):
raise KeyError(key)
return item
def __repr__(self):
return '<%s %s of %r>' % (
self.__class__.__name__,
repr(self.get_all()),
self.name
)
# register the context as mapping if possible
try:
from collections import Mapping
Mapping.register(Context)
except ImportError:
pass
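# Editor's note: an illustrative sketch, not part of the original module.
# It exercises the read-only lookup behaviour described in the Context
# docstring; the environment and variables are made up.
def _example_context_lookup():
    from jinja2 import Environment
    ctx = new_context(Environment(), 'example', {}, vars={'user': 'jane'})
    assert ctx.resolve('user') == 'jane'
    assert ctx.get('missing', 'fallback') == 'fallback'
    # resolve() never raises; unknown names come back as Undefined instances
    return ctx.resolve('missing')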
class BlockReference(object):
"""One block on a template reference."""
def __init__(self, name, context, stack, depth):
self.name = name
self._context = context
self._stack = stack
self._depth = depth
@property
def super(self):
"""Super the block."""
if self._depth + 1 >= len(self._stack):
return self._context.environment. \
undefined('there is no parent block called %r.' %
self.name, name='super')
return BlockReference(self.name, self._context, self._stack,
self._depth + 1)
@internalcode
def __call__(self):
rv = concat(self._stack[self._depth](self._context))
if self._context.eval_ctx.autoescape:
rv = Markup(rv)
return rv
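# Editor's note: an illustrative sketch, not part of the original module.
# It shows BlockReference in action: {{ super() }} inside an overriding
# block renders the parent block; the loader and templates are made up.
def _example_block_super():
    from jinja2 import Environment, DictLoader
    env = Environment(loader=DictLoader({
        'base.html': '{% block body %}base{% endblock %}',
        'child.html': '{% extends "base.html" %}'
                      '{% block body %}child + {{ super() }}{% endblock %}',
    }))
    return env.get_template('child.html').render()  # u'child + base'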
class LoopContext(object):
"""A loop context for dynamic iteration."""
def __init__(self, iterable, recurse=None, depth0=0):
self._iterator = iter(iterable)
self._recurse = recurse
self._after = self._safe_next()
self.index0 = -1
self.depth0 = depth0
# try to get the length of the iterable early. This must be done
# here because there are some broken iterators around where the
# __len__ is the number of iterations left (I'm looking at your
# listreverseiterator!).
try:
self._length = len(iterable)
except (TypeError, AttributeError):
self._length = None
def cycle(self, *args):
"""Cycles among the arguments with the current loop index."""
if not args:
raise TypeError('no items for cycling given')
return args[self.index0 % len(args)]
first = property(lambda x: x.index0 == 0)
last = property(lambda x: x._after is _last_iteration)
index = property(lambda x: x.index0 + 1)
revindex = property(lambda x: x.length - x.index0)
revindex0 = property(lambda x: x.length - x.index)
depth = property(lambda x: x.depth0 + 1)
def __len__(self):
return self.length
def __iter__(self):
return LoopContextIterator(self)
def _safe_next(self):
try:
return next(self._iterator)
except StopIteration:
return _last_iteration
@internalcode
def loop(self, iterable):
if self._recurse is None:
raise TypeError('Tried to call non recursive loop. Maybe you '
"forgot the 'recursive' modifier.")
return self._recurse(iterable, self._recurse, self.depth0 + 1)
# a nifty trick to enhance the error message if someone tried to call
# the loop without or with too many arguments.
__call__ = loop
del loop
@property
def length(self):
if self._length is None:
# if it was not possible to get the length of the iterator when
# the loop context was created (i.e. iterating over a generator)
# we have to convert the iterable into a sequence and use the
# length of that.
iterable = tuple(self._iterator)
self._iterator = iter(iterable)
self._length = len(iterable) + self.index0 + 1
return self._length
def __repr__(self):
return '<%s %r/%r>' % (
self.__class__.__name__,
self.index,
self.length
)
@implements_iterator
class LoopContextIterator(object):
"""The iterator for a loop context."""
__slots__ = ('context',)
def __init__(self, context):
self.context = context
def __iter__(self):
return self
def __next__(self):
ctx = self.context
ctx.index0 += 1
if ctx._after is _last_iteration:
raise StopIteration()
next_elem = ctx._after
ctx._after = ctx._safe_next()
return next_elem, ctx
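# Editor's note: an illustrative sketch, not part of the original module.
# Iterating a LoopContext directly shows the attributes the `loop` variable
# exposes inside a {% for %} block; the items are made up.
def _example_loop_context():
    seen = []
    for item, loop in LoopContext(['a', 'b', 'c']):
        seen.append((item, loop.index, loop.first, loop.last))
    # [('a', 1, True, False), ('b', 2, False, False), ('c', 3, False, True)]
    return seen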
class Macro(object):
"""Wraps a macro function."""
def __init__(self, environment, func, name, arguments, defaults,
catch_kwargs, catch_varargs, caller):
self._environment = environment
self._func = func
self._argument_count = len(arguments)
self.name = name
self.arguments = arguments
self.defaults = defaults
self.catch_kwargs = catch_kwargs
self.catch_varargs = catch_varargs
self.caller = caller
@internalcode
def __call__(self, *args, **kwargs):
# try to consume the positional arguments
arguments = list(args[:self._argument_count])
off = len(arguments)
# if the number of arguments consumed is not the number of
# arguments expected we start filling in keyword arguments
# and defaults.
if off != self._argument_count:
for idx, name in enumerate(self.arguments[len(arguments):]):
try:
value = kwargs.pop(name)
except KeyError:
try:
value = self.defaults[idx - self._argument_count + off]
except IndexError:
value = self._environment.undefined(
'parameter %r was not provided' % name, name=name)
arguments.append(value)
# it's important that the order of these arguments does not change
# if not also changed in the compiler's `function_scoping` method.
# the order is caller, keyword arguments, positional arguments!
if self.caller:
caller = kwargs.pop('caller', None)
if caller is None:
caller = self._environment.undefined('No caller defined',
name='caller')
arguments.append(caller)
if self.catch_kwargs:
arguments.append(kwargs)
elif kwargs:
raise TypeError('macro %r takes no keyword argument %r' %
(self.name, next(iter(kwargs))))
if self.catch_varargs:
arguments.append(args[self._argument_count:])
elif len(args) > self._argument_count:
raise TypeError('macro %r takes no more than %d argument(s)' %
(self.name, len(self.arguments)))
return self._func(*arguments)
def __repr__(self):
return '<%s %s>' % (
self.__class__.__name__,
self.name is None and 'anonymous' or repr(self.name)
)
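# Editor's note: an illustrative sketch, not part of the original module.
# A {% macro %} defined in a template is exposed as a Macro instance; calling
# it with a missing argument falls back to the declared default, as handled
# in Macro.__call__ above. The template source is made up.
def _example_macro_defaults():
    from jinja2 import Environment
    module = Environment().from_string(
        '{% macro hello(greeting, name="World") %}'
        '{{ greeting }}, {{ name }}!{% endmacro %}').module
    assert isinstance(module.hello, Macro)
    return module.hello('Hi')  # u'Hi, World!'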
@implements_to_string
class Undefined(object):
"""The default undefined type. This undefined type can be printed and
iterated over, but every other access will raise an :exc:`UndefinedError`:
>>> foo = Undefined(name='foo')
>>> str(foo)
''
>>> not foo
True
>>> foo + 42
Traceback (most recent call last):
...
UndefinedError: 'foo' is undefined
"""
__slots__ = ('_undefined_hint', '_undefined_obj', '_undefined_name',
'_undefined_exception')
def __init__(self, hint=None, obj=missing, name=None, exc=UndefinedError):
self._undefined_hint = hint
self._undefined_obj = obj
self._undefined_name = name
self._undefined_exception = exc
@internalcode
def _fail_with_undefined_error(self, *args, **kwargs):
"""Regular callback function for undefined objects that raises an
`UndefinedError` on call.
"""
if self._undefined_hint is None:
if self._undefined_obj is missing:
hint = '%r is undefined' % self._undefined_name
elif not isinstance(self._undefined_name, string_types):
hint = '%s has no element %r' % (
object_type_repr(self._undefined_obj),
self._undefined_name
)
else:
hint = '%r has no attribute %r' % (
object_type_repr(self._undefined_obj),
self._undefined_name
)
else:
hint = self._undefined_hint
raise self._undefined_exception(hint)
@internalcode
def __getattr__(self, name):
if name[:2] == '__':
raise AttributeError(name)
return self._fail_with_undefined_error()
__add__ = __radd__ = __mul__ = __rmul__ = __div__ = __rdiv__ = \
__truediv__ = __rtruediv__ = __floordiv__ = __rfloordiv__ = \
__mod__ = __rmod__ = __pos__ = __neg__ = __call__ = \
__getitem__ = __lt__ = __le__ = __gt__ = __ge__ = __int__ = \
__float__ = __complex__ = __pow__ = __rpow__ = \
_fail_with_undefined_error
def __eq__(self, other):
return type(self) is type(other)
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return id(type(self))
def __str__(self):
return u''
def __len__(self):
return 0
def __iter__(self):
if 0:
yield None
def __nonzero__(self):
return False
def __repr__(self):
return 'Undefined'
@implements_to_string
class DebugUndefined(Undefined):
"""An undefined that returns the debug info when printed.
>>> foo = DebugUndefined(name='foo')
>>> str(foo)
'{{ foo }}'
>>> not foo
True
>>> foo + 42
Traceback (most recent call last):
...
UndefinedError: 'foo' is undefined
"""
__slots__ = ()
def __str__(self):
if self._undefined_hint is None:
if self._undefined_obj is missing:
return u'{{ %s }}' % self._undefined_name
return '{{ no such element: %s[%r] }}' % (
object_type_repr(self._undefined_obj),
self._undefined_name
)
return u'{{ undefined value printed: %s }}' % self._undefined_hint
@implements_to_string
class StrictUndefined(Undefined):
"""An undefined that barks on print and iteration as well as boolean
tests and all kinds of comparisons. In other words: you can do nothing
with it except checking if it's defined using the `defined` test.
>>> foo = StrictUndefined(name='foo')
>>> str(foo)
Traceback (most recent call last):
...
UndefinedError: 'foo' is undefined
>>> not foo
Traceback (most recent call last):
...
UndefinedError: 'foo' is undefined
>>> foo + 42
Traceback (most recent call last):
...
UndefinedError: 'foo' is undefined
"""
__slots__ = ()
__iter__ = __str__ = __len__ = __nonzero__ = __eq__ = \
__ne__ = __bool__ = __hash__ = \
Undefined._fail_with_undefined_error
# remove remaining slots attributes, after the metaclass did the magic they
# are unneeded and irritating as they contain wrong data for the subclasses.
del Undefined.__slots__, DebugUndefined.__slots__, StrictUndefined.__slots__
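# Editor's note: an illustrative sketch, not part of the original file. It
# contrasts the three undefined flavours above by wiring them into real
# environments; the template source is made up.
def _example_undefined_flavours():
    from jinja2 import Environment
    rendered_default = Environment().from_string('{{ missing }}').render()
    # -> u'' (the default Undefined renders silently)
    rendered_debug = Environment(undefined=DebugUndefined) \
        .from_string('{{ missing }}').render()
    # -> u'{{ missing }}'
    try:
        Environment(undefined=StrictUndefined) \
            .from_string('{{ missing }}').render()
        strict_result = 'rendered'
    except UndefinedError:
        strict_result = 'raised UndefinedError'
    return rendered_default, rendered_debug, strict_result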

Some files were not shown because too many files have changed in this diff