Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Sign in
Toggle navigation
Menu
Open sidebar
vnf-onboarding
OSM Packages
Commits
17583c8b
Commit
17583c8b
authored
Nov 30, 2020
by
Mark Beierl
Browse files
Merge branch 'master' into 'master'
Update squid charm See merge request
!105
parents
e2197769
9c012576
Pipeline
#143
passed with stage
in 1 minute and 34 seconds
Changes
160
Pipelines
2
Hide whitespace changes
Inline
Side-by-side
Showing
20 changed files
with
4623 additions
and
0 deletions
+4623
-0
magma/squid_cnf/charms/squid/venv/MarkupSafe-1.1.1.dist-info/RECORD
...d_cnf/charms/squid/venv/MarkupSafe-1.1.1.dist-info/RECORD
+16
-0
magma/squid_cnf/charms/squid/venv/MarkupSafe-1.1.1.dist-info/WHEEL
...id_cnf/charms/squid/venv/MarkupSafe-1.1.1.dist-info/WHEEL
+5
-0
magma/squid_cnf/charms/squid/venv/MarkupSafe-1.1.1.dist-info/top_level.txt
...harms/squid/venv/MarkupSafe-1.1.1.dist-info/top_level.txt
+1
-0
magma/squid_cnf/charms/squid/venv/PyYAML-5.3.1.dist-info/INSTALLER
...id_cnf/charms/squid/venv/PyYAML-5.3.1.dist-info/INSTALLER
+1
-0
magma/squid_cnf/charms/squid/venv/PyYAML-5.3.1.dist-info/LICENSE
...quid_cnf/charms/squid/venv/PyYAML-5.3.1.dist-info/LICENSE
+20
-0
magma/squid_cnf/charms/squid/venv/PyYAML-5.3.1.dist-info/METADATA
...uid_cnf/charms/squid/venv/PyYAML-5.3.1.dist-info/METADATA
+41
-0
magma/squid_cnf/charms/squid/venv/PyYAML-5.3.1.dist-info/RECORD
...squid_cnf/charms/squid/venv/PyYAML-5.3.1.dist-info/RECORD
+40
-0
magma/squid_cnf/charms/squid/venv/PyYAML-5.3.1.dist-info/WHEEL
.../squid_cnf/charms/squid/venv/PyYAML-5.3.1.dist-info/WHEEL
+5
-0
magma/squid_cnf/charms/squid/venv/PyYAML-5.3.1.dist-info/top_level.txt
...nf/charms/squid/venv/PyYAML-5.3.1.dist-info/top_level.txt
+2
-0
magma/squid_cnf/charms/squid/venv/jinja2/__init__.py
magma/squid_cnf/charms/squid/venv/jinja2/__init__.py
+44
-0
magma/squid_cnf/charms/squid/venv/jinja2/_compat.py
magma/squid_cnf/charms/squid/venv/jinja2/_compat.py
+132
-0
magma/squid_cnf/charms/squid/venv/jinja2/_identifier.py
magma/squid_cnf/charms/squid/venv/jinja2/_identifier.py
+6
-0
magma/squid_cnf/charms/squid/venv/jinja2/asyncfilters.py
magma/squid_cnf/charms/squid/venv/jinja2/asyncfilters.py
+158
-0
magma/squid_cnf/charms/squid/venv/jinja2/asyncsupport.py
magma/squid_cnf/charms/squid/venv/jinja2/asyncsupport.py
+264
-0
magma/squid_cnf/charms/squid/venv/jinja2/bccache.py
magma/squid_cnf/charms/squid/venv/jinja2/bccache.py
+350
-0
magma/squid_cnf/charms/squid/venv/jinja2/compiler.py
magma/squid_cnf/charms/squid/venv/jinja2/compiler.py
+1843
-0
magma/squid_cnf/charms/squid/venv/jinja2/constants.py
magma/squid_cnf/charms/squid/venv/jinja2/constants.py
+21
-0
magma/squid_cnf/charms/squid/venv/jinja2/debug.py
magma/squid_cnf/charms/squid/venv/jinja2/debug.py
+268
-0
magma/squid_cnf/charms/squid/venv/jinja2/defaults.py
magma/squid_cnf/charms/squid/venv/jinja2/defaults.py
+44
-0
magma/squid_cnf/charms/squid/venv/jinja2/environment.py
magma/squid_cnf/charms/squid/venv/jinja2/environment.py
+1362
-0
No files found.
magma/squid_cnf/charms/squid/venv/MarkupSafe-1.1.1.dist-info/RECORD
0 → 100644
View file @
17583c8b
MarkupSafe-1.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
MarkupSafe-1.1.1.dist-info/LICENSE.txt,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475
MarkupSafe-1.1.1.dist-info/METADATA,sha256=IFCP4hCNGjXJgMoSvdjPiKDLAMUTTWoxKXQsQvmyMNU,3653
MarkupSafe-1.1.1.dist-info/RECORD,,
MarkupSafe-1.1.1.dist-info/WHEEL,sha256=VEyGcIFAmk_1KbI6gaZGw_mMiT-pdGweASQLX-DzYaY,108
MarkupSafe-1.1.1.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11
markupsafe/__init__.py,sha256=oTblO5f9KFM-pvnq9bB0HgElnqkJyqHnFN1Nx2NIvnY,10126
markupsafe/__pycache__/__init__.cpython-38.pyc,,
markupsafe/__pycache__/_compat.cpython-38.pyc,,
markupsafe/__pycache__/_constants.cpython-38.pyc,,
markupsafe/__pycache__/_native.cpython-38.pyc,,
markupsafe/_compat.py,sha256=uEW1ybxEjfxIiuTbRRaJpHsPFf4yQUMMKaPgYEC5XbU,558
markupsafe/_constants.py,sha256=zo2ajfScG-l1Sb_52EP3MlDCqO7Y1BVHUXXKRsVDRNk,4690
markupsafe/_native.py,sha256=d-8S_zzYt2y512xYcuSxq0NeG2DUUvG80wVdTn-4KI8,1873
markupsafe/_speedups.c,sha256=k0fzEIK3CP6MmMqeY0ob43TP90mVN0DTyn7BAl3RqSg,9884
markupsafe/_speedups.cpython-38-x86_64-linux-gnu.so,sha256=SbJwN321Xn7OPYGv5a6Ghzga75uT8RHQUGkoQUASF-o,48016
magma/squid_cnf/charms/squid/venv/MarkupSafe-1.1.1.dist-info/WHEEL
0 → 100644
View file @
17583c8b
Wheel-Version: 1.0
Generator: bdist_wheel (0.31.1)
Root-Is-Purelib: false
Tag: cp38-cp38-manylinux1_x86_64
magma/squid_cnf/charms/squid/venv/MarkupSafe-1.1.1.dist-info/top_level.txt
0 → 100644
View file @
17583c8b
markupsafe
magma/squid_cnf/charms/squid/venv/PyYAML-5.3.1.dist-info/INSTALLER
0 → 100644
View file @
17583c8b
pip
magma/squid_cnf/charms/squid/venv/PyYAML-5.3.1.dist-info/LICENSE
0 → 100644
View file @
17583c8b
Copyright (c) 2017-2020 Ingy döt Net
Copyright (c) 2006-2016 Kirill Simonov
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
magma/squid_cnf/charms/squid/venv/PyYAML-5.3.1.dist-info/METADATA
0 → 100644
View file @
17583c8b
Metadata-Version: 2.1
Name: PyYAML
Version: 5.3.1
Summary: YAML parser and emitter for Python
Home-page: https://github.com/yaml/pyyaml
Author: Kirill Simonov
Author-email: xi@resolvent.net
License: MIT
Download-URL: https://pypi.org/project/PyYAML/
Platform: Any
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Cython
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Classifier: Topic :: Text Processing :: Markup
Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*
YAML is a data serialization format designed for human readability
and interaction with scripting languages. PyYAML is a YAML parser
and emitter for Python.
PyYAML features a complete YAML 1.1 parser, Unicode support, pickle
support, capable extension API, and sensible error messages. PyYAML
supports standard YAML tags and provides Python-specific tags that
allow to represent an arbitrary Python object.
PyYAML is applicable for a broad range of tasks from complex
configuration files to object serialization and persistence.
magma/squid_cnf/charms/squid/venv/PyYAML-5.3.1.dist-info/RECORD
0 → 100644
View file @
17583c8b
PyYAML-5.3.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
PyYAML-5.3.1.dist-info/LICENSE,sha256=xAESRJ8lS5dTBFklJIMT6ScO-jbSJrItgtTMbEPFfyk,1101
PyYAML-5.3.1.dist-info/METADATA,sha256=xTsZFjd8T4M-5rC2M3BHgx_KTTpEPy5vFDIXrbzRXPQ,1758
PyYAML-5.3.1.dist-info/RECORD,,
PyYAML-5.3.1.dist-info/WHEEL,sha256=TpFVeXF_cAlV118WSIPWtjqW7nPvzoOw-49FmS3fDKQ,103
PyYAML-5.3.1.dist-info/top_level.txt,sha256=rpj0IVMTisAjh_1vG3Ccf9v5jpCQwAz6cD1IVU5ZdhQ,11
yaml/__init__.py,sha256=XFUNbKTg4afAd0BETjGQ1mKQ97_g5jbE1C0WoKc74dc,13170
yaml/__pycache__/__init__.cpython-38.pyc,,
yaml/__pycache__/composer.cpython-38.pyc,,
yaml/__pycache__/constructor.cpython-38.pyc,,
yaml/__pycache__/cyaml.cpython-38.pyc,,
yaml/__pycache__/dumper.cpython-38.pyc,,
yaml/__pycache__/emitter.cpython-38.pyc,,
yaml/__pycache__/error.cpython-38.pyc,,
yaml/__pycache__/events.cpython-38.pyc,,
yaml/__pycache__/loader.cpython-38.pyc,,
yaml/__pycache__/nodes.cpython-38.pyc,,
yaml/__pycache__/parser.cpython-38.pyc,,
yaml/__pycache__/reader.cpython-38.pyc,,
yaml/__pycache__/representer.cpython-38.pyc,,
yaml/__pycache__/resolver.cpython-38.pyc,,
yaml/__pycache__/scanner.cpython-38.pyc,,
yaml/__pycache__/serializer.cpython-38.pyc,,
yaml/__pycache__/tokens.cpython-38.pyc,,
yaml/composer.py,sha256=_Ko30Wr6eDWUeUpauUGT3Lcg9QPBnOPVlTnIMRGJ9FM,4883
yaml/constructor.py,sha256=O3Uaf0_J_5GQBoeI9ZNhpJAhtdagr_X2HzDgGbZOMnw,28627
yaml/cyaml.py,sha256=LiMkvchNonfoy1F6ec9L2BiUz3r0bwF4hympASJX1Ic,3846
yaml/dumper.py,sha256=PLctZlYwZLp7XmeUdwRuv4nYOZ2UBnDIUy8-lKfLF-o,2837
yaml/emitter.py,sha256=jghtaU7eFwg31bG0B7RZea_29Adi9CKmXq_QjgQpCkQ,43006
yaml/error.py,sha256=Ah9z-toHJUbE9j-M8YpxgSRM5CgLCcwVzJgLLRF2Fxo,2533
yaml/events.py,sha256=50_TksgQiE4up-lKo_V-nBy-tAIxkIPQxY5qDhKCeHw,2445
yaml/loader.py,sha256=UVa-zIqmkFSCIYq_PgSGm4NSJttHY2Rf_zQ4_b1fHN0,2061
yaml/nodes.py,sha256=gPKNj8pKCdh2d4gr3gIYINnPOaOxGhJAUiYhGRnPE84,1440
yaml/parser.py,sha256=ilWp5vvgoHFGzvOZDItFoGjD6D42nhlZrZyjAwa0oJo,25495
yaml/reader.py,sha256=0dmzirOiDG4Xo41RnuQS7K9rkY3xjHiVasfDMNTqCNw,6794
yaml/representer.py,sha256=82UM3ZxUQKqsKAF4ltWOxCS6jGPIFtXpGs7mvqyv4Xs,14184
yaml/resolver.py,sha256=DJCjpQr8YQCEYYjKEYqTl0GrsZil2H4aFOI9b0Oe-U4,8970
yaml/scanner.py,sha256=KeQIKGNlSyPE8QDwionHxy9CgbqE5teJEz05FR9-nAg,51277
yaml/serializer.py,sha256=ChuFgmhU01hj4xgI8GaKv6vfM2Bujwa9i7d2FAHj7cA,4165
yaml/tokens.py,sha256=lTQIzSVw8Mg9wv459-TjiOQe6wVziqaRlqX2_89rp54,2573
magma/squid_cnf/charms/squid/venv/PyYAML-5.3.1.dist-info/WHEEL
0 → 100644
View file @
17583c8b
Wheel-Version: 1.0
Generator: bdist_wheel (0.34.2)
Root-Is-Purelib: false
Tag: cp38-cp38-linux_x86_64
magma/squid_cnf/charms/squid/venv/PyYAML-5.3.1.dist-info/top_level.txt
0 → 100644
View file @
17583c8b
_yaml
yaml
magma/squid_cnf/charms/squid/venv/jinja2/__init__.py
0 → 100644
View file @
17583c8b
# -*- coding: utf-8 -*-
"""Jinja is a template engine written in pure Python. It provides a
non-XML syntax that supports inline expressions and an optional
sandboxed environment.
"""
from
markupsafe
import
escape
from
markupsafe
import
Markup
from
.bccache
import
BytecodeCache
from
.bccache
import
FileSystemBytecodeCache
from
.bccache
import
MemcachedBytecodeCache
from
.environment
import
Environment
from
.environment
import
Template
from
.exceptions
import
TemplateAssertionError
from
.exceptions
import
TemplateError
from
.exceptions
import
TemplateNotFound
from
.exceptions
import
TemplateRuntimeError
from
.exceptions
import
TemplatesNotFound
from
.exceptions
import
TemplateSyntaxError
from
.exceptions
import
UndefinedError
from
.filters
import
contextfilter
from
.filters
import
environmentfilter
from
.filters
import
evalcontextfilter
from
.loaders
import
BaseLoader
from
.loaders
import
ChoiceLoader
from
.loaders
import
DictLoader
from
.loaders
import
FileSystemLoader
from
.loaders
import
FunctionLoader
from
.loaders
import
ModuleLoader
from
.loaders
import
PackageLoader
from
.loaders
import
PrefixLoader
from
.runtime
import
ChainableUndefined
from
.runtime
import
DebugUndefined
from
.runtime
import
make_logging_undefined
from
.runtime
import
StrictUndefined
from
.runtime
import
Undefined
from
.utils
import
clear_caches
from
.utils
import
contextfunction
from
.utils
import
environmentfunction
from
.utils
import
evalcontextfunction
from
.utils
import
is_undefined
from
.utils
import
select_autoescape
__version__
=
"2.11.2"
magma/squid_cnf/charms/squid/venv/jinja2/_compat.py
0 → 100644
View file @
17583c8b
# -*- coding: utf-8 -*-
# flake8: noqa
import
marshal
import
sys
# Interpreter feature flags used throughout the PY2/PY3 shim below.
PY2 = sys.version_info[0] == 2
PYPY = hasattr(sys, "pypy_translation_info")
_identity = lambda x: x

if not PY2:
    # Python 3: most shims are direct aliases of the builtins.
    unichr = chr
    range_type = range
    text_type = str
    string_types = (str,)
    integer_types = (int,)

    iterkeys = lambda d: iter(d.keys())
    itervalues = lambda d: iter(d.values())
    iteritems = lambda d: iter(d.items())

    import pickle
    from io import BytesIO, StringIO

    NativeStringIO = StringIO

    def reraise(tp, value, tb=None):
        """Re-raise *value*, attaching *tb* when it differs from the
        exception's current traceback."""
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value

    ifilter = filter
    imap = map
    izip = zip
    intern = sys.intern

    # On Python 3 these decorators are no-ops.
    implements_iterator = _identity
    implements_to_string = _identity
    encode_filename = _identity

    marshal_dump = marshal.dump
    marshal_load = marshal.load

else:
    # Python 2: map the shim names onto the legacy builtins/modules.
    unichr = unichr
    text_type = unicode
    range_type = xrange
    string_types = (str, unicode)
    integer_types = (int, long)

    iterkeys = lambda d: d.iterkeys()
    itervalues = lambda d: d.itervalues()
    iteritems = lambda d: d.iteritems()

    import cPickle as pickle
    from cStringIO import StringIO as BytesIO, StringIO

    NativeStringIO = BytesIO

    # Py2-only raise syntax is hidden inside exec so this module still
    # parses on Python 3.
    exec("def reraise(tp, value, tb=None):\n raise tp, value, tb")

    from itertools import imap, izip, ifilter

    intern = intern

    def implements_iterator(cls):
        # Expose ``__next__`` as the Py2 ``next`` protocol method.
        cls.next = cls.__next__
        del cls.__next__
        return cls

    def implements_to_string(cls):
        # Route ``__str__`` through ``__unicode__`` + UTF-8 encoding.
        cls.__unicode__ = cls.__str__
        cls.__str__ = lambda x: x.__unicode__().encode("utf-8")
        return cls

    def encode_filename(filename):
        if isinstance(filename, unicode):
            return filename.encode("utf-8")
        return filename

    def marshal_dump(code, f):
        # ``marshal.dump`` requires a real file object on Py2.
        if isinstance(f, file):
            marshal.dump(code, f)
        else:
            f.write(marshal.dumps(code))

    def marshal_load(f):
        if isinstance(f, file):
            return marshal.load(f)
        return marshal.loads(f.read())
def with_metaclass(meta, *bases):
    """Create a base class with a metaclass."""
    # The returned throwaway class replaces itself on first use: its
    # one-shot metaclass intercepts class creation and builds the real
    # class via ``meta(name, bases, d)`` instead.
    class metaclass(type):
        def __new__(cls, name, this_bases, d):
            return meta(name, bases, d)

    return type.__new__(metaclass, "temporary_class", (), {})
# URL quoting: Py3 location first, Py2 location as fallback.
try:
    from urllib.parse import quote_from_bytes as url_quote
except ImportError:
    from urllib import quote as url_quote

# ``collections.abc`` moved in Py3.3; fall back to the flat module.
try:
    from collections import abc
except ImportError:
    import collections as abc

# ``os.fspath`` appeared in Py3.6; emulate it on older interpreters.
try:
    from os import fspath
except ImportError:
    try:
        from pathlib import PurePath
    except ImportError:
        PurePath = None

    def fspath(path):
        if hasattr(path, "__fspath__"):
            return path.__fspath__()

        # Python 3.5 doesn't have __fspath__ yet, use str.
        if PurePath is not None and isinstance(path, PurePath):
            return str(path)

        return path
magma/squid_cnf/charms/squid/venv/jinja2/_identifier.py
0 → 100644
View file @
17583c8b
import re

# Matches runs of characters valid *inside* a Python identifier
# (word chars plus combining marks/connectors outside \w).
# generated by scripts/generate_identifier_pattern.py
pattern = re.compile(
    r"[\w·̀-ͯ·҃-֑҇-ׇֽֿׁׂׅׄؐ-ًؚ-ٰٟۖ-ۜ۟-۪ۤۧۨ-ܑۭܰ-݊ަ-ް߫-߳ࠖ-࠙ࠛ-ࠣࠥ-ࠧࠩ-࡙࠭-࡛ࣔ-ࣣ࣡-ःऺ-़ा-ॏ॑-ॗॢॣঁ-ঃ়া-ৄেৈো-্ৗৢৣਁ-ਃ਼ਾ-ੂੇੈੋ-੍ੑੰੱੵઁ-ઃ઼ા-ૅે-ૉો-્ૢૣଁ-ଃ଼ା-ୄେୈୋ-୍ୖୗୢୣஂா-ூெ-ைொ-்ௗఀ-ఃా-ౄె-ైొ-్ౕౖౢౣಁ-ಃ಼ಾ-ೄೆ-ೈೊ-್ೕೖೢೣഁ-ഃാ-ൄെ-ൈൊ-്ൗൢൣංඃ්ා-ුූෘ-ෟෲෳัิ-ฺ็-๎ັິ-ູົຼ່-ໍ༹༘༙༵༷༾༿ཱ-྄྆྇ྍ-ྗྙ-ྼ࿆ါ-ှၖ-ၙၞ-ၠၢ-ၤၧ-ၭၱ-ၴႂ-ႍႏႚ-ႝ፝-፟ᜒ-᜔ᜲ-᜴ᝒᝓᝲᝳ឴-៓៝᠋-᠍ᢅᢆᢩᤠ-ᤫᤰ-᤻ᨗ-ᨛᩕ-ᩞ᩠-᩿᩼᪰-᪽ᬀ-ᬄ᬴-᭄᭫-᭳ᮀ-ᮂᮡ-ᮭ᯦-᯳ᰤ-᰷᳐-᳔᳒-᳨᳭ᳲ-᳴᳸᳹᷀-᷵᷻-᷿‿⁀⁔⃐-⃥⃜⃡-⃰℘℮⳯-⵿⳱ⷠ-〪ⷿ-゙゚〯꙯ꙴ-꙽ꚞꚟ꛰꛱ꠂ꠆ꠋꠣ-ꠧꢀꢁꢴ-ꣅ꣠-꣱ꤦ-꤭ꥇ-꥓ꦀ-ꦃ꦳-꧀ꧥꨩ-ꨶꩃꩌꩍꩻ-ꩽꪰꪲ-ꪴꪷꪸꪾ꪿꫁ꫫ-ꫯꫵ꫶ꯣ-ꯪ꯬꯭ﬞ︀-️︠-︯︳︴﹍-﹏_𐇽𐋠𐍶-𐍺𐨁-𐨃𐨅𐨆𐨌-𐨏𐨸-𐨿𐨺𐫦𐫥𑀀-𑀂𑀸-𑁆𑁿-𑂂𑂰-𑂺𑄀-𑄂𑄧-𑅳𑄴𑆀-𑆂𑆳-𑇊𑇀-𑇌𑈬-𑈷𑈾𑋟-𑋪𑌀-𑌃𑌼𑌾-𑍄𑍇𑍈𑍋-𑍍𑍗𑍢𑍣𑍦-𑍬𑍰-𑍴𑐵-𑑆𑒰-𑓃𑖯-𑖵𑖸-𑗀𑗜𑗝𑘰-𑙀𑚫-𑚷𑜝-𑜫𑰯-𑰶𑰸-𑰿𑲒-𑲧𑲩-𑲶𖫰-𖫴𖬰-𖬶𖽑-𖽾𖾏-𖾒𛲝𛲞𝅥-𝅩𝅭-𝅲𝅻-𝆂𝆅-𝆋𝆪-𝆭𝉂-𝉄𝨀-𝨶𝨻-𝩬𝩵𝪄𝪛-𝪟𝪡-𝪯𞀀-𞀆𞀈-𞀘𞀛-𞀡𞀣𞀤𞀦-𞣐𞀪-𞣖𞥄-𞥊󠄀-󠇯]+"  # noqa: B950
)
magma/squid_cnf/charms/squid/venv/jinja2/asyncfilters.py
0 → 100644
View file @
17583c8b
from
functools
import
wraps
from
.
import
filters
from
.asyncsupport
import
auto_aiter
from
.asyncsupport
import
auto_await
async def auto_to_seq(value):
    """Drain *value* into a list, consuming async iterables natively
    and plain iterables synchronously."""
    collected = []

    if hasattr(value, "__aiter__"):
        async for element in value:
            collected.append(element)
    else:
        for element in value:
            collected.append(element)

    return collected
async def async_select_or_reject(args, kwargs, modfunc, lookup_attr):
    """Async twin of the select/reject filters: yield the items of the
    prepared sequence for which ``modfunc(func(item))`` holds."""
    seq, func = filters.prepare_select_or_reject(args, kwargs, modfunc, lookup_attr)

    if not seq:
        return

    async for item in auto_aiter(seq):
        if func(item):
            yield item
def dualfilter(normal_filter, async_filter):
    """Combine a sync filter and its async variant into one callable
    that dispatches on whether the active environment is async."""
    wrap_evalctx = False

    if getattr(normal_filter, "environmentfilter", False) is True:
        # Environment filters receive the environment as args[0].
        def is_async(args):
            return args[0].is_async

        wrap_evalctx = False
    else:
        has_evalctxfilter = getattr(normal_filter, "evalcontextfilter", False) is True
        has_ctxfilter = getattr(normal_filter, "contextfilter", False) is True
        # Plain filters get an eval context injected so is_async can be
        # answered; it is stripped again before calling the filter.
        wrap_evalctx = not has_evalctxfilter and not has_ctxfilter

        def is_async(args):
            return args[0].environment.is_async

    @wraps(normal_filter)
    def wrapper(*args, **kwargs):
        use_async = is_async(args)

        if wrap_evalctx:
            args = args[1:]

        if use_async:
            return async_filter(*args, **kwargs)

        return normal_filter(*args, **kwargs)

    if wrap_evalctx:
        wrapper.evalcontextfilter = True

    wrapper.asyncfiltervariant = True
    return wrapper
def asyncfiltervariant(original):
    """Decorator factory: mark the decorated coroutine as the async
    variant of *original* via :func:`dualfilter`."""

    def decorator(f):
        return dualfilter(original, f)

    return decorator
@asyncfiltervariant(filters.do_first)
async def do_first(environment, seq):
    """Async ``first`` filter: return the first item of *seq*, or an
    undefined object when the sequence is empty."""
    try:
        return await auto_aiter(seq).__anext__()
    except StopAsyncIteration:
        return environment.undefined("No first item, sequence was empty.")
@asyncfiltervariant(filters.do_groupby)
async def do_groupby(environment, value, attribute):
    """Async ``groupby`` filter: group the (fully drained) sequence by
    *attribute* after sorting on the same key."""
    expr = filters.make_attrgetter(environment, attribute)
    return [
        filters._GroupTuple(key, await auto_to_seq(values))
        for key, values in filters.groupby(
            sorted(await auto_to_seq(value), key=expr), expr
        )
    ]
@asyncfiltervariant(filters.do_join)
async def do_join(eval_ctx, value, d=u"", attribute=None):
    """Async ``join`` filter: drain *value* and delegate to the sync
    implementation."""
    return filters.do_join(eval_ctx, await auto_to_seq(value), d, attribute)
@asyncfiltervariant(filters.do_list)
async def do_list(value):
    """Async ``list`` filter: materialize *value* as a list."""
    return await auto_to_seq(value)
@asyncfiltervariant(filters.do_reject)
async def do_reject(*args, **kwargs):
    """Async ``reject`` filter: keep items for which the test fails."""
    return async_select_or_reject(args, kwargs, lambda x: not x, False)
@asyncfiltervariant(filters.do_rejectattr)
async def do_rejectattr(*args, **kwargs):
    """Async ``rejectattr`` filter: reject items by attribute test."""
    return async_select_or_reject(args, kwargs, lambda x: not x, True)
@asyncfiltervariant(filters.do_select)
async def do_select(*args, **kwargs):
    """Async ``select`` filter: keep items for which the test passes."""
    return async_select_or_reject(args, kwargs, lambda x: x, False)
@asyncfiltervariant(filters.do_selectattr)
async def do_selectattr(*args, **kwargs):
    """Async ``selectattr`` filter: select items by attribute test."""
    return async_select_or_reject(args, kwargs, lambda x: x, True)
@asyncfiltervariant(filters.do_map)
async def do_map(*args, **kwargs):
    """Async ``map`` filter: apply the prepared mapping function to each
    item, awaiting results that are themselves awaitable."""
    seq, func = filters.prepare_map(args, kwargs)

    if not seq:
        return

    async for item in auto_aiter(seq):
        yield await auto_await(func(item))
@asyncfiltervariant(filters.do_sum)
async def do_sum(environment, iterable, attribute=None, start=0):
    """Async ``sum`` filter: accumulate items (or an attribute of each
    item) onto *start*."""
    rv = start

    if attribute is not None:
        func = filters.make_attrgetter(environment, attribute)
    else:

        def func(x):
            return x

    async for item in auto_aiter(iterable):
        rv += func(item)

    return rv
@asyncfiltervariant(filters.do_slice)
async def do_slice(value, slices, fill_with=None):
    """Async ``slice`` filter: drain *value* and delegate to the sync
    implementation."""
    return filters.do_slice(await auto_to_seq(value), slices, fill_with)
# Registry merged into FILTERS by asyncsupport.patch_filters().
ASYNC_FILTERS = {
    "first": do_first,
    "groupby": do_groupby,
    "join": do_join,
    "list": do_list,
    # we intentionally do not support do_last because that would be
    # ridiculous
    "reject": do_reject,
    "rejectattr": do_rejectattr,
    "map": do_map,
    "select": do_select,
    "selectattr": do_selectattr,
    "sum": do_sum,
    "slice": do_slice,
}
magma/squid_cnf/charms/squid/venv/jinja2/asyncsupport.py
0 → 100644
View file @
17583c8b
# -*- coding: utf-8 -*-
"""The code for async support. Importing this patches Jinja on supported
Python versions.
"""
import
asyncio
import
inspect
from
functools
import
update_wrapper
from
markupsafe
import
Markup
from
.environment
import
TemplateModule
from
.runtime
import
LoopContext
from
.utils
import
concat
from
.utils
import
internalcode
from
.utils
import
missing
async def concat_async(async_gen):
    """Drain *async_gen* and concatenate the collected events with the
    environment's ``concat`` helper."""
    rv = []

    async def collect():
        async for event in async_gen:
            rv.append(event)

    await collect()
    return concat(rv)
async def generate_async(self, *args, **kwargs):
    """Async counterpart of ``Template.generate``: stream rendered
    events, yielding the handled exception output on failure."""
    vars = dict(*args, **kwargs)
    try:
        async for event in self.root_render_func(self.new_context(vars)):
            yield event
    except Exception:
        yield self.environment.handle_exception()
def wrap_generate_func(original_generate):
    """Wrap ``Template.generate`` so async environments drive the async
    generator through the event loop instead."""

    def _convert_generator(self, loop, args, kwargs):
        # Pull items out of the async generator one at a time.
        async_gen = self.generate_async(*args, **kwargs)
        try:
            while 1:
                yield loop.run_until_complete(async_gen.__anext__())
        except StopAsyncIteration:
            pass

    def generate(self, *args, **kwargs):
        if not self.environment.is_async:
            return original_generate(self, *args, **kwargs)

        return _convert_generator(self, asyncio.get_event_loop(), args, kwargs)

    return update_wrapper(generate, original_generate)
async def render_async(self, *args, **kwargs):
    """Async counterpart of ``Template.render``; only valid when the
    environment was created with async mode enabled."""
    if not self.environment.is_async:
        raise RuntimeError("The environment was not created with async mode enabled.")

    vars = dict(*args, **kwargs)
    ctx = self.new_context(vars)

    try:
        return await concat_async(self.root_render_func(ctx))
    except Exception:
        return self.environment.handle_exception()
def wrap_render_func(original_render):
    """Wrap ``Template.render`` to delegate to ``render_async`` on the
    event loop when the environment is async."""

    def render(self, *args, **kwargs):
        if not self.environment.is_async:
            return original_render(self, *args, **kwargs)

        loop = asyncio.get_event_loop()
        return loop.run_until_complete(self.render_async(*args, **kwargs))

    return update_wrapper(render, original_render)
def wrap_block_reference_call(original_call):
    """Wrap ``BlockReference.__call__`` with an async-aware variant."""

    @internalcode
    async def async_call(self):
        # Render the block at the current super() depth, honoring
        # autoescaping from the surrounding eval context.
        rv = await concat_async(self._stack[self._depth](self._context))

        if self._context.eval_ctx.autoescape:
            rv = Markup(rv)

        return rv

    @internalcode
    def __call__(self):
        if not self._context.environment.is_async:
            return original_call(self)

        return async_call(self)

    return update_wrapper(__call__, original_call)
def wrap_macro_invoke(original_invoke):
    """Wrap ``Macro._invoke`` with an async-aware variant."""

    @internalcode
    async def async_invoke(self, arguments, autoescape):
        rv = await self._func(*arguments)

        if autoescape:
            rv = Markup(rv)

        return rv

    @internalcode
    def _invoke(self, arguments, autoescape):
        if not self._environment.is_async:
            return original_invoke(self, arguments, autoescape)

        return async_invoke(self, arguments, autoescape)

    return update_wrapper(_invoke, original_invoke)
@internalcode
async def get_default_module_async(self):
    """Return the template's cached module, building it asynchronously
    on first access."""
    if self._module is not None:
        return self._module

    self._module = rv = await self.make_module_async()
    return rv
def wrap_default_module(original_default_module):
    """Wrap ``Template._get_default_module`` to reject sync access in
    async mode (use the async variant instead)."""

    @internalcode
    def _get_default_module(self):
        if self.environment.is_async:
            raise RuntimeError("Template module attribute is unavailable in async mode")

        return original_default_module(self)

    return _get_default_module
async def make_module_async(self, vars=None, shared=False, locals=None):
    """Async counterpart of ``Template.make_module``: render the whole
    body and wrap it in a :class:`TemplateModule`."""
    context = self.new_context(vars, shared, locals)
    body_stream = []

    async for item in self.root_render_func(context):
        body_stream.append(item)

    return TemplateModule(self, context, body_stream)
def patch_template():
    """Monkey-patch async entry points onto :class:`Template`."""
    from . import Template

    Template.generate = wrap_generate_func(Template.generate)
    Template.generate_async = update_wrapper(generate_async, Template.generate_async)
    Template.render_async = update_wrapper(render_async, Template.render_async)
    Template.render = wrap_render_func(Template.render)
    Template._get_default_module = wrap_default_module(Template._get_default_module)
    Template._get_default_module_async = get_default_module_async
    Template.make_module_async = update_wrapper(
        make_module_async, Template.make_module_async
    )
def patch_runtime():
    """Monkey-patch async-aware call paths onto the runtime classes."""
    from .runtime import BlockReference, Macro

    BlockReference.__call__ = wrap_block_reference_call(BlockReference.__call__)
    Macro._invoke = wrap_macro_invoke(Macro._invoke)
def patch_filters():
    """Register the async filter variants in the global filter map."""
    from .filters import FILTERS
    from .asyncfilters import ASYNC_FILTERS

    FILTERS.update(ASYNC_FILTERS)
def patch_all():
    """Apply every async monkey-patch (runs at module import)."""
    patch_template()
    patch_runtime()
    patch_filters()
async def auto_await(value):
    """Await *value* when it is awaitable; pass it through otherwise."""
    if inspect.isawaitable(value):
        return await value

    return value
async def auto_aiter(iterable):
    """Adapt *iterable* to an async iterator, whether it is natively
    async or a plain sync iterable."""
    if hasattr(iterable, "__aiter__"):
        async for item in iterable:
            yield item
        return

    for item in iterable:
        yield item
class AsyncLoopContext(LoopContext):
    """LoopContext variant backing ``{% for %}`` over async iterables.

    Length-dependent properties become awaitable because they may have
    to drain the underlying async iterator to answer.
    """

    _to_iterator = staticmethod(auto_aiter)

    @property
    async def length(self):
        if self._length is not None:
            return self._length

        try:
            self._length = len(self._iterable)
        except TypeError:
            # Unsized: drain the remaining items, then rebuild the
            # iterator so iteration can continue from here.
            iterable = [x async for x in self._iterator]
            self._iterator = self._to_iterator(iterable)
            self._length = len(iterable) + self.index + (self._after is not missing)

        return self._length

    @property
    async def revindex0(self):
        return await self.length - self.index

    @property
    async def revindex(self):
        return await self.length - self.index0

    async def _peek_next(self):
        # Cache one lookahead item in self._after for last/nextitem.
        if self._after is not missing:
            return self._after

        try:
            self._after = await self._iterator.__anext__()
        except StopAsyncIteration:
            self._after = missing

        return self._after

    @property
    async def last(self):
        return await self._peek_next() is missing

    @property
    async def nextitem(self):
        rv = await self._peek_next()

        if rv is missing:
            return self._undefined("there is no next item")

        return rv

    def __aiter__(self):
        return self

    async def __anext__(self):
        # Prefer the lookahead item captured by _peek_next, if any.
        if self._after is not missing:
            rv = self._after
            self._after = missing
        else:
            rv = await self._iterator.__anext__()

        self.index0 += 1
        self._before = self._current
        self._current = rv
        return rv, self
async def make_async_loop_context(iterable, undefined, recurse=None, depth0=0):
    """Deprecated factory kept for templates compiled before Jinja 2.11."""
    import warnings

    warnings.warn(
        "This template must be recompiled with at least Jinja 2.11, or"
        " it will fail in 3.0.",
        DeprecationWarning,
        stacklevel=2,
    )
    return AsyncLoopContext(iterable, undefined, recurse, depth0)
patch_all
()
magma/squid_cnf/charms/squid/venv/jinja2/bccache.py
0 → 100644
View file @
17583c8b
# -*- coding: utf-8 -*-
"""The optional bytecode cache system. This is useful if you have very
complex template situations and the compilation of all those templates
slows down your application too much.
Situations where this is useful are often forking web applications that
are initialized on the first request.
"""
import
errno
import
fnmatch
import
os
import
stat
import
sys
import
tempfile
from
hashlib
import
sha1
from
os
import
listdir
from
os
import
path
from
._compat
import
BytesIO
from
._compat
import
marshal_dump
from
._compat
import
marshal_load
from
._compat
import
pickle
from
._compat
import
text_type
from
.utils
import
open_if_exists
# Bump when the cache file layout changes.
bc_version = 4
# Magic bytes to identify Jinja bytecode cache files. Contains the
# Python major and minor version to avoid loading incompatible bytecode
# if a project upgrades its Python version.
bc_magic = (
    b"j2"
    + pickle.dumps(bc_version, 2)
    + pickle.dumps((sys.version_info[0] << 24) | sys.version_info[1], 2)
)
class Bucket(object):
    """Buckets are used to store the bytecode for one template. It's created
    and initialized by the bytecode cache and passed to the loading functions.

    The buckets get an internal checksum from the cache assigned and use this
    to automatically reject outdated cache material. Individual bytecode
    cache subclasses don't have to care about cache invalidation.
    """

    def __init__(self, environment, key, checksum):
        self.environment = environment
        self.key = key
        self.checksum = checksum
        self.reset()

    def reset(self):
        """Resets the bucket (unloads the bytecode)."""
        self.code = None

    def load_bytecode(self, f):
        """Loads bytecode from a file or file like object."""
        # make sure the magic header is correct
        magic = f.read(len(bc_magic))
        if magic != bc_magic:
            self.reset()
            return
        # the source code of the file changed, we need to reload
        checksum = pickle.load(f)
        if self.checksum != checksum:
            self.reset()
            return
        # if marshal_load fails then we need to reload
        try:
            self.code = marshal_load(f)
        except (EOFError, ValueError, TypeError):
            self.reset()
            return

    def write_bytecode(self, f):
        """Dump the bytecode into the file or file like object passed."""
        if self.code is None:
            raise TypeError("can't write empty bucket")
        f.write(bc_magic)
        pickle.dump(self.checksum, f, 2)
        marshal_dump(self.code, f)

    def bytecode_from_string(self, string):
        """Load bytecode from a string."""
        self.load_bytecode(BytesIO(string))

    def bytecode_to_string(self):
        """Return the bytecode as string."""
        out = BytesIO()
        self.write_bytecode(out)
        return out.getvalue()
class BytecodeCache(object):
    """To implement your own bytecode cache you have to subclass this class
    and override :meth:`load_bytecode` and :meth:`dump_bytecode`.  Both of
    these methods are passed a :class:`~jinja2.bccache.Bucket`.

    A very basic bytecode cache that saves the bytecode on the file system::

        from os import path

        class MyCache(BytecodeCache):

            def __init__(self, directory):
                self.directory = directory

            def load_bytecode(self, bucket):
                filename = path.join(self.directory, bucket.key)
                if path.exists(filename):
                    with open(filename, 'rb') as f:
                        bucket.load_bytecode(f)

            def dump_bytecode(self, bucket):
                filename = path.join(self.directory, bucket.key)
                with open(filename, 'wb') as f:
                    bucket.write_bytecode(f)

    A more advanced version of a filesystem based bytecode cache is part of
    Jinja.
    """

    def load_bytecode(self, bucket):
        """Subclasses have to override this method to load bytecode into a
        bucket.  If they are not able to find code in the cache for the
        bucket, it must not do anything.
        """
        raise NotImplementedError()

    def dump_bytecode(self, bucket):
        """Subclasses have to override this method to write the bytecode
        from a bucket back to the cache.  If it unable to do so it must not
        fail silently but raise an exception.
        """
        raise NotImplementedError()

    def clear(self):
        """Clears the cache.  This method is not used by Jinja but should be
        implemented to allow applications to clear the bytecode cache used
        by a particular environment.
        """

    def get_cache_key(self, name, filename=None):
        """Returns the unique hash key for this template name."""
        hash = sha1(name.encode("utf-8"))

        if filename is not None:
            filename = "|" + filename

            if isinstance(filename, text_type):
                filename = filename.encode("utf-8")

            hash.update(filename)

        return hash.hexdigest()

    def get_source_checksum(self, source):
        """Returns a checksum for the source."""
        return sha1(source.encode("utf-8")).hexdigest()

    def get_bucket(self, environment, name, filename, source):
        """Return a cache bucket for the given template.  All arguments are
        mandatory but filename may be `None`.
        """
        key = self.get_cache_key(name, filename)
        checksum = self.get_source_checksum(source)
        bucket = Bucket(environment, key, checksum)
        self.load_bytecode(bucket)
        return bucket

    def set_bucket(self, bucket):
        """Put the bucket into the cache."""
        self.dump_bytecode(bucket)
class FileSystemBytecodeCache(BytecodeCache):
    """A bytecode cache that stores bytecode on the filesystem.  It accepts
    two arguments: The directory where the cache items are stored and a
    pattern string that is used to build the filename.

    If no directory is specified a default cache directory is selected.  On
    Windows the user's temp directory is used, on UNIX systems a directory
    is created for the user in the system temp directory.

    The pattern can be used to have multiple separate caches operate on the
    same directory.  The default pattern is ``'__jinja2_%s.cache'``.  ``%s``
    is replaced with the cache key.

    >>> bcc = FileSystemBytecodeCache('/tmp/jinja_cache', '%s.cache')

    This bytecode cache supports clearing of the cache using the clear method.
    """

    def __init__(self, directory=None, pattern="__jinja2_%s.cache"):
        if directory is None:
            directory = self._get_default_cache_dir()
        self.directory = directory
        self.pattern = pattern

    def _get_default_cache_dir(self):
        """Pick (and if needed create) a per-user cache directory under the
        system temp dir, refusing any location not exclusively owned by the
        current user (0700, owned by us, a real directory).
        """
        def _unsafe_dir():
            raise RuntimeError(
                "Cannot determine safe temp directory. You "
                "need to explicitly provide one."
            )

        tmpdir = tempfile.gettempdir()

        # On windows the temporary directory is used specific unless
        # explicitly forced otherwise.  We can just use that.
        if os.name == "nt":
            return tmpdir
        if not hasattr(os, "getuid"):
            _unsafe_dir()

        dirname = "_jinja2-cache-%d" % os.getuid()
        actual_dir = os.path.join(tmpdir, dirname)

        try:
            # S_IRWXU == 0700: only the owning user may access the cache dir.
            os.mkdir(actual_dir, stat.S_IRWXU)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise
        try:
            os.chmod(actual_dir, stat.S_IRWXU)
            actual_dir_stat = os.lstat(actual_dir)
            # Reject the directory if it is not ours, not a directory (e.g.
            # a symlink planted by another user), or not mode 0700.
            if (
                actual_dir_stat.st_uid != os.getuid()
                or not stat.S_ISDIR(actual_dir_stat.st_mode)
                or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU
            ):
                _unsafe_dir()
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise

        # Re-validate after the chmod attempt above may have failed.
        actual_dir_stat = os.lstat(actual_dir)
        if (
            actual_dir_stat.st_uid != os.getuid()
            or not stat.S_ISDIR(actual_dir_stat.st_mode)
            or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU
        ):
            _unsafe_dir()

        return actual_dir

    def _get_cache_filename(self, bucket):
        # Expand the pattern with the bucket key inside the cache directory.
        return path.join(self.directory, self.pattern % bucket.key)

    def load_bytecode(self, bucket):
        # open_if_exists returns None on a cache miss; only then do we skip.
        f = open_if_exists(self._get_cache_filename(bucket), "rb")
        if f is not None:
            try:
                bucket.load_bytecode(f)
            finally:
                f.close()

    def dump_bytecode(self, bucket):
        f = open(self._get_cache_filename(bucket), "wb")
        try:
            bucket.write_bytecode(f)
        finally:
            f.close()

    def clear(self):
        # imported lazily here because google app-engine doesn't support
        # write access on the file system and the function does not exist
        # normally.
        from os import remove

        files = fnmatch.filter(listdir(self.directory), self.pattern % "*")
        for filename in files:
            try:
                remove(path.join(self.directory, filename))
            except OSError:
                # Best effort: a file removed concurrently is not an error.
                pass
class MemcachedBytecodeCache(BytecodeCache):
    """This class implements a bytecode cache that uses a memcache cache for
    storing the information.  It does not enforce a specific memcache library
    (tummy's memcache or cmemcache) but will accept any class that provides
    the minimal interface required.

    Libraries compatible with this class:

    -   `cachelib <https://github.com/pallets/cachelib>`_
    -   `python-memcached <https://pypi.org/project/python-memcached/>`_

    (Unfortunately the django cache interface is not compatible because it
    does not support storing binary data, only unicode.  You can however pass
    the underlying cache client to the bytecode cache which is available
    as `django.core.cache.cache._client`.)

    The minimal interface for the client passed to the constructor is this:

    .. class:: MinimalClientInterface

        .. method:: set(key, value[, timeout])

            Stores the bytecode in the cache.  `value` is a string and
            `timeout` the timeout of the key.  If timeout is not provided
            a default timeout or no timeout should be assumed, if it's
            provided it's an integer with the number of seconds the cache
            item should exist.

        .. method:: get(key)

            Returns the value for the cache key.  If the item does not
            exist in the cache the return value must be `None`.

    The other arguments to the constructor are the prefix for all keys that
    is added before the actual cache key and the timeout for the bytecode in
    the cache system.  We recommend a high (or no) timeout.

    This bytecode cache does not support clearing of used items in the cache.
    The clear method is a no-operation function.

    .. versionadded:: 2.7
       Added support for ignoring memcache errors through the
       `ignore_memcache_errors` parameter.
    """

    def __init__(
        self,
        client,
        prefix="jinja2/bytecode/",
        timeout=None,
        ignore_memcache_errors=True,
    ):
        self.client = client
        self.prefix = prefix
        self.timeout = timeout
        self.ignore_memcache_errors = ignore_memcache_errors

    def load_bytecode(self, bucket):
        try:
            code = self.client.get(self.prefix + bucket.key)
        except Exception:
            # Deliberately broad: any client failure is treated as a cache
            # miss unless the user opted out via ignore_memcache_errors.
            if not self.ignore_memcache_errors:
                raise
            code = None
        if code is not None:
            bucket.bytecode_from_string(code)

    def dump_bytecode(self, bucket):
        # Only pass a timeout argument when one was configured, since some
        # clients' set() may not accept it otherwise.
        args = (self.prefix + bucket.key, bucket.bytecode_to_string())
        if self.timeout is not None:
            args += (self.timeout,)
        try:
            self.client.set(*args)
        except Exception:
            if not self.ignore_memcache_errors:
                raise
magma/squid_cnf/charms/squid/venv/jinja2/compiler.py
0 → 100644
View file @
17583c8b
# -*- coding: utf-8 -*-
"""Compiles nodes from the parser into Python code."""
from
collections
import
namedtuple
from
functools
import
update_wrapper
from
itertools
import
chain
from
keyword
import
iskeyword
as
is_python_keyword
from
markupsafe
import
escape
from
markupsafe
import
Markup
from
.
import
nodes
from
._compat
import
imap
from
._compat
import
iteritems
from
._compat
import
izip
from
._compat
import
NativeStringIO
from
._compat
import
range_type
from
._compat
import
string_types
from
._compat
import
text_type
from
.exceptions
import
TemplateAssertionError
from
.idtracking
import
Symbols
from
.idtracking
import
VAR_LOAD_ALIAS
from
.idtracking
import
VAR_LOAD_PARAMETER
from
.idtracking
import
VAR_LOAD_RESOLVE
from
.idtracking
import
VAR_LOAD_UNDEFINED
from
.nodes
import
EvalContext
from
.optimizer
import
Optimizer
from
.utils
import
concat
from
.visitor
import
NodeVisitor
# Maps comparison operator node names to the Python source text emitted
# for them in generated code.
operators = {
    "eq": "==",
    "ne": "!=",
    "gt": ">",
    "gteq": ">=",
    "lt": "<",
    "lteq": "<=",
    "in": "in",
    "notin": "not in",
}
# what method to iterate over items do we want to use for dict iteration
# in generated code?  on 2.x let's go with iteritems, on 3.x with items
if hasattr(dict, "iteritems"):
    dict_item_iter = "iteritems"
else:
    dict_item_iter = "items"
# __future__ features enabled in every generated template module.
code_features = ["division"]

# does this python version support generator stops? (PEP 0479)
try:
    exec("from __future__ import generator_stop")
    code_features.append("generator_stop")
except SyntaxError:
    pass
# does this python version support yield from?
# Probed by compiling (not running) a tiny function at import time.
try:
    exec("def f(): yield from x()")
except SyntaxError:
    supports_yield_from = False
else:
    supports_yield_from = True
def optimizeconst(f):
    """Decorator for visitor methods: attempt constant folding via the
    code generator's optimizer before falling back to the wrapped visitor.
    """

    def wrapped(self, node, frame, **kwargs):
        # Only optimize if the frame is not volatile
        can_fold = self.optimized and not frame.eval_ctx.volatile
        if can_fold:
            folded = self.optimizer.visit(node, frame.eval_ctx)
            if folded != node:
                # The optimizer simplified the node; visit the result instead.
                return self.visit(folded, frame)
        return f(self, node, frame, **kwargs)

    return update_wrapper(wrapped, f)
def generate(
    node, environment, name, filename, stream=None, defer_init=False, optimized=True
):
    """Generate the python source for a node tree.

    Returns the generated source as a string when ``stream`` is None,
    otherwise writes into ``stream`` and returns None.
    """
    if not isinstance(node, nodes.Template):
        raise TypeError("Can't compile non template nodes")
    generator = environment.code_generator_class(
        environment, name, filename, stream, defer_init, optimized
    )
    generator.visit(node)
    if stream is None:
        return generator.stream.getvalue()
def has_safe_repr(value):
    """Does the node have a safe representation?"""
    # The singletons always repr() safely.
    if value is None or value is NotImplemented or value is Ellipsis:
        return True

    kind = type(value)
    # Scalars and markup strings are safe as-is.  Exact type match is
    # intentional: subclasses may override __repr__.
    if kind in (bool, int, float, complex, range_type, Markup) + string_types:
        return True

    if kind in (tuple, list, set, frozenset):
        # A container is safe only when every element is safe.
        return all(has_safe_repr(item) for item in value)

    if kind is dict:
        # Both keys and values must be safe.
        return all(
            has_safe_repr(key) and has_safe_repr(val)
            for key, val in iteritems(value)
        )

    return False
def find_undeclared(nodes, names):
    """Check if the names passed are accessed undeclared.  The return value
    is a set of all the undeclared names from the sequence of names found.
    """
    tracker = UndeclaredNameVisitor(names)
    try:
        for item in nodes:
            tracker.visit(item)
    except VisitorExit:
        # The visitor bails out early once every watched name was seen.
        pass
    return tracker.undeclared
class MacroRef(object):
    """Bookkeeping record describing which special parameters a macro or
    call block's body makes use of.
    """

    def __init__(self, node):
        # The macro/call-block node this record refers to.
        self.node = node
        # All special-parameter accesses start out False; the code generator
        # flips them on as it analyzes the body.
        self.accesses_caller = False
        self.accesses_kwargs = False
        self.accesses_varargs = False
class Frame(object):
    """Holds compile time information for us."""

    def __init__(self, eval_ctx, parent=None, level=None):
        self.eval_ctx = eval_ctx
        # Symbol table; chained to the parent frame's symbols when nested.
        self.symbols = Symbols(parent and parent.symbols or None, level=level)

        # a toplevel frame is the root + soft frames such as if conditions.
        self.toplevel = False

        # the root frame is basically just the outermost frame, so no if
        # conditions.  This information is used to optimize inheritance
        # situations.
        self.rootlevel = False

        # in some dynamic inheritance situations the compiler needs to add
        # write tests around output statements.
        self.require_output_check = parent and parent.require_output_check

        # inside some tags we are using a buffer rather than yield statements.
        # this for example affects {% filter %} or {% macro %}.  If a frame
        # is buffered this variable points to the name of the list used as
        # buffer.
        self.buffer = None

        # the name of the block we're in, otherwise None.
        self.block = parent and parent.block or None

        # the parent of this frame
        self.parent = parent

        if parent is not None:
            self.buffer = parent.buffer

    def copy(self):
        """Create a copy of the current one."""
        # Shallow-copy the frame but give it its own symbol table copy so
        # symbol changes do not leak back.
        rv = object.__new__(self.__class__)
        rv.__dict__.update(self.__dict__)
        rv.symbols = self.symbols.copy()
        return rv

    def inner(self, isolated=False):
        """Return an inner frame."""
        if isolated:
            # No parent link: only the nesting level is carried over.
            return Frame(self.eval_ctx, level=self.symbols.level + 1)
        return Frame(self.eval_ctx, self)

    def soft(self):
        """Return a soft frame.  A soft frame may not be modified as
        standalone thing as it shares the resources with the frame it
        was created of, but it's not a rootlevel frame any longer.

        This is only used to implement if-statements.
        """
        rv = self.copy()
        rv.rootlevel = False
        return rv

    __copy__ = copy
class VisitorExit(RuntimeError):
    """Exception used by the `UndeclaredNameVisitor` to signal a stop."""
class DependencyFinderVisitor(NodeVisitor):
    """A visitor that collects filter and test calls."""

    def __init__(self):
        # Names of all filters and tests seen while walking the tree.
        self.filters = set()
        self.tests = set()

    def visit_Filter(self, node):
        self.generic_visit(node)
        self.filters.add(node.name)

    def visit_Test(self, node):
        self.generic_visit(node)
        self.tests.add(node.name)

    def visit_Block(self, node):
        """Stop visiting at blocks."""
class UndeclaredNameVisitor(NodeVisitor):
    """A visitor that checks if a name is accessed without being
    declared.  This is different from the frame visitor as it will
    not stop at closure frames.
    """

    def __init__(self, names):
        # Names still being watched for, and those found loaded undeclared.
        self.names = set(names)
        self.undeclared = set()

    def visit_Name(self, node):
        if node.ctx == "load" and node.name in self.names:
            self.undeclared.add(node.name)
            # Once every watched name was found, abort the walk early.
            if self.undeclared == self.names:
                raise VisitorExit()
        else:
            # A store (or any non-load) declares the name; stop watching it.
            self.names.discard(node.name)

    def visit_Block(self, node):
        """Stop visiting a blocks."""
class CompilerExit(Exception):
    """Raised if the compiler encountered a situation where it just
    doesn't make sense to further process the code.  Any block that
    raises such an exception is not further processed.
    """
class
CodeGenerator
(
NodeVisitor
):
def __init__(
    self, environment, name, filename, stream=None, defer_init=False, optimized=True
):
    """Set up the code generator state; generated source is written to
    ``stream`` (an in-memory string buffer when not given).
    """
    if stream is None:
        stream = NativeStringIO()
    self.environment = environment
    self.name = name
    self.filename = filename
    self.stream = stream
    self.created_block_context = False
    self.defer_init = defer_init
    self.optimized = optimized
    if optimized:
        self.optimizer = Optimizer(environment)

    # aliases for imports
    self.import_aliases = {}

    # a registry for all blocks.  Because blocks are moved out
    # into the global python scope they are registered here
    self.blocks = {}

    # the number of extends statements so far
    self.extends_so_far = 0

    # some templates have a rootlevel extends.  In this case we
    # can safely assume that we're a child template and do some
    # more optimizations.
    self.has_known_extends = False

    # the current line number
    self.code_lineno = 1

    # registry of all filters and tests (global, not block local)
    self.tests = {}
    self.filters = {}

    # the debug information
    self.debug_info = []
    self._write_debug_info = None

    # the number of new lines before the next write()
    self._new_lines = 0

    # the line number of the last written statement
    self._last_line = 0

    # true if nothing was written so far.
    self._first_write = True

    # used by the `temporary_identifier` method to get new
    # unique, temporary identifier
    self._last_identifier = 0

    # the current indentation
    self._indentation = 0

    # Tracks toplevel assignments
    self._assign_stack = []

    # Tracks parameter definition blocks
    self._param_def_block = []

    # Tracks the current context.
    self._context_reference_stack = ["context"]
# -- Various compilation helpers

def fail(self, msg, lineno):
    """Fail with a :exc:`TemplateAssertionError`."""
    raise TemplateAssertionError(msg, lineno, self.name, self.filename)
def temporary_identifier(self):
    """Get a new unique identifier."""
    # Monotonic counter guarantees uniqueness within one generator instance.
    self._last_identifier += 1
    return "t_%d" % self._last_identifier
def buffer(self, frame):
    """Enable buffering for the frame from that point onwards."""
    # Allocate a fresh list variable in the generated code and remember
    # its name on the frame.
    frame.buffer = self.temporary_identifier()
    self.writeline("%s = []" % frame.buffer)
def return_buffer_contents(self, frame, force_unescaped=False):
    """Return the buffer contents of the frame.

    Emits a ``return concat(...)`` (optionally Markup-wrapped for
    autoescaping) into the generated code.
    """
    if not force_unescaped:
        if frame.eval_ctx.volatile:
            # Autoescaping is only known at runtime: emit both branches.
            self.writeline("if context.eval_ctx.autoescape:")
            self.indent()
            self.writeline("return Markup(concat(%s))" % frame.buffer)
            self.outdent()
            self.writeline("else:")
            self.indent()
            self.writeline("return concat(%s)" % frame.buffer)
            self.outdent()
            return
        elif frame.eval_ctx.autoescape:
            self.writeline("return Markup(concat(%s))" % frame.buffer)
            return
    self.writeline("return concat(%s)" % frame.buffer)
def indent(self):
    """Indent by one."""
    self._indentation += 1
def outdent(self, step=1):
    """Outdent by step."""
    self._indentation -= step
def start_write(self, frame, node=None):
    """Yield or write into the frame buffer."""
    if frame.buffer is None:
        self.writeline("yield ", node)
    else:
        # Buffered frames append to their list instead of yielding.
        self.writeline("%s.append(" % frame.buffer, node)
def end_write(self, frame):
    """End the writing process started by `start_write`."""
    # Only buffered writes opened a paren that must be closed.
    if frame.buffer is not None:
        self.write(")")
def simple_write(self, s, frame, node=None):
    """Simple shortcut for start_write + write + end_write."""
    self.start_write(frame, node)
    self.write(s)
    self.end_write(frame)
def blockvisit(self, nodes, frame):
    """Visit a list of nodes as block in a frame.  If the current frame
    is no buffer a dummy ``if 0: yield None`` is written automatically.
    """
    try:
        # Guarantee the generated suite is never empty.
        self.writeline("pass")
        for node in nodes:
            self.visit(node, frame)
    except CompilerExit:
        # A visitor decided further code in this block is unreachable.
        pass
def write(self, x):
    """Write a string into the output stream."""
    if self._new_lines:
        if not self._first_write:
            self.stream.write("\n" * self._new_lines)
            self.code_lineno += self._new_lines
            if self._write_debug_info is not None:
                # Record (template line -> generated line) mapping.
                self.debug_info.append((self._write_debug_info, self.code_lineno))
                self._write_debug_info = None
        self._first_write = False
        # Four spaces per indentation level in the generated source.
        self.stream.write("    " * self._indentation)
        self._new_lines = 0
    self.stream.write(x)
def writeline(self, x, node=None, extra=0):
    """Combination of newline and write."""
    self.newline(node, extra)
    self.write(x)
def newline(self, node=None, extra=0):
    """Add one or more newlines before the next write."""
    # Newlines are deferred: write() flushes them before the next payload.
    self._new_lines = max(self._new_lines, 1 + extra)
    if node is not None and node.lineno != self._last_line:
        self._write_debug_info = node.lineno
        self._last_line = node.lineno
def signature(self, node, frame, extra_kwargs=None):
    """Writes a function call to the stream for the current node.
    A leading comma is added automatically.  The extra keyword
    arguments may not include python keywords otherwise a syntax
    error could occur.  The extra keyword arguments should be given
    as python dict.
    """
    # if any of the given keyword arguments is a python keyword
    # we have to make sure that no invalid call is created.
    kwarg_workaround = False
    for kwarg in chain((x.key for x in node.kwargs), extra_kwargs or ()):
        if is_python_keyword(kwarg):
            kwarg_workaround = True
            break

    for arg in node.args:
        self.write(", ")
        self.visit(arg, frame)

    if not kwarg_workaround:
        # Normal path: keyword arguments can be written as name=value.
        for kwarg in node.kwargs:
            self.write(", ")
            self.visit(kwarg, frame)
        if extra_kwargs is not None:
            for key, value in iteritems(extra_kwargs):
                self.write(", %s=%s" % (key, value))
    if node.dyn_args:
        self.write(", *")
        self.visit(node.dyn_args, frame)

    if kwarg_workaround:
        # Keyword names that are python keywords must go through **{...}
        # so they appear as string keys rather than identifiers.
        if node.dyn_kwargs is not None:
            self.write(", **dict({")
        else:
            self.write(", **{")
        for kwarg in node.kwargs:
            self.write("%r: " % kwarg.key)
            self.visit(kwarg.value, frame)
            self.write(", ")
        if extra_kwargs is not None:
            for key, value in iteritems(extra_kwargs):
                self.write("%r: %s, " % (key, value))
        if node.dyn_kwargs is not None:
            self.write("}, **")
            self.visit(node.dyn_kwargs, frame)
            self.write(")")
        else:
            self.write("}")

    elif node.dyn_kwargs is not None:
        self.write(", **")
        self.visit(node.dyn_kwargs, frame)
def pull_dependencies(self, nodes):
    """Pull all the dependencies.

    Collects every filter and test used in *nodes* and emits lookups that
    bind them to temporary local names in the generated code.
    """
    visitor = DependencyFinderVisitor()
    for node in nodes:
        visitor.visit(node)
    for dependency in "filters", "tests":
        # self.filters / self.tests map dependency name -> temp identifier.
        mapping = getattr(self, dependency)
        for name in getattr(visitor, dependency):
            if name not in mapping:
                mapping[name] = self.temporary_identifier()
            self.writeline(
                "%s = environment.%s[%r]" % (mapping[name], dependency, name)
            )
def enter_frame(self, frame):
    """Emit the load instructions that bring the frame's symbols into
    scope in the generated code.
    """
    undefs = []
    for target, (action, param) in iteritems(frame.symbols.loads):
        if action == VAR_LOAD_PARAMETER:
            # Parameters are already bound by the function signature.
            pass
        elif action == VAR_LOAD_RESOLVE:
            self.writeline("%s = %s(%r)" % (target, self.get_resolve_func(), param))
        elif action == VAR_LOAD_ALIAS:
            self.writeline("%s = %s" % (target, param))
        elif action == VAR_LOAD_UNDEFINED:
            undefs.append(target)
        else:
            raise NotImplementedError("unknown load instruction")
    if undefs:
        # Batch all undefined targets into one "a = b = missing" line.
        self.writeline("%s = missing" % " = ".join(undefs))
def leave_frame(self, frame, with_python_scope=False):
    """Emit cleanup when leaving a frame.  When the frame did not open a
    real python scope, reset all its loaded symbols back to ``missing``.
    """
    if not with_python_scope:
        undefs = []
        for target, _ in iteritems(frame.symbols.loads):
            undefs.append(target)
        if undefs:
            self.writeline("%s = missing" % " = ".join(undefs))
def func(self, name):
    """Return a ``def``/``async def`` header prefix for *name* depending
    on whether the environment is async.
    """
    if self.environment.is_async:
        return "async def %s" % name
    return "def %s" % name
def macro_body(self, node, frame):
    """Dump the function def of a macro or call block.

    Returns the inner frame and the :class:`MacroRef` describing which
    special parameters (caller/kwargs/varargs) the body accesses.
    """
    frame = frame.inner()
    frame.symbols.analyze_node(node)
    macro_ref = MacroRef(node)

    explicit_caller = None
    skip_special_params = set()
    args = []
    for idx, arg in enumerate(node.args):
        if arg.name == "caller":
            explicit_caller = idx
        if arg.name in ("kwargs", "varargs"):
            skip_special_params.add(arg.name)
        args.append(frame.symbols.ref(arg.name))

    undeclared = find_undeclared(node.body, ("caller", "kwargs", "varargs"))

    if "caller" in undeclared:
        # In older Jinja versions there was a bug that allowed caller
        # to retain the special behavior even if it was mentioned in
        # the argument list.  However thankfully this was only really
        # working if it was the last argument.  So we are explicitly
        # checking this now and error out if it is anywhere else in
        # the argument list.
        if explicit_caller is not None:
            try:
                node.defaults[explicit_caller - len(node.args)]
            except IndexError:
                self.fail(
                    "When defining macros or call blocks the "
                    'special "caller" argument must be omitted '
                    "or be given a default.",
                    node.lineno,
                )
        else:
            args.append(frame.symbols.declare_parameter("caller"))
        macro_ref.accesses_caller = True
    if "kwargs" in undeclared and "kwargs" not in skip_special_params:
        args.append(frame.symbols.declare_parameter("kwargs"))
        macro_ref.accesses_kwargs = True
    if "varargs" in undeclared and "varargs" not in skip_special_params:
        args.append(frame.symbols.declare_parameter("varargs"))
        macro_ref.accesses_varargs = True

    # macros are delayed, they never require output checks
    frame.require_output_check = False
    frame.symbols.analyze_node(node)
    self.writeline("%s(%s):" % (self.func("macro"), ", ".join(args)), node)
    self.indent()

    self.buffer(frame)
    self.enter_frame(frame)

    self.push_parameter_definitions(frame)
    for idx, arg in enumerate(node.args):
        ref = frame.symbols.ref(arg.name)
        self.writeline("if %s is missing:" % ref)
        self.indent()
        try:
            # Defaults align with the tail of the arg list, hence the
            # negative index.
            default = node.defaults[idx - len(node.args)]
        except IndexError:
            self.writeline(
                "%s = undefined(%r, name=%r)"
                % (ref, "parameter %r was not provided" % arg.name, arg.name)
            )
        else:
            self.writeline("%s = " % ref)
            self.visit(default, frame)
        self.mark_parameter_stored(ref)
        self.outdent()
    self.pop_parameter_definitions()

    self.blockvisit(node.body, frame)
    self.return_buffer_contents(frame, force_unescaped=True)
    self.leave_frame(frame, with_python_scope=True)
    self.outdent()

    return frame, macro_ref
def macro_def(self, macro_ref, frame):
    """Dump the macro definition for the def created by macro_body."""
    arg_tuple = ", ".join(repr(x.name) for x in macro_ref.node.args)
    name = getattr(macro_ref.node, "name", None)
    if len(macro_ref.node.args) == 1:
        # Single-element tuple needs the trailing comma.
        arg_tuple += ","
    self.write(
        "Macro(environment, macro, %r, (%s), %r, %r, %r, "
        "context.eval_ctx.autoescape)"
        % (
            name,
            arg_tuple,
            macro_ref.accesses_kwargs,
            macro_ref.accesses_varargs,
            macro_ref.accesses_caller,
        )
    )
def position(self, node):
    """Return a human readable position for the node."""
    rv = "line %d" % node.lineno
    if self.name is not None:
        rv += " in " + repr(self.name)
    return rv
def dump_local_context(self, frame):
    """Return a dict-literal source string mapping the frame's stored
    variable names to their generated-code targets.
    """
    return "{%s}" % ", ".join(
        "%r: %s" % (name, target)
        for name, target in iteritems(frame.symbols.dump_stores())
    )
def write_commons(self):
    """Writes a common preamble that is used by root and block functions.
    Primarily this sets up common local helpers and enforces a generator
    through a dead branch.
    """
    self.writeline("resolve = context.resolve_or_missing")
    self.writeline("undefined = environment.undefined")
    # always use the standard Undefined class for the implicit else of
    # conditional expressions
    self.writeline("cond_expr_undefined = Undefined")
    self.writeline("if 0: yield None")
def push_parameter_definitions(self, frame):
    """Pushes all parameter targets from the given frame into a local
    stack that permits tracking of yet to be assigned parameters.  In
    particular this enables the optimization from `visit_Name` to skip
    undefined expressions for parameters in macros as macros can reference
    otherwise unbound parameters.
    """
    self._param_def_block.append(frame.symbols.dump_param_targets())
def pop_parameter_definitions(self):
    """Pops the current parameter definitions set."""
    self._param_def_block.pop()
def mark_parameter_stored(self, target):
    """Marks a parameter in the current parameter definitions as stored.
    This will skip the enforced undefined checks.
    """
    if self._param_def_block:
        self._param_def_block[-1].discard(target)
def push_context_reference(self, target):
    """Push a new name to use as the current context reference."""
    self._context_reference_stack.append(target)
def pop_context_reference(self):
    """Restore the previous context reference."""
    self._context_reference_stack.pop()
def get_context_ref(self):
    """Return the name of the context object currently in scope."""
    return self._context_reference_stack[-1]
def get_resolve_func(self):
    """Return the source expression used to resolve names at runtime."""
    target = self._context_reference_stack[-1]
    if target == "context":
        # The root context has a local alias written by write_commons().
        return "resolve"
    return "%s.resolve" % target
def derive_context(self, frame):
    """Return a source expression deriving a new context that includes
    the frame's local variables.
    """
    return "%s.derived(%s)" % (
        self.get_context_ref(),
        self.dump_local_context(frame),
    )
def parameter_is_undeclared(self, target):
    """Checks if a given target is an undeclared parameter."""
    if not self._param_def_block:
        return False
    return target in self._param_def_block[-1]
def push_assign_tracking(self):
    """Pushes a new layer for assignment tracking."""
    self._assign_stack.append(set())
def pop_assign_tracking(self, frame):
    """Pops the topmost level for assignment tracking and updates the
    context variables if necessary.
    """
    vars = self._assign_stack.pop()
    # Only toplevel assignments become context variables.
    if not frame.toplevel or not vars:
        return
    # Names starting with an underscore are kept private (not exported).
    public_names = [x for x in vars if x[:1] != "_"]
    if len(vars) == 1:
        name = next(iter(vars))
        ref = frame.symbols.ref(name)
        self.writeline("context.vars[%r] = %s" % (name, ref))
    else:
        self.writeline("context.vars.update({")
        for idx, name in enumerate(vars):
            if idx:
                self.write(", ")
            ref = frame.symbols.ref(name)
            self.write("%r: %s" % (name, ref))
        self.write("})")
    if public_names:
        if len(public_names) == 1:
            self.writeline("context.exported_vars.add(%r)" % public_names[0])
        else:
            self.writeline(
                "context.exported_vars.update((%s))"
                % ", ".join(imap(repr, public_names))
            )
# -- Statement Visitors

def visit_Template(self, node, frame=None):
    """Generate the whole template module: imports, the root render
    function, one function per block, the block registry and debug info.
    """
    assert frame is None, "no root frame allowed"
    eval_ctx = EvalContext(self.environment, self.name)

    from .runtime import exported

    self.writeline("from __future__ import %s" % ", ".join(code_features))
    self.writeline("from jinja2.runtime import " + ", ".join(exported))

    if self.environment.is_async:
        self.writeline(
            "from jinja2.asyncsupport import auto_await, "
            "auto_aiter, AsyncLoopContext"
        )

    # if we want a deferred initialization we cannot move the
    # environment into a local name
    envenv = not self.defer_init and ", environment=environment" or ""

    # do we have an extends tag at all?  If not, we can save some
    # overhead by just not processing any inheritance code.
    have_extends = node.find(nodes.Extends) is not None

    # find all blocks
    for block in node.find_all(nodes.Block):
        if block.name in self.blocks:
            self.fail("block %r defined twice" % block.name, block.lineno)
        self.blocks[block.name] = block

    # find all imports and import them
    for import_ in node.find_all(nodes.ImportedName):
        if import_.importname not in self.import_aliases:
            imp = import_.importname
            self.import_aliases[imp] = alias = self.temporary_identifier()
            if "." in imp:
                module, obj = imp.rsplit(".", 1)
                self.writeline("from %s import %s as %s" % (module, obj, alias))
            else:
                self.writeline("import %s as %s" % (imp, alias))

    # add the load name
    self.writeline("name = %r" % self.name)

    # generate the root render function.
    self.writeline(
        "%s(context, missing=missing%s):" % (self.func("root"), envenv), extra=1
    )
    self.indent()
    self.write_commons()

    # process the root
    frame = Frame(eval_ctx)
    if "self" in find_undeclared(node.body, ("self",)):
        ref = frame.symbols.declare_parameter("self")
        self.writeline("%s = TemplateReference(context)" % ref)
    frame.symbols.analyze_node(node)
    frame.toplevel = frame.rootlevel = True
    frame.require_output_check = have_extends and not self.has_known_extends
    if have_extends:
        self.writeline("parent_template = None")
    self.enter_frame(frame)
    self.pull_dependencies(node.body)
    self.blockvisit(node.body, frame)
    self.leave_frame(frame, with_python_scope=True)
    self.outdent()

    # make sure that the parent root is called.
    if have_extends:
        if not self.has_known_extends:
            self.indent()
            self.writeline("if parent_template is not None:")
        self.indent()
        if supports_yield_from and not self.environment.is_async:
            self.writeline("yield from parent_template.root_render_func(context)")
        else:
            self.writeline(
                "%sfor event in parent_template."
                "root_render_func(context):" % (self.environment.is_async and "async " or "")
            )
            self.indent()
            self.writeline("yield event")
            self.outdent()
        self.outdent(1 + (not self.has_known_extends))

    # at this point we now have the blocks collected and can visit them too.
    for name, block in iteritems(self.blocks):
        self.writeline(
            "%s(context, missing=missing%s):" % (self.func("block_" + name), envenv),
            block,
            1,
        )
        self.indent()
        self.write_commons()
        # It's important that we do not make this frame a child of the
        # toplevel template.  This would cause a variety of
        # interesting issues with identifier tracking.
        block_frame = Frame(eval_ctx)
        undeclared = find_undeclared(block.body, ("self", "super"))
        if "self" in undeclared:
            ref = block_frame.symbols.declare_parameter("self")
            self.writeline("%s = TemplateReference(context)" % ref)
        if "super" in undeclared:
            ref = block_frame.symbols.declare_parameter("super")
            self.writeline("%s = context.super(%r, block_%s)" % (ref, name, name))
        block_frame.symbols.analyze_node(block)
        block_frame.block = name
        self.enter_frame(block_frame)
        self.pull_dependencies(block.body)
        self.blockvisit(block.body, block_frame)
        self.leave_frame(block_frame, with_python_scope=True)
        self.outdent()

    self.writeline(
        "blocks = {%s}" % ", ".join("%r: block_%s" % (x, x) for x in self.blocks),
        extra=1,
    )

    # add a function that returns the debug info
    self.writeline(
        "debug_info = %r" % "&".join("%s=%s" % x for x in self.debug_info)
    )
def visit_Block(self, node, frame):
    """Call a block and register it for the template."""
    level = 0
    if frame.toplevel:
        # if we know that we are a child template, there is no need to
        # check if we are one
        if self.has_known_extends:
            return
        if self.extends_so_far > 0:
            self.writeline("if parent_template is None:")
            self.indent()
            level += 1

    if node.scoped:
        # Scoped blocks see the surrounding local variables.
        context = self.derive_context(frame)
    else:
        context = self.get_context_ref()

    if (
        supports_yield_from
        and not self.environment.is_async
        and frame.buffer is None
    ):
        self.writeline(
            "yield from context.blocks[%r][0](%s)" % (node.name, context), node
        )
    else:
        loop = self.environment.is_async and "async for" or "for"
        self.writeline(
            "%s event in context.blocks[%r][0](%s):" % (loop, node.name, context),
            node,
        )
        self.indent()
        self.simple_write("event", frame)
        self.outdent()

    self.outdent(level)
def visit_Extends(self, node, frame):
    """Calls the extender."""
    if not frame.toplevel:
        self.fail("cannot use extend from a non top-level scope", node.lineno)

    # if the number of extends statements in general is zero so
    # far, we don't have to add a check if something extended
    # the template before this one.
    if self.extends_so_far > 0:

        # if we have a known extends we just add a template runtime
        # error into the generated code.  We could catch that at compile
        # time too, but i welcome it not to confuse users by throwing the
        # same error at different times just "because we can".
        if not self.has_known_extends:
            self.writeline("if parent_template is not None:")
            self.indent()
        self.writeline("raise TemplateRuntimeError(%r)" % "extended multiple times")

        # if we have a known extends already we don't need that code here
        # as we know that the template execution will end here.
        if self.has_known_extends:
            raise CompilerExit()
        else:
            self.outdent()

    self.writeline("parent_template = environment.get_template(", node)
    self.visit(node.template, frame)
    self.write(", %r)" % self.name)
    self.writeline(
        "for name, parent_block in parent_template.blocks.%s():" % dict_item_iter
    )
    self.indent()
    self.writeline("context.blocks.setdefault(name, []).append(parent_block)")
    self.outdent()

    # if this extends statement was in the root level we can take
    # advantage of that information and simplify the generated code
    # in the top level from this point onwards
    if frame.rootlevel:
        self.has_known_extends = True

    # and now we have one more
    self.extends_so_far += 1
def visit_Include(self, node, frame):
    """Handles includes.

    Chooses the narrowest environment lookup function based on the
    template expression's compile-time type, optionally wraps the whole
    include in a try/except for ``ignore missing``, and emits the
    rendering loop (or ``yield from`` when available and sync).
    """
    if node.ignore_missing:
        self.writeline("try:")
        self.indent()

    # Constant string -> get_template; constant tuple/list or Tuple/List
    # node -> select_template; anything else decided at runtime.
    func_name = "get_or_select_template"
    if isinstance(node.template, nodes.Const):
        if isinstance(node.template.value, string_types):
            func_name = "get_template"
        elif isinstance(node.template.value, (tuple, list)):
            func_name = "select_template"
    elif isinstance(node.template, (nodes.Tuple, nodes.List)):
        func_name = "select_template"

    self.writeline("template = environment.%s(" % func_name, node)
    self.visit(node.template, frame)
    self.write(", %r)" % self.name)
    if node.ignore_missing:
        self.outdent()
        self.writeline("except TemplateNotFound:")
        self.indent()
        self.writeline("pass")
        self.outdent()
        self.writeline("else:")
        self.indent()

    skip_event_yield = False
    if node.with_context:
        # Render with a fresh context derived from the current one.
        loop = self.environment.is_async and "async for" or "for"
        self.writeline(
            "%s event in template.root_render_func("
            "template.new_context(context.get_all(), True, "
            "%s)):" % (loop, self.dump_local_context(frame))
        )
    elif self.environment.is_async:
        self.writeline(
            "for event in (await "
            "template._get_default_module_async())"
            "._body_stream:"
        )
    else:
        if supports_yield_from:
            # Delegate the whole body stream in one statement.
            self.writeline("yield from template._get_default_module()._body_stream")
            skip_event_yield = True
        else:
            self.writeline(
                "for event in template._get_default_module()._body_stream:"
            )

    if not skip_event_yield:
        self.indent()
        self.simple_write("event", frame)
        self.outdent()

    if node.ignore_missing:
        self.outdent()
def visit_Import(self, node, frame):
    """Visit regular imports.

    Emits an assignment of the imported template's module object to the
    target name, exporting it into ``context.vars`` at top level and
    discarding it from the exported names when it is public.
    """
    self.writeline("%s = " % frame.symbols.ref(node.target), node)
    if frame.toplevel:
        # Also make the module visible through the template context.
        self.write("context.vars[%r] = " % node.target)
    if self.environment.is_async:
        self.write("await ")
    self.write("environment.get_template(")
    self.visit(node.template, frame)
    self.write(", %r)." % self.name)
    if node.with_context:
        # "with context": build a module bound to the current context.
        self.write(
            "make_module%s(context.get_all(), True, %s)"
            % (
                self.environment.is_async and "_async" or "",
                self.dump_local_context(frame),
            )
        )
    elif self.environment.is_async:
        self.write("_get_default_module_async()")
    else:
        self.write("_get_default_module()")
    if frame.toplevel and not node.target.startswith("_"):
        # Imports are never exported even when assigned at top level.
        self.writeline("context.exported_vars.discard(%r)" % node.target)
def visit_FromImport(self, node, frame):
    """Visit named imports.

    Loads the source template into ``included_template`` and binds each
    requested name (or alias) locally, substituting an ``undefined``
    object when the template does not export the name.
    """
    self.newline(node)
    self.write(
        "included_template = %senvironment.get_template("
        % (self.environment.is_async and "await " or "")
    )
    self.visit(node.template, frame)
    self.write(", %r)." % self.name)
    if node.with_context:
        self.write(
            "make_module%s(context.get_all(), True, %s)"
            % (
                self.environment.is_async and "_async" or "",
                self.dump_local_context(frame),
            )
        )
    elif self.environment.is_async:
        self.write("_get_default_module_async()")
    else:
        self.write("_get_default_module()")

    var_names = []
    discarded_names = []
    for name in node.names:
        # ``name`` may be a (name, alias) pair from "import x as y".
        if isinstance(name, tuple):
            name, alias = name
        else:
            alias = name
        self.writeline(
            "%s = getattr(included_template, "
            "%r, missing)" % (frame.symbols.ref(alias), name)
        )
        self.writeline("if %s is missing:" % frame.symbols.ref(alias))
        self.indent()
        # Missing exports become an undefined with a helpful message.
        self.writeline(
            "%s = undefined(%r %% "
            "included_template.__name__, "
            "name=%r)"
            % (
                frame.symbols.ref(alias),
                "the template %%r (imported on %s) does "
                "not export the requested name %s"
                % (self.position(node), repr(name)),
                name,
            )
        )
        self.outdent()
        if frame.toplevel:
            var_names.append(alias)
            if not alias.startswith("_"):
                discarded_names.append(alias)

    if var_names:
        if len(var_names) == 1:
            name = var_names[0]
            self.writeline("context.vars[%r] = %s" % (name, frame.symbols.ref(name)))
        else:
            self.writeline(
                "context.vars.update({%s})"
                % ", ".join(
                    "%r: %s" % (name, frame.symbols.ref(name)) for name in var_names
                )
            )
    if discarded_names:
        # Imported names are not re-exported from this template.
        if len(discarded_names) == 1:
            self.writeline("context.exported_vars.discard(%r)" % discarded_names[0])
        else:
            self.writeline(
                "context.exported_vars.difference_"
                "update((%s))" % ", ".join(imap(repr, discarded_names))
            )
def visit_For(self, node, frame):
    """Compile a ``{% for %}`` loop.

    Handles the extended ``loop`` variable, an optional inline filter
    (``if`` clause, compiled to a generator function), ``{% else %}``
    via an iteration-indicator flag, and recursive loops (compiled to a
    ``loop(reciter, loop_render_func, depth)`` function).
    """
    loop_frame = frame.inner()
    test_frame = frame.inner()
    else_frame = frame.inner()

    # try to figure out if we have an extended loop.  An extended loop
    # is necessary if the loop is in recursive mode if the special loop
    # variable is accessed in the body.
    extended_loop = node.recursive or "loop" in find_undeclared(
        node.iter_child_nodes(only=("body",)), ("loop",)
    )

    loop_ref = None
    if extended_loop:
        loop_ref = loop_frame.symbols.declare_parameter("loop")

    loop_frame.symbols.analyze_node(node, for_branch="body")
    if node.else_:
        else_frame.symbols.analyze_node(node, for_branch="else")

    if node.test:
        # Compile the "if" clause into a separate filtering generator
        # so LoopContext sees only the items that pass the test.
        loop_filter_func = self.temporary_identifier()
        test_frame.symbols.analyze_node(node, for_branch="test")
        self.writeline("%s(fiter):" % self.func(loop_filter_func), node.test)
        self.indent()
        self.enter_frame(test_frame)
        self.writeline(self.environment.is_async and "async for " or "for ")
        self.visit(node.target, loop_frame)
        self.write(" in ")
        self.write(self.environment.is_async and "auto_aiter(fiter)" or "fiter")
        self.write(":")
        self.indent()
        self.writeline("if ", node.test)
        self.visit(node.test, test_frame)
        self.write(":")
        self.indent()
        self.writeline("yield ")
        self.visit(node.target, loop_frame)
        self.outdent(3)
        self.leave_frame(test_frame, with_python_scope=True)

    # if we don't have an recursive loop we have to find the shadowed
    # variables at that point.  Because loops can be nested but the loop
    # variable is a special one we have to enforce aliasing for it.
    if node.recursive:
        self.writeline(
            "%s(reciter, loop_render_func, depth=0):" % self.func("loop"), node
        )
        self.indent()
        self.buffer(loop_frame)

        # Use the same buffer for the else frame
        else_frame.buffer = loop_frame.buffer

    # make sure the loop variable is a special one and raise a template
    # assertion error if a loop tries to write to loop
    if extended_loop:
        self.writeline("%s = missing" % loop_ref)

    for name in node.find_all(nodes.Name):
        if name.ctx == "store" and name.name == "loop":
            self.fail(
                "Can't assign to special loop variable in for-loop target",
                name.lineno,
            )

    if node.else_:
        # Flag cleared inside the loop body; if still set afterwards the
        # loop never iterated and the else branch runs.
        iteration_indicator = self.temporary_identifier()
        self.writeline("%s = 1" % iteration_indicator)

    self.writeline(self.environment.is_async and "async for " or "for ", node)
    self.visit(node.target, loop_frame)
    if extended_loop:
        if self.environment.is_async:
            self.write(", %s in AsyncLoopContext(" % loop_ref)
        else:
            self.write(", %s in LoopContext(" % loop_ref)
    else:
        self.write(" in ")

    if node.test:
        self.write("%s(" % loop_filter_func)
    if node.recursive:
        self.write("reciter")
    else:
        if self.environment.is_async and not extended_loop:
            self.write("auto_aiter(")
        self.visit(node.iter, frame)
        if self.environment.is_async and not extended_loop:
            self.write(")")
    if node.test:
        self.write(")")

    if node.recursive:
        self.write(", undefined, loop_render_func, depth):")
    else:
        self.write(extended_loop and ", undefined):" or ":")

    self.indent()
    self.enter_frame(loop_frame)

    self.blockvisit(node.body, loop_frame)
    if node.else_:
        self.writeline("%s = 0" % iteration_indicator)
    self.outdent()
    self.leave_frame(
        loop_frame, with_python_scope=node.recursive and not node.else_
    )

    if node.else_:
        self.writeline("if %s:" % iteration_indicator)
        self.indent()
        self.enter_frame(else_frame)
        self.blockvisit(node.else_, else_frame)
        self.leave_frame(else_frame)
        self.outdent()

    # if the node was recursive we have to return the buffer contents
    # and start the iteration code
    if node.recursive:
        self.return_buffer_contents(loop_frame)
        self.outdent()
        self.start_write(frame, node)
        if self.environment.is_async:
            self.write("await ")
        self.write("loop(")
        if self.environment.is_async:
            self.write("auto_aiter(")
        self.visit(node.iter, frame)
        if self.environment.is_async:
            self.write(")")
        self.write(", loop)")
        self.end_write(frame)
def visit_If(self, node, frame):
    """Compile an ``{% if %}`` statement into a Python ``if``/``elif``/
    ``else`` chain, visiting every branch against a soft copy of the
    current frame."""
    soft_frame = frame.soft()

    def emit_branch(keyword, test, body, ref_node):
        # Emit one conditional arm: header keyword, test expression,
        # colon, then the indented branch body.
        self.writeline(keyword, ref_node)
        self.visit(test, soft_frame)
        self.write(":")
        self.indent()
        self.blockvisit(body, soft_frame)
        self.outdent()

    emit_branch("if ", node.test, node.body, node)
    for branch in node.elif_:
        emit_branch("elif ", branch.test, branch.body, branch)
    if node.else_:
        self.writeline("else:")
        self.indent()
        self.blockvisit(node.else_, soft_frame)
        self.outdent()
def visit_Macro(self, node, frame):
    """Compile a ``{% macro %}`` definition and bind the resulting
    macro object to its name in the current scope."""
    body_frame, body_ref = self.macro_body(node, frame)
    self.newline()
    if frame.toplevel:
        # Names without a leading underscore are exported from the
        # template; the macro is also published via context.vars.
        if not node.name.startswith("_"):
            self.write("context.exported_vars.add(%r)" % node.name)
        self.writeline("context.vars[%r] = " % node.name)
    self.write("%s = " % frame.symbols.ref(node.name))
    self.macro_def(body_ref, body_frame)
def visit_CallBlock(self, node, frame):
    """Compile a ``{% call %}`` block: define the block body as the
    special ``caller`` macro, then emit the forwarded call."""
    body_frame, body_ref = self.macro_body(node, frame)
    self.writeline("caller = ")
    self.macro_def(body_ref, body_frame)
    self.start_write(frame, node)
    self.visit_Call(node.call, frame, forward_caller=True)
    self.end_write(frame)
def visit_FilterBlock(self, node, frame):
    """Compile a ``{% filter %}`` block: buffer the body output in an
    inner frame, then write the filtered result into the outer frame."""
    inner_frame = frame.inner()
    inner_frame.symbols.analyze_node(node)
    self.enter_frame(inner_frame)
    self.buffer(inner_frame)
    self.blockvisit(node.body, inner_frame)
    self.start_write(frame, node)
    # The filter reads from the inner frame's buffer (node.filter.node
    # is None inside a filter block).
    self.visit_Filter(node.filter, inner_frame)
    self.end_write(frame)
    self.leave_frame(inner_frame)
def visit_With(self, node, frame):
    """Compile a ``{% with %}`` block into assignments scoped to an
    inner frame."""
    scope_frame = frame.inner()
    scope_frame.symbols.analyze_node(node)
    self.enter_frame(scope_frame)
    for lhs, rhs in izip(node.targets, node.values):
        self.newline()
        # The target resolves in the new scope; the value expression is
        # evaluated against the enclosing frame.
        self.visit(lhs, scope_frame)
        self.write(" = ")
        self.visit(rhs, frame)
    self.blockvisit(node.body, scope_frame)
    self.leave_frame(scope_frame)
def visit_ExprStmt(self, node, frame):
    """Compile a ``{% do %}`` statement: evaluate the expression for
    its side effects and discard the result."""
    self.newline(node)
    self.visit(node.node, frame)
# Pair returned by _make_finalize: ``const`` finalizes constants at
# compile time, ``src`` is source text emitted around runtime nodes.
_FinalizeInfo = namedtuple("_FinalizeInfo", ("const", "src"))
#: The default finalize function if the environment isn't configured
#: with one. Or if the environment has one, this is called on that
#: function's output for constants.
_default_finalize = text_type
# Cache for _make_finalize; built once per code generator.
_finalize = None
def _make_finalize(self):
    """Build the finalize function to be used on constants and at
    runtime. Cached so it's only created once for all output nodes.

    Returns a ``namedtuple`` with the following attributes:

    ``const``
        A function to finalize constant data at compile time.

    ``src``
        Source code to output around nodes to be evaluated at
        runtime.
    """
    if self._finalize is not None:
        return self._finalize
    finalize = default = self._default_finalize
    src = None
    if self.environment.finalize:
        src = "environment.finalize("
        env_finalize = self.environment.finalize

        # Default wrapper: constants can be finalized at compile time.
        def finalize(value):
            return default(env_finalize(value))

        # Context / eval-context finalizers need runtime state, so no
        # compile-time constant folding is possible -> finalize = None.
        if getattr(env_finalize, "contextfunction", False) is True:
            src += "context, "
            finalize = None  # noqa: F811
        elif getattr(env_finalize, "evalcontextfunction", False) is True:
            src += "context.eval_ctx, "
            finalize = None
        elif getattr(env_finalize, "environmentfunction", False) is True:
            src += "environment, "

            # The environment is available at compile time, so constants
            # can still be folded by passing it explicitly.
            def finalize(value):
                return default(env_finalize(self.environment, value))

    self._finalize = self._FinalizeInfo(finalize, src)
    return self._finalize
def _output_const_repr(self, group):
    """Given a group of constant values converted from ``Output``
    child nodes, produce a string to write to the template module
    source.
    """
    return repr(concat(group))
def _output_child_to_const(self, node, frame, finalize):
    """Try to optimize a child of an ``Output`` node by trying to
    convert it to constant, finalized data at compile time.

    If :exc:`Impossible` is raised, the node is not constant and
    will be evaluated at runtime. Any other exception will also be
    evaluated at runtime for easier debugging.
    """
    value = node.as_const(frame.eval_ctx)

    if frame.eval_ctx.autoescape:
        value = escape(value)

    # Literal template data never goes through finalize.
    if isinstance(node, nodes.TemplateData):
        return text_type(value)

    return finalize.const(value)
def _output_child_pre(self, node, frame, finalize):
    """Output extra source code before visiting a child of an
    ``Output`` node.
    """
    # Pick the stringification wrapper first, then emit it in one call.
    if frame.eval_ctx.volatile:
        opener = "(escape if context.eval_ctx.autoescape else to_string)("
    elif frame.eval_ctx.autoescape:
        opener = "escape("
    else:
        opener = "to_string("
    self.write(opener)

    if finalize.src is not None:
        self.write(finalize.src)
def _output_child_post(self, node, frame, finalize):
    """Output extra source code after visiting a child of an
    ``Output`` node.
    """
    # Close the to_string/escape wrapper, and the finalize call if one
    # was opened by _output_child_pre.
    self.write(")")

    if finalize.src is not None:
        self.write(")")
def visit_Output(self, node, frame):
    """Compile an output node: fold adjacent constants at compile time
    and emit ``yield``s or buffer appends for the rest."""
    # If an extends is active, don't render outside a block.
    if frame.require_output_check:
        # A top-level extends is known to exist at compile time.
        if self.has_known_extends:
            return

        self.writeline("if parent_template is None:")
        self.indent()

    finalize = self._make_finalize()
    body = []

    # Evaluate constants at compile time if possible. Each item in
    # body will be either a list of static data or a node to be
    # evaluated at runtime.
    for child in node.nodes:
        try:
            if not (
                # If the finalize function requires runtime context,
                # constants can't be evaluated at compile time.
                finalize.const
                # Unless it's basic template data that won't be
                # finalized anyway.
                or isinstance(child, nodes.TemplateData)
            ):
                raise nodes.Impossible()

            const = self._output_child_to_const(child, frame, finalize)
        except (nodes.Impossible, Exception):
            # The node was not constant and needs to be evaluated at
            # runtime. Or another error was raised, which is easier
            # to debug at runtime.
            body.append(child)
            continue

        # Merge consecutive constants into one group.
        if body and isinstance(body[-1], list):
            body[-1].append(const)
        else:
            body.append([const])

    if frame.buffer is not None:
        if len(body) == 1:
            self.writeline("%s.append(" % frame.buffer)
        else:
            self.writeline("%s.extend((" % frame.buffer)

        self.indent()

    for item in body:
        if isinstance(item, list):
            # A group of constant data to join and output.
            val = self._output_const_repr(item)

            if frame.buffer is None:
                self.writeline("yield " + val)
            else:
                self.writeline(val + ",")
        else:
            if frame.buffer is None:
                self.writeline("yield ", item)
            else:
                self.newline(item)

            # A node to be evaluated at runtime.
            self._output_child_pre(item, frame, finalize)
            self.visit(item, frame)
            self._output_child_post(item, frame, finalize)

            if frame.buffer is not None:
                self.write(",")

    if frame.buffer is not None:
        self.outdent()
        self.writeline(")" if len(body) == 1 else "))")

    if frame.require_output_check:
        self.outdent()
def visit_Assign(self, node, frame):
    """Compile a plain ``{% set name = expr %}`` statement."""
    self.push_assign_tracking()
    self.newline(node)
    self.visit(node.target, frame)
    self.write(" = ")
    self.visit(node.node, frame)
    # Publishes any tracked top-level assignments to context.vars.
    self.pop_assign_tracking(frame)
def visit_AssignBlock(self, node, frame):
    """Compile a ``{% set name %}...{% endset %}`` capture block."""
    self.push_assign_tracking()
    block_frame = frame.inner()
    # This is a special case.  Since a set block always captures we
    # will disable output checks.  This way one can use set blocks
    # toplevel even in extended templates.
    block_frame.require_output_check = False
    block_frame.symbols.analyze_node(node)
    self.enter_frame(block_frame)
    self.buffer(block_frame)
    self.blockvisit(node.body, block_frame)
    self.newline(node)
    self.visit(node.target, frame)
    # Wrap the captured text as Markup when autoescape is active.
    self.write(" = (Markup if context.eval_ctx.autoescape else identity)(")
    if node.filter is not None:
        # ``{% set x | filter %}`` applies the filter to the buffer.
        self.visit_Filter(node.filter, block_frame)
    else:
        self.write("concat(%s)" % block_frame.buffer)
    self.write(")")
    self.pop_assign_tracking(frame)
    self.leave_frame(block_frame)
# -- Expression Visitors
def visit_Name(self, node, frame):
    """Compile a name reference, guarding loads against undefined."""
    if node.ctx == "store" and frame.toplevel:
        # Track top-level assignments so they can be exported later.
        if self._assign_stack:
            self._assign_stack[-1].add(node.name)
    ref = frame.symbols.ref(node.name)

    # If we are looking up a variable we might have to deal with the
    # case where it's undefined.  We can skip that case if the load
    # instruction indicates a parameter which are always defined.
    if node.ctx == "load":
        load = frame.symbols.find_load(ref)
        if not (
            load is not None
            and load[0] == VAR_LOAD_PARAMETER
            and not self.parameter_is_undeclared(ref)
        ):
            self.write(
                "(undefined(name=%r) if %s is missing else %s)"
                % (node.name, ref, ref)
            )
            return

    self.write(ref)
def visit_NSRef(self, node, frame):
    """Compile an assignment target of the form ``ns.attr``.

    NSRefs can only be used to store values; since they use the normal
    ``foo.bar`` notation they are parsed as a plain attribute access
    anywhere outside of a ``set`` context.
    """
    namespace_ref = frame.symbols.ref(node.name)
    self.writeline("if not isinstance(%s, Namespace):" % namespace_ref)
    self.indent()
    self.writeline(
        "raise TemplateRuntimeError(%r)"
        % "cannot assign attribute on non-namespace object"
    )
    self.outdent()
    self.writeline("%s[%r]" % (namespace_ref, node.attr))
def visit_Const(self, node, frame):
    """Emit a constant literal; floats go through ``str`` so the
    emitted literal round-trips, everything else through ``repr``."""
    value = node.as_const(frame.eval_ctx)
    text = str(value) if isinstance(value, float) else repr(value)
    self.write(text)
def visit_TemplateData(self, node, frame):
    """Emit literal template data, deferring the escape decision to
    runtime when it cannot be resolved at compile time."""
    try:
        constant = node.as_const(frame.eval_ctx)
    except nodes.Impossible:
        # Escaping depends on the runtime autoescape flag, so wrap the
        # raw data in a runtime Markup/identity dispatch.
        self.write(
            "(Markup if context.eval_ctx.autoescape else identity)(%r)"
            % node.data
        )
    else:
        self.write(repr(constant))
def visit_Tuple(self, node, frame):
    """Compile a tuple literal.

    A one-element tuple keeps its trailing comma (``(x,)``) so the
    emitted expression is still a tuple; an empty tuple is ``()``.
    """
    self.write("(")
    idx = -1
    for idx, item in enumerate(node.items):
        if idx:
            self.write(", ")
        self.visit(item, frame)
    # Fixed the fragile ``cond and a or b`` idiom (it silently breaks
    # whenever the middle operand is falsy) with a real conditional
    # expression; behavior is unchanged here since ",)" is truthy.
    self.write(",)" if idx == 0 else ")")
def visit_List(self, node, frame):
    """Compile a list literal as ``[a, b, ...]``."""
    self.write("[")
    first = True
    for element in node.items:
        if not first:
            self.write(", ")
        first = False
        self.visit(element, frame)
    self.write("]")
def visit_Dict(self, node, frame):
    """Compile a dict literal as ``{key: value, ...}``."""
    self.write("{")
    for position, pair in enumerate(node.items):
        if position:
            self.write(", ")
        self.visit(pair.key, frame)
        self.write(": ")
        self.visit(pair.value, frame)
    self.write("}")
def binop(operator, interceptable=True):  # noqa: B902
    """Factory for binary-operator visitor methods.

    In a sandboxed environment, intercepted operators are routed
    through ``environment.call_binop`` so the sandbox can veto them;
    otherwise the plain infix expression is emitted.
    """

    @optimizeconst
    def visitor(self, node, frame):
        if (
            self.environment.sandboxed
            and operator in self.environment.intercepted_binops
        ):
            self.write("environment.call_binop(context, %r, " % operator)
            self.visit(node.left, frame)
            self.write(", ")
            self.visit(node.right, frame)
        else:
            self.write("(")
            self.visit(node.left, frame)
            self.write(" %s " % operator)
            self.visit(node.right, frame)
        self.write(")")

    return visitor
def uaop(operator, interceptable=True):  # noqa: B902
    """Factory for unary-operator visitor methods; sandbox-intercepted
    operators go through ``environment.call_unop``."""

    @optimizeconst
    def visitor(self, node, frame):
        if (
            self.environment.sandboxed
            and operator in self.environment.intercepted_unops
        ):
            self.write("environment.call_unop(context, %r, " % operator)
            self.visit(node.node, frame)
        else:
            self.write("(" + operator)
            self.visit(node.node, frame)
        self.write(")")

    return visitor
# Concrete operator visitors generated from the two factories above.
# ``and``/``or``/``not`` are never sandbox-interceptable because they
# are compiled to Python keywords, not calls.
visit_Add = binop("+")
visit_Sub = binop("-")
visit_Mul = binop("*")
visit_Div = binop("/")
visit_FloorDiv = binop("//")
visit_Pow = binop("**")
visit_Mod = binop("%")
visit_And = binop("and", interceptable=False)
visit_Or = binop("or", interceptable=False)
visit_Pos = uaop("+")
visit_Neg = uaop("-")
visit_Not = uaop("not ", interceptable=False)
# The factories are only needed at class-body construction time.
del binop, uaop
@optimizeconst
def visit_Concat(self, node, frame):
    """Compile ``~`` concatenation into a join call over the operands,
    choosing the markup-aware joiner when autoescaping may apply."""
    if frame.eval_ctx.volatile:
        joiner = "(context.eval_ctx.volatile and markup_join or unicode_join)"
    elif frame.eval_ctx.autoescape:
        joiner = "markup_join"
    else:
        joiner = "unicode_join"
    self.write("%s((" % joiner)
    for operand in node.nodes:
        self.visit(operand, frame)
        self.write(", ")
    self.write("))")
@optimizeconst
def visit_Compare(self, node, frame):
    """Compile a (possibly chained) comparison expression; each operand
    node writes its own operator and right-hand side."""
    self.write("(")
    self.visit(node.expr, frame)
    for op in node.ops:
        self.visit(op, frame)
    self.write(")")
def visit_Operand(self, node, frame):
    """Emit one comparison operand: the mapped operator symbol followed
    by its expression."""
    self.write(" %s " % operators[node.op])
    self.visit(node.expr, frame)
@optimizeconst
def visit_Getattr(self, node, frame):
    """Compile attribute access through ``environment.getattr``."""
    wrap_await = self.environment.is_async
    if wrap_await:
        # In async mode the lookup result may be awaitable.
        self.write("(await auto_await(")

    self.write("environment.getattr(")
    self.visit(node.node, frame)
    self.write(", %r)" % node.attr)

    if wrap_await:
        self.write("))")
@optimizeconst
def visit_Getitem(self, node, frame):
    """Compile subscript access through ``environment.getitem``."""
    # slices bypass the environment getitem method.
    if isinstance(node.arg, nodes.Slice):
        self.visit(node.node, frame)
        self.write("[")
        self.visit(node.arg, frame)
        self.write("]")
        return

    wrap_await = self.environment.is_async
    if wrap_await:
        self.write("(await auto_await(")

    self.write("environment.getitem(")
    self.visit(node.node, frame)
    self.write(", ")
    self.visit(node.arg, frame)
    self.write(")")

    if wrap_await:
        self.write("))")
def visit_Slice(self, node, frame):
    """Emit a Python slice expression ``start:stop[:step]`` in place
    (used inside ``visit_Getitem``'s slice branch)."""
    if node.start is not None:
        self.visit(node.start, frame)
    self.write(":")
    if node.stop is not None:
        self.visit(node.stop, frame)
    if node.step is not None:
        self.write(":")
        self.visit(node.step, frame)
@optimizeconst
def visit_Filter(self, node, frame):
    """Compile a filter application, injecting context/eval-context/
    environment as the first argument per the filter's decorators."""
    if self.environment.is_async:
        self.write("await auto_await(")
    # NOTE: the filter's local alias is written before the existence
    # check below; on an unknown filter the fail() aborts compilation,
    # so the partial write is never used.
    self.write(self.filters[node.name] + "(")
    func = self.environment.filters.get(node.name)
    if func is None:
        self.fail("no filter named %r" % node.name, node.lineno)
    if getattr(func, "contextfilter", False) is True:
        self.write("context, ")
    elif getattr(func, "evalcontextfilter", False) is True:
        self.write("context.eval_ctx, ")
    elif getattr(func, "environmentfilter", False) is True:
        self.write("environment, ")

    # if the filter node is None we are inside a filter block
    # and want to write to the current buffer
    if node.node is not None:
        self.visit(node.node, frame)
    elif frame.eval_ctx.volatile:
        self.write(
            "(context.eval_ctx.autoescape and"
            " Markup(concat(%s)) or concat(%s))" % (frame.buffer, frame.buffer)
        )
    elif frame.eval_ctx.autoescape:
        self.write("Markup(concat(%s))" % frame.buffer)
    else:
        self.write("concat(%s)" % frame.buffer)
    self.signature(node, frame)
    self.write(")")
    if self.environment.is_async:
        self.write(")")
@optimizeconst
def visit_Test(self, node, frame):
    """Compile a test application (``x is odd``)."""
    # NOTE: the alias is written before the existence check; fail()
    # aborts compilation so the partial output is never used.
    self.write(self.tests[node.name] + "(")
    if node.name not in self.environment.tests:
        self.fail("no test named %r" % node.name, node.lineno)
    self.visit(node.node, frame)
    self.signature(node, frame)
    self.write(")")
@optimizeconst
def visit_CondExpr(self, node, frame):
    """Compile an inline ``a if test else b`` expression; a missing
    else branch becomes a runtime undefined with a helpful message."""

    def write_expr2():
        if node.expr2 is not None:
            return self.visit(node.expr2, frame)
        self.write(
            "cond_expr_undefined(%r)"
            % (
                "the inline if-"
                "expression on %s evaluated to false and "
                "no else section was defined." % self.position(node)
            )
        )

    self.write("(")
    self.visit(node.expr1, frame)
    self.write(" if ")
    self.visit(node.test, frame)
    self.write(" else ")
    write_expr2()
    self.write(")")
@optimizeconst
def visit_Call(self, node, frame, forward_caller=False):
    """Compile a call expression.

    Routes the call through ``environment.call`` in sandboxed mode (so
    the sandbox can veto it) or ``context.call`` otherwise, and wraps
    it in ``await auto_await(...)`` in async mode.  When
    ``forward_caller`` is true the special ``caller`` macro defined by
    an enclosing ``{% call %}`` block is forwarded as a keyword.
    """
    if self.environment.is_async:
        self.write("await auto_await(")
    if self.environment.sandboxed:
        self.write("environment.call(context, ")
    else:
        self.write("context.call(")
    self.visit(node.node, frame)
    # Fixed the fragile ``cond and a or b`` idiom with a conditional
    # expression (the dict operand is truthy, so behavior is the same).
    extra_kwargs = {"caller": "caller"} if forward_caller else None
    self.signature(node, frame, extra_kwargs)
    self.write(")")
    if self.environment.is_async:
        self.write(")")
def visit_Keyword(self, node, frame):
    """Emit a ``key=value`` keyword argument inside a call."""
    self.write(node.key + "=")
    self.visit(node.value, frame)
# -- Unused nodes for extensions
def visit_MarkSafe(self, node, frame):
    """Wrap the expression in ``Markup(...)`` so it is always safe."""
    self.write("Markup(")
    self.visit(node.expr, frame)
    self.write(")")
def visit_MarkSafeIfAutoescape(self, node, frame):
    """Wrap the expression as Markup only when autoescape is active at
    runtime; otherwise pass it through ``identity``."""
    self.write("(context.eval_ctx.autoescape and Markup or identity)(")
    self.visit(node.expr, frame)
    self.write(")")
def visit_EnvironmentAttribute(self, node, frame):
    """Emit a lookup of an attribute on the runtime environment."""
    self.write("environment." + node.name)
def visit_ExtensionAttribute(self, node, frame):
    """Emit a lookup of an attribute on a loaded extension instance."""
    self.write("environment.extensions[%r].%s" % (node.identifier, node.name))
def visit_ImportedName(self, node, frame):
    """Emit the module-level alias registered for this imported name."""
    self.write(self.import_aliases[node.importname])
def visit_InternalName(self, node, frame):
    """Emit a compiler-generated internal name verbatim."""
    self.write(node.name)
def visit_ContextReference(self, node, frame):
    """Emit a reference to the current template context object."""
    self.write("context")
def visit_DerivedContextReference(self, node, frame):
    """Emit a reference to a context derived from the current frame's
    local variables."""
    self.write(self.derive_context(frame))
def visit_Continue(self, node, frame):
    """Emit a ``continue`` statement (loop-controls extension)."""
    self.writeline("continue", node)
def visit_Break(self, node, frame):
    """Emit a ``break`` statement (loop-controls extension)."""
    self.writeline("break", node)
def visit_Scope(self, node, frame):
    """Compile an artificial scope node: visit the body inside a fresh
    inner frame so its assignments stay local."""
    inner_frame = frame.inner()
    inner_frame.symbols.analyze_node(node)
    self.enter_frame(inner_frame)
    self.blockvisit(node.body, inner_frame)
    self.leave_frame(inner_frame)
def visit_OverlayScope(self, node, frame):
    """Compile an overlay scope: derive a new context whose ``vars``
    come from the node's context expression, then visit the body in an
    isolated inner frame against that context."""
    ctx = self.temporary_identifier()
    self.writeline("%s = %s" % (ctx, self.derive_context(frame)))
    self.writeline("%s.vars = " % ctx)
    self.visit(node.context, frame)
    self.push_context_reference(ctx)

    # Isolated: the overlay body does not see the outer frame's locals.
    scope_frame = frame.inner(isolated=True)
    scope_frame.symbols.analyze_node(node)
    self.enter_frame(scope_frame)
    self.blockvisit(node.body, scope_frame)
    self.leave_frame(scope_frame)
    self.pop_context_reference()
def visit_EvalContextModifier(self, node, frame):
    """Compile eval-context option assignments (e.g. ``autoescape``),
    mirroring each change on the compile-time eval context when the
    value is constant, or marking it volatile otherwise."""
    for keyword in node.options:
        self.writeline("context.eval_ctx.%s = " % keyword.key)
        self.visit(keyword.value, frame)
        try:
            val = keyword.value.as_const(frame.eval_ctx)
        except nodes.Impossible:
            # Value only known at runtime: downstream code must defer
            # escaping decisions to runtime too.
            frame.eval_ctx.volatile = True
        else:
            setattr(frame.eval_ctx, keyword.key, val)
def visit_ScopedEvalContextModifier(self, node, frame):
    """Compile a scoped eval-context change (``{% autoescape %}``):
    save the runtime eval context, apply the modifiers, run the body,
    then restore both runtime and compile-time contexts."""
    saved_name = self.temporary_identifier()
    snapshot = frame.eval_ctx.save()
    self.writeline("%s = context.eval_ctx.save()" % saved_name)
    self.visit_EvalContextModifier(node, frame)
    for stmt in node.body:
        self.visit(stmt, frame)
    frame.eval_ctx.revert(snapshot)
    self.writeline("context.eval_ctx.revert(%s)" % saved_name)
magma/squid_cnf/charms/squid/venv/jinja2/constants.py
0 → 100644
View file @
17583c8b
# -*- coding: utf-8 -*-
#: list of lorem ipsum words used by the lipsum() helper function
# Lowercase, whitespace-separated word pool; the leading backslash
# keeps the first line of the literal from being a blank line.
LOREM_IPSUM_WORDS = u"""\
a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at
auctor augue bibendum blandit class commodo condimentum congue consectetuer
consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus
diam dictum dictumst dignissim dis dolor donec dui duis egestas eget eleifend
elementum elit enim erat eros est et etiam eu euismod facilisi facilisis fames
faucibus felis fermentum feugiat fringilla fusce gravida habitant habitasse hac
hendrerit hymenaeos iaculis id imperdiet in inceptos integer interdum ipsum
justo lacinia lacus laoreet lectus leo libero ligula litora lobortis lorem
luctus maecenas magna magnis malesuada massa mattis mauris metus mi molestie
mollis montes morbi mus nam nascetur natoque nec neque netus nibh nisi nisl non
nonummy nostra nulla nullam nunc odio orci ornare parturient pede pellentesque
penatibus per pharetra phasellus placerat platea porta porttitor posuere
potenti praesent pretium primis proin pulvinar purus quam quis quisque rhoncus
ridiculus risus rutrum sagittis sapien scelerisque sed sem semper senectus sit
sociis sociosqu sodales sollicitudin suscipit suspendisse taciti tellus tempor
tempus tincidunt torquent tortor tristique turpis ullamcorper ultrices
ultricies urna ut varius vehicula vel velit venenatis vestibulum vitae vivamus
viverra volutpat vulputate"""
magma/squid_cnf/charms/squid/venv/jinja2/debug.py
0 → 100644
View file @
17583c8b
import
sys
from
types
import
CodeType
from
.
import
TemplateSyntaxError
from
._compat
import
PYPY
from
.utils
import
internal_code
from
.utils
import
missing
def rewrite_traceback_stack(source=None):
    """Rewrite the current exception to replace any tracebacks from
    within compiled template code with tracebacks that look like they
    came from the template source.

    This must be called within an ``except`` block.

    :param source: For ``TemplateSyntaxError``, the original source if
        known.
    :return: A :meth:`sys.exc_info` tuple that can be re-raised.
    """
    exc_type, exc_value, tb = sys.exc_info()

    if isinstance(exc_value, TemplateSyntaxError) and not exc_value.translated:
        exc_value.translated = True
        exc_value.source = source

        try:
            # Remove the old traceback on Python 3, otherwise the frames
            # from the compiler still show up.
            exc_value.with_traceback(None)
        except AttributeError:
            pass

        # Outside of runtime, so the frame isn't executing template
        # code, but it still needs to point at the template.
        tb = fake_traceback(
            exc_value, None, exc_value.filename or "<unknown>", exc_value.lineno
        )
    else:
        # Skip the frame for the render function.
        tb = tb.tb_next

    stack = []

    # Build the stack of traceback object, replacing any in template
    # code with the source file and line information.
    while tb is not None:
        # Skip frames decorated with @internalcode. These are internal
        # calls that aren't useful in template debugging output.
        if tb.tb_frame.f_code in internal_code:
            tb = tb.tb_next
            continue

        template = tb.tb_frame.f_globals.get("__jinja_template__")

        if template is not None:
            # Map the compiled-code line back onto the template source.
            lineno = template.get_corresponding_lineno(tb.tb_lineno)
            fake_tb = fake_traceback(exc_value, tb, template.filename, lineno)
            stack.append(fake_tb)
        else:
            stack.append(tb)

        tb = tb.tb_next

    tb_next = None

    # Assign tb_next in reverse to avoid circular references.
    for tb in reversed(stack):
        tb_next = tb_set_next(tb, tb_next)

    return exc_type, exc_value, tb_next
def fake_traceback(exc_value, tb, filename, lineno):
    """Produce a new traceback object that looks like it came from the
    template source instead of the compiled code. The filename, line
    number, and location name will point to the template, and the local
    variables will be the current template context.

    :param exc_value: The original exception to be re-raised to create
        the new traceback.
    :param tb: The original traceback to get the local variables and
        code info from.
    :param filename: The template filename.
    :param lineno: The line number in the template source.
    :return: The traceback of the re-raised exception, without the frame
        for this helper function itself.
    """
    if tb is not None:
        # Replace the real locals with the context that would be
        # available at that point in the template.
        locals = get_template_locals(tb.tb_frame.f_locals)
        locals.pop("__jinja_exception__", None)
    else:
        locals = {}

    globals = {
        "__name__": filename,
        "__file__": filename,
        "__jinja_exception__": exc_value,
    }
    # Raise an exception at the correct line number.  The newline padding
    # shifts the single `raise` statement down to `lineno` so the compiled
    # code object reports the template line.
    code = compile("\n" * (lineno - 1) + "raise __jinja_exception__", filename, "exec")

    # Build a new code object that points to the template file and
    # replaces the location with a block name.
    try:
        location = "template"

        if tb is not None:
            function = tb.tb_frame.f_code.co_name

            if function == "root":
                location = "top-level template code"
            elif function.startswith("block_"):
                # Compiled block functions are named "block_<name>".
                location = 'block "%s"' % function[6:]

        # Collect arguments for the new code object. CodeType only
        # accepts positional arguments, and arguments were inserted in
        # new Python versions.
        code_args = []

        for attr in (
            "argcount",
            "posonlyargcount",  # Python 3.8
            "kwonlyargcount",  # Python 3
            "nlocals",
            "stacksize",
            "flags",
            "code",  # codestring
            "consts",  # constants
            "names",
            "varnames",
            ("filename", filename),
            ("name", location),
            "firstlineno",
            "lnotab",
            "freevars",
            "cellvars",
        ):
            if isinstance(attr, tuple):
                # Replace with given value.
                code_args.append(attr[1])
                continue

            try:
                # Copy original value if it exists.
                code_args.append(getattr(code, "co_" + attr))
            except AttributeError:
                # Some arguments were added later.
                continue

        code = CodeType(*code_args)
    except Exception:
        # Some environments such as Google App Engine don't support
        # modifying code objects.
        pass

    # Execute the new code, which is guaranteed to raise, and return
    # the new traceback without this frame.
    try:
        exec(code, globals, locals)
    except BaseException:
        return sys.exc_info()[2].tb_next
def get_template_locals(real_locals):
    """Based on the runtime locals, get the context that would be
    available at that point in the template.
    """
    # Start from a copy of the current template context, if one exists.
    ctx = real_locals.get("context")
    data = ctx.get_all().copy() if ctx else {}

    # We might be in a derived context that only sets local variables
    # instead of pushing a full context.  Locals follow the scheme
    # ``l_<depth>_<name>``; for each name keep the value bound at the
    # greatest depth.
    deepest = {}

    for key, val in real_locals.items():
        if not key.startswith("l_") or val is missing:
            # Not a template variable, or no longer relevant.
            continue

        try:
            _prefix, depth_text, var_name = key.split("_", 2)
            depth = int(depth_text)
        except ValueError:
            # Malformed local name; ignore it.
            continue

        if deepest.get(var_name, (-1,))[0] < depth:
            deepest[var_name] = (depth, val)

    # Fold the derived-context overrides into the base data.
    for var_name, (_depth, val) in deepest.items():
        if val is missing:
            data.pop(var_name, None)
        else:
            data[var_name] = val

    return data
if sys.version_info >= (3, 7):
    # tb_next is directly assignable as of Python 3.7
    def tb_set_next(tb, tb_next):
        tb.tb_next = tb_next
        return tb


elif PYPY:
    # PyPy might have special support, and won't work with ctypes.
    try:
        import tputil
    except ImportError:
        # Without tproxy support, use the original traceback.
        def tb_set_next(tb, tb_next):
            return tb

    else:
        # With tproxy support, create a proxy around the traceback that
        # returns the new tb_next.
        def tb_set_next(tb, tb_next):
            def controller(op):
                # Intercept only reads of "tb_next"; everything else is
                # delegated to the real traceback object.
                if op.opname == "__getattribute__" and op.args[0] == "tb_next":
                    return tb_next

                return op.delegate()

            return tputil.make_proxy(controller, obj=tb)


else:
    # Use ctypes to assign tb_next at the C level since it's read-only
    # from Python.
    import ctypes

    class _CTraceback(ctypes.Structure):
        # Partial mirror of CPython's PyTracebackObject struct layout:
        # just enough to reach the tb_next pointer.
        _fields_ = [
            # Extra PyObject slots when compiled with Py_TRACE_REFS.
            ("PyObject_HEAD", ctypes.c_byte * object().__sizeof__()),
            # Only care about tb_next as an object, not a traceback.
            ("tb_next", ctypes.py_object),
        ]

    def tb_set_next(tb, tb_next):
        c_tb = _CTraceback.from_address(id(tb))

        # Clear out the old tb_next.  Reference counts must be adjusted
        # manually because we bypass the interpreter's attribute machinery.
        if tb.tb_next is not None:
            c_tb_next = ctypes.py_object(tb.tb_next)
            c_tb.tb_next = ctypes.py_object()
            ctypes.pythonapi.Py_DecRef(c_tb_next)

        # Assign the new tb_next.
        if tb_next is not None:
            c_tb_next = ctypes.py_object(tb_next)
            ctypes.pythonapi.Py_IncRef(c_tb_next)
            c_tb.tb_next = c_tb_next

        return tb
magma/squid_cnf/charms/squid/venv/jinja2/defaults.py
0 → 100644
View file @
17583c8b
# -*- coding: utf-8 -*-
from
._compat
import
range_type
from
.filters
import
FILTERS
as
DEFAULT_FILTERS
# noqa: F401
from
.tests
import
TESTS
as
DEFAULT_TESTS
# noqa: F401
from
.utils
import
Cycler
from
.utils
import
generate_lorem_ipsum
from
.utils
import
Joiner
from
.utils
import
Namespace
# defaults for the parser / lexer
BLOCK_START_STRING = "{%"
BLOCK_END_STRING = "%}"
VARIABLE_START_STRING = "{{"
VARIABLE_END_STRING = "}}"
COMMENT_START_STRING = "{#"
COMMENT_END_STRING = "#}"
# Line-based statement/comment prefixes are disabled unless configured
# on the Environment.
LINE_STATEMENT_PREFIX = None
LINE_COMMENT_PREFIX = None
# Whitespace-control defaults; see the Environment docstring.
TRIM_BLOCKS = False
LSTRIP_BLOCKS = False
NEWLINE_SEQUENCE = "\n"
KEEP_TRAILING_NEWLINE = False

# default filters, tests and namespace
DEFAULT_NAMESPACE = {
    "range": range_type,
    "dict": dict,
    "lipsum": generate_lorem_ipsum,
    "cycler": Cycler,
    "joiner": Joiner,
    "namespace": Namespace,
}

# default policies consulted at runtime by filters and extensions
DEFAULT_POLICIES = {
    "compiler.ascii_str": True,
    "urlize.rel": "noopener",
    "urlize.target": None,
    "truncate.leeway": 5,
    "json.dumps_function": None,
    "json.dumps_kwargs": {"sort_keys": True},
    "ext.i18n.trimmed": False,
}
magma/squid_cnf/charms/squid/venv/jinja2/environment.py
0 → 100644
View file @
17583c8b
# -*- coding: utf-8 -*-
"""Classes for managing templates and their runtime and compile time
options.
"""
import
os
import
sys
import
weakref
from
functools
import
partial
from
functools
import
reduce
from
markupsafe
import
Markup
from
.
import
nodes
from
._compat
import
encode_filename
from
._compat
import
implements_iterator
from
._compat
import
implements_to_string
from
._compat
import
iteritems
from
._compat
import
PY2
from
._compat
import
PYPY
from
._compat
import
reraise
from
._compat
import
string_types
from
._compat
import
text_type
from
.compiler
import
CodeGenerator
from
.compiler
import
generate
from
.defaults
import
BLOCK_END_STRING
from
.defaults
import
BLOCK_START_STRING
from
.defaults
import
COMMENT_END_STRING
from
.defaults
import
COMMENT_START_STRING
from
.defaults
import
DEFAULT_FILTERS
from
.defaults
import
DEFAULT_NAMESPACE
from
.defaults
import
DEFAULT_POLICIES
from
.defaults
import
DEFAULT_TESTS
from
.defaults
import
KEEP_TRAILING_NEWLINE
from
.defaults
import
LINE_COMMENT_PREFIX
from
.defaults
import
LINE_STATEMENT_PREFIX
from
.defaults
import
LSTRIP_BLOCKS
from
.defaults
import
NEWLINE_SEQUENCE
from
.defaults
import
TRIM_BLOCKS
from
.defaults
import
VARIABLE_END_STRING
from
.defaults
import
VARIABLE_START_STRING
from
.exceptions
import
TemplateNotFound
from
.exceptions
import
TemplateRuntimeError
from
.exceptions
import
TemplatesNotFound
from
.exceptions
import
TemplateSyntaxError
from
.exceptions
import
UndefinedError
from
.lexer
import
get_lexer
from
.lexer
import
TokenStream
from
.nodes
import
EvalContext
from
.parser
import
Parser
from
.runtime
import
Context
from
.runtime
import
new_context
from
.runtime
import
Undefined
from
.utils
import
concat
from
.utils
import
consume
from
.utils
import
have_async_gen
from
.utils
import
import_string
from
.utils
import
internalcode
from
.utils
import
LRUCache
from
.utils
import
missing
# for direct template usage we have up to ten living environments
# (keyed by environment class + constructor args; see
# get_spontaneous_environment below)
_spontaneous_environments = LRUCache(10)
def get_spontaneous_environment(cls, *args):
    """Return a new spontaneous environment. A spontaneous environment
    is used for templates created directly rather than through an
    existing environment.

    :param cls: Environment class to create.
    :param args: Positional arguments passed to environment.
    """
    cache_key = (cls, args)

    try:
        env = _spontaneous_environments[cache_key]
    except KeyError:
        # Not cached yet: build one, store it, and mark it shared so it
        # is never mutated by callers.
        env = cls(*args)
        _spontaneous_environments[cache_key] = env
        env.shared = True

    return env
def create_cache(size):
    """Return the cache class for the given size.

    A negative size means an unbounded plain-dict cache, zero disables
    caching entirely, and a positive size yields a bounded LRU cache.
    """
    if size < 0:
        return {}

    if size == 0:
        return None

    return LRUCache(size)
def copy_cache(cache):
    """Create an empty copy of the given cache."""
    if cache is None:
        fresh = None
    elif type(cache) is dict:
        # A plain dict means "unbounded" (see create_cache); the exact
        # type check deliberately excludes dict subclasses.
        fresh = {}
    else:
        fresh = LRUCache(cache.capacity)
    return fresh
def load_extensions(environment, extensions):
    """Load the extensions from the list and bind it to the environment.
    Returns a dict of instantiated environments.
    """
    loaded = {}

    for ext in extensions:
        # Dotted-path strings are resolved to the extension class first.
        ext_cls = import_string(ext) if isinstance(ext, string_types) else ext
        loaded[ext_cls.identifier] = ext_cls(environment)

    return loaded
def fail_for_missing_callable(string, name):
    """Raise a :exc:`TemplateRuntimeError` for a filter or test that
    could not be found, enriching the message when *name* is undefined.
    """
    message = string % name

    if isinstance(name, Undefined):
        # Trigger the undefined error so its explanation can be folded
        # into the message we raise.
        try:
            name._fail_with_undefined_error()
        except Exception as exc:
            message = "%s (%s; did you forget to quote the callable name?)" % (
                message,
                exc,
            )

    raise TemplateRuntimeError(message)
def _environment_sanity_check(environment):
    """Perform a sanity check on the environment.

    :param environment: The environment to validate.
    :return: The same environment, for call chaining.
    :raises AssertionError: If a delimiter or setting is invalid.
    """
    assert issubclass(
        environment.undefined, Undefined
    ), "undefined must be a subclass of undefined because filters depend on it."
    # BUG FIX: the original chained comparison ``a != b != c`` only checked
    # block != variable and variable != comment — it never compared the
    # block and comment start strings against each other, so an environment
    # with block_start_string == comment_start_string passed the check.
    # Compare all three pairs, as the assertion message promises.
    assert (
        environment.block_start_string != environment.variable_start_string
        and environment.variable_start_string != environment.comment_start_string
        and environment.block_start_string != environment.comment_start_string
    ), "block, variable and comment start strings must be different"
    assert environment.newline_sequence in (
        "\r",
        "\r\n",
        "\n",
    ), "newline_sequence set to unknown line ending string."
    return environment
class
Environment
(
object
):
r
"""The core component of Jinja is the `Environment`. It contains
important shared variables like configuration, filters, tests,
globals and others. Instances of this class may be modified if
they are not shared and if no template was loaded so far.
Modifications on environments after the first template was loaded
will lead to surprising effects and undefined behavior.
Here are the possible initialization parameters:
`block_start_string`
The string marking the beginning of a block. Defaults to ``'{%'``.
`block_end_string`
The string marking the end of a block. Defaults to ``'%}'``.
`variable_start_string`
The string marking the beginning of a print statement.
Defaults to ``'{{'``.
`variable_end_string`
The string marking the end of a print statement. Defaults to
``'}}'``.
`comment_start_string`
The string marking the beginning of a comment. Defaults to ``'{#'``.
`comment_end_string`
The string marking the end of a comment. Defaults to ``'#}'``.
`line_statement_prefix`
If given and a string, this will be used as prefix for line based
statements. See also :ref:`line-statements`.
`line_comment_prefix`
If given and a string, this will be used as prefix for line based
comments. See also :ref:`line-statements`.
.. versionadded:: 2.2
`trim_blocks`
If this is set to ``True`` the first newline after a block is
removed (block, not variable tag!). Defaults to `False`.
`lstrip_blocks`
If this is set to ``True`` leading spaces and tabs are stripped
from the start of a line to a block. Defaults to `False`.
`newline_sequence`
The sequence that starts a newline. Must be one of ``'\r'``,
``'\n'`` or ``'\r\n'``. The default is ``'\n'`` which is a
useful default for Linux and OS X systems as well as web
applications.
`keep_trailing_newline`
Preserve the trailing newline when rendering templates.
The default is ``False``, which causes a single newline,
if present, to be stripped from the end of the template.
.. versionadded:: 2.7
`extensions`
List of Jinja extensions to use. This can either be import paths
as strings or extension classes. For more information have a
look at :ref:`the extensions documentation <jinja-extensions>`.
`optimized`
should the optimizer be enabled? Default is ``True``.
`undefined`
:class:`Undefined` or a subclass of it that is used to represent
undefined values in the template.
`finalize`
A callable that can be used to process the result of a variable
expression before it is output. For example one can convert
``None`` implicitly into an empty string here.
`autoescape`
If set to ``True`` the XML/HTML autoescaping feature is enabled by
default. For more details about autoescaping see
:class:`~markupsafe.Markup`. As of Jinja 2.4 this can also
be a callable that is passed the template name and has to
return ``True`` or ``False`` depending on autoescape should be
enabled by default.
.. versionchanged:: 2.4
`autoescape` can now be a function
`loader`
The template loader for this environment.
`cache_size`
The size of the cache. Per default this is ``400`` which means
that if more than 400 templates are loaded the loader will clean
out the least recently used template. If the cache size is set to
``0`` templates are recompiled all the time, if the cache size is
``-1`` the cache will not be cleaned.
.. versionchanged:: 2.8
The cache size was increased to 400 from a low 50.
`auto_reload`
Some loaders load templates from locations where the template
sources may change (ie: file system or database). If
``auto_reload`` is set to ``True`` (default) every time a template is
requested the loader checks if the source changed and if yes, it
will reload the template. For higher performance it's possible to
disable that.
`bytecode_cache`
If set to a bytecode cache object, this object will provide a
cache for the internal Jinja bytecode so that templates don't
have to be parsed if they were not changed.
See :ref:`bytecode-cache` for more information.
`enable_async`
If set to true this enables async template execution which allows
you to take advantage of newer Python features. This requires
Python 3.6 or later.
"""
    #: if this environment is sandboxed. Modifying this variable won't make
    #: the environment sandboxed though. For a real sandboxed environment
    #: have a look at jinja2.sandbox. This flag alone controls the code
    #: generation by the compiler.
    sandboxed = False

    #: True if the environment is just an overlay
    overlayed = False

    #: the environment this environment is linked to if it is an overlay
    linked_to = None

    #: shared environments have this set to `True`. A shared environment
    #: must not be modified
    shared = False

    #: the class that is used for code generation. See
    #: :class:`~jinja2.compiler.CodeGenerator` for more information.
    code_generator_class = CodeGenerator

    #: the context class that is used for templates. See
    #: :class:`~jinja2.runtime.Context` for more information.
    context_class = Context
    def __init__(
        self,
        block_start_string=BLOCK_START_STRING,
        block_end_string=BLOCK_END_STRING,
        variable_start_string=VARIABLE_START_STRING,
        variable_end_string=VARIABLE_END_STRING,
        comment_start_string=COMMENT_START_STRING,
        comment_end_string=COMMENT_END_STRING,
        line_statement_prefix=LINE_STATEMENT_PREFIX,
        line_comment_prefix=LINE_COMMENT_PREFIX,
        trim_blocks=TRIM_BLOCKS,
        lstrip_blocks=LSTRIP_BLOCKS,
        newline_sequence=NEWLINE_SEQUENCE,
        keep_trailing_newline=KEEP_TRAILING_NEWLINE,
        extensions=(),
        optimized=True,
        undefined=Undefined,
        finalize=None,
        autoescape=False,
        loader=None,
        cache_size=400,
        auto_reload=True,
        bytecode_cache=None,
        enable_async=False,
    ):
        """Initialize the environment.  All parameters are documented in
        the class docstring above.
        """
        # !!Important notice!!
        # The constructor accepts quite a few arguments that should be
        # passed by keyword rather than position. However it's important to
        # not change the order of arguments because it's used at least
        # internally in those cases:
        # - spontaneous environments (i18n extension and Template)
        # - unittests
        # If parameter changes are required only add parameters at the end
        # and don't change the arguments (or the defaults!) of the arguments
        # existing already.

        # lexer / parser information
        self.block_start_string = block_start_string
        self.block_end_string = block_end_string
        self.variable_start_string = variable_start_string
        self.variable_end_string = variable_end_string
        self.comment_start_string = comment_start_string
        self.comment_end_string = comment_end_string
        self.line_statement_prefix = line_statement_prefix
        self.line_comment_prefix = line_comment_prefix
        self.trim_blocks = trim_blocks
        self.lstrip_blocks = lstrip_blocks
        self.newline_sequence = newline_sequence
        self.keep_trailing_newline = keep_trailing_newline

        # runtime information
        self.undefined = undefined
        self.optimized = optimized
        self.finalize = finalize
        self.autoescape = autoescape

        # defaults -- copies, so mutating this environment never touches
        # the shared module-level default mappings
        self.filters = DEFAULT_FILTERS.copy()
        self.tests = DEFAULT_TESTS.copy()
        self.globals = DEFAULT_NAMESPACE.copy()

        # set the loader provided
        self.loader = loader
        self.cache = create_cache(cache_size)
        self.bytecode_cache = bytecode_cache
        self.auto_reload = auto_reload

        # configurable policies
        self.policies = DEFAULT_POLICIES.copy()

        # load extensions (after all other attributes, since extensions
        # may inspect the environment on construction)
        self.extensions = load_extensions(self, extensions)

        self.enable_async = enable_async
        self.is_async = self.enable_async and have_async_gen
        if self.is_async:
            # runs patch_all() to enable async support
            from . import asyncsupport  # noqa: F401

        _environment_sanity_check(self)
def
add_extension
(
self
,
extension
):
"""Adds an extension after the environment was created.
.. versionadded:: 2.5
"""
self
.
extensions
.
update
(
load_extensions
(
self
,
[
extension
]))
def
extend
(
self
,
**
attributes
):
"""Add the items to the instance of the environment if they do not exist
yet. This is used by :ref:`extensions <writing-extensions>` to register
callbacks and configuration values without breaking inheritance.
"""
for
key
,
value
in
iteritems
(
attributes
):
if
not
hasattr
(
self
,
key
):
setattr
(
self
,
key
,
value
)
    def overlay(
        self,
        block_start_string=missing,
        block_end_string=missing,
        variable_start_string=missing,
        variable_end_string=missing,
        comment_start_string=missing,
        comment_end_string=missing,
        line_statement_prefix=missing,
        line_comment_prefix=missing,
        trim_blocks=missing,
        lstrip_blocks=missing,
        extensions=missing,
        optimized=missing,
        undefined=missing,
        finalize=missing,
        autoescape=missing,
        loader=missing,
        cache_size=missing,
        auto_reload=missing,
        bytecode_cache=missing,
    ):
        """Create a new overlay environment that shares all the data with the
        current environment except for cache and the overridden attributes.
        Extensions cannot be removed for an overlayed environment. An overlayed
        environment automatically gets all the extensions of the environment it
        is linked to plus optional extra extensions.

        Creating overlays should happen after the initial environment was set
        up completely. Not all attributes are truly linked, some are just
        copied over so modifications on the original environment may not shine
        through.
        """
        # Snapshot the keyword arguments.  This must stay the first
        # statement: dict(locals()) relies on no other locals existing yet.
        args = dict(locals())
        del args["self"], args["cache_size"], args["extensions"]

        # Clone without running __init__, then copy all state over.
        rv = object.__new__(self.__class__)
        rv.__dict__.update(self.__dict__)
        rv.overlayed = True
        rv.linked_to = self

        # Apply only the overrides the caller actually passed (everything
        # left at the `missing` sentinel keeps the parent's value).
        for key, value in iteritems(args):
            if value is not missing:
                setattr(rv, key, value)

        # The cache is never shared between parent and overlay.
        if cache_size is not missing:
            rv.cache = create_cache(cache_size)
        else:
            rv.cache = copy_cache(self.cache)

        # Re-bind inherited extensions to the overlay and load any extra
        # extensions requested for it.
        rv.extensions = {}
        for key, value in iteritems(self.extensions):
            rv.extensions[key] = value.bind(rv)
        if extensions is not missing:
            rv.extensions.update(load_extensions(rv, extensions))

        return _environment_sanity_check(rv)
    #: read-only property; the lexer is derived from the environment's
    #: delimiter configuration via get_lexer
    lexer = property(get_lexer, doc="The lexer for this environment.")
def
iter_extensions
(
self
):
"""Iterates over the extensions by priority."""
return
iter
(
sorted
(
self
.
extensions
.
values
(),
key
=
lambda
x
:
x
.
priority
))
def
getitem
(
self
,
obj
,
argument
):
"""Get an item or attribute of an object but prefer the item."""
try
:
return
obj
[
argument
]
except
(
AttributeError
,
TypeError
,
LookupError
):
if
isinstance
(
argument
,
string_types
):
try
:
attr
=
str
(
argument
)
except
Exception
:
pass
else
:
try
:
return
getattr
(
obj
,
attr
)
except
AttributeError
:
pass
return
self
.
undefined
(
obj
=
obj
,
name
=
argument
)
def
getattr
(
self
,
obj
,
attribute
):
"""Get an item or attribute of an object but prefer the attribute.
Unlike :meth:`getitem` the attribute *must* be a bytestring.
"""
try
:
return
getattr
(
obj
,
attribute
)
except
AttributeError
:
pass
try
:
return
obj
[
attribute
]
except
(
TypeError
,
LookupError
,
AttributeError
):
return
self
.
undefined
(
obj
=
obj
,
name
=
attribute
)
def
call_filter
(
self
,
name
,
value
,
args
=
None
,
kwargs
=
None
,
context
=
None
,
eval_ctx
=
None
):
"""Invokes a filter on a value the same way the compiler does it.
Note that on Python 3 this might return a coroutine in case the
filter is running from an environment in async mode and the filter
supports async execution. It's your responsibility to await this
if needed.
.. versionadded:: 2.7
"""
func
=
self
.
filters
.
get
(
name
)
if
func
is
None
:
fail_for_missing_callable
(
"no filter named %r"
,
name
)
args
=
[
value
]
+
list
(
args
or
())
if
getattr
(
func
,
"contextfilter"
,
False
)
is
True
:
if
context
is
None
:
raise
TemplateRuntimeError
(
"Attempted to invoke context filter without context"
)
args
.
insert
(
0
,
context
)
elif
getattr
(
func
,
"evalcontextfilter"
,
False
)
is
True
:
if
eval_ctx
is
None
:
if
context
is
not
None
:
eval_ctx
=
context
.
eval_ctx
else
:
eval_ctx
=
EvalContext
(
self
)
args
.
insert
(
0
,
eval_ctx
)
elif
getattr
(
func
,
"environmentfilter"
,
False
)
is
True
:
args
.
insert
(
0
,
self
)
return
func
(
*
args
,
**
(
kwargs
or
{}))
def
call_test
(
self
,
name
,
value
,
args
=
None
,
kwargs
=
None
):
"""Invokes a test on a value the same way the compiler does it.
.. versionadded:: 2.7
"""
func
=
self
.
tests
.
get
(
name
)
if
func
is
None
:
fail_for_missing_callable
(
"no test named %r"
,
name
)
return
func
(
value
,
*
(
args
or
()),
**
(
kwargs
or
{}))
@
internalcode
def
parse
(
self
,
source
,
name
=
None
,
filename
=
None
):
"""Parse the sourcecode and return the abstract syntax tree. This
tree of nodes is used by the compiler to convert the template into
executable source- or bytecode. This is useful for debugging or to
extract information from templates.
If you are :ref:`developing Jinja extensions <writing-extensions>`
this gives you a good overview of the node tree generated.
"""
try
:
return
self
.
_parse
(
source
,
name
,
filename
)
except
TemplateSyntaxError
:
self
.
handle_exception
(
source
=
source
)
def
_parse
(
self
,
source
,
name
,
filename
):
"""Internal parsing function used by `parse` and `compile`."""
return
Parser
(
self
,
source
,
name
,
encode_filename
(
filename
)).
parse
()
def
lex
(
self
,
source
,
name
=
None
,
filename
=
None
):
"""Lex the given sourcecode and return a generator that yields
tokens as tuples in the form ``(lineno, token_type, value)``.
This can be useful for :ref:`extension development <writing-extensions>`
and debugging templates.
This does not perform preprocessing. If you want the preprocessing
of the extensions to be applied you have to filter source through
the :meth:`preprocess` method.
"""
source
=
text_type
(
source
)
try
:
return
self
.
lexer
.
tokeniter
(
source
,
name
,
filename
)
except
TemplateSyntaxError
:
self
.
handle_exception
(
source
=
source
)
def
preprocess
(
self
,
source
,
name
=
None
,
filename
=
None
):
"""Preprocesses the source with all extensions. This is automatically
called for all parsing and compiling methods but *not* for :meth:`lex`
because there you usually only want the actual source tokenized.
"""
return
reduce
(
lambda
s
,
e
:
e
.
preprocess
(
s
,
name
,
filename
),
self
.
iter_extensions
(),
text_type
(
source
),
)
def
_tokenize
(
self
,
source
,
name
,
filename
=
None
,
state
=
None
):
"""Called by the parser to do the preprocessing and filtering
for all the extensions. Returns a :class:`~jinja2.lexer.TokenStream`.
"""
source
=
self
.
preprocess
(
source
,
name
,
filename
)
stream
=
self
.
lexer
.
tokenize
(
source
,
name
,
filename
,
state
)
for
ext
in
self
.
iter_extensions
():
stream
=
ext
.
filter_stream
(
stream
)
if
not
isinstance
(
stream
,
TokenStream
):
stream
=
TokenStream
(
stream
,
name
,
filename
)
return
stream
def
_generate
(
self
,
source
,
name
,
filename
,
defer_init
=
False
):
"""Internal hook that can be overridden to hook a different generate
method in.
.. versionadded:: 2.5
"""
return
generate
(
source
,
self
,
name
,
filename
,
defer_init
=
defer_init
,
optimized
=
self
.
optimized
,
)
def
_compile
(
self
,
source
,
filename
):
"""Internal hook that can be overridden to hook a different compile
method in.
.. versionadded:: 2.5
"""
return
compile
(
source
,
filename
,
"exec"
)
@
internalcode
def
compile
(
self
,
source
,
name
=
None
,
filename
=
None
,
raw
=
False
,
defer_init
=
False
):
"""Compile a node or template source code. The `name` parameter is
the load name of the template after it was joined using
:meth:`join_path` if necessary, not the filename on the file system.
the `filename` parameter is the estimated filename of the template on
the file system. If the template came from a database or memory this
can be omitted.
The return value of this method is a python code object. If the `raw`
parameter is `True` the return value will be a string with python
code equivalent to the bytecode returned otherwise. This method is
mainly used internally.
`defer_init` is use internally to aid the module code generator. This
causes the generated code to be able to import without the global
environment variable to be set.
.. versionadded:: 2.4
`defer_init` parameter added.
"""
source_hint
=
None
try
:
if
isinstance
(
source
,
string_types
):
source_hint
=
source
source
=
self
.
_parse
(
source
,
name
,
filename
)
source
=
self
.
_generate
(
source
,
name
,
filename
,
defer_init
=
defer_init
)
if
raw
:
return
source
if
filename
is
None
:
filename
=
"<template>"
else
:
filename
=
encode_filename
(
filename
)
return
self
.
_compile
(
source
,
filename
)
except
TemplateSyntaxError
:
self
.
handle_exception
(
source
=
source_hint
)
def
compile_expression
(
self
,
source
,
undefined_to_none
=
True
):
"""A handy helper method that returns a callable that accepts keyword
arguments that appear as variables in the expression. If called it
returns the result of the expression.
This is useful if applications want to use the same rules as Jinja
in template "configuration files" or similar situations.
Example usage:
>>> env = Environment()
>>> expr = env.compile_expression('foo == 42')
>>> expr(foo=23)
False
>>> expr(foo=42)
True
Per default the return value is converted to `None` if the
expression returns an undefined value. This can be changed
by setting `undefined_to_none` to `False`.
>>> env.compile_expression('var')() is None
True
>>> env.compile_expression('var', undefined_to_none=False)()
Undefined
.. versionadded:: 2.1
"""
parser
=
Parser
(
self
,
source
,
state
=
"variable"
)
try
:
expr
=
parser
.
parse_expression
()
if
not
parser
.
stream
.
eos
:
raise
TemplateSyntaxError
(
"chunk after expression"
,
parser
.
stream
.
current
.
lineno
,
None
,
None
)
expr
.
set_environment
(
self
)
except
TemplateSyntaxError
:
if
sys
.
exc_info
()
is
not
None
:
self
.
handle_exception
(
source
=
source
)
body
=
[
nodes
.
Assign
(
nodes
.
Name
(
"result"
,
"store"
),
expr
,
lineno
=
1
)]
template
=
self
.
from_string
(
nodes
.
Template
(
body
,
lineno
=
1
))
return
TemplateExpression
(
template
,
undefined_to_none
)
    def compile_templates(
        self,
        target,
        extensions=None,
        filter_func=None,
        zip="deflated",
        log_function=None,
        ignore_errors=True,
        py_compile=False,
    ):
        """Finds all the templates the loader can find, compiles them
        and stores them in `target`.  If `zip` is `None`, instead of in a
        zipfile, the templates will be stored in a directory.
        By default a deflate zip algorithm is used. To switch to
        the stored algorithm, `zip` can be set to ``'stored'``.

        `extensions` and `filter_func` are passed to :meth:`list_templates`.
        Each template returned will be compiled to the target folder or
        zipfile.

        By default template compilation errors are ignored. In case a
        log function is provided, errors are logged. If you want template
        syntax errors to abort the compilation you can set `ignore_errors`
        to `False` and you will get an exception on syntax errors.

        If `py_compile` is set to `True` .pyc files will be written to the
        target instead of standard .py files. This flag does not do anything
        on pypy and Python 3 where pyc files are not picked up by itself and
        don't give much benefit.

        .. versionadded:: 2.4
        """
        from .loaders import ModuleLoader

        if log_function is None:
            # Default to a no-op logger so callers can skip the argument.
            def log_function(x):
                pass

        if py_compile:
            if not PY2 or PYPY:
                # Byte-compilation is only meaningful on CPython 2.
                import warnings

                warnings.warn(
                    "'py_compile=True' has no effect on PyPy or Python"
                    " 3 and will be removed in version 3.0",
                    DeprecationWarning,
                    stacklevel=2,
                )
                py_compile = False
            else:
                import imp
                import marshal

                # .pyc header: magic number plus a dummy timestamp.
                py_header = imp.get_magic() + u"\xff\xff\xff\xff".encode("iso-8859-15")

                # Python 3.3 added a source filesize to the header
                if sys.version_info >= (3, 3):
                    py_header += u"\x00\x00\x00\x00".encode("iso-8859-15")

        # Closure over `zip_file` (bound below when zip output is used).
        def write_file(filename, data):
            if zip:
                info = ZipInfo(filename)
                # Mark entries executable/readable (0755) in the archive.
                info.external_attr = 0o755 << 16
                zip_file.writestr(info, data)
            else:
                if isinstance(data, text_type):
                    data = data.encode("utf8")

                with open(os.path.join(target, filename), "wb") as f:
                    f.write(data)

        if zip is not None:
            from zipfile import ZipFile, ZipInfo, ZIP_DEFLATED, ZIP_STORED

            zip_file = ZipFile(
                target, "w", dict(deflated=ZIP_DEFLATED, stored=ZIP_STORED)[zip]
            )
            log_function('Compiling into Zip archive "%s"' % target)
        else:
            if not os.path.isdir(target):
                os.makedirs(target)
            log_function('Compiling into folder "%s"' % target)

        try:
            for name in self.list_templates(extensions, filter_func):
                source, filename, _ = self.loader.get_source(self, name)
                try:
                    # raw=True, defer_init=True: get python source text that
                    # can be imported as a module later.
                    code = self.compile(source, name, filename, True, True)
                except TemplateSyntaxError as e:
                    if not ignore_errors:
                        raise
                    log_function('Could not compile "%s": %s' % (name, e))
                    continue

                filename = ModuleLoader.get_module_filename(name)

                if py_compile:
                    c = self._compile(code, encode_filename(filename))
                    write_file(filename + "c", py_header + marshal.dumps(c))
                    log_function('Byte-compiled "%s" as %s' % (name, filename + "c"))
                else:
                    write_file(filename, code)
                    log_function('Compiled "%s" as %s' % (name, filename))
        finally:
            # Always close the archive, even when a template failed.
            if zip:
                zip_file.close()

        log_function("Finished compiling templates")
def list_templates(self, extensions=None, filter_func=None):
    """Return the names of all templates known to this environment's loader.

    Requires a loader that implements :meth:`~BaseLoader.list_templates`;
    if it does not, a :exc:`TypeError` propagates from the loader.

    The result may be narrowed in one of two mutually exclusive ways:
    *extensions* is a list of file extensions to keep, or *filter_func* is
    a callable receiving a template name and returning `True` to keep it.

    .. versionadded:: 2.4
    """
    names = self.loader.list_templates()

    if extensions is not None:
        if filter_func is not None:
            raise TypeError(
                "either extensions or filter_func can be passed, but not both"
            )

        # Synthesize a filter that keeps names whose final extension is
        # one of the requested ones.
        def filter_func(name):
            base, sep, ext = name.rpartition(".")
            return bool(sep) and ext in extensions

    if filter_func is not None:
        names = list(filter(filter_func, names))

    return names
def handle_exception(self, source=None):
    """Re-raise the active exception with its traceback rewritten so that
    it points at template source lines rather than compiled code.

    Used internally during rendering and compilation error paths.
    """
    from .debug import rewrite_traceback_stack

    exc_args = rewrite_traceback_stack(source=source)
    reraise(*exc_args)
def join_path(self, template, parent):
    """Join a template name with the name of the parent template.

    Lookups are relative to the loader root by default, so this base
    implementation returns *template* untouched.  Subclasses may override
    this method to implement parent-relative template paths.
    """
    return template
@internalcode
def _load_template(self, name, globals):
    """Load *name* through the loader, consulting the template cache first.

    The cache key includes a weak reference to the loader so entries from
    a replaced loader are never served.  With ``auto_reload`` enabled, a
    cached template is only returned while it reports being up to date.
    """
    if self.loader is None:
        raise TypeError("no loader for this environment specified")

    cache_key = (weakref.ref(self.loader), name)
    cache = self.cache

    if cache is not None:
        cached = cache.get(cache_key)
        if cached is not None:
            # Serve the cached template unless auto-reload detects staleness.
            if not self.auto_reload or cached.is_up_to_date:
                return cached

    template = self.loader.load(self, name, globals)
    if cache is not None:
        cache[cache_key] = template
    return template
@internalcode
def get_template(self, name, parent=None, globals=None):
    """Load a template by name through the configured loader and return a
    :class:`Template`.

    When *parent* is given, :meth:`join_path` resolves the real template
    name before loading.  *globals* provides template-wide variables that
    are available in the context at render time.  Raises
    :exc:`TemplateNotFound` if no such template exists.

    .. versionchanged:: 2.4
        If `name` is a :class:`Template` object it is returned from the
        function unchanged.
    """
    # Template instances pass straight through untouched.
    if isinstance(name, Template):
        return name

    if parent is not None:
        # Resolve the name relative to the requesting template.
        name = self.join_path(name, parent)

    merged_globals = self.make_globals(globals)
    return self._load_template(name, merged_globals)
@internalcode
def select_template(self, names, parent=None, globals=None):
    """Like :meth:`get_template`, but try each candidate in *names* and
    return the first that loads; raise :exc:`TemplatesNotFound` when none
    of them can be found.

    .. versionchanged:: 2.11
        If names is :class:`Undefined`, an :exc:`UndefinedError` is
        raised instead. If no templates were found and names
        contains :class:`Undefined`, the message is more helpful.

    .. versionchanged:: 2.4
        If `names` contains a :class:`Template` object it is returned
        from the function unchanged.

    .. versionadded:: 2.3
    """
    # An undefined candidate list raises a helpful UndefinedError.
    if isinstance(names, Undefined):
        names._fail_with_undefined_error()

    if not names:
        raise TemplatesNotFound(
            message=u"Tried to select from an empty list of templates."
        )

    globals = self.make_globals(globals)

    for candidate in names:
        if isinstance(candidate, Template):
            return candidate
        target = candidate if parent is None else self.join_path(candidate, parent)
        try:
            return self._load_template(target, globals)
        except (TemplateNotFound, UndefinedError):
            # This candidate failed; move on to the next one.
            continue

    raise TemplatesNotFound(names)
@internalcode
def get_or_select_template(self, template_name_or_list, parent=None, globals=None):
    """Type-check the argument and dispatch: a single name (or Undefined)
    goes to :meth:`get_template`, an iterable of names goes to
    :meth:`select_template`, and a :class:`Template` instance is returned
    unchanged.

    .. versionadded:: 2.3
    """
    target = template_name_or_list
    if isinstance(target, (string_types, Undefined)):
        return self.get_template(target, parent, globals)
    if isinstance(target, Template):
        return target
    return self.select_template(target, parent, globals)
def from_string(self, source, globals=None, template_class=None):
    """Compile *source* into a template object bound to this environment.

    *globals* become template-wide variables; *template_class* overrides
    the environment's default :class:`Template` subclass for this call.
    """
    globals = self.make_globals(globals)
    cls = template_class or self.template_class
    code = self.compile(source)
    # An uptodate callable of None means the template never auto-reloads.
    return cls.from_code(self, code, globals, None)
def make_globals(self, d):
    """Return the globals mapping for a template.

    A falsy *d* (``None`` or an empty dict) yields the environment's own
    globals dict -- note this is the shared object, not a copy.  Otherwise
    a fresh dict is returned with *d* layered over the environment globals.
    """
    if d:
        return dict(self.globals, **d)
    return self.globals
class Template(object):
    """The central template object.  This class represents a compiled template
    and is used to evaluate it.

    Normally the template object is generated from an :class:`Environment` but
    it also has a constructor that makes it possible to create a template
    instance directly using the constructor.  It takes the same arguments as
    the environment constructor but it's not possible to specify a loader.

    Every template object has a few methods and members that are guaranteed
    to exist.  However it's important that a template object should be
    considered immutable.  Modifications on the object are not supported.

    Template objects created from the constructor rather than an environment
    do have an `environment` attribute that points to a temporary environment
    that is probably shared with other templates created with the constructor
    and compatible settings.

    >>> template = Template('Hello {{ name }}!')
    >>> template.render(name='John Doe') == u'Hello John Doe!'
    True

    >>> stream = template.stream(name='John Doe')
    >>> next(stream) == u'Hello John Doe!'
    True
    >>> next(stream)
    Traceback (most recent call last):
        ...
    StopIteration
    """

    #: Type of environment to create when creating a template directly
    #: rather than through an existing environment.
    environment_class = Environment

    def __new__(
        cls,
        source,
        block_start_string=BLOCK_START_STRING,
        block_end_string=BLOCK_END_STRING,
        variable_start_string=VARIABLE_START_STRING,
        variable_end_string=VARIABLE_END_STRING,
        comment_start_string=COMMENT_START_STRING,
        comment_end_string=COMMENT_END_STRING,
        line_statement_prefix=LINE_STATEMENT_PREFIX,
        line_comment_prefix=LINE_COMMENT_PREFIX,
        trim_blocks=TRIM_BLOCKS,
        lstrip_blocks=LSTRIP_BLOCKS,
        newline_sequence=NEWLINE_SEQUENCE,
        keep_trailing_newline=KEEP_TRAILING_NEWLINE,
        extensions=(),
        optimized=True,
        undefined=Undefined,
        finalize=None,
        autoescape=False,
        enable_async=False,
    ):
        # NOTE(review): get_spontaneous_environment takes these strictly by
        # position, so the order below must match the Environment
        # constructor's parameter order exactly.  The trailing literals
        # (None, 0, False, None) presumably pin options a standalone
        # template cannot configure (loader/cache related) -- confirm
        # against Environment.__init__ before changing them.
        env = get_spontaneous_environment(
            cls.environment_class,
            block_start_string,
            block_end_string,
            variable_start_string,
            variable_end_string,
            comment_start_string,
            comment_end_string,
            line_statement_prefix,
            line_comment_prefix,
            trim_blocks,
            lstrip_blocks,
            newline_sequence,
            keep_trailing_newline,
            frozenset(extensions),
            optimized,
            undefined,
            finalize,
            autoescape,
            None,
            0,
            False,
            None,
            enable_async,
        )
        # Delegate actual compilation to the environment; the created
        # object is an instance of cls, which is why __new__ is used here.
        return env.from_string(source, template_class=cls)

    @classmethod
    def from_code(cls, environment, code, globals, uptodate=None):
        """Creates a template object from compiled code and the globals.  This
        is used by the loaders and environment to create a template object.
        """
        namespace = {"environment": environment, "__file__": code.co_filename}
        # Executing the compiled module code populates the namespace with
        # the keys _from_namespace reads: "name", "blocks", "root",
        # "debug_info".
        exec(code, namespace)
        rv = cls._from_namespace(environment, namespace, globals)
        rv._uptodate = uptodate
        return rv

    @classmethod
    def from_module_dict(cls, environment, module_dict, globals):
        """Creates a template object from a module.  This is used by the
        module loader to create a template object.

        .. versionadded:: 2.4
        """
        return cls._from_namespace(environment, module_dict, globals)

    @classmethod
    def _from_namespace(cls, environment, namespace, globals):
        # Build the instance without calling __init__ (there is none that
        # applies here); all state is assigned from the exec'd namespace.
        t = object.__new__(cls)
        t.environment = environment
        t.globals = globals
        t.name = namespace["name"]
        t.filename = namespace["__file__"]
        t.blocks = namespace["blocks"]

        # render function and module
        t.root_render_func = namespace["root"]
        t._module = None

        # debug and loader helpers
        t._debug_info = namespace["debug_info"]
        t._uptodate = None

        # store the reference
        namespace["environment"] = environment
        namespace["__jinja_template__"] = t

        return t

    def render(self, *args, **kwargs):
        """This method accepts the same arguments as the `dict` constructor:
        A dict, a dict subclass or some keyword arguments.  If no arguments
        are given the context will be empty.  These two calls do the same::

            template.render(knights='that say nih')
            template.render({'knights': 'that say nih'})

        This will return the rendered template as unicode string.
        """
        vars = dict(*args, **kwargs)

        try:
            return concat(self.root_render_func(self.new_context(vars)))
        except Exception:
            # handle_exception re-raises with a rewritten traceback, so
            # this except clause never falls through.
            self.environment.handle_exception()

    def render_async(self, *args, **kwargs):
        """This works similar to :meth:`render` but returns a coroutine
        that when awaited returns the entire rendered template string.  This
        requires the async feature to be enabled.

        Example usage::

            await template.render_async(knights='that say nih; asynchronously')
        """
        # see asyncsupport for the actual implementation
        raise NotImplementedError(
            "This feature is not available for this version of Python"
        )

    def stream(self, *args, **kwargs):
        """Works exactly like :meth:`generate` but returns a
        :class:`TemplateStream`.
        """
        return TemplateStream(self.generate(*args, **kwargs))

    def generate(self, *args, **kwargs):
        """For very large templates it can be useful to not render the whole
        template at once but evaluate each statement after another and yield
        piece for piece.  This method basically does exactly that and returns
        a generator that yields one item after another as unicode strings.

        It accepts the same arguments as :meth:`render`.
        """
        vars = dict(*args, **kwargs)

        try:
            for event in self.root_render_func(self.new_context(vars)):
                yield event
        except Exception:
            # handle_exception re-raises, so nothing is actually yielded
            # by this statement.
            yield self.environment.handle_exception()

    def generate_async(self, *args, **kwargs):
        """An async version of :meth:`generate`.  Works very similarly but
        returns an async iterator instead.
        """
        # see asyncsupport for the actual implementation
        raise NotImplementedError(
            "This feature is not available for this version of Python"
        )

    def new_context(self, vars=None, shared=False, locals=None):
        """Create a new :class:`Context` for this template.  The vars
        provided will be passed to the template.  Per default the globals
        are added to the context.  If shared is set to `True` the data
        is passed as is to the context without adding the globals.

        `locals` can be a dict of local variables for internal usage.
        """
        return new_context(
            self.environment, self.name, self.blocks, vars, shared, self.globals, locals
        )

    def make_module(self, vars=None, shared=False, locals=None):
        """This method works like the :attr:`module` attribute when called
        without arguments but it will evaluate the template on every call
        rather than caching it.  It's also possible to provide
        a dict which is then used as context.  The arguments are the same
        as for the :meth:`new_context` method.
        """
        return TemplateModule(self, self.new_context(vars, shared, locals))

    def make_module_async(self, vars=None, shared=False, locals=None):
        """As template module creation can invoke template code for
        asynchronous executions this method must be used instead of the
        normal :meth:`make_module` one.  Likewise the module attribute
        becomes unavailable in async mode.
        """
        # see asyncsupport for the actual implementation
        raise NotImplementedError(
            "This feature is not available for this version of Python"
        )

    @internalcode
    def _get_default_module(self):
        # Lazily build and cache the module representation on first access.
        if self._module is not None:
            return self._module
        self._module = rv = self.make_module()
        return rv

    @property
    def module(self):
        """The template as module.  This is used for imports in the
        template runtime but is also useful if one wants to access
        exported template variables from the Python layer:

        >>> t = Template('{% macro foo() %}42{% endmacro %}23')
        >>> str(t.module)
        '23'
        >>> t.module.foo() == u'42'
        True

        This attribute is not available if async mode is enabled.
        """
        return self._get_default_module()

    def get_corresponding_lineno(self, lineno):
        """Return the source line number of a line number in the
        generated bytecode as they are not in sync.
        """
        # Walk debug_info newest-first and return the template line of the
        # last code line at or before *lineno*; 1 is the fallback.
        for template_line, code_line in reversed(self.debug_info):
            if code_line <= lineno:
                return template_line
        return 1

    @property
    def is_up_to_date(self):
        """If this variable is `False` there is a newer version available."""
        if self._uptodate is None:
            # No uptodate callable means the template can never go stale.
            return True
        return self._uptodate()

    @property
    def debug_info(self):
        """The debug info mapping."""
        if self._debug_info:
            # _debug_info is an "a=b&c=d" encoded string; decode it into a
            # list of (template_line, code_line) int tuples.
            return [
                tuple(map(int, x.split("="))) for x in self._debug_info.split("&")
            ]
        return []

    def __repr__(self):
        if self.name is None:
            name = "memory:%x" % id(self)
        else:
            name = repr(self.name)
        return "<%s %s>" % (self.__class__.__name__, name)
@implements_to_string
class TemplateModule(object):
    """An imported template exposed as a module-like object.

    Every name the template exports is available as an attribute of this
    object; converting the object to a unicode or byte string renders the
    template body.
    """

    def __init__(self, template, context, body_stream=None):
        if body_stream is None:
            # Rendering synchronously here is impossible in async mode, so
            # demand a pre-rendered body stream instead.
            if context.environment.is_async:
                raise RuntimeError(
                    "Async mode requires a body stream to be passed to a "
                    "template module. Use the async methods of the API you "
                    "are using."
                )
            body_stream = list(template.root_render_func(context))

        self._body_stream = body_stream
        # Expose every exported template name as an attribute.
        self.__dict__.update(context.get_exported())
        self.__name__ = template.name

    def __html__(self):
        return Markup(concat(self._body_stream))

    def __str__(self):
        return concat(self._body_stream)

    def __repr__(self):
        label = (
            "memory:%x" % id(self) if self.__name__ is None else repr(self.__name__)
        )
        return "<%s %s>" % (self.__class__.__name__, label)
class TemplateExpression(object):
    """Callable wrapper returned by
    :meth:`jinja2.Environment.compile_expression`.

    Invoking the object renders the wrapped template and returns the value
    the expression evaluated to.
    """

    def __init__(self, template, undefined_to_none):
        self._template = template
        self._undefined_to_none = undefined_to_none

    def __call__(self, *args, **kwargs):
        template = self._template
        context = template.new_context(dict(*args, **kwargs))
        # Drain the render stream; the expression's value is stored in the
        # context under the "result" key rather than in the output.
        consume(template.root_render_func(context))
        result = context.vars["result"]

        if self._undefined_to_none and isinstance(result, Undefined):
            return None
        return result
@implements_iterator
class TemplateStream(object):
    """An iterator over rendered template output with optional buffering.

    Unbuffered (the default), every render event is yielded as its own
    unicode string.  With buffering enabled at size *n*, roughly *n* items
    are combined into each yielded string -- useful when streaming large
    templates over WSGI, which flushes after each iteration.
    """

    def __init__(self, gen):
        self._gen = gen
        self.disable_buffering()

    def dump(self, fp, encoding=None, errors="strict"):
        """Dump the complete stream into a file or file-like object.

        Unicode strings are written by default; pass *encoding* to encode
        before writing.  Example usage::

            Template('Hello {{ name }}!').stream(name='foo').dump('hello.html')
        """
        opened_here = False
        if isinstance(fp, string_types):
            # A path was given: open the file ourselves and close it later.
            if encoding is None:
                encoding = "utf-8"
            fp = open(fp, "wb")
            opened_here = True

        try:
            if encoding is None:
                chunks = self
            else:
                chunks = (piece.encode(encoding, errors) for piece in self)
            if hasattr(fp, "writelines"):
                fp.writelines(chunks)
            else:
                for chunk in chunks:
                    fp.write(chunk)
        finally:
            if opened_here:
                fp.close()

    def disable_buffering(self):
        """Switch back to yielding every render event unbuffered."""
        self._next = partial(next, self._gen)
        self.buffered = False

    def _buffered_generator(self, size):
        # Collect events until *size* truthy items accumulated, then yield
        # them joined as one string.  Empty strings are buffered too but do
        # not count towards the size.
        buf = []
        have = 0

        while True:
            try:
                while have < size:
                    item = next(self._gen)
                    buf.append(item)
                    if item:
                        have += 1
            except StopIteration:
                # Flush whatever is left; stop once nothing accumulated.
                if not have:
                    return
            yield concat(buf)
            buf = []
            have = 0

    def enable_buffering(self, size=5):
        """Enable buffering.  Buffer `size` items before yielding them."""
        if size <= 1:
            raise ValueError("buffer size too small")
        self.buffered = True
        self._next = partial(next, self._buffered_generator(size))

    def __iter__(self):
        return self

    def __next__(self):
        return self._next()
# Wire the default template class into Environment.  Subclasses of
# Environment may override ``template_class`` to plug in a custom Template
# implementation.
Environment.template_class = Template
Prev
1
2
3
4
5
6
7
8
Next
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment