rasterizer/jitter/builder_x86.cpp
rasterizer/jitter/builder_x86.h
rasterizer/jitter/state_llvm.h
-rasterizer/scripts/gen_knobs.cpp
-rasterizer/scripts/gen_knobs.h
+rasterizer/codegen/gen_knobs.cpp
+rasterizer/codegen/gen_knobs.h
rasterizer/core/BackendPixelRate0.cpp
$(GALLIUM_DRIVER_CFLAGS) \
$(LLVM_CXXFLAGS) \
$(SWR_CXX11_CXXFLAGS) \
- -I$(builddir)/rasterizer/scripts \
+ -I$(builddir)/rasterizer/codegen \
-I$(builddir)/rasterizer/jitter \
-I$(builddir)/rasterizer/archrast \
-I$(srcdir)/rasterizer \
-I$(srcdir)/rasterizer/core \
- -I$(srcdir)/rasterizer/scripts \
+ -I$(srcdir)/rasterizer/codegen \
-I$(srcdir)/rasterizer/jitter \
-I$(srcdir)/rasterizer/archrast
BUILT_SOURCES = \
swr_context_llvm.h \
- rasterizer/scripts/gen_knobs.cpp \
- rasterizer/scripts/gen_knobs.h \
+ rasterizer/codegen/gen_knobs.cpp \
+ rasterizer/codegen/gen_knobs.h \
rasterizer/jitter/state_llvm.h \
rasterizer/jitter/builder_x86.h \
rasterizer/jitter/builder_x86.cpp \
MKDIR_GEN = $(AM_V_at)$(MKDIR_P) $(@D)
PYTHON_GEN = $(AM_V_GEN)$(PYTHON2) $(PYTHON_FLAGS)
-swr_context_llvm.h: rasterizer/jitter/scripts/gen_llvm_types.py swr_context.h
+swr_context_llvm.h: rasterizer/codegen/gen_llvm_types.py swr_context.h
$(PYTHON_GEN) \
- $(srcdir)/rasterizer/jitter/scripts/gen_llvm_types.py \
+ $(srcdir)/rasterizer/codegen/gen_llvm_types.py \
--input $(srcdir)/swr_context.h \
--output swr_context_llvm.h
-rasterizer/scripts/gen_knobs.cpp: rasterizer/scripts/gen_knobs.py rasterizer/scripts/knob_defs.py rasterizer/scripts/templates/knobs.template
+rasterizer/codegen/gen_knobs.cpp: rasterizer/codegen/gen_knobs.py rasterizer/codegen/knob_defs.py rasterizer/codegen/templates/knobs.template
$(MKDIR_GEN)
$(PYTHON_GEN) \
- $(srcdir)/rasterizer/scripts/gen_knobs.py \
- --input $(srcdir)/rasterizer/scripts/templates/knobs.template \
- --output rasterizer/scripts/gen_knobs.cpp \
+ $(srcdir)/rasterizer/codegen/gen_knobs.py \
+ --input $(srcdir)/rasterizer/codegen/templates/knobs.template \
+ --output rasterizer/codegen/gen_knobs.cpp \
--gen_cpp
-rasterizer/scripts/gen_knobs.h: rasterizer/scripts/gen_knobs.py rasterizer/scripts/knob_defs.py rasterizer/scripts/templates/knobs.template
+rasterizer/codegen/gen_knobs.h: rasterizer/codegen/gen_knobs.py rasterizer/codegen/knob_defs.py rasterizer/codegen/templates/knobs.template
$(MKDIR_GEN)
$(PYTHON_GEN) \
- $(srcdir)/rasterizer/scripts/gen_knobs.py \
- --input $(srcdir)/rasterizer/scripts/templates/knobs.template \
- --output rasterizer/scripts/gen_knobs.h \
+ $(srcdir)/rasterizer/codegen/gen_knobs.py \
+ --input $(srcdir)/rasterizer/codegen/templates/knobs.template \
+ --output rasterizer/codegen/gen_knobs.h \
--gen_h
-rasterizer/jitter/state_llvm.h: rasterizer/jitter/scripts/gen_llvm_types.py rasterizer/core/state.h
+rasterizer/jitter/state_llvm.h: rasterizer/codegen/gen_llvm_types.py rasterizer/core/state.h
$(MKDIR_GEN)
$(PYTHON_GEN) \
- $(srcdir)/rasterizer/jitter/scripts/gen_llvm_types.py \
+ $(srcdir)/rasterizer/codegen/gen_llvm_types.py \
--input $(srcdir)/rasterizer/core/state.h \
--output rasterizer/jitter/state_llvm.h
-rasterizer/jitter/builder_gen.h: rasterizer/jitter/scripts/gen_llvm_ir_macros.py
+rasterizer/jitter/builder_gen.h: rasterizer/codegen/gen_llvm_ir_macros.py
$(MKDIR_GEN)
$(PYTHON_GEN) \
- $(srcdir)/rasterizer/jitter/scripts/gen_llvm_ir_macros.py \
+ $(srcdir)/rasterizer/codegen/gen_llvm_ir_macros.py \
--input $(LLVM_INCLUDEDIR)/llvm/IR/IRBuilder.h \
--output rasterizer/jitter/builder_gen.h \
--gen_h
-rasterizer/jitter/builder_gen.cpp: rasterizer/jitter/scripts/gen_llvm_ir_macros.py
+rasterizer/jitter/builder_gen.cpp: rasterizer/codegen/gen_llvm_ir_macros.py
$(MKDIR_GEN)
$(PYTHON_GEN) \
- $(srcdir)/rasterizer/jitter/scripts/gen_llvm_ir_macros.py \
+ $(srcdir)/rasterizer/codegen/gen_llvm_ir_macros.py \
--input $(LLVM_INCLUDEDIR)/llvm/IR/IRBuilder.h \
--output rasterizer/jitter/builder_gen.cpp \
--gen_cpp
-rasterizer/jitter/builder_x86.h: rasterizer/jitter/scripts/gen_llvm_ir_macros.py
+rasterizer/jitter/builder_x86.h: rasterizer/codegen/gen_llvm_ir_macros.py
$(MKDIR_GEN)
$(PYTHON_GEN) \
- $(srcdir)/rasterizer/jitter/scripts/gen_llvm_ir_macros.py \
+ $(srcdir)/rasterizer/codegen/gen_llvm_ir_macros.py \
--output rasterizer/jitter/builder_x86.h \
--gen_x86_h
-rasterizer/jitter/builder_x86.cpp: rasterizer/jitter/scripts/gen_llvm_ir_macros.py
+rasterizer/jitter/builder_x86.cpp: rasterizer/codegen/gen_llvm_ir_macros.py
$(MKDIR_GEN)
$(PYTHON_GEN) \
- $(srcdir)/rasterizer/jitter/scripts/gen_llvm_ir_macros.py \
+ $(srcdir)/rasterizer/codegen/gen_llvm_ir_macros.py \
--output rasterizer/jitter/builder_x86.cpp \
--gen_x86_cpp
-rasterizer/archrast/gen_ar_event.h: rasterizer/scripts/gen_archrast.py rasterizer/scripts/templates/ar_event_h.template rasterizer/archrast/events.proto
+rasterizer/archrast/gen_ar_event.h: rasterizer/codegen/gen_archrast.py rasterizer/codegen/templates/ar_event_h.template rasterizer/archrast/events.proto
$(MKDIR_GEN)
$(PYTHON_GEN) \
- $(srcdir)/rasterizer/scripts/gen_archrast.py \
+ $(srcdir)/rasterizer/codegen/gen_archrast.py \
--proto $(srcdir)/rasterizer/archrast/events.proto \
--output rasterizer/archrast/gen_ar_event.h \
--gen_event_h
-rasterizer/archrast/gen_ar_event.cpp: rasterizer/scripts/gen_archrast.py rasterizer/scripts/templates/ar_event_cpp.template rasterizer/archrast/events.proto
+rasterizer/archrast/gen_ar_event.cpp: rasterizer/codegen/gen_archrast.py rasterizer/codegen/templates/ar_event_cpp.template rasterizer/archrast/events.proto
$(MKDIR_GEN)
$(PYTHON_GEN) \
- $(srcdir)/rasterizer/scripts/gen_archrast.py \
+ $(srcdir)/rasterizer/codegen/gen_archrast.py \
--proto $(srcdir)/rasterizer/archrast/events.proto \
--output rasterizer/archrast/gen_ar_event.cpp \
--gen_event_cpp
-rasterizer/archrast/gen_ar_eventhandler.h: rasterizer/scripts/gen_archrast.py rasterizer/scripts/templates/ar_eventhandler_h.template rasterizer/archrast/events.proto
+rasterizer/archrast/gen_ar_eventhandler.h: rasterizer/codegen/gen_archrast.py rasterizer/codegen/templates/ar_eventhandler_h.template rasterizer/archrast/events.proto
$(MKDIR_GEN)
$(PYTHON_GEN) \
- $(srcdir)/rasterizer/scripts/gen_archrast.py \
+ $(srcdir)/rasterizer/codegen/gen_archrast.py \
--proto $(srcdir)/rasterizer/archrast/events.proto \
--output rasterizer/archrast/gen_ar_eventhandler.h \
--gen_eventhandler_h
-rasterizer/archrast/gen_ar_eventhandlerfile.h: rasterizer/scripts/gen_archrast.py rasterizer/scripts/templates/ar_eventhandlerfile_h.template rasterizer/archrast/events.proto
+rasterizer/archrast/gen_ar_eventhandlerfile.h: rasterizer/codegen/gen_archrast.py rasterizer/codegen/templates/ar_eventhandlerfile_h.template rasterizer/archrast/events.proto
$(MKDIR_GEN)
$(PYTHON_GEN) \
- $(srcdir)/rasterizer/scripts/gen_archrast.py \
+ $(srcdir)/rasterizer/codegen/gen_archrast.py \
--proto $(srcdir)/rasterizer/archrast/events.proto \
--output rasterizer/archrast/gen_ar_eventhandlerfile.h \
--gen_eventhandlerfile_h
# 2 centroid
# 2 forcedSampleCount
# 2 canEarlyZ
-rasterizer/core/BackendPixelRate0.cpp: rasterizer/scripts/gen_backends.py rasterizer/scripts/templates/backend_template.cpp
+rasterizer/core/BackendPixelRate0.cpp: rasterizer/codegen/gen_backends.py rasterizer/codegen/templates/backend_template.cpp
$(MKDIR_GEN)
$(PYTHON_GEN) \
- $(srcdir)/rasterizer/scripts/gen_backends.py \
+ $(srcdir)/rasterizer/codegen/gen_backends.py \
--outdir rasterizer/core \
--dim 5 2 3 2 2 2 \
--split 0 \
EXTRA_DIST = \
SConscript \
rasterizer/archrast/events.proto \
- rasterizer/jitter/scripts/gen_llvm_ir_macros.py \
- rasterizer/jitter/scripts/gen_llvm_types.py \
- rasterizer/scripts/gen_archrast.py \
- rasterizer/scripts/gen_backends.py \
- rasterizer/scripts/gen_knobs.py \
- rasterizer/scripts/knob_defs.py \
- rasterizer/scripts/mako/ast.py \
- rasterizer/scripts/mako/_ast_util.py \
- rasterizer/scripts/mako/cache.py \
- rasterizer/scripts/mako/cmd.py \
- rasterizer/scripts/mako/codegen.py \
- rasterizer/scripts/mako/compat.py \
- rasterizer/scripts/mako/exceptions.py \
- rasterizer/scripts/mako/filters.py \
- rasterizer/scripts/mako/__init__.py \
- rasterizer/scripts/mako/lexer.py \
- rasterizer/scripts/mako/lookup.py \
- rasterizer/scripts/mako/parsetree.py \
- rasterizer/scripts/mako/pygen.py \
- rasterizer/scripts/mako/pyparser.py \
- rasterizer/scripts/mako/runtime.py \
- rasterizer/scripts/mako/template.py \
- rasterizer/scripts/mako/util.py \
- rasterizer/scripts/templates/knobs.template \
- rasterizer/scripts/templates/ar_event_h.template \
- rasterizer/scripts/templates/ar_event_cpp.template \
- rasterizer/scripts/templates/ar_eventhandler_h.template \
- rasterizer/scripts/templates/ar_eventhandlerfile_h.template \
- rasterizer/scripts/templates/backend_template.cpp
+ rasterizer/codegen/gen_llvm_ir_macros.py \
+ rasterizer/codegen/gen_llvm_types.py \
+ rasterizer/codegen/gen_archrast.py \
+ rasterizer/codegen/gen_backends.py \
+ rasterizer/codegen/gen_knobs.py \
+ rasterizer/codegen/knob_defs.py \
+ rasterizer/codegen/mako/ast.py \
+ rasterizer/codegen/mako/_ast_util.py \
+ rasterizer/codegen/mako/cache.py \
+ rasterizer/codegen/mako/cmd.py \
+ rasterizer/codegen/mako/codegen.py \
+ rasterizer/codegen/mako/compat.py \
+ rasterizer/codegen/mako/exceptions.py \
+ rasterizer/codegen/mako/filters.py \
+ rasterizer/codegen/mako/__init__.py \
+ rasterizer/codegen/mako/lexer.py \
+ rasterizer/codegen/mako/lookup.py \
+ rasterizer/codegen/mako/parsetree.py \
+ rasterizer/codegen/mako/pygen.py \
+ rasterizer/codegen/mako/pyparser.py \
+ rasterizer/codegen/mako/runtime.py \
+ rasterizer/codegen/mako/template.py \
+ rasterizer/codegen/mako/util.py \
+ rasterizer/codegen/templates/knobs.template \
+ rasterizer/codegen/templates/ar_event_h.template \
+ rasterizer/codegen/templates/ar_event_cpp.template \
+ rasterizer/codegen/templates/ar_eventhandler_h.template \
+ rasterizer/codegen/templates/ar_eventhandlerfile_h.template \
+ rasterizer/codegen/templates/backend_template.cpp
swrroot = '#src/gallium/drivers/swr/'
env.CodeGenerate(
- target = 'rasterizer/scripts/gen_knobs.cpp',
- script = swrroot + 'rasterizer/scripts/gen_knobs.py',
- source = 'rasterizer/scripts/templates/knobs.template',
+ target = 'rasterizer/codegen/gen_knobs.cpp',
+ script = swrroot + 'rasterizer/codegen/gen_knobs.py',
+ source = 'rasterizer/codegen/templates/knobs.template',
command = python_cmd + ' $SCRIPT --input $SOURCE --output $TARGET --gen_cpp'
)
env.CodeGenerate(
- target = 'rasterizer/scripts/gen_knobs.h',
- script = swrroot + 'rasterizer/scripts/gen_knobs.py',
- source = 'rasterizer/scripts/templates/knobs.template',
+ target = 'rasterizer/codegen/gen_knobs.h',
+ script = swrroot + 'rasterizer/codegen/gen_knobs.py',
+ source = 'rasterizer/codegen/templates/knobs.template',
command = python_cmd + ' $SCRIPT --input $SOURCE --output $TARGET --gen_h'
)
env.CodeGenerate(
target = 'rasterizer/jitter/state_llvm.h',
- script = swrroot + 'rasterizer/jitter/scripts/gen_llvm_types.py',
+ script = swrroot + 'rasterizer/codegen/gen_llvm_types.py',
source = 'rasterizer/core/state.h',
command = python_cmd + ' $SCRIPT --input $SOURCE --output $TARGET'
)
env.CodeGenerate(
target = 'rasterizer/jitter/builder_gen.h',
- script = swrroot + 'rasterizer/jitter/scripts/gen_llvm_ir_macros.py',
+ script = swrroot + 'rasterizer/codegen/gen_llvm_ir_macros.py',
source = os.path.join(llvm_includedir, 'llvm/IR/IRBuilder.h'),
command = python_cmd + ' $SCRIPT --input $SOURCE --output $TARGET --gen_h'
)
env.CodeGenerate(
target = 'rasterizer/jitter/builder_gen.cpp',
- script = swrroot + 'rasterizer/jitter/scripts/gen_llvm_ir_macros.py',
+ script = swrroot + 'rasterizer/codegen/gen_llvm_ir_macros.py',
source = os.path.join(llvm_includedir, 'llvm/IR/IRBuilder.h'),
command = python_cmd + ' $SCRIPT --input $SOURCE --output $TARGET --gen_cpp'
)
env.CodeGenerate(
target = 'rasterizer/jitter/builder_x86.h',
- script = swrroot + 'rasterizer/jitter/scripts/gen_llvm_ir_macros.py',
+ script = swrroot + 'rasterizer/codegen/gen_llvm_ir_macros.py',
source = '',
command = python_cmd + ' $SCRIPT --output $TARGET --gen_x86_h'
)
env.CodeGenerate(
target = 'rasterizer/jitter/builder_x86.cpp',
- script = swrroot + 'rasterizer/jitter/scripts/gen_llvm_ir_macros.py',
+ script = swrroot + 'rasterizer/codegen/gen_llvm_ir_macros.py',
source = '',
command = python_cmd + ' $SCRIPT --output $TARGET --gen_x86_cpp'
)
env.CodeGenerate(
target = 'swr_context_llvm.h',
- script = swrroot + 'rasterizer/jitter/scripts/gen_llvm_types.py',
+ script = swrroot + 'rasterizer/codegen/gen_llvm_types.py',
source = 'swr_context.h',
command = python_cmd + ' $SCRIPT --input $SOURCE --output $TARGET'
)
env.CodeGenerate(
target = 'rasterizer/archrast/gen_ar_event.h',
- script = swrroot + 'rasterizer/scripts/gen_archrast.py',
+ script = swrroot + 'rasterizer/codegen/gen_archrast.py',
source = 'rasterizer/archrast/events.proto',
command = python_cmd + ' $SCRIPT --proto $SOURCE --output $TARGET --gen_event_h'
)
env.CodeGenerate(
target = 'rasterizer/archrast/gen_ar_event.cpp',
- script = swrroot + 'rasterizer/scripts/gen_archrast.py',
+ script = swrroot + 'rasterizer/codegen/gen_archrast.py',
source = 'rasterizer/archrast/events.proto',
command = python_cmd + ' $SCRIPT --proto $SOURCE --output $TARGET --gen_event_cpp'
)
env.CodeGenerate(
target = 'rasterizer/archrast/gen_ar_eventhandler.h',
- script = swrroot + 'rasterizer/scripts/gen_archrast.py',
+ script = swrroot + 'rasterizer/codegen/gen_archrast.py',
source = 'rasterizer/archrast/events.proto',
command = python_cmd + ' $SCRIPT --proto $SOURCE --output $TARGET --gen_eventhandler_h'
)
env.CodeGenerate(
target = 'rasterizer/archrast/gen_ar_eventhandlerfile.h',
- script = swrroot + 'rasterizer/scripts/gen_archrast.py',
+ script = swrroot + 'rasterizer/codegen/gen_archrast.py',
source = 'rasterizer/archrast/events.proto',
command = python_cmd + ' $SCRIPT --proto $SOURCE --output $TARGET --gen_eventhandlerfile_h'
)
# 2 canEarlyZ
env.CodeGenerate(
target = 'rasterizer/core/BackendPixelRate0.cpp',
- script = swrroot + 'rasterizer/scripts/gen_backends.py',
+ script = swrroot + 'rasterizer/codegen/gen_backends.py',
command = python_cmd + ' $SCRIPT --output rasterizer/core --dim 5 2 3 2 2 2 --split 0 --cpp'
)
# Auto-generated .cpp files (that need to generate object files)
built_sources = [
- 'rasterizer/scripts/gen_knobs.cpp',
+ 'rasterizer/codegen/gen_knobs.cpp',
'rasterizer/jitter/builder_gen.cpp',
'rasterizer/jitter/builder_x86.cpp',
'rasterizer/archrast/gen_ar_event.cpp',
env.Prepend(CPPPATH = [
'.',
'rasterizer',
- 'rasterizer/scripts',
+ 'rasterizer/codegen',
'rasterizer/core',
'rasterizer/jitter',
'rasterizer/archrast',
--- /dev/null
+# Copyright (C) 2014-2016 Intel Corporation. All Rights Reserved.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice (including the next
+# paragraph) shall be included in all copies or substantial portions of the
+# Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+
+# Python source
+from __future__ import print_function
+import os
+import sys
+import re
+import argparse
+from mako.template import Template
+from mako.exceptions import RichTraceback
+
+def write_template_to_string(template_filename, **kwargs):
+ try:
+ template = Template(filename=template_filename)
+ # Split + Join fixes line-endings for whatever platform you are using
+ return '\n'.join(template.render(**kwargs).splitlines())
+ except:
+ traceback = RichTraceback()
+ for (filename, lineno, function, line) in traceback.traceback:
+ print("File %s, line %s, in %s" % (filename, lineno, function))
+ print(line, "\n")
+ print("%s: %s" % (str(traceback.error.__class__.__name__), traceback.error))
+
+def write_template_to_file(template_filename, output_filename, **kwargs):
+ with open(output_filename, "w") as outfile:
+ print(write_template_to_string(template_filename, **kwargs), file=outfile)
+
+def parse_event_fields(lines, idx, event_dict):
+ field_names = []
+ field_types = []
+ end_of_event = False
+
+ num_fields = 0
+
+ # record all fields in event definition.
+ # note: we don't check if there's a leading brace.
+ while not end_of_event and idx < len(lines):
+ line = lines[idx].rstrip()
+ idx += 1
+
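+        # a field line is expected to be of the form "<type> <name>", e.g. "uint32_t count"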
+ field = re.match(r"(\s*)(\w+)(\s*)(\w+)", line)
+
+ if field:
+ field_types.append(field.group(2))
+ field_names.append(field.group(4))
+ num_fields += 1
+
+ end_of_event = re.match(r"(\s*)};", line)
+
+ event_dict['field_types'] = field_types
+ event_dict['field_names'] = field_names
+ event_dict['num_fields'] = num_fields
+
+ return idx
+
+def parse_enums(lines, idx, event_dict):
+ enum_names = []
+ end_of_enum = False
+
+ # record all enum values in enumeration
+ # note: we don't check if there's a leading brace.
+ while not end_of_enum and idx < len(lines):
+ line = lines[idx].rstrip()
+ idx += 1
+
+ preprocessor = re.search(r"#if|#endif", line)
+
+ if not preprocessor:
+ enum = re.match(r"(\s*)(\w+)(\s*)", line)
+
+ if enum:
+ enum_names.append(line)
+
+ end_of_enum = re.match(r"(\s*)};", line)
+
+ event_dict['names'] = enum_names
+ return idx
+
+def parse_protos(filename):
+ protos = {}
+
+ with open(filename, 'r') as f:
+ lines=f.readlines()
+
+ idx = 0
+
+ protos['events'] = {} # event dictionary containing events with their fields
+    protos['event_names'] = []  # keeps events in the order they were parsed (dicts are not ordered)
+ protos['enums'] = {}
+ protos['enum_names'] = []
+
+ eventId = 0
+ raw_text = []
+ while idx < len(lines):
+ line = lines[idx].rstrip()
+ idx += 1
+
+ # search for event definitions.
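+        # an event definition starts with a line of the form "event <EventName>"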
+ match = re.match(r"(\s*)event(\s*)(\w+)", line)
+
+ if match:
+ eventId += 1
+ event_name = match.group(3)
+ protos['event_names'].append(event_name)
+
+ protos['events'][event_name] = {}
+ protos['events'][event_name]['event_id'] = eventId
+ idx = parse_event_fields(lines, idx, protos['events'][event_name])
+
+ # search for enums.
+ match = re.match(r"(\s*)enum(\s*)(\w+)", line)
+
+ if match:
+ enum_name = match.group(3)
+ protos['enum_names'].append(enum_name)
+
+ protos['enums'][enum_name] = {}
+ idx = parse_enums(lines, idx, protos['enums'][enum_name])
+
+ return protos
+
+def main():
+
+ # Parse args...
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--proto", "-p", help="Path to proto file", required=True)
+ parser.add_argument("--output", "-o", help="Output filename (i.e. event.h)", required=True)
+ parser.add_argument("--gen_event_h", "-geh", help="Generate event header", action="store_true", default=False)
+ parser.add_argument("--gen_event_cpp", "-gec", help="Generate event cpp", action="store_true", default=False)
+ parser.add_argument("--gen_eventhandler_h", "-gehh", help="Generate eventhandler header", action="store_true", default=False)
+ parser.add_argument("--gen_eventhandlerfile_h", "-gehf", help="Generate eventhandler header for writing to files", action="store_true", default=False)
+ args = parser.parse_args()
+
+ proto_filename = args.proto
+
+ (output_dir, output_filename) = os.path.split(args.output)
+
+ if not output_dir:
+ output_dir = "."
+
+ #print("output_dir = %s" % output_dir, file=sys.stderr)
+ #print("output_filename = %s" % output_filename, file=sys.stderr)
+
+ if not os.path.exists(proto_filename):
+ print("Error: Could not find proto file %s" % proto_filename, file=sys.stderr)
+ return 1
+
+ protos = parse_protos(proto_filename)
+
+ # Generate event header
+ if args.gen_event_h:
+ curdir = os.path.dirname(os.path.abspath(__file__))
+ template_file = os.sep.join([curdir, 'templates', 'ar_event_h.template'])
+ output_fullpath = os.sep.join([output_dir, output_filename])
+
+ write_template_to_file(template_file, output_fullpath,
+ filename=output_filename,
+ protos=protos)
+
+ # Generate event implementation
+ if args.gen_event_cpp:
+ curdir = os.path.dirname(os.path.abspath(__file__))
+ template_file = os.sep.join([curdir, 'templates', 'ar_event_cpp.template'])
+ output_fullpath = os.sep.join([output_dir, output_filename])
+
+ write_template_to_file(template_file, output_fullpath,
+ filename=output_filename,
+ protos=protos)
+
+ # Generate event handler header
+ if args.gen_eventhandler_h:
+ curdir = os.path.dirname(os.path.abspath(__file__))
+ template_file = os.sep.join([curdir, 'templates', 'ar_eventhandler_h.template'])
+ output_fullpath = os.sep.join([output_dir, output_filename])
+
+ write_template_to_file(template_file, output_fullpath,
+ filename=output_filename,
+ event_header="gen_ar_event.h", # todo: fix this!
+ protos=protos)
+
+ # Generate event handler header
+ if args.gen_eventhandlerfile_h:
+ curdir = os.path.dirname(os.path.abspath(__file__))
+ template_file = os.sep.join([curdir, 'templates', 'ar_eventhandlerfile_h.template'])
+ output_fullpath = os.sep.join([output_dir, output_filename])
+
+ write_template_to_file(template_file, output_fullpath,
+ filename=output_filename,
+ event_header="gen_ar_eventhandler.h", # todo: fix this!
+ protos=protos)
+
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main())
+
--- /dev/null
+# Copyright (C) 2017 Intel Corporation. All Rights Reserved.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice (including the next
+# paragraph) shall be included in all copies or substantial portions of the
+# Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+
+# Python source
+# Compatible with Python2.X and Python3.X
+
+from __future__ import print_function
+import itertools
+import math
+import argparse
+import os
+import sys
+from mako.template import Template
+from mako.exceptions import RichTraceback
+
+def write_template_to_string(template_filename, **kwargs):
+ try:
+ template = Template(filename=os.path.abspath(template_filename))
+ # Split + Join fixes line-endings for whatever platform you are using
+ return '\n'.join(template.render(**kwargs).splitlines())
+ except:
+ traceback = RichTraceback()
+ for (filename, lineno, function, line) in traceback.traceback:
+ print("File %s, line %s, in %s" % (filename, lineno, function))
+ print(line, "\n")
+ print("%s: %s" % (str(traceback.error.__class__.__name__), traceback.error))
+
+def write_template_to_file(template_filename, output_filename, **kwargs):
+ output_dirname = os.path.dirname(output_filename)
+ if not os.path.exists(output_dirname):
+ os.makedirs(output_dirname)
+ with open(output_filename, "w") as outfile:
+ print(write_template_to_string(template_filename, **kwargs), file=outfile)
+
+
+def main(args=sys.argv[1:]):
+ thisDir = os.path.dirname(os.path.realpath(__file__))
+    parser = argparse.ArgumentParser(description="Generate files and initialization functions for all permutations of BackendPixelRate.")
+ parser.add_argument('--dim', help="gBackendPixelRateTable array dimensions", nargs='+', type=int, required=True)
+ parser.add_argument('--outdir', help="output directory", nargs='?', type=str, default=thisDir)
+ parser.add_argument('--split', help="how many lines of initialization per file [0=no split]", nargs='?', type=int, default='512')
+ parser.add_argument('--cpp', help="Generate cpp file(s)", action='store_true', default=False)
+ parser.add_argument('--cmake', help="Generate cmake file", action='store_true', default=False)
+
+
+    args = parser.parse_args(args)
+
+ output_list = []
+ for x in args.dim:
+ output_list.append(list(range(x)))
+
+    # generate all possible permutations of the template parameter inputs
+ output_combinations = list(itertools.product(*output_list))
+ output_list = []
+
+ # for each permutation
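+    # each permutation becomes one initializer line of the form:
+    #   gBackendPixelRateTable[0][1][2][0][0][0] = BackendPixelRate<SwrBackendTraits<0,1,2,0,0,0>>;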
+ for x in range(len(output_combinations)):
+        # separate each template parameter into its own list member
+ new_list = [output_combinations[x][i] for i in range(len(output_combinations[x]))]
+ tempStr = 'gBackendPixelRateTable'
+        # print each list member as an index in the multidimensional array
+ for i in new_list:
+ tempStr += '[' + str(i) + ']'
+        # join the permutation entries into the template argument list, completing the instantiation string
+ tempStr += " = BackendPixelRate<SwrBackendTraits<" + ','.join(map(str, output_combinations[x])) + '>>;'
+        # append the line of C++ code to the list of output lines
+ output_list.append(tempStr)
+
+ # how many files should we split the global template initialization into?
+ if (args.split == 0):
+ numFiles = 1
+ else:
+ numFiles = (len(output_list) + args.split - 1) // args.split
+ linesPerFile = (len(output_list) + numFiles - 1) // numFiles
+ chunkedList = [output_list[x:x+linesPerFile] for x in range(0, len(output_list), linesPerFile)]
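+    # e.g. --dim 5 2 3 2 2 2 --split 0 (as used by the build rules) gives 5*2*3*2*2*2 = 240
+    # initializers in a single BackendPixelRate0.cpp; --split 100 would chunk the same 240
+    # lines into 3 files of 80 initializers each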
+
+    # base name of the generated .cpp files (also used when emitting the cmake file below)
+    baseCppName = os.path.join(args.outdir, 'BackendPixelRate%s.cpp')
+
+    # generate .cpp files
+    if args.cpp:
+        templateCpp = os.path.join(thisDir, 'templates', 'backend_template.cpp')
+
+ for fileNum in range(numFiles):
+ filename = baseCppName % str(fileNum)
+ print('Generating', filename)
+ write_template_to_file(
+ templateCpp,
+ baseCppName % str(fileNum),
+ fileNum=fileNum,
+ funcList=chunkedList[fileNum])
+
+ # generate gen_backend.cmake file
+ if args.cmake:
+ templateCmake = os.path.join(thisDir, 'templates', 'backend_template.cmake')
+ cmakeFile = os.path.join(args.outdir, 'gen_backends.cmake')
+ print('Generating', cmakeFile)
+ write_template_to_file(
+ templateCmake,
+ cmakeFile,
+ numFiles=numFiles,
+ baseCppName=baseCppName.replace('\\','/'))
+
+ print("Generated %d template instantiations in %d files" % (len(output_list), numFiles))
+
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main())
--- /dev/null
+# Copyright (C) 2014-2016 Intel Corporation. All Rights Reserved.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice (including the next
+# paragraph) shall be included in all copies or substantial portions of the
+# Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+
+# Python source
+from __future__ import print_function
+import os
+import sys
+import argparse
+import knob_defs
+from mako.template import Template
+from mako.exceptions import RichTraceback
+
+def write_template_to_string(template_filename, **kwargs):
+ try:
+ template = Template(filename=os.path.abspath(template_filename))
+ # Split + Join fixes line-endings for whatever platform you are using
+ return '\n'.join(template.render(**kwargs).splitlines())
+ except:
+ traceback = RichTraceback()
+ for (filename, lineno, function, line) in traceback.traceback:
+ print("File %s, line %s, in %s" % (filename, lineno, function))
+ print(line, "\n")
+ print("%s: %s" % (str(traceback.error.__class__.__name__), traceback.error))
+
+def write_template_to_file(template_filename, output_filename, **kwargs):
+ output_dirname = os.path.dirname(output_filename)
+ if not os.path.exists(output_dirname):
+ os.makedirs(output_dirname)
+ with open(output_filename, "w") as outfile:
+ print(write_template_to_string(template_filename, **kwargs), file=outfile)
+
+def main(args=sys.argv[1:]):
+
+ # parse args
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--input", "-i", help="Path to knobs.template", required=True)
+ parser.add_argument("--output", "-o", help="Path to output file", required=True)
+ parser.add_argument("--gen_h", "-gen_h", help="Generate gen_knobs.h", action="store_true", default=False)
+ parser.add_argument("--gen_cpp", "-gen_cpp", help="Generate gen_knobs.cpp", action="store_true", required=False)
+
+ args = parser.parse_args()
+
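+    # the same knobs.template is rendered for both outputs; only the gen_header flag differs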
+ if args.input:
+ if args.gen_h:
+ write_template_to_file(args.input,
+ args.output,
+ filename='gen_knobs',
+ knobs=knob_defs.KNOBS,
+ includes=['core/knobs_init.h', 'common/os.h', 'sstream', 'iomanip'],
+ gen_header=True)
+
+ if args.gen_cpp:
+ write_template_to_file(args.input,
+ args.output,
+ filename='gen_knobs',
+ knobs=knob_defs.KNOBS,
+ includes=['core/knobs_init.h', 'common/os.h', 'sstream', 'iomanip'],
+ gen_header=False)
+
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main())
+
--- /dev/null
+# Copyright (C) 2014-2015 Intel Corporation. All Rights Reserved.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice (including the next
+# paragraph) shall be included in all copies or substantial portions of the
+# Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+
+#!deps/python32/python.exe
+
+import os, sys, re
+import argparse
+import json as JSON
+import operator
+
+header = r"""/****************************************************************************
+* Copyright (C) 2014-2016 Intel Corporation. All Rights Reserved.
+*
+* Permission is hereby granted, free of charge, to any person obtaining a
+* copy of this software and associated documentation files (the "Software"),
+* to deal in the Software without restriction, including without limitation
+* the rights to use, copy, modify, merge, publish, distribute, sublicense,
+* and/or sell copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following conditions:
+*
+* The above copyright notice and this permission notice (including the next
+* paragraph) shall be included in all copies or substantial portions of the
+* Software.
+*
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+* IN THE SOFTWARE.
+*
+* @file %s
+*
+* @brief auto-generated file
+*
+* DO NOT EDIT
+*
+******************************************************************************/
+
+"""
+
+"""
+"""
+def gen_file_header(filename):
+ global header
+ headerStr = header % filename
+ return headerStr.splitlines()
+
+
+inst_aliases = {
+ 'SHUFFLE_VECTOR': 'VSHUFFLE',
+ 'INSERT_ELEMENT': 'VINSERT',
+ 'EXTRACT_ELEMENT': 'VEXTRACT',
+ 'MEM_SET': 'MEMSET',
+ 'MEM_CPY': 'MEMCOPY',
+ 'MEM_MOVE': 'MEMMOVE',
+ 'L_SHR': 'LSHR',
+ 'A_SHR': 'ASHR',
+ 'BIT_CAST': 'BITCAST',
+ 'U_DIV': 'UDIV',
+ 'S_DIV': 'SDIV',
+ 'U_REM': 'UREM',
+ 'S_REM': 'SREM',
+ 'BIN_OP': 'BINOP',
+}
+
+intrinsics = [
+ ["VGATHERPD", "x86_avx2_gather_d_pd_256", ["src", "pBase", "indices", "mask", "scale"]],
+ ["VGATHERPS", "x86_avx2_gather_d_ps_256", ["src", "pBase", "indices", "mask", "scale"]],
+ ["VGATHERDD", "x86_avx2_gather_d_d_256", ["src", "pBase", "indices", "mask", "scale"]],
+ ["VSQRTPS", "x86_avx_sqrt_ps_256", ["a"]],
+ ["VRSQRTPS", "x86_avx_rsqrt_ps_256", ["a"]],
+ ["VRCPPS", "x86_avx_rcp_ps_256", ["a"]],
+ ["VMINPS", "x86_avx_min_ps_256", ["a", "b"]],
+ ["VMAXPS", "x86_avx_max_ps_256", ["a", "b"]],
+ ["VROUND", "x86_avx_round_ps_256", ["a", "rounding"]],
+ ["VCMPPS", "x86_avx_cmp_ps_256", ["a", "b", "cmpop"]],
+ ["VBLENDVPS", "x86_avx_blendv_ps_256", ["a", "b", "mask"]],
+ ["BEXTR_32", "x86_bmi_bextr_32", ["src", "control"]],
+ ["VMASKLOADD", "x86_avx2_maskload_d_256", ["src", "mask"]],
+ ["VMASKMOVPS", "x86_avx_maskload_ps_256", ["src", "mask"]],
+ ["VMASKSTOREPS", "x86_avx_maskstore_ps_256", ["src", "mask", "val"]],
+ ["VPSHUFB", "x86_avx2_pshuf_b", ["a", "b"]],
+ ["VPERMD", "x86_avx2_permd", ["a", "idx"]],
+ ["VPERMPS", "x86_avx2_permps", ["idx", "a"]],
+ ["VCVTPD2PS", "x86_avx_cvt_pd2_ps_256", ["a"]],
+ ["VCVTPH2PS", "x86_vcvtph2ps_256", ["a"]],
+ ["VCVTPS2PH", "x86_vcvtps2ph_256", ["a", "round"]],
+ ["VHSUBPS", "x86_avx_hsub_ps_256", ["a", "b"]],
+ ["VPTESTC", "x86_avx_ptestc_256", ["a", "b"]],
+ ["VPTESTZ", "x86_avx_ptestz_256", ["a", "b"]],
+ ["VFMADDPS", "x86_fma_vfmadd_ps_256", ["a", "b", "c"]],
+ ["VMOVMSKPS", "x86_avx_movmsk_ps_256", ["a"]],
+ ["INTERRUPT", "x86_int", ["a"]],
+ ]
+
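+# Convert a CamelCase name to CAMEL_CASE, e.g. 'InsertElement' -> 'INSERT_ELEMENT'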
+def convert_uppercamel(name):
+ s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
+ return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).upper()
+
+"""
+    Given an input file (e.g. IRBuilder.h), builds a list of function descriptors, one per Create* method.
+"""
+def parse_ir_builder(input_file):
+
+ functions = []
+
+ lines = input_file.readlines()
+
+ idx = 0
+ while idx < len(lines) - 1:
+ line = lines[idx].rstrip()
+ idx += 1
+
+ #match = re.search(r"\*Create", line)
+ match = re.search(r"[\*\s]Create(\w*)\(", line)
+ if match is not None:
+ #print("Line: %s" % match.group(1))
+
+ if re.search(r"^\s*Create", line) is not None:
+ func_sig = lines[idx-2].rstrip() + line
+ else:
+ func_sig = line
+
+ end_of_args = False
+ while not end_of_args:
+ end_paren = re.search(r"\)", line)
+ if end_paren is not None:
+ end_of_args = True
+ else:
+ line = lines[idx].rstrip()
+ func_sig += line
+ idx += 1
+
+ delfunc = re.search(r"LLVM_DELETED_FUNCTION|= delete;", func_sig)
+
+ if not delfunc:
+ func = re.search(r"(.*?)\*[\n\s]*(Create\w*)\((.*?)\)", func_sig)
+ if func is not None:
+
+ return_type = func.group(1).lstrip() + '*'
+ func_name = func.group(2)
+ arguments = func.group(3)
+
+ func_args = ''
+ func_args_nodefs = ''
+
+ num_args = arguments.count(',')
+
+ arg_names = []
+ num_args = 0
+ args = arguments.split(',')
+ for arg in args:
+ arg = arg.lstrip()
+ if arg:
+ if num_args > 0:
+ func_args += ', '
+ func_args_nodefs += ', '
+ func_args += arg
+ func_args_nodefs += arg.split(' =')[0]
+
+ split_args = arg.split('=')
+ arg_name = split_args[0].rsplit(None, 1)[-1]
+
+ #print("Before ArgName = %s" % arg_name)
+
+ reg_arg = re.search(r"[\&\*]*(\w*)", arg_name)
+ if reg_arg:
+ #print("Arg Name = %s" % reg_arg.group(1))
+ arg_names += [reg_arg.group(1)]
+
+ num_args += 1
+
+ ignore = False
+
+ # The following functions need to be ignored.
+ if func_name == 'CreateInsertNUWNSWBinOp':
+ ignore = True
+
+ if func_name == 'CreateMaskedIntrinsic':
+ ignore = True
+
+ # Convert CamelCase to CAMEL_CASE
+ func_mod = re.search(r"Create(\w*)", func_name)
+ if func_mod:
+ func_mod = func_mod.group(1)
+ func_mod = convert_uppercamel(func_mod)
+ if func_mod[0:2] == 'F_' or func_mod[0:2] == 'I_':
+ func_mod = func_mod[0] + func_mod[2:]
+
+ # Substitute alias based on CAMEL_CASE name.
+ func_alias = inst_aliases.get(func_mod)
+ if not func_alias:
+ func_alias = func_mod
+
+ if func_name == 'CreateCall' or func_name == 'CreateGEP':
+ arglist = re.search(r'ArrayRef', func_args)
+ if arglist:
+ func_alias = func_alias + 'A'
+
+ if not ignore:
+ functions.append({
+ "name": func_name,
+ "alias": func_alias,
+ "return": return_type,
+ "args": func_args,
+ "args_nodefs": func_args_nodefs,
+ "arg_names": arg_names
+ })
+
+ return functions
+
+"""
+    Generates builder_gen.h: one Builder method declaration per IRBuilder Create* function.
+"""
+def generate_gen_h(functions, output_file):
+ output_lines = gen_file_header(os.path.basename(output_file.name))
+
+ output_lines += [
+ '#pragma once',
+ '',
+ '//////////////////////////////////////////////////////////////////////////',
+ '/// Auto-generated Builder IR declarations',
+ '//////////////////////////////////////////////////////////////////////////',
+ ]
+
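+    # each entry emits a declaration of the form "Value *FADD(<args>);", where the alias and
+    # argument list come from the corresponding IRBuilder Create* method (here CreateFAdd)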
+ for func in functions:
+ name = func['name']
+ if func['alias']:
+ name = func['alias']
+ output_lines += [
+ '%s%s(%s);' % (func['return'], name, func['args'])
+ ]
+
+ output_file.write('\n'.join(output_lines) + '\n')
+
+"""
+    Generates builder_gen.cpp: Builder method definitions that forward to IRBuilder.
+"""
+def generate_gen_cpp(functions, output_file):
+ output_lines = gen_file_header(os.path.basename(output_file.name))
+
+ output_lines += [
+ '#include \"builder.h\"',
+ '',
+ 'namespace SwrJit',
+ '{',
+ ' using namespace llvm;',
+ '',
+ ]
+
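+    # each entry emits a thin forwarding definition of the form
+    #   Value *Builder::FADD(<args>) { return IRB()->CreateFAdd(<args>); }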
+ for func in functions:
+ name = func['name']
+ if func['alias']:
+ name = func['alias']
+
+ args = func['arg_names']
+ func_args = ''
+ first_arg = True
+ for arg in args:
+ if not first_arg:
+ func_args += ', '
+ func_args += arg
+ first_arg = False
+
+ output_lines += [
+ ' //////////////////////////////////////////////////////////////////////////',
+ ' %sBuilder::%s(%s)' % (func['return'], name, func['args_nodefs']),
+ ' {',
+ ' return IRB()->%s(%s);' % (func['name'], func_args),
+ ' }',
+ '',
+ ]
+ output_lines.append('}')
+ output_file.write('\n'.join(output_lines) + '\n')
+
+"""
+    Generates builder_x86.h: declarations for the x86 intrinsic wrappers.
+"""
+def generate_x86_h(output_file):
+ output_lines = gen_file_header(os.path.basename(output_file.name))
+
+ output_lines += [
+ '#pragma once',
+ '',
+ '//////////////////////////////////////////////////////////////////////////',
+ '/// Auto-generated x86 intrinsics',
+ '//////////////////////////////////////////////////////////////////////////',
+ ]
+
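+    # each intrinsics entry emits a declaration, e.g. "Value *VSQRTPS(Value* a);"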
+ for inst in intrinsics:
+ #print("Inst: %s, x86: %s numArgs: %d" % (inst[0], inst[1], len(inst[2])))
+
+ args = ''
+ first = True
+ for arg in inst[2]:
+ if not first:
+ args += ', '
+ args += ("Value* %s" % arg)
+ first = False
+
+ output_lines += [
+ 'Value *%s(%s);' % (inst[0], args)
+ ]
+
+ output_file.write('\n'.join(output_lines) + '\n')
+
+"""
+    Generates builder_x86.cpp: definitions for the x86 intrinsic wrappers.
+"""
+def generate_x86_cpp(output_file):
+ output_lines = gen_file_header(os.path.basename(output_file.name))
+
+ output_lines += [
+ '#include \"builder.h\"',
+ '',
+ 'namespace SwrJit',
+ '{',
+ ' using namespace llvm;',
+ '',
+ ]
+
+ for inst in intrinsics:
+ #print("Inst: %s, x86: %s numArgs: %d" % (inst[0], inst[1], len(inst[2])))
+
+ args = ''
+ pass_args = ''
+ first = True
+ for arg in inst[2]:
+ if not first:
+ args += ', '
+ pass_args += ', '
+ args += ("Value* %s" % arg)
+ pass_args += arg
+ first = False
+
+ output_lines += [
+ ' //////////////////////////////////////////////////////////////////////////',
+ ' Value *Builder::%s(%s)' % (inst[0], args),
+ ' {',
+ ' Function *func = Intrinsic::getDeclaration(JM()->mpCurrentModule, Intrinsic::%s);' % inst[1],
+ ]
+ if inst[0] == "VPERMD":
+ rev_args = ''
+ first = True
+ for arg in reversed(inst[2]):
+ if not first:
+ rev_args += ', '
+ rev_args += arg
+ first = False
+
+ output_lines += [
+ '#if (HAVE_LLVM == 0x306) && (LLVM_VERSION_PATCH == 0)',
+ ' return CALL(func, std::initializer_list<Value*>{%s});' % rev_args,
+ '#else',
+ ]
+ output_lines += [
+ ' return CALL(func, std::initializer_list<Value*>{%s});' % pass_args,
+ ]
+ if inst[0] == "VPERMD":
+ output_lines += [
+ '#endif',
+ ]
+ output_lines += [
+ ' }',
+ '',
+ ]
+
+ output_lines.append('}')
+ output_file.write('\n'.join(output_lines) + '\n')
+
+"""
+    Entry point when the script is run from the command line.
+    Parses the arguments that select which file(s) to generate.
+"""
+def main():
+
+ # Parse args...
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--input", "-i", type=argparse.FileType('r'), help="Path to IRBuilder.h", required=False)
+ parser.add_argument("--output", "-o", type=argparse.FileType('w'), help="Path to output file", required=True)
+ parser.add_argument("--gen_h", "-gen_h", help="Generate builder_gen.h", action="store_true", default=False)
+ parser.add_argument("--gen_cpp", "-gen_cpp", help="Generate builder_gen.cpp", action="store_true", default=False)
+ parser.add_argument("--gen_x86_h", "-gen_x86_h", help="Generate x86 intrinsics. No input is needed.", action="store_true", default=False)
+ parser.add_argument("--gen_x86_cpp", "-gen_x86_cpp", help="Generate x86 intrinsics. No input is needed.", action="store_true", default=False)
+ args = parser.parse_args()
+
+ if args.input:
+ functions = parse_ir_builder(args.input)
+
+ if args.gen_h:
+ generate_gen_h(functions, args.output)
+
+ if args.gen_cpp:
+ generate_gen_cpp(functions, args.output)
+ else:
+ if args.gen_x86_h:
+ generate_x86_h(args.output)
+
+ if args.gen_x86_cpp:
+ generate_x86_cpp(args.output)
+
+ if args.gen_h:
+ print("Need to specify --input for --gen_h!")
+
+ if args.gen_cpp:
+ print("Need to specify --input for --gen_cpp!")
+
+if __name__ == '__main__':
+ main()
+# END OF FILE
--- /dev/null
+# Copyright (C) 2014-2015 Intel Corporation. All Rights Reserved.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice (including the next
+# paragraph) shall be included in all copies or substantial portions of the
+# Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+
+#!deps/python32/python.exe
+
+import os, sys, re
+import argparse
+import json as JSON
+import operator
+
+header = r"""
+/****************************************************************************
+* Copyright (C) 2014-2016 Intel Corporation. All Rights Reserved.
+*
+* Permission is hereby granted, free of charge, to any person obtaining a
+* copy of this software and associated documentation files (the "Software"),
+* to deal in the Software without restriction, including without limitation
+* the rights to use, copy, modify, merge, publish, distribute, sublicense,
+* and/or sell copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following conditions:
+*
+* The above copyright notice and this permission notice (including the next
+* paragraph) shall be included in all copies or substantial portions of the
+* Software.
+*
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+* IN THE SOFTWARE.
+*
+* @file %s
+*
+* @brief auto-generated file
+*
+* DO NOT EDIT
+*
+******************************************************************************/
+
+#pragma once
+
+namespace SwrJit
+{
+ using namespace llvm;
+
+"""
+
+"""
+"""
+def gen_file_header(filename):
+ global header
+ headerStr = header % filename
+ return headerStr.splitlines()
+
+"""
+"""
+def gen_llvm_type(type, name, postfix_name, is_pointer, is_pointer_pointer, is_array, is_array_array, array_count, array_count1, is_llvm_struct, is_llvm_enum, is_llvm_pfn, output_file):
+
+ llvm_type = ''
+
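+    # e.g. a "uint32_t" field maps to Type::getInt32Ty(ctx), "UINT foo[4]" to
+    # ArrayType::get(Type::getInt32Ty(ctx), 4), and pointer fields get wrapped in PointerType::get(..., 0)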
+ if is_llvm_struct:
+ if is_pointer or is_pointer_pointer:
+ llvm_type = 'Type::getInt32Ty(ctx)'
+ else:
+ llvm_type = 'ArrayType::get(Type::getInt8Ty(ctx), sizeof(%s))' % type
+ elif is_llvm_enum:
+ llvm_type = 'Type::getInt32Ty(ctx)'
+ elif is_llvm_pfn:
+ llvm_type = 'PointerType::get(Type::getInt8Ty(ctx), 0)'
+ else:
+ if type == "BYTE" or type == "char" or type == "uint8_t" or type == "int8_t" or type == 'bool':
+ llvm_type = 'Type::getInt8Ty(ctx)'
+ elif type == 'UINT64' or type == 'INT64' or type == 'uint64_t' or type == 'int64_t':
+ llvm_type = 'Type::getInt64Ty(ctx)'
+ elif type == 'UINT16' or type == 'int16_t' or type == 'uint16_t':
+ llvm_type = 'Type::getInt16Ty(ctx)'
+ elif type == 'UINT' or type == 'INT' or type == 'int' or type == 'BOOL' or type == 'uint32_t' or type == 'int32_t':
+ llvm_type = 'Type::getInt32Ty(ctx)'
+ elif type == 'float' or type == 'FLOAT':
+ llvm_type = 'Type::getFloatTy(ctx)'
+ elif type == 'double' or type == 'DOUBLE':
+ llvm_type = 'Type::getDoubleTy(ctx)'
+ elif type == 'void' or type == 'VOID':
+ llvm_type = 'Type::getInt32Ty(ctx)'
+ elif type == 'HANDLE':
+ llvm_type = 'PointerType::get(Type::getInt32Ty(ctx), 0)'
+ elif type == 'simdscalar':
+ llvm_type = 'VectorType::get(Type::getFloatTy(ctx), pJitMgr->mVWidth)'
+ elif type == 'simdscalari':
+ llvm_type = 'VectorType::get(Type::getInt32Ty(ctx), pJitMgr->mVWidth)'
+ elif type == 'simdvector':
+ llvm_type = 'ArrayType::get(VectorType::get(Type::getFloatTy(ctx), pJitMgr->mVWidth), 4)'
+ else:
+ llvm_type = 'Gen_%s%s(pJitMgr)' % (type, postfix_name)
+
+ if is_pointer:
+ llvm_type = 'PointerType::get(%s, 0)' % llvm_type
+
+ if is_pointer_pointer:
+ llvm_type = 'PointerType::get(%s, 0)' % llvm_type
+
+ if is_array_array:
+ llvm_type = 'ArrayType::get(ArrayType::get(%s, %s), %s)' % (llvm_type, array_count1, array_count)
+ elif is_array:
+ llvm_type = 'ArrayType::get(%s, %s)' % (llvm_type, array_count)
+
+ return [' members.push_back( %s ); // %s' % (llvm_type, name)]
+
+"""
+"""
+def gen_llvm_types(input_file, output_file):
+
+ output_lines = gen_file_header(os.path.basename(output_file.name))
+
+ lines = input_file.readlines()
+
+ postfix_name = ""
+
+ for idx in range(len(lines)):
+ line = lines[idx].rstrip()
+
+ if "gen_llvm_types FINI" in line:
+ break
+
+ match = re.match(r"(\s*)struct(\s*)(\w+)", line)
+ if match:
+ llvm_args = []
+
+            # Make sure this is a struct definition, not just a forward declaration
+ is_fwd_decl = re.search(r";", line)
+
+ if not is_fwd_decl:
+
+                # Extract the structure name
+ struct_name = match.group(3).strip()
+
+ output_lines += [
+ ' //////////////////////////////////////////////////////////////////////////',
+ ' /// Generate LLVM type information for %s' % struct_name,
+ ' INLINE static StructType *Gen_%s%s(JitManager* pJitMgr)' % (struct_name, postfix_name),
+ ' {',
+ ' LLVMContext& ctx = pJitMgr->mContext;',
+ ' std::vector<Type*> members;',
+ '',
+ ]
+
+ end_of_struct = False
+
+ while not end_of_struct and idx < len(lines)-1:
+ idx += 1
+ line = lines[idx].rstrip()
+
+ is_llvm_typedef = re.search(r"@llvm_typedef", line)
+ if is_llvm_typedef is not None:
+ is_llvm_typedef = True
+ else:
+ is_llvm_typedef = False
+
+ ###########################################
+                    # Is field an llvm struct? Tells the script to treat the type as an array of bytes the size of the structure.
+ is_llvm_struct = re.search(r"@llvm_struct", line)
+
+ if is_llvm_struct is not None:
+ is_llvm_struct = True
+ else:
+ is_llvm_struct = False
+
+ ###########################################
+                    # Is field an llvm enum? Tells the script to treat the type as an enum and replace it with a uint32 type.
+ is_llvm_enum = re.search(r"@llvm_enum", line)
+
+ if is_llvm_enum is not None:
+ is_llvm_enum = True
+ else:
+ is_llvm_enum = False
+
+ ###########################################
+                    # Is field an llvm function pointer? Tells the script to replace the type with an opaque pointer (int8*).
+ is_llvm_pfn = re.search(r"@llvm_pfn", line)
+
+ if is_llvm_pfn is not None:
+ is_llvm_pfn = True
+ else:
+ is_llvm_pfn = False
+
+ ###########################################
+ # Is field const?
+ is_const = re.search(r"\s+const\s+", line)
+
+ if is_const is not None:
+ is_const = True
+ else:
+ is_const = False
+
+ ###########################################
+                    # Is field a pointer to a pointer?
+                    is_pointer_pointer = re.search(r"\*\*", line)
+
+ if is_pointer_pointer is not None:
+ is_pointer_pointer = True
+ else:
+ is_pointer_pointer = False
+
+ ###########################################
+ # Is field a pointer?
+                    is_pointer = re.search(r"\*", line)
+
+ if is_pointer is not None:
+ is_pointer = True
+ else:
+ is_pointer = False
+
+ ###########################################
+ # Is field an array of arrays?
+ # TODO: Can add this to a list.
+                    is_array_array = re.search(r"\[(\w*)\]\[(\w*)\]", line)
+ array_count = '0'
+ array_count1 = '0'
+
+ if is_array_array is not None:
+ array_count = is_array_array.group(1)
+ array_count1 = is_array_array.group(2)
+ is_array_array = True
+ else:
+ is_array_array = False
+
+ ###########################################
+ # Is field an array?
+                    is_array = re.search(r"\[(\w*)\]", line)
+
+ if is_array is not None:
+ array_count = is_array.group(1)
+ is_array = True
+ else:
+ is_array = False
+
+ is_scoped = re.search("::", line)
+
+ if is_scoped is not None:
+ is_scoped = True
+ else:
+ is_scoped = False
+
+ type = None
+ name = None
+ if is_const and is_pointer:
+
+ if is_scoped:
+ field_match = re.match(r"(\s*)(\w+\<*\w*\>*)(\s+)(\w+::)(\w+)(\s*\**\s*)(\w+)", line)
+
+ type = "%s%s" % (field_match.group(4), field_match.group(5))
+ name = field_match.group(7)
+ else:
+ field_match = re.match(r"(\s*)(\w+\<*\w*\>*)(\s+)(\w+)(\s*\**\s*)(\w+)", line)
+
+ type = field_match.group(4)
+ name = field_match.group(6)
+
+ elif is_pointer:
+ field_match = re.match(r"(\s*)(\s+)(\w+\<*\w*\>*)(\s*\**\s*)(\w+)", line)
+
+ if field_match:
+ type = field_match.group(3)
+ name = field_match.group(5)
+ elif is_const:
+ field_match = re.match(r"(\s*)(\w+\<*\w*\>*)(\s+)(\w+)(\s*)(\w+)", line)
+
+ if field_match:
+ type = field_match.group(4)
+ name = field_match.group(6)
+ else:
+ if is_scoped:
+ field_match = re.match(r"\s*(\w+\<*\w*\>*)\s*::\s*(\w+\<*\w*\>*)\s+(\w+)", line)
+
+ if field_match:
+ type = field_match.group(1) + '::' + field_match.group(2)
+ name = field_match.group(3)
+ else:
+ field_match = re.match(r"(\s*)(\w+\<*\w*\>*)(\s+)(\w+)", line)
+
+ if field_match:
+ type = field_match.group(2)
+ name = field_match.group(4)
+
+ if is_llvm_typedef is False:
+ if type is not None:
+ output_lines += gen_llvm_type(type, name, postfix_name, is_pointer, is_pointer_pointer, is_array, is_array_array, array_count, array_count1, is_llvm_struct, is_llvm_enum, is_llvm_pfn, output_file)
+ llvm_args.append(name)
+
+ # Detect end of structure
+ end_of_struct = re.match(r"(\s*)};", line)
+
+ if (end_of_struct):
+ output_lines += [
+ '',
+ ' return StructType::get(ctx, members, false);',
+ ' }',
+ '',
+ ]
+
+ for i in range(len(llvm_args)):
+ output_lines.append(' static const uint32_t %s%s_%s = %s;' % (struct_name, postfix_name, llvm_args[i], i))
+
+ output_lines.append('')
+
+ output_lines.append('}')
+ output_file.write('\n'.join(output_lines) + '\n')
+
+"""
+    Entry point when the script is run from the command line.
+    Parses the arguments that select the input header and the output file.
+"""
+def main():
+
+ # Parse args...
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--input", "-i", type=argparse.FileType('r'),
+ help="Path to input file containing structs", required=True)
+ parser.add_argument("--output", "-o", type=argparse.FileType('w'),
+ help="Path to output file", required=True)
+ parser.add_argument("--scalar", "-scalar", help="Generates scalar files with all enums", action="store_true", default=False)
+ args = parser.parse_args()
+
+ gen_llvm_types(args.input, args.output)
+
+if __name__ == '__main__':
+ main()
+# END OF FILE
--- /dev/null
+# Copyright (C) 2014-2016 Intel Corporation. All Rights Reserved.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice (including the next
+# paragraph) shall be included in all copies or substantial portions of the
+# Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+
+# Python source
+KNOBS = [
+
+ ['ENABLE_ASSERT_DIALOGS', {
+ 'type' : 'bool',
+ 'default' : 'true',
+ 'desc' : ['Use dialogs when asserts fire.',
+ 'Asserts are only enabled in debug builds'],
+ 'category' : 'debug',
+ }],
+
+ ['SINGLE_THREADED', {
+ 'type' : 'bool',
+ 'default' : 'false',
+ 'desc' : ['If enabled will perform all rendering on the API thread.',
+ 'This is useful mainly for debugging purposes.'],
+ 'category' : 'debug',
+ }],
+
+ ['DUMP_SHADER_IR', {
+ 'type' : 'bool',
+ 'default' : 'false',
+ 'desc' : ['Dumps shader LLVM IR at various stages of jit compilation.'],
+ 'category' : 'debug',
+ }],
+
+ ['USE_GENERIC_STORETILE', {
+ 'type' : 'bool',
+ 'default' : 'false',
+ 'desc' : ['Always use generic function for performing StoreTile.',
+ 'Will be slightly slower than using optimized (jitted) path'],
+ 'category' : 'debug',
+ }],
+
+ ['FAST_CLEAR', {
+ 'type' : 'bool',
+ 'default' : 'true',
+ 'desc' : ['Replace 3D primitive execute with a SWRClearRT operation and',
+ 'defer clear execution to first backend op on hottile, or hottile store'],
+ 'category' : 'perf',
+ }],
+
+ ['MAX_NUMA_NODES', {
+ 'type' : 'uint32_t',
+ 'default' : '0',
+ 'desc' : ['Maximum # of NUMA-nodes per system used for worker threads',
+ ' 0 == ALL NUMA-nodes in the system',
+ ' N == Use at most N NUMA-nodes for rendering'],
+ 'category' : 'perf',
+ }],
+
+ ['MAX_CORES_PER_NUMA_NODE', {
+ 'type' : 'uint32_t',
+ 'default' : '0',
+ 'desc' : ['Maximum # of cores per NUMA-node used for worker threads.',
+ ' 0 == ALL non-API thread cores per NUMA-node',
+ ' N == Use at most N cores per NUMA-node'],
+ 'category' : 'perf',
+ }],
+
+ ['MAX_THREADS_PER_CORE', {
+ 'type' : 'uint32_t',
+ 'default' : '1',
+ 'desc' : ['Maximum # of (hyper)threads per physical core used for worker threads.',
+ ' 0 == ALL hyper-threads per core',
+ ' N == Use at most N hyper-threads per physical core'],
+ 'category' : 'perf',
+ }],
+
+ ['MAX_WORKER_THREADS', {
+ 'type' : 'uint32_t',
+ 'default' : '0',
+ 'desc' : ['Maximum worker threads to spawn.',
+ '',
+ 'IMPORTANT: If this is non-zero, no worker threads will be bound to',
+ 'specific HW threads. They will all be "floating" SW threads.',
+ 'In this case, the above 3 KNOBS will be ignored.'],
+ 'category' : 'perf',
+ }],
+
+ ['BUCKETS_START_FRAME', {
+ 'type' : 'uint32_t',
+ 'default' : '1200',
+ 'desc' : ['Frame at which to start saving buckets data.',
+ '',
+ 'NOTE: KNOB_ENABLE_RDTSC must be enabled in core/knobs.h',
+ 'for this to have an effect.'],
+ 'category' : 'perf',
+ }],
+
+ ['BUCKETS_END_FRAME', {
+ 'type' : 'uint32_t',
+ 'default' : '1400',
+ 'desc' : ['Frame at which to stop saving buckets data.',
+ '',
+ 'NOTE: KNOB_ENABLE_RDTSC must be enabled in core/knobs.h',
+ 'for this to have an effect.'],
+ 'category' : 'perf',
+ }],
+
+ ['WORKER_SPIN_LOOP_COUNT', {
+ 'type' : 'uint32_t',
+ 'default' : '5000',
+ 'desc' : ['Number of spin-loop iterations worker threads will perform',
+ 'before going to sleep when waiting for work'],
+ 'category' : 'perf',
+ }],
+
+ ['MAX_DRAWS_IN_FLIGHT', {
+ 'type' : 'uint32_t',
+ 'default' : '128',
+ 'desc' : ['Maximum number of draws outstanding before the API thread blocks.',
+ 'This value MUST divide evenly into 2^32'],
+ 'category' : 'perf',
+ }],
+
+ ['MAX_PRIMS_PER_DRAW', {
+ 'type' : 'uint32_t',
+ 'default' : '2040',
+ 'desc' : ['Maximum number of primitives in a single Draw().',
+ 'Draws with more primitives are split into smaller Draw calls.',
+ 'Should be a multiple of (3 * vectorWidth).'],
+ 'category' : 'perf',
+ }],
+
+ ['MAX_TESS_PRIMS_PER_DRAW', {
+ 'type' : 'uint32_t',
+ 'default' : '16',
+ 'desc' : ['Maximum number of primitives in a single Draw() with tessellation enabled.',
+ 'Draws with more primitives are split into smaller Draw calls.',
+ 'Should be a multiple of (vectorWidth).'],
+ 'category' : 'perf',
+ }],
+
+
+ ['DEBUG_OUTPUT_DIR', {
+ 'type' : 'std::string',
+ 'default' : '/tmp/Rast/DebugOutput',
+ 'desc' : ['Output directory for debug data.'],
+ 'category' : 'debug',
+ }],
+
+ ['TOSS_DRAW', {
+ 'type' : 'bool',
+ 'default' : 'false',
+ 'desc' : ['Disable per-draw/dispatch execution'],
+ 'category' : 'perf',
+ }],
+
+ ['TOSS_QUEUE_FE', {
+ 'type' : 'bool',
+ 'default' : 'false',
+ 'desc' : ['Stop per-draw execution at worker FE',
+ '',
+ 'NOTE: Requires KNOB_ENABLE_TOSS_POINTS to be enabled in core/knobs.h'],
+ 'category' : 'perf',
+ 'advanced' : 'true',
+ }],
+
+ ['TOSS_FETCH', {
+ 'type' : 'bool',
+ 'default' : 'false',
+ 'desc' : ['Stop per-draw execution at vertex fetch',
+ '',
+ 'NOTE: Requires KNOB_ENABLE_TOSS_POINTS to be enabled in core/knobs.h'],
+ 'category' : 'perf',
+ 'advanced' : 'true',
+ }],
+
+ ['TOSS_IA', {
+ 'type' : 'bool',
+ 'default' : 'false',
+ 'desc' : ['Stop per-draw execution at input assembler',
+ '',
+ 'NOTE: Requires KNOB_ENABLE_TOSS_POINTS to be enabled in core/knobs.h'],
+ 'category' : 'perf',
+ 'advanced' : 'true',
+ }],
+
+ ['TOSS_VS', {
+ 'type' : 'bool',
+ 'default' : 'false',
+ 'desc' : ['Stop per-draw execution at vertex shader',
+ '',
+ 'NOTE: Requires KNOB_ENABLE_TOSS_POINTS to be enabled in core/knobs.h'],
+ 'category' : 'perf',
+ 'advanced' : 'true',
+ }],
+
+ ['TOSS_SETUP_TRIS', {
+ 'type' : 'bool',
+ 'default' : 'false',
+ 'desc' : ['Stop per-draw execution at primitive setup',
+ '',
+ 'NOTE: Requires KNOB_ENABLE_TOSS_POINTS to be enabled in core/knobs.h'],
+ 'category' : 'perf',
+ 'advanced' : 'true',
+ }],
+
+ ['TOSS_BIN_TRIS', {
+ 'type' : 'bool',
+ 'default' : 'false',
+ 'desc' : ['Stop per-draw execution at primitive binning',
+ '',
+ 'NOTE: Requires KNOB_ENABLE_TOSS_POINTS to be enabled in core/knobs.h'],
+ 'category' : 'perf',
+ 'advanced' : 'true',
+ }],
+
+ ['TOSS_RS', {
+ 'type' : 'bool',
+ 'default' : 'false',
+ 'desc' : ['Stop per-draw execution at rasterizer',
+ '',
+ 'NOTE: Requires KNOB_ENABLE_TOSS_POINTS to be enabled in core/knobs.h'],
+ 'category' : 'perf',
+ 'advanced' : 'true',
+ }],
+
+ ]
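+
+# Shape of a knob entry, for reference when adding new knobs (the name and
+# values below are hypothetical, not part of this change):
+#
+#   ['MY_NEW_KNOB', {
+#       'type'     : 'uint32_t',
+#       'default'  : '0',
+#       'desc'     : ['One line of description per list element.'],
+#       'category' : 'perf',
+#       'advanced' : 'true',   # optional; most knobs omit it
+#   }],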
--- /dev/null
+# mako/__init__.py
+# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+
+__version__ = '1.0.1'
--- /dev/null
+# mako/_ast_util.py
+# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""
+ ast
+ ~~~
+
+ The `ast` module helps Python applications to process trees of the Python
+ abstract syntax grammar. The abstract syntax itself might change with
+ each Python release; this module helps to find out programmatically what
+ the current grammar looks like and allows modifications of it.
+
+ An abstract syntax tree can be generated by passing `ast.PyCF_ONLY_AST` as
+ a flag to the `compile()` builtin function or by using the `parse()`
+ function from this module. The result will be a tree of objects whose
+ classes all inherit from `ast.AST`.
+
+ A modified abstract syntax tree can be compiled into a Python code object
+ using the built-in `compile()` function.
+
+ Additionally various helper functions are provided that make working with
+ the trees simpler. The main intention of the helper functions and this
+ module in general is to provide an easy to use interface for libraries
+ that work tightly with the python syntax (template engines for example).
+
+
+ :copyright: Copyright 2008 by Armin Ronacher.
+ :license: Python License.
+"""
+from _ast import *
+from mako.compat import arg_stringname
+
+BOOLOP_SYMBOLS = {
+ And: 'and',
+ Or: 'or'
+}
+
+BINOP_SYMBOLS = {
+ Add: '+',
+ Sub: '-',
+ Mult: '*',
+ Div: '/',
+ FloorDiv: '//',
+ Mod: '%',
+ LShift: '<<',
+ RShift: '>>',
+ BitOr: '|',
+ BitAnd: '&',
+ BitXor: '^'
+}
+
+CMPOP_SYMBOLS = {
+ Eq: '==',
+ Gt: '>',
+ GtE: '>=',
+ In: 'in',
+ Is: 'is',
+ IsNot: 'is not',
+ Lt: '<',
+ LtE: '<=',
+ NotEq: '!=',
+ NotIn: 'not in'
+}
+
+UNARYOP_SYMBOLS = {
+ Invert: '~',
+ Not: 'not',
+ UAdd: '+',
+ USub: '-'
+}
+
+ALL_SYMBOLS = {}
+ALL_SYMBOLS.update(BOOLOP_SYMBOLS)
+ALL_SYMBOLS.update(BINOP_SYMBOLS)
+ALL_SYMBOLS.update(CMPOP_SYMBOLS)
+ALL_SYMBOLS.update(UNARYOP_SYMBOLS)
+
+
+def parse(expr, filename='<unknown>', mode='exec'):
+ """Parse an expression into an AST node."""
+ return compile(expr, filename, mode, PyCF_ONLY_AST)
+
+
+def to_source(node, indent_with=' ' * 4):
+ """
+ This function can convert a node tree back into python sourcecode. This
+ is useful for debugging purposes, especially if you're dealing with custom
+ asts not generated by python itself.
+
+ It could be that the sourcecode is evaluable when the AST itself is not
+ compilable / evaluable. The reason for this is that the AST contains some
+ more data than regular sourcecode does, which is dropped during
+ conversion.
+
+ Each level of indentation is replaced with `indent_with`. By default this
+ parameter is equal to four spaces, as suggested by PEP 8, but it can be
+ adjusted to match the application's style guide.
+ """
+ generator = SourceGenerator(indent_with)
+ generator.visit(node)
+ return ''.join(generator.result)
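+
+# Small illustration (not part of the original module): round-tripping a
+# trivial assignment, to_source(parse("x = 1")), yields the string "x = 1".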
+
+
+def dump(node):
+ """
+ A very verbose representation of the node passed. This is useful for
+ debugging purposes.
+ """
+ def _format(node):
+ if isinstance(node, AST):
+ return '%s(%s)' % (node.__class__.__name__,
+ ', '.join('%s=%s' % (a, _format(b))
+ for a, b in iter_fields(node)))
+ elif isinstance(node, list):
+ return '[%s]' % ', '.join(_format(x) for x in node)
+ return repr(node)
+ if not isinstance(node, AST):
+ raise TypeError('expected AST, got %r' % node.__class__.__name__)
+ return _format(node)
+
+
+def copy_location(new_node, old_node):
+ """
+ Copy the source location hint (`lineno` and `col_offset`) from the
+ old to the new node if possible and return the new one.
+ """
+ for attr in 'lineno', 'col_offset':
+ if attr in old_node._attributes and attr in new_node._attributes \
+ and hasattr(old_node, attr):
+ setattr(new_node, attr, getattr(old_node, attr))
+ return new_node
+
+
+def fix_missing_locations(node):
+ """
+ Some nodes require a line number and the column offset. Without that
+ information the compiler will abort the compilation. Because it can be
+ a dull task to add appropriate line numbers and column offsets when
+ adding new nodes this function can help. It copies the line number and
+ column offset of the parent node to the child nodes without this
+ information.
+
+ Unlike `copy_location` this works recursively and won't touch nodes that
+ already have location information.
+ """
+ def _fix(node, lineno, col_offset):
+ if 'lineno' in node._attributes:
+ if not hasattr(node, 'lineno'):
+ node.lineno = lineno
+ else:
+ lineno = node.lineno
+ if 'col_offset' in node._attributes:
+ if not hasattr(node, 'col_offset'):
+ node.col_offset = col_offset
+ else:
+ col_offset = node.col_offset
+ for child in iter_child_nodes(node):
+ _fix(child, lineno, col_offset)
+ _fix(node, 1, 0)
+ return node
+
+
+def increment_lineno(node, n=1):
+ """
+ Increment the line numbers of all nodes by `n` if they have line number
+ attributes. This is useful to "move code" to a different location in a
+ file.
+ """
+ for node in walk(node):
+ if 'lineno' in node._attributes:
+ node.lineno = getattr(node, 'lineno', 0) + n
+
+
+def iter_fields(node):
+ """Iterate over all fields of a node, only yielding existing fields."""
+ # CPython 2.5 compat
+ if not hasattr(node, '_fields') or not node._fields:
+ return
+ for field in node._fields:
+ try:
+ yield field, getattr(node, field)
+ except AttributeError:
+ pass
+
+
+def get_fields(node):
+ """Like `iter_fiels` but returns a dict."""
+ return dict(iter_fields(node))
+
+
+def iter_child_nodes(node):
+ """Iterate over all child nodes or a node."""
+ for name, field in iter_fields(node):
+ if isinstance(field, AST):
+ yield field
+ elif isinstance(field, list):
+ for item in field:
+ if isinstance(item, AST):
+ yield item
+
+
+def get_child_nodes(node):
+ """Like `iter_child_nodes` but returns a list."""
+ return list(iter_child_nodes(node))
+
+
+def get_compile_mode(node):
+ """
+ Get the mode for `compile` of a given node. If the node is not a `mod`
+ node (`Expression`, `Module` etc.) a `TypeError` is thrown.
+ """
+ if not isinstance(node, mod):
+ raise TypeError('expected mod node, got %r' % node.__class__.__name__)
+ return {
+ Expression: 'eval',
+ Interactive: 'single'
+ }.get(node.__class__, 'expr')
+
+
+def get_docstring(node):
+ """
+ Return the docstring for the given node or `None` if no docstring can be
+ found. If the node provided does not accept docstrings a `TypeError`
+ will be raised.
+ """
+ if not isinstance(node, (FunctionDef, ClassDef, Module)):
+ raise TypeError("%r can't have docstrings" % node.__class__.__name__)
+ if node.body and isinstance(node.body[0], Str):
+ return node.body[0].s
+
+
+def walk(node):
+ """
+ Iterate over all nodes. This is useful if you only want to modify nodes in
+ place and don't care about the context or the order the nodes are returned.
+ """
+ from collections import deque
+ todo = deque([node])
+ while todo:
+ node = todo.popleft()
+ todo.extend(iter_child_nodes(node))
+ yield node
+
+
+class NodeVisitor(object):
+ """
+ Walks the abstract syntax tree and calls visitor functions for every node
+ found. The visitor functions may return values which will be forwarded
+ by the `visit` method.
+
+ By default the visitor functions for the nodes are ``'visit_'`` +
+ class name of the node. So a `TryFinally` node visit function would
+ be `visit_TryFinally`. This behavior can be changed by overriding
+ the `get_visitor` function. If no visitor function exists for a node
+ (return value `None`) the `generic_visit` visitor is used instead.
+
+ Don't use the `NodeVisitor` if you want to apply changes to nodes during
+ traversing. For this a special visitor exists (`NodeTransformer`) that
+ allows modifications.
+ """
+
+ def get_visitor(self, node):
+ """
+ Return the visitor function for this node or `None` if no visitor
+ exists for this node. In that case the generic visit function is
+ used instead.
+ """
+ method = 'visit_' + node.__class__.__name__
+ return getattr(self, method, None)
+
+ def visit(self, node):
+ """Visit a node."""
+ f = self.get_visitor(node)
+ if f is not None:
+ return f(node)
+ return self.generic_visit(node)
+
+ def generic_visit(self, node):
+ """Called if no explicit visitor function exists for a node."""
+ for field, value in iter_fields(node):
+ if isinstance(value, list):
+ for item in value:
+ if isinstance(item, AST):
+ self.visit(item)
+ elif isinstance(value, AST):
+ self.visit(value)
+
+
+class NodeTransformer(NodeVisitor):
+ """
+ Walks the abstract syntax tree and allows modifications of nodes.
+
+ The `NodeTransformer` will walk the AST and use the return value of the
+ visitor functions to replace or remove the old node. If the return
+ value of the visitor function is `None` the node will be removed
+ from the previous location otherwise it's replaced with the return
+ value. The return value may be the original node in which case no
+ replacement takes place.
+
+ Here is an example transformer that rewrites all `foo` to `data['foo']`::
+
+ class RewriteName(NodeTransformer):
+
+ def visit_Name(self, node):
+ return copy_location(Subscript(
+ value=Name(id='data', ctx=Load()),
+ slice=Index(value=Str(s=node.id)),
+ ctx=node.ctx
+ ), node)
+
+ Keep in mind that if the node you're operating on has child nodes
+ you must either transform the child nodes yourself or call the generic
+ visit function for the node first.
+
+ Nodes that were part of a collection of statements (that applies to
+ all statement nodes) may also return a list of nodes rather than just
+ a single node.
+
+ Usually you use the transformer like this::
+
+ node = YourTransformer().visit(node)
+ """
+
+ def generic_visit(self, node):
+ for field, old_value in iter_fields(node):
+ old_value = getattr(node, field, None)
+ if isinstance(old_value, list):
+ new_values = []
+ for value in old_value:
+ if isinstance(value, AST):
+ value = self.visit(value)
+ if value is None:
+ continue
+ elif not isinstance(value, AST):
+ new_values.extend(value)
+ continue
+ new_values.append(value)
+ old_value[:] = new_values
+ elif isinstance(old_value, AST):
+ new_node = self.visit(old_value)
+ if new_node is None:
+ delattr(node, field)
+ else:
+ setattr(node, field, new_node)
+ return node
+
+
+class SourceGenerator(NodeVisitor):
+ """
+ This visitor is able to transform a well-formed syntax tree into Python
+ source code. For more details have a look at the docstring of the
+ `to_source` function.
+ """
+
+ def __init__(self, indent_with):
+ self.result = []
+ self.indent_with = indent_with
+ self.indentation = 0
+ self.new_lines = 0
+
+ def write(self, x):
+ if self.new_lines:
+ if self.result:
+ self.result.append('\n' * self.new_lines)
+ self.result.append(self.indent_with * self.indentation)
+ self.new_lines = 0
+ self.result.append(x)
+
+ def newline(self, n=1):
+ self.new_lines = max(self.new_lines, n)
+
+ def body(self, statements):
+ self.new_line = True
+ self.indentation += 1
+ for stmt in statements:
+ self.visit(stmt)
+ self.indentation -= 1
+
+ def body_or_else(self, node):
+ self.body(node.body)
+ if node.orelse:
+ self.newline()
+ self.write('else:')
+ self.body(node.orelse)
+
+ def signature(self, node):
+ want_comma = []
+ def write_comma():
+ if want_comma:
+ self.write(', ')
+ else:
+ want_comma.append(True)
+
+ padding = [None] * (len(node.args) - len(node.defaults))
+ for arg, default in zip(node.args, padding + node.defaults):
+ write_comma()
+ self.visit(arg)
+ if default is not None:
+ self.write('=')
+ self.visit(default)
+ if node.vararg is not None:
+ write_comma()
+ self.write('*' + arg_stringname(node.vararg))
+ if node.kwarg is not None:
+ write_comma()
+ self.write('**' + arg_stringname(node.kwarg))
+
+ def decorators(self, node):
+ for decorator in node.decorator_list:
+ self.newline()
+ self.write('@')
+ self.visit(decorator)
+
+ # Statements
+
+ def visit_Assign(self, node):
+ self.newline()
+ for idx, target in enumerate(node.targets):
+ if idx:
+ self.write(', ')
+ self.visit(target)
+ self.write(' = ')
+ self.visit(node.value)
+
+ def visit_AugAssign(self, node):
+ self.newline()
+ self.visit(node.target)
+ self.write(BINOP_SYMBOLS[type(node.op)] + '=')
+ self.visit(node.value)
+
+ def visit_ImportFrom(self, node):
+ self.newline()
+ self.write('from %s%s import ' % ('.' * node.level, node.module))
+ for idx, item in enumerate(node.names):
+ if idx:
+ self.write(', ')
+ self.write(item)
+
+ def visit_Import(self, node):
+ self.newline()
+ for item in node.names:
+ self.write('import ')
+ self.visit(item)
+
+ def visit_Expr(self, node):
+ self.newline()
+ self.generic_visit(node)
+
+ def visit_FunctionDef(self, node):
+ self.newline(n=2)
+ self.decorators(node)
+ self.newline()
+ self.write('def %s(' % node.name)
+ self.signature(node.args)
+ self.write('):')
+ self.body(node.body)
+
+ def visit_ClassDef(self, node):
+ have_args = []
+ def paren_or_comma():
+ if have_args:
+ self.write(', ')
+ else:
+ have_args.append(True)
+ self.write('(')
+
+ self.newline(n=3)
+ self.decorators(node)
+ self.newline()
+ self.write('class %s' % node.name)
+ for base in node.bases:
+ paren_or_comma()
+ self.visit(base)
+ # XXX: the if here is used to keep this module compatible
+ # with python 2.6.
+ if hasattr(node, 'keywords'):
+ for keyword in node.keywords:
+ paren_or_comma()
+ self.write(keyword.arg + '=')
+ self.visit(keyword.value)
+ if node.starargs is not None:
+ paren_or_comma()
+ self.write('*')
+ self.visit(node.starargs)
+ if node.kwargs is not None:
+ paren_or_comma()
+ self.write('**')
+ self.visit(node.kwargs)
+ self.write(have_args and '):' or ':')
+ self.body(node.body)
+
+ def visit_If(self, node):
+ self.newline()
+ self.write('if ')
+ self.visit(node.test)
+ self.write(':')
+ self.body(node.body)
+ while True:
+ else_ = node.orelse
+ if len(else_) == 1 and isinstance(else_[0], If):
+ node = else_[0]
+ self.newline()
+ self.write('elif ')
+ self.visit(node.test)
+ self.write(':')
+ self.body(node.body)
+ else:
+ self.newline()
+ self.write('else:')
+ self.body(else_)
+ break
+
+ def visit_For(self, node):
+ self.newline()
+ self.write('for ')
+ self.visit(node.target)
+ self.write(' in ')
+ self.visit(node.iter)
+ self.write(':')
+ self.body_or_else(node)
+
+ def visit_While(self, node):
+ self.newline()
+ self.write('while ')
+ self.visit(node.test)
+ self.write(':')
+ self.body_or_else(node)
+
+ def visit_With(self, node):
+ self.newline()
+ self.write('with ')
+ self.visit(node.context_expr)
+ if node.optional_vars is not None:
+ self.write(' as ')
+ self.visit(node.optional_vars)
+ self.write(':')
+ self.body(node.body)
+
+ def visit_Pass(self, node):
+ self.newline()
+ self.write('pass')
+
+ def visit_Print(self, node):
+ # XXX: python 2.6 only
+ self.newline()
+ self.write('print ')
+ want_comma = False
+ if node.dest is not None:
+ self.write(' >> ')
+ self.visit(node.dest)
+ want_comma = True
+ for value in node.values:
+ if want_comma:
+ self.write(', ')
+ self.visit(value)
+ want_comma = True
+ if not node.nl:
+ self.write(',')
+
+ def visit_Delete(self, node):
+ self.newline()
+ self.write('del ')
+ for idx, target in enumerate(node.targets):
+ if idx:
+ self.write(', ')
+ self.visit(target)
+
+ def visit_TryExcept(self, node):
+ self.newline()
+ self.write('try:')
+ self.body(node.body)
+ for handler in node.handlers:
+ self.visit(handler)
+
+ def visit_TryFinally(self, node):
+ self.newline()
+ self.write('try:')
+ self.body(node.body)
+ self.newline()
+ self.write('finally:')
+ self.body(node.finalbody)
+
+ def visit_Global(self, node):
+ self.newline()
+ self.write('global ' + ', '.join(node.names))
+
+ def visit_Nonlocal(self, node):
+ self.newline()
+ self.write('nonlocal ' + ', '.join(node.names))
+
+ def visit_Return(self, node):
+ self.newline()
+ self.write('return ')
+ self.visit(node.value)
+
+ def visit_Break(self, node):
+ self.newline()
+ self.write('break')
+
+ def visit_Continue(self, node):
+ self.newline()
+ self.write('continue')
+
+ def visit_Raise(self, node):
+ # XXX: Python 2.6 / 3.0 compatibility
+ self.newline()
+ self.write('raise')
+ if hasattr(node, 'exc') and node.exc is not None:
+ self.write(' ')
+ self.visit(node.exc)
+ if node.cause is not None:
+ self.write(' from ')
+ self.visit(node.cause)
+ elif hasattr(node, 'type') and node.type is not None:
+ self.visit(node.type)
+ if node.inst is not None:
+ self.write(', ')
+ self.visit(node.inst)
+ if node.tback is not None:
+ self.write(', ')
+ self.visit(node.tback)
+
+ # Expressions
+
+ def visit_Attribute(self, node):
+ self.visit(node.value)
+ self.write('.' + node.attr)
+
+ def visit_Call(self, node):
+ want_comma = []
+ def write_comma():
+ if want_comma:
+ self.write(', ')
+ else:
+ want_comma.append(True)
+
+ self.visit(node.func)
+ self.write('(')
+ for arg in node.args:
+ write_comma()
+ self.visit(arg)
+ for keyword in node.keywords:
+ write_comma()
+ self.write(keyword.arg + '=')
+ self.visit(keyword.value)
+ if node.starargs is not None:
+ write_comma()
+ self.write('*')
+ self.visit(node.starargs)
+ if node.kwargs is not None:
+ write_comma()
+ self.write('**')
+ self.visit(node.kwargs)
+ self.write(')')
+
+ def visit_Name(self, node):
+ self.write(node.id)
+
+ def visit_NameConstant(self, node):
+ self.write(str(node.value))
+
+ def visit_arg(self, node):
+ self.write(node.arg)
+
+ def visit_Str(self, node):
+ self.write(repr(node.s))
+
+ def visit_Bytes(self, node):
+ self.write(repr(node.s))
+
+ def visit_Num(self, node):
+ self.write(repr(node.n))
+
+ def visit_Tuple(self, node):
+ self.write('(')
+ idx = -1
+ for idx, item in enumerate(node.elts):
+ if idx:
+ self.write(', ')
+ self.visit(item)
+ self.write(idx and ')' or ',)')
+
+ def sequence_visit(left, right):
+ def visit(self, node):
+ self.write(left)
+ for idx, item in enumerate(node.elts):
+ if idx:
+ self.write(', ')
+ self.visit(item)
+ self.write(right)
+ return visit
+
+ visit_List = sequence_visit('[', ']')
+ visit_Set = sequence_visit('{', '}')
+ del sequence_visit
+
+ def visit_Dict(self, node):
+ self.write('{')
+ for idx, (key, value) in enumerate(zip(node.keys, node.values)):
+ if idx:
+ self.write(', ')
+ self.visit(key)
+ self.write(': ')
+ self.visit(value)
+ self.write('}')
+
+ def visit_BinOp(self, node):
+ self.write('(')
+ self.visit(node.left)
+ self.write(' %s ' % BINOP_SYMBOLS[type(node.op)])
+ self.visit(node.right)
+ self.write(')')
+
+ def visit_BoolOp(self, node):
+ self.write('(')
+ for idx, value in enumerate(node.values):
+ if idx:
+ self.write(' %s ' % BOOLOP_SYMBOLS[type(node.op)])
+ self.visit(value)
+ self.write(')')
+
+ def visit_Compare(self, node):
+ self.write('(')
+ self.visit(node.left)
+ for op, right in zip(node.ops, node.comparators):
+ self.write(' %s ' % CMPOP_SYMBOLS[type(op)])
+ self.visit(right)
+ self.write(')')
+
+ def visit_UnaryOp(self, node):
+ self.write('(')
+ op = UNARYOP_SYMBOLS[type(node.op)]
+ self.write(op)
+ if op == 'not':
+ self.write(' ')
+ self.visit(node.operand)
+ self.write(')')
+
+ def visit_Subscript(self, node):
+ self.visit(node.value)
+ self.write('[')
+ self.visit(node.slice)
+ self.write(']')
+
+ def visit_Slice(self, node):
+ if node.lower is not None:
+ self.visit(node.lower)
+ self.write(':')
+ if node.upper is not None:
+ self.visit(node.upper)
+ if node.step is not None:
+ self.write(':')
+ if not (isinstance(node.step, Name) and node.step.id == 'None'):
+ self.visit(node.step)
+
+ def visit_ExtSlice(self, node):
+ for idx, item in enumerate(node.dims):
+ if idx:
+ self.write(', ')
+ self.visit(item)
+
+ def visit_Yield(self, node):
+ self.write('yield ')
+ self.visit(node.value)
+
+ def visit_Lambda(self, node):
+ self.write('lambda ')
+ self.signature(node.args)
+ self.write(': ')
+ self.visit(node.body)
+
+ def visit_Ellipsis(self, node):
+ self.write('Ellipsis')
+
+ def generator_visit(left, right):
+ def visit(self, node):
+ self.write(left)
+ self.visit(node.elt)
+ for comprehension in node.generators:
+ self.visit(comprehension)
+ self.write(right)
+ return visit
+
+ visit_ListComp = generator_visit('[', ']')
+ visit_GeneratorExp = generator_visit('(', ')')
+ visit_SetComp = generator_visit('{', '}')
+ del generator_visit
+
+ def visit_DictComp(self, node):
+ self.write('{')
+ self.visit(node.key)
+ self.write(': ')
+ self.visit(node.value)
+ for comprehension in node.generators:
+ self.visit(comprehension)
+ self.write('}')
+
+ def visit_IfExp(self, node):
+ self.visit(node.body)
+ self.write(' if ')
+ self.visit(node.test)
+ self.write(' else ')
+ self.visit(node.orelse)
+
+ def visit_Starred(self, node):
+ self.write('*')
+ self.visit(node.value)
+
+ def visit_Repr(self, node):
+ # XXX: python 2.6 only
+ self.write('`')
+ self.visit(node.value)
+ self.write('`')
+
+ # Helper Nodes
+
+ def visit_alias(self, node):
+ self.write(node.name)
+ if node.asname is not None:
+ self.write(' as ' + node.asname)
+
+ def visit_comprehension(self, node):
+ self.write(' for ')
+ self.visit(node.target)
+ self.write(' in ')
+ self.visit(node.iter)
+ if node.ifs:
+ for if_ in node.ifs:
+ self.write(' if ')
+ self.visit(if_)
+
+ def visit_excepthandler(self, node):
+ self.newline()
+ self.write('except')
+ if node.type is not None:
+ self.write(' ')
+ self.visit(node.type)
+ if node.name is not None:
+ self.write(' as ')
+ self.visit(node.name)
+ self.write(':')
+ self.body(node.body)
--- /dev/null
+# mako/ast.py
+# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""utilities for analyzing expressions and blocks of Python
+code, as well as generating Python from AST nodes"""
+
+from mako import exceptions, pyparser, compat
+import re
+
+class PythonCode(object):
+ """represents information about a string containing Python code"""
+ def __init__(self, code, **exception_kwargs):
+ self.code = code
+
+ # represents all identifiers which are assigned to at some point in
+ # the code
+ self.declared_identifiers = set()
+
+ # represents all identifiers which are referenced before their
+ # assignment, if any
+ self.undeclared_identifiers = set()
+
+ # note that an identifier can be in both the undeclared and declared
+ # lists.
+
+ # using AST to parse instead of using code.co_varnames,
+ # code.co_names has several advantages:
+ # - we can locate an identifier as "undeclared" even if
+ # it's declared later in the same block of code
+ # - AST is less likely to break with version changes
+ # (for example, the behavior of co_names changed a little bit
+ # in python version 2.5)
+ if isinstance(code, compat.string_types):
+ expr = pyparser.parse(code.lstrip(), "exec", **exception_kwargs)
+ else:
+ expr = code
+
+ f = pyparser.FindIdentifiers(self, **exception_kwargs)
+ f.visit(expr)
+
+class ArgumentList(object):
+ """parses a fragment of code as a comma-separated list of expressions"""
+ def __init__(self, code, **exception_kwargs):
+ self.codeargs = []
+ self.args = []
+ self.declared_identifiers = set()
+ self.undeclared_identifiers = set()
+ if isinstance(code, compat.string_types):
+ if re.match(r"\S", code) and not re.match(r",\s*$", code):
+ # if there's text and no trailing comma, ensure it's parsed
+ # as a tuple by adding a trailing comma
+ code += ","
+ expr = pyparser.parse(code, "exec", **exception_kwargs)
+ else:
+ expr = code
+
+ f = pyparser.FindTuple(self, PythonCode, **exception_kwargs)
+ f.visit(expr)
+
+class PythonFragment(PythonCode):
+ """extends PythonCode to provide identifier lookups in partial control
+ statements
+
+ e.g.
+ for x in 5:
+ elif y==9:
+ except (MyException, e):
+ etc.
+ """
+ def __init__(self, code, **exception_kwargs):
+ m = re.match(r'^(\w+)(?:\s+(.*?))?:\s*(#|$)', code.strip(), re.S)
+ if not m:
+ raise exceptions.CompileException(
+ "Fragment '%s' is not a partial control statement" %
+ code, **exception_kwargs)
+ if m.group(3):
+ code = code[:m.start(3)]
+ (keyword, expr) = m.group(1,2)
+ if keyword in ['for','if', 'while']:
+ code = code + "pass"
+ elif keyword == 'try':
+ code = code + "pass\nexcept:pass"
+ elif keyword == 'elif' or keyword == 'else':
+ code = "if False:pass\n" + code + "pass"
+ elif keyword == 'except':
+ code = "try:pass\n" + code + "pass"
+ elif keyword == 'with':
+ code = code + "pass"
+ else:
+ raise exceptions.CompileException(
+ "Unsupported control keyword: '%s'" %
+ keyword, **exception_kwargs)
+ super(PythonFragment, self).__init__(code, **exception_kwargs)
+
+
+class FunctionDecl(object):
+ """function declaration"""
+ def __init__(self, code, allow_kwargs=True, **exception_kwargs):
+ self.code = code
+ expr = pyparser.parse(code, "exec", **exception_kwargs)
+
+ f = pyparser.ParseFunc(self, **exception_kwargs)
+ f.visit(expr)
+ if not hasattr(self, 'funcname'):
+ raise exceptions.CompileException(
+ "Code '%s' is not a function declaration" % code,
+ **exception_kwargs)
+ if not allow_kwargs and self.kwargs:
+ raise exceptions.CompileException(
+ "'**%s' keyword argument not allowed here" %
+ self.kwargnames[-1], **exception_kwargs)
+
+ def get_argument_expressions(self, as_call=False):
+ """Return the argument declarations of this FunctionDecl as a printable
+ list.
+
+ By default the return value is appropriate for writing in a ``def``;
+ set `as_call` to true to build arguments to be passed to the function
+ instead (assuming locals with the same names as the arguments exist).
+ """
+
+ namedecls = []
+
+ # Build in reverse order, since defaults and slurpy args come last
+ argnames = self.argnames[::-1]
+ kwargnames = self.kwargnames[::-1]
+ defaults = self.defaults[::-1]
+ kwdefaults = self.kwdefaults[::-1]
+
+ # Named arguments
+ if self.kwargs:
+ namedecls.append("**" + kwargnames.pop(0))
+
+ for name in kwargnames:
+ # Keyword-only arguments must always be used by name, so even if
+ # this is a call, print out `foo=foo`
+ if as_call:
+ namedecls.append("%s=%s" % (name, name))
+ elif kwdefaults:
+ default = kwdefaults.pop(0)
+ if default is None:
+ # The AST always gives kwargs a default, since you can do
+ # `def foo(*, a=1, b, c=3)`
+ namedecls.append(name)
+ else:
+ namedecls.append("%s=%s" % (
+ name, pyparser.ExpressionGenerator(default).value()))
+ else:
+ namedecls.append(name)
+
+ # Positional arguments
+ if self.varargs:
+ namedecls.append("*" + argnames.pop(0))
+
+ for name in argnames:
+ if as_call or not defaults:
+ namedecls.append(name)
+ else:
+ default = defaults.pop(0)
+ namedecls.append("%s=%s" % (
+ name, pyparser.ExpressionGenerator(default).value()))
+
+ namedecls.reverse()
+ return namedecls
+
+ @property
+ def allargnames(self):
+ return tuple(self.argnames) + tuple(self.kwargnames)
+
+class FunctionArgs(FunctionDecl):
+ """the argument portion of a function declaration"""
+
+ def __init__(self, code, **kwargs):
+ super(FunctionArgs, self).__init__("def ANON(%s):pass" % code,
+ **kwargs)
--- /dev/null
+# mako/cache.py
+# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+from mako import compat, util
+
+_cache_plugins = util.PluginLoader("mako.cache")
+
+register_plugin = _cache_plugins.register
+register_plugin("beaker", "mako.ext.beaker_cache", "BeakerCacheImpl")
+
+
+class Cache(object):
+ """Represents a data content cache made available to the module
+ space of a specific :class:`.Template` object.
+
+ .. versionadded:: 0.6
+ :class:`.Cache` by itself is mostly a
+ container for a :class:`.CacheImpl` object, which implements
+ a fixed API to provide caching services; specific subclasses exist to
+ implement different
+ caching strategies. Mako includes a backend that works with
+ the Beaker caching system. Beaker itself then supports
+ a number of backends (i.e. file, memory, memcached, etc.)
+
+ The construction of a :class:`.Cache` is part of the mechanics
+ of a :class:`.Template`, and programmatic access to this
+ cache is typically via the :attr:`.Template.cache` attribute.
+
+ """
+
+ impl = None
+ """Provide the :class:`.CacheImpl` in use by this :class:`.Cache`.
+
+ This accessor allows a :class:`.CacheImpl` with additional
+ methods beyond that of :class:`.Cache` to be used programmatically.
+
+ """
+
+ id = None
+ """Return the 'id' that identifies this cache.
+
+ This is a value that should be globally unique to the
+ :class:`.Template` associated with this cache, and can
+ be used by a caching system to name a local container
+ for data specific to this template.
+
+ """
+
+ starttime = None
+ """Epochal time value for when the owning :class:`.Template` was
+ first compiled.
+
+ A cache implementation may wish to invalidate data earlier than
+ this timestamp; this has the effect of the cache for a specific
+ :class:`.Template` starting clean any time the :class:`.Template`
+ is recompiled, such as when the original template file changed on
+ the filesystem.
+
+ """
+
+ def __init__(self, template, *args):
+ # check for a stale template calling the
+ # constructor
+ if isinstance(template, compat.string_types) and args:
+ return
+ self.template = template
+ self.id = template.module.__name__
+ self.starttime = template.module._modified_time
+ self._def_regions = {}
+ self.impl = self._load_impl(self.template.cache_impl)
+
+ def _load_impl(self, name):
+ return _cache_plugins.load(name)(self)
+
+ def get_or_create(self, key, creation_function, **kw):
+ """Retrieve a value from the cache, using the given creation function
+ to generate a new value."""
+
+ return self._ctx_get_or_create(key, creation_function, None, **kw)
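+
+ # Illustrative sketch (not from the original module): given a Template `t`,
+ # t.cache.get_or_create('some_key', lambda: compute_value())
+ # returns the cached value when present, otherwise calls the function and
+ # stores the result through the configured CacheImpl.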
+
+ def _ctx_get_or_create(self, key, creation_function, context, **kw):
+ """Retrieve a value from the cache, using the given creation function
+ to generate a new value."""
+
+ if not self.template.cache_enabled:
+ return creation_function()
+
+ return self.impl.get_or_create(
+ key,
+ creation_function,
+ **self._get_cache_kw(kw, context))
+
+ def set(self, key, value, **kw):
+ """Place a value in the cache.
+
+ :param key: the value's key.
+ :param value: the value.
+ :param \**kw: cache configuration arguments.
+
+ """
+
+ self.impl.set(key, value, **self._get_cache_kw(kw, None))
+
+ put = set
+ """A synonym for :meth:`.Cache.set`.
+
+ This is here for backwards compatibility.
+
+ """
+
+ def get(self, key, **kw):
+ """Retrieve a value from the cache.
+
+ :param key: the value's key.
+ :param \**kw: cache configuration arguments. The
+ backend is configured using these arguments upon first request.
+ Subsequent requests that use the same series of configuration
+ values will use that same backend.
+
+ """
+ return self.impl.get(key, **self._get_cache_kw(kw, None))
+
+ def invalidate(self, key, **kw):
+ """Invalidate a value in the cache.
+
+ :param key: the value's key.
+ :param \**kw: cache configuration arguments. The
+ backend is configured using these arguments upon first request.
+ Subsequent requests that use the same series of configuration
+ values will use that same backend.
+
+ """
+ self.impl.invalidate(key, **self._get_cache_kw(kw, None))
+
+ def invalidate_body(self):
+ """Invalidate the cached content of the "body" method for this
+ template.
+
+ """
+ self.invalidate('render_body', __M_defname='render_body')
+
+ def invalidate_def(self, name):
+ """Invalidate the cached content of a particular ``<%def>`` within this
+ template.
+
+ """
+
+ self.invalidate('render_%s' % name, __M_defname='render_%s' % name)
+
+ def invalidate_closure(self, name):
+ """Invalidate a nested ``<%def>`` within this template.
+
+ Caching of nested defs is a blunt tool as there is no
+ management of scope -- nested defs that use cache tags
+ need to have names unique among all other nested defs in the
+ template, else their content will be overwritten by
+ each other.
+
+ """
+
+ self.invalidate(name, __M_defname=name)
+
+ def _get_cache_kw(self, kw, context):
+ defname = kw.pop('__M_defname', None)
+ if not defname:
+ tmpl_kw = self.template.cache_args.copy()
+ tmpl_kw.update(kw)
+ elif defname in self._def_regions:
+ tmpl_kw = self._def_regions[defname]
+ else:
+ tmpl_kw = self.template.cache_args.copy()
+ tmpl_kw.update(kw)
+ self._def_regions[defname] = tmpl_kw
+ if context and self.impl.pass_context:
+ tmpl_kw = tmpl_kw.copy()
+ tmpl_kw.setdefault('context', context)
+ return tmpl_kw
+
+
+class CacheImpl(object):
+ """Provide a cache implementation for use by :class:`.Cache`."""
+
+ def __init__(self, cache):
+ self.cache = cache
+
+ pass_context = False
+ """If ``True``, the :class:`.Context` will be passed to
+ :meth:`get_or_create <.CacheImpl.get_or_create>` as the name ``'context'``.
+ """
+
+ def get_or_create(self, key, creation_function, **kw):
+ """Retrieve a value from the cache, using the given creation function
+ to generate a new value.
+
+ This function *must* return a value, either from
+ the cache, or via the given creation function.
+ If the creation function is called, the newly
+ created value should be populated into the cache
+ under the given key before being returned.
+
+ :param key: the value's key.
+ :param creation_function: function that when called generates
+ a new value.
+ :param \**kw: cache configuration arguments.
+
+ """
+ raise NotImplementedError()
+
+ def set(self, key, value, **kw):
+ """Place a value in the cache.
+
+ :param key: the value's key.
+ :param value: the value.
+ :param \**kw: cache configuration arguments.
+
+ """
+ raise NotImplementedError()
+
+ def get(self, key, **kw):
+ """Retrieve a value from the cache.
+
+ :param key: the value's key.
+ :param \**kw: cache configuration arguments.
+
+ """
+ raise NotImplementedError()
+
+ def invalidate(self, key, **kw):
+ """Invalidate a value in the cache.
+
+ :param key: the value's key.
+ :param \**kw: cache configuration arguments.
+
+ """
+ raise NotImplementedError()
--- /dev/null
+# mako/cmd.py
+# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+from argparse import ArgumentParser
+from os.path import isfile, dirname
+import sys
+from mako.template import Template
+from mako.lookup import TemplateLookup
+from mako import exceptions
+
+def varsplit(var):
+ if "=" not in var:
+ return (var, "")
+ return var.split("=", 1)
+
+def _exit():
+ sys.stderr.write(exceptions.text_error_template().render())
+ sys.exit(1)
+
+def cmdline(argv=None):
+
+ parser = ArgumentParser("usage: %prog [FILENAME]")
+ parser.add_argument("--var", default=[], action="append",
+ help="variable (can be used multiple times, use name=value)")
+ parser.add_argument("--template-dir", default=[], action="append",
+ help="Directory to use for template lookup (multiple "
+ "directories may be provided). If not given then if the "
+ "template is read from stdin, the value defaults to be "
+ "the current directory, otherwise it defaults to be the "
+ "parent directory of the file provided.")
+ parser.add_argument('input', nargs='?', default='-')
+
+ options = parser.parse_args(argv)
+ if options.input == '-':
+ lookup_dirs = options.template_dir or ["."]
+ lookup = TemplateLookup(lookup_dirs)
+ try:
+ template = Template(sys.stdin.read(), lookup=lookup)
+ except:
+ _exit()
+ else:
+ filename = options.input
+ if not isfile(filename):
+ raise SystemExit("error: can't find %s" % filename)
+ lookup_dirs = options.template_dir or [dirname(filename)]
+ lookup = TemplateLookup(lookup_dirs)
+ try:
+ template = Template(filename=filename, lookup=lookup)
+ except:
+ _exit()
+
+ kw = dict([varsplit(var) for var in options.var])
+ try:
+ print(template.render(**kw))
+ except:
+ _exit()
+
+
+if __name__ == "__main__":
+ cmdline()
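+
+# Usage sketch (the file name is a placeholder, for illustration only):
+#   python cmd.py --var name=value hello.txt
+# renders hello.txt with `name` bound in the template context; pass "-" (or
+# no filename) to read the template from stdin instead.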
--- /dev/null
+# mako/codegen.py
+# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""provides functionality for rendering a parsetree constructing into module
+source code."""
+
+import time
+import re
+from mako.pygen import PythonPrinter
+from mako import util, ast, parsetree, filters, exceptions
+from mako import compat
+
+
+MAGIC_NUMBER = 10
+
+# names which are hardwired into the
+# template and are not accessed via the
+# context itself
+RESERVED_NAMES = set(['context', 'loop', 'UNDEFINED'])
+
+def compile(node,
+ uri,
+ filename=None,
+ default_filters=None,
+ buffer_filters=None,
+ imports=None,
+ future_imports=None,
+ source_encoding=None,
+ generate_magic_comment=True,
+ disable_unicode=False,
+ strict_undefined=False,
+ enable_loop=True,
+ reserved_names=frozenset()):
+
+ """Generate module source code given a parsetree node,
+ uri, and optional source filename"""
+
+ # if on Py2K, push the "source_encoding" string to be
+ # a bytestring itself, as we will be embedding it into
+ # the generated source and we don't want to coerce the
+ # result into a unicode object, in "disable_unicode" mode
+ if not compat.py3k and isinstance(source_encoding, compat.text_type):
+ source_encoding = source_encoding.encode(source_encoding)
+
+
+ buf = util.FastEncodingBuffer()
+
+ printer = PythonPrinter(buf)
+ _GenerateRenderMethod(printer,
+ _CompileContext(uri,
+ filename,
+ default_filters,
+ buffer_filters,
+ imports,
+ future_imports,
+ source_encoding,
+ generate_magic_comment,
+ disable_unicode,
+ strict_undefined,
+ enable_loop,
+ reserved_names),
+ node)
+ return buf.getvalue()
+
+class _CompileContext(object):
+ def __init__(self,
+ uri,
+ filename,
+ default_filters,
+ buffer_filters,
+ imports,
+ future_imports,
+ source_encoding,
+ generate_magic_comment,
+ disable_unicode,
+ strict_undefined,
+ enable_loop,
+ reserved_names):
+ self.uri = uri
+ self.filename = filename
+ self.default_filters = default_filters
+ self.buffer_filters = buffer_filters
+ self.imports = imports
+ self.future_imports = future_imports
+ self.source_encoding = source_encoding
+ self.generate_magic_comment = generate_magic_comment
+ self.disable_unicode = disable_unicode
+ self.strict_undefined = strict_undefined
+ self.enable_loop = enable_loop
+ self.reserved_names = reserved_names
+
+class _GenerateRenderMethod(object):
+ """A template visitor object which generates the
+ full module source for a template.
+
+ """
+ def __init__(self, printer, compiler, node):
+ self.printer = printer
+ self.compiler = compiler
+ self.node = node
+ self.identifier_stack = [None]
+ self.in_def = isinstance(node, (parsetree.DefTag, parsetree.BlockTag))
+
+ if self.in_def:
+ name = "render_%s" % node.funcname
+ args = node.get_argument_expressions()
+ filtered = len(node.filter_args.args) > 0
+ buffered = eval(node.attributes.get('buffered', 'False'))
+ cached = eval(node.attributes.get('cached', 'False'))
+ defs = None
+ pagetag = None
+ if node.is_block and not node.is_anonymous:
+ args += ['**pageargs']
+ else:
+ defs = self.write_toplevel()
+ pagetag = self.compiler.pagetag
+ name = "render_body"
+ if pagetag is not None:
+ args = pagetag.body_decl.get_argument_expressions()
+ if not pagetag.body_decl.kwargs:
+ args += ['**pageargs']
+ cached = eval(pagetag.attributes.get('cached', 'False'))
+ self.compiler.enable_loop = self.compiler.enable_loop or eval(
+ pagetag.attributes.get(
+ 'enable_loop', 'False')
+ )
+ else:
+ args = ['**pageargs']
+ cached = False
+ buffered = filtered = False
+ if args is None:
+ args = ['context']
+ else:
+ args = [a for a in ['context'] + args]
+
+ self.write_render_callable(
+ pagetag or node,
+ name, args,
+ buffered, filtered, cached)
+
+ if defs is not None:
+ for node in defs:
+ _GenerateRenderMethod(printer, compiler, node)
+
+ if not self.in_def:
+ self.write_metadata_struct()
+
+ def write_metadata_struct(self):
+ self.printer.source_map[self.printer.lineno] = \
+ max(self.printer.source_map)
+ struct = {
+ "filename": self.compiler.filename,
+ "uri": self.compiler.uri,
+ "source_encoding": self.compiler.source_encoding,
+ "line_map": self.printer.source_map,
+ }
+ self.printer.writelines(
+ '"""',
+ '__M_BEGIN_METADATA',
+ compat.json.dumps(struct),
+ '__M_END_METADATA\n'
+ '"""'
+ )
+
+ @property
+ def identifiers(self):
+ return self.identifier_stack[-1]
+
+ def write_toplevel(self):
+ """Traverse a template structure for module-level directives and
+ generate the start of module-level code.
+
+ """
+ inherit = []
+ namespaces = {}
+ module_code = []
+
+ self.compiler.pagetag = None
+
+ class FindTopLevel(object):
+ def visitInheritTag(s, node):
+ inherit.append(node)
+ def visitNamespaceTag(s, node):
+ namespaces[node.name] = node
+ def visitPageTag(s, node):
+ self.compiler.pagetag = node
+ def visitCode(s, node):
+ if node.ismodule:
+ module_code.append(node)
+
+ f = FindTopLevel()
+ for n in self.node.nodes:
+ n.accept_visitor(f)
+
+ self.compiler.namespaces = namespaces
+
+ module_ident = set()
+ for n in module_code:
+ module_ident = module_ident.union(n.declared_identifiers())
+
+ module_identifiers = _Identifiers(self.compiler)
+ module_identifiers.declared = module_ident
+
+ # module-level names, python code
+ if self.compiler.generate_magic_comment and \
+ self.compiler.source_encoding:
+ self.printer.writeline("# -*- coding:%s -*-" %
+ self.compiler.source_encoding)
+
+ if self.compiler.future_imports:
+ self.printer.writeline("from __future__ import %s" %
+ (", ".join(self.compiler.future_imports),))
+ self.printer.writeline("from mako import runtime, filters, cache")
+ self.printer.writeline("UNDEFINED = runtime.UNDEFINED")
+ self.printer.writeline("__M_dict_builtin = dict")
+ self.printer.writeline("__M_locals_builtin = locals")
+ self.printer.writeline("_magic_number = %r" % MAGIC_NUMBER)
+ self.printer.writeline("_modified_time = %r" % time.time())
+ self.printer.writeline("_enable_loop = %r" % self.compiler.enable_loop)
+ self.printer.writeline(
+ "_template_filename = %r" % self.compiler.filename)
+ self.printer.writeline("_template_uri = %r" % self.compiler.uri)
+ self.printer.writeline(
+ "_source_encoding = %r" % self.compiler.source_encoding)
+ if self.compiler.imports:
+ buf = ''
+ for imp in self.compiler.imports:
+ buf += imp + "\n"
+ self.printer.writeline(imp)
+ impcode = ast.PythonCode(
+ buf,
+ source='', lineno=0,
+ pos=0,
+ filename='template defined imports')
+ else:
+ impcode = None
+
+ main_identifiers = module_identifiers.branch(self.node)
+ module_identifiers.topleveldefs = \
+ module_identifiers.topleveldefs.\
+ union(main_identifiers.topleveldefs)
+ module_identifiers.declared.add("UNDEFINED")
+ if impcode:
+ module_identifiers.declared.update(impcode.declared_identifiers)
+
+ self.compiler.identifiers = module_identifiers
+ self.printer.writeline("_exports = %r" %
+ [n.name for n in
+ main_identifiers.topleveldefs.values()]
+ )
+ self.printer.write_blanks(2)
+
+ if len(module_code):
+ self.write_module_code(module_code)
+
+ if len(inherit):
+ self.write_namespaces(namespaces)
+ self.write_inherit(inherit[-1])
+ elif len(namespaces):
+ self.write_namespaces(namespaces)
+
+ return list(main_identifiers.topleveldefs.values())
+
+ def write_render_callable(self, node, name, args, buffered, filtered,
+ cached):
+ """write a top-level render callable.
+
+ this could be the main render() method or that of a top-level def."""
+
+ if self.in_def:
+ decorator = node.decorator
+ if decorator:
+ self.printer.writeline(
+ "@runtime._decorate_toplevel(%s)" % decorator)
+
+ self.printer.start_source(node.lineno)
+ self.printer.writelines(
+ "def %s(%s):" % (name, ','.join(args)),
+ # push new frame, assign current frame to __M_caller
+ "__M_caller = context.caller_stack._push_frame()",
+ "try:"
+ )
+ if buffered or filtered or cached:
+ self.printer.writeline("context._push_buffer()")
+
+ self.identifier_stack.append(
+ self.compiler.identifiers.branch(self.node))
+ if (not self.in_def or self.node.is_block) and '**pageargs' in args:
+ self.identifier_stack[-1].argument_declared.add('pageargs')
+
+ if not self.in_def and (
+ len(self.identifiers.locally_assigned) > 0 or
+ len(self.identifiers.argument_declared) > 0
+ ):
+ self.printer.writeline("__M_locals = __M_dict_builtin(%s)" %
+ ','.join([
+ "%s=%s" % (x, x) for x in
+ self.identifiers.argument_declared
+ ]))
+
+ self.write_variable_declares(self.identifiers, toplevel=True)
+
+ for n in self.node.nodes:
+ n.accept_visitor(self)
+
+ self.write_def_finish(self.node, buffered, filtered, cached)
+ self.printer.writeline(None)
+ self.printer.write_blanks(2)
+ if cached:
+ self.write_cache_decorator(
+ node, name,
+ args, buffered,
+ self.identifiers, toplevel=True)
+
+ def write_module_code(self, module_code):
+ """write module-level template code, i.e. that which
+ is enclosed in <%! %> tags in the template."""
+ for n in module_code:
+ self.printer.start_source(n.lineno)
+ self.printer.write_indented_block(n.text)
+
+ def write_inherit(self, node):
+ """write the module-level inheritance-determination callable."""
+
+ self.printer.writelines(
+ "def _mako_inherit(template, context):",
+ "_mako_generate_namespaces(context)",
+ "return runtime._inherit_from(context, %s, _template_uri)" %
+ (node.parsed_attributes['file']),
+ None
+ )
+
+ def write_namespaces(self, namespaces):
+ """write the module-level namespace-generating callable."""
+ self.printer.writelines(
+ "def _mako_get_namespace(context, name):",
+ "try:",
+ "return context.namespaces[(__name__, name)]",
+ "except KeyError:",
+ "_mako_generate_namespaces(context)",
+ "return context.namespaces[(__name__, name)]",
+ None, None
+ )
+ self.printer.writeline("def _mako_generate_namespaces(context):")
+
+
+ for node in namespaces.values():
+ if 'import' in node.attributes:
+ self.compiler.has_ns_imports = True
+ self.printer.start_source(node.lineno)
+ if len(node.nodes):
+ self.printer.writeline("def make_namespace():")
+ export = []
+ identifiers = self.compiler.identifiers.branch(node)
+ self.in_def = True
+ class NSDefVisitor(object):
+ def visitDefTag(s, node):
+ s.visitDefOrBase(node)
+
+ def visitBlockTag(s, node):
+ s.visitDefOrBase(node)
+
+ def visitDefOrBase(s, node):
+ if node.is_anonymous:
+ raise exceptions.CompileException(
+ "Can't put anonymous blocks inside "
+ "<%namespace>",
+ **node.exception_kwargs
+ )
+ self.write_inline_def(node, identifiers, nested=False)
+ export.append(node.funcname)
+ vis = NSDefVisitor()
+ for n in node.nodes:
+ n.accept_visitor(vis)
+ self.printer.writeline("return [%s]" % (','.join(export)))
+ self.printer.writeline(None)
+ self.in_def = False
+ callable_name = "make_namespace()"
+ else:
+ callable_name = "None"
+
+ if 'file' in node.parsed_attributes:
+ self.printer.writeline(
+ "ns = runtime.TemplateNamespace(%r,"
+ " context._clean_inheritance_tokens(),"
+ " templateuri=%s, callables=%s, "
+ " calling_uri=_template_uri)" %
+ (
+ node.name,
+ node.parsed_attributes.get('file', 'None'),
+ callable_name,
+ )
+ )
+ elif 'module' in node.parsed_attributes:
+ self.printer.writeline(
+ "ns = runtime.ModuleNamespace(%r,"
+ " context._clean_inheritance_tokens(),"
+ " callables=%s, calling_uri=_template_uri,"
+ " module=%s)" %
+ (
+ node.name,
+ callable_name,
+ node.parsed_attributes.get(
+ 'module', 'None')
+ )
+ )
+ else:
+ self.printer.writeline(
+ "ns = runtime.Namespace(%r,"
+ " context._clean_inheritance_tokens(),"
+ " callables=%s, calling_uri=_template_uri)" %
+ (
+ node.name,
+ callable_name,
+ )
+ )
+ if eval(node.attributes.get('inheritable', "False")):
+ self.printer.writeline("context['self'].%s = ns" % (node.name))
+
+ self.printer.writeline(
+ "context.namespaces[(__name__, %s)] = ns" % repr(node.name))
+ self.printer.write_blanks(1)
+ if not len(namespaces):
+ self.printer.writeline("pass")
+ self.printer.writeline(None)
+
+ def write_variable_declares(self, identifiers, toplevel=False, limit=None):
+ """write variable declarations at the top of a function.
+
+ the variable declarations are in the form of callable
+ definitions for defs and/or name lookup within the
+ function's context argument. the names declared are based
+ on the names that are referenced in the function body,
+ which don't otherwise have any explicit assignment
+ operation. names that are assigned within the body are
+ assumed to be locally-scoped variables and are not
+ separately declared.
+
+ for def callable definitions, if the def is a top-level
+ callable then a 'stub' callable is generated which wraps
+ the current Context into a closure. if the def is not
+ top-level, it is fully rendered as a local closure.
+
+ """
+
+ # collection of all defs available to us in this scope
+ comp_idents = dict([(c.funcname, c) for c in identifiers.defs])
+ to_write = set()
+
+ # write "context.get()" for all variables we are going to
+ # need that aren't in the namespace yet
+ to_write = to_write.union(identifiers.undeclared)
+
+ # write closure functions for closures that we define
+ # right here
+ to_write = to_write.union(
+ [c.funcname for c in identifiers.closuredefs.values()])
+
+ # remove identifiers that are declared in the argument
+ # signature of the callable
+ to_write = to_write.difference(identifiers.argument_declared)
+
+ # remove identifiers that we are going to assign to.
+ # in this way we mimic Python's behavior,
+ # i.e. assignment to a variable within a block
+ # means that variable is now a "locally declared" var,
+ # which cannot be referenced beforehand.
+ to_write = to_write.difference(identifiers.locally_declared)
+
+ if self.compiler.enable_loop:
+ has_loop = "loop" in to_write
+ to_write.discard("loop")
+ else:
+ has_loop = False
+
+ # if a limiting set was sent, constrain to those items in that list
+ # (this is used for the caching decorator)
+ if limit is not None:
+ to_write = to_write.intersection(limit)
+
+ if toplevel and getattr(self.compiler, 'has_ns_imports', False):
+ self.printer.writeline("_import_ns = {}")
+ self.compiler.has_imports = True
+ for ident, ns in self.compiler.namespaces.items():
+ if 'import' in ns.attributes:
+ self.printer.writeline(
+ "_mako_get_namespace(context, %r)."
+ "_populate(_import_ns, %r)" %
+ (
+ ident,
+ re.split(r'\s*,\s*', ns.attributes['import'])
+ ))
+
+ if has_loop:
+ self.printer.writeline(
+ 'loop = __M_loop = runtime.LoopStack()'
+ )
+
+ for ident in to_write:
+ if ident in comp_idents:
+ comp = comp_idents[ident]
+ if comp.is_block:
+ if not comp.is_anonymous:
+ self.write_def_decl(comp, identifiers)
+ else:
+ self.write_inline_def(comp, identifiers, nested=True)
+ else:
+ if comp.is_root():
+ self.write_def_decl(comp, identifiers)
+ else:
+ self.write_inline_def(comp, identifiers, nested=True)
+
+ elif ident in self.compiler.namespaces:
+ self.printer.writeline(
+ "%s = _mako_get_namespace(context, %r)" %
+ (ident, ident)
+ )
+ else:
+ if getattr(self.compiler, 'has_ns_imports', False):
+ if self.compiler.strict_undefined:
+ self.printer.writelines(
+ "%s = _import_ns.get(%r, UNDEFINED)" %
+ (ident, ident),
+ "if %s is UNDEFINED:" % ident,
+ "try:",
+ "%s = context[%r]" % (ident, ident),
+ "except KeyError:",
+ "raise NameError(\"'%s' is not defined\")" %
+ ident,
+ None, None
+ )
+ else:
+ self.printer.writeline(
+ "%s = _import_ns.get(%r, context.get(%r, UNDEFINED))" %
+ (ident, ident, ident))
+ else:
+ if self.compiler.strict_undefined:
+ self.printer.writelines(
+ "try:",
+ "%s = context[%r]" % (ident, ident),
+ "except KeyError:",
+ "raise NameError(\"'%s' is not defined\")" %
+ ident,
+ None
+ )
+ else:
+ self.printer.writeline(
+ "%s = context.get(%r, UNDEFINED)" % (ident, ident)
+ )
+
+ self.printer.writeline("__M_writer = context.writer()")
+
+ def write_def_decl(self, node, identifiers):
+ """write a locally-available callable referencing a top-level def"""
+ funcname = node.funcname
+ namedecls = node.get_argument_expressions()
+ nameargs = node.get_argument_expressions(as_call=True)
+
+ if not self.in_def and (
+ len(self.identifiers.locally_assigned) > 0 or
+ len(self.identifiers.argument_declared) > 0):
+ nameargs.insert(0, 'context._locals(__M_locals)')
+ else:
+ nameargs.insert(0, 'context')
+ self.printer.writeline("def %s(%s):" % (funcname, ",".join(namedecls)))
+ self.printer.writeline(
+ "return render_%s(%s)" % (funcname, ",".join(nameargs)))
+ self.printer.writeline(None)
+
+ def write_inline_def(self, node, identifiers, nested):
+ """write a locally-available def callable inside an enclosing def."""
+
+ namedecls = node.get_argument_expressions()
+
+ decorator = node.decorator
+ if decorator:
+ self.printer.writeline(
+ "@runtime._decorate_inline(context, %s)" % decorator)
+ self.printer.writeline(
+ "def %s(%s):" % (node.funcname, ",".join(namedecls)))
+ filtered = len(node.filter_args.args) > 0
+ buffered = eval(node.attributes.get('buffered', 'False'))
+ cached = eval(node.attributes.get('cached', 'False'))
+ self.printer.writelines(
+ # push new frame, assign current frame to __M_caller
+ "__M_caller = context.caller_stack._push_frame()",
+ "try:"
+ )
+ if buffered or filtered or cached:
+ self.printer.writelines(
+ "context._push_buffer()",
+ )
+
+ identifiers = identifiers.branch(node, nested=nested)
+
+ self.write_variable_declares(identifiers)
+
+ self.identifier_stack.append(identifiers)
+ for n in node.nodes:
+ n.accept_visitor(self)
+ self.identifier_stack.pop()
+
+ self.write_def_finish(node, buffered, filtered, cached)
+ self.printer.writeline(None)
+ if cached:
+ self.write_cache_decorator(node, node.funcname,
+ namedecls, False, identifiers,
+ inline=True, toplevel=False)
+
+ def write_def_finish(self, node, buffered, filtered, cached,
+ callstack=True):
+ """write the end section of a rendering function, either outermost or
+ inline.
+
+ this takes into account if the rendering function was filtered,
+ buffered, etc. and closes the corresponding try: block if any, and
+ writes code to retrieve captured content, apply filters, send proper
+ return value."""
+
+ if not buffered and not cached and not filtered:
+ self.printer.writeline("return ''")
+ if callstack:
+ self.printer.writelines(
+ "finally:",
+ "context.caller_stack._pop_frame()",
+ None
+ )
+
+ if buffered or filtered or cached:
+ if buffered or cached:
+ # in a caching scenario, don't try to get a writer
+ # from the context after popping; assume the caching
+ # implementation might be using a context with no
+ # extra buffers
+ self.printer.writelines(
+ "finally:",
+ "__M_buf = context._pop_buffer()"
+ )
+ else:
+ self.printer.writelines(
+ "finally:",
+ "__M_buf, __M_writer = context._pop_buffer_and_writer()"
+ )
+
+ if callstack:
+ self.printer.writeline("context.caller_stack._pop_frame()")
+
+ s = "__M_buf.getvalue()"
+ if filtered:
+ s = self.create_filter_callable(node.filter_args.args, s,
+ False)
+ self.printer.writeline(None)
+ if buffered and not cached:
+ s = self.create_filter_callable(self.compiler.buffer_filters,
+ s, False)
+ if buffered or cached:
+ self.printer.writeline("return %s" % s)
+ else:
+ self.printer.writelines(
+ "__M_writer(%s)" % s,
+ "return ''"
+ )
+
+ def write_cache_decorator(self, node_or_pagetag, name,
+ args, buffered, identifiers,
+ inline=False, toplevel=False):
+ """write a post-function decorator to replace a rendering
+ callable with a cached version of itself."""
+
+ self.printer.writeline("__M_%s = %s" % (name, name))
+ cachekey = node_or_pagetag.parsed_attributes.get('cache_key',
+ repr(name))
+
+ cache_args = {}
+ if self.compiler.pagetag is not None:
+ cache_args.update(
+ (
+ pa[6:],
+ self.compiler.pagetag.parsed_attributes[pa]
+ )
+ for pa in self.compiler.pagetag.parsed_attributes
+ if pa.startswith('cache_') and pa != 'cache_key'
+ )
+ cache_args.update(
+ (
+ pa[6:],
+ node_or_pagetag.parsed_attributes[pa]
+ ) for pa in node_or_pagetag.parsed_attributes
+ if pa.startswith('cache_') and pa != 'cache_key'
+ )
+ if 'timeout' in cache_args:
+ cache_args['timeout'] = int(eval(cache_args['timeout']))
+
+ self.printer.writeline("def %s(%s):" % (name, ','.join(args)))
+
+ # form "arg1, arg2, arg3=arg3, arg4=arg4", etc.
+ pass_args = [
+ "%s=%s" % ((a.split('=')[0],) * 2) if '=' in a else a
+ for a in args
+ ]
+
+ self.write_variable_declares(
+ identifiers,
+ toplevel=toplevel,
+ limit=node_or_pagetag.undeclared_identifiers()
+ )
+ if buffered:
+ s = "context.get('local')."\
+ "cache._ctx_get_or_create("\
+ "%s, lambda:__M_%s(%s), context, %s__M_defname=%r)" % (
+ cachekey, name, ','.join(pass_args),
+ ''.join(["%s=%s, " % (k, v)
+ for k, v in cache_args.items()]),
+ name
+ )
+ # apply buffer_filters
+ s = self.create_filter_callable(self.compiler.buffer_filters, s,
+ False)
+ self.printer.writelines("return " + s, None)
+ else:
+ self.printer.writelines(
+ "__M_writer(context.get('local')."
+ "cache._ctx_get_or_create("
+ "%s, lambda:__M_%s(%s), context, %s__M_defname=%r))" %
+ (
+ cachekey, name, ','.join(pass_args),
+ ''.join(["%s=%s, " % (k, v)
+ for k, v in cache_args.items()]),
+ name,
+ ),
+ "return ''",
+ None
+ )
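+ # Rough sketch of the emitted caching wrapper for a hypothetical def
+ # "header(title)" with no extra cache_* arguments; the real output also
+ # includes the variable declarations written above:
+ #
+ #     __M_header = header
+ #     def header(title):
+ #         __M_writer(context.get('local').cache._ctx_get_or_create(
+ #             'header', lambda: __M_header(title), context,
+ #             __M_defname='header'))
+ #         return ''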
+
+ def create_filter_callable(self, args, target, is_expression):
+ """write a filter-applying expression based on the filters
+ present in the given filter names, adjusting for the global
+ 'default' filter aliases as needed."""
+
+ def locate_encode(name):
+ if re.match(r'decode\..+', name):
+ return "filters." + name
+ elif self.compiler.disable_unicode:
+ return filters.NON_UNICODE_ESCAPES.get(name, name)
+ else:
+ return filters.DEFAULT_ESCAPES.get(name, name)
+
+ if 'n' not in args:
+ if is_expression:
+ if self.compiler.pagetag:
+ args = self.compiler.pagetag.filter_args.args + args
+ if self.compiler.default_filters:
+ args = self.compiler.default_filters + args
+ for e in args:
+ # if filter given as a function, get just the identifier portion
+ if e == 'n':
+ continue
+ m = re.match(r'(.+?)(\(.*\))', e)
+ if m:
+ ident, fargs = m.group(1, 2)
+ f = locate_encode(ident)
+ e = f + fargs
+ else:
+ e = locate_encode(e)
+ assert e is not None
+ target = "%s(%s)" % (e, target)
+ return target
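+ # Example (illustrative; exact output depends on default_filters and any
+ # page-level filters): create_filter_callable(['trim', 'h'], 'name', True)
+ # wraps the target inside-out and returns roughly
+ # "filters.html_escape(filters.trim(name))".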
+
+ def visitExpression(self, node):
+ self.printer.start_source(node.lineno)
+ if len(node.escapes) or \
+ (
+ self.compiler.pagetag is not None and
+ len(self.compiler.pagetag.filter_args.args)
+ ) or \
+ len(self.compiler.default_filters):
+
+ s = self.create_filter_callable(node.escapes_code.args,
+ "%s" % node.text, True)
+ self.printer.writeline("__M_writer(%s)" % s)
+ else:
+ self.printer.writeline("__M_writer(%s)" % node.text)
+
+ def visitControlLine(self, node):
+ if node.isend:
+ self.printer.writeline(None)
+ if node.has_loop_context:
+ self.printer.writeline('finally:')
+ self.printer.writeline("loop = __M_loop._exit()")
+ self.printer.writeline(None)
+ else:
+ self.printer.start_source(node.lineno)
+ if self.compiler.enable_loop and node.keyword == 'for':
+ text = mangle_mako_loop(node, self.printer)
+ else:
+ text = node.text
+ self.printer.writeline(text)
+ children = node.get_children()
+ # this covers the three situations where we want to insert a pass:
+ # 1) a ternary control line with no children,
+ # 2) a primary control line with nothing but its own ternary
+ # and end control lines, and
+ # 3) any control line with no content other than comments
+ if not children or (
+ compat.all(isinstance(c, (parsetree.Comment,
+ parsetree.ControlLine))
+ for c in children) and
+ compat.all((node.is_ternary(c.keyword) or c.isend)
+ for c in children
+ if isinstance(c, parsetree.ControlLine))):
+ self.printer.writeline("pass")
+
+ def visitText(self, node):
+ self.printer.start_source(node.lineno)
+ self.printer.writeline("__M_writer(%s)" % repr(node.content))
+
+ def visitTextTag(self, node):
+ filtered = len(node.filter_args.args) > 0
+ if filtered:
+ self.printer.writelines(
+ "__M_writer = context._push_writer()",
+ "try:",
+ )
+ for n in node.nodes:
+ n.accept_visitor(self)
+ if filtered:
+ self.printer.writelines(
+ "finally:",
+ "__M_buf, __M_writer = context._pop_buffer_and_writer()",
+ "__M_writer(%s)" %
+ self.create_filter_callable(
+ node.filter_args.args,
+ "__M_buf.getvalue()",
+ False),
+ None
+ )
+
+ def visitCode(self, node):
+ if not node.ismodule:
+ self.printer.start_source(node.lineno)
+ self.printer.write_indented_block(node.text)
+
+ if not self.in_def and len(self.identifiers.locally_assigned) > 0:
+ # if we are the "template" def, fudge locally
+ # declared/modified variables into the "__M_locals" dictionary,
+ # which is used for def calls within the same template,
+ # to simulate "enclosing scope"
+ self.printer.writeline(
+ '__M_locals_builtin_stored = __M_locals_builtin()')
+ self.printer.writeline(
+ '__M_locals.update(__M_dict_builtin([(__M_key,'
+ ' __M_locals_builtin_stored[__M_key]) for __M_key in'
+ ' [%s] if __M_key in __M_locals_builtin_stored]))' %
+ ','.join([repr(x) for x in node.declared_identifiers()]))
+
+ def visitIncludeTag(self, node):
+ self.printer.start_source(node.lineno)
+ args = node.attributes.get('args')
+ if args:
+ self.printer.writeline(
+ "runtime._include_file(context, %s, _template_uri, %s)" %
+ (node.parsed_attributes['file'], args))
+ else:
+ self.printer.writeline(
+ "runtime._include_file(context, %s, _template_uri)" %
+ (node.parsed_attributes['file']))
+
+ def visitNamespaceTag(self, node):
+ pass
+
+ def visitDefTag(self, node):
+ pass
+
+ def visitBlockTag(self, node):
+ if node.is_anonymous:
+ self.printer.writeline("%s()" % node.funcname)
+ else:
+ nameargs = node.get_argument_expressions(as_call=True)
+ nameargs += ['**pageargs']
+ self.printer.writeline("if 'parent' not in context._data or "
+ "not hasattr(context._data['parent'], '%s'):"
+ % node.funcname)
+ self.printer.writeline(
+ "context['self'].%s(%s)" % (node.funcname, ",".join(nameargs)))
+ self.printer.writeline("\n")
+
+ def visitCallNamespaceTag(self, node):
+ # TODO: we can put namespace-specific checks here, such
+ # as ensure the given namespace will be imported,
+ # pre-import the namespace, etc.
+ self.visitCallTag(node)
+
+ def visitCallTag(self, node):
+ self.printer.writeline("def ccall(caller):")
+ export = ['body']
+ callable_identifiers = self.identifiers.branch(node, nested=True)
+ body_identifiers = callable_identifiers.branch(node, nested=False)
+ # we want the 'caller' passed to ccall to be used
+ # for the body() function, but for other non-body()
+ # <%def>s within <%call> we want the current caller
+ # off the call stack (if any)
+ body_identifiers.add_declared('caller')
+
+ self.identifier_stack.append(body_identifiers)
+ class DefVisitor(object):
+ def visitDefTag(s, node):
+ s.visitDefOrBase(node)
+
+ def visitBlockTag(s, node):
+ s.visitDefOrBase(node)
+
+ def visitDefOrBase(s, node):
+ self.write_inline_def(node, callable_identifiers, nested=False)
+ if not node.is_anonymous:
+ export.append(node.funcname)
+ # remove defs that are within the <%call> from the
+ # "closuredefs" defined in the body, so they dont render twice
+ if node.funcname in body_identifiers.closuredefs:
+ del body_identifiers.closuredefs[node.funcname]
+
+ vis = DefVisitor()
+ for n in node.nodes:
+ n.accept_visitor(vis)
+ self.identifier_stack.pop()
+
+ bodyargs = node.body_decl.get_argument_expressions()
+ self.printer.writeline("def body(%s):" % ','.join(bodyargs))
+
+ # TODO: figure out best way to specify
+ # buffering/nonbuffering (at call time would be better)
+ buffered = False
+ if buffered:
+ self.printer.writelines(
+ "context._push_buffer()",
+ "try:"
+ )
+ self.write_variable_declares(body_identifiers)
+ self.identifier_stack.append(body_identifiers)
+
+ for n in node.nodes:
+ n.accept_visitor(self)
+ self.identifier_stack.pop()
+
+ self.write_def_finish(node, buffered, False, False, callstack=False)
+ self.printer.writelines(
+ None,
+ "return [%s]" % (','.join(export)),
+ None
+ )
+
+ self.printer.writelines(
+ # push on caller for nested call
+ "context.caller_stack.nextcaller = "
+ "runtime.Namespace('caller', context, "
+ "callables=ccall(__M_caller))",
+ "try:")
+ self.printer.start_source(node.lineno)
+ self.printer.writelines(
+ "__M_writer(%s)" % self.create_filter_callable(
+ [], node.expression, True),
+ "finally:",
+ "context.caller_stack.nextcaller = None",
+ None
+ )
+
+class _Identifiers(object):
+ """tracks the status of identifier names as template code is rendered."""
+
+ def __init__(self, compiler, node=None, parent=None, nested=False):
+ if parent is not None:
+ # if we are the branch created in write_namespaces(),
+ # we don't share any context from the main body().
+ if isinstance(node, parsetree.NamespaceTag):
+ self.declared = set()
+ self.topleveldefs = util.SetLikeDict()
+ else:
+ # things that have already been declared
+ # in an enclosing namespace (i.e. names we can just use)
+ self.declared = set(parent.declared).\
+ union([c.name for c in parent.closuredefs.values()]).\
+ union(parent.locally_declared).\
+ union(parent.argument_declared)
+
+ # if these identifiers correspond to a "nested"
+ # scope, it means whatever the parent identifiers
+ # had as undeclared will have been declared by that parent,
+ # and therefore we have them in our scope.
+ if nested:
+ self.declared = self.declared.union(parent.undeclared)
+
+ # top level defs that are available
+ self.topleveldefs = util.SetLikeDict(**parent.topleveldefs)
+ else:
+ self.declared = set()
+ self.topleveldefs = util.SetLikeDict()
+
+ self.compiler = compiler
+
+ # things within this level that are referenced before they
+ # are declared (e.g. assigned to)
+ self.undeclared = set()
+
+ # things that are declared locally. some of these things
+ # could be in the "undeclared" list as well if they are
+ # referenced before declared
+ self.locally_declared = set()
+
+ # assignments made in explicit python blocks.
+ # these will be propagated to
+ # the context of local def calls.
+ self.locally_assigned = set()
+
+ # things that are declared in the argument
+ # signature of the def callable
+ self.argument_declared = set()
+
+ # closure defs that are defined in this level
+ self.closuredefs = util.SetLikeDict()
+
+ self.node = node
+
+ if node is not None:
+ node.accept_visitor(self)
+
+ illegal_names = self.compiler.reserved_names.intersection(
+ self.locally_declared)
+ if illegal_names:
+ raise exceptions.NameConflictError(
+ "Reserved words declared in template: %s" %
+ ", ".join(illegal_names))
+
+
+ def branch(self, node, **kwargs):
+ """create a new Identifiers for a new Node, with
+ this Identifiers as the parent."""
+
+ return _Identifiers(self.compiler, node, self, **kwargs)
+
+ @property
+ def defs(self):
+ return set(self.topleveldefs.union(self.closuredefs).values())
+
+ def __repr__(self):
+ return "Identifiers(declared=%r, locally_declared=%r, "\
+ "undeclared=%r, topleveldefs=%r, closuredefs=%r, "\
+ "argumentdeclared=%r)" %\
+ (
+ list(self.declared),
+ list(self.locally_declared),
+ list(self.undeclared),
+ [c.name for c in self.topleveldefs.values()],
+ [c.name for c in self.closuredefs.values()],
+ self.argument_declared)
+
+ def check_declared(self, node):
+ """update the state of this Identifiers with the undeclared
+ and declared identifiers of the given node."""
+
+ for ident in node.undeclared_identifiers():
+ if ident != 'context' and\
+ ident not in self.declared.union(self.locally_declared):
+ self.undeclared.add(ident)
+ for ident in node.declared_identifiers():
+ self.locally_declared.add(ident)
+
+ def add_declared(self, ident):
+ self.declared.add(ident)
+ if ident in self.undeclared:
+ self.undeclared.remove(ident)
+
+ def visitExpression(self, node):
+ self.check_declared(node)
+
+ def visitControlLine(self, node):
+ self.check_declared(node)
+
+ def visitCode(self, node):
+ if not node.ismodule:
+ self.check_declared(node)
+ self.locally_assigned = self.locally_assigned.union(
+ node.declared_identifiers())
+
+ def visitNamespaceTag(self, node):
+ # only traverse into the sub-elements of a
+ # <%namespace> tag if we are the branch created in
+ # write_namespaces()
+ if self.node is node:
+ for n in node.nodes:
+ n.accept_visitor(self)
+
+ def _check_name_exists(self, collection, node):
+ existing = collection.get(node.funcname)
+ collection[node.funcname] = node
+ if existing is not None and \
+ existing is not node and \
+ (node.is_block or existing.is_block):
+ raise exceptions.CompileException(
+ "%%def or %%block named '%s' already "
+ "exists in this template." %
+ node.funcname, **node.exception_kwargs)
+
+ def visitDefTag(self, node):
+ if node.is_root() and not node.is_anonymous:
+ self._check_name_exists(self.topleveldefs, node)
+ elif node is not self.node:
+ self._check_name_exists(self.closuredefs, node)
+
+ for ident in node.undeclared_identifiers():
+ if ident != 'context' and \
+ ident not in self.declared.union(self.locally_declared):
+ self.undeclared.add(ident)
+
+ # visit defs only one level deep
+ if node is self.node:
+ for ident in node.declared_identifiers():
+ self.argument_declared.add(ident)
+
+ for n in node.nodes:
+ n.accept_visitor(self)
+
+ def visitBlockTag(self, node):
+ if node is not self.node and not node.is_anonymous:
+
+ if isinstance(self.node, parsetree.DefTag):
+ raise exceptions.CompileException(
+ "Named block '%s' not allowed inside of def '%s'"
+ % (node.name, self.node.name), **node.exception_kwargs)
+ elif isinstance(self.node,
+ (parsetree.CallTag, parsetree.CallNamespaceTag)):
+ raise exceptions.CompileException(
+ "Named block '%s' not allowed inside of <%%call> tag"
+ % (node.name, ), **node.exception_kwargs)
+
+ for ident in node.undeclared_identifiers():
+ if ident != 'context' and \
+ ident not in self.declared.union(self.locally_declared):
+ self.undeclared.add(ident)
+
+ if not node.is_anonymous:
+ self._check_name_exists(self.topleveldefs, node)
+ self.undeclared.add(node.funcname)
+ elif node is not self.node:
+ self._check_name_exists(self.closuredefs, node)
+ for ident in node.declared_identifiers():
+ self.argument_declared.add(ident)
+ for n in node.nodes:
+ n.accept_visitor(self)
+
+ def visitTextTag(self, node):
+ for ident in node.undeclared_identifiers():
+ if ident != 'context' and \
+ ident not in self.declared.union(self.locally_declared):
+ self.undeclared.add(ident)
+
+ def visitIncludeTag(self, node):
+ self.check_declared(node)
+
+ def visitPageTag(self, node):
+ for ident in node.declared_identifiers():
+ self.argument_declared.add(ident)
+ self.check_declared(node)
+
+ def visitCallNamespaceTag(self, node):
+ self.visitCallTag(node)
+
+ def visitCallTag(self, node):
+ if node is self.node:
+ for ident in node.undeclared_identifiers():
+ if ident != 'context' and \
+ ident not in self.declared.union(
+ self.locally_declared):
+ self.undeclared.add(ident)
+ for ident in node.declared_identifiers():
+ self.argument_declared.add(ident)
+ for n in node.nodes:
+ n.accept_visitor(self)
+ else:
+ for ident in node.undeclared_identifiers():
+ if ident != 'context' and \
+ ident not in self.declared.union(
+ self.locally_declared):
+ self.undeclared.add(ident)
+
+
+_FOR_LOOP = re.compile(
+ r'^for\s+((?:\(?)\s*[A-Za-z_][A-Za-z_0-9]*'
+ r'(?:\s*,\s*(?:[A-Za-z_][A-Za-z0-9_]*),??)*\s*(?:\)?))\s+in\s+(.*):'
+)
+
+def mangle_mako_loop(node, printer):
+ """converts a for loop into a context manager wrapped around a for loop
+ when access to the `loop` variable has been detected in the for loop body
+ """
+ loop_variable = LoopVariable()
+ node.accept_visitor(loop_variable)
+ if loop_variable.detected:
+ node.nodes[-1].has_loop_context = True
+ match = _FOR_LOOP.match(node.text)
+ if match:
+ printer.writelines(
+ 'loop = __M_loop._enter(%s)' % match.group(2),
+ 'try:'
+ #'with __M_loop(%s) as loop:' % match.group(2)
+ )
+ text = 'for %s in loop:' % match.group(1)
+ else:
+ raise SyntaxError("Couldn't apply loop context: %s" % node.text)
+ else:
+ text = node.text
+ return text
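+ # Illustration (assuming a template line "% for x in seq:" whose body uses
+ # the `loop` variable): the rewrite performed above emits roughly
+ #
+ #     loop = __M_loop._enter(seq)
+ #     try:
+ #         for x in loop:
+ #             ...
+ #     finally:
+ #         loop = __M_loop._exit()
+ #
+ # with the finally/_exit() part written by visitControlLine when the matching
+ # end keyword carries has_loop_context.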
+
+
+class LoopVariable(object):
+ """A node visitor which looks for the name 'loop' within undeclared
+ identifiers."""
+
+ def __init__(self):
+ self.detected = False
+
+ def _loop_reference_detected(self, node):
+ if 'loop' in node.undeclared_identifiers():
+ self.detected = True
+ else:
+ for n in node.get_children():
+ n.accept_visitor(self)
+
+ def visitControlLine(self, node):
+ self._loop_reference_detected(node)
+
+ def visitCode(self, node):
+ self._loop_reference_detected(node)
+
+ def visitExpression(self, node):
+ self._loop_reference_detected(node)
--- /dev/null
+import sys
+import time
+
+py3k = sys.version_info >= (3, 0)
+py33 = sys.version_info >= (3, 3)
+py2k = sys.version_info < (3,)
+py26 = sys.version_info >= (2, 6)
+jython = sys.platform.startswith('java')
+win32 = sys.platform.startswith('win')
+pypy = hasattr(sys, 'pypy_version_info')
+
+if py3k:
+ from io import StringIO
+ import builtins as compat_builtins
+ from urllib.parse import quote_plus, unquote_plus
+ from html.entities import codepoint2name, name2codepoint
+ string_types = str,
+ binary_type = bytes
+ text_type = str
+
+ from io import BytesIO as byte_buffer
+
+ def u(s):
+ return s
+
+ def b(s):
+ return s.encode("latin-1")
+
+ def octal(lit):
+ return eval("0o" + lit)
+
+else:
+ import __builtin__ as compat_builtins
+ try:
+ from cStringIO import StringIO
+ except:
+ from StringIO import StringIO
+
+ byte_buffer = StringIO
+
+ from urllib import quote_plus, unquote_plus
+ from htmlentitydefs import codepoint2name, name2codepoint
+ string_types = basestring,
+ binary_type = str
+ text_type = unicode
+
+ def u(s):
+ return unicode(s, "utf-8")
+
+ def b(s):
+ return s
+
+ def octal(lit):
+ return eval("0" + lit)
+
+
+if py33:
+ from importlib import machinery
+ def load_module(module_id, path):
+ return machinery.SourceFileLoader(module_id, path).load_module()
+else:
+ import imp
+ def load_module(module_id, path):
+ fp = open(path, 'rb')
+ try:
+ return imp.load_source(module_id, path, fp)
+ finally:
+ fp.close()
+
+
+if py3k:
+ def reraise(tp, value, tb=None, cause=None):
+ if cause is not None:
+ value.__cause__ = cause
+ if value.__traceback__ is not tb:
+ raise value.with_traceback(tb)
+ raise value
+else:
+ exec("def reraise(tp, value, tb=None, cause=None):\n"
+ " raise tp, value, tb\n")
+
+
+def exception_as():
+ return sys.exc_info()[1]
+
+try:
+ import threading
+ if py3k:
+ import _thread as thread
+ else:
+ import thread
+except ImportError:
+ import dummy_threading as threading
+ if py3k:
+ import _dummy_thread as thread
+ else:
+ import dummy_thread as thread
+
+if win32 or jython:
+ time_func = time.clock
+else:
+ time_func = time.time
+
+try:
+ from functools import partial
+except:
+ def partial(func, *args, **keywords):
+ def newfunc(*fargs, **fkeywords):
+ newkeywords = keywords.copy()
+ newkeywords.update(fkeywords)
+ return func(*(args + fargs), **newkeywords)
+ return newfunc
+
+
+all = all
+import json
+
+def exception_name(exc):
+ return exc.__class__.__name__
+
+try:
+ from inspect import CO_VARKEYWORDS, CO_VARARGS
+ def inspect_func_args(fn):
+ if py3k:
+ co = fn.__code__
+ else:
+ co = fn.func_code
+
+ nargs = co.co_argcount
+ names = co.co_varnames
+ args = list(names[:nargs])
+
+ varargs = None
+ if co.co_flags & CO_VARARGS:
+ varargs = co.co_varnames[nargs]
+ nargs = nargs + 1
+ varkw = None
+ if co.co_flags & CO_VARKEYWORDS:
+ varkw = co.co_varnames[nargs]
+
+ if py3k:
+ return args, varargs, varkw, fn.__defaults__
+ else:
+ return args, varargs, varkw, fn.func_defaults
+except ImportError:
+ import inspect
+ def inspect_func_args(fn):
+ return inspect.getargspec(fn)
+
+if py3k:
+ def callable(fn):
+ return hasattr(fn, '__call__')
+else:
+ callable = callable
+
+
+################################################
+# cross-compatible metaclass implementation
+# Copyright (c) 2010-2012 Benjamin Peterson
+def with_metaclass(meta, base=object):
+ """Create a base class with a metaclass."""
+ return meta("%sBase" % meta.__name__, (base,), {})
+################################################
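+# Usage sketch (hypothetical names): a class needing the same metaclass on
+# Python 2 and 3 can be declared as
+#
+#     class Widget(with_metaclass(WidgetMeta, Base)):
+#         pass
+#
+# avoiding the divergent __metaclass__ / class C(metaclass=...) syntaxes.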
+
+
+def arg_stringname(func_arg):
+ """Gets the string name of a kwarg or vararg
+ In Python 3.4 a function's args are
+ of _ast.arg type, not _ast.name
+ """
+ if hasattr(func_arg, 'arg'):
+ return func_arg.arg
+ else:
+ return str(func_arg)
--- /dev/null
+# mako/exceptions.py
+# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""exception classes"""
+
+import traceback
+import sys
+from mako import util, compat
+
+class MakoException(Exception):
+ pass
+
+class RuntimeException(MakoException):
+ pass
+
+def _format_filepos(lineno, pos, filename):
+ if filename is None:
+ return " at line: %d char: %d" % (lineno, pos)
+ else:
+ return " in file '%s' at line: %d char: %d" % (filename, lineno, pos)
+
+
+class CompileException(MakoException):
+ def __init__(self, message, source, lineno, pos, filename):
+ MakoException.__init__(self,
+ message + _format_filepos(lineno, pos, filename))
+ self.lineno = lineno
+ self.pos = pos
+ self.filename = filename
+ self.source = source
+
+class SyntaxException(MakoException):
+ def __init__(self, message, source, lineno, pos, filename):
+ MakoException.__init__(self,
+ message + _format_filepos(lineno, pos, filename))
+ self.lineno = lineno
+ self.pos = pos
+ self.filename = filename
+ self.source = source
+
+class UnsupportedError(MakoException):
+ """raised when a retired feature is used."""
+
+class NameConflictError(MakoException):
+ """raised when a reserved word is used inappropriately"""
+
+class TemplateLookupException(MakoException):
+ pass
+
+class TopLevelLookupException(TemplateLookupException):
+ pass
+
+class RichTraceback(object):
+ """Pull the current exception from the ``sys`` traceback and extracts
+ Mako-specific template information.
+
+ See the usage examples in :ref:`handling_exceptions`.
+
+ """
+ def __init__(self, error=None, traceback=None):
+ self.source, self.lineno = "", 0
+
+ if error is None or traceback is None:
+ t, value, tback = sys.exc_info()
+
+ if error is None:
+ error = value or t
+
+ if traceback is None:
+ traceback = tback
+
+ self.error = error
+ self.records = self._init(traceback)
+
+ if isinstance(self.error, (CompileException, SyntaxException)):
+ self.source = self.error.source
+ self.lineno = self.error.lineno
+ self._has_source = True
+
+ self._init_message()
+
+ @property
+ def errorname(self):
+ return compat.exception_name(self.error)
+
+ def _init_message(self):
+ """Find a unicode representation of self.error"""
+ try:
+ self.message = compat.text_type(self.error)
+ except UnicodeError:
+ try:
+ self.message = str(self.error)
+ except UnicodeEncodeError:
+ # Fallback to args as neither unicode nor
+ # str(Exception(u'\xe6')) work in Python < 2.6
+ self.message = self.error.args[0]
+ if not isinstance(self.message, compat.text_type):
+ self.message = compat.text_type(self.message, 'ascii', 'replace')
+
+ def _get_reformatted_records(self, records):
+ for rec in records:
+ if rec[6] is not None:
+ yield (rec[4], rec[5], rec[2], rec[6])
+ else:
+ yield tuple(rec[0:4])
+
+ @property
+ def traceback(self):
+ """Return a list of 4-tuple traceback records (i.e. normal python
+ format) with template-corresponding lines remapped to the originating
+ template.
+
+ """
+ return list(self._get_reformatted_records(self.records))
+
+ @property
+ def reverse_records(self):
+ return reversed(self.records)
+
+ @property
+ def reverse_traceback(self):
+ """Return the same data as traceback, except in reverse order.
+ """
+
+ return list(self._get_reformatted_records(self.reverse_records))
+
+ def _init(self, trcback):
+ """format a traceback from sys.exc_info() into 7-item tuples,
+ containing the regular four traceback tuple items, plus the original
+ template filename, the line number adjusted relative to the template
+ source, and code line from that line number of the template."""
+
+ import mako.template
+ mods = {}
+ rawrecords = traceback.extract_tb(trcback)
+ new_trcback = []
+ for filename, lineno, function, line in rawrecords:
+ if not line:
+ line = ''
+ try:
+ (line_map, template_lines) = mods[filename]
+ except KeyError:
+ try:
+ info = mako.template._get_module_info(filename)
+ module_source = info.code
+ template_source = info.source
+ template_filename = info.template_filename or filename
+ except KeyError:
+ # A normal .py file (not a Template)
+ if not compat.py3k:
+ try:
+ fp = open(filename, 'rb')
+ encoding = util.parse_encoding(fp)
+ fp.close()
+ except IOError:
+ encoding = None
+ if encoding:
+ line = line.decode(encoding)
+ else:
+ line = line.decode('ascii', 'replace')
+ new_trcback.append((filename, lineno, function, line,
+ None, None, None, None))
+ continue
+
+ template_ln = 1
+
+ source_map = mako.template.ModuleInfo.\
+ get_module_source_metadata(
+ module_source, full_line_map=True)
+ line_map = source_map['full_line_map']
+
+ template_lines = [line for line in
+ template_source.split("\n")]
+ mods[filename] = (line_map, template_lines)
+
+ template_ln = line_map[lineno - 1]
+
+ if template_ln <= len(template_lines):
+ template_line = template_lines[template_ln - 1]
+ else:
+ template_line = None
+ new_trcback.append((filename, lineno, function,
+ line, template_filename, template_ln,
+ template_line, template_source))
+ if not self.source:
+ for l in range(len(new_trcback) - 1, 0, -1):
+ if new_trcback[l][5]:
+ self.source = new_trcback[l][7]
+ self.lineno = new_trcback[l][5]
+ break
+ else:
+ if new_trcback:
+ try:
+ # A normal .py file (not a Template)
+ fp = open(new_trcback[-1][0], 'rb')
+ encoding = util.parse_encoding(fp)
+ fp.seek(0)
+ self.source = fp.read()
+ fp.close()
+ if encoding:
+ self.source = self.source.decode(encoding)
+ except IOError:
+ self.source = ''
+ self.lineno = new_trcback[-1][1]
+ return new_trcback
+
+
+def text_error_template(lookup=None):
+ """Provides a template that renders a stack trace in a similar format to
+ the Python interpreter, substituting source template filenames, line
+ numbers and code for that of the originating source template, as
+ applicable.
+
+ """
+ import mako.template
+ return mako.template.Template(r"""
+<%page args="error=None, traceback=None"/>
+<%!
+ from mako.exceptions import RichTraceback
+%>\
+<%
+ tback = RichTraceback(error=error, traceback=traceback)
+%>\
+Traceback (most recent call last):
+% for (filename, lineno, function, line) in tback.traceback:
+ File "${filename}", line ${lineno}, in ${function or '?'}
+ ${line | trim}
+% endfor
+${tback.errorname}: ${tback.message}
+""")
+
+
+def _install_pygments():
+ global syntax_highlight, pygments_html_formatter
+ from mako.ext.pygmentplugin import syntax_highlight,\
+ pygments_html_formatter
+
+def _install_fallback():
+ global syntax_highlight, pygments_html_formatter
+ from mako.filters import html_escape
+ pygments_html_formatter = None
+ def syntax_highlight(filename='', language=None):
+ return html_escape
+
+def _install_highlighting():
+ try:
+ _install_pygments()
+ except ImportError:
+ _install_fallback()
+_install_highlighting()
+
+def html_error_template():
+ """Provides a template that renders a stack trace in an HTML format,
+ providing an excerpt of code as well as substituting source template
+ filenames, line numbers and code for that of the originating source
+ template, as applicable.
+
+ The template's default ``encoding_errors`` value is
+ ``'htmlentityreplace'``. The template has two options. With the
+ ``full`` option disabled, only a section of an HTML document is
+ returned. With the ``css`` option disabled, the default stylesheet
+ won't be included.
+
+ """
+ import mako.template
+ return mako.template.Template(r"""
+<%!
+ from mako.exceptions import RichTraceback, syntax_highlight,\
+ pygments_html_formatter
+%>
+<%page args="full=True, css=True, error=None, traceback=None"/>
+% if full:
+<html>
+<head>
+ <title>Mako Runtime Error</title>
+% endif
+% if css:
+ <style>
+ body { font-family:verdana; margin:10px 30px 10px 30px;}
+ .stacktrace { margin:5px 5px 5px 5px; }
+ .highlight { padding:0px 10px 0px 10px; background-color:#9F9FDF; }
+ .nonhighlight { padding:0px; background-color:#DFDFDF; }
+ .sample { padding:10px; margin:10px 10px 10px 10px;
+ font-family:monospace; }
+ .sampleline { padding:0px 10px 0px 10px; }
+ .sourceline { margin:5px 5px 10px 5px; font-family:monospace;}
+ .location { font-size:80%; }
+ .highlight { white-space:pre; }
+ .sampleline { white-space:pre; }
+
+ % if pygments_html_formatter:
+ ${pygments_html_formatter.get_style_defs()}
+ .linenos { min-width: 2.5em; text-align: right; }
+ pre { margin: 0; }
+ .syntax-highlighted { padding: 0 10px; }
+ .syntax-highlightedtable { border-spacing: 1px; }
+ .nonhighlight { border-top: 1px solid #DFDFDF;
+ border-bottom: 1px solid #DFDFDF; }
+ .stacktrace .nonhighlight { margin: 5px 15px 10px; }
+ .sourceline { margin: 0 0; font-family:monospace; }
+ .code { background-color: #F8F8F8; width: 100%; }
+ .error .code { background-color: #FFBDBD; }
+ .error .syntax-highlighted { background-color: #FFBDBD; }
+ % endif
+
+ </style>
+% endif
+% if full:
+</head>
+<body>
+% endif
+
+<h2>Error !</h2>
+<%
+ tback = RichTraceback(error=error, traceback=traceback)
+ src = tback.source
+ line = tback.lineno
+ if src:
+ lines = src.split('\n')
+ else:
+ lines = None
+%>
+<h3>${tback.errorname}: ${tback.message|h}</h3>
+
+% if lines:
+ <div class="sample">
+ <div class="nonhighlight">
+% for index in range(max(0, line-4),min(len(lines), line+5)):
+ <%
+ if pygments_html_formatter:
+ pygments_html_formatter.linenostart = index + 1
+ %>
+ % if index + 1 == line:
+ <%
+ if pygments_html_formatter:
+ old_cssclass = pygments_html_formatter.cssclass
+ pygments_html_formatter.cssclass = 'error ' + old_cssclass
+ %>
+ ${lines[index] | syntax_highlight(language='mako')}
+ <%
+ if pygments_html_formatter:
+ pygments_html_formatter.cssclass = old_cssclass
+ %>
+ % else:
+ ${lines[index] | syntax_highlight(language='mako')}
+ % endif
+% endfor
+ </div>
+ </div>
+% endif
+
+<div class="stacktrace">
+% for (filename, lineno, function, line) in tback.reverse_traceback:
+ <div class="location">${filename}, line ${lineno}:</div>
+ <div class="nonhighlight">
+ <%
+ if pygments_html_formatter:
+ pygments_html_formatter.linenostart = lineno
+ %>
+ <div class="sourceline">${line | syntax_highlight(filename)}</div>
+ </div>
+% endfor
+</div>
+
+% if full:
+</body>
+</html>
+% endif
+""", output_encoding=sys.getdefaultencoding(),
+ encoding_errors='htmlentityreplace')
--- /dev/null
+# mako/filters.py
+# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+
+import re
+import codecs
+
+from mako.compat import quote_plus, unquote_plus, codepoint2name, \
+ name2codepoint
+
+from mako import compat
+
+xml_escapes = {
+ '&': '&amp;',
+ '>': '&gt;',
+ '<': '&lt;',
+ '"': '&#34;', # also &quot; in html-only
+ "'": '&#39;' # also &apos; in html-only
+}
+
+# XXX: &quot; is valid in HTML and XML
+# &apos; is not valid HTML, but is valid XML
+
+def legacy_html_escape(s):
+ """legacy HTML escape for non-unicode mode."""
+ s = s.replace("&", "&amp;")
+ s = s.replace(">", "&gt;")
+ s = s.replace("<", "&lt;")
+ s = s.replace('"', "&#34;")
+ s = s.replace("'", "&#39;")
+ return s
+
+
+try:
+ import markupsafe
+ html_escape = markupsafe.escape
+except ImportError:
+ html_escape = legacy_html_escape
+
+def xml_escape(string):
+ return re.sub(r'([&<"\'>])', lambda m: xml_escapes[m.group()], string)
+
+def url_escape(string):
+ # convert into a list of octets
+ string = string.encode("utf8")
+ return quote_plus(string)
+
+def legacy_url_escape(string):
+ # convert into a list of octets
+ return quote_plus(string)
+
+def url_unescape(string):
+ text = unquote_plus(string)
+ if not is_ascii_str(text):
+ text = text.decode("utf8")
+ return text
+
+def trim(string):
+ return string.strip()
+
+
+class Decode(object):
+ def __getattr__(self, key):
+ def decode(x):
+ if isinstance(x, compat.text_type):
+ return x
+ elif not isinstance(x, compat.binary_type):
+ return decode(str(x))
+ else:
+ return compat.text_type(x, encoding=key)
+ return decode
+decode = Decode()
+
+
+_ASCII_re = re.compile(r'\A[\x00-\x7f]*\Z')
+
+def is_ascii_str(text):
+ return isinstance(text, str) and _ASCII_re.match(text)
+
+################################################################
+
+class XMLEntityEscaper(object):
+ def __init__(self, codepoint2name, name2codepoint):
+ self.codepoint2entity = dict([(c, compat.text_type('&%s;' % n))
+ for c, n in codepoint2name.items()])
+ self.name2codepoint = name2codepoint
+
+ def escape_entities(self, text):
+ """Replace characters with their character entity references.
+
+ Only characters corresponding to a named entity are replaced.
+ """
+ return compat.text_type(text).translate(self.codepoint2entity)
+
+ def __escape(self, m):
+ codepoint = ord(m.group())
+ try:
+ return self.codepoint2entity[codepoint]
+ except (KeyError, IndexError):
+ return '&#x%X;' % codepoint
+
+
+ __escapable = re.compile(r'["&<>]|[^\x00-\x7f]')
+
+ def escape(self, text):
+ """Replace characters with their character references.
+
+ Replace characters by their named entity references.
+ Non-ASCII characters, if they do not have a named entity reference,
+ are replaced by numerical character references.
+
+ The return value is guaranteed to be ASCII.
+ """
+ return self.__escapable.sub(self.__escape, compat.text_type(text)
+ ).encode('ascii')
+
+ # XXX: This regexp will not match all valid XML entity names__.
+ # (It punts on details involving CombiningChars and Extenders.)
+ #
+ # .. __: http://www.w3.org/TR/2000/REC-xml-20001006#NT-EntityRef
+ __characterrefs = re.compile(r'''& (?:
+ \#(\d+)
+ | \#x([\da-f]+)
+ | ( (?!\d) [:\w] [-.:\w]+ )
+ ) ;''',
+ re.X | re.UNICODE)
+
+ def __unescape(self, m):
+ dval, hval, name = m.groups()
+ if dval:
+ codepoint = int(dval)
+ elif hval:
+ codepoint = int(hval, 16)
+ else:
+ codepoint = self.name2codepoint.get(name, 0xfffd)
+ # U+FFFD = "REPLACEMENT CHARACTER"
+ if codepoint < 128:
+ return chr(codepoint)
+ return chr(codepoint)
+
+ def unescape(self, text):
+ """Unescape character references.
+
+ All character references (both entity references and numerical
+ character references) are unescaped.
+ """
+ return self.__characterrefs.sub(self.__unescape, text)
+
+
+_html_entities_escaper = XMLEntityEscaper(codepoint2name, name2codepoint)
+
+html_entities_escape = _html_entities_escaper.escape_entities
+html_entities_unescape = _html_entities_escaper.unescape
+
+
+def htmlentityreplace_errors(ex):
+ """An encoding error handler.
+
+ This python `codecs`_ error handler replaces unencodable
+ characters with HTML entities, or, if no HTML entity exists for
+ the character, XML character references.
+
+ >>> u'The cost was \u20ac12.'.encode('latin1', 'htmlentityreplace')
+ 'The cost was &euro;12.'
+ """
+ if isinstance(ex, UnicodeEncodeError):
+ # Handle encoding errors
+ bad_text = ex.object[ex.start:ex.end]
+ text = _html_entities_escaper.escape(bad_text)
+ return (compat.text_type(text), ex.end)
+ raise ex
+
+codecs.register_error('htmlentityreplace', htmlentityreplace_errors)
+
+
+# TODO: options to make this dynamic per-compilation will be added in a later
+# release
+DEFAULT_ESCAPES = {
+ 'x': 'filters.xml_escape',
+ 'h': 'filters.html_escape',
+ 'u': 'filters.url_escape',
+ 'trim': 'filters.trim',
+ 'entity': 'filters.html_entities_escape',
+ 'unicode': 'unicode',
+ 'decode': 'decode',
+ 'str': 'str',
+ 'n': 'n'
+}
+
+if compat.py3k:
+ DEFAULT_ESCAPES.update({
+ 'unicode': 'str'
+ })
+
+NON_UNICODE_ESCAPES = DEFAULT_ESCAPES.copy()
+NON_UNICODE_ESCAPES['h'] = 'filters.legacy_html_escape'
+NON_UNICODE_ESCAPES['u'] = 'filters.legacy_url_escape'
+
--- /dev/null
+# mako/lexer.py
+# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""provides the Lexer class for parsing template strings into parse trees."""
+
+import re
+import codecs
+from mako import parsetree, exceptions, compat
+from mako.pygen import adjust_whitespace
+
+_regexp_cache = {}
+
+class Lexer(object):
+ def __init__(self, text, filename=None,
+ disable_unicode=False,
+ input_encoding=None, preprocessor=None):
+ self.text = text
+ self.filename = filename
+ self.template = parsetree.TemplateNode(self.filename)
+ self.matched_lineno = 1
+ self.matched_charpos = 0
+ self.lineno = 1
+ self.match_position = 0
+ self.tag = []
+ self.control_line = []
+ self.ternary_stack = []
+ self.disable_unicode = disable_unicode
+ self.encoding = input_encoding
+
+ if compat.py3k and disable_unicode:
+ raise exceptions.UnsupportedError(
+ "Mako for Python 3 does not "
+ "support disabling Unicode")
+
+ if preprocessor is None:
+ self.preprocessor = []
+ elif not hasattr(preprocessor, '__iter__'):
+ self.preprocessor = [preprocessor]
+ else:
+ self.preprocessor = preprocessor
+
+ @property
+ def exception_kwargs(self):
+ return {'source': self.text,
+ 'lineno': self.matched_lineno,
+ 'pos': self.matched_charpos,
+ 'filename': self.filename}
+
+ def match(self, regexp, flags=None):
+ """compile the given regexp, cache the reg, and call match_reg()."""
+
+ try:
+ reg = _regexp_cache[(regexp, flags)]
+ except KeyError:
+ if flags:
+ reg = re.compile(regexp, flags)
+ else:
+ reg = re.compile(regexp)
+ _regexp_cache[(regexp, flags)] = reg
+
+ return self.match_reg(reg)
+
+ def match_reg(self, reg):
+ """match the given regular expression object to the current text
+ position.
+
+ if a match occurs, update the current text and line position.
+
+ """
+
+ mp = self.match_position
+
+ match = reg.match(self.text, self.match_position)
+ if match:
+ (start, end) = match.span()
+ if end == start:
+ self.match_position = end + 1
+ else:
+ self.match_position = end
+ self.matched_lineno = self.lineno
+ lines = re.findall(r"\n", self.text[mp:self.match_position])
+ cp = mp - 1
+ while (cp >= 0 and cp < self.textlength and self.text[cp] != '\n'):
+ cp -= 1
+ self.matched_charpos = mp - cp
+ self.lineno += len(lines)
+ #print "MATCHED:", match.group(0), "LINE START:",
+ # self.matched_lineno, "LINE END:", self.lineno
+ #print "MATCH:", regexp, "\n", self.text[mp : mp + 15], \
+ # (match and "TRUE" or "FALSE")
+ return match
+
+ def parse_until_text(self, *text):
+ startpos = self.match_position
+ text_re = r'|'.join(text)
+ brace_level = 0
+ while True:
+ match = self.match(r'#.*\n')
+ if match:
+ continue
+ match = self.match(r'(\"\"\"|\'\'\'|\"|\')((?<!\\)\\\1|.)*?\1',
+ re.S)
+ if match:
+ continue
+ match = self.match(r'(%s)' % text_re)
+ if match:
+ if match.group(1) == '}' and brace_level > 0:
+ brace_level -= 1
+ continue
+ return \
+ self.text[startpos:
+ self.match_position - len(match.group(1))],\
+ match.group(1)
+ match = self.match(r"(.*?)(?=\"|\'|#|%s)" % text_re, re.S)
+ if match:
+ brace_level += match.group(1).count('{')
+ brace_level -= match.group(1).count('}')
+ continue
+ raise exceptions.SyntaxException(
+ "Expected: %s" %
+ ','.join(text),
+ **self.exception_kwargs)
+
+ def append_node(self, nodecls, *args, **kwargs):
+ kwargs.setdefault('source', self.text)
+ kwargs.setdefault('lineno', self.matched_lineno)
+ kwargs.setdefault('pos', self.matched_charpos)
+ kwargs['filename'] = self.filename
+ node = nodecls(*args, **kwargs)
+ if len(self.tag):
+ self.tag[-1].nodes.append(node)
+ else:
+ self.template.nodes.append(node)
+ # build a set of child nodes for the control line
+ # (used for loop variable detection)
+ # also build a set of child nodes on ternary control lines
+ # (used for determining if a pass needs to be auto-inserted)
+ if self.control_line:
+ control_frame = self.control_line[-1]
+ control_frame.nodes.append(node)
+ if not (isinstance(node, parsetree.ControlLine) and
+ control_frame.is_ternary(node.keyword)):
+ if self.ternary_stack and self.ternary_stack[-1]:
+ self.ternary_stack[-1][-1].nodes.append(node)
+ if isinstance(node, parsetree.Tag):
+ if len(self.tag):
+ node.parent = self.tag[-1]
+ self.tag.append(node)
+ elif isinstance(node, parsetree.ControlLine):
+ if node.isend:
+ self.control_line.pop()
+ self.ternary_stack.pop()
+ elif node.is_primary:
+ self.control_line.append(node)
+ self.ternary_stack.append([])
+ elif self.control_line and \
+ self.control_line[-1].is_ternary(node.keyword):
+ self.ternary_stack[-1].append(node)
+ elif self.control_line and \
+ not self.control_line[-1].is_ternary(node.keyword):
+ raise exceptions.SyntaxException(
+ "Keyword '%s' not a legal ternary for keyword '%s'" %
+ (node.keyword, self.control_line[-1].keyword),
+ **self.exception_kwargs)
+
+ _coding_re = re.compile(r'#.*coding[:=]\s*([-\w.]+).*\r?\n')
+
+ def decode_raw_stream(self, text, decode_raw, known_encoding, filename):
+ """given string/unicode or bytes/string, determine encoding
+ from magic encoding comment, return body as unicode
+ or raw if decode_raw=False
+
+ """
+ if isinstance(text, compat.text_type):
+ m = self._coding_re.match(text)
+ encoding = m and m.group(1) or known_encoding or 'ascii'
+ return encoding, text
+
+ if text.startswith(codecs.BOM_UTF8):
+ text = text[len(codecs.BOM_UTF8):]
+ parsed_encoding = 'utf-8'
+ m = self._coding_re.match(text.decode('utf-8', 'ignore'))
+ if m is not None and m.group(1) != 'utf-8':
+ raise exceptions.CompileException(
+ "Found utf-8 BOM in file, with conflicting "
+ "magic encoding comment of '%s'" % m.group(1),
+ text.decode('utf-8', 'ignore'),
+ 0, 0, filename)
+ else:
+ m = self._coding_re.match(text.decode('utf-8', 'ignore'))
+ if m:
+ parsed_encoding = m.group(1)
+ else:
+ parsed_encoding = known_encoding or 'ascii'
+
+ if decode_raw:
+ try:
+ text = text.decode(parsed_encoding)
+ except UnicodeDecodeError:
+ raise exceptions.CompileException(
+ "Unicode decode operation of encoding '%s' failed" %
+ parsed_encoding,
+ text.decode('utf-8', 'ignore'),
+ 0, 0, filename)
+
+ return parsed_encoding, text
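+ # For instance, a template whose raw bytes start with a magic comment such
+ # as "## -*- coding: utf-8 -*-" is decoded as utf-8 by the logic above; a
+ # UTF-8 BOM is honored the same way, and a BOM that conflicts with the
+ # comment raises CompileException. (Sketch of existing behavior, not new
+ # functionality.)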
+
+ def parse(self):
+ self.encoding, self.text = self.decode_raw_stream(self.text,
+ not self.disable_unicode,
+ self.encoding,
+ self.filename,)
+
+ for preproc in self.preprocessor:
+ self.text = preproc(self.text)
+
+ # push the match marker past the
+ # encoding comment.
+ self.match_reg(self._coding_re)
+
+ self.textlength = len(self.text)
+
+ while (True):
+ if self.match_position > self.textlength:
+ break
+
+ if self.match_end():
+ break
+ if self.match_expression():
+ continue
+ if self.match_control_line():
+ continue
+ if self.match_comment():
+ continue
+ if self.match_tag_start():
+ continue
+ if self.match_tag_end():
+ continue
+ if self.match_python_block():
+ continue
+ if self.match_text():
+ continue
+
+ if self.match_position > self.textlength:
+ break
+ raise exceptions.CompileException("assertion failed")
+
+ if len(self.tag):
+ raise exceptions.SyntaxException("Unclosed tag: <%%%s>" %
+ self.tag[-1].keyword,
+ **self.exception_kwargs)
+ if len(self.control_line):
+ raise exceptions.SyntaxException(
+ "Unterminated control keyword: '%s'" %
+ self.control_line[-1].keyword,
+ self.text,
+ self.control_line[-1].lineno,
+ self.control_line[-1].pos, self.filename)
+ return self.template
+
+ def match_tag_start(self):
+ match = self.match(r'''
+ \<% # opening tag
+
+ ([\w\.\:]+) # keyword
+
+ ((?:\s+\w+|\s*=\s*|".*?"|'.*?')*) # attrname, = \
+ # sign, string expression
+
+ \s* # more whitespace
+
+ (/)?> # closing
+
+ ''',
+
+ re.I | re.S | re.X)
+
+ if match:
+ keyword, attr, isend = match.groups()
+ self.keyword = keyword
+ attributes = {}
+ if attr:
+ for att in re.findall(
+ r"\s*(\w+)\s*=\s*(?:'([^']*)'|\"([^\"]*)\")", attr):
+ key, val1, val2 = att
+ text = val1 or val2
+ text = text.replace('\r\n', '\n')
+ attributes[key] = text
+ self.append_node(parsetree.Tag, keyword, attributes)
+ if isend:
+ self.tag.pop()
+ else:
+ if keyword == 'text':
+ match = self.match(r'(.*?)(?=\</%text>)', re.S)
+ if not match:
+ raise exceptions.SyntaxException(
+ "Unclosed tag: <%%%s>" %
+ self.tag[-1].keyword,
+ **self.exception_kwargs)
+ self.append_node(parsetree.Text, match.group(1))
+ return self.match_tag_end()
+ return True
+ else:
+ return False
+
+ def match_tag_end(self):
+ match = self.match(r'\</%[\t ]*(.+?)[\t ]*>')
+ if match:
+ if not len(self.tag):
+ raise exceptions.SyntaxException(
+ "Closing tag without opening tag: </%%%s>" %
+ match.group(1),
+ **self.exception_kwargs)
+ elif self.tag[-1].keyword != match.group(1):
+ raise exceptions.SyntaxException(
+ "Closing tag </%%%s> does not match tag: <%%%s>" %
+ (match.group(1), self.tag[-1].keyword),
+ **self.exception_kwargs)
+ self.tag.pop()
+ return True
+ else:
+ return False
+
+ def match_end(self):
+ match = self.match(r'\Z', re.S)
+ if match:
+ string = match.group()
+ if string:
+ return string
+ else:
+ return True
+ else:
+ return False
+
+ def match_text(self):
+ match = self.match(r"""
+ (.*?) # anything, followed by:
+ (
+ (?<=\n)(?=[ \t]*(?=%|\#\#)) # an eval or line-based
+ # comment preceded by a
+ # consumed newline and whitespace
+ |
+ (?=\${) # an expression
+ |
+ (?=</?[%&]) # a substitution or block or call start or end
+ # - don't consume
+ |
+ (\\\r?\n) # an escaped newline - throw away
+ |
+ \Z # end of string
+ )""", re.X | re.S)
+
+ if match:
+ text = match.group(1)
+ if text:
+ self.append_node(parsetree.Text, text)
+ return True
+ else:
+ return False
+
+ def match_python_block(self):
+ match = self.match(r"<%(!)?")
+ if match:
+ line, pos = self.matched_lineno, self.matched_charpos
+ text, end = self.parse_until_text(r'%>')
+ # the trailing newline helps
+ # compiler.parse() not complain about indentation
+ text = adjust_whitespace(text) + "\n"
+ self.append_node(
+ parsetree.Code,
+ text,
+ match.group(1) == '!', lineno=line, pos=pos)
+ return True
+ else:
+ return False
+
+ def match_expression(self):
+ match = self.match(r"\${")
+ if match:
+ line, pos = self.matched_lineno, self.matched_charpos
+ text, end = self.parse_until_text(r'\|', r'}')
+ if end == '|':
+ escapes, end = self.parse_until_text(r'}')
+ else:
+ escapes = ""
+ text = text.replace('\r\n', '\n')
+ self.append_node(
+ parsetree.Expression,
+ text, escapes.strip(),
+ lineno=line, pos=pos)
+ return True
+ else:
+ return False
+
+ def match_control_line(self):
+ match = self.match(
+ r"(?<=^)[\t ]*(%(?!%)|##)[\t ]*((?:(?:\\r?\n)|[^\r\n])*)"
+ r"(?:\r?\n|\Z)", re.M)
+ if match:
+ operator = match.group(1)
+ text = match.group(2)
+ if operator == '%':
+ m2 = re.match(r'(end)?(\w+)\s*(.*)', text)
+ if not m2:
+ raise exceptions.SyntaxException(
+ "Invalid control line: '%s'" %
+ text,
+ **self.exception_kwargs)
+ isend, keyword = m2.group(1, 2)
+ isend = (isend is not None)
+
+ if isend:
+ if not len(self.control_line):
+ raise exceptions.SyntaxException(
+ "No starting keyword '%s' for '%s'" %
+ (keyword, text),
+ **self.exception_kwargs)
+ elif self.control_line[-1].keyword != keyword:
+ raise exceptions.SyntaxException(
+ "Keyword '%s' doesn't match keyword '%s'" %
+ (text, self.control_line[-1].keyword),
+ **self.exception_kwargs)
+ self.append_node(parsetree.ControlLine, keyword, isend, text)
+ else:
+ self.append_node(parsetree.Comment, text)
+ return True
+ else:
+ return False
+
+ def match_comment(self):
+ """matches the multiline version of a comment"""
+ match = self.match(r"<%doc>(.*?)</%doc>", re.S)
+ if match:
+ self.append_node(parsetree.Comment, match.group(1))
+ return True
+ else:
+ return False
+
--- /dev/null
+# mako/lookup.py
+# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+import os, stat, posixpath, re
+from mako import exceptions, util
+from mako.template import Template
+
+try:
+ import threading
+except:
+ import dummy_threading as threading
+
+class TemplateCollection(object):
+ """Represent a collection of :class:`.Template` objects,
+ identifiable via URI.
+
+ A :class:`.TemplateCollection` is linked to the usage of
+ all template tags that address other templates, such
+ as ``<%include>``, ``<%namespace>``, and ``<%inherit>``.
+ The ``file`` attribute of each of those tags refers
+ to a string URI that is passed to that :class:`.Template`
+ object's :class:`.TemplateCollection` for resolution.
+
+ :class:`.TemplateCollection` is an abstract class,
+ with the usual default implementation being :class:`.TemplateLookup`.
+
+ """
+
+ def has_template(self, uri):
+ """Return ``True`` if this :class:`.TemplateLookup` is
+ capable of returning a :class:`.Template` object for the
+ given ``uri``.
+
+ :param uri: String URI of the template to be resolved.
+
+ """
+ try:
+ self.get_template(uri)
+ return True
+ except exceptions.TemplateLookupException:
+ return False
+
+ def get_template(self, uri, relativeto=None):
+ """Return a :class:`.Template` object corresponding to the given
+ ``uri``.
+
+ The default implementation raises
+ :class:`.NotImplementedError`. Implementations should
+ raise :class:`.TemplateLookupException` if the given ``uri``
+ cannot be resolved.
+
+ :param uri: String URI of the template to be resolved.
+ :param relativeto: if present, the given ``uri`` is assumed to
+ be relative to this URI.
+
+ """
+ raise NotImplementedError()
+
+ def filename_to_uri(self, uri, filename):
+ """Convert the given ``filename`` to a URI relative to
+ this :class:`.TemplateCollection`."""
+
+ return uri
+
+ def adjust_uri(self, uri, filename):
+ """Adjust the given ``uri`` based on the calling ``filename``.
+
+ When this method is called from the runtime, the
+ ``filename`` parameter is taken directly to the ``filename``
+ attribute of the calling template. Therefore a custom
+ :class:`.TemplateCollection` subclass can place any string
+ identifier desired in the ``filename`` parameter of the
+ :class:`.Template` objects it constructs and have them come back
+ here.
+
+ """
+ return uri
+
+class TemplateLookup(TemplateCollection):
+ """Represent a collection of templates that locates template source files
+ from the local filesystem.
+
+ The primary argument is the ``directories`` argument, the list of
+ directories to search:
+
+ .. sourcecode:: python
+
+ lookup = TemplateLookup(["/path/to/templates"])
+ some_template = lookup.get_template("/index.html")
+
+ The :class:`.TemplateLookup` can also be given :class:`.Template` objects
+ programmatically using :meth:`.put_string` or :meth:`.put_template`:
+
+ .. sourcecode:: python
+
+ lookup = TemplateLookup()
+ lookup.put_string("base.html", '''
+ <html><body>${self.next()}</body></html>
+ ''')
+ lookup.put_string("hello.html", '''
+ <%include file='base.html'/>
+
+ Hello, world !
+ ''')
+
+
+ :param directories: A list of directory names which will be
+ searched for a particular template URI. The URI is appended
+ to each directory and the filesystem checked.
+
+ :param collection_size: Approximate size of the collection used
+ to store templates. If left at its default of ``-1``, the size
+ is unbounded, and a plain Python dictionary is used to
+ relate URI strings to :class:`.Template` instances.
+ Otherwise, a least-recently-used cache object is used which
+ will maintain the size of the collection approximately to
+ the number given.
+
+ :param filesystem_checks: When at its default value of ``True``,
+ each call to :meth:`.TemplateLookup.get_template()` will
+ compare the filesystem last modified time to the time in
+ which an existing :class:`.Template` object was created.
+ This allows the :class:`.TemplateLookup` to regenerate a
+ new :class:`.Template` whenever the original source has
+ been updated. Set this to ``False`` for a very minor
+ performance increase.
+
+ :param modulename_callable: A callable which, when present,
+ is passed the path of the source file as well as the
+ requested URI, and then returns the full path of the
+ generated Python module file. This is used to inject
+ alternate schemes for Python module location. If left at
+ its default of ``None``, the built in system of generation
+ based on ``module_directory`` plus ``uri`` is used.
+
+ All other keyword parameters available for
+ :class:`.Template` are mirrored here. When new
+ :class:`.Template` objects are created, the keywords
+ established with this :class:`.TemplateLookup` are passed on
+ to each new :class:`.Template`.
+
+ """
+
+ def __init__(self,
+ directories=None,
+ module_directory=None,
+ filesystem_checks=True,
+ collection_size=-1,
+ format_exceptions=False,
+ error_handler=None,
+ disable_unicode=False,
+ bytestring_passthrough=False,
+ output_encoding=None,
+ encoding_errors='strict',
+
+ cache_args=None,
+ cache_impl='beaker',
+ cache_enabled=True,
+ cache_type=None,
+ cache_dir=None,
+ cache_url=None,
+
+ modulename_callable=None,
+ module_writer=None,
+ default_filters=None,
+ buffer_filters=(),
+ strict_undefined=False,
+ imports=None,
+ future_imports=None,
+ enable_loop=True,
+ input_encoding=None,
+ preprocessor=None,
+ lexer_cls=None):
+
+ self.directories = [posixpath.normpath(d) for d in
+ util.to_list(directories, ())
+ ]
+ self.module_directory = module_directory
+ self.modulename_callable = modulename_callable
+ self.filesystem_checks = filesystem_checks
+ self.collection_size = collection_size
+
+ if cache_args is None:
+ cache_args = {}
+ # transfer deprecated cache_* args
+ if cache_dir:
+ cache_args.setdefault('dir', cache_dir)
+ if cache_url:
+ cache_args.setdefault('url', cache_url)
+ if cache_type:
+ cache_args.setdefault('type', cache_type)
+
+ self.template_args = {
+ 'format_exceptions':format_exceptions,
+ 'error_handler':error_handler,
+ 'disable_unicode':disable_unicode,
+ 'bytestring_passthrough':bytestring_passthrough,
+ 'output_encoding':output_encoding,
+ 'cache_impl':cache_impl,
+ 'encoding_errors':encoding_errors,
+ 'input_encoding':input_encoding,
+ 'module_directory':module_directory,
+ 'module_writer':module_writer,
+ 'cache_args':cache_args,
+ 'cache_enabled':cache_enabled,
+ 'default_filters':default_filters,
+ 'buffer_filters':buffer_filters,
+ 'strict_undefined':strict_undefined,
+ 'imports':imports,
+ 'future_imports':future_imports,
+ 'enable_loop':enable_loop,
+ 'preprocessor':preprocessor,
+ 'lexer_cls':lexer_cls
+ }
+
+ if collection_size == -1:
+ self._collection = {}
+ self._uri_cache = {}
+ else:
+ self._collection = util.LRUCache(collection_size)
+ self._uri_cache = util.LRUCache(collection_size)
+ self._mutex = threading.Lock()
+
+ def get_template(self, uri):
+ """Return a :class:`.Template` object corresponding to the given
+ ``uri``.
+
+ .. note:: The ``relativeto`` argument is not supported here at the moment.
+
+ """
+
+ try:
+ if self.filesystem_checks:
+ return self._check(uri, self._collection[uri])
+ else:
+ return self._collection[uri]
+ except KeyError:
+ u = re.sub(r'^\/+', '', uri)
+ for dir in self.directories:
+ srcfile = posixpath.normpath(posixpath.join(dir, u))
+ if os.path.isfile(srcfile):
+ return self._load(srcfile, uri)
+ else:
+ raise exceptions.TopLevelLookupException(
+ "Cant locate template for uri %r" % uri)
+
+ def adjust_uri(self, uri, relativeto):
+ """Adjust the given ``uri`` based on the given relative URI."""
+
+ key = (uri, relativeto)
+ if key in self._uri_cache:
+ return self._uri_cache[key]
+
+ if uri[0] != '/':
+ if relativeto is not None:
+ v = self._uri_cache[key] = posixpath.join(
+ posixpath.dirname(relativeto), uri)
+ else:
+ v = self._uri_cache[key] = '/' + uri
+ else:
+ v = self._uri_cache[key] = uri
+ return v
+
+
+ def filename_to_uri(self, filename):
+ """Convert the given ``filename`` to a URI relative to
+ this :class:`.TemplateCollection`."""
+
+ try:
+ return self._uri_cache[filename]
+ except KeyError:
+ value = self._relativeize(filename)
+ self._uri_cache[filename] = value
+ return value
+
+ def _relativeize(self, filename):
+ """Return the portion of a filename that is 'relative'
+ to the directories in this lookup.
+
+ """
+
+ filename = posixpath.normpath(filename)
+ for dir in self.directories:
+ if filename[0:len(dir)] == dir:
+ return filename[len(dir):]
+ else:
+ return None
+
+ def _load(self, filename, uri):
+ self._mutex.acquire()
+ try:
+ try:
+ # try returning from collection one
+ # more time in case concurrent thread already loaded
+ return self._collection[uri]
+ except KeyError:
+ pass
+ try:
+ if self.modulename_callable is not None:
+ module_filename = self.modulename_callable(filename, uri)
+ else:
+ module_filename = None
+ self._collection[uri] = template = Template(
+ uri=uri,
+ filename=posixpath.normpath(filename),
+ lookup=self,
+ module_filename=module_filename,
+ **self.template_args)
+ return template
+ except:
+ # if compilation fails etc, ensure
+ # template is removed from collection,
+ # re-raise
+ self._collection.pop(uri, None)
+ raise
+ finally:
+ self._mutex.release()
+
+ def _check(self, uri, template):
+ if template.filename is None:
+ return template
+
+ try:
+ template_stat = os.stat(template.filename)
+ if template.module._modified_time < \
+ template_stat[stat.ST_MTIME]:
+ self._collection.pop(uri, None)
+ return self._load(template.filename, uri)
+ else:
+ return template
+ except OSError:
+ self._collection.pop(uri, None)
+ raise exceptions.TemplateLookupException(
+ "Cant locate template for uri %r" % uri)
+
+
+ def put_string(self, uri, text):
+ """Place a new :class:`.Template` object into this
+ :class:`.TemplateLookup`, based on the given string of
+ ``text``.
+
+ """
+ self._collection[uri] = Template(
+ text,
+ lookup=self,
+ uri=uri,
+ **self.template_args)
+
+ def put_template(self, uri, template):
+ """Place a new :class:`.Template` object into this
+ :class:`.TemplateLookup`, based on the given
+ :class:`.Template` object.
+
+ """
+ self._collection[uri] = template
+
--- /dev/null
+# mako/parsetree.py
+# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""defines the parse tree components for Mako templates."""
+
+from mako import exceptions, ast, util, filters, compat
+import re
+
+class Node(object):
+ """base class for a Node in the parse tree."""
+
+ def __init__(self, source, lineno, pos, filename):
+ self.source = source
+ self.lineno = lineno
+ self.pos = pos
+ self.filename = filename
+
+ @property
+ def exception_kwargs(self):
+ return {'source': self.source, 'lineno': self.lineno,
+ 'pos': self.pos, 'filename': self.filename}
+
+ def get_children(self):
+ return []
+
+ def accept_visitor(self, visitor):
+ def traverse(node):
+ for n in node.get_children():
+ n.accept_visitor(visitor)
+
+ method = getattr(visitor, "visit" + self.__class__.__name__, traverse)
+ method(self)
+
+class TemplateNode(Node):
+ """a 'container' node that stores the overall collection of nodes."""
+
+ def __init__(self, filename):
+ super(TemplateNode, self).__init__('', 0, 0, filename)
+ self.nodes = []
+ self.page_attributes = {}
+
+ def get_children(self):
+ return self.nodes
+
+ def __repr__(self):
+ return "TemplateNode(%s, %r)" % (
+ util.sorted_dict_repr(self.page_attributes),
+ self.nodes)
+
+class ControlLine(Node):
+ """defines a control line, a line-oriented python line or end tag.
+
+ e.g.::
+
+ % if foo:
+ (markup)
+ % endif
+
+ """
+
+ has_loop_context = False
+
+ def __init__(self, keyword, isend, text, **kwargs):
+ super(ControlLine, self).__init__(**kwargs)
+ self.text = text
+ self.keyword = keyword
+ self.isend = isend
+ self.is_primary = keyword in ['for', 'if', 'while', 'try', 'with']
+ self.nodes = []
+ if self.isend:
+ self._declared_identifiers = []
+ self._undeclared_identifiers = []
+ else:
+ code = ast.PythonFragment(text, **self.exception_kwargs)
+ self._declared_identifiers = code.declared_identifiers
+ self._undeclared_identifiers = code.undeclared_identifiers
+
+ def get_children(self):
+ return self.nodes
+
+ def declared_identifiers(self):
+ return self._declared_identifiers
+
+ def undeclared_identifiers(self):
+ return self._undeclared_identifiers
+
+ def is_ternary(self, keyword):
+ """return true if the given keyword is a ternary keyword
+ for this ControlLine"""
+
+ return keyword in {
+ 'if':set(['else', 'elif']),
+ 'try':set(['except', 'finally']),
+ 'for':set(['else'])
+ }.get(self.keyword, [])
+
+ def __repr__(self):
+ return "ControlLine(%r, %r, %r, %r)" % (
+ self.keyword,
+ self.text,
+ self.isend,
+ (self.lineno, self.pos)
+ )
+
+class Text(Node):
+ """defines plain text in the template."""
+
+ def __init__(self, content, **kwargs):
+ super(Text, self).__init__(**kwargs)
+ self.content = content
+
+ def __repr__(self):
+ return "Text(%r, %r)" % (self.content, (self.lineno, self.pos))
+
+class Code(Node):
+ """defines a Python code block, either inline or module level.
+
+ e.g.::
+
+ inline:
+ <%
+ x = 12
+ %>
+
+ module level:
+ <%!
+ import logger
+ %>
+
+ """
+
+ def __init__(self, text, ismodule, **kwargs):
+ super(Code, self).__init__(**kwargs)
+ self.text = text
+ self.ismodule = ismodule
+ self.code = ast.PythonCode(text, **self.exception_kwargs)
+
+ def declared_identifiers(self):
+ return self.code.declared_identifiers
+
+ def undeclared_identifiers(self):
+ return self.code.undeclared_identifiers
+
+ def __repr__(self):
+ return "Code(%r, %r, %r)" % (
+ self.text,
+ self.ismodule,
+ (self.lineno, self.pos)
+ )
+
+class Comment(Node):
+ """defines a comment line.
+
+ # this is a comment
+
+ """
+
+ def __init__(self, text, **kwargs):
+ super(Comment, self).__init__(**kwargs)
+ self.text = text
+
+ def __repr__(self):
+ return "Comment(%r, %r)" % (self.text, (self.lineno, self.pos))
+
+class Expression(Node):
+ """defines an inline expression.
+
+ ${x+y}
+
+ """
+
+ def __init__(self, text, escapes, **kwargs):
+ super(Expression, self).__init__(**kwargs)
+ self.text = text
+ self.escapes = escapes
+ self.escapes_code = ast.ArgumentList(escapes, **self.exception_kwargs)
+ self.code = ast.PythonCode(text, **self.exception_kwargs)
+
+ def declared_identifiers(self):
+ return []
+
+ def undeclared_identifiers(self):
+ # TODO: make the "filter" shortcut list configurable at parse/gen time
+ return self.code.undeclared_identifiers.union(
+ self.escapes_code.undeclared_identifiers.difference(
+ set(filters.DEFAULT_ESCAPES.keys())
+ )
+ ).difference(self.code.declared_identifiers)
+
+ def __repr__(self):
+ return "Expression(%r, %r, %r)" % (
+ self.text,
+ self.escapes_code.args,
+ (self.lineno, self.pos)
+ )
+
+class _TagMeta(type):
+ """metaclass to allow Tag to produce a subclass according to
+ its keyword"""
+
+ _classmap = {}
+
+ def __init__(cls, clsname, bases, dict):
+ if getattr(cls, '__keyword__', None) is not None:
+ cls._classmap[cls.__keyword__] = cls
+ super(_TagMeta, cls).__init__(clsname, bases, dict)
+
+ def __call__(cls, keyword, attributes, **kwargs):
+ if ":" in keyword:
+ ns, defname = keyword.split(':')
+ return type.__call__(CallNamespaceTag, ns, defname,
+ attributes, **kwargs)
+
+ try:
+ cls = _TagMeta._classmap[keyword]
+ except KeyError:
+ raise exceptions.CompileException(
+ "No such tag: '%s'" % keyword,
+ source=kwargs['source'],
+ lineno=kwargs['lineno'],
+ pos=kwargs['pos'],
+ filename=kwargs['filename']
+ )
+ return type.__call__(cls, keyword, attributes, **kwargs)
+
+class Tag(compat.with_metaclass(_TagMeta, Node)):
+ """abstract base class for tags.
+
+ <%sometag/>
+
+ <%someothertag>
+ stuff
+ </%someothertag>
+
+ """
+ __keyword__ = None
+
+ def __init__(self, keyword, attributes, expressions,
+ nonexpressions, required, **kwargs):
+ """construct a new Tag instance.
+
+ this constructor is not called directly, and is only called
+ by subclasses.
+
+ :param keyword: the tag keyword
+
+ :param attributes: raw dictionary of attribute key/value pairs
+
+ :param expressions: a set of identifiers that are legal attributes,
+ which can also contain embedded expressions
+
+ :param nonexpressions: a set of identifiers that are legal
+ attributes, which cannot contain embedded expressions
+
+ :param \**kwargs:
+ other arguments passed to the Node superclass (lineno, pos)
+
+ """
+ super(Tag, self).__init__(**kwargs)
+ self.keyword = keyword
+ self.attributes = attributes
+ self._parse_attributes(expressions, nonexpressions)
+ missing = [r for r in required if r not in self.parsed_attributes]
+ if len(missing):
+ raise exceptions.CompileException(
+ "Missing attribute(s): %s" %
+ ",".join([repr(m) for m in missing]),
+ **self.exception_kwargs)
+ self.parent = None
+ self.nodes = []
+
+ def is_root(self):
+ return self.parent is None
+
+ def get_children(self):
+ return self.nodes
+
+ def _parse_attributes(self, expressions, nonexpressions):
+ undeclared_identifiers = set()
+ self.parsed_attributes = {}
+ for key in self.attributes:
+ if key in expressions:
+ expr = []
+ for x in re.compile(r'(\${.+?})',
+ re.S).split(self.attributes[key]):
+ m = re.compile(r'^\${(.+?)}$', re.S).match(x)
+ if m:
+ code = ast.PythonCode(m.group(1).rstrip(),
+ **self.exception_kwargs)
+ # note that we aren't discarding "declared_identifiers" here,
+ # as we do elsewhere so that list comprehension-declared
+ # variables aren't counted. As yet we can't find a
+ # condition that requires discarding them here.
+ undeclared_identifiers = \
+ undeclared_identifiers.union(
+ code.undeclared_identifiers)
+ expr.append('(%s)' % m.group(1))
+ else:
+ if x:
+ expr.append(repr(x))
+ self.parsed_attributes[key] = " + ".join(expr) or repr('')
+ elif key in nonexpressions:
+ if re.search(r'\${.+?}', self.attributes[key]):
+ raise exceptions.CompileException(
+ "Attibute '%s' in tag '%s' does not allow embedded "
+ "expressions" % (key, self.keyword),
+ **self.exception_kwargs)
+ self.parsed_attributes[key] = repr(self.attributes[key])
+ else:
+ raise exceptions.CompileException(
+ "Invalid attribute for tag '%s': '%s'" %
+ (self.keyword, key),
+ **self.exception_kwargs)
+ self.expression_undeclared_identifiers = undeclared_identifiers
+
+ def declared_identifiers(self):
+ return []
+
+ def undeclared_identifiers(self):
+ return self.expression_undeclared_identifiers
+
+ def __repr__(self):
+ return "%s(%r, %s, %r, %r)" % (self.__class__.__name__,
+ self.keyword,
+ util.sorted_dict_repr(self.attributes),
+ (self.lineno, self.pos),
+ self.nodes
+ )
+
+class IncludeTag(Tag):
+ __keyword__ = 'include'
+
+ def __init__(self, keyword, attributes, **kwargs):
+ super(IncludeTag, self).__init__(
+ keyword,
+ attributes,
+ ('file', 'import', 'args'),
+ (), ('file',), **kwargs)
+ self.page_args = ast.PythonCode(
+ "__DUMMY(%s)" % attributes.get('args', ''),
+ **self.exception_kwargs)
+
+ def declared_identifiers(self):
+ return []
+
+ def undeclared_identifiers(self):
+ identifiers = self.page_args.undeclared_identifiers.\
+ difference(set(["__DUMMY"])).\
+ difference(self.page_args.declared_identifiers)
+ return identifiers.union(super(IncludeTag, self).
+ undeclared_identifiers())
+
+class NamespaceTag(Tag):
+ __keyword__ = 'namespace'
+
+ def __init__(self, keyword, attributes, **kwargs):
+ super(NamespaceTag, self).__init__(
+ keyword, attributes,
+ ('file',),
+ ('name','inheritable',
+ 'import','module'),
+ (), **kwargs)
+
+ self.name = attributes.get('name', '__anon_%s' % hex(abs(id(self))))
+ if not 'name' in attributes and not 'import' in attributes:
+ raise exceptions.CompileException(
+ "'name' and/or 'import' attributes are required "
+ "for <%namespace>",
+ **self.exception_kwargs)
+ if 'file' in attributes and 'module' in attributes:
+ raise exceptions.CompileException(
+ "<%namespace> may only have one of 'file' or 'module'",
+ **self.exception_kwargs
+ )
+
+ def declared_identifiers(self):
+ return []
+
+class TextTag(Tag):
+ __keyword__ = 'text'
+
+ def __init__(self, keyword, attributes, **kwargs):
+ super(TextTag, self).__init__(
+ keyword,
+ attributes, (),
+ ('filter',), (), **kwargs)
+ self.filter_args = ast.ArgumentList(
+ attributes.get('filter', ''),
+ **self.exception_kwargs)
+
+ def undeclared_identifiers(self):
+ return self.filter_args.\
+ undeclared_identifiers.\
+ difference(filters.DEFAULT_ESCAPES.keys()).union(
+ self.expression_undeclared_identifiers
+ )
+
+class DefTag(Tag):
+ __keyword__ = 'def'
+
+ def __init__(self, keyword, attributes, **kwargs):
+ expressions = ['buffered', 'cached'] + [
+ c for c in attributes if c.startswith('cache_')]
+
+
+ super(DefTag, self).__init__(
+ keyword,
+ attributes,
+ expressions,
+ ('name', 'filter', 'decorator'),
+ ('name',),
+ **kwargs)
+ name = attributes['name']
+ if re.match(r'^[\w_]+$', name):
+ raise exceptions.CompileException(
+ "Missing parenthesis in %def",
+ **self.exception_kwargs)
+ self.function_decl = ast.FunctionDecl("def " + name + ":pass",
+ **self.exception_kwargs)
+ self.name = self.function_decl.funcname
+ self.decorator = attributes.get('decorator', '')
+ self.filter_args = ast.ArgumentList(
+ attributes.get('filter', ''),
+ **self.exception_kwargs)
+
+ is_anonymous = False
+ is_block = False
+
+ @property
+ def funcname(self):
+ return self.function_decl.funcname
+
+ def get_argument_expressions(self, **kw):
+ return self.function_decl.get_argument_expressions(**kw)
+
+ def declared_identifiers(self):
+ return self.function_decl.allargnames
+
+ def undeclared_identifiers(self):
+ res = []
+ for c in self.function_decl.defaults:
+ res += list(ast.PythonCode(c, **self.exception_kwargs).
+ undeclared_identifiers)
+ return set(res).union(
+ self.filter_args.\
+ undeclared_identifiers.\
+ difference(filters.DEFAULT_ESCAPES.keys())
+ ).union(
+ self.expression_undeclared_identifiers
+ ).difference(
+ self.function_decl.allargnames
+ )
+
+class BlockTag(Tag):
+ __keyword__ = 'block'
+
+ def __init__(self, keyword, attributes, **kwargs):
+ expressions = ['buffered', 'cached', 'args'] + [
+ c for c in attributes if c.startswith('cache_')]
+
+ super(BlockTag, self).__init__(
+ keyword,
+ attributes,
+ expressions,
+ ('name','filter', 'decorator'),
+ (),
+ **kwargs)
+ name = attributes.get('name')
+ if name and not re.match(r'^[\w_]+$',name):
+ raise exceptions.CompileException(
+ "%block may not specify an argument signature",
+ **self.exception_kwargs)
+ if not name and attributes.get('args', None):
+ raise exceptions.CompileException(
+ "Only named %blocks may specify args",
+ **self.exception_kwargs
+ )
+ self.body_decl = ast.FunctionArgs(attributes.get('args', ''),
+ **self.exception_kwargs)
+
+ self.name = name
+ self.decorator = attributes.get('decorator', '')
+ self.filter_args = ast.ArgumentList(
+ attributes.get('filter', ''),
+ **self.exception_kwargs)
+
+
+ is_block = True
+
+ @property
+ def is_anonymous(self):
+ return self.name is None
+
+ @property
+ def funcname(self):
+ return self.name or "__M_anon_%d" % (self.lineno, )
+
+ def get_argument_expressions(self, **kw):
+ return self.body_decl.get_argument_expressions(**kw)
+
+ def declared_identifiers(self):
+ return self.body_decl.allargnames
+
+ def undeclared_identifiers(self):
+ return (self.filter_args.\
+ undeclared_identifiers.\
+ difference(filters.DEFAULT_ESCAPES.keys())
+ ).union(self.expression_undeclared_identifiers)
+
+
+
+class CallTag(Tag):
+ __keyword__ = 'call'
+
+ def __init__(self, keyword, attributes, **kwargs):
+ super(CallTag, self).__init__(keyword, attributes,
+ ('args',), ('expr',), ('expr',), **kwargs)
+ self.expression = attributes['expr']
+ self.code = ast.PythonCode(self.expression, **self.exception_kwargs)
+ self.body_decl = ast.FunctionArgs(attributes.get('args', ''),
+ **self.exception_kwargs)
+
+ def declared_identifiers(self):
+ return self.code.declared_identifiers.union(self.body_decl.allargnames)
+
+ def undeclared_identifiers(self):
+ return self.code.undeclared_identifiers.\
+ difference(self.code.declared_identifiers)
+
+class CallNamespaceTag(Tag):
+
+ def __init__(self, namespace, defname, attributes, **kwargs):
+ super(CallNamespaceTag, self).__init__(
+ namespace + ":" + defname,
+ attributes,
+ tuple(attributes.keys()) + ('args', ),
+ (),
+ (),
+ **kwargs)
+
+ self.expression = "%s.%s(%s)" % (
+ namespace,
+ defname,
+ ",".join(["%s=%s" % (k, v) for k, v in
+ self.parsed_attributes.items()
+ if k != 'args'])
+ )
+ self.code = ast.PythonCode(self.expression, **self.exception_kwargs)
+ self.body_decl = ast.FunctionArgs(
+ attributes.get('args', ''),
+ **self.exception_kwargs)
+
+ def declared_identifiers(self):
+ return self.code.declared_identifiers.union(self.body_decl.allargnames)
+
+ def undeclared_identifiers(self):
+ return self.code.undeclared_identifiers.\
+ difference(self.code.declared_identifiers)
+
+class InheritTag(Tag):
+ __keyword__ = 'inherit'
+
+ def __init__(self, keyword, attributes, **kwargs):
+ super(InheritTag, self).__init__(
+ keyword, attributes,
+ ('file',), (), ('file',), **kwargs)
+
+class PageTag(Tag):
+ __keyword__ = 'page'
+
+ def __init__(self, keyword, attributes, **kwargs):
+ expressions = ['cached', 'args', 'expression_filter', 'enable_loop'] + [
+ c for c in attributes if c.startswith('cache_')]
+
+ super(PageTag, self).__init__(
+ keyword,
+ attributes,
+ expressions,
+ (),
+ (),
+ **kwargs)
+ self.body_decl = ast.FunctionArgs(attributes.get('args', ''),
+ **self.exception_kwargs)
+ self.filter_args = ast.ArgumentList(
+ attributes.get('expression_filter', ''),
+ **self.exception_kwargs)
+
+ def declared_identifiers(self):
+ return self.body_decl.allargnames
+
+
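+
+# Illustrative sketch only, not part of Mako: a minimal example of the
+# accept_visitor() dispatch defined on Node above, using invented names
+# (_ExampleTextCollector, _example_visit). A visitor only needs
+# ``visit<ClassName>`` methods for the node types it cares about; any other
+# node simply has its children traversed.
+class _ExampleTextCollector(object):
+    def __init__(self):
+        self.texts = []
+
+    def visitText(self, node):
+        self.texts.append(node.content)
+
+def _example_visit():
+    root = TemplateNode("example.html")
+    kw = dict(source="hello", lineno=1, pos=1, filename="example.html")
+    root.nodes.append(Text("hello", **kw))
+    collector = _ExampleTextCollector()
+    root.accept_visitor(collector)
+    return collector.texts  # ["hello"]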
--- /dev/null
+# mako/pygen.py
+# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""utilities for generating and formatting literal Python code."""
+
+import re
+from mako import exceptions
+
+class PythonPrinter(object):
+ def __init__(self, stream):
+ # indentation counter
+ self.indent = 0
+
+ # a stack storing information about why we incremented
+ # the indentation counter, to help us determine if we
+ # should decrement it
+ self.indent_detail = []
+
+ # the string of whitespace multiplied by the indent
+ # counter to produce a line
+ self.indentstring = " "
+
+ # the stream we are writing to
+ self.stream = stream
+
+ # current line number
+ self.lineno = 1
+
+ # a list of lines that represents a buffered "block" of code,
+ # which can be later printed relative to an indent level
+ self.line_buffer = []
+
+ self.in_indent_lines = False
+
+ self._reset_multi_line_flags()
+
+ # mapping of generated python lines to template
+ # source lines
+ self.source_map = {}
+
+ def _update_lineno(self, num):
+ self.lineno += num
+
+ def start_source(self, lineno):
+ if self.lineno not in self.source_map:
+ self.source_map[self.lineno] = lineno
+
+ def write_blanks(self, num):
+ self.stream.write("\n" * num)
+ self._update_lineno(num)
+
+ def write_indented_block(self, block):
+ """print a line or lines of python which already contain indentation.
+
+ The indentation of the total block of lines will be adjusted to that of
+ the current indent level."""
+ self.in_indent_lines = False
+ for l in re.split(r'\r?\n', block):
+ self.line_buffer.append(l)
+ self._update_lineno(1)
+
+ def writelines(self, *lines):
+ """print a series of lines of python."""
+ for line in lines:
+ self.writeline(line)
+
+ def writeline(self, line):
+ """print a line of python, indenting it according to the current
+ indent level.
+
+ this also adjusts the indentation counter according to the
+ content of the line.
+
+ """
+
+ if not self.in_indent_lines:
+ self._flush_adjusted_lines()
+ self.in_indent_lines = True
+
+ if (line is None or
+ re.match(r"^\s*#",line) or
+ re.match(r"^\s*$", line)
+ ):
+ hastext = False
+ else:
+ hastext = True
+
+ is_comment = line and len(line) and line[0] == '#'
+
+ # see if this line should decrease the indentation level
+ if (not is_comment and
+ (not hastext or self._is_unindentor(line))
+ ):
+
+ if self.indent > 0:
+ self.indent -= 1
+ # if the indent_detail stack is empty, the user
+ # probably put extra closures - the resulting
+ # module won't compile.
+ if len(self.indent_detail) == 0:
+ raise exceptions.SyntaxException(
+ "Too many whitespace closures")
+ self.indent_detail.pop()
+
+ if line is None:
+ return
+
+ # write the line
+ self.stream.write(self._indent_line(line) + "\n")
+ self._update_lineno(len(line.split("\n")))
+
+ # see if this line should increase the indentation level.
+ # note that a line can both decrease (before printing) and
+ # then increase (after printing) the indentation level.
+
+ if re.search(r":[ \t]*(?:#.*)?$", line):
+ # increment indentation count, and also
+ # keep track of what the keyword was that indented us,
+ # if it is a python compound statement keyword
+ # where we might have to look for an "unindent" keyword
+ match = re.match(r"^\s*(if|try|elif|while|for|with)", line)
+ if match:
+ # it's a "compound" keyword, so we will check for "unindentors"
+ indentor = match.group(1)
+ self.indent += 1
+ self.indent_detail.append(indentor)
+ else:
+ indentor = None
+ # it's not a "compound" keyword, but let's also
+ # test for valid Python keywords that might be indenting us;
+ # otherwise assume it's a non-indenting line
+ m2 = re.match(r"^\s*(def|class|else|elif|except|finally)",
+ line)
+ if m2:
+ self.indent += 1
+ self.indent_detail.append(indentor)
+
+ def close(self):
+ """close this printer, flushing any remaining lines."""
+ self._flush_adjusted_lines()
+
+ def _is_unindentor(self, line):
+ """return true if the given line is an 'unindentor',
+ relative to the last 'indent' event received.
+
+ """
+
+ # no indentation detail has been pushed on; return False
+ if len(self.indent_detail) == 0:
+ return False
+
+ indentor = self.indent_detail[-1]
+
+ # the last indent keyword we grabbed is not a
+ # compound statement keyword; return False
+ if indentor is None:
+ return False
+
+ # if the current line doesn't have one of the "unindentor" keywords,
+ # return False
+ match = re.match(r"^\s*(else|elif|except|finally).*\:", line)
+ if not match:
+ return False
+
+ # the whitespace matches up, we have a compound indentor,
+ # and this line has an unindentor; this
+ # is probably good enough
+ return True
+
+ # should we decide that it's not good enough, here's
+ # more stuff to check.
+ #keyword = match.group(1)
+
+ # match the original indent keyword
+ #for crit in [
+ # (r'if|elif', r'else|elif'),
+ # (r'try', r'except|finally|else'),
+ # (r'while|for', r'else'),
+ #]:
+ # if re.match(crit[0], indentor) and re.match(crit[1], keyword):
+ # return True
+
+ #return False
+
+ def _indent_line(self, line, stripspace=''):
+ """indent the given line according to the current indent level.
+
+ stripspace is a string of space that will be truncated from the
+ start of the line before indenting."""
+
+ return re.sub(r"^%s" % stripspace, self.indentstring
+ * self.indent, line)
+
+ def _reset_multi_line_flags(self):
+ """reset the flags which would indicate we are in a backslashed
+ or triple-quoted section."""
+
+ self.backslashed, self.triplequoted = False, False
+
+ def _in_multi_line(self, line):
+ """return true if the given line is part of a multi-line block,
+ via backslash or triple-quote."""
+
+ # we are only looking for explicitly joined lines here, not
+ # implicit ones (i.e. brackets, braces etc.). this is just to
+ # guard against the possibility of modifying the space inside of
+ # a literal multiline string with unfortunately placed
+ # whitespace
+
+ current_state = (self.backslashed or self.triplequoted)
+
+ if re.search(r"\\$", line):
+ self.backslashed = True
+ else:
+ self.backslashed = False
+
+ triples = len(re.findall(r"\"\"\"|\'\'\'", line))
+ if triples == 1 or triples % 2 != 0:
+ self.triplequoted = not self.triplequoted
+
+ return current_state
+
+ def _flush_adjusted_lines(self):
+ stripspace = None
+ self._reset_multi_line_flags()
+
+ for entry in self.line_buffer:
+ if self._in_multi_line(entry):
+ self.stream.write(entry + "\n")
+ else:
+ entry = entry.expandtabs()
+ if stripspace is None and re.search(r"^[ \t]*[^# \t]", entry):
+ stripspace = re.match(r"^([ \t]*)", entry).group(1)
+ self.stream.write(self._indent_line(entry, stripspace) + "\n")
+
+ self.line_buffer = []
+ self._reset_multi_line_flags()
+
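+# Illustrative sketch only, not part of Mako: how PythonPrinter derives
+# indentation from the lines it is fed. ``_example_printer`` is an invented
+# name, and io.StringIO assumes Python 3; Mako's codegen passes in its own
+# buffer in practice.
+def _example_printer():
+    import io
+    buf = io.StringIO()
+    printer = PythonPrinter(buf)
+    printer.writeline("def render(context):")  # trailing ':' pushes an indent
+    printer.writeline("return 42")             # emitted one level deeper
+    printer.writeline(None)                    # None acts as a dedent marker
+    printer.close()
+    return buf.getvalue()  # "def render(context):\n    return 42\n"
+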
+
+def adjust_whitespace(text):
+ """remove the left-whitespace margin of a block of Python code."""
+
+ state = [False, False]
+ (backslashed, triplequoted) = (0, 1)
+
+ def in_multi_line(line):
+ start_state = (state[backslashed] or state[triplequoted])
+
+ if re.search(r"\\$", line):
+ state[backslashed] = True
+ else:
+ state[backslashed] = False
+
+ def match(reg, t):
+ m = re.match(reg, t)
+ if m:
+ return m, t[len(m.group(0)):]
+ else:
+ return None, t
+
+ while line:
+ if state[triplequoted]:
+ m, line = match(r"%s" % state[triplequoted], line)
+ if m:
+ state[triplequoted] = False
+ else:
+ m, line = match(r".*?(?=%s|$)" % state[triplequoted], line)
+ else:
+ m, line = match(r'#', line)
+ if m:
+ return start_state
+
+ m, line = match(r"\"\"\"|\'\'\'", line)
+ if m:
+ state[triplequoted] = m.group(0)
+ continue
+
+ m, line = match(r".*?(?=\"\"\"|\'\'\'|#|$)", line)
+
+ return start_state
+
+ def _indent_line(line, stripspace=''):
+ return re.sub(r"^%s" % stripspace, '', line)
+
+ lines = []
+ stripspace = None
+
+ for line in re.split(r'\r?\n', text):
+ if in_multi_line(line):
+ lines.append(line)
+ else:
+ line = line.expandtabs()
+ if stripspace is None and re.search(r"^[ \t]*[^# \t]", line):
+ stripspace = re.match(r"^([ \t]*)", line).group(1)
+ lines.append(_indent_line(line, stripspace))
+ return "\n".join(lines)
--- /dev/null
+# mako/pyparser.py
+# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Handles parsing of Python code.
+
+Parsing to AST is done via the _ast module.
+"""
+
+from mako import exceptions, util, compat
+from mako.compat import arg_stringname
+import operator
+
+if compat.py3k:
+ # words that cannot be assigned to (notably
+ # smaller than the total keys in __builtins__)
+ reserved = set(['True', 'False', 'None', 'print'])
+
+ # the "id" attribute on a function node
+ arg_id = operator.attrgetter('arg')
+else:
+ # words that cannot be assigned to (notably
+ # smaller than the total keys in __builtins__)
+ reserved = set(['True', 'False', 'None'])
+
+ # the "id" attribute on a function node
+ arg_id = operator.attrgetter('id')
+
+import _ast
+util.restore__ast(_ast)
+from mako import _ast_util
+
+
+def parse(code, mode='exec', **exception_kwargs):
+ """Parse an expression into AST"""
+
+ try:
+ return _ast_util.parse(code, '<unknown>', mode)
+ except Exception:
+ raise exceptions.SyntaxException(
+ "(%s) %s (%r)" % (
+ compat.exception_as().__class__.__name__,
+ compat.exception_as(),
+ code[0:50]
+ ), **exception_kwargs)
+
+
+class FindIdentifiers(_ast_util.NodeVisitor):
+
+ def __init__(self, listener, **exception_kwargs):
+ self.in_function = False
+ self.in_assign_targets = False
+ self.local_ident_stack = set()
+ self.listener = listener
+ self.exception_kwargs = exception_kwargs
+
+ def _add_declared(self, name):
+ if not self.in_function:
+ self.listener.declared_identifiers.add(name)
+ else:
+ self.local_ident_stack.add(name)
+
+ def visit_ClassDef(self, node):
+ self._add_declared(node.name)
+
+ def visit_Assign(self, node):
+
+ # flip around the visiting of Assign so the expression gets
+ # evaluated first, in the case of a clause like "x=x+5" (x
+ # is undeclared)
+
+ self.visit(node.value)
+ in_a = self.in_assign_targets
+ self.in_assign_targets = True
+ for n in node.targets:
+ self.visit(n)
+ self.in_assign_targets = in_a
+
+ if compat.py3k:
+
+ # ExceptHandler is in Python 2, but this block only works in
+ # Python 3 (and is required there)
+
+ def visit_ExceptHandler(self, node):
+ if node.name is not None:
+ self._add_declared(node.name)
+ if node.type is not None:
+ self.visit(node.type)
+ for statement in node.body:
+ self.visit(statement)
+
+ def visit_Lambda(self, node, *args):
+ self._visit_function(node, True)
+
+ def visit_FunctionDef(self, node):
+ self._add_declared(node.name)
+ self._visit_function(node, False)
+
+ def _expand_tuples(self, args):
+ for arg in args:
+ if isinstance(arg, _ast.Tuple):
+ for n in arg.elts:
+ yield n
+ else:
+ yield arg
+
+ def _visit_function(self, node, islambda):
+
+ # push function state onto stack. don't log any more
+ # identifiers as "declared" until outside of the function,
+ # but keep logging identifiers as "undeclared". track
+ # argument names in each function header so they aren't
+ # counted as "undeclared"
+
+ inf = self.in_function
+ self.in_function = True
+
+ local_ident_stack = self.local_ident_stack
+ self.local_ident_stack = local_ident_stack.union([
+ arg_id(arg) for arg in self._expand_tuples(node.args.args)
+ ])
+ if islambda:
+ self.visit(node.body)
+ else:
+ for n in node.body:
+ self.visit(n)
+ self.in_function = inf
+ self.local_ident_stack = local_ident_stack
+
+ def visit_For(self, node):
+
+ # flip around visit
+
+ self.visit(node.iter)
+ self.visit(node.target)
+ for statement in node.body:
+ self.visit(statement)
+ for statement in node.orelse:
+ self.visit(statement)
+
+ def visit_Name(self, node):
+ if isinstance(node.ctx, _ast.Store):
+ # this is equivalent to visit_AssName in
+ # compiler
+ self._add_declared(node.id)
+ elif node.id not in reserved and node.id \
+ not in self.listener.declared_identifiers and node.id \
+ not in self.local_ident_stack:
+ self.listener.undeclared_identifiers.add(node.id)
+
+ def visit_Import(self, node):
+ for name in node.names:
+ if name.asname is not None:
+ self._add_declared(name.asname)
+ else:
+ self._add_declared(name.name.split('.')[0])
+
+ def visit_ImportFrom(self, node):
+ for name in node.names:
+ if name.asname is not None:
+ self._add_declared(name.asname)
+ else:
+ if name.name == '*':
+ raise exceptions.CompileException(
+ "'import *' is not supported, since all identifier "
+ "names must be explicitly declared. Please use the "
+ "form 'from <modulename> import <name1>, <name2>, "
+ "...' instead.", **self.exception_kwargs)
+ self._add_declared(name.name)
+
+
+class FindTuple(_ast_util.NodeVisitor):
+
+ def __init__(self, listener, code_factory, **exception_kwargs):
+ self.listener = listener
+ self.exception_kwargs = exception_kwargs
+ self.code_factory = code_factory
+
+ def visit_Tuple(self, node):
+ for n in node.elts:
+ p = self.code_factory(n, **self.exception_kwargs)
+ self.listener.codeargs.append(p)
+ self.listener.args.append(ExpressionGenerator(n).value())
+ self.listener.declared_identifiers = \
+ self.listener.declared_identifiers.union(
+ p.declared_identifiers)
+ self.listener.undeclared_identifiers = \
+ self.listener.undeclared_identifiers.union(
+ p.undeclared_identifiers)
+
+
+class ParseFunc(_ast_util.NodeVisitor):
+
+ def __init__(self, listener, **exception_kwargs):
+ self.listener = listener
+ self.exception_kwargs = exception_kwargs
+
+ def visit_FunctionDef(self, node):
+ self.listener.funcname = node.name
+
+ argnames = [arg_id(arg) for arg in node.args.args]
+ if node.args.vararg:
+ argnames.append(arg_stringname(node.args.vararg))
+
+ if compat.py2k:
+ # kw-only args don't exist in Python 2
+ kwargnames = []
+ else:
+ kwargnames = [arg_id(arg) for arg in node.args.kwonlyargs]
+ if node.args.kwarg:
+ kwargnames.append(arg_stringname(node.args.kwarg))
+ self.listener.argnames = argnames
+ self.listener.defaults = node.args.defaults # ast
+ self.listener.kwargnames = kwargnames
+ if compat.py2k:
+ self.listener.kwdefaults = []
+ else:
+ self.listener.kwdefaults = node.args.kw_defaults
+ self.listener.varargs = node.args.vararg
+ self.listener.kwargs = node.args.kwarg
+
+class ExpressionGenerator(object):
+
+ def __init__(self, astnode):
+ self.generator = _ast_util.SourceGenerator(' ' * 4)
+ self.generator.visit(astnode)
+
+ def value(self):
+ return ''.join(self.generator.result)
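+
+# Illustrative sketch only, not part of Mako: driving FindIdentifiers by hand
+# with a hypothetical listener object; mako.ast.PythonCode wraps this same
+# pattern. ``_ExampleListener`` and ``_example_identifiers`` are invented
+# names.
+class _ExampleListener(object):
+    def __init__(self):
+        self.declared_identifiers = set()
+        self.undeclared_identifiers = set()
+
+def _example_identifiers():
+    listener = _ExampleListener()
+    tree = parse("x = y + 1")
+    FindIdentifiers(listener).visit(tree)
+    # declared: {'x'}, undeclared: {'y'}
+    return listener.declared_identifiers, listener.undeclared_identifiers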
--- /dev/null
+# mako/runtime.py
+# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""provides runtime services for templates, including Context,
+Namespace, and various helper functions."""
+
+from mako import exceptions, util, compat
+from mako.compat import compat_builtins
+import sys
+
+
+class Context(object):
+ """Provides runtime namespace, output buffer, and various
+ callstacks for templates.
+
+ See :ref:`runtime_toplevel` for detail on the usage of
+ :class:`.Context`.
+
+ """
+
+ def __init__(self, buffer, **data):
+ self._buffer_stack = [buffer]
+
+ self._data = data
+
+ self._kwargs = data.copy()
+ self._with_template = None
+ self._outputting_as_unicode = None
+ self.namespaces = {}
+
+ # "capture" function which proxies to the
+ # generic "capture" function
+ self._data['capture'] = compat.partial(capture, self)
+
+ # "caller" stack used by def calls with content
+ self.caller_stack = self._data['caller'] = CallerStack()
+
+ def _set_with_template(self, t):
+ self._with_template = t
+ illegal_names = t.reserved_names.intersection(self._data)
+ if illegal_names:
+ raise exceptions.NameConflictError(
+ "Reserved words passed to render(): %s" %
+ ", ".join(illegal_names))
+
+ @property
+ def lookup(self):
+ """Return the :class:`.TemplateLookup` associated
+ with this :class:`.Context`.
+
+ """
+ return self._with_template.lookup
+
+ @property
+ def kwargs(self):
+ """Return the dictionary of top level keyword arguments associated
+ with this :class:`.Context`.
+
+ This dictionary only includes the top-level arguments passed to
+ :meth:`.Template.render`. It does not include names produced within
+ the template execution such as local variable names or special names
+ such as ``self``, ``next``, etc.
+
+ The purpose of this dictionary is primarily for the case that
+ a :class:`.Template` accepts arguments via its ``<%page>`` tag,
+ which are normally expected to be passed via :meth:`.Template.render`,
+ except the template is being called in an inheritance context,
+ using the ``body()`` method. :attr:`.Context.kwargs` can then be
+ used to propagate these arguments to the inheriting template::
+
+ ${next.body(**context.kwargs)}
+
+ """
+ return self._kwargs.copy()
+
+ def push_caller(self, caller):
+ """Push a ``caller`` callable onto the callstack for
+ this :class:`.Context`."""
+
+
+ self.caller_stack.append(caller)
+
+ def pop_caller(self):
+ """Pop a ``caller`` callable onto the callstack for this
+ :class:`.Context`."""
+
+ del self.caller_stack[-1]
+
+ def keys(self):
+ """Return a list of all names established in this :class:`.Context`."""
+
+ return list(self._data.keys())
+
+ def __getitem__(self, key):
+ if key in self._data:
+ return self._data[key]
+ else:
+ return compat_builtins.__dict__[key]
+
+ def _push_writer(self):
+ """push a capturing buffer onto this Context and return
+ the new writer function."""
+
+ buf = util.FastEncodingBuffer()
+ self._buffer_stack.append(buf)
+ return buf.write
+
+ def _pop_buffer_and_writer(self):
+ """pop the most recent capturing buffer from this Context
+ and return the current writer after the pop.
+
+ """
+
+ buf = self._buffer_stack.pop()
+ return buf, self._buffer_stack[-1].write
+
+ def _push_buffer(self):
+ """push a capturing buffer onto this Context."""
+
+ self._push_writer()
+
+ def _pop_buffer(self):
+ """pop the most recent capturing buffer from this Context."""
+
+ return self._buffer_stack.pop()
+
+ def get(self, key, default=None):
+ """Return a value from this :class:`.Context`."""
+
+ return self._data.get(key, compat_builtins.__dict__.get(key, default))
+
+ def write(self, string):
+ """Write a string to this :class:`.Context` object's
+ underlying output buffer."""
+
+ self._buffer_stack[-1].write(string)
+
+ def writer(self):
+ """Return the current writer function."""
+
+ return self._buffer_stack[-1].write
+
+ def _copy(self):
+ c = Context.__new__(Context)
+ c._buffer_stack = self._buffer_stack
+ c._data = self._data.copy()
+ c._kwargs = self._kwargs
+ c._with_template = self._with_template
+ c._outputting_as_unicode = self._outputting_as_unicode
+ c.namespaces = self.namespaces
+ c.caller_stack = self.caller_stack
+ return c
+
+ def _locals(self, d):
+ """Create a new :class:`.Context` with a copy of this
+ :class:`.Context`'s current state,
+ updated with the given dictionary.
+
+ The :attr:`.Context.kwargs` collection remains
+ unaffected.
+
+
+ """
+
+ if not d:
+ return self
+ c = self._copy()
+ c._data.update(d)
+ return c
+
+ def _clean_inheritance_tokens(self):
+ """create a new copy of this :class:`.Context`. with
+ tokens related to inheritance state removed."""
+
+ c = self._copy()
+ x = c._data
+ x.pop('self', None)
+ x.pop('parent', None)
+ x.pop('next', None)
+ return c
+
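+# Illustrative sketch only, not part of Mako: Context objects are normally
+# created inside Template.render(), but one can be driven by hand to show the
+# buffer/data split. ``_example_context`` is an invented name.
+def _example_context():
+    buf = util.FastEncodingBuffer(as_unicode=True)
+    ctx = Context(buf, greeting="hello")
+    ctx.write(ctx.get("greeting", "hi"))  # data lookup, then builtins, then default
+    return buf.getvalue()  # "hello"
+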
+class CallerStack(list):
+ def __init__(self):
+ self.nextcaller = None
+
+ def __nonzero__(self):
+ return self.__bool__()
+
+ def __bool__(self):
+ return len(self) and self._get_caller() and True or False
+
+ def _get_caller(self):
+ # this method can be removed once
+ # codegen MAGIC_NUMBER moves past 7
+ return self[-1]
+
+ def __getattr__(self, key):
+ return getattr(self._get_caller(), key)
+
+ def _push_frame(self):
+ frame = self.nextcaller or None
+ self.append(frame)
+ self.nextcaller = None
+ return frame
+
+ def _pop_frame(self):
+ self.nextcaller = self.pop()
+
+
+class Undefined(object):
+ """Represents an undefined value in a template.
+
+ All template modules have a constant value
+ ``UNDEFINED`` present which is an instance of this
+ object.
+
+ """
+ def __str__(self):
+ raise NameError("Undefined")
+
+ def __nonzero__(self):
+ return self.__bool__()
+
+ def __bool__(self):
+ return False
+
+UNDEFINED = Undefined()
+
+class LoopStack(object):
+ """a stack for LoopContexts that implements the context manager protocol
+ to automatically pop off the top of the stack on context exit
+ """
+
+ def __init__(self):
+ self.stack = []
+
+ def _enter(self, iterable):
+ self._push(iterable)
+ return self._top
+
+ def _exit(self):
+ self._pop()
+ return self._top
+
+ @property
+ def _top(self):
+ if self.stack:
+ return self.stack[-1]
+ else:
+ return self
+
+ def _pop(self):
+ return self.stack.pop()
+
+ def _push(self, iterable):
+ new = LoopContext(iterable)
+ if self.stack:
+ new.parent = self.stack[-1]
+ return self.stack.append(new)
+
+ def __getattr__(self, key):
+ raise exceptions.RuntimeException("No loop context is established")
+
+ def __iter__(self):
+ return iter(self._top)
+
+
+class LoopContext(object):
+ """A magic loop variable.
+ Automatically accessible in any ``% for`` block.
+
+ See the section :ref:`loop_context` for usage
+ notes.
+
+ :attr:`parent` -> :class:`.LoopContext` or ``None``
+ The parent loop, if one exists.
+ :attr:`index` -> `int`
+ The 0-based iteration count.
+ :attr:`reverse_index` -> `int`
+ The number of iterations remaining.
+ :attr:`first` -> `bool`
+ ``True`` on the first iteration, ``False`` otherwise.
+ :attr:`last` -> `bool`
+ ``True`` on the last iteration, ``False`` otherwise.
+ :attr:`even` -> `bool`
+ ``True`` when ``index`` is even.
+ :attr:`odd` -> `bool`
+ ``True`` when ``index`` is odd.
+ """
+
+ def __init__(self, iterable):
+ self._iterable = iterable
+ self.index = 0
+ self.parent = None
+
+ def __iter__(self):
+ for i in self._iterable:
+ yield i
+ self.index += 1
+
+ @util.memoized_instancemethod
+ def __len__(self):
+ return len(self._iterable)
+
+ @property
+ def reverse_index(self):
+ return len(self) - self.index - 1
+
+ @property
+ def first(self):
+ return self.index == 0
+
+ @property
+ def last(self):
+ return self.index == len(self) - 1
+
+ @property
+ def even(self):
+ return not self.odd
+
+ @property
+ def odd(self):
+ return bool(self.index % 2)
+
+ def cycle(self, *values):
+ """Cycle through values as the loop progresses.
+ """
+ if not values:
+ raise ValueError("You must provide values to cycle through")
+ return values[self.index % len(values)]
+
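+# Illustrative sketch only, not part of Mako: LoopContext is what templates
+# see as ``loop`` inside ``% for`` blocks. ``_example_loop`` is an invented
+# name; the import is local to avoid a circular import at module load time.
+def _example_loop():
+    from mako.template import Template
+    t = Template(
+        "% for name in ('a', 'b', 'c'):\n"
+        "${loop.index}:${name}:${loop.cycle('odd', 'even')}\n"
+        "% endfor\n")
+    return t.render()  # "0:a:odd\n1:b:even\n2:c:odd\n"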
+
+class _NSAttr(object):
+ def __init__(self, parent):
+ self.__parent = parent
+ def __getattr__(self, key):
+ ns = self.__parent
+ while ns:
+ if hasattr(ns.module, key):
+ return getattr(ns.module, key)
+ else:
+ ns = ns.inherits
+ raise AttributeError(key)
+
+class Namespace(object):
+ """Provides access to collections of rendering methods, which
+ can be local, from other templates, or from imported modules.
+
+ To access a particular rendering method referenced by a
+ :class:`.Namespace`, use plain attribute access:
+
+ .. sourcecode:: mako
+
+ ${some_namespace.foo(x, y, z)}
+
+ :class:`.Namespace` also contains several built-in attributes
+ described here.
+
+ """
+
+ def __init__(self, name, context,
+ callables=None, inherits=None,
+ populate_self=True, calling_uri=None):
+ self.name = name
+ self.context = context
+ self.inherits = inherits
+ if callables is not None:
+ self.callables = dict([(c.__name__, c) for c in callables])
+
+ callables = ()
+
+ module = None
+ """The Python module referenced by this :class:`.Namespace`.
+
+ If the namespace references a :class:`.Template`, then
+ this module is the equivalent of ``template.module``,
+ i.e. the generated module for the template.
+
+ """
+
+ template = None
+ """The :class:`.Template` object referenced by this
+ :class:`.Namespace`, if any.
+
+ """
+
+ context = None
+ """The :class:`.Context` object for this :class:`.Namespace`.
+
+ Namespaces are often created with copies of contexts that
+ contain slightly different data, particularly in inheritance
+ scenarios. Using the :class:`.Context` off of a :class:`.Namespace` one
+ can traverse an entire chain of templates that inherit from
+ one another.
+
+ """
+
+ filename = None
+ """The path of the filesystem file used for this
+ :class:`.Namespace`'s module or template.
+
+ If this is a pure module-based
+ :class:`.Namespace`, this evaluates to ``module.__file__``. If a
+ template-based namespace, it evaluates to the original
+ template file location.
+
+ """
+
+ uri = None
+ """The URI for this :class:`.Namespace`'s template.
+
+ I.e. whatever was sent to :meth:`.TemplateLookup.get_template()`.
+
+ This is the equivalent of :attr:`.Template.uri`.
+
+ """
+
+ _templateuri = None
+
+ @util.memoized_property
+ def attr(self):
+ """Access module level attributes by name.
+
+ This accessor allows templates to supply "scalar"
+ attributes which are particularly handy in inheritance
+ relationships.
+
+ .. seealso::
+
+ :ref:`inheritance_attr`
+
+ :ref:`namespace_attr_for_includes`
+
+ """
+ return _NSAttr(self)
+
+ def get_namespace(self, uri):
+ """Return a :class:`.Namespace` corresponding to the given ``uri``.
+
+ If the given ``uri`` is a relative URI (i.e. it does not
+ contain a leading slash ``/``), the ``uri`` is adjusted to
+ be relative to the ``uri`` of the namespace itself. This
+ method is therefore mostly useful off of the built-in
+ ``local`` namespace, described in :ref:`namespace_local`.
+
+ In most cases, a template wouldn't need this function, and
+ should instead use the ``<%namespace>`` tag to load
+ namespaces. However, since all ``<%namespace>`` tags are
+ evaluated before the body of a template ever runs,
+ this method can be used to locate namespaces using
+ expressions that were generated within the body code of
+ the template, or to conditionally use a particular
+ namespace.
+
+ """
+ key = (self, uri)
+ if key in self.context.namespaces:
+ return self.context.namespaces[key]
+ else:
+ ns = TemplateNamespace(uri, self.context._copy(),
+ templateuri=uri,
+ calling_uri=self._templateuri)
+ self.context.namespaces[key] = ns
+ return ns
+
+ def get_template(self, uri):
+ """Return a :class:`.Template` from the given ``uri``.
+
+ The ``uri`` resolution is relative to the ``uri`` of this
+ :class:`.Namespace` object's :class:`.Template`.
+
+ """
+ return _lookup_template(self.context, uri, self._templateuri)
+
+ def get_cached(self, key, **kwargs):
+ """Return a value from the :class:`.Cache` referenced by this
+ :class:`.Namespace` object's :class:`.Template`.
+
+ The advantage to this method versus direct access to the
+ :class:`.Cache` is that the configuration parameters
+ declared in ``<%page>`` take effect here, thereby calling
+ up the same configured backend as that configured
+ by ``<%page>``.
+
+ """
+
+ return self.cache.get(key, **kwargs)
+
+ @property
+ def cache(self):
+ """Return the :class:`.Cache` object referenced
+ by this :class:`.Namespace` object's
+ :class:`.Template`.
+
+ """
+ return self.template.cache
+
+ def include_file(self, uri, **kwargs):
+ """Include a file at the given ``uri``."""
+
+ _include_file(self.context, uri, self._templateuri, **kwargs)
+
+ def _populate(self, d, l):
+ for ident in l:
+ if ident == '*':
+ for (k, v) in self._get_star():
+ d[k] = v
+ else:
+ d[ident] = getattr(self, ident)
+
+ def _get_star(self):
+ if self.callables:
+ for key in self.callables:
+ yield (key, self.callables[key])
+
+ def __getattr__(self, key):
+ if key in self.callables:
+ val = self.callables[key]
+ elif self.inherits:
+ val = getattr(self.inherits, key)
+ else:
+ raise AttributeError(
+ "Namespace '%s' has no member '%s'" %
+ (self.name, key))
+ setattr(self, key, val)
+ return val
+
+class TemplateNamespace(Namespace):
+ """A :class:`.Namespace` specific to a :class:`.Template` instance."""
+
+ def __init__(self, name, context, template=None, templateuri=None,
+ callables=None, inherits=None,
+ populate_self=True, calling_uri=None):
+ self.name = name
+ self.context = context
+ self.inherits = inherits
+ if callables is not None:
+ self.callables = dict([(c.__name__, c) for c in callables])
+
+ if templateuri is not None:
+ self.template = _lookup_template(context, templateuri,
+ calling_uri)
+ self._templateuri = self.template.module._template_uri
+ elif template is not None:
+ self.template = template
+ self._templateuri = template.module._template_uri
+ else:
+ raise TypeError("'template' argument is required.")
+
+ if populate_self:
+ lclcallable, lclcontext = \
+ _populate_self_namespace(context, self.template,
+ self_ns=self)
+
+ @property
+ def module(self):
+ """The Python module referenced by this :class:`.Namespace`.
+
+ If the namespace references a :class:`.Template`, then
+ this module is the equivalent of ``template.module``,
+ i.e. the generated module for the template.
+
+ """
+ return self.template.module
+
+ @property
+ def filename(self):
+ """The path of the filesystem file used for this
+ :class:`.Namespace`'s module or template.
+ """
+ return self.template.filename
+
+ @property
+ def uri(self):
+ """The URI for this :class:`.Namespace`'s template.
+
+ I.e. whatever was sent to :meth:`.TemplateLookup.get_template()`.
+
+ This is the equivalent of :attr:`.Template.uri`.
+
+ """
+ return self.template.uri
+
+ def _get_star(self):
+ if self.callables:
+ for key in self.callables:
+ yield (key, self.callables[key])
+ def get(key):
+ callable_ = self.template._get_def_callable(key)
+ return compat.partial(callable_, self.context)
+ for k in self.template.module._exports:
+ yield (k, get(k))
+
+ def __getattr__(self, key):
+ if key in self.callables:
+ val = self.callables[key]
+ elif self.template.has_def(key):
+ callable_ = self.template._get_def_callable(key)
+ val = compat.partial(callable_, self.context)
+ elif self.inherits:
+ val = getattr(self.inherits, key)
+
+ else:
+ raise AttributeError(
+ "Namespace '%s' has no member '%s'" %
+ (self.name, key))
+ setattr(self, key, val)
+ return val
+
+class ModuleNamespace(Namespace):
+ """A :class:`.Namespace` specific to a Python module instance."""
+
+ def __init__(self, name, context, module,
+ callables=None, inherits=None,
+ populate_self=True, calling_uri=None):
+ self.name = name
+ self.context = context
+ self.inherits = inherits
+ if callables is not None:
+ self.callables = dict([(c.__name__, c) for c in callables])
+
+ mod = __import__(module)
+ for token in module.split('.')[1:]:
+ mod = getattr(mod, token)
+ self.module = mod
+
+ @property
+ def filename(self):
+ """The path of the filesystem file used for this
+ :class:`.Namespace`'s module or template.
+ """
+ return self.module.__file__
+
+ def _get_star(self):
+ if self.callables:
+ for key in self.callables:
+ yield (key, self.callables[key])
+ for key in dir(self.module):
+ if key[0] != '_':
+ callable_ = getattr(self.module, key)
+ if compat.callable(callable_):
+ yield key, compat.partial(callable_, self.context)
+
+
+ def __getattr__(self, key):
+ if key in self.callables:
+ val = self.callables[key]
+ elif hasattr(self.module, key):
+ callable_ = getattr(self.module, key)
+ val = compat.partial(callable_, self.context)
+ elif self.inherits:
+ val = getattr(self.inherits, key)
+ else:
+ raise AttributeError(
+ "Namespace '%s' has no member '%s'" %
+ (self.name, key))
+ setattr(self, key, val)
+ return val
+
+def supports_caller(func):
+ """Apply a caller_stack compatibility decorator to a plain
+ Python function.
+
+ See the example in :ref:`namespaces_python_modules`.
+
+ """
+
+ def wrap_stackframe(context, *args, **kwargs):
+ context.caller_stack._push_frame()
+ try:
+ return func(context, *args, **kwargs)
+ finally:
+ context.caller_stack._pop_frame()
+ return wrap_stackframe
+
+def capture(context, callable_, *args, **kwargs):
+ """Execute the given template def, capturing the output into
+ a buffer.
+
+ See the example in :ref:`namespaces_python_modules`.
+
+ """
+
+ if not compat.callable(callable_):
+ raise exceptions.RuntimeException(
+ "capture() function expects a callable as "
+ "its argument (i.e. capture(func, *args, **kwargs))"
+ )
+ context._push_buffer()
+ try:
+ callable_(*args, **kwargs)
+ finally:
+ buf = context._pop_buffer()
+ return buf.getvalue()
+
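+# Illustrative sketch only, not part of Mako: combining supports_caller() and
+# capture() in a plain Python function exposed through <%namespace module=...>,
+# loosely following the pattern shown in the Mako documentation.
+# ``_example_render_panel`` is an invented name.
+@supports_caller
+def _example_render_panel(context):
+    context.write("<div class='panel'>")
+    # render the caller's body() def into a string, then write it out
+    context.write(capture(context, context['caller'].body))
+    context.write("</div>")
+    return ''
+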
+def _decorate_toplevel(fn):
+ def decorate_render(render_fn):
+ def go(context, *args, **kw):
+ def y(*args, **kw):
+ return render_fn(context, *args, **kw)
+ try:
+ y.__name__ = render_fn.__name__[7:]
+ except TypeError:
+ # < Python 2.4
+ pass
+ return fn(y)(context, *args, **kw)
+ return go
+ return decorate_render
+
+def _decorate_inline(context, fn):
+ def decorate_render(render_fn):
+ dec = fn(render_fn)
+ def go(*args, **kw):
+ return dec(context, *args, **kw)
+ return go
+ return decorate_render
+
+def _include_file(context, uri, calling_uri, **kwargs):
+ """locate the template from the given uri and include it in
+ the current output."""
+
+ template = _lookup_template(context, uri, calling_uri)
+ (callable_, ctx) = _populate_self_namespace(
+ context._clean_inheritance_tokens(),
+ template)
+ callable_(ctx, **_kwargs_for_include(callable_, context._data, **kwargs))
+
+def _inherit_from(context, uri, calling_uri):
+ """called by the _inherit method in template modules to set
+ up the inheritance chain at the start of a template's
+ execution."""
+
+ if uri is None:
+ return None
+ template = _lookup_template(context, uri, calling_uri)
+ self_ns = context['self']
+ ih = self_ns
+ while ih.inherits is not None:
+ ih = ih.inherits
+ lclcontext = context._locals({'next': ih})
+ ih.inherits = TemplateNamespace("self:%s" % template.uri,
+ lclcontext,
+ template=template,
+ populate_self=False)
+ context._data['parent'] = lclcontext._data['local'] = ih.inherits
+ callable_ = getattr(template.module, '_mako_inherit', None)
+ if callable_ is not None:
+ ret = callable_(template, lclcontext)
+ if ret:
+ return ret
+
+ gen_ns = getattr(template.module, '_mako_generate_namespaces', None)
+ if gen_ns is not None:
+ gen_ns(context)
+ return (template.callable_, lclcontext)
+
+def _lookup_template(context, uri, relativeto):
+ lookup = context._with_template.lookup
+ if lookup is None:
+ raise exceptions.TemplateLookupException(
+ "Template '%s' has no TemplateLookup associated" %
+ context._with_template.uri)
+ uri = lookup.adjust_uri(uri, relativeto)
+ try:
+ return lookup.get_template(uri)
+ except exceptions.TopLevelLookupException:
+ raise exceptions.TemplateLookupException(str(compat.exception_as()))
+
+def _populate_self_namespace(context, template, self_ns=None):
+ if self_ns is None:
+ self_ns = TemplateNamespace('self:%s' % template.uri,
+ context, template=template,
+ populate_self=False)
+ context._data['self'] = context._data['local'] = self_ns
+ if hasattr(template.module, '_mako_inherit'):
+ ret = template.module._mako_inherit(template, context)
+ if ret:
+ return ret
+ return (template.callable_, context)
+
+def _render(template, callable_, args, data, as_unicode=False):
+ """create a Context and return the string
+ output of the given template and template callable."""
+
+ if as_unicode:
+ buf = util.FastEncodingBuffer(as_unicode=True)
+ elif template.bytestring_passthrough:
+ buf = compat.StringIO()
+ else:
+ buf = util.FastEncodingBuffer(
+ as_unicode=as_unicode,
+ encoding=template.output_encoding,
+ errors=template.encoding_errors)
+ context = Context(buf, **data)
+ context._outputting_as_unicode = as_unicode
+ context._set_with_template(template)
+
+ _render_context(template, callable_, context, *args,
+ **_kwargs_for_callable(callable_, data))
+ return context._pop_buffer().getvalue()
+
+def _kwargs_for_callable(callable_, data):
+ argspec = compat.inspect_func_args(callable_)
+ # for normal pages, **pageargs is usually present
+ if argspec[2]:
+ return data
+
+ # for rendering defs from the top level, figure out the args
+ namedargs = argspec[0] + [v for v in argspec[1:3] if v is not None]
+ kwargs = {}
+ for arg in namedargs:
+ if arg != 'context' and arg in data and arg not in kwargs:
+ kwargs[arg] = data[arg]
+ return kwargs
+
+def _kwargs_for_include(callable_, data, **kwargs):
+ argspec = compat.inspect_func_args(callable_)
+ namedargs = argspec[0] + [v for v in argspec[1:3] if v is not None]
+ for arg in namedargs:
+ if arg != 'context' and arg in data and arg not in kwargs:
+ kwargs[arg] = data[arg]
+ return kwargs
+
+def _render_context(tmpl, callable_, context, *args, **kwargs):
+ import mako.template as template
+ # create polymorphic 'self' namespace for this
+ # template with possibly updated context
+ if not isinstance(tmpl, template.DefTemplate):
+ # if main render method, call from the base of the inheritance stack
+ (inherit, lclcontext) = _populate_self_namespace(context, tmpl)
+ _exec_template(inherit, lclcontext, args=args, kwargs=kwargs)
+ else:
+ # otherwise, call the actual rendering method specified
+ (inherit, lclcontext) = _populate_self_namespace(context, tmpl.parent)
+ _exec_template(callable_, context, args=args, kwargs=kwargs)
+
+def _exec_template(callable_, context, args=None, kwargs=None):
+ """execute a rendering callable given the callable, a
+ Context, and optional explicit arguments
+
+ the contextual Template will be located if it exists, and
+ the error handling options specified on that Template will
+ be interpreted here.
+ """
+ template = context._with_template
+ if template is not None and \
+ (template.format_exceptions or template.error_handler):
+ try:
+ callable_(context, *args, **kwargs)
+ except Exception:
+ _render_error(template, context, compat.exception_as())
+ except:
+ e = sys.exc_info()[0]
+ _render_error(template, context, e)
+ else:
+ callable_(context, *args, **kwargs)
+
+def _render_error(template, context, error):
+ if template.error_handler:
+ result = template.error_handler(context, error)
+ if not result:
+ compat.reraise(*sys.exc_info())
+ else:
+ error_template = exceptions.html_error_template()
+ if context._outputting_as_unicode:
+ context._buffer_stack[:] = [
+ util.FastEncodingBuffer(as_unicode=True)]
+ else:
+ context._buffer_stack[:] = [util.FastEncodingBuffer(
+ error_template.output_encoding,
+ error_template.encoding_errors)]
+
+ context._set_with_template(error_template)
+ error_template.render_context(context, error=error)
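For orientation, a minimal usage sketch (assuming the bundled Mako is importable as the mako package) of the entry point that drives the helpers above: Template.render() builds a Context over a FastEncodingBuffer and hands the module-level render callable to _render_context()/_exec_template().

    from mako.template import Template

    t = Template("Hello, ${name}!")
    print(t.render(name="world"))          # -> 'Hello, world!' (encoded if output_encoding is set)
    print(t.render_unicode(name="world"))  # same output, always returned as text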
--- /dev/null
+# mako/template.py
+# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Provides the Template class, a facade for parsing, generating and executing
+template strings, as well as template runtime operations."""
+
+from mako.lexer import Lexer
+from mako import runtime, util, exceptions, codegen, cache, compat
+import os
+import re
+import shutil
+import stat
+import sys
+import tempfile
+import types
+import weakref
+
+
+class Template(object):
+ """Represents a compiled template.
+
+ :class:`.Template` includes a reference to the original
+ template source (via the :attr:`.source` attribute)
+ as well as the source code of the
+ generated Python module (i.e. the :attr:`.code` attribute),
+ as well as a reference to an actual Python module.
+
+ :class:`.Template` is constructed using either a literal string
+ representing the template text, or a filename representing a filesystem
+ path to a source file.
+
+ :param text: textual template source. This argument is mutually
+ exclusive versus the ``filename`` parameter.
+
+ :param filename: filename of the source template. This argument is
+ mutually exclusive versus the ``text`` parameter.
+
+ :param buffer_filters: string list of filters to be applied
+ to the output of ``%def``\ s which are buffered, cached, or otherwise
+ filtered, after all filters
+ defined with the ``%def`` itself have been applied. Allows the
+ creation of default expression filters that let the output
+ of return-valued ``%def``\ s "opt out" of that filtering via
+ passing special attributes or objects.
+
+ :param bytestring_passthrough: When ``True``, and ``output_encoding`` is
+ set to ``None``, and :meth:`.Template.render` is used to render,
+ the `StringIO` or `cStringIO` buffer will be used instead of the
+ default "fast" buffer. This allows raw bytestrings in the
+ output stream, such as in expressions, to pass straight
+ through to the buffer. This flag is forced
+ to ``True`` if ``disable_unicode`` is also configured.
+
+ .. versionadded:: 0.4
+ Added to provide the same behavior as that of the previous series.
+
+ :param cache_args: Dictionary of cache configuration arguments that
+ will be passed to the :class:`.CacheImpl`. See :ref:`caching_toplevel`.
+
+ :param cache_dir:
+
+ .. deprecated:: 0.6
+ Use the ``'dir'`` argument in the ``cache_args`` dictionary.
+ See :ref:`caching_toplevel`.
+
+ :param cache_enabled: Boolean flag which enables caching of this
+ template. See :ref:`caching_toplevel`.
+
+ :param cache_impl: String name of a :class:`.CacheImpl` caching
+ implementation to use. Defaults to ``'beaker'``.
+
+ :param cache_type:
+
+ .. deprecated:: 0.6
+ Use the ``'type'`` argument in the ``cache_args`` dictionary.
+ See :ref:`caching_toplevel`.
+
+ :param cache_url:
+
+ .. deprecated:: 0.6
+ Use the ``'url'`` argument in the ``cache_args`` dictionary.
+ See :ref:`caching_toplevel`.
+
+ :param default_filters: List of string filter names that will
+ be applied to all expressions. See :ref:`filtering_default_filters`.
+
+ :param disable_unicode: Disables all awareness of Python Unicode
+ objects. See :ref:`unicode_disabled`.
+
+ :param enable_loop: When ``True``, enable the ``loop`` context variable.
+ This can be set to ``False`` to support templates that may
+ be making use of the name "``loop``". Individual templates can
+ re-enable the "loop" context by placing the directive
+ ``enable_loop="True"`` inside the ``<%page>`` tag -- see
+ :ref:`migrating_loop`.
+
+ :param encoding_errors: Error parameter passed to ``encode()`` when
+ string encoding is performed. See :ref:`usage_unicode`.
+
+ :param error_handler: Python callable which is called whenever
+ compile or runtime exceptions occur. The callable is passed
+ the current context as well as the exception. If the
+ callable returns ``True``, the exception is considered to
+ be handled, else it is re-raised after the function
+ completes. This is used to provide custom error-rendering
+ functions.
+
+ :param format_exceptions: if ``True``, exceptions which occur during
+ the render phase of this template will be caught and
+ formatted into an HTML error page, which then becomes the
+ rendered result of the :meth:`.render` call. Otherwise,
+ runtime exceptions are propagated outwards.
+
+ :param imports: String list of Python statements, typically individual
+ "import" lines, which will be placed into the module level
+ preamble of all generated Python modules. See the example
+ in :ref:`filtering_default_filters`.
+
+ :param future_imports: String list of names to import from `__future__`.
+ These will be concatenated into a comma-separated string and inserted
+ into the beginning of the template, e.g. ``future_imports=['FOO',
+ 'BAR']`` results in ``from __future__ import FOO, BAR``. If you're
+ interested in using features like the new division operator, you must
+ use future_imports to convey that to the renderer, as otherwise the
+ import will not appear as the first executed statement in the generated
+ code and will therefore not have the desired effect.
+
+ :param input_encoding: Encoding of the template's source code. Can
+ be used in lieu of the coding comment. See
+ :ref:`usage_unicode` as well as :ref:`unicode_toplevel` for
+ details on source encoding.
+
+ :param lookup: a :class:`.TemplateLookup` instance that will be used
+ for all file lookups via the ``<%namespace>``,
+ ``<%include>``, and ``<%inherit>`` tags. See
+ :ref:`usage_templatelookup`.
+
+ :param module_directory: Filesystem location where generated
+ Python module files will be placed.
+
+ :param module_filename: Overrides the filename of the generated
+ Python module file. For advanced usage only.
+
+ :param module_writer: A callable which overrides how the Python
+ module is written entirely. The callable is passed the
+ encoded source content of the module and the destination
+ path to be written to. The default behavior of module writing
+ uses a tempfile in conjunction with a file move in order
+ to make the operation atomic. So a user-defined module
+ writing function that mimics the default behavior would be:
+
+ .. sourcecode:: python
+
+ import tempfile
+ import os
+ import shutil
+
+ def module_writer(source, outputpath):
+ (dest, name) = \\
+ tempfile.mkstemp(
+ dir=os.path.dirname(outputpath)
+ )
+
+ os.write(dest, source)
+ os.close(dest)
+ shutil.move(name, outputpath)
+
+ from mako.template import Template
+ mytemplate = Template(
+ filename="index.html",
+ module_directory="/path/to/modules",
+ module_writer=module_writer
+ )
+
+ The function is provided for unusual configurations where
+ certain platform-specific permissions or other special
+ steps are needed.
+
+ :param output_encoding: The encoding to use when :meth:`.render`
+ is called.
+ See :ref:`usage_unicode` as well as :ref:`unicode_toplevel`.
+
+ :param preprocessor: Python callable which will be passed
+ the full template source before it is parsed. The return
+ result of the callable will be used as the template source
+ code.
+
+ :param lexer_cls: A :class:`.Lexer` class used to parse
+ the template. The :class:`.Lexer` class is used by
+ default.
+
+ .. versionadded:: 0.7.4
+
+ :param strict_undefined: Replaces the automatic usage of
+ ``UNDEFINED`` for any undeclared variables not located in
+ the :class:`.Context` with an immediate raise of
+ ``NameError``. The advantage is immediate reporting of
+ missing variables, including their names.
+
+ .. versionadded:: 0.3.6
+
+ :param uri: string URI or other identifier for this template.
+ If not provided, the ``uri`` is generated from the filesystem
+ path, or from the in-memory identity of a non-file-based
+ template. The primary usage of the ``uri`` is to provide a key
+ within :class:`.TemplateLookup`, as well as to generate the
+ file path of the generated Python module file, if
+ ``module_directory`` is specified.
+
+ """
+
+ lexer_cls = Lexer
+
+ def __init__(self,
+ text=None,
+ filename=None,
+ uri=None,
+ format_exceptions=False,
+ error_handler=None,
+ lookup=None,
+ output_encoding=None,
+ encoding_errors='strict',
+ module_directory=None,
+ cache_args=None,
+ cache_impl='beaker',
+ cache_enabled=True,
+ cache_type=None,
+ cache_dir=None,
+ cache_url=None,
+ module_filename=None,
+ input_encoding=None,
+ disable_unicode=False,
+ module_writer=None,
+ bytestring_passthrough=False,
+ default_filters=None,
+ buffer_filters=(),
+ strict_undefined=False,
+ imports=None,
+ future_imports=None,
+ enable_loop=True,
+ preprocessor=None,
+ lexer_cls=None):
+ if uri:
+ self.module_id = re.sub(r'\W', "_", uri)
+ self.uri = uri
+ elif filename:
+ self.module_id = re.sub(r'\W', "_", filename)
+ drive, path = os.path.splitdrive(filename)
+ path = os.path.normpath(path).replace(os.path.sep, "/")
+ self.uri = path
+ else:
+ self.module_id = "memory:" + hex(id(self))
+ self.uri = self.module_id
+
+ u_norm = self.uri
+ if u_norm.startswith("/"):
+ u_norm = u_norm[1:]
+ u_norm = os.path.normpath(u_norm)
+ if u_norm.startswith(".."):
+ raise exceptions.TemplateLookupException(
+ "Template uri \"%s\" is invalid - "
+ "it cannot be relative outside "
+ "of the root path." % self.uri)
+
+ self.input_encoding = input_encoding
+ self.output_encoding = output_encoding
+ self.encoding_errors = encoding_errors
+ self.disable_unicode = disable_unicode
+ self.bytestring_passthrough = bytestring_passthrough or disable_unicode
+ self.enable_loop = enable_loop
+ self.strict_undefined = strict_undefined
+ self.module_writer = module_writer
+
+ if compat.py3k and disable_unicode:
+ raise exceptions.UnsupportedError(
+ "Mako for Python 3 does not "
+ "support disabling Unicode")
+ elif output_encoding and disable_unicode:
+ raise exceptions.UnsupportedError(
+ "output_encoding must be set to "
+ "None when disable_unicode is used.")
+ if default_filters is None:
+ if compat.py3k or self.disable_unicode:
+ self.default_filters = ['str']
+ else:
+ self.default_filters = ['unicode']
+ else:
+ self.default_filters = default_filters
+ self.buffer_filters = buffer_filters
+
+ self.imports = imports
+ self.future_imports = future_imports
+ self.preprocessor = preprocessor
+
+ if lexer_cls is not None:
+ self.lexer_cls = lexer_cls
+
+ # if plain text, compile code in memory only
+ if text is not None:
+ (code, module) = _compile_text(self, text, filename)
+ self._code = code
+ self._source = text
+ ModuleInfo(module, None, self, filename, code, text)
+ elif filename is not None:
+ # if template filename and a module directory, load
+ # a filesystem-based module file, generating if needed
+ if module_filename is not None:
+ path = module_filename
+ elif module_directory is not None:
+ path = os.path.abspath(
+ os.path.join(
+ os.path.normpath(module_directory),
+ u_norm + ".py"
+ )
+ )
+ else:
+ path = None
+ module = self._compile_from_file(path, filename)
+ else:
+ raise exceptions.RuntimeException(
+ "Template requires text or filename")
+
+ self.module = module
+ self.filename = filename
+ self.callable_ = self.module.render_body
+ self.format_exceptions = format_exceptions
+ self.error_handler = error_handler
+ self.lookup = lookup
+
+ self.module_directory = module_directory
+
+ self._setup_cache_args(
+ cache_impl, cache_enabled, cache_args,
+ cache_type, cache_dir, cache_url
+ )
+
+
+ @util.memoized_property
+ def reserved_names(self):
+ if self.enable_loop:
+ return codegen.RESERVED_NAMES
+ else:
+ return codegen.RESERVED_NAMES.difference(['loop'])
+
+ def _setup_cache_args(self,
+ cache_impl, cache_enabled, cache_args,
+ cache_type, cache_dir, cache_url):
+ self.cache_impl = cache_impl
+ self.cache_enabled = cache_enabled
+ if cache_args:
+ self.cache_args = cache_args
+ else:
+ self.cache_args = {}
+
+ # transfer deprecated cache_* args
+ if cache_type:
+ self.cache_args['type'] = cache_type
+ if cache_dir:
+ self.cache_args['dir'] = cache_dir
+ if cache_url:
+ self.cache_args['url'] = cache_url
+
+ def _compile_from_file(self, path, filename):
+ if path is not None:
+ util.verify_directory(os.path.dirname(path))
+ filemtime = os.stat(filename)[stat.ST_MTIME]
+ if not os.path.exists(path) or \
+ os.stat(path)[stat.ST_MTIME] < filemtime:
+ data = util.read_file(filename)
+ _compile_module_file(
+ self,
+ data,
+ filename,
+ path,
+ self.module_writer)
+ module = compat.load_module(self.module_id, path)
+ del sys.modules[self.module_id]
+ if module._magic_number != codegen.MAGIC_NUMBER:
+ data = util.read_file(filename)
+ _compile_module_file(
+ self,
+ data,
+ filename,
+ path,
+ self.module_writer)
+ module = compat.load_module(self.module_id, path)
+ del sys.modules[self.module_id]
+ ModuleInfo(module, path, self, filename, None, None)
+ else:
+ # template filename and no module directory, compile code
+ # in memory
+ data = util.read_file(filename)
+ code, module = _compile_text(
+ self,
+ data,
+ filename)
+ self._source = None
+ self._code = code
+ ModuleInfo(module, None, self, filename, code, None)
+ return module
+
+ @property
+ def source(self):
+ """Return the template source code for this :class:`.Template`."""
+
+ return _get_module_info_from_callable(self.callable_).source
+
+ @property
+ def code(self):
+ """Return the module source code for this :class:`.Template`."""
+
+ return _get_module_info_from_callable(self.callable_).code
+
+ @util.memoized_property
+ def cache(self):
+ return cache.Cache(self)
+
+ @property
+ def cache_dir(self):
+ return self.cache_args['dir']
+ @property
+ def cache_url(self):
+ return self.cache_args['url']
+ @property
+ def cache_type(self):
+ return self.cache_args['type']
+
+ def render(self, *args, **data):
+ """Render the output of this template as a string.
+
+ If the template specifies an output encoding, the string
+ will be encoded accordingly, else the output is raw (raw
+ output uses `cStringIO` and can't handle multibyte
+ characters). A :class:`.Context` object is created corresponding
+ to the given data. Arguments that are explicitly declared
+ by this template's internal rendering method are also
+ pulled from the given ``*args``, ``**data`` members.
+
+ """
+ return runtime._render(self, self.callable_, args, data)
+
+ def render_unicode(self, *args, **data):
+ """Render the output of this template as a unicode object."""
+
+ return runtime._render(self,
+ self.callable_,
+ args,
+ data,
+ as_unicode=True)
+
+ def render_context(self, context, *args, **kwargs):
+ """Render this :class:`.Template` with the given context.
+
+ The data is written to the context's buffer.
+
+ """
+ if getattr(context, '_with_template', None) is None:
+ context._set_with_template(self)
+ runtime._render_context(self,
+ self.callable_,
+ context,
+ *args,
+ **kwargs)
+
+ def has_def(self, name):
+ return hasattr(self.module, "render_%s" % name)
+
+ def get_def(self, name):
+ """Return a def of this template as a :class:`.DefTemplate`."""
+
+ return DefTemplate(self, getattr(self.module, "render_%s" % name))
+
+ def _get_def_callable(self, name):
+ return getattr(self.module, "render_%s" % name)
+
+ @property
+ def last_modified(self):
+ return self.module._modified_time
+
+class ModuleTemplate(Template):
+ """A Template which is constructed given an existing Python module.
+
+ e.g.::
+
+ t = Template("this is a template")
+ f = file("mymodule.py", "w")
+ f.write(t.code)
+ f.close()
+
+ import mymodule
+
+ t = ModuleTemplate(mymodule)
+ print t.render()
+
+ """
+
+ def __init__(self, module,
+ module_filename=None,
+ template=None,
+ template_filename=None,
+ module_source=None,
+ template_source=None,
+ output_encoding=None,
+ encoding_errors='strict',
+ disable_unicode=False,
+ bytestring_passthrough=False,
+ format_exceptions=False,
+ error_handler=None,
+ lookup=None,
+ cache_args=None,
+ cache_impl='beaker',
+ cache_enabled=True,
+ cache_type=None,
+ cache_dir=None,
+ cache_url=None,
+ ):
+ self.module_id = re.sub(r'\W', "_", module._template_uri)
+ self.uri = module._template_uri
+ self.input_encoding = module._source_encoding
+ self.output_encoding = output_encoding
+ self.encoding_errors = encoding_errors
+ self.disable_unicode = disable_unicode
+ self.bytestring_passthrough = bytestring_passthrough or disable_unicode
+ self.enable_loop = module._enable_loop
+
+ if compat.py3k and disable_unicode:
+ raise exceptions.UnsupportedError(
+ "Mako for Python 3 does not "
+ "support disabling Unicode")
+ elif output_encoding and disable_unicode:
+ raise exceptions.UnsupportedError(
+ "output_encoding must be set to "
+ "None when disable_unicode is used.")
+
+ self.module = module
+ self.filename = template_filename
+ ModuleInfo(module,
+ module_filename,
+ self,
+ template_filename,
+ module_source,
+ template_source)
+
+ self.callable_ = self.module.render_body
+ self.format_exceptions = format_exceptions
+ self.error_handler = error_handler
+ self.lookup = lookup
+ self._setup_cache_args(
+ cache_impl, cache_enabled, cache_args,
+ cache_type, cache_dir, cache_url
+ )
+
+class DefTemplate(Template):
+ """A :class:`.Template` which represents a callable def in a parent
+ template."""
+
+ def __init__(self, parent, callable_):
+ self.parent = parent
+ self.callable_ = callable_
+ self.output_encoding = parent.output_encoding
+ self.module = parent.module
+ self.encoding_errors = parent.encoding_errors
+ self.format_exceptions = parent.format_exceptions
+ self.error_handler = parent.error_handler
+ self.enable_loop = parent.enable_loop
+ self.lookup = parent.lookup
+ self.bytestring_passthrough = parent.bytestring_passthrough
+
+ def get_def(self, name):
+ return self.parent.get_def(name)
+
+class ModuleInfo(object):
+ """Stores information about a module currently loaded into
+ memory, and provides reverse lookups of template source and module
+ source code based on a module's identifier.
+
+ """
+ _modules = weakref.WeakValueDictionary()
+
+ def __init__(self,
+ module,
+ module_filename,
+ template,
+ template_filename,
+ module_source,
+ template_source):
+ self.module = module
+ self.module_filename = module_filename
+ self.template_filename = template_filename
+ self.module_source = module_source
+ self.template_source = template_source
+ self._modules[module.__name__] = template._mmarker = self
+ if module_filename:
+ self._modules[module_filename] = self
+
+ @classmethod
+ def get_module_source_metadata(cls, module_source, full_line_map=False):
+ source_map = re.search(
+ r"__M_BEGIN_METADATA(.+?)__M_END_METADATA",
+ module_source, re.S).group(1)
+ source_map = compat.json.loads(source_map)
+ source_map['line_map'] = dict((int(k), int(v))
+ for k, v in source_map['line_map'].items())
+ if full_line_map:
+ f_line_map = source_map['full_line_map'] = []
+ line_map = source_map['line_map']
+
+ curr_templ_line = 1
+ for mod_line in range(1, max(line_map)):
+ if mod_line in line_map:
+ curr_templ_line = line_map[mod_line]
+ f_line_map.append(curr_templ_line)
+ return source_map
+
+ @property
+ def code(self):
+ if self.module_source is not None:
+ return self.module_source
+ else:
+ return util.read_python_file(self.module_filename)
+
+ @property
+ def source(self):
+ if self.template_source is not None:
+ if self.module._source_encoding and \
+ not isinstance(self.template_source, compat.text_type):
+ return self.template_source.decode(
+ self.module._source_encoding)
+ else:
+ return self.template_source
+ else:
+ data = util.read_file(self.template_filename)
+ if self.module._source_encoding:
+ return data.decode(self.module._source_encoding)
+ else:
+ return data
+
+def _compile(template, text, filename, generate_magic_comment):
+ lexer = template.lexer_cls(text,
+ filename,
+ disable_unicode=template.disable_unicode,
+ input_encoding=template.input_encoding,
+ preprocessor=template.preprocessor)
+ node = lexer.parse()
+ source = codegen.compile(node,
+ template.uri,
+ filename,
+ default_filters=template.default_filters,
+ buffer_filters=template.buffer_filters,
+ imports=template.imports,
+ future_imports=template.future_imports,
+ source_encoding=lexer.encoding,
+ generate_magic_comment=generate_magic_comment,
+ disable_unicode=template.disable_unicode,
+ strict_undefined=template.strict_undefined,
+ enable_loop=template.enable_loop,
+ reserved_names=template.reserved_names)
+ return source, lexer
+
+def _compile_text(template, text, filename):
+ identifier = template.module_id
+ source, lexer = _compile(template, text, filename,
+ generate_magic_comment=template.disable_unicode)
+
+ cid = identifier
+ if not compat.py3k and isinstance(cid, compat.text_type):
+ cid = cid.encode()
+ module = types.ModuleType(cid)
+ code = compile(source, cid, 'exec')
+
+ # this exec() works for 2.4->3.3.
+ exec(code, module.__dict__, module.__dict__)
+ return (source, module)
+
+def _compile_module_file(template, text, filename, outputpath, module_writer):
+ source, lexer = _compile(template, text, filename,
+ generate_magic_comment=True)
+
+ if isinstance(source, compat.text_type):
+ source = source.encode(lexer.encoding or 'ascii')
+
+ if module_writer:
+ module_writer(source, outputpath)
+ else:
+ # make tempfiles in the same location as the ultimate
+ # location. this ensures they're on the same filesystem,
+ # avoiding synchronization issues.
+ (dest, name) = tempfile.mkstemp(dir=os.path.dirname(outputpath))
+
+ os.write(dest, source)
+ os.close(dest)
+ shutil.move(name, outputpath)
+
+def _get_module_info_from_callable(callable_):
+ if compat.py3k:
+ return _get_module_info(callable_.__globals__['__name__'])
+ else:
+ return _get_module_info(callable_.func_globals['__name__'])
+
+def _get_module_info(filename):
+ return ModuleInfo._modules[filename]
+
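A small sketch of the def-rendering path provided by get_def()/DefTemplate above; the template text is invented for illustration.

    from mako.template import Template

    t = Template('<%def name="greet(who)">Hello, ${who}!</%def>')
    print(t.get_def("greet").render(who="mako"))   # renders only the named def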
--- /dev/null
+# mako/util.py
+# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+import re
+import collections
+import codecs
+import os
+from mako import compat
+import operator
+
+def update_wrapper(decorated, fn):
+ decorated.__wrapped__ = fn
+ decorated.__name__ = fn.__name__
+ return decorated
+
+
+class PluginLoader(object):
+ def __init__(self, group):
+ self.group = group
+ self.impls = {}
+
+ def load(self, name):
+ if name in self.impls:
+ return self.impls[name]()
+ else:
+ import pkg_resources
+ for impl in pkg_resources.iter_entry_points(
+ self.group,
+ name):
+ self.impls[name] = impl.load
+ return impl.load()
+ else:
+ from mako import exceptions
+ raise exceptions.RuntimeException(
+ "Can't load plugin %s %s" %
+ (self.group, name))
+
+ def register(self, name, modulepath, objname):
+ def load():
+ mod = __import__(modulepath)
+ for token in modulepath.split(".")[1:]:
+ mod = getattr(mod, token)
+ return getattr(mod, objname)
+ self.impls[name] = load
+
+def verify_directory(dir):
+ """create and/or verify a filesystem directory."""
+
+ tries = 0
+
+ while not os.path.exists(dir):
+ try:
+ tries += 1
+ os.makedirs(dir, compat.octal("0775"))
+ except:
+ if tries > 5:
+ raise
+
+def to_list(x, default=None):
+ if x is None:
+ return default
+ if not isinstance(x, (list, tuple)):
+ return [x]
+ else:
+ return x
+
+
+class memoized_property(object):
+ """A read-only @property that is only evaluated once."""
+ def __init__(self, fget, doc=None):
+ self.fget = fget
+ self.__doc__ = doc or fget.__doc__
+ self.__name__ = fget.__name__
+
+ def __get__(self, obj, cls):
+ if obj is None:
+ return self
+ obj.__dict__[self.__name__] = result = self.fget(obj)
+ return result
+
+class memoized_instancemethod(object):
+ """Decorate a method memoize its return value.
+
+ Best applied to no-arg methods: memoization is not sensitive to
+ argument values, and will always return the same value even when
+ called with different arguments.
+
+ """
+ def __init__(self, fget, doc=None):
+ self.fget = fget
+ self.__doc__ = doc or fget.__doc__
+ self.__name__ = fget.__name__
+
+ def __get__(self, obj, cls):
+ if obj is None:
+ return self
+ def oneshot(*args, **kw):
+ result = self.fget(obj, *args, **kw)
+ memo = lambda *a, **kw: result
+ memo.__name__ = self.__name__
+ memo.__doc__ = self.__doc__
+ obj.__dict__[self.__name__] = memo
+ return result
+ oneshot.__name__ = self.__name__
+ oneshot.__doc__ = self.__doc__
+ return oneshot
+
+class SetLikeDict(dict):
+ """a dictionary that has some setlike methods on it"""
+ def union(self, other):
+ """produce a 'union' of this dict and another (at the key level).
+
+ values in the second dict take precedence over that of the first"""
+ x = SetLikeDict(**self)
+ x.update(other)
+ return x
+
+class FastEncodingBuffer(object):
+ """a very rudimentary buffer that is faster than StringIO,
+ but doesn't crash on unicode data like cStringIO."""
+
+ def __init__(self, encoding=None, errors='strict', as_unicode=False):
+ self.data = collections.deque()
+ self.encoding = encoding
+ if as_unicode:
+ self.delim = compat.u('')
+ else:
+ self.delim = ''
+ self.as_unicode = as_unicode
+ self.errors = errors
+ self.write = self.data.append
+
+ def truncate(self):
+ self.data = collections.deque()
+ self.write = self.data.append
+
+ def getvalue(self):
+ if self.encoding:
+ return self.delim.join(self.data).encode(self.encoding,
+ self.errors)
+ else:
+ return self.delim.join(self.data)
+
+class LRUCache(dict):
+ """A dictionary-like object that stores a limited number of items,
+ discarding lesser used items periodically.
+
+ this is a rewrite of LRUCache from Myghty to use a periodic timestamp-based
+ paradigm so that synchronization is not really needed. the size management
+ is inexact.
+ """
+
+ class _Item(object):
+ def __init__(self, key, value):
+ self.key = key
+ self.value = value
+ self.timestamp = compat.time_func()
+ def __repr__(self):
+ return repr(self.value)
+
+ def __init__(self, capacity, threshold=.5):
+ self.capacity = capacity
+ self.threshold = threshold
+
+ def __getitem__(self, key):
+ item = dict.__getitem__(self, key)
+ item.timestamp = compat.time_func()
+ return item.value
+
+ def values(self):
+ return [i.value for i in dict.values(self)]
+
+ def setdefault(self, key, value):
+ if key in self:
+ return self[key]
+ else:
+ self[key] = value
+ return value
+
+ def __setitem__(self, key, value):
+ item = dict.get(self, key)
+ if item is None:
+ item = self._Item(key, value)
+ dict.__setitem__(self, key, item)
+ else:
+ item.value = value
+ self._manage_size()
+
+ def _manage_size(self):
+ while len(self) > self.capacity + self.capacity * self.threshold:
+ bytime = sorted(dict.values(self),
+ key=operator.attrgetter('timestamp'), reverse=True)
+ for item in bytime[self.capacity:]:
+ try:
+ del self[item.key]
+ except KeyError:
+ # if we couldn't find a key, most likely some other thread
+ # broke in on us. loop around and try again
+ break
+
+# Regexp to match python magic encoding line
+_PYTHON_MAGIC_COMMENT_re = re.compile(
+ r'[ \t\f]* \# .* coding[=:][ \t]*([-\w.]+)',
+ re.VERBOSE)
+
+def parse_encoding(fp):
+ """Deduce the encoding of a Python source file (binary mode) from magic
+ comment.
+
+ It does this in the same way as the `Python interpreter`__
+
+ .. __: http://docs.python.org/ref/encodings.html
+
+ The ``fp`` argument should be a seekable file object in binary mode.
+ """
+ pos = fp.tell()
+ fp.seek(0)
+ try:
+ line1 = fp.readline()
+ has_bom = line1.startswith(codecs.BOM_UTF8)
+ if has_bom:
+ line1 = line1[len(codecs.BOM_UTF8):]
+
+ m = _PYTHON_MAGIC_COMMENT_re.match(line1.decode('ascii', 'ignore'))
+ if not m:
+ try:
+ import parser
+ parser.suite(line1.decode('ascii', 'ignore'))
+ except (ImportError, SyntaxError):
+ # Either it's a real syntax error, in which case the source
+ # is not valid python source, or line2 is a continuation of
+ # line1, in which case we don't want to scan line2 for a magic
+ # comment.
+ pass
+ else:
+ line2 = fp.readline()
+ m = _PYTHON_MAGIC_COMMENT_re.match(
+ line2.decode('ascii', 'ignore'))
+
+ if has_bom:
+ if m:
+ raise SyntaxError("python refuses to compile code with both a UTF8" \
+ " byte-order-mark and a magic encoding comment")
+ return 'utf_8'
+ elif m:
+ return m.group(1)
+ else:
+ return None
+ finally:
+ fp.seek(pos)
+
+def sorted_dict_repr(d):
+ """repr() a dictionary with the keys in order.
+
+ Used by the lexer unit test to compare parse trees based on strings.
+
+ """
+ keys = list(d.keys())
+ keys.sort()
+ return "{" + ", ".join(["%r: %r" % (k, d[k]) for k in keys]) + "}"
+
+def restore__ast(_ast):
+ """Attempt to restore the required classes to the _ast module if it
+ appears to be missing them
+ """
+ if hasattr(_ast, 'AST'):
+ return
+ _ast.PyCF_ONLY_AST = 2 << 9
+ m = compile("""\
+def foo(): pass
+class Bar(object): pass
+if False: pass
+baz = 'mako'
+1 + 2 - 3 * 4 / 5
+6 // 7 % 8 << 9 >> 10
+11 & 12 ^ 13 | 14
+15 and 16 or 17
+-baz + (not +18) - ~17
+baz and 'foo' or 'bar'
+(mako is baz == baz) is not baz != mako
+mako > baz < mako >= baz <= mako
+mako in baz not in mako""", '<unknown>', 'exec', _ast.PyCF_ONLY_AST)
+ _ast.Module = type(m)
+
+ for cls in _ast.Module.__mro__:
+ if cls.__name__ == 'mod':
+ _ast.mod = cls
+ elif cls.__name__ == 'AST':
+ _ast.AST = cls
+
+ _ast.FunctionDef = type(m.body[0])
+ _ast.ClassDef = type(m.body[1])
+ _ast.If = type(m.body[2])
+
+ _ast.Name = type(m.body[3].targets[0])
+ _ast.Store = type(m.body[3].targets[0].ctx)
+ _ast.Str = type(m.body[3].value)
+
+ _ast.Sub = type(m.body[4].value.op)
+ _ast.Add = type(m.body[4].value.left.op)
+ _ast.Div = type(m.body[4].value.right.op)
+ _ast.Mult = type(m.body[4].value.right.left.op)
+
+ _ast.RShift = type(m.body[5].value.op)
+ _ast.LShift = type(m.body[5].value.left.op)
+ _ast.Mod = type(m.body[5].value.left.left.op)
+ _ast.FloorDiv = type(m.body[5].value.left.left.left.op)
+
+ _ast.BitOr = type(m.body[6].value.op)
+ _ast.BitXor = type(m.body[6].value.left.op)
+ _ast.BitAnd = type(m.body[6].value.left.left.op)
+
+ _ast.Or = type(m.body[7].value.op)
+ _ast.And = type(m.body[7].value.values[0].op)
+
+ _ast.Invert = type(m.body[8].value.right.op)
+ _ast.Not = type(m.body[8].value.left.right.op)
+ _ast.UAdd = type(m.body[8].value.left.right.operand.op)
+ _ast.USub = type(m.body[8].value.left.left.op)
+
+ _ast.Or = type(m.body[9].value.op)
+ _ast.And = type(m.body[9].value.values[0].op)
+
+ _ast.IsNot = type(m.body[10].value.ops[0])
+ _ast.NotEq = type(m.body[10].value.ops[1])
+ _ast.Is = type(m.body[10].value.left.ops[0])
+ _ast.Eq = type(m.body[10].value.left.ops[1])
+
+ _ast.Gt = type(m.body[11].value.ops[0])
+ _ast.Lt = type(m.body[11].value.ops[1])
+ _ast.GtE = type(m.body[11].value.ops[2])
+ _ast.LtE = type(m.body[11].value.ops[3])
+
+ _ast.In = type(m.body[12].value.ops[0])
+ _ast.NotIn = type(m.body[12].value.ops[1])
+
+
+
+def read_file(path, mode='rb'):
+ fp = open(path, mode)
+ try:
+ data = fp.read()
+ return data
+ finally:
+ fp.close()
+
+def read_python_file(path):
+ fp = open(path, "rb")
+ try:
+ encoding = parse_encoding(fp)
+ data = fp.read()
+ if encoding:
+ data = data.decode(encoding)
+ return data
+ finally:
+ fp.close()
+
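A quick sketch of FastEncodingBuffer on its own (the buffer runtime._render writes into): writes are appended to a deque and only joined, and optionally encoded, when getvalue() is called.

    from mako.util import FastEncodingBuffer

    buf = FastEncodingBuffer(encoding="utf-8")
    buf.write(u"caf")
    buf.write(u"\xe9")
    assert buf.getvalue() == u"caf\xe9".encode("utf-8")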
--- /dev/null
+/****************************************************************************
+* Copyright (C) 2016 Intel Corporation. All Rights Reserved.
+*
+* Permission is hereby granted, free of charge, to any person obtaining a
+* copy of this software and associated documentation files (the "Software"),
+* to deal in the Software without restriction, including without limitation
+* the rights to use, copy, modify, merge, publish, distribute, sublicense,
+* and/or sell copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following conditions:
+*
+* The above copyright notice and this permission notice (including the next
+* paragraph) shall be included in all copies or substantial portions of the
+* Software.
+*
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+* IN THE SOFTWARE.
+*
+* @file ${filename}
+*
+* @brief Implementation for events. auto-generated file
+*
+* DO NOT EDIT
+*
+******************************************************************************/
+#include "common/os.h"
+#include "gen_ar_event.h"
+#include "gen_ar_eventhandler.h"
+
+using namespace ArchRast;
+% for name in protos['event_names']:
+
+void ${name}::Accept(EventHandler* pHandler)
+{
+ pHandler->Handle(*this);
+}
+% endfor
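A hedged sketch of how a template like the one above is expanded; the protos layout shown covers only the keys this file reads, and the paths and event names are illustrative rather than the real generator invocation.

    from mako.template import Template

    protos = {"event_names": ["DrawInstancedEvent", "FrameEndEvent"]}  # invented names
    tpl = Template(filename="gen_ar_event.cpp")  # path to this template, assumed
    print(tpl.render(filename="gen_ar_event.cpp", protos=protos))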
--- /dev/null
+/****************************************************************************
+* Copyright (C) 2016 Intel Corporation. All Rights Reserved.
+*
+* Permission is hereby granted, free of charge, to any person obtaining a
+* copy of this software and associated documentation files (the "Software"),
+* to deal in the Software without restriction, including without limitation
+* the rights to use, copy, modify, merge, publish, distribute, sublicense,
+* and/or sell copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following conditions:
+*
+* The above copyright notice and this permission notice (including the next
+* paragraph) shall be included in all copies or substantial portions of the
+* Software.
+*
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+* IN THE SOFTWARE.
+*
+* @file ${filename}
+*
+* @brief Definitions for events. auto-generated file
+*
+* DO NOT EDIT
+*
+******************************************************************************/
+#pragma once
+
+#include "common/os.h"
+#include "core/state.h"
+
+namespace ArchRast
+{
+% for name in protos['enum_names']:
+ enum ${name}
+ {<% names = protos['enums'][name]['names'] %>
+ % for i in range(len(names)):
+ ${names[i].lstrip()}
+ % endfor
+ };
+% endfor
+
+ //Forward decl
+ class EventHandler;
+
+ //////////////////////////////////////////////////////////////////////////
+ /// Event - interface for handling events.
+ //////////////////////////////////////////////////////////////////////////
+ struct Event
+ {
+ Event() {}
+ virtual ~Event() {}
+
+ virtual void Accept(EventHandler* pHandler) = 0;
+ };
+% for name in protos['event_names']:
+
+ //////////////////////////////////////////////////////////////////////////
+ /// ${name}Data
+ //////////////////////////////////////////////////////////////////////////
+#pragma pack(push, 1)
+ struct ${name}Data
+ {<%
+ field_names = protos['events'][name]['field_names']
+ field_types = protos['events'][name]['field_types'] %>
+ // Fields
+ % for i in range(len(field_names)):
+ ${field_types[i]} ${field_names[i]};
+ % endfor
+ };
+#pragma pack(pop)
+
+ //////////////////////////////////////////////////////////////////////////
+ /// ${name}
+ //////////////////////////////////////////////////////////////////////////
+ struct ${name} : Event
+ {<%
+ field_names = protos['events'][name]['field_names']
+ field_types = protos['events'][name]['field_types'] %>
+ ${name}Data data;
+
+ // Constructor
+ ${name}(
+ % for i in range(len(field_names)):
+ % if i < len(field_names)-1:
+ ${field_types[i]} ${field_names[i]},
+ % endif
+ % if i == len(field_names)-1:
+ ${field_types[i]} ${field_names[i]}
+ % endif
+ % endfor
+ )
+ {
+ % for i in range(len(field_names)):
+ data.${field_names[i]} = ${field_names[i]};
+ % endfor
+ }
+
+ virtual void Accept(EventHandler* pHandler);
+ };
+% endfor
+}
\ No newline at end of file
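For reference, a hypothetical shape of the protos dictionary the header template above iterates over; the event, fields, and types are invented for illustration.

    protos = {
        "enum_names": [],
        "enums": {},
        "event_names": ["DrawInstancedEvent"],
        "events": {
            "DrawInstancedEvent": {
                "field_names": ["drawId", "numVerts"],
                "field_types": ["uint32_t", "uint32_t"],
                "num_fields": 2,
            },
        },
    }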
--- /dev/null
+/****************************************************************************
+* Copyright (C) 2016 Intel Corporation. All Rights Reserved.
+*
+* Permission is hereby granted, free of charge, to any person obtaining a
+* copy of this software and associated documentation files (the "Software"),
+* to deal in the Software without restriction, including without limitation
+* the rights to use, copy, modify, merge, publish, distribute, sublicense,
+* and/or sell copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following conditions:
+*
+* The above copyright notice and this permission notice (including the next
+* paragraph) shall be included in all copies or substantial portions of the
+* Software.
+*
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+* IN THE SOFTWARE.
+*
+* @file ${filename}
+*
+* @brief Event handler interface. auto-generated file
+*
+* DO NOT EDIT
+*
+******************************************************************************/
+#pragma once
+
+#include "${event_header}"
+
+namespace ArchRast
+{
+ //////////////////////////////////////////////////////////////////////////
+ /// EventHandler - interface for handling events.
+ //////////////////////////////////////////////////////////////////////////
+ class EventHandler
+ {
+ public:
+ EventHandler() {}
+ virtual ~EventHandler() {}
+
+ virtual void FlushDraw(uint32_t drawId) {}
+
+% for name in protos['event_names']:
+ virtual void Handle(const ${name}& event) {}
+% endfor
+ };
+}
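The generated Accept/Handle pair is plain double dispatch; a rough Python analogue of the same shape (class and method names invented for illustration):

    class EventHandler(object):
        def handle(self, event):        # C++ picks the overload by static type
            pass

    class DrawInstancedEvent(object):
        def accept(self, handler):
            handler.handle(self)        # the event routes itself to the handler

    DrawInstancedEvent().accept(EventHandler())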
--- /dev/null
+/****************************************************************************
+* Copyright (C) 2016 Intel Corporation. All Rights Reserved.
+*
+* Permission is hereby granted, free of charge, to any person obtaining a
+* copy of this software and associated documentation files (the "Software"),
+* to deal in the Software without restriction, including without limitation
+* the rights to use, copy, modify, merge, publish, distribute, sublicense,
+* and/or sell copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following conditions:
+*
+* The above copyright notice and this permission notice (including the next
+* paragraph) shall be included in all copies or substantial portions of the
+* Software.
+*
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+* IN THE SOFTWARE.
+*
+* @file ${filename}
+*
+* @brief Event handler interface. auto-generated file
+*
+* DO NOT EDIT
+*
+******************************************************************************/
+#pragma once
+
+#include "common/os.h"
+#include "${event_header}"
+#include <fstream>
+#include <sstream>
+
+namespace ArchRast
+{
+ //////////////////////////////////////////////////////////////////////////
+ /// EventHandlerFile - interface for handling events.
+ //////////////////////////////////////////////////////////////////////////
+ class EventHandlerFile : public EventHandler
+ {
+ public:
+ EventHandlerFile(uint32_t id)
+ : mBufOffset(0)
+ {
+#if defined(_WIN32)
+ DWORD pid = GetCurrentProcessId();
+ TCHAR procname[MAX_PATH];
+ GetModuleFileName(NULL, procname, MAX_PATH);
+ const char* pBaseName = strrchr(procname, '\\');
+ std::stringstream outDir;
+ outDir << KNOB_DEBUG_OUTPUT_DIR << pBaseName << "_" << pid << std::ends;
+ CreateDirectory(outDir.str().c_str(), NULL);
+
+ char buf[255];
+ // There could be multiple threads creating thread pools. We
+ // want to make sure they are uniquely identified by adding
+ // the creator's thread id to the filename.
+ sprintf(buf, "%s\\ar_event%d_%d.bin", outDir.str().c_str(), GetCurrentThreadId(), id);
+ mFilename = std::string(buf);
+#else
+ char buf[255];
+ // There could be multiple threads creating thread pools. We
+ // want to make sure they are uniquely identified by adding
+ // the creator's thread id to the filename.
+ sprintf(buf, "%s/ar_event%d_%d.bin", "/tmp", GetCurrentThreadId(), id);
+ mFilename = std::string(buf);
+#endif
+ }
+
+ virtual ~EventHandlerFile()
+ {
+ FlushBuffer();
+ }
+
+ //////////////////////////////////////////////////////////////////////////
+ /// @brief Flush buffer to file.
+ bool FlushBuffer()
+ {
+ if (mBufOffset > 0)
+ {
+ if (mBufOffset == mHeaderBufOffset)
+ {
+ // Nothing to flush. Only header has been generated.
+ return false;
+ }
+
+ std::ofstream file;
+ file.open(mFilename, std::ios::out | std::ios::app | std::ios::binary);
+
+ if (!file.is_open())
+ {
+ SWR_INVALID("ArchRast: Could not open event file!");
+ return false;
+ }
+
+ file.write((char*)mBuffer, mBufOffset);
+ file.close();
+
+ mBufOffset = 0;
+ mHeaderBufOffset = 0; // Reset the header offset so it's no longer considered.
+ }
+ return true;
+ }
+
+ //////////////////////////////////////////////////////////////////////////
+ /// @brief Write event and its payload to the memory buffer.
+ void Write(uint32_t eventId, const char* pBlock, uint32_t size)
+ {
+ if ((mBufOffset + size + sizeof(eventId)) > mBufferSize)
+ {
+ if (!FlushBuffer())
+ {
+ // Don't corrupt what's already in the buffer?
+ /// @todo Maybe add corrupt marker to buffer here in case we can open file in future?
+ return;
+ }
+ }
+
+ memcpy(&mBuffer[mBufOffset], (char*)&eventId, sizeof(eventId));
+ mBufOffset += sizeof(eventId);
+ memcpy(&mBuffer[mBufOffset], pBlock, size);
+ mBufOffset += size;
+ }
+
+% for name in protos['event_names']:
+ //////////////////////////////////////////////////////////////////////////
+ /// @brief Handle ${name} event
+ virtual void Handle(const ${name}& event)
+ {
+% if protos['events'][name]['num_fields'] == 0:
+ Write(${protos['events'][name]['event_id']}, (char*)&event.data, 0);
+% else:
+ Write(${protos['events'][name]['event_id']}, (char*)&event.data, sizeof(event.data));
+%endif
+ }
+% endfor
+
+ //////////////////////////////////////////////////////////////////////////
+ /// @brief Everything written to the buffer up to this point is the header.
+ virtual void MarkHeader()
+ {
+ mHeaderBufOffset = mBufOffset;
+ }
+
+ std::string mFilename;
+
+ static const uint32_t mBufferSize = 1024;
+ uint8_t mBuffer[mBufferSize];
+ uint32_t mBufOffset{0};
+ uint32_t mHeaderBufOffset{0};
+ };
+}
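A rough Python analogue of the record layout EventHandlerFile::Write emits on little-endian x86: a uint32 event id immediately followed by the packed event data (#pragma pack(push, 1) above means the payload has no padding). The id and field values are invented.

    import struct

    event_id = 42                          # hypothetical id
    payload = struct.pack("<II", 7, 3)     # e.g. two uint32_t fields, packed tightly
    record = struct.pack("<I", event_id) + payload
    assert len(record) == 12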
--- /dev/null
+/****************************************************************************
+* Copyright (C) 2017 Intel Corporation. All Rights Reserved.
+*
+* Permission is hereby granted, free of charge, to any person obtaining a
+* copy of this software and associated documentation files (the "Software"),
+* to deal in the Software without restriction, including without limitation
+* the rights to use, copy, modify, merge, publish, distribute, sublicense,
+* and/or sell copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following conditions:
+*
+* The above copyright notice and this permission notice (including the next
+* paragraph) shall be included in all copies or substantial portions of the
+* Software.
+*
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+* IN THE SOFTWARE.
+*
+* @file BackendPixelRate${fileNum}.cpp
+*
+* @brief auto-generated file
+*
+* DO NOT EDIT
+*
+******************************************************************************/
+
+#include "core/backend.h"
+
+void InitBackendPixelRate${fileNum}()
+{
+ %for func in funcList:
+ ${func}
+ %endfor
+}
--- /dev/null
+<%
+ max_len = 0
+ for knob in knobs:
+ if len(knob[0]) > max_len: max_len = len(knob[0])
+ max_len += len('KNOB_ ')
+ if max_len % 4: max_len += 4 - (max_len % 4)
+
+ def space_knob(knob):
+ knob_len = len('KNOB_' + knob)
+ return ' '*(max_len - knob_len)
+
+ def calc_max_name_len(choices_array):
+ _max_len = 0
+ for choice in choices_array:
+ if len(choice['name']) > _max_len: _max_len = len(choice['name'])
+
+ if _max_len % 4: _max_len += 4 - (_max_len % 4)
+ return _max_len
+
+ def space_name(name, max_len):
+ name_len = len(name)
+ return ' '*(max_len - name_len)
+
+
+%>/******************************************************************************
+*
+* Copyright 2015-2016
+* Intel Corporation
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*
+% if gen_header:
+* @file ${filename}.h
+% else:
+* @file ${filename}.cpp
+% endif
+*
+* @brief Dynamic Knobs for Core.
+*
+* ======================= AUTO GENERATED: DO NOT EDIT !!! ====================
+*
+******************************************************************************/
+%if gen_header:
+#pragma once
+#include <string>
+
+template <typename T>
+struct Knob
+{
+ const T& Value() const { return m_Value; }
+ const T& Value(const T& newValue) { m_Value = newValue; return Value(); }
+
+protected:
+ Knob(const T& defaultValue) : m_Value(defaultValue) {}
+
+private:
+ T m_Value;
+};
+
+#define DEFINE_KNOB(_name, _type, _default) \\
+
+ struct Knob_##_name : Knob<_type> \\
+
+ { \\
+
+ Knob_##_name() : Knob<_type>(_default) { } \\
+
+ static const char* Name() { return "KNOB_" #_name; } \\
+
+ } _name;
+
+#define GET_KNOB(_name) g_GlobalKnobs._name.Value()
+#define SET_KNOB(_name, _newValue) g_GlobalKnobs._name.Value(_newValue)
+
+struct GlobalKnobs
+{
+ % for knob in knobs:
+ //-----------------------------------------------------------
+ // KNOB_${knob[0]}
+ //
+ % for line in knob[1]['desc']:
+ // ${line}
+ % endfor
+ % if knob[1].get('choices'):
+ <%
+ choices = knob[1].get('choices')
+ _max_len = calc_max_name_len(choices) %>//
+ % for i in range(len(choices)):
+ // ${choices[i]['name']}${space_name(choices[i]['name'], _max_len)} = ${format(choices[i]['value'], '#010x')}
+ % endfor
+ % endif
+ //
+ % if knob[1]['type'] == 'std::string':
+ DEFINE_KNOB(${knob[0]}, ${knob[1]['type']}, "${repr(knob[1]['default'])[1:-1]}");
+ % else:
+ DEFINE_KNOB(${knob[0]}, ${knob[1]['type']}, ${knob[1]['default']});
+ % endif
+
+ % endfor
+ GlobalKnobs();
+ std::string ToString(const char* optPerLinePrefix="");
+};
+extern GlobalKnobs g_GlobalKnobs;
+
+#undef DEFINE_KNOB
+
+% for knob in knobs:
+#define KNOB_${knob[0]}${space_knob(knob[0])} GET_KNOB(${knob[0]})
+% endfor
+
+% else:
+% for inc in includes:
+#include <${inc}>
+% endfor
+
+//========================================================
+// Static Data Members
+//========================================================
+GlobalKnobs g_GlobalKnobs;
+
+//========================================================
+// Knob Initialization
+//========================================================
+GlobalKnobs::GlobalKnobs()
+{
+ % for knob in knobs:
+ InitKnob(${knob[0]});
+ % endfor
+}
+
+//========================================================
+// Knob Display (Convert to String)
+//========================================================
+std::string GlobalKnobs::ToString(const char* optPerLinePrefix)
+{
+ std::basic_stringstream<char> str;
+ str << std::showbase << std::setprecision(1) << std::fixed;
+
+ if (optPerLinePrefix == nullptr) { optPerLinePrefix = ""; }
+
+ % for knob in knobs:
+ str << optPerLinePrefix << "KNOB_${knob[0]}:${space_knob(knob[0])}";
+ % if knob[1]['type'] == 'bool':
+ str << (KNOB_${knob[0]} ? "+\n" : "-\n");
+ % elif knob[1]['type'] != 'float' and knob[1]['type'] != 'std::string':
+ str << std::hex << std::setw(11) << std::left << KNOB_${knob[0]};
+ str << std::dec << KNOB_${knob[0]} << "\n";
+ % else:
+ str << KNOB_${knob[0]} << "\n";
+ % endif
+ % endfor
+ str << std::ends;
+
+ return str.str();
+}
+
+% endif
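A hedged sketch of expanding the knobs template above for a single knob; the (name, properties) tuple layout mirrors what the template indexes, but the knob, paths, and includes are invented for illustration.

    from mako.template import Template

    knobs = [("SINGLE_THREADED", {"type": "bool", "default": "false",
                                  "desc": ["Run without the threading system."]})]
    tpl = Template(filename="knobs.template")  # path assumed
    header = tpl.render(gen_header=True, filename="gen_knobs", knobs=knobs)
    source = tpl.render(gen_header=False, filename="gen_knobs", knobs=knobs,
                        includes=["gen_knobs.h", "iomanip", "sstream"])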
+++ /dev/null
-# Copyright (C) 2014-2015 Intel Corporation. All Rights Reserved.
-#
-# Permission is hereby granted, free of charge, to any person obtaining a
-# copy of this software and associated documentation files (the "Software"),
-# to deal in the Software without restriction, including without limitation
-# the rights to use, copy, modify, merge, publish, distribute, sublicense,
-# and/or sell copies of the Software, and to permit persons to whom the
-# Software is furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice (including the next
-# paragraph) shall be included in all copies or substantial portions of the
-# Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
-# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
-# IN THE SOFTWARE.
-
-#!deps/python32/python.exe
-
-import os, sys, re
-import argparse
-import json as JSON
-import operator
-
-header = r"""/****************************************************************************
-* Copyright (C) 2014-2016 Intel Corporation. All Rights Reserved.
-*
-* Permission is hereby granted, free of charge, to any person obtaining a
-* copy of this software and associated documentation files (the "Software"),
-* to deal in the Software without restriction, including without limitation
-* the rights to use, copy, modify, merge, publish, distribute, sublicense,
-* and/or sell copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following conditions:
-*
-* The above copyright notice and this permission notice (including the next
-* paragraph) shall be included in all copies or substantial portions of the
-* Software.
-*
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
-* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
-* IN THE SOFTWARE.
-*
-* @file %s
-*
-* @brief auto-generated file
-*
-* DO NOT EDIT
-*
-******************************************************************************/
-
-"""
-
-"""
-"""
-def gen_file_header(filename):
- global header
- headerStr = header % filename
- return headerStr.splitlines()
-
-
-inst_aliases = {
- 'SHUFFLE_VECTOR': 'VSHUFFLE',
- 'INSERT_ELEMENT': 'VINSERT',
- 'EXTRACT_ELEMENT': 'VEXTRACT',
- 'MEM_SET': 'MEMSET',
- 'MEM_CPY': 'MEMCOPY',
- 'MEM_MOVE': 'MEMMOVE',
- 'L_SHR': 'LSHR',
- 'A_SHR': 'ASHR',
- 'BIT_CAST': 'BITCAST',
- 'U_DIV': 'UDIV',
- 'S_DIV': 'SDIV',
- 'U_REM': 'UREM',
- 'S_REM': 'SREM',
- 'BIN_OP': 'BINOP',
-}
-
-intrinsics = [
- ["VGATHERPD", "x86_avx2_gather_d_pd_256", ["src", "pBase", "indices", "mask", "scale"]],
- ["VGATHERPS", "x86_avx2_gather_d_ps_256", ["src", "pBase", "indices", "mask", "scale"]],
- ["VGATHERDD", "x86_avx2_gather_d_d_256", ["src", "pBase", "indices", "mask", "scale"]],
- ["VSQRTPS", "x86_avx_sqrt_ps_256", ["a"]],
- ["VRSQRTPS", "x86_avx_rsqrt_ps_256", ["a"]],
- ["VRCPPS", "x86_avx_rcp_ps_256", ["a"]],
- ["VMINPS", "x86_avx_min_ps_256", ["a", "b"]],
- ["VMAXPS", "x86_avx_max_ps_256", ["a", "b"]],
- ["VROUND", "x86_avx_round_ps_256", ["a", "rounding"]],
- ["VCMPPS", "x86_avx_cmp_ps_256", ["a", "b", "cmpop"]],
- ["VBLENDVPS", "x86_avx_blendv_ps_256", ["a", "b", "mask"]],
- ["BEXTR_32", "x86_bmi_bextr_32", ["src", "control"]],
- ["VMASKLOADD", "x86_avx2_maskload_d_256", ["src", "mask"]],
- ["VMASKMOVPS", "x86_avx_maskload_ps_256", ["src", "mask"]],
- ["VMASKSTOREPS", "x86_avx_maskstore_ps_256", ["src", "mask", "val"]],
- ["VPSHUFB", "x86_avx2_pshuf_b", ["a", "b"]],
- ["VPERMD", "x86_avx2_permd", ["a", "idx"]],
- ["VPERMPS", "x86_avx2_permps", ["idx", "a"]],
- ["VCVTPD2PS", "x86_avx_cvt_pd2_ps_256", ["a"]],
- ["VCVTPH2PS", "x86_vcvtph2ps_256", ["a"]],
- ["VCVTPS2PH", "x86_vcvtps2ph_256", ["a", "round"]],
- ["VHSUBPS", "x86_avx_hsub_ps_256", ["a", "b"]],
- ["VPTESTC", "x86_avx_ptestc_256", ["a", "b"]],
- ["VPTESTZ", "x86_avx_ptestz_256", ["a", "b"]],
- ["VFMADDPS", "x86_fma_vfmadd_ps_256", ["a", "b", "c"]],
- ["VMOVMSKPS", "x86_avx_movmsk_ps_256", ["a"]],
- ["INTERRUPT", "x86_int", ["a"]],
- ]
-
-def convert_uppercamel(name):
- s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
- return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).upper()
-
-"""
- Given an input file (e.g. IRBuilder.h) generates function dictionary.
-"""
-def parse_ir_builder(input_file):
-
- functions = []
-
- lines = input_file.readlines()
-
- idx = 0
- while idx < len(lines) - 1:
- line = lines[idx].rstrip()
- idx += 1
-
- #match = re.search(r"\*Create", line)
- match = re.search(r"[\*\s]Create(\w*)\(", line)
- if match is not None:
- #print("Line: %s" % match.group(1))
-
- if re.search(r"^\s*Create", line) is not None:
- func_sig = lines[idx-2].rstrip() + line
- else:
- func_sig = line
-
- end_of_args = False
- while not end_of_args:
- end_paren = re.search(r"\)", line)
- if end_paren is not None:
- end_of_args = True
- else:
- line = lines[idx].rstrip()
- func_sig += line
- idx += 1
-
- delfunc = re.search(r"LLVM_DELETED_FUNCTION|= delete;", func_sig)
-
- if not delfunc:
- func = re.search(r"(.*?)\*[\n\s]*(Create\w*)\((.*?)\)", func_sig)
- if func is not None:
-
- return_type = func.group(1).lstrip() + '*'
- func_name = func.group(2)
- arguments = func.group(3)
-
- func_args = ''
- func_args_nodefs = ''
-
- num_args = arguments.count(',')
-
- arg_names = []
- num_args = 0
- args = arguments.split(',')
- for arg in args:
- arg = arg.lstrip()
- if arg:
- if num_args > 0:
- func_args += ', '
- func_args_nodefs += ', '
- func_args += arg
- func_args_nodefs += arg.split(' =')[0]
-
- split_args = arg.split('=')
- arg_name = split_args[0].rsplit(None, 1)[-1]
-
- #print("Before ArgName = %s" % arg_name)
-
- reg_arg = re.search(r"[\&\*]*(\w*)", arg_name)
- if reg_arg:
- #print("Arg Name = %s" % reg_arg.group(1))
- arg_names += [reg_arg.group(1)]
-
- num_args += 1
-
- ignore = False
-
- # The following functions need to be ignored.
- if func_name == 'CreateInsertNUWNSWBinOp':
- ignore = True
-
- if func_name == 'CreateMaskedIntrinsic':
- ignore = True
-
- # Convert CamelCase to CAMEL_CASE
- func_mod = re.search(r"Create(\w*)", func_name)
- if func_mod:
- func_mod = func_mod.group(1)
- func_mod = convert_uppercamel(func_mod)
- if func_mod[0:2] == 'F_' or func_mod[0:2] == 'I_':
- func_mod = func_mod[0] + func_mod[2:]
-
- # Substitute alias based on CAMEL_CASE name.
- func_alias = inst_aliases.get(func_mod)
- if not func_alias:
- func_alias = func_mod
-
- if func_name == 'CreateCall' or func_name == 'CreateGEP':
- arglist = re.search(r'ArrayRef', func_args)
- if arglist:
- func_alias = func_alias + 'A'
-
- if not ignore:
- functions.append({
- "name": func_name,
- "alias": func_alias,
- "return": return_type,
- "args": func_args,
- "args_nodefs": func_args_nodefs,
- "arg_names": arg_names
- })
-
- return functions
-
-"""
- Auto-generates macros for LLVM IR
-"""
-def generate_gen_h(functions, output_file):
- output_lines = gen_file_header(os.path.basename(output_file.name))
-
- output_lines += [
- '#pragma once',
- '',
- '//////////////////////////////////////////////////////////////////////////',
- '/// Auto-generated Builder IR declarations',
- '//////////////////////////////////////////////////////////////////////////',
- ]
-
- for func in functions:
- name = func['name']
- if func['alias']:
- name = func['alias']
- output_lines += [
- '%s%s(%s);' % (func['return'], name, func['args'])
- ]
-
- output_file.write('\n'.join(output_lines) + '\n')
-
-"""
- Auto-generates macros for LLVM IR
-"""
-def generate_gen_cpp(functions, output_file):
- output_lines = gen_file_header(os.path.basename(output_file.name))
-
- output_lines += [
- '#include \"builder.h\"',
- '',
- 'namespace SwrJit',
- '{',
- ' using namespace llvm;',
- '',
- ]
-
- for func in functions:
- name = func['name']
- if func['alias']:
- name = func['alias']
-
- args = func['arg_names']
- func_args = ''
- first_arg = True
- for arg in args:
- if not first_arg:
- func_args += ', '
- func_args += arg
- first_arg = False
-
- output_lines += [
- ' //////////////////////////////////////////////////////////////////////////',
- ' %sBuilder::%s(%s)' % (func['return'], name, func['args_nodefs']),
- ' {',
- ' return IRB()->%s(%s);' % (func['name'], func_args),
- ' }',
- '',
- ]
- output_lines.append('}')
- output_file.write('\n'.join(output_lines) + '\n')
-
-"""
- Auto-generates macros for LLVM IR
-"""
-def generate_x86_h(output_file):
- output_lines = gen_file_header(os.path.basename(output_file.name))
-
- output_lines += [
- '#pragma once',
- '',
- '//////////////////////////////////////////////////////////////////////////',
- '/// Auto-generated x86 intrinsics',
- '//////////////////////////////////////////////////////////////////////////',
- ]
-
- for inst in intrinsics:
- #print("Inst: %s, x86: %s numArgs: %d" % (inst[0], inst[1], len(inst[2])))
-
- args = ''
- first = True
- for arg in inst[2]:
- if not first:
- args += ', '
- args += ("Value* %s" % arg)
- first = False
-
- output_lines += [
- 'Value *%s(%s);' % (inst[0], args)
- ]
-
- output_file.write('\n'.join(output_lines) + '\n')
-
-"""
- Auto-generates macros for LLVM IR
-"""
-def generate_x86_cpp(output_file):
- output_lines = gen_file_header(os.path.basename(output_file.name))
-
- output_lines += [
- '#include \"builder.h\"',
- '',
- 'namespace SwrJit',
- '{',
- ' using namespace llvm;',
- '',
- ]
-
- for inst in intrinsics:
- #print("Inst: %s, x86: %s numArgs: %d" % (inst[0], inst[1], len(inst[2])))
-
- args = ''
- pass_args = ''
- first = True
- for arg in inst[2]:
- if not first:
- args += ', '
- pass_args += ', '
- args += ("Value* %s" % arg)
- pass_args += arg
- first = False
-
- output_lines += [
- ' //////////////////////////////////////////////////////////////////////////',
- ' Value *Builder::%s(%s)' % (inst[0], args),
- ' {',
- ' Function *func = Intrinsic::getDeclaration(JM()->mpCurrentModule, Intrinsic::%s);' % inst[1],
- ]
- if inst[0] == "VPERMD":
- rev_args = ''
- first = True
- for arg in reversed(inst[2]):
- if not first:
- rev_args += ', '
- rev_args += arg
- first = False
-
- output_lines += [
- '#if (HAVE_LLVM == 0x306) && (LLVM_VERSION_PATCH == 0)',
- ' return CALL(func, std::initializer_list<Value*>{%s});' % rev_args,
- '#else',
- ]
- output_lines += [
- ' return CALL(func, std::initializer_list<Value*>{%s});' % pass_args,
- ]
- if inst[0] == "VPERMD":
- output_lines += [
- '#endif',
- ]
- output_lines += [
- ' }',
- '',
- ]
-
- output_lines.append('}')
- output_file.write('\n'.join(output_lines) + '\n')
-
-"""
-    Entry point when this script is run from the command line. Parses the
-    command-line arguments that control how the script behaves.
-"""
-def main():
-
- # Parse args...
- parser = argparse.ArgumentParser()
- parser.add_argument("--input", "-i", type=argparse.FileType('r'), help="Path to IRBuilder.h", required=False)
- parser.add_argument("--output", "-o", type=argparse.FileType('w'), help="Path to output file", required=True)
- parser.add_argument("--gen_h", "-gen_h", help="Generate builder_gen.h", action="store_true", default=False)
- parser.add_argument("--gen_cpp", "-gen_cpp", help="Generate builder_gen.cpp", action="store_true", default=False)
- parser.add_argument("--gen_x86_h", "-gen_x86_h", help="Generate x86 intrinsics. No input is needed.", action="store_true", default=False)
- parser.add_argument("--gen_x86_cpp", "-gen_x86_cpp", help="Generate x86 intrinsics. No input is needed.", action="store_true", default=False)
- args = parser.parse_args()
-
- if args.input:
- functions = parse_ir_builder(args.input)
-
- if args.gen_h:
- generate_gen_h(functions, args.output)
-
- if args.gen_cpp:
- generate_gen_cpp(functions, args.output)
- else:
- if args.gen_x86_h:
- generate_x86_h(args.output)
-
- if args.gen_x86_cpp:
- generate_x86_cpp(args.output)
-
- if args.gen_h:
- print("Need to specify --input for --gen_h!")
-
- if args.gen_cpp:
- print("Need to specify --input for --gen_cpp!")
-
-if __name__ == '__main__':
- main()
-# END OF FILE
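
Editor's note, a minimal standalone sketch (not part of the original patch): an IRBuilder CreateXxx name is converted from CamelCase to CAMEL_CASE, the F_/I_ prefixes are collapsed, and the inst_aliases table overrides the result where an entry exists. The alias table below is only an illustrative subset.

    import re

    # Illustrative subset of the alias table above.
    INST_ALIASES = {'SHUFFLE_VECTOR': 'VSHUFFLE', 'BIT_CAST': 'BITCAST'}

    def convert_uppercamel(name):
        # CamelCase -> CAMEL_CASE, using the same regexes as the helper above.
        s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
        return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).upper()

    def alias_for(create_name):
        # Strip the 'Create' prefix, normalize, collapse F_/I_, then apply aliases.
        mod = convert_uppercamel(create_name[len('Create'):])
        if mod[:2] in ('F_', 'I_'):
            mod = mod[0] + mod[2:]
        return INST_ALIASES.get(mod, mod)

    print(alias_for('CreateShuffleVector'))  # VSHUFFLE
    print(alias_for('CreateFAdd'))           # FADD
    print(alias_for('CreateBitCast'))        # BITCAST
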
+++ /dev/null
-# Copyright (C) 2014-2015 Intel Corporation. All Rights Reserved.
-#
-# Permission is hereby granted, free of charge, to any person obtaining a
-# copy of this software and associated documentation files (the "Software"),
-# to deal in the Software without restriction, including without limitation
-# the rights to use, copy, modify, merge, publish, distribute, sublicense,
-# and/or sell copies of the Software, and to permit persons to whom the
-# Software is furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice (including the next
-# paragraph) shall be included in all copies or substantial portions of the
-# Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
-# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
-# IN THE SOFTWARE.
-
-#!deps/python32/python.exe
-
-import os, sys, re
-import argparse
-import json as JSON
-import operator
-
-header = r"""
-/****************************************************************************
-* Copyright (C) 2014-2016 Intel Corporation. All Rights Reserved.
-*
-* Permission is hereby granted, free of charge, to any person obtaining a
-* copy of this software and associated documentation files (the "Software"),
-* to deal in the Software without restriction, including without limitation
-* the rights to use, copy, modify, merge, publish, distribute, sublicense,
-* and/or sell copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following conditions:
-*
-* The above copyright notice and this permission notice (including the next
-* paragraph) shall be included in all copies or substantial portions of the
-* Software.
-*
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
-* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
-* IN THE SOFTWARE.
-*
-* @file %s
-*
-* @brief auto-generated file
-*
-* DO NOT EDIT
-*
-******************************************************************************/
-
-#pragma once
-
-namespace SwrJit
-{
- using namespace llvm;
-
-"""
-
-"""
-"""
-def gen_file_header(filename):
- global header
- headerStr = header % filename
- return headerStr.splitlines()
-
-"""
-"""
-def gen_llvm_type(type, name, postfix_name, is_pointer, is_pointer_pointer, is_array, is_array_array, array_count, array_count1, is_llvm_struct, is_llvm_enum, is_llvm_pfn, output_file):
-
- llvm_type = ''
-
- if is_llvm_struct:
- if is_pointer or is_pointer_pointer:
- llvm_type = 'Type::getInt32Ty(ctx)'
- else:
- llvm_type = 'ArrayType::get(Type::getInt8Ty(ctx), sizeof(%s))' % type
- elif is_llvm_enum:
- llvm_type = 'Type::getInt32Ty(ctx)'
- elif is_llvm_pfn:
- llvm_type = 'PointerType::get(Type::getInt8Ty(ctx), 0)'
- else:
- if type == "BYTE" or type == "char" or type == "uint8_t" or type == "int8_t" or type == 'bool':
- llvm_type = 'Type::getInt8Ty(ctx)'
- elif type == 'UINT64' or type == 'INT64' or type == 'uint64_t' or type == 'int64_t':
- llvm_type = 'Type::getInt64Ty(ctx)'
- elif type == 'UINT16' or type == 'int16_t' or type == 'uint16_t':
- llvm_type = 'Type::getInt16Ty(ctx)'
- elif type == 'UINT' or type == 'INT' or type == 'int' or type == 'BOOL' or type == 'uint32_t' or type == 'int32_t':
- llvm_type = 'Type::getInt32Ty(ctx)'
- elif type == 'float' or type == 'FLOAT':
- llvm_type = 'Type::getFloatTy(ctx)'
- elif type == 'double' or type == 'DOUBLE':
- llvm_type = 'Type::getDoubleTy(ctx)'
- elif type == 'void' or type == 'VOID':
- llvm_type = 'Type::getInt32Ty(ctx)'
- elif type == 'HANDLE':
- llvm_type = 'PointerType::get(Type::getInt32Ty(ctx), 0)'
- elif type == 'simdscalar':
- llvm_type = 'VectorType::get(Type::getFloatTy(ctx), pJitMgr->mVWidth)'
- elif type == 'simdscalari':
- llvm_type = 'VectorType::get(Type::getInt32Ty(ctx), pJitMgr->mVWidth)'
- elif type == 'simdvector':
- llvm_type = 'ArrayType::get(VectorType::get(Type::getFloatTy(ctx), pJitMgr->mVWidth), 4)'
- else:
- llvm_type = 'Gen_%s%s(pJitMgr)' % (type, postfix_name)
-
- if is_pointer:
- llvm_type = 'PointerType::get(%s, 0)' % llvm_type
-
- if is_pointer_pointer:
- llvm_type = 'PointerType::get(%s, 0)' % llvm_type
-
- if is_array_array:
- llvm_type = 'ArrayType::get(ArrayType::get(%s, %s), %s)' % (llvm_type, array_count1, array_count)
- elif is_array:
- llvm_type = 'ArrayType::get(%s, %s)' % (llvm_type, array_count)
-
- return [' members.push_back( %s ); // %s' % (llvm_type, name)]
-
-"""
-"""
-def gen_llvm_types(input_file, output_file):
-
- output_lines = gen_file_header(os.path.basename(output_file.name))
-
- lines = input_file.readlines()
-
- postfix_name = ""
-
- for idx in range(len(lines)):
- line = lines[idx].rstrip()
-
- if "gen_llvm_types FINI" in line:
- break
-
- match = re.match(r"(\s*)struct(\s*)(\w+)", line)
- if match:
- llvm_args = []
-
- # Detect start of structure
- is_fwd_decl = re.search(r";", line)
-
- if not is_fwd_decl:
-
-                # Extract the structure name
- struct_name = match.group(3).strip()
-
- output_lines += [
- ' //////////////////////////////////////////////////////////////////////////',
- ' /// Generate LLVM type information for %s' % struct_name,
- ' INLINE static StructType *Gen_%s%s(JitManager* pJitMgr)' % (struct_name, postfix_name),
- ' {',
- ' LLVMContext& ctx = pJitMgr->mContext;',
- ' std::vector<Type*> members;',
- '',
- ]
-
- end_of_struct = False
-
- while not end_of_struct and idx < len(lines)-1:
- idx += 1
- line = lines[idx].rstrip()
-
- is_llvm_typedef = re.search(r"@llvm_typedef", line)
- if is_llvm_typedef is not None:
- is_llvm_typedef = True
- else:
- is_llvm_typedef = False
-
- ###########################################
-                    # Is the field an LLVM struct? Tells the script to treat the type as an array of bytes the size of the structure.
- is_llvm_struct = re.search(r"@llvm_struct", line)
-
- if is_llvm_struct is not None:
- is_llvm_struct = True
- else:
- is_llvm_struct = False
-
- ###########################################
-                    # Is the field an LLVM enum? Tells the script to treat the type as an enum and replace it with a uint32 type.
- is_llvm_enum = re.search(r"@llvm_enum", line)
-
- if is_llvm_enum is not None:
- is_llvm_enum = True
- else:
- is_llvm_enum = False
-
- ###########################################
-                    # Is the field an LLVM function pointer? Tells the script to replace the type with a generic pointer (int8*).
- is_llvm_pfn = re.search(r"@llvm_pfn", line)
-
- if is_llvm_pfn is not None:
- is_llvm_pfn = True
- else:
- is_llvm_pfn = False
-
- ###########################################
- # Is field const?
- is_const = re.search(r"\s+const\s+", line)
-
- if is_const is not None:
- is_const = True
- else:
- is_const = False
-
- ###########################################
-                    # Is the field a pointer to a pointer?
- is_pointer_pointer = re.search("\*\*", line)
-
- if is_pointer_pointer is not None:
- is_pointer_pointer = True
- else:
- is_pointer_pointer = False
-
- ###########################################
- # Is field a pointer?
- is_pointer = re.search("\*", line)
-
- if is_pointer is not None:
- is_pointer = True
- else:
- is_pointer = False
-
- ###########################################
- # Is field an array of arrays?
- # TODO: Can add this to a list.
- is_array_array = re.search("\[(\w*)\]\[(\w*)\]", line)
- array_count = '0'
- array_count1 = '0'
-
- if is_array_array is not None:
- array_count = is_array_array.group(1)
- array_count1 = is_array_array.group(2)
- is_array_array = True
- else:
- is_array_array = False
-
- ###########################################
- # Is field an array?
- is_array = re.search("\[(\w*)\]", line)
-
- if is_array is not None:
- array_count = is_array.group(1)
- is_array = True
- else:
- is_array = False
-
- is_scoped = re.search("::", line)
-
- if is_scoped is not None:
- is_scoped = True
- else:
- is_scoped = False
-
- type = None
- name = None
- if is_const and is_pointer:
-
- if is_scoped:
- field_match = re.match(r"(\s*)(\w+\<*\w*\>*)(\s+)(\w+::)(\w+)(\s*\**\s*)(\w+)", line)
-
- type = "%s%s" % (field_match.group(4), field_match.group(5))
- name = field_match.group(7)
- else:
- field_match = re.match(r"(\s*)(\w+\<*\w*\>*)(\s+)(\w+)(\s*\**\s*)(\w+)", line)
-
- type = field_match.group(4)
- name = field_match.group(6)
-
- elif is_pointer:
- field_match = re.match(r"(\s*)(\s+)(\w+\<*\w*\>*)(\s*\**\s*)(\w+)", line)
-
- if field_match:
- type = field_match.group(3)
- name = field_match.group(5)
- elif is_const:
- field_match = re.match(r"(\s*)(\w+\<*\w*\>*)(\s+)(\w+)(\s*)(\w+)", line)
-
- if field_match:
- type = field_match.group(4)
- name = field_match.group(6)
- else:
- if is_scoped:
- field_match = re.match(r"\s*(\w+\<*\w*\>*)\s*::\s*(\w+\<*\w*\>*)\s+(\w+)", line)
-
- if field_match:
- type = field_match.group(1) + '::' + field_match.group(2)
- name = field_match.group(3)
- else:
- field_match = re.match(r"(\s*)(\w+\<*\w*\>*)(\s+)(\w+)", line)
-
- if field_match:
- type = field_match.group(2)
- name = field_match.group(4)
-
- if is_llvm_typedef is False:
- if type is not None:
- output_lines += gen_llvm_type(type, name, postfix_name, is_pointer, is_pointer_pointer, is_array, is_array_array, array_count, array_count1, is_llvm_struct, is_llvm_enum, is_llvm_pfn, output_file)
- llvm_args.append(name)
-
- # Detect end of structure
- end_of_struct = re.match(r"(\s*)};", line)
-
- if (end_of_struct):
- output_lines += [
- '',
- ' return StructType::get(ctx, members, false);',
- ' }',
- '',
- ]
-
- for i in range(len(llvm_args)):
- output_lines.append(' static const uint32_t %s%s_%s = %s;' % (struct_name, postfix_name, llvm_args[i], i))
-
- output_lines.append('')
-
- output_lines.append('}')
- output_file.write('\n'.join(output_lines) + '\n')
-
-"""
-    Entry point when this script is run from the command line. Parses the
-    command-line arguments that control how the script behaves.
-"""
-def main():
-
- # Parse args...
- parser = argparse.ArgumentParser()
- parser.add_argument("--input", "-i", type=argparse.FileType('r'),
- help="Path to input file containing structs", required=True)
- parser.add_argument("--output", "-o", type=argparse.FileType('w'),
- help="Path to output file", required=True)
- parser.add_argument("--scalar", "-scalar", help="Generates scalar files with all enums", action="store_true", default=False)
- args = parser.parse_args()
-
- gen_llvm_types(args.input, args.output)
-
-if __name__ == '__main__':
- main()
-# END OF FILE
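
Editor's note, a sketch (not part of the original patch): the field declarations below are hypothetical, not taken from core/state.h; they only illustrate how the type mapping above turns a few common field shapes into LLVM member expressions.

    # Hypothetical field declarations and the member expressions the generator
    # above would emit for them (simplified; this is not the full parser).
    examples = [
        ('uint32_t numAttribs;', 'Type::getInt32Ty(ctx)'),
        ('float* pBuffer;', 'PointerType::get(Type::getFloatTy(ctx), 0)'),
        ('simdscalar vX[4];',
         'ArrayType::get(VectorType::get(Type::getFloatTy(ctx), pJitMgr->mVWidth), 4)'),
    ]
    for decl, llvm_expr in examples:
        print('%-22s -> members.push_back( %s );' % (decl, llvm_expr))
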
+++ /dev/null
-# Copyright (C) 2014-2016 Intel Corporation. All Rights Reserved.
-#
-# Permission is hereby granted, free of charge, to any person obtaining a
-# copy of this software and associated documentation files (the "Software"),
-# to deal in the Software without restriction, including without limitation
-# the rights to use, copy, modify, merge, publish, distribute, sublicense,
-# and/or sell copies of the Software, and to permit persons to whom the
-# Software is furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice (including the next
-# paragraph) shall be included in all copies or substantial portions of the
-# Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
-# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
-# IN THE SOFTWARE.
-
-# Python source
-from __future__ import print_function
-import os
-import sys
-import re
-import argparse
-from mako.template import Template
-from mako.exceptions import RichTraceback
-
-def write_template_to_string(template_filename, **kwargs):
- try:
- template = Template(filename=template_filename)
- # Split + Join fixes line-endings for whatever platform you are using
- return '\n'.join(template.render(**kwargs).splitlines())
- except:
- traceback = RichTraceback()
- for (filename, lineno, function, line) in traceback.traceback:
- print("File %s, line %s, in %s" % (filename, lineno, function))
- print(line, "\n")
- print("%s: %s" % (str(traceback.error.__class__.__name__), traceback.error))
-
-def write_template_to_file(template_filename, output_filename, **kwargs):
- with open(output_filename, "w") as outfile:
- print(write_template_to_string(template_filename, **kwargs), file=outfile)
-
-def parse_event_fields(lines, idx, event_dict):
- field_names = []
- field_types = []
- end_of_event = False
-
- num_fields = 0
-
- # record all fields in event definition.
- # note: we don't check if there's a leading brace.
- while not end_of_event and idx < len(lines):
- line = lines[idx].rstrip()
- idx += 1
-
- field = re.match(r"(\s*)(\w+)(\s*)(\w+)", line)
-
- if field:
- field_types.append(field.group(2))
- field_names.append(field.group(4))
- num_fields += 1
-
- end_of_event = re.match(r"(\s*)};", line)
-
- event_dict['field_types'] = field_types
- event_dict['field_names'] = field_names
- event_dict['num_fields'] = num_fields
-
- return idx
-
-def parse_enums(lines, idx, event_dict):
- enum_names = []
- end_of_enum = False
-
- # record all enum values in enumeration
- # note: we don't check if there's a leading brace.
- while not end_of_enum and idx < len(lines):
- line = lines[idx].rstrip()
- idx += 1
-
- preprocessor = re.search(r"#if|#endif", line)
-
- if not preprocessor:
- enum = re.match(r"(\s*)(\w+)(\s*)", line)
-
- if enum:
- enum_names.append(line)
-
- end_of_enum = re.match(r"(\s*)};", line)
-
- event_dict['names'] = enum_names
- return idx
-
-def parse_protos(filename):
- protos = {}
-
- with open(filename, 'r') as f:
- lines=f.readlines()
-
- idx = 0
-
- protos['events'] = {} # event dictionary containing events with their fields
- protos['event_names'] = [] # needed to keep events in order parsed. dict is not ordered.
- protos['enums'] = {}
- protos['enum_names'] = []
-
- eventId = 0
- raw_text = []
- while idx < len(lines):
- line = lines[idx].rstrip()
- idx += 1
-
- # search for event definitions.
- match = re.match(r"(\s*)event(\s*)(\w+)", line)
-
- if match:
- eventId += 1
- event_name = match.group(3)
- protos['event_names'].append(event_name)
-
- protos['events'][event_name] = {}
- protos['events'][event_name]['event_id'] = eventId
- idx = parse_event_fields(lines, idx, protos['events'][event_name])
-
- # search for enums.
- match = re.match(r"(\s*)enum(\s*)(\w+)", line)
-
- if match:
- enum_name = match.group(3)
- protos['enum_names'].append(enum_name)
-
- protos['enums'][enum_name] = {}
- idx = parse_enums(lines, idx, protos['enums'][enum_name])
-
- return protos
-
-def main():
-
- # Parse args...
- parser = argparse.ArgumentParser()
- parser.add_argument("--proto", "-p", help="Path to proto file", required=True)
- parser.add_argument("--output", "-o", help="Output filename (i.e. event.h)", required=True)
- parser.add_argument("--gen_event_h", "-geh", help="Generate event header", action="store_true", default=False)
- parser.add_argument("--gen_event_cpp", "-gec", help="Generate event cpp", action="store_true", default=False)
- parser.add_argument("--gen_eventhandler_h", "-gehh", help="Generate eventhandler header", action="store_true", default=False)
- parser.add_argument("--gen_eventhandlerfile_h", "-gehf", help="Generate eventhandler header for writing to files", action="store_true", default=False)
- args = parser.parse_args()
-
- proto_filename = args.proto
-
- (output_dir, output_filename) = os.path.split(args.output)
-
- if not output_dir:
- output_dir = "."
-
- #print("output_dir = %s" % output_dir, file=sys.stderr)
- #print("output_filename = %s" % output_filename, file=sys.stderr)
-
- if not os.path.exists(proto_filename):
- print("Error: Could not find proto file %s" % proto_filename, file=sys.stderr)
- return 1
-
- protos = parse_protos(proto_filename)
-
- # Generate event header
- if args.gen_event_h:
- curdir = os.path.dirname(os.path.abspath(__file__))
- template_file = os.sep.join([curdir, 'templates', 'ar_event_h.template'])
- output_fullpath = os.sep.join([output_dir, output_filename])
-
- write_template_to_file(template_file, output_fullpath,
- filename=output_filename,
- protos=protos)
-
- # Generate event implementation
- if args.gen_event_cpp:
- curdir = os.path.dirname(os.path.abspath(__file__))
- template_file = os.sep.join([curdir, 'templates', 'ar_event_cpp.template'])
- output_fullpath = os.sep.join([output_dir, output_filename])
-
- write_template_to_file(template_file, output_fullpath,
- filename=output_filename,
- protos=protos)
-
- # Generate event handler header
- if args.gen_eventhandler_h:
- curdir = os.path.dirname(os.path.abspath(__file__))
- template_file = os.sep.join([curdir, 'templates', 'ar_eventhandler_h.template'])
- output_fullpath = os.sep.join([output_dir, output_filename])
-
- write_template_to_file(template_file, output_fullpath,
- filename=output_filename,
- event_header="gen_ar_event.h", # todo: fix this!
- protos=protos)
-
- # Generate event handler header
- if args.gen_eventhandlerfile_h:
- curdir = os.path.dirname(os.path.abspath(__file__))
- template_file = os.sep.join([curdir, 'templates', 'ar_eventhandlerfile_h.template'])
- output_fullpath = os.sep.join([output_dir, output_filename])
-
- write_template_to_file(template_file, output_fullpath,
- filename=output_filename,
- event_header="gen_ar_eventhandler.h", # todo: fix this!
- protos=protos)
-
- return 0
-
-if __name__ == '__main__':
- sys.exit(main())
-
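
Editor's note, a sketch (not part of the original patch): the event and field names below are hypothetical, not taken from the real archrast proto file; they only show the shape of input the regexes above match.

    import re

    # Hypothetical proto excerpt in the format parse_protos() expects.
    sample = [
        'event DrawInstancedEvent',
        '{',
        '    uint32_t drawId;',
        '    uint32_t numVertices;',
        '};',
    ]

    header = re.match(r"(\s*)event(\s*)(\w+)", sample[0])
    print('event:', header.group(3))                     # DrawInstancedEvent

    for line in sample[1:]:
        field = re.match(r"(\s*)(\w+)(\s*)(\w+)", line)  # same field pattern as above
        if field:
            print('field:', field.group(2), field.group(4))
        if re.match(r"(\s*)};", line):                   # same end-of-event pattern
            break
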
+++ /dev/null
-# Copyright (C) 2017 Intel Corporation. All Rights Reserved.
-#
-# Permission is hereby granted, free of charge, to any person obtaining a
-# copy of this software and associated documentation files (the "Software"),
-# to deal in the Software without restriction, including without limitation
-# the rights to use, copy, modify, merge, publish, distribute, sublicense,
-# and/or sell copies of the Software, and to permit persons to whom the
-# Software is furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice (including the next
-# paragraph) shall be included in all copies or substantial portions of the
-# Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
-# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
-# IN THE SOFTWARE.
-
-# Python source
-# Compatible with Python2.X and Python3.X
-
-from __future__ import print_function
-import itertools
-import math
-import argparse
-import os
-import sys
-from mako.template import Template
-from mako.exceptions import RichTraceback
-
-def write_template_to_string(template_filename, **kwargs):
- try:
- template = Template(filename=os.path.abspath(template_filename))
- # Split + Join fixes line-endings for whatever platform you are using
- return '\n'.join(template.render(**kwargs).splitlines())
- except:
- traceback = RichTraceback()
- for (filename, lineno, function, line) in traceback.traceback:
- print("File %s, line %s, in %s" % (filename, lineno, function))
- print(line, "\n")
- print("%s: %s" % (str(traceback.error.__class__.__name__), traceback.error))
-
-def write_template_to_file(template_filename, output_filename, **kwargs):
- output_dirname = os.path.dirname(output_filename)
- if not os.path.exists(output_dirname):
- os.makedirs(output_dirname)
- with open(output_filename, "w") as outfile:
- print(write_template_to_string(template_filename, **kwargs), file=outfile)
-
-
-def main(args=sys.argv[1:]):
- thisDir = os.path.dirname(os.path.realpath(__file__))
-    parser = argparse.ArgumentParser("Generate files and initialization functions for all permutations of BackendPixelRate.")
- parser.add_argument('--dim', help="gBackendPixelRateTable array dimensions", nargs='+', type=int, required=True)
- parser.add_argument('--outdir', help="output directory", nargs='?', type=str, default=thisDir)
- parser.add_argument('--split', help="how many lines of initialization per file [0=no split]", nargs='?', type=int, default='512')
- parser.add_argument('--cpp', help="Generate cpp file(s)", action='store_true', default=False)
- parser.add_argument('--cmake', help="Generate cmake file", action='store_true', default=False)
-
-
-    args = parser.parse_args(args)
-
- output_list = []
- for x in args.dim:
- output_list.append(list(range(x)))
-
-    # generate all possible permutations of the template parameter inputs
- output_combinations = list(itertools.product(*output_list))
- output_list = []
-
- # for each permutation
- for x in range(len(output_combinations)):
-        # separate each template param into its own list member
- new_list = [output_combinations[x][i] for i in range(len(output_combinations[x]))]
- tempStr = 'gBackendPixelRateTable'
- #print each list member as an index in the multidimensional array
- for i in new_list:
- tempStr += '[' + str(i) + ']'
-        # map each entry in the permutation to its own string member; store it as the template instantiation string
- tempStr += " = BackendPixelRate<SwrBackendTraits<" + ','.join(map(str, output_combinations[x])) + '>>;'
- #append the line of c++ code in the list of output lines
- output_list.append(tempStr)
-
- # how many files should we split the global template initialization into?
- if (args.split == 0):
- numFiles = 1
- else:
- numFiles = (len(output_list) + args.split - 1) // args.split
- linesPerFile = (len(output_list) + numFiles - 1) // numFiles
- chunkedList = [output_list[x:x+linesPerFile] for x in range(0, len(output_list), linesPerFile)]
-
- # generate .cpp files
- if args.cpp:
- baseCppName = os.path.join(args.outdir, 'BackendPixelRate%s.cpp')
- templateCpp = os.path.join(thisDir, 'templates', 'backend_template.cpp')
-
- for fileNum in range(numFiles):
- filename = baseCppName % str(fileNum)
- print('Generating', filename)
- write_template_to_file(
- templateCpp,
- baseCppName % str(fileNum),
- fileNum=fileNum,
- funcList=chunkedList[fileNum])
-
- # generate gen_backend.cmake file
- if args.cmake:
- templateCmake = os.path.join(thisDir, 'templates', 'backend_template.cmake')
- cmakeFile = os.path.join(args.outdir, 'gen_backends.cmake')
- print('Generating', cmakeFile)
- write_template_to_file(
- templateCmake,
- cmakeFile,
- numFiles=numFiles,
- baseCppName=baseCppName.replace('\\','/'))
-
- print("Generated %d template instantiations in %d files" % (len(output_list), numFiles))
-
- return 0
-
-if __name__ == '__main__':
- sys.exit(main())
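
Editor's note, a small worked example (not part of the original patch) of the permutation and file-splitting arithmetic above, using hypothetical --dim and --split values.

    import itertools

    dims = [2, 3]   # hypothetical gBackendPixelRateTable dimensions (--dim 2 3)
    split = 4       # initializer lines per generated file (--split 4)

    combos = list(itertools.product(*[range(d) for d in dims]))
    lines = ['gBackendPixelRateTable' + ''.join('[%d]' % i for i in c) +
             ' = BackendPixelRate<SwrBackendTraits<%s>>;' % ','.join(map(str, c))
             for c in combos]

    num_files = 1 if split == 0 else (len(lines) + split - 1) // split
    lines_per_file = (len(lines) + num_files - 1) // num_files
    chunks = [lines[i:i + lines_per_file] for i in range(0, len(lines), lines_per_file)]

    print(len(lines), num_files, lines_per_file)  # 6 2 3
    print(chunks[0][0])  # gBackendPixelRateTable[0][0] = BackendPixelRate<SwrBackendTraits<0,0>>;
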
+++ /dev/null
-# Copyright (C) 2014-2016 Intel Corporation. All Rights Reserved.
-#
-# Permission is hereby granted, free of charge, to any person obtaining a
-# copy of this software and associated documentation files (the "Software"),
-# to deal in the Software without restriction, including without limitation
-# the rights to use, copy, modify, merge, publish, distribute, sublicense,
-# and/or sell copies of the Software, and to permit persons to whom the
-# Software is furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice (including the next
-# paragraph) shall be included in all copies or substantial portions of the
-# Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
-# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
-# IN THE SOFTWARE.
-
-# Python source
-from __future__ import print_function
-import os
-import sys
-import argparse
-import knob_defs
-from mako.template import Template
-from mako.exceptions import RichTraceback
-
-def write_template_to_string(template_filename, **kwargs):
- try:
- template = Template(filename=os.path.abspath(template_filename))
- # Split + Join fixes line-endings for whatever platform you are using
- return '\n'.join(template.render(**kwargs).splitlines())
- except:
- traceback = RichTraceback()
- for (filename, lineno, function, line) in traceback.traceback:
- print("File %s, line %s, in %s" % (filename, lineno, function))
- print(line, "\n")
- print("%s: %s" % (str(traceback.error.__class__.__name__), traceback.error))
-
-def write_template_to_file(template_filename, output_filename, **kwargs):
- output_dirname = os.path.dirname(output_filename)
- if not os.path.exists(output_dirname):
- os.makedirs(output_dirname)
- with open(output_filename, "w") as outfile:
- print(write_template_to_string(template_filename, **kwargs), file=outfile)
-
-def main(args=sys.argv[1:]):
-
- # parse args
- parser = argparse.ArgumentParser()
- parser.add_argument("--input", "-i", help="Path to knobs.template", required=True)
- parser.add_argument("--output", "-o", help="Path to output file", required=True)
- parser.add_argument("--gen_h", "-gen_h", help="Generate gen_knobs.h", action="store_true", default=False)
- parser.add_argument("--gen_cpp", "-gen_cpp", help="Generate gen_knobs.cpp", action="store_true", required=False)
-
- args = parser.parse_args()
-
- if args.input:
- if args.gen_h:
- write_template_to_file(args.input,
- args.output,
- filename='gen_knobs',
- knobs=knob_defs.KNOBS,
- includes=['core/knobs_init.h', 'common/os.h', 'sstream', 'iomanip'],
- gen_header=True)
-
- if args.gen_cpp:
- write_template_to_file(args.input,
- args.output,
- filename='gen_knobs',
- knobs=knob_defs.KNOBS,
- includes=['core/knobs_init.h', 'common/os.h', 'sstream', 'iomanip'],
- gen_header=False)
-
- return 0
-
-if __name__ == '__main__':
- sys.exit(main())
-
+++ /dev/null
-# Copyright (C) 2014-2016 Intel Corporation. All Rights Reserved.
-#
-# Permission is hereby granted, free of charge, to any person obtaining a
-# copy of this software and associated documentation files (the "Software"),
-# to deal in the Software without restriction, including without limitation
-# the rights to use, copy, modify, merge, publish, distribute, sublicense,
-# and/or sell copies of the Software, and to permit persons to whom the
-# Software is furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice (including the next
-# paragraph) shall be included in all copies or substantial portions of the
-# Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
-# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
-# IN THE SOFTWARE.
-
-# Python source
-KNOBS = [
-
- ['ENABLE_ASSERT_DIALOGS', {
- 'type' : 'bool',
- 'default' : 'true',
- 'desc' : ['Use dialogs when asserts fire.',
- 'Asserts are only enabled in debug builds'],
- 'category' : 'debug',
- }],
-
- ['SINGLE_THREADED', {
- 'type' : 'bool',
- 'default' : 'false',
- 'desc' : ['If enabled will perform all rendering on the API thread.',
- 'This is useful mainly for debugging purposes.'],
- 'category' : 'debug',
- }],
-
- ['DUMP_SHADER_IR', {
- 'type' : 'bool',
- 'default' : 'false',
- 'desc' : ['Dumps shader LLVM IR at various stages of jit compilation.'],
- 'category' : 'debug',
- }],
-
- ['USE_GENERIC_STORETILE', {
- 'type' : 'bool',
- 'default' : 'false',
- 'desc' : ['Always use generic function for performing StoreTile.',
- 'Will be slightly slower than using optimized (jitted) path'],
- 'category' : 'debug',
- }],
-
- ['FAST_CLEAR', {
- 'type' : 'bool',
- 'default' : 'true',
- 'desc' : ['Replace 3D primitive execute with a SWRClearRT operation and',
- 'defer clear execution to first backend op on hottile, or hottile store'],
- 'category' : 'perf',
- }],
-
- ['MAX_NUMA_NODES', {
- 'type' : 'uint32_t',
- 'default' : '0',
- 'desc' : ['Maximum # of NUMA-nodes per system used for worker threads',
- ' 0 == ALL NUMA-nodes in the system',
- ' N == Use at most N NUMA-nodes for rendering'],
- 'category' : 'perf',
- }],
-
- ['MAX_CORES_PER_NUMA_NODE', {
- 'type' : 'uint32_t',
- 'default' : '0',
- 'desc' : ['Maximum # of cores per NUMA-node used for worker threads.',
- ' 0 == ALL non-API thread cores per NUMA-node',
- ' N == Use at most N cores per NUMA-node'],
- 'category' : 'perf',
- }],
-
- ['MAX_THREADS_PER_CORE', {
- 'type' : 'uint32_t',
- 'default' : '1',
- 'desc' : ['Maximum # of (hyper)threads per physical core used for worker threads.',
- ' 0 == ALL hyper-threads per core',
- ' N == Use at most N hyper-threads per physical core'],
- 'category' : 'perf',
- }],
-
- ['MAX_WORKER_THREADS', {
- 'type' : 'uint32_t',
- 'default' : '0',
- 'desc' : ['Maximum worker threads to spawn.',
- '',
- 'IMPORTANT: If this is non-zero, no worker threads will be bound to',
- 'specific HW threads. They will all be "floating" SW threads.',
- 'In this case, the above 3 KNOBS will be ignored.'],
- 'category' : 'perf',
- }],
-
- ['BUCKETS_START_FRAME', {
- 'type' : 'uint32_t',
- 'default' : '1200',
- 'desc' : ['Frame from when to start saving buckets data.',
- '',
- 'NOTE: KNOB_ENABLE_RDTSC must be enabled in core/knobs.h',
- 'for this to have an effect.'],
- 'category' : 'perf',
- }],
-
- ['BUCKETS_END_FRAME', {
- 'type' : 'uint32_t',
- 'default' : '1400',
- 'desc' : ['Frame at which to stop saving buckets data.',
- '',
- 'NOTE: KNOB_ENABLE_RDTSC must be enabled in core/knobs.h',
- 'for this to have an effect.'],
- 'category' : 'perf',
- }],
-
- ['WORKER_SPIN_LOOP_COUNT', {
- 'type' : 'uint32_t',
- 'default' : '5000',
- 'desc' : ['Number of spin-loop iterations worker threads will perform',
- 'before going to sleep when waiting for work'],
- 'category' : 'perf',
- }],
-
- ['MAX_DRAWS_IN_FLIGHT', {
- 'type' : 'uint32_t',
- 'default' : '128',
- 'desc' : ['Maximum number of draws outstanding before API thread blocks.',
- 'This value MUST be evenly divisible into 2^32'],
- 'category' : 'perf',
- }],
-
- ['MAX_PRIMS_PER_DRAW', {
- 'type' : 'uint32_t',
- 'default' : '2040',
- 'desc' : ['Maximum primitives in a single Draw().',
- 'Larger primitives are split into smaller Draw calls.',
- 'Should be a multiple of (3 * vectorWidth).'],
- 'category' : 'perf',
- }],
-
- ['MAX_TESS_PRIMS_PER_DRAW', {
- 'type' : 'uint32_t',
- 'default' : '16',
- 'desc' : ['Maximum primitives in a single Draw() with tessellation enabled.',
- 'Larger primitives are split into smaller Draw calls.',
- 'Should be a multiple of (vectorWidth).'],
- 'category' : 'perf',
- }],
-
-
- ['DEBUG_OUTPUT_DIR', {
- 'type' : 'std::string',
- 'default' : '/tmp/Rast/DebugOutput',
- 'desc' : ['Output directory for debug data.'],
- 'category' : 'debug',
- }],
-
- ['TOSS_DRAW', {
- 'type' : 'bool',
- 'default' : 'false',
- 'desc' : ['Disable per-draw/dispatch execution'],
- 'category' : 'perf',
- }],
-
- ['TOSS_QUEUE_FE', {
- 'type' : 'bool',
- 'default' : 'false',
- 'desc' : ['Stop per-draw execution at worker FE',
- '',
- 'NOTE: Requires KNOB_ENABLE_TOSS_POINTS to be enabled in core/knobs.h'],
- 'category' : 'perf',
- 'advanced' : 'true',
- }],
-
- ['TOSS_FETCH', {
- 'type' : 'bool',
- 'default' : 'false',
- 'desc' : ['Stop per-draw execution at vertex fetch',
- '',
- 'NOTE: Requires KNOB_ENABLE_TOSS_POINTS to be enabled in core/knobs.h'],
- 'category' : 'perf',
- 'advanced' : 'true',
- }],
-
- ['TOSS_IA', {
- 'type' : 'bool',
- 'default' : 'false',
- 'desc' : ['Stop per-draw execution at input assembler',
- '',
- 'NOTE: Requires KNOB_ENABLE_TOSS_POINTS to be enabled in core/knobs.h'],
- 'category' : 'perf',
- 'advanced' : 'true',
- }],
-
- ['TOSS_VS', {
- 'type' : 'bool',
- 'default' : 'false',
- 'desc' : ['Stop per-draw execution at vertex shader',
- '',
- 'NOTE: Requires KNOB_ENABLE_TOSS_POINTS to be enabled in core/knobs.h'],
- 'category' : 'perf',
- 'advanced' : 'true',
- }],
-
- ['TOSS_SETUP_TRIS', {
- 'type' : 'bool',
- 'default' : 'false',
- 'desc' : ['Stop per-draw execution at primitive setup',
- '',
- 'NOTE: Requires KNOB_ENABLE_TOSS_POINTS to be enabled in core/knobs.h'],
- 'category' : 'perf',
- 'advanced' : 'true',
- }],
-
- ['TOSS_BIN_TRIS', {
- 'type' : 'bool',
- 'default' : 'false',
- 'desc' : ['Stop per-draw execution at primitive binning',
- '',
- 'NOTE: Requires KNOB_ENABLE_TOSS_POINTS to be enabled in core/knobs.h'],
- 'category' : 'perf',
- 'advanced' : 'true',
- }],
-
- ['TOSS_RS', {
- 'type' : 'bool',
- 'default' : 'false',
- 'desc' : ['Stop per-draw execution at rasterizer',
- '',
- 'NOTE: Requires KNOB_ENABLE_TOSS_POINTS to be enabled in core/knobs.h'],
- 'category' : 'perf',
- 'advanced' : 'true',
- }],
-
- ]
+++ /dev/null
-# mako/__init__.py
-# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
-#
-# This module is part of Mako and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-
-__version__ = '1.0.1'
+++ /dev/null
-# mako/_ast_util.py
-# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
-#
-# This module is part of Mako and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""
- ast
- ~~~
-
- The `ast` module helps Python applications to process trees of the Python
- abstract syntax grammar. The abstract syntax itself might change with
- each Python release; this module helps to find out programmatically what
- the current grammar looks like and allows modifications of it.
-
- An abstract syntax tree can be generated by passing `ast.PyCF_ONLY_AST` as
- a flag to the `compile()` builtin function or by using the `parse()`
- function from this module. The result will be a tree of objects whose
- classes all inherit from `ast.AST`.
-
- A modified abstract syntax tree can be compiled into a Python code object
- using the built-in `compile()` function.
-
- Additionally various helper functions are provided that make working with
- the trees simpler. The main intention of the helper functions and this
- module in general is to provide an easy to use interface for libraries
- that work tightly with the python syntax (template engines for example).
-
-
- :copyright: Copyright 2008 by Armin Ronacher.
- :license: Python License.
-"""
-from _ast import *
-from mako.compat import arg_stringname
-
-BOOLOP_SYMBOLS = {
- And: 'and',
- Or: 'or'
-}
-
-BINOP_SYMBOLS = {
- Add: '+',
- Sub: '-',
- Mult: '*',
- Div: '/',
- FloorDiv: '//',
- Mod: '%',
- LShift: '<<',
- RShift: '>>',
- BitOr: '|',
- BitAnd: '&',
- BitXor: '^'
-}
-
-CMPOP_SYMBOLS = {
- Eq: '==',
- Gt: '>',
- GtE: '>=',
- In: 'in',
- Is: 'is',
- IsNot: 'is not',
- Lt: '<',
- LtE: '<=',
- NotEq: '!=',
- NotIn: 'not in'
-}
-
-UNARYOP_SYMBOLS = {
- Invert: '~',
- Not: 'not',
- UAdd: '+',
- USub: '-'
-}
-
-ALL_SYMBOLS = {}
-ALL_SYMBOLS.update(BOOLOP_SYMBOLS)
-ALL_SYMBOLS.update(BINOP_SYMBOLS)
-ALL_SYMBOLS.update(CMPOP_SYMBOLS)
-ALL_SYMBOLS.update(UNARYOP_SYMBOLS)
-
-
-def parse(expr, filename='<unknown>', mode='exec'):
- """Parse an expression into an AST node."""
- return compile(expr, filename, mode, PyCF_ONLY_AST)
-
-
-def to_source(node, indent_with=' ' * 4):
- """
- This function can convert a node tree back into python sourcecode. This
- is useful for debugging purposes, especially if you're dealing with custom
- asts not generated by python itself.
-
- It could be that the sourcecode is evaluable when the AST itself is not
- compilable / evaluable. The reason for this is that the AST contains some
- more data than regular sourcecode does, which is dropped during
- conversion.
-
-    Each level of indentation is replaced with `indent_with`. By default this
-    parameter is equal to four spaces, as suggested by PEP 8, but it can be
-    adjusted to match the application's style guide.
- """
- generator = SourceGenerator(indent_with)
- generator.visit(node)
- return ''.join(generator.result)
-
-
-def dump(node):
- """
- A very verbose representation of the node passed. This is useful for
- debugging purposes.
- """
- def _format(node):
- if isinstance(node, AST):
- return '%s(%s)' % (node.__class__.__name__,
- ', '.join('%s=%s' % (a, _format(b))
- for a, b in iter_fields(node)))
- elif isinstance(node, list):
- return '[%s]' % ', '.join(_format(x) for x in node)
- return repr(node)
- if not isinstance(node, AST):
- raise TypeError('expected AST, got %r' % node.__class__.__name__)
- return _format(node)
-
-
-def copy_location(new_node, old_node):
- """
- Copy the source location hint (`lineno` and `col_offset`) from the
- old to the new node if possible and return the new one.
- """
- for attr in 'lineno', 'col_offset':
- if attr in old_node._attributes and attr in new_node._attributes \
- and hasattr(old_node, attr):
- setattr(new_node, attr, getattr(old_node, attr))
- return new_node
-
-
-def fix_missing_locations(node):
- """
- Some nodes require a line number and the column offset. Without that
- information the compiler will abort the compilation. Because it can be
- a dull task to add appropriate line numbers and column offsets when
- adding new nodes this function can help. It copies the line number and
- column offset of the parent node to the child nodes without this
- information.
-
-    Unlike `copy_location` this works recursively and won't touch nodes that
-    already have location information.
- """
- def _fix(node, lineno, col_offset):
- if 'lineno' in node._attributes:
- if not hasattr(node, 'lineno'):
- node.lineno = lineno
- else:
- lineno = node.lineno
- if 'col_offset' in node._attributes:
- if not hasattr(node, 'col_offset'):
- node.col_offset = col_offset
- else:
- col_offset = node.col_offset
- for child in iter_child_nodes(node):
- _fix(child, lineno, col_offset)
- _fix(node, 1, 0)
- return node
-
-
-def increment_lineno(node, n=1):
- """
- Increment the line numbers of all nodes by `n` if they have line number
- attributes. This is useful to "move code" to a different location in a
- file.
- """
-    for node in walk(node):
- if 'lineno' in node._attributes:
- node.lineno = getattr(node, 'lineno', 0) + n
-
-
-def iter_fields(node):
- """Iterate over all fields of a node, only yielding existing fields."""
- # CPython 2.5 compat
- if not hasattr(node, '_fields') or not node._fields:
- return
- for field in node._fields:
- try:
- yield field, getattr(node, field)
- except AttributeError:
- pass
-
-
-def get_fields(node):
-    """Like `iter_fields` but returns a dict."""
- return dict(iter_fields(node))
-
-
-def iter_child_nodes(node):
-    """Iterate over all child nodes of a node."""
- for name, field in iter_fields(node):
- if isinstance(field, AST):
- yield field
- elif isinstance(field, list):
- for item in field:
- if isinstance(item, AST):
- yield item
-
-
-def get_child_nodes(node):
- """Like `iter_child_nodes` but returns a list."""
- return list(iter_child_nodes(node))
-
-
-def get_compile_mode(node):
- """
- Get the mode for `compile` of a given node. If the node is not a `mod`
- node (`Expression`, `Module` etc.) a `TypeError` is thrown.
- """
- if not isinstance(node, mod):
- raise TypeError('expected mod node, got %r' % node.__class__.__name__)
- return {
- Expression: 'eval',
- Interactive: 'single'
- }.get(node.__class__, 'expr')
-
-
-def get_docstring(node):
- """
- Return the docstring for the given node or `None` if no docstring can be
- found. If the node provided does not accept docstrings a `TypeError`
- will be raised.
- """
- if not isinstance(node, (FunctionDef, ClassDef, Module)):
- raise TypeError("%r can't have docstrings" % node.__class__.__name__)
- if node.body and isinstance(node.body[0], Str):
- return node.body[0].s
-
-
-def walk(node):
- """
- Iterate over all nodes. This is useful if you only want to modify nodes in
-    place and don't care about the context or the order in which the nodes are returned.
- """
- from collections import deque
- todo = deque([node])
- while todo:
- node = todo.popleft()
- todo.extend(iter_child_nodes(node))
- yield node
-
-
-class NodeVisitor(object):
- """
-    Walks the abstract syntax tree and calls visitor functions for every node
- found. The visitor functions may return values which will be forwarded
- by the `visit` method.
-
-    By default the visitor functions for the nodes are ``'visit_'`` +
- class name of the node. So a `TryFinally` node visit function would
- be `visit_TryFinally`. This behavior can be changed by overriding
- the `get_visitor` function. If no visitor function exists for a node
- (return value `None`) the `generic_visit` visitor is used instead.
-
- Don't use the `NodeVisitor` if you want to apply changes to nodes during
- traversing. For this a special visitor exists (`NodeTransformer`) that
- allows modifications.
- """
-
- def get_visitor(self, node):
- """
- Return the visitor function for this node or `None` if no visitor
- exists for this node. In that case the generic visit function is
- used instead.
- """
- method = 'visit_' + node.__class__.__name__
- return getattr(self, method, None)
-
- def visit(self, node):
- """Visit a node."""
- f = self.get_visitor(node)
- if f is not None:
- return f(node)
- return self.generic_visit(node)
-
- def generic_visit(self, node):
- """Called if no explicit visitor function exists for a node."""
- for field, value in iter_fields(node):
- if isinstance(value, list):
- for item in value:
- if isinstance(item, AST):
- self.visit(item)
- elif isinstance(value, AST):
- self.visit(value)
-
-
-class NodeTransformer(NodeVisitor):
- """
- Walks the abstract syntax tree and allows modifications of nodes.
-
- The `NodeTransformer` will walk the AST and use the return value of the
- visitor functions to replace or remove the old node. If the return
- value of the visitor function is `None` the node will be removed
- from the previous location otherwise it's replaced with the return
- value. The return value may be the original node in which case no
- replacement takes place.
-
-    Here is an example transformer that rewrites all `foo` to `data['foo']`::
-
- class RewriteName(NodeTransformer):
-
- def visit_Name(self, node):
- return copy_location(Subscript(
- value=Name(id='data', ctx=Load()),
- slice=Index(value=Str(s=node.id)),
- ctx=node.ctx
- ), node)
-
- Keep in mind that if the node you're operating on has child nodes
- you must either transform the child nodes yourself or call the generic
- visit function for the node first.
-
- Nodes that were part of a collection of statements (that applies to
- all statement nodes) may also return a list of nodes rather than just
- a single node.
-
- Usually you use the transformer like this::
-
- node = YourTransformer().visit(node)
- """
-
- def generic_visit(self, node):
- for field, old_value in iter_fields(node):
- old_value = getattr(node, field, None)
- if isinstance(old_value, list):
- new_values = []
- for value in old_value:
- if isinstance(value, AST):
- value = self.visit(value)
- if value is None:
- continue
- elif not isinstance(value, AST):
- new_values.extend(value)
- continue
- new_values.append(value)
- old_value[:] = new_values
- elif isinstance(old_value, AST):
- new_node = self.visit(old_value)
- if new_node is None:
- delattr(node, field)
- else:
- setattr(node, field, new_node)
- return node
-
-
-class SourceGenerator(NodeVisitor):
- """
- This visitor is able to transform a well formed syntax tree into python
- sourcecode. For more details have a look at the docstring of the
- `node_to_source` function.
- """
-
- def __init__(self, indent_with):
- self.result = []
- self.indent_with = indent_with
- self.indentation = 0
- self.new_lines = 0
-
- def write(self, x):
- if self.new_lines:
- if self.result:
- self.result.append('\n' * self.new_lines)
- self.result.append(self.indent_with * self.indentation)
- self.new_lines = 0
- self.result.append(x)
-
- def newline(self, n=1):
- self.new_lines = max(self.new_lines, n)
-
- def body(self, statements):
-        self.newline()
- self.indentation += 1
- for stmt in statements:
- self.visit(stmt)
- self.indentation -= 1
-
- def body_or_else(self, node):
- self.body(node.body)
- if node.orelse:
- self.newline()
- self.write('else:')
- self.body(node.orelse)
-
- def signature(self, node):
- want_comma = []
- def write_comma():
- if want_comma:
- self.write(', ')
- else:
- want_comma.append(True)
-
- padding = [None] * (len(node.args) - len(node.defaults))
- for arg, default in zip(node.args, padding + node.defaults):
- write_comma()
- self.visit(arg)
- if default is not None:
- self.write('=')
- self.visit(default)
- if node.vararg is not None:
- write_comma()
- self.write('*' + arg_stringname(node.vararg))
- if node.kwarg is not None:
- write_comma()
- self.write('**' + arg_stringname(node.kwarg))
-
- def decorators(self, node):
- for decorator in node.decorator_list:
- self.newline()
- self.write('@')
- self.visit(decorator)
-
- # Statements
-
- def visit_Assign(self, node):
- self.newline()
- for idx, target in enumerate(node.targets):
- if idx:
- self.write(', ')
- self.visit(target)
- self.write(' = ')
- self.visit(node.value)
-
- def visit_AugAssign(self, node):
- self.newline()
- self.visit(node.target)
- self.write(BINOP_SYMBOLS[type(node.op)] + '=')
- self.visit(node.value)
-
- def visit_ImportFrom(self, node):
- self.newline()
- self.write('from %s%s import ' % ('.' * node.level, node.module))
- for idx, item in enumerate(node.names):
- if idx:
- self.write(', ')
- self.write(item)
-
- def visit_Import(self, node):
- self.newline()
- for item in node.names:
- self.write('import ')
- self.visit(item)
-
- def visit_Expr(self, node):
- self.newline()
- self.generic_visit(node)
-
- def visit_FunctionDef(self, node):
- self.newline(n=2)
- self.decorators(node)
- self.newline()
- self.write('def %s(' % node.name)
- self.signature(node.args)
- self.write('):')
- self.body(node.body)
-
- def visit_ClassDef(self, node):
- have_args = []
- def paren_or_comma():
- if have_args:
- self.write(', ')
- else:
- have_args.append(True)
- self.write('(')
-
- self.newline(n=3)
- self.decorators(node)
- self.newline()
- self.write('class %s' % node.name)
- for base in node.bases:
- paren_or_comma()
- self.visit(base)
- # XXX: the if here is used to keep this module compatible
- # with python 2.6.
- if hasattr(node, 'keywords'):
- for keyword in node.keywords:
- paren_or_comma()
- self.write(keyword.arg + '=')
- self.visit(keyword.value)
- if node.starargs is not None:
- paren_or_comma()
- self.write('*')
- self.visit(node.starargs)
- if node.kwargs is not None:
- paren_or_comma()
- self.write('**')
- self.visit(node.kwargs)
- self.write(have_args and '):' or ':')
- self.body(node.body)
-
- def visit_If(self, node):
- self.newline()
- self.write('if ')
- self.visit(node.test)
- self.write(':')
- self.body(node.body)
- while True:
- else_ = node.orelse
- if len(else_) == 1 and isinstance(else_[0], If):
- node = else_[0]
- self.newline()
- self.write('elif ')
- self.visit(node.test)
- self.write(':')
- self.body(node.body)
- else:
- self.newline()
- self.write('else:')
- self.body(else_)
- break
-
- def visit_For(self, node):
- self.newline()
- self.write('for ')
- self.visit(node.target)
- self.write(' in ')
- self.visit(node.iter)
- self.write(':')
- self.body_or_else(node)
-
- def visit_While(self, node):
- self.newline()
- self.write('while ')
- self.visit(node.test)
- self.write(':')
- self.body_or_else(node)
-
- def visit_With(self, node):
- self.newline()
- self.write('with ')
- self.visit(node.context_expr)
- if node.optional_vars is not None:
- self.write(' as ')
- self.visit(node.optional_vars)
- self.write(':')
- self.body(node.body)
-
- def visit_Pass(self, node):
- self.newline()
- self.write('pass')
-
- def visit_Print(self, node):
- # XXX: python 2.6 only
- self.newline()
- self.write('print ')
- want_comma = False
- if node.dest is not None:
- self.write(' >> ')
- self.visit(node.dest)
- want_comma = True
- for value in node.values:
- if want_comma:
- self.write(', ')
- self.visit(value)
- want_comma = True
- if not node.nl:
- self.write(',')
-
- def visit_Delete(self, node):
- self.newline()
- self.write('del ')
- for idx, target in enumerate(node):
- if idx:
- self.write(', ')
- self.visit(target)
-
- def visit_TryExcept(self, node):
- self.newline()
- self.write('try:')
- self.body(node.body)
- for handler in node.handlers:
- self.visit(handler)
-
- def visit_TryFinally(self, node):
- self.newline()
- self.write('try:')
- self.body(node.body)
- self.newline()
- self.write('finally:')
- self.body(node.finalbody)
-
- def visit_Global(self, node):
- self.newline()
- self.write('global ' + ', '.join(node.names))
-
- def visit_Nonlocal(self, node):
- self.newline()
- self.write('nonlocal ' + ', '.join(node.names))
-
- def visit_Return(self, node):
- self.newline()
- self.write('return ')
- self.visit(node.value)
-
- def visit_Break(self, node):
- self.newline()
- self.write('break')
-
- def visit_Continue(self, node):
- self.newline()
- self.write('continue')
-
- def visit_Raise(self, node):
- # XXX: Python 2.6 / 3.0 compatibility
- self.newline()
- self.write('raise')
- if hasattr(node, 'exc') and node.exc is not None:
- self.write(' ')
- self.visit(node.exc)
- if node.cause is not None:
- self.write(' from ')
- self.visit(node.cause)
- elif hasattr(node, 'type') and node.type is not None:
- self.visit(node.type)
- if node.inst is not None:
- self.write(', ')
- self.visit(node.inst)
- if node.tback is not None:
- self.write(', ')
- self.visit(node.tback)
-
- # Expressions
-
- def visit_Attribute(self, node):
- self.visit(node.value)
- self.write('.' + node.attr)
-
- def visit_Call(self, node):
- want_comma = []
- def write_comma():
- if want_comma:
- self.write(', ')
- else:
- want_comma.append(True)
-
- self.visit(node.func)
- self.write('(')
- for arg in node.args:
- write_comma()
- self.visit(arg)
- for keyword in node.keywords:
- write_comma()
- self.write(keyword.arg + '=')
- self.visit(keyword.value)
- if node.starargs is not None:
- write_comma()
- self.write('*')
- self.visit(node.starargs)
- if node.kwargs is not None:
- write_comma()
- self.write('**')
- self.visit(node.kwargs)
- self.write(')')
-
- def visit_Name(self, node):
- self.write(node.id)
-
- def visit_NameConstant(self, node):
- self.write(str(node.value))
-
- def visit_arg(self, node):
- self.write(node.arg)
-
- def visit_Str(self, node):
- self.write(repr(node.s))
-
- def visit_Bytes(self, node):
- self.write(repr(node.s))
-
- def visit_Num(self, node):
- self.write(repr(node.n))
-
- def visit_Tuple(self, node):
- self.write('(')
- idx = -1
- for idx, item in enumerate(node.elts):
- if idx:
- self.write(', ')
- self.visit(item)
- self.write(idx and ')' or ',)')
-
- def sequence_visit(left, right):
- def visit(self, node):
- self.write(left)
- for idx, item in enumerate(node.elts):
- if idx:
- self.write(', ')
- self.visit(item)
- self.write(right)
- return visit
-
- visit_List = sequence_visit('[', ']')
- visit_Set = sequence_visit('{', '}')
- del sequence_visit
-
- def visit_Dict(self, node):
- self.write('{')
- for idx, (key, value) in enumerate(zip(node.keys, node.values)):
- if idx:
- self.write(', ')
- self.visit(key)
- self.write(': ')
- self.visit(value)
- self.write('}')
-
- def visit_BinOp(self, node):
- self.write('(')
- self.visit(node.left)
- self.write(' %s ' % BINOP_SYMBOLS[type(node.op)])
- self.visit(node.right)
- self.write(')')
-
- def visit_BoolOp(self, node):
- self.write('(')
- for idx, value in enumerate(node.values):
- if idx:
- self.write(' %s ' % BOOLOP_SYMBOLS[type(node.op)])
- self.visit(value)
- self.write(')')
-
- def visit_Compare(self, node):
- self.write('(')
- self.visit(node.left)
- for op, right in zip(node.ops, node.comparators):
- self.write(' %s ' % CMPOP_SYMBOLS[type(op)])
- self.visit(right)
- self.write(')')
-
- def visit_UnaryOp(self, node):
- self.write('(')
- op = UNARYOP_SYMBOLS[type(node.op)]
- self.write(op)
- if op == 'not':
- self.write(' ')
- self.visit(node.operand)
- self.write(')')
-
- def visit_Subscript(self, node):
- self.visit(node.value)
- self.write('[')
- self.visit(node.slice)
- self.write(']')
-
- def visit_Slice(self, node):
- if node.lower is not None:
- self.visit(node.lower)
- self.write(':')
- if node.upper is not None:
- self.visit(node.upper)
- if node.step is not None:
- self.write(':')
- if not (isinstance(node.step, Name) and node.step.id == 'None'):
- self.visit(node.step)
-
- def visit_ExtSlice(self, node):
- for idx, item in enumerate(node.dims):
- if idx:
- self.write(', ')
- self.visit(item)
-
- def visit_Yield(self, node):
- self.write('yield ')
- self.visit(node.value)
-
- def visit_Lambda(self, node):
- self.write('lambda ')
- self.signature(node.args)
- self.write(': ')
- self.visit(node.body)
-
- def visit_Ellipsis(self, node):
- self.write('Ellipsis')
-
- def generator_visit(left, right):
- def visit(self, node):
- self.write(left)
- self.visit(node.elt)
- for comprehension in node.generators:
- self.visit(comprehension)
- self.write(right)
- return visit
-
- visit_ListComp = generator_visit('[', ']')
- visit_GeneratorExp = generator_visit('(', ')')
- visit_SetComp = generator_visit('{', '}')
- del generator_visit
-
- def visit_DictComp(self, node):
- self.write('{')
- self.visit(node.key)
- self.write(': ')
- self.visit(node.value)
- for comprehension in node.generators:
- self.visit(comprehension)
- self.write('}')
-
- def visit_IfExp(self, node):
- self.visit(node.body)
- self.write(' if ')
- self.visit(node.test)
- self.write(' else ')
- self.visit(node.orelse)
-
- def visit_Starred(self, node):
- self.write('*')
- self.visit(node.value)
-
- def visit_Repr(self, node):
- # XXX: python 2.6 only
- self.write('`')
- self.visit(node.value)
- self.write('`')
-
- # Helper Nodes
-
- def visit_alias(self, node):
- self.write(node.name)
- if node.asname is not None:
- self.write(' as ' + node.asname)
-
- def visit_comprehension(self, node):
- self.write(' for ')
- self.visit(node.target)
- self.write(' in ')
- self.visit(node.iter)
- if node.ifs:
- for if_ in node.ifs:
- self.write(' if ')
- self.visit(if_)
-
- def visit_excepthandler(self, node):
- self.newline()
- self.write('except')
- if node.type is not None:
- self.write(' ')
- self.visit(node.type)
- if node.name is not None:
- self.write(' as ')
- self.visit(node.name)
- self.write(':')
- self.body(node.body)
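The SourceGenerator visitors above are consumed through mako.pyparser.ExpressionGenerator, which joins the accumulated result back into a string. A rough usage sketch follows (the expression text is made up and the exact output formatting is only indicative):

    from mako import pyparser

    tree = pyparser.parse("max(a, b) * 2", "exec")
    expr = tree.body[0].value                           # underlying ast expression node
    print(pyparser.ExpressionGenerator(expr).value())   # roughly: (max(a, b) * 2)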
+++ /dev/null
-# mako/ast.py
-# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
-#
-# This module is part of Mako and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""utilities for analyzing expressions and blocks of Python
-code, as well as generating Python from AST nodes"""
-
-from mako import exceptions, pyparser, compat
-import re
-
-class PythonCode(object):
- """represents information about a string containing Python code"""
- def __init__(self, code, **exception_kwargs):
- self.code = code
-
- # represents all identifiers which are assigned to at some point in
- # the code
- self.declared_identifiers = set()
-
- # represents all identifiers which are referenced before their
- # assignment, if any
- self.undeclared_identifiers = set()
-
- # note that an identifier can be in both the undeclared and declared
- # lists.
-
- # using AST to parse instead of using code.co_varnames,
- # code.co_names has several advantages:
- # - we can locate an identifier as "undeclared" even if
- # it's declared later in the same block of code
- # - AST is less likely to break with version changes
- # (for example, the behavior of co_names changed a little bit
- # in python version 2.5)
- if isinstance(code, compat.string_types):
- expr = pyparser.parse(code.lstrip(), "exec", **exception_kwargs)
- else:
- expr = code
-
- f = pyparser.FindIdentifiers(self, **exception_kwargs)
- f.visit(expr)
-
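The declared/undeclared split that PythonCode computes through pyparser.FindIdentifiers can be approximated with the standard-library ast module alone; the helper below is only an illustrative sketch and is not part of Mako:

    import ast

    class _IdentifierCollector(ast.NodeVisitor):
        # Visits names in source order, mirroring the declared/undeclared
        # distinction described above.
        def __init__(self):
            self.declared = set()
            self.undeclared = set()

        def visit_Name(self, node):
            if isinstance(node.ctx, ast.Store):
                self.declared.add(node.id)
            elif node.id not in self.declared:
                self.undeclared.add(node.id)

    def collect_identifiers(code):
        collector = _IdentifierCollector()
        collector.visit(ast.parse(code))
        return collector.declared, collector.undeclared

    # collect_identifiers("y = x + 1") -> ({'y'}, {'x'})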
-class ArgumentList(object):
- """parses a fragment of code as a comma-separated list of expressions"""
- def __init__(self, code, **exception_kwargs):
- self.codeargs = []
- self.args = []
- self.declared_identifiers = set()
- self.undeclared_identifiers = set()
- if isinstance(code, compat.string_types):
- if re.match(r"\S", code) and not re.match(r",\s*$", code):
- # if there's text and no trailing comma, ensure it's parsed
- # as a tuple by adding a trailing comma
- code += ","
- expr = pyparser.parse(code, "exec", **exception_kwargs)
- else:
- expr = code
-
- f = pyparser.FindTuple(self, PythonCode, **exception_kwargs)
- f.visit(expr)
-
-class PythonFragment(PythonCode):
- """extends PythonCode to provide identifier lookups in partial control
- statements
-
- e.g.
- for x in 5:
- elif y==9:
- except (MyException, e):
- etc.
- """
- def __init__(self, code, **exception_kwargs):
- m = re.match(r'^(\w+)(?:\s+(.*?))?:\s*(#|$)', code.strip(), re.S)
- if not m:
- raise exceptions.CompileException(
- "Fragment '%s' is not a partial control statement" %
- code, **exception_kwargs)
- if m.group(3):
- code = code[:m.start(3)]
- (keyword, expr) = m.group(1,2)
- if keyword in ['for','if', 'while']:
- code = code + "pass"
- elif keyword == 'try':
- code = code + "pass\nexcept:pass"
- elif keyword == 'elif' or keyword == 'else':
- code = "if False:pass\n" + code + "pass"
- elif keyword == 'except':
- code = "try:pass\n" + code + "pass"
- elif keyword == 'with':
- code = code + "pass"
- else:
- raise exceptions.CompileException(
- "Unsupported control keyword: '%s'" %
- keyword, **exception_kwargs)
- super(PythonFragment, self).__init__(code, **exception_kwargs)
-
-
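Concretely, the padding above turns each partial control line into a complete, parseable statement. A small illustration (the fragment texts are made up):

    import ast

    # "for x in items:"    is parsed as  "for x in items:pass"
    # "elif y == 9:"       is parsed as  "if False:pass\nelif y == 9:pass"
    # "except ValueError:" is parsed as  "try:pass\nexcept ValueError:pass"
    for padded in ("for x in items:pass",
                   "if False:pass\nelif y == 9:pass",
                   "try:pass\nexcept ValueError:pass"):
        ast.parse(padded)   # each padded fragment now parses cleanly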
-class FunctionDecl(object):
- """function declaration"""
- def __init__(self, code, allow_kwargs=True, **exception_kwargs):
- self.code = code
- expr = pyparser.parse(code, "exec", **exception_kwargs)
-
- f = pyparser.ParseFunc(self, **exception_kwargs)
- f.visit(expr)
- if not hasattr(self, 'funcname'):
- raise exceptions.CompileException(
- "Code '%s' is not a function declaration" % code,
- **exception_kwargs)
- if not allow_kwargs and self.kwargs:
- raise exceptions.CompileException(
- "'**%s' keyword argument not allowed here" %
- self.kwargnames[-1], **exception_kwargs)
-
- def get_argument_expressions(self, as_call=False):
- """Return the argument declarations of this FunctionDecl as a printable
- list.
-
- By default the return value is appropriate for writing in a ``def``;
- set `as_call` to true to build arguments to be passed to the function
- instead (assuming locals with the same names as the arguments exist).
- """
-
- namedecls = []
-
- # Build in reverse order, since defaults and slurpy args come last
- argnames = self.argnames[::-1]
- kwargnames = self.kwargnames[::-1]
- defaults = self.defaults[::-1]
- kwdefaults = self.kwdefaults[::-1]
-
- # Named arguments
- if self.kwargs:
- namedecls.append("**" + kwargnames.pop(0))
-
- for name in kwargnames:
- # Keyword-only arguments must always be used by name, so even if
- # this is a call, print out `foo=foo`
- if as_call:
- namedecls.append("%s=%s" % (name, name))
- elif kwdefaults:
- default = kwdefaults.pop(0)
- if default is None:
- # The AST always gives kwargs a default, since you can do
- # `def foo(*, a=1, b, c=3)`
- namedecls.append(name)
- else:
- namedecls.append("%s=%s" % (
- name, pyparser.ExpressionGenerator(default).value()))
- else:
- namedecls.append(name)
-
- # Positional arguments
- if self.varargs:
- namedecls.append("*" + argnames.pop(0))
-
- for name in argnames:
- if as_call or not defaults:
- namedecls.append(name)
- else:
- default = defaults.pop(0)
- namedecls.append("%s=%s" % (
- name, pyparser.ExpressionGenerator(default).value()))
-
- namedecls.reverse()
- return namedecls
-
- @property
- def allargnames(self):
- return tuple(self.argnames) + tuple(self.kwargnames)
-
-class FunctionArgs(FunctionDecl):
- """the argument portion of a function declaration"""
-
- def __init__(self, code, **kwargs):
- super(FunctionArgs, self).__init__("def ANON(%s):pass" % code,
- **kwargs)
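A brief usage sketch of FunctionArgs, assuming Mako is importable as a package (the signature shown is made up and the outputs are approximate):

    from mako.ast import FunctionArgs

    decl = FunctionArgs("x, y=5, *args, **kw")
    decl.get_argument_expressions()              # ['x', 'y=5', '*args', '**kw']
    decl.get_argument_expressions(as_call=True)  # ['x', 'y', '*args', '**kw']
    decl.allargnames                             # ('x', 'y', 'args', 'kw')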
+++ /dev/null
-# mako/cache.py
-# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
-#
-# This module is part of Mako and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-from mako import compat, util
-
-_cache_plugins = util.PluginLoader("mako.cache")
-
-register_plugin = _cache_plugins.register
-register_plugin("beaker", "mako.ext.beaker_cache", "BeakerCacheImpl")
-
-
-class Cache(object):
- """Represents a data content cache made available to the module
- space of a specific :class:`.Template` object.
-
- .. versionadded:: 0.6
- :class:`.Cache` by itself is mostly a
- container for a :class:`.CacheImpl` object, which implements
- a fixed API to provide caching services; specific subclasses exist to
- implement different
- caching strategies. Mako includes a backend that works with
- the Beaker caching system. Beaker itself then supports
- a number of backends (i.e. file, memory, memcached, etc.)
-
- The construction of a :class:`.Cache` is part of the mechanics
- of a :class:`.Template`, and programmatic access to this
- cache is typically via the :attr:`.Template.cache` attribute.
-
- """
-
- impl = None
- """Provide the :class:`.CacheImpl` in use by this :class:`.Cache`.
-
- This accessor allows a :class:`.CacheImpl` with additional
- methods beyond that of :class:`.Cache` to be used programmatically.
-
- """
-
- id = None
- """Return the 'id' that identifies this cache.
-
- This is a value that should be globally unique to the
- :class:`.Template` associated with this cache, and can
- be used by a caching system to name a local container
- for data specific to this template.
-
- """
-
- starttime = None
- """Epochal time value for when the owning :class:`.Template` was
- first compiled.
-
- A cache implementation may wish to invalidate data earlier than
- this timestamp; this has the effect of the cache for a specific
- :class:`.Template` starting clean any time the :class:`.Template`
- is recompiled, such as when the original template file changed on
- the filesystem.
-
- """
-
- def __init__(self, template, *args):
- # check for a stale template calling the
- # constructor
- if isinstance(template, compat.string_types) and args:
- return
- self.template = template
- self.id = template.module.__name__
- self.starttime = template.module._modified_time
- self._def_regions = {}
- self.impl = self._load_impl(self.template.cache_impl)
-
- def _load_impl(self, name):
- return _cache_plugins.load(name)(self)
-
- def get_or_create(self, key, creation_function, **kw):
- """Retrieve a value from the cache, using the given creation function
- to generate a new value."""
-
- return self._ctx_get_or_create(key, creation_function, None, **kw)
-
- def _ctx_get_or_create(self, key, creation_function, context, **kw):
- """Retrieve a value from the cache, using the given creation function
- to generate a new value."""
-
- if not self.template.cache_enabled:
- return creation_function()
-
- return self.impl.get_or_create(
- key,
- creation_function,
- **self._get_cache_kw(kw, context))
-
- def set(self, key, value, **kw):
- """Place a value in the cache.
-
- :param key: the value's key.
- :param value: the value.
- :param \**kw: cache configuration arguments.
-
- """
-
- self.impl.set(key, value, **self._get_cache_kw(kw, None))
-
- put = set
- """A synonym for :meth:`.Cache.set`.
-
- This is here for backwards compatibility.
-
- """
-
- def get(self, key, **kw):
- """Retrieve a value from the cache.
-
- :param key: the value's key.
- :param \**kw: cache configuration arguments. The
- backend is configured using these arguments upon first request.
- Subsequent requests that use the same series of configuration
- values will use that same backend.
-
- """
- return self.impl.get(key, **self._get_cache_kw(kw, None))
-
- def invalidate(self, key, **kw):
- """Invalidate a value in the cache.
-
- :param key: the value's key.
- :param \**kw: cache configuration arguments. The
- backend is configured using these arguments upon first request.
- Subsequent requests that use the same series of configuration
- values will use that same backend.
-
- """
- self.impl.invalidate(key, **self._get_cache_kw(kw, None))
-
- def invalidate_body(self):
- """Invalidate the cached content of the "body" method for this
- template.
-
- """
- self.invalidate('render_body', __M_defname='render_body')
-
- def invalidate_def(self, name):
- """Invalidate the cached content of a particular ``<%def>`` within this
- template.
-
- """
-
- self.invalidate('render_%s' % name, __M_defname='render_%s' % name)
-
- def invalidate_closure(self, name):
- """Invalidate a nested ``<%def>`` within this template.
-
- Caching of nested defs is a blunt tool as there is no
- management of scope -- nested defs that use cache tags
- need to have names distinct from all other nested defs in the
- template, else their content will be overwritten by
- each other.
-
- """
-
- self.invalidate(name, __M_defname=name)
-
- def _get_cache_kw(self, kw, context):
- defname = kw.pop('__M_defname', None)
- if not defname:
- tmpl_kw = self.template.cache_args.copy()
- tmpl_kw.update(kw)
- elif defname in self._def_regions:
- tmpl_kw = self._def_regions[defname]
- else:
- tmpl_kw = self.template.cache_args.copy()
- tmpl_kw.update(kw)
- self._def_regions[defname] = tmpl_kw
- if context and self.impl.pass_context:
- tmpl_kw = tmpl_kw.copy()
- tmpl_kw.setdefault('context', context)
- return tmpl_kw
-
-
-class CacheImpl(object):
- """Provide a cache implementation for use by :class:`.Cache`."""
-
- def __init__(self, cache):
- self.cache = cache
-
- pass_context = False
- """If ``True``, the :class:`.Context` will be passed to
- :meth:`get_or_create <.CacheImpl.get_or_create>` as the name ``'context'``.
- """
-
- def get_or_create(self, key, creation_function, **kw):
- """Retrieve a value from the cache, using the given creation function
- to generate a new value.
-
- This function *must* return a value, either from
- the cache, or via the given creation function.
- If the creation function is called, the newly
- created value should be populated into the cache
- under the given key before being returned.
-
- :param key: the value's key.
- :param creation_function: function that when called generates
- a new value.
- :param \**kw: cache configuration arguments.
-
- """
- raise NotImplementedError()
-
- def set(self, key, value, **kw):
- """Place a value in the cache.
-
- :param key: the value's key.
- :param value: the value.
- :param \**kw: cache configuration arguments.
-
- """
- raise NotImplementedError()
-
- def get(self, key, **kw):
- """Retrieve a value from the cache.
-
- :param key: the value's key.
- :param \**kw: cache configuration arguments.
-
- """
- raise NotImplementedError()
-
- def invalidate(self, key, **kw):
- """Invalidate a value in the cache.
-
- :param key: the value's key.
- :param \**kw: cache configuration arguments.
-
- """
- raise NotImplementedError()
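A minimal in-memory backend, sketched here only to illustrate the CacheImpl interface above; it is not shipped with Mako, ignores the configuration kwargs, and never expires entries:

    from mako.cache import CacheImpl

    class DictCacheImpl(CacheImpl):
        """Toy backend: a plain dict, for illustration only."""

        def __init__(self, cache):
            super(DictCacheImpl, self).__init__(cache)
            self._data = {}

        def get_or_create(self, key, creation_function, **kw):
            if key not in self._data:
                self._data[key] = creation_function()
            return self._data[key]

        def set(self, key, value, **kw):
            self._data[key] = value

        def get(self, key, **kw):
            return self._data.get(key)

        def invalidate(self, key, **kw):
            self._data.pop(key, None)

    # register_plugin("dict", __name__, "DictCacheImpl") would make it
    # selectable via Template(..., cache_impl='dict').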
+++ /dev/null
-# mako/cmd.py
-# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
-#
-# This module is part of Mako and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-from argparse import ArgumentParser
-from os.path import isfile, dirname
-import sys
-from mako.template import Template
-from mako.lookup import TemplateLookup
-from mako import exceptions
-
-def varsplit(var):
- if "=" not in var:
- return (var, "")
- return var.split("=", 1)
-
-def _exit():
- sys.stderr.write(exceptions.text_error_template().render())
- sys.exit(1)
-
-def cmdline(argv=None):
-
- parser = ArgumentParser("usage: %prog [FILENAME]")
- parser.add_argument("--var", default=[], action="append",
- help="variable (can be used multiple times, use name=value)")
- parser.add_argument("--template-dir", default=[], action="append",
- help="Directory to use for template lookup (multiple "
- "directories may be provided). If not given then if the "
- "template is read from stdin, the value defaults to be "
- "the current directory, otherwise it defaults to be the "
- "parent directory of the file provided.")
- parser.add_argument('input', nargs='?', default='-')
-
- options = parser.parse_args(argv)
- if options.input == '-':
- lookup_dirs = options.template_dir or ["."]
- lookup = TemplateLookup(lookup_dirs)
- try:
- template = Template(sys.stdin.read(), lookup=lookup)
- except:
- _exit()
- else:
- filename = options.input
- if not isfile(filename):
- raise SystemExit("error: can't find %s" % filename)
- lookup_dirs = options.template_dir or [dirname(filename)]
- lookup = TemplateLookup(lookup_dirs)
- try:
- template = Template(filename=filename, lookup=lookup)
- except:
- _exit()
-
- kw = dict([varsplit(var) for var in options.var])
- try:
- print(template.render(**kw))
- except:
- _exit()
-
-
-if __name__ == "__main__":
- cmdline()
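A usage sketch for the entry point above (the template file name and the variable are made up):

    # From the shell (the same function backs the mako-render console script):
    #     python -m mako.cmd --var name=world hello.txt
    # or programmatically:
    from mako.cmd import cmdline

    cmdline(["--var", "name=world", "hello.txt"])   # prints the rendered template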
+++ /dev/null
-# mako/codegen.py
-# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
-#
-# This module is part of Mako and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""provides functionality for rendering a parsetree constructing into module
-source code."""
-
-import time
-import re
-from mako.pygen import PythonPrinter
-from mako import util, ast, parsetree, filters, exceptions
-from mako import compat
-
-
-MAGIC_NUMBER = 10
-
-# names which are hardwired into the
-# template and are not accessed via the
-# context itself
-RESERVED_NAMES = set(['context', 'loop', 'UNDEFINED'])
-
-def compile(node,
- uri,
- filename=None,
- default_filters=None,
- buffer_filters=None,
- imports=None,
- future_imports=None,
- source_encoding=None,
- generate_magic_comment=True,
- disable_unicode=False,
- strict_undefined=False,
- enable_loop=True,
- reserved_names=frozenset()):
-
- """Generate module source code given a parsetree node,
- uri, and optional source filename"""
-
- # if on Py2K, push the "source_encoding" string to be
- # a bytestring itself, as we will be embedding it into
- # the generated source and we don't want to coerce the
- # result into a unicode object, in "disable_unicode" mode
- if not compat.py3k and isinstance(source_encoding, compat.text_type):
- source_encoding = source_encoding.encode(source_encoding)
-
-
- buf = util.FastEncodingBuffer()
-
- printer = PythonPrinter(buf)
- _GenerateRenderMethod(printer,
- _CompileContext(uri,
- filename,
- default_filters,
- buffer_filters,
- imports,
- future_imports,
- source_encoding,
- generate_magic_comment,
- disable_unicode,
- strict_undefined,
- enable_loop,
- reserved_names),
- node)
- return buf.getvalue()
-
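A rough sketch of driving compile() directly, outside of the Template machinery that normally performs this step (the template text is made up):

    from mako.lexer import Lexer
    from mako import codegen

    node = Lexer("hello, ${name}!").parse()        # parsetree.TemplateNode
    source = codegen.compile(node, uri="memory:demo",
                             default_filters=['str'])   # as Template passes on Python 3
    # 'source' is the generated module source; Template would exec it
    # and call its render_body().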
-class _CompileContext(object):
- def __init__(self,
- uri,
- filename,
- default_filters,
- buffer_filters,
- imports,
- future_imports,
- source_encoding,
- generate_magic_comment,
- disable_unicode,
- strict_undefined,
- enable_loop,
- reserved_names):
- self.uri = uri
- self.filename = filename
- self.default_filters = default_filters
- self.buffer_filters = buffer_filters
- self.imports = imports
- self.future_imports = future_imports
- self.source_encoding = source_encoding
- self.generate_magic_comment = generate_magic_comment
- self.disable_unicode = disable_unicode
- self.strict_undefined = strict_undefined
- self.enable_loop = enable_loop
- self.reserved_names = reserved_names
-
-class _GenerateRenderMethod(object):
- """A template visitor object which generates the
- full module source for a template.
-
- """
- def __init__(self, printer, compiler, node):
- self.printer = printer
- self.compiler = compiler
- self.node = node
- self.identifier_stack = [None]
- self.in_def = isinstance(node, (parsetree.DefTag, parsetree.BlockTag))
-
- if self.in_def:
- name = "render_%s" % node.funcname
- args = node.get_argument_expressions()
- filtered = len(node.filter_args.args) > 0
- buffered = eval(node.attributes.get('buffered', 'False'))
- cached = eval(node.attributes.get('cached', 'False'))
- defs = None
- pagetag = None
- if node.is_block and not node.is_anonymous:
- args += ['**pageargs']
- else:
- defs = self.write_toplevel()
- pagetag = self.compiler.pagetag
- name = "render_body"
- if pagetag is not None:
- args = pagetag.body_decl.get_argument_expressions()
- if not pagetag.body_decl.kwargs:
- args += ['**pageargs']
- cached = eval(pagetag.attributes.get('cached', 'False'))
- self.compiler.enable_loop = self.compiler.enable_loop or eval(
- pagetag.attributes.get(
- 'enable_loop', 'False')
- )
- else:
- args = ['**pageargs']
- cached = False
- buffered = filtered = False
- if args is None:
- args = ['context']
- else:
- args = [a for a in ['context'] + args]
-
- self.write_render_callable(
- pagetag or node,
- name, args,
- buffered, filtered, cached)
-
- if defs is not None:
- for node in defs:
- _GenerateRenderMethod(printer, compiler, node)
-
- if not self.in_def:
- self.write_metadata_struct()
-
- def write_metadata_struct(self):
- self.printer.source_map[self.printer.lineno] = \
- max(self.printer.source_map)
- struct = {
- "filename": self.compiler.filename,
- "uri": self.compiler.uri,
- "source_encoding": self.compiler.source_encoding,
- "line_map": self.printer.source_map,
- }
- self.printer.writelines(
- '"""',
- '__M_BEGIN_METADATA',
- compat.json.dumps(struct),
- '__M_END_METADATA\n'
- '"""'
- )
-
- @property
- def identifiers(self):
- return self.identifier_stack[-1]
-
- def write_toplevel(self):
- """Traverse a template structure for module-level directives and
- generate the start of module-level code.
-
- """
- inherit = []
- namespaces = {}
- module_code = []
-
- self.compiler.pagetag = None
-
- class FindTopLevel(object):
- def visitInheritTag(s, node):
- inherit.append(node)
- def visitNamespaceTag(s, node):
- namespaces[node.name] = node
- def visitPageTag(s, node):
- self.compiler.pagetag = node
- def visitCode(s, node):
- if node.ismodule:
- module_code.append(node)
-
- f = FindTopLevel()
- for n in self.node.nodes:
- n.accept_visitor(f)
-
- self.compiler.namespaces = namespaces
-
- module_ident = set()
- for n in module_code:
- module_ident = module_ident.union(n.declared_identifiers())
-
- module_identifiers = _Identifiers(self.compiler)
- module_identifiers.declared = module_ident
-
- # module-level names, python code
- if self.compiler.generate_magic_comment and \
- self.compiler.source_encoding:
- self.printer.writeline("# -*- coding:%s -*-" %
- self.compiler.source_encoding)
-
- if self.compiler.future_imports:
- self.printer.writeline("from __future__ import %s" %
- (", ".join(self.compiler.future_imports),))
- self.printer.writeline("from mako import runtime, filters, cache")
- self.printer.writeline("UNDEFINED = runtime.UNDEFINED")
- self.printer.writeline("__M_dict_builtin = dict")
- self.printer.writeline("__M_locals_builtin = locals")
- self.printer.writeline("_magic_number = %r" % MAGIC_NUMBER)
- self.printer.writeline("_modified_time = %r" % time.time())
- self.printer.writeline("_enable_loop = %r" % self.compiler.enable_loop)
- self.printer.writeline(
- "_template_filename = %r" % self.compiler.filename)
- self.printer.writeline("_template_uri = %r" % self.compiler.uri)
- self.printer.writeline(
- "_source_encoding = %r" % self.compiler.source_encoding)
- if self.compiler.imports:
- buf = ''
- for imp in self.compiler.imports:
- buf += imp + "\n"
- self.printer.writeline(imp)
- impcode = ast.PythonCode(
- buf,
- source='', lineno=0,
- pos=0,
- filename='template defined imports')
- else:
- impcode = None
-
- main_identifiers = module_identifiers.branch(self.node)
- module_identifiers.topleveldefs = \
- module_identifiers.topleveldefs.\
- union(main_identifiers.topleveldefs)
- module_identifiers.declared.add("UNDEFINED")
- if impcode:
- module_identifiers.declared.update(impcode.declared_identifiers)
-
- self.compiler.identifiers = module_identifiers
- self.printer.writeline("_exports = %r" %
- [n.name for n in
- main_identifiers.topleveldefs.values()]
- )
- self.printer.write_blanks(2)
-
- if len(module_code):
- self.write_module_code(module_code)
-
- if len(inherit):
- self.write_namespaces(namespaces)
- self.write_inherit(inherit[-1])
- elif len(namespaces):
- self.write_namespaces(namespaces)
-
- return list(main_identifiers.topleveldefs.values())
-
- def write_render_callable(self, node, name, args, buffered, filtered,
- cached):
- """write a top-level render callable.
-
- this could be the main render() method or that of a top-level def."""
-
- if self.in_def:
- decorator = node.decorator
- if decorator:
- self.printer.writeline(
- "@runtime._decorate_toplevel(%s)" % decorator)
-
- self.printer.start_source(node.lineno)
- self.printer.writelines(
- "def %s(%s):" % (name, ','.join(args)),
- # push new frame, assign current frame to __M_caller
- "__M_caller = context.caller_stack._push_frame()",
- "try:"
- )
- if buffered or filtered or cached:
- self.printer.writeline("context._push_buffer()")
-
- self.identifier_stack.append(
- self.compiler.identifiers.branch(self.node))
- if (not self.in_def or self.node.is_block) and '**pageargs' in args:
- self.identifier_stack[-1].argument_declared.add('pageargs')
-
- if not self.in_def and (
- len(self.identifiers.locally_assigned) > 0 or
- len(self.identifiers.argument_declared) > 0
- ):
- self.printer.writeline("__M_locals = __M_dict_builtin(%s)" %
- ','.join([
- "%s=%s" % (x, x) for x in
- self.identifiers.argument_declared
- ]))
-
- self.write_variable_declares(self.identifiers, toplevel=True)
-
- for n in self.node.nodes:
- n.accept_visitor(self)
-
- self.write_def_finish(self.node, buffered, filtered, cached)
- self.printer.writeline(None)
- self.printer.write_blanks(2)
- if cached:
- self.write_cache_decorator(
- node, name,
- args, buffered,
- self.identifiers, toplevel=True)
-
- def write_module_code(self, module_code):
- """write module-level template code, i.e. that which
- is enclosed in <%! %> tags in the template."""
- for n in module_code:
- self.printer.start_source(n.lineno)
- self.printer.write_indented_block(n.text)
-
- def write_inherit(self, node):
- """write the module-level inheritance-determination callable."""
-
- self.printer.writelines(
- "def _mako_inherit(template, context):",
- "_mako_generate_namespaces(context)",
- "return runtime._inherit_from(context, %s, _template_uri)" %
- (node.parsed_attributes['file']),
- None
- )
-
- def write_namespaces(self, namespaces):
- """write the module-level namespace-generating callable."""
- self.printer.writelines(
- "def _mako_get_namespace(context, name):",
- "try:",
- "return context.namespaces[(__name__, name)]",
- "except KeyError:",
- "_mako_generate_namespaces(context)",
- "return context.namespaces[(__name__, name)]",
- None, None
- )
- self.printer.writeline("def _mako_generate_namespaces(context):")
-
-
- for node in namespaces.values():
- if 'import' in node.attributes:
- self.compiler.has_ns_imports = True
- self.printer.start_source(node.lineno)
- if len(node.nodes):
- self.printer.writeline("def make_namespace():")
- export = []
- identifiers = self.compiler.identifiers.branch(node)
- self.in_def = True
- class NSDefVisitor(object):
- def visitDefTag(s, node):
- s.visitDefOrBase(node)
-
- def visitBlockTag(s, node):
- s.visitDefOrBase(node)
-
- def visitDefOrBase(s, node):
- if node.is_anonymous:
- raise exceptions.CompileException(
- "Can't put anonymous blocks inside "
- "<%namespace>",
- **node.exception_kwargs
- )
- self.write_inline_def(node, identifiers, nested=False)
- export.append(node.funcname)
- vis = NSDefVisitor()
- for n in node.nodes:
- n.accept_visitor(vis)
- self.printer.writeline("return [%s]" % (','.join(export)))
- self.printer.writeline(None)
- self.in_def = False
- callable_name = "make_namespace()"
- else:
- callable_name = "None"
-
- if 'file' in node.parsed_attributes:
- self.printer.writeline(
- "ns = runtime.TemplateNamespace(%r,"
- " context._clean_inheritance_tokens(),"
- " templateuri=%s, callables=%s, "
- " calling_uri=_template_uri)" %
- (
- node.name,
- node.parsed_attributes.get('file', 'None'),
- callable_name,
- )
- )
- elif 'module' in node.parsed_attributes:
- self.printer.writeline(
- "ns = runtime.ModuleNamespace(%r,"
- " context._clean_inheritance_tokens(),"
- " callables=%s, calling_uri=_template_uri,"
- " module=%s)" %
- (
- node.name,
- callable_name,
- node.parsed_attributes.get(
- 'module', 'None')
- )
- )
- else:
- self.printer.writeline(
- "ns = runtime.Namespace(%r,"
- " context._clean_inheritance_tokens(),"
- " callables=%s, calling_uri=_template_uri)" %
- (
- node.name,
- callable_name,
- )
- )
- if eval(node.attributes.get('inheritable', "False")):
- self.printer.writeline("context['self'].%s = ns" % (node.name))
-
- self.printer.writeline(
- "context.namespaces[(__name__, %s)] = ns" % repr(node.name))
- self.printer.write_blanks(1)
- if not len(namespaces):
- self.printer.writeline("pass")
- self.printer.writeline(None)
-
- def write_variable_declares(self, identifiers, toplevel=False, limit=None):
- """write variable declarations at the top of a function.
-
- the variable declarations are in the form of callable
- definitions for defs and/or name lookup within the
- function's context argument. the names declared are based
- on the names that are referenced in the function body,
- which don't otherwise have any explicit assignment
- operation. names that are assigned within the body are
- assumed to be locally-scoped variables and are not
- separately declared.
-
- for def callable definitions, if the def is a top-level
- callable then a 'stub' callable is generated which wraps
- the current Context into a closure. if the def is not
- top-level, it is fully rendered as a local closure.
-
- """
-
- # collection of all defs available to us in this scope
- comp_idents = dict([(c.funcname, c) for c in identifiers.defs])
- to_write = set()
-
- # write "context.get()" for all variables we are going to
- # need that aren't in the namespace yet
- to_write = to_write.union(identifiers.undeclared)
-
- # write closure functions for closures that we define
- # right here
- to_write = to_write.union(
- [c.funcname for c in identifiers.closuredefs.values()])
-
- # remove identifiers that are declared in the argument
- # signature of the callable
- to_write = to_write.difference(identifiers.argument_declared)
-
- # remove identifiers that we are going to assign to.
- # in this way we mimic Python's behavior,
- # i.e. assignment to a variable within a block
- # means that variable is now a "locally declared" var,
- # which cannot be referenced beforehand.
- to_write = to_write.difference(identifiers.locally_declared)
-
- if self.compiler.enable_loop:
- has_loop = "loop" in to_write
- to_write.discard("loop")
- else:
- has_loop = False
-
- # if a limiting set was sent, constrain to those items in that list
- # (this is used for the caching decorator)
- if limit is not None:
- to_write = to_write.intersection(limit)
-
- if toplevel and getattr(self.compiler, 'has_ns_imports', False):
- self.printer.writeline("_import_ns = {}")
- self.compiler.has_imports = True
- for ident, ns in self.compiler.namespaces.items():
- if 'import' in ns.attributes:
- self.printer.writeline(
- "_mako_get_namespace(context, %r)."
- "_populate(_import_ns, %r)" %
- (
- ident,
- re.split(r'\s*,\s*', ns.attributes['import'])
- ))
-
- if has_loop:
- self.printer.writeline(
- 'loop = __M_loop = runtime.LoopStack()'
- )
-
- for ident in to_write:
- if ident in comp_idents:
- comp = comp_idents[ident]
- if comp.is_block:
- if not comp.is_anonymous:
- self.write_def_decl(comp, identifiers)
- else:
- self.write_inline_def(comp, identifiers, nested=True)
- else:
- if comp.is_root():
- self.write_def_decl(comp, identifiers)
- else:
- self.write_inline_def(comp, identifiers, nested=True)
-
- elif ident in self.compiler.namespaces:
- self.printer.writeline(
- "%s = _mako_get_namespace(context, %r)" %
- (ident, ident)
- )
- else:
- if getattr(self.compiler, 'has_ns_imports', False):
- if self.compiler.strict_undefined:
- self.printer.writelines(
- "%s = _import_ns.get(%r, UNDEFINED)" %
- (ident, ident),
- "if %s is UNDEFINED:" % ident,
- "try:",
- "%s = context[%r]" % (ident, ident),
- "except KeyError:",
- "raise NameError(\"'%s' is not defined\")" %
- ident,
- None, None
- )
- else:
- self.printer.writeline(
- "%s = _import_ns.get(%r, context.get(%r, UNDEFINED))" %
- (ident, ident, ident))
- else:
- if self.compiler.strict_undefined:
- self.printer.writelines(
- "try:",
- "%s = context[%r]" % (ident, ident),
- "except KeyError:",
- "raise NameError(\"'%s' is not defined\")" %
- ident,
- None
- )
- else:
- self.printer.writeline(
- "%s = context.get(%r, UNDEFINED)" % (ident, ident)
- )
-
- self.printer.writeline("__M_writer = context.writer()")
-
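The quickest way to see the declarations this method writes is to inspect the generated module source of a small template (template text made up):

    from mako.template import Template

    t = Template("hello, ${name}!")
    print(t.code)
    # the generated render_body() includes, among other lines:
    #     name = context.get('name', UNDEFINED)
    #     __M_writer = context.writer()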
- def write_def_decl(self, node, identifiers):
- """write a locally-available callable referencing a top-level def"""
- funcname = node.funcname
- namedecls = node.get_argument_expressions()
- nameargs = node.get_argument_expressions(as_call=True)
-
- if not self.in_def and (
- len(self.identifiers.locally_assigned) > 0 or
- len(self.identifiers.argument_declared) > 0):
- nameargs.insert(0, 'context._locals(__M_locals)')
- else:
- nameargs.insert(0, 'context')
- self.printer.writeline("def %s(%s):" % (funcname, ",".join(namedecls)))
- self.printer.writeline(
- "return render_%s(%s)" % (funcname, ",".join(nameargs)))
- self.printer.writeline(None)
-
- def write_inline_def(self, node, identifiers, nested):
- """write a locally-available def callable inside an enclosing def."""
-
- namedecls = node.get_argument_expressions()
-
- decorator = node.decorator
- if decorator:
- self.printer.writeline(
- "@runtime._decorate_inline(context, %s)" % decorator)
- self.printer.writeline(
- "def %s(%s):" % (node.funcname, ",".join(namedecls)))
- filtered = len(node.filter_args.args) > 0
- buffered = eval(node.attributes.get('buffered', 'False'))
- cached = eval(node.attributes.get('cached', 'False'))
- self.printer.writelines(
- # push new frame, assign current frame to __M_caller
- "__M_caller = context.caller_stack._push_frame()",
- "try:"
- )
- if buffered or filtered or cached:
- self.printer.writelines(
- "context._push_buffer()",
- )
-
- identifiers = identifiers.branch(node, nested=nested)
-
- self.write_variable_declares(identifiers)
-
- self.identifier_stack.append(identifiers)
- for n in node.nodes:
- n.accept_visitor(self)
- self.identifier_stack.pop()
-
- self.write_def_finish(node, buffered, filtered, cached)
- self.printer.writeline(None)
- if cached:
- self.write_cache_decorator(node, node.funcname,
- namedecls, False, identifiers,
- inline=True, toplevel=False)
-
- def write_def_finish(self, node, buffered, filtered, cached,
- callstack=True):
- """write the end section of a rendering function, either outermost or
- inline.
-
- this takes into account if the rendering function was filtered,
- buffered, etc. and closes the corresponding try: block if any, and
- writes code to retrieve captured content, apply filters, send proper
- return value."""
-
- if not buffered and not cached and not filtered:
- self.printer.writeline("return ''")
- if callstack:
- self.printer.writelines(
- "finally:",
- "context.caller_stack._pop_frame()",
- None
- )
-
- if buffered or filtered or cached:
- if buffered or cached:
- # in a caching scenario, don't try to get a writer
- # from the context after popping; assume the caching
- # implementation might be using a context with no
- # extra buffers
- self.printer.writelines(
- "finally:",
- "__M_buf = context._pop_buffer()"
- )
- else:
- self.printer.writelines(
- "finally:",
- "__M_buf, __M_writer = context._pop_buffer_and_writer()"
- )
-
- if callstack:
- self.printer.writeline("context.caller_stack._pop_frame()")
-
- s = "__M_buf.getvalue()"
- if filtered:
- s = self.create_filter_callable(node.filter_args.args, s,
- False)
- self.printer.writeline(None)
- if buffered and not cached:
- s = self.create_filter_callable(self.compiler.buffer_filters,
- s, False)
- if buffered or cached:
- self.printer.writeline("return %s" % s)
- else:
- self.printer.writelines(
- "__M_writer(%s)" % s,
- "return ''"
- )
-
- def write_cache_decorator(self, node_or_pagetag, name,
- args, buffered, identifiers,
- inline=False, toplevel=False):
- """write a post-function decorator to replace a rendering
- callable with a cached version of itself."""
-
- self.printer.writeline("__M_%s = %s" % (name, name))
- cachekey = node_or_pagetag.parsed_attributes.get('cache_key',
- repr(name))
-
- cache_args = {}
- if self.compiler.pagetag is not None:
- cache_args.update(
- (
- pa[6:],
- self.compiler.pagetag.parsed_attributes[pa]
- )
- for pa in self.compiler.pagetag.parsed_attributes
- if pa.startswith('cache_') and pa != 'cache_key'
- )
- cache_args.update(
- (
- pa[6:],
- node_or_pagetag.parsed_attributes[pa]
- ) for pa in node_or_pagetag.parsed_attributes
- if pa.startswith('cache_') and pa != 'cache_key'
- )
- if 'timeout' in cache_args:
- cache_args['timeout'] = int(eval(cache_args['timeout']))
-
- self.printer.writeline("def %s(%s):" % (name, ','.join(args)))
-
- # form "arg1, arg2, arg3=arg3, arg4=arg4", etc.
- pass_args = [
- "%s=%s" % ((a.split('=')[0],) * 2) if '=' in a else a
- for a in args
- ]
-
- self.write_variable_declares(
- identifiers,
- toplevel=toplevel,
- limit=node_or_pagetag.undeclared_identifiers()
- )
- if buffered:
- s = "context.get('local')."\
- "cache._ctx_get_or_create("\
- "%s, lambda:__M_%s(%s), context, %s__M_defname=%r)" % (
- cachekey, name, ','.join(pass_args),
- ''.join(["%s=%s, " % (k, v)
- for k, v in cache_args.items()]),
- name
- )
- # apply buffer_filters
- s = self.create_filter_callable(self.compiler.buffer_filters, s,
- False)
- self.printer.writelines("return " + s, None)
- else:
- self.printer.writelines(
- "__M_writer(context.get('local')."
- "cache._ctx_get_or_create("
- "%s, lambda:__M_%s(%s), context, %s__M_defname=%r))" %
- (
- cachekey, name, ','.join(pass_args),
- ''.join(["%s=%s, " % (k, v)
- for k, v in cache_args.items()]),
- name,
- ),
- "return ''",
- None
- )
-
- def create_filter_callable(self, args, target, is_expression):
- """write a filter-applying expression based on the filters
- present in the given filter names, adjusting for the global
- 'default' filter aliases as needed."""
-
- def locate_encode(name):
- if re.match(r'decode\..+', name):
- return "filters." + name
- elif self.compiler.disable_unicode:
- return filters.NON_UNICODE_ESCAPES.get(name, name)
- else:
- return filters.DEFAULT_ESCAPES.get(name, name)
-
- if 'n' not in args:
- if is_expression:
- if self.compiler.pagetag:
- args = self.compiler.pagetag.filter_args.args + args
- if self.compiler.default_filters:
- args = self.compiler.default_filters + args
- for e in args:
- # if filter given as a function, get just the identifier portion
- if e == 'n':
- continue
- m = re.match(r'(.+?)(\(.*\))', e)
- if m:
- ident, fargs = m.group(1, 2)
- f = locate_encode(ident)
- e = f + fargs
- else:
- e = locate_encode(e)
- assert e is not None
- target = "%s(%s)" % (e, target)
- return target
-
- def visitExpression(self, node):
- self.printer.start_source(node.lineno)
- if len(node.escapes) or \
- (
- self.compiler.pagetag is not None and
- len(self.compiler.pagetag.filter_args.args)
- ) or \
- len(self.compiler.default_filters):
-
- s = self.create_filter_callable(node.escapes_code.args,
- "%s" % node.text, True)
- self.printer.writeline("__M_writer(%s)" % s)
- else:
- self.printer.writeline("__M_writer(%s)" % node.text)
-
- def visitControlLine(self, node):
- if node.isend:
- self.printer.writeline(None)
- if node.has_loop_context:
- self.printer.writeline('finally:')
- self.printer.writeline("loop = __M_loop._exit()")
- self.printer.writeline(None)
- else:
- self.printer.start_source(node.lineno)
- if self.compiler.enable_loop and node.keyword == 'for':
- text = mangle_mako_loop(node, self.printer)
- else:
- text = node.text
- self.printer.writeline(text)
- children = node.get_children()
- # this covers the three situations where we want to insert a pass:
- # 1) a ternary control line with no children,
- # 2) a primary control line with nothing but its own ternary
- # and end control lines, and
- # 3) any control line with no content other than comments
- if not children or (
- compat.all(isinstance(c, (parsetree.Comment,
- parsetree.ControlLine))
- for c in children) and
- compat.all((node.is_ternary(c.keyword) or c.isend)
- for c in children
- if isinstance(c, parsetree.ControlLine))):
- self.printer.writeline("pass")
-
- def visitText(self, node):
- self.printer.start_source(node.lineno)
- self.printer.writeline("__M_writer(%s)" % repr(node.content))
-
- def visitTextTag(self, node):
- filtered = len(node.filter_args.args) > 0
- if filtered:
- self.printer.writelines(
- "__M_writer = context._push_writer()",
- "try:",
- )
- for n in node.nodes:
- n.accept_visitor(self)
- if filtered:
- self.printer.writelines(
- "finally:",
- "__M_buf, __M_writer = context._pop_buffer_and_writer()",
- "__M_writer(%s)" %
- self.create_filter_callable(
- node.filter_args.args,
- "__M_buf.getvalue()",
- False),
- None
- )
-
- def visitCode(self, node):
- if not node.ismodule:
- self.printer.start_source(node.lineno)
- self.printer.write_indented_block(node.text)
-
- if not self.in_def and len(self.identifiers.locally_assigned) > 0:
- # if we are the "template" def, fudge locally
- # declared/modified variables into the "__M_locals" dictionary,
- # which is used for def calls within the same template,
- # to simulate "enclosing scope"
- self.printer.writeline(
- '__M_locals_builtin_stored = __M_locals_builtin()')
- self.printer.writeline(
- '__M_locals.update(__M_dict_builtin([(__M_key,'
- ' __M_locals_builtin_stored[__M_key]) for __M_key in'
- ' [%s] if __M_key in __M_locals_builtin_stored]))' %
- ','.join([repr(x) for x in node.declared_identifiers()]))
-
- def visitIncludeTag(self, node):
- self.printer.start_source(node.lineno)
- args = node.attributes.get('args')
- if args:
- self.printer.writeline(
- "runtime._include_file(context, %s, _template_uri, %s)" %
- (node.parsed_attributes['file'], args))
- else:
- self.printer.writeline(
- "runtime._include_file(context, %s, _template_uri)" %
- (node.parsed_attributes['file']))
-
- def visitNamespaceTag(self, node):
- pass
-
- def visitDefTag(self, node):
- pass
-
- def visitBlockTag(self, node):
- if node.is_anonymous:
- self.printer.writeline("%s()" % node.funcname)
- else:
- nameargs = node.get_argument_expressions(as_call=True)
- nameargs += ['**pageargs']
- self.printer.writeline("if 'parent' not in context._data or "
- "not hasattr(context._data['parent'], '%s'):"
- % node.funcname)
- self.printer.writeline(
- "context['self'].%s(%s)" % (node.funcname, ",".join(nameargs)))
- self.printer.writeline("\n")
-
- def visitCallNamespaceTag(self, node):
- # TODO: we can put namespace-specific checks here, such
- # as ensuring the given namespace will be imported,
- # pre-import the namespace, etc.
- self.visitCallTag(node)
-
- def visitCallTag(self, node):
- self.printer.writeline("def ccall(caller):")
- export = ['body']
- callable_identifiers = self.identifiers.branch(node, nested=True)
- body_identifiers = callable_identifiers.branch(node, nested=False)
- # we want the 'caller' passed to ccall to be used
- # for the body() function, but for other non-body()
- # <%def>s within <%call> we want the current caller
- # off the call stack (if any)
- body_identifiers.add_declared('caller')
-
- self.identifier_stack.append(body_identifiers)
- class DefVisitor(object):
- def visitDefTag(s, node):
- s.visitDefOrBase(node)
-
- def visitBlockTag(s, node):
- s.visitDefOrBase(node)
-
- def visitDefOrBase(s, node):
- self.write_inline_def(node, callable_identifiers, nested=False)
- if not node.is_anonymous:
- export.append(node.funcname)
- # remove defs that are within the <%call> from the
- # "closuredefs" defined in the body, so they dont render twice
- if node.funcname in body_identifiers.closuredefs:
- del body_identifiers.closuredefs[node.funcname]
-
- vis = DefVisitor()
- for n in node.nodes:
- n.accept_visitor(vis)
- self.identifier_stack.pop()
-
- bodyargs = node.body_decl.get_argument_expressions()
- self.printer.writeline("def body(%s):" % ','.join(bodyargs))
-
- # TODO: figure out best way to specify
- # buffering/nonbuffering (at call time would be better)
- buffered = False
- if buffered:
- self.printer.writelines(
- "context._push_buffer()",
- "try:"
- )
- self.write_variable_declares(body_identifiers)
- self.identifier_stack.append(body_identifiers)
-
- for n in node.nodes:
- n.accept_visitor(self)
- self.identifier_stack.pop()
-
- self.write_def_finish(node, buffered, False, False, callstack=False)
- self.printer.writelines(
- None,
- "return [%s]" % (','.join(export)),
- None
- )
-
- self.printer.writelines(
- # push on caller for nested call
- "context.caller_stack.nextcaller = "
- "runtime.Namespace('caller', context, "
- "callables=ccall(__M_caller))",
- "try:")
- self.printer.start_source(node.lineno)
- self.printer.writelines(
- "__M_writer(%s)" % self.create_filter_callable(
- [], node.expression, True),
- "finally:",
- "context.caller_stack.nextcaller = None",
- None
- )
-
-class _Identifiers(object):
- """tracks the status of identifier names as template code is rendered."""
-
- def __init__(self, compiler, node=None, parent=None, nested=False):
- if parent is not None:
- # if we are the branch created in write_namespaces(),
- # we don't share any context from the main body().
- if isinstance(node, parsetree.NamespaceTag):
- self.declared = set()
- self.topleveldefs = util.SetLikeDict()
- else:
- # things that have already been declared
- # in an enclosing namespace (i.e. names we can just use)
- self.declared = set(parent.declared).\
- union([c.name for c in parent.closuredefs.values()]).\
- union(parent.locally_declared).\
- union(parent.argument_declared)
-
- # if these identifiers correspond to a "nested"
- # scope, it means whatever the parent identifiers
- # had as undeclared will have been declared by that parent,
- # and therefore we have them in our scope.
- if nested:
- self.declared = self.declared.union(parent.undeclared)
-
- # top level defs that are available
- self.topleveldefs = util.SetLikeDict(**parent.topleveldefs)
- else:
- self.declared = set()
- self.topleveldefs = util.SetLikeDict()
-
- self.compiler = compiler
-
- # things within this level that are referenced before they
- # are declared (e.g. assigned to)
- self.undeclared = set()
-
- # things that are declared locally. some of these things
- # could be in the "undeclared" list as well if they are
- # referenced before declared
- self.locally_declared = set()
-
- # assignments made in explicit python blocks.
- # these will be propagated to
- # the context of local def calls.
- self.locally_assigned = set()
-
- # things that are declared in the argument
- # signature of the def callable
- self.argument_declared = set()
-
- # closure defs that are defined in this level
- self.closuredefs = util.SetLikeDict()
-
- self.node = node
-
- if node is not None:
- node.accept_visitor(self)
-
- illegal_names = self.compiler.reserved_names.intersection(
- self.locally_declared)
- if illegal_names:
- raise exceptions.NameConflictError(
- "Reserved words declared in template: %s" %
- ", ".join(illegal_names))
-
-
- def branch(self, node, **kwargs):
- """create a new Identifiers for a new Node, with
- this Identifiers as the parent."""
-
- return _Identifiers(self.compiler, node, self, **kwargs)
-
- @property
- def defs(self):
- return set(self.topleveldefs.union(self.closuredefs).values())
-
- def __repr__(self):
- return "Identifiers(declared=%r, locally_declared=%r, "\
- "undeclared=%r, topleveldefs=%r, closuredefs=%r, "\
- "argumentdeclared=%r)" %\
- (
- list(self.declared),
- list(self.locally_declared),
- list(self.undeclared),
- [c.name for c in self.topleveldefs.values()],
- [c.name for c in self.closuredefs.values()],
- self.argument_declared)
-
- def check_declared(self, node):
- """update the state of this Identifiers with the undeclared
- and declared identifiers of the given node."""
-
- for ident in node.undeclared_identifiers():
- if ident != 'context' and\
- ident not in self.declared.union(self.locally_declared):
- self.undeclared.add(ident)
- for ident in node.declared_identifiers():
- self.locally_declared.add(ident)
-
- def add_declared(self, ident):
- self.declared.add(ident)
- if ident in self.undeclared:
- self.undeclared.remove(ident)
-
- def visitExpression(self, node):
- self.check_declared(node)
-
- def visitControlLine(self, node):
- self.check_declared(node)
-
- def visitCode(self, node):
- if not node.ismodule:
- self.check_declared(node)
- self.locally_assigned = self.locally_assigned.union(
- node.declared_identifiers())
-
- def visitNamespaceTag(self, node):
- # only traverse into the sub-elements of a
- # <%namespace> tag if we are the branch created in
- # write_namespaces()
- if self.node is node:
- for n in node.nodes:
- n.accept_visitor(self)
-
- def _check_name_exists(self, collection, node):
- existing = collection.get(node.funcname)
- collection[node.funcname] = node
- if existing is not None and \
- existing is not node and \
- (node.is_block or existing.is_block):
- raise exceptions.CompileException(
- "%%def or %%block named '%s' already "
- "exists in this template." %
- node.funcname, **node.exception_kwargs)
-
- def visitDefTag(self, node):
- if node.is_root() and not node.is_anonymous:
- self._check_name_exists(self.topleveldefs, node)
- elif node is not self.node:
- self._check_name_exists(self.closuredefs, node)
-
- for ident in node.undeclared_identifiers():
- if ident != 'context' and \
- ident not in self.declared.union(self.locally_declared):
- self.undeclared.add(ident)
-
- # visit defs only one level deep
- if node is self.node:
- for ident in node.declared_identifiers():
- self.argument_declared.add(ident)
-
- for n in node.nodes:
- n.accept_visitor(self)
-
- def visitBlockTag(self, node):
- if node is not self.node and not node.is_anonymous:
-
- if isinstance(self.node, parsetree.DefTag):
- raise exceptions.CompileException(
- "Named block '%s' not allowed inside of def '%s'"
- % (node.name, self.node.name), **node.exception_kwargs)
- elif isinstance(self.node,
- (parsetree.CallTag, parsetree.CallNamespaceTag)):
- raise exceptions.CompileException(
- "Named block '%s' not allowed inside of <%%call> tag"
- % (node.name, ), **node.exception_kwargs)
-
- for ident in node.undeclared_identifiers():
- if ident != 'context' and \
- ident not in self.declared.union(self.locally_declared):
- self.undeclared.add(ident)
-
- if not node.is_anonymous:
- self._check_name_exists(self.topleveldefs, node)
- self.undeclared.add(node.funcname)
- elif node is not self.node:
- self._check_name_exists(self.closuredefs, node)
- for ident in node.declared_identifiers():
- self.argument_declared.add(ident)
- for n in node.nodes:
- n.accept_visitor(self)
-
- def visitTextTag(self, node):
- for ident in node.undeclared_identifiers():
- if ident != 'context' and \
- ident not in self.declared.union(self.locally_declared):
- self.undeclared.add(ident)
-
- def visitIncludeTag(self, node):
- self.check_declared(node)
-
- def visitPageTag(self, node):
- for ident in node.declared_identifiers():
- self.argument_declared.add(ident)
- self.check_declared(node)
-
- def visitCallNamespaceTag(self, node):
- self.visitCallTag(node)
-
- def visitCallTag(self, node):
- if node is self.node:
- for ident in node.undeclared_identifiers():
- if ident != 'context' and \
- ident not in self.declared.union(
- self.locally_declared):
- self.undeclared.add(ident)
- for ident in node.declared_identifiers():
- self.argument_declared.add(ident)
- for n in node.nodes:
- n.accept_visitor(self)
- else:
- for ident in node.undeclared_identifiers():
- if ident != 'context' and \
- ident not in self.declared.union(
- self.locally_declared):
- self.undeclared.add(ident)
-
-
-_FOR_LOOP = re.compile(
- r'^for\s+((?:\(?)\s*[A-Za-z_][A-Za-z_0-9]*'
- r'(?:\s*,\s*(?:[A-Za-z_][A-Za-z0-9_]*),??)*\s*(?:\)?))\s+in\s+(.*):'
-)
-
-def mangle_mako_loop(node, printer):
- """converts a for loop into a context manager wrapped around a for loop
- when access to the `loop` variable has been detected in the for loop body
- """
- loop_variable = LoopVariable()
- node.accept_visitor(loop_variable)
- if loop_variable.detected:
- node.nodes[-1].has_loop_context = True
- match = _FOR_LOOP.match(node.text)
- if match:
- printer.writelines(
- 'loop = __M_loop._enter(%s)' % match.group(2),
- 'try:'
- #'with __M_loop(%s) as loop:' % match.group(2)
- )
- text = 'for %s in loop:' % match.group(1)
- else:
- raise SyntaxError("Couldn't apply loop context: %s" % node.text)
- else:
- text = node.text
- return text
-
-
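To make the rewrite concrete: the regex splits the loop header into its target list and its iterable, which mangle_mako_loop then recombines around the LoopStack (the names i, x and items are made up):

    from mako.codegen import _FOR_LOOP

    m = _FOR_LOOP.match("for i, x in enumerate(items):")
    m.group(1)   # 'i, x'             -> rewritten header: "for i, x in loop:"
    m.group(2)   # 'enumerate(items)' -> "loop = __M_loop._enter(enumerate(items))"
    # visitControlLine emits the matching "finally: loop = __M_loop._exit()"
    # when the closing control line is reached (see has_loop_context above).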
-class LoopVariable(object):
- """A node visitor which looks for the name 'loop' within undeclared
- identifiers."""
-
- def __init__(self):
- self.detected = False
-
- def _loop_reference_detected(self, node):
- if 'loop' in node.undeclared_identifiers():
- self.detected = True
- else:
- for n in node.get_children():
- n.accept_visitor(self)
-
- def visitControlLine(self, node):
- self._loop_reference_detected(node)
-
- def visitCode(self, node):
- self._loop_reference_detected(node)
-
- def visitExpression(self, node):
- self._loop_reference_detected(node)
+++ /dev/null
-import sys
-import time
-
-py3k = sys.version_info >= (3, 0)
-py33 = sys.version_info >= (3, 3)
-py2k = sys.version_info < (3,)
-py26 = sys.version_info >= (2, 6)
-jython = sys.platform.startswith('java')
-win32 = sys.platform.startswith('win')
-pypy = hasattr(sys, 'pypy_version_info')
-
-if py3k:
- from io import StringIO
- import builtins as compat_builtins
- from urllib.parse import quote_plus, unquote_plus
- from html.entities import codepoint2name, name2codepoint
- string_types = str,
- binary_type = bytes
- text_type = str
-
- from io import BytesIO as byte_buffer
-
- def u(s):
- return s
-
- def b(s):
- return s.encode("latin-1")
-
- def octal(lit):
- return eval("0o" + lit)
-
-else:
- import __builtin__ as compat_builtins
- try:
- from cStringIO import StringIO
- except:
- from StringIO import StringIO
-
- byte_buffer = StringIO
-
- from urllib import quote_plus, unquote_plus
- from htmlentitydefs import codepoint2name, name2codepoint
- string_types = basestring,
- binary_type = str
- text_type = unicode
-
- def u(s):
- return unicode(s, "utf-8")
-
- def b(s):
- return s
-
- def octal(lit):
- return eval("0" + lit)
-
-
-if py33:
- from importlib import machinery
- def load_module(module_id, path):
- return machinery.SourceFileLoader(module_id, path).load_module()
-else:
- import imp
- def load_module(module_id, path):
- fp = open(path, 'rb')
- try:
- return imp.load_source(module_id, path, fp)
- finally:
- fp.close()
-
-
-if py3k:
- def reraise(tp, value, tb=None, cause=None):
- if cause is not None:
- value.__cause__ = cause
- if value.__traceback__ is not tb:
- raise value.with_traceback(tb)
- raise value
-else:
- exec("def reraise(tp, value, tb=None, cause=None):\n"
- " raise tp, value, tb\n")
-
-
-def exception_as():
- return sys.exc_info()[1]
-
-try:
- import threading
- if py3k:
- import _thread as thread
- else:
- import thread
-except ImportError:
- import dummy_threading as threading
- if py3k:
- import _dummy_thread as thread
- else:
- import dummy_thread as thread
-
-if win32 or jython:
- time_func = time.clock
-else:
- time_func = time.time
-
-try:
- from functools import partial
-except:
- def partial(func, *args, **keywords):
- def newfunc(*fargs, **fkeywords):
- newkeywords = keywords.copy()
- newkeywords.update(fkeywords)
- return func(*(args + fargs), **newkeywords)
- return newfunc
-
-
-all = all
-import json
-
-def exception_name(exc):
- return exc.__class__.__name__
-
-try:
- from inspect import CO_VARKEYWORDS, CO_VARARGS
- def inspect_func_args(fn):
- if py3k:
- co = fn.__code__
- else:
- co = fn.func_code
-
- nargs = co.co_argcount
- names = co.co_varnames
- args = list(names[:nargs])
-
- varargs = None
- if co.co_flags & CO_VARARGS:
- varargs = co.co_varnames[nargs]
- nargs = nargs + 1
- varkw = None
- if co.co_flags & CO_VARKEYWORDS:
- varkw = co.co_varnames[nargs]
-
- if py3k:
- return args, varargs, varkw, fn.__defaults__
- else:
- return args, varargs, varkw, fn.func_defaults
-except ImportError:
- import inspect
- def inspect_func_args(fn):
- return inspect.getargspec(fn)
-
-if py3k:
- def callable(fn):
- return hasattr(fn, '__call__')
-else:
- callable = callable
-
-
-################################################
-# cross-compatible metaclass implementation
-# Copyright (c) 2010-2012 Benjamin Peterson
-def with_metaclass(meta, base=object):
- """Create a base class with a metaclass."""
- return meta("%sBase" % meta.__name__, (base,), {})
-################################################
-
-
-def arg_stringname(func_arg):
- """Gets the string name of a kwarg or vararg
- In Python3.4 a function's args are
- of _ast.arg type not _ast.name
- """
- if hasattr(func_arg, 'arg'):
- return func_arg.arg
- else:
- return str(func_arg)
+++ /dev/null
-# mako/exceptions.py
-# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
-#
-# This module is part of Mako and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""exception classes"""
-
-import traceback
-import sys
-from mako import util, compat
-
-class MakoException(Exception):
- pass
-
-class RuntimeException(MakoException):
- pass
-
-def _format_filepos(lineno, pos, filename):
- if filename is None:
- return " at line: %d char: %d" % (lineno, pos)
- else:
- return " in file '%s' at line: %d char: %d" % (filename, lineno, pos)
-
-
-class CompileException(MakoException):
- def __init__(self, message, source, lineno, pos, filename):
- MakoException.__init__(self,
- message + _format_filepos(lineno, pos, filename))
- self.lineno = lineno
- self.pos = pos
- self.filename = filename
- self.source = source
-
-class SyntaxException(MakoException):
- def __init__(self, message, source, lineno, pos, filename):
- MakoException.__init__(self,
- message + _format_filepos(lineno, pos, filename))
- self.lineno = lineno
- self.pos = pos
- self.filename = filename
- self.source = source
-
-class UnsupportedError(MakoException):
- """raised when a retired feature is used."""
-
-class NameConflictError(MakoException):
- """raised when a reserved word is used inappropriately"""
-
-class TemplateLookupException(MakoException):
- pass
-
-class TopLevelLookupException(TemplateLookupException):
- pass
-
-class RichTraceback(object):
- """Pull the current exception from the ``sys`` traceback and extracts
- Mako-specific template information.
-
- See the usage examples in :ref:`handling_exceptions`.
-
- """
- def __init__(self, error=None, traceback=None):
- self.source, self.lineno = "", 0
-
- if error is None or traceback is None:
- t, value, tback = sys.exc_info()
-
- if error is None:
- error = value or t
-
- if traceback is None:
- traceback = tback
-
- self.error = error
- self.records = self._init(traceback)
-
- if isinstance(self.error, (CompileException, SyntaxException)):
- self.source = self.error.source
- self.lineno = self.error.lineno
- self._has_source = True
-
- self._init_message()
-
- @property
- def errorname(self):
- return compat.exception_name(self.error)
-
- def _init_message(self):
- """Find a unicode representation of self.error"""
- try:
- self.message = compat.text_type(self.error)
- except UnicodeError:
- try:
- self.message = str(self.error)
- except UnicodeEncodeError:
- # Fallback to args as neither unicode nor
- # str(Exception(u'\xe6')) work in Python < 2.6
- self.message = self.error.args[0]
- if not isinstance(self.message, compat.text_type):
- self.message = compat.text_type(self.message, 'ascii', 'replace')
-
- def _get_reformatted_records(self, records):
- for rec in records:
- if rec[6] is not None:
- yield (rec[4], rec[5], rec[2], rec[6])
- else:
- yield tuple(rec[0:4])
-
- @property
- def traceback(self):
- """Return a list of 4-tuple traceback records (i.e. normal python
- format) with template-corresponding lines remapped to the originating
- template.
-
- """
- return list(self._get_reformatted_records(self.records))
-
- @property
- def reverse_records(self):
- return reversed(self.records)
-
- @property
- def reverse_traceback(self):
- """Return the same data as traceback, except in reverse order.
- """
-
- return list(self._get_reformatted_records(self.reverse_records))
-
- def _init(self, trcback):
- """format a traceback from sys.exc_info() into 7-item tuples,
- containing the regular four traceback tuple items, plus the original
- template filename, the line number adjusted relative to the template
- source, and code line from that line number of the template."""
-
- import mako.template
- mods = {}
- rawrecords = traceback.extract_tb(trcback)
- new_trcback = []
- for filename, lineno, function, line in rawrecords:
- if not line:
- line = ''
- try:
- (line_map, template_lines) = mods[filename]
- except KeyError:
- try:
- info = mako.template._get_module_info(filename)
- module_source = info.code
- template_source = info.source
- template_filename = info.template_filename or filename
- except KeyError:
- # A normal .py file (not a Template)
- if not compat.py3k:
- try:
- fp = open(filename, 'rb')
- encoding = util.parse_encoding(fp)
- fp.close()
- except IOError:
- encoding = None
- if encoding:
- line = line.decode(encoding)
- else:
- line = line.decode('ascii', 'replace')
- new_trcback.append((filename, lineno, function, line,
- None, None, None, None))
- continue
-
- template_ln = 1
-
- source_map = mako.template.ModuleInfo.\
- get_module_source_metadata(
- module_source, full_line_map=True)
- line_map = source_map['full_line_map']
-
- template_lines = [line for line in
- template_source.split("\n")]
- mods[filename] = (line_map, template_lines)
-
- template_ln = line_map[lineno - 1]
-
- if template_ln <= len(template_lines):
- template_line = template_lines[template_ln - 1]
- else:
- template_line = None
- new_trcback.append((filename, lineno, function,
- line, template_filename, template_ln,
- template_line, template_source))
- if not self.source:
- for l in range(len(new_trcback) - 1, 0, -1):
- if new_trcback[l][5]:
- self.source = new_trcback[l][7]
- self.lineno = new_trcback[l][5]
- break
- else:
- if new_trcback:
- try:
- # A normal .py file (not a Template)
- fp = open(new_trcback[-1][0], 'rb')
- encoding = util.parse_encoding(fp)
- fp.seek(0)
- self.source = fp.read()
- fp.close()
- if encoding:
- self.source = self.source.decode(encoding)
- except IOError:
- self.source = ''
- self.lineno = new_trcback[-1][1]
- return new_trcback
-
-
-def text_error_template(lookup=None):
- """Provides a template that renders a stack trace in a similar format to
- the Python interpreter, substituting source template filenames, line
- numbers and code for that of the originating source template, as
- applicable.
-
- """
- import mako.template
- return mako.template.Template(r"""
-<%page args="error=None, traceback=None"/>
-<%!
- from mako.exceptions import RichTraceback
-%>\
-<%
- tback = RichTraceback(error=error, traceback=traceback)
-%>\
-Traceback (most recent call last):
-% for (filename, lineno, function, line) in tback.traceback:
- File "${filename}", line ${lineno}, in ${function or '?'}
- ${line | trim}
-% endfor
-${tback.errorname}: ${tback.message}
-""")
-
-
-def _install_pygments():
- global syntax_highlight, pygments_html_formatter
- from mako.ext.pygmentplugin import syntax_highlight,\
- pygments_html_formatter
-
-def _install_fallback():
- global syntax_highlight, pygments_html_formatter
- from mako.filters import html_escape
- pygments_html_formatter = None
- def syntax_highlight(filename='', language=None):
- return html_escape
-
-def _install_highlighting():
- try:
- _install_pygments()
- except ImportError:
- _install_fallback()
-_install_highlighting()
-
-def html_error_template():
- """Provides a template that renders a stack trace in an HTML format,
- providing an excerpt of code as well as substituting source template
- filenames, line numbers and code for that of the originating source
- template, as applicable.
-
- The template's default ``encoding_errors`` value is
- ``'htmlentityreplace'``. The template has two options. With the
- ``full`` option disabled, only a section of an HTML document is
- returned. With the ``css`` option disabled, the default stylesheet
- won't be included.
-
- """
- import mako.template
- return mako.template.Template(r"""
-<%!
- from mako.exceptions import RichTraceback, syntax_highlight,\
- pygments_html_formatter
-%>
-<%page args="full=True, css=True, error=None, traceback=None"/>
-% if full:
-<html>
-<head>
- <title>Mako Runtime Error</title>
-% endif
-% if css:
- <style>
- body { font-family:verdana; margin:10px 30px 10px 30px;}
- .stacktrace { margin:5px 5px 5px 5px; }
- .highlight { padding:0px 10px 0px 10px; background-color:#9F9FDF; }
- .nonhighlight { padding:0px; background-color:#DFDFDF; }
- .sample { padding:10px; margin:10px 10px 10px 10px;
- font-family:monospace; }
- .sampleline { padding:0px 10px 0px 10px; }
- .sourceline { margin:5px 5px 10px 5px; font-family:monospace;}
- .location { font-size:80%; }
- .highlight { white-space:pre; }
- .sampleline { white-space:pre; }
-
- % if pygments_html_formatter:
- ${pygments_html_formatter.get_style_defs()}
- .linenos { min-width: 2.5em; text-align: right; }
- pre { margin: 0; }
- .syntax-highlighted { padding: 0 10px; }
- .syntax-highlightedtable { border-spacing: 1px; }
- .nonhighlight { border-top: 1px solid #DFDFDF;
- border-bottom: 1px solid #DFDFDF; }
- .stacktrace .nonhighlight { margin: 5px 15px 10px; }
- .sourceline { margin: 0 0; font-family:monospace; }
- .code { background-color: #F8F8F8; width: 100%; }
- .error .code { background-color: #FFBDBD; }
- .error .syntax-highlighted { background-color: #FFBDBD; }
- % endif
-
- </style>
-% endif
-% if full:
-</head>
-<body>
-% endif
-
-<h2>Error !</h2>
-<%
- tback = RichTraceback(error=error, traceback=traceback)
- src = tback.source
- line = tback.lineno
- if src:
- lines = src.split('\n')
- else:
- lines = None
-%>
-<h3>${tback.errorname}: ${tback.message|h}</h3>
-
-% if lines:
- <div class="sample">
- <div class="nonhighlight">
-% for index in range(max(0, line-4),min(len(lines), line+5)):
- <%
- if pygments_html_formatter:
- pygments_html_formatter.linenostart = index + 1
- %>
- % if index + 1 == line:
- <%
- if pygments_html_formatter:
- old_cssclass = pygments_html_formatter.cssclass
- pygments_html_formatter.cssclass = 'error ' + old_cssclass
- %>
- ${lines[index] | syntax_highlight(language='mako')}
- <%
- if pygments_html_formatter:
- pygments_html_formatter.cssclass = old_cssclass
- %>
- % else:
- ${lines[index] | syntax_highlight(language='mako')}
- % endif
-% endfor
- </div>
- </div>
-% endif
-
-<div class="stacktrace">
-% for (filename, lineno, function, line) in tback.reverse_traceback:
- <div class="location">${filename}, line ${lineno}:</div>
- <div class="nonhighlight">
- <%
- if pygments_html_formatter:
- pygments_html_formatter.linenostart = lineno
- %>
- <div class="sourceline">${line | syntax_highlight(filename)}</div>
- </div>
-% endfor
-</div>
-
-% if full:
-</body>
-</html>
-% endif
-""", output_encoding=sys.getdefaultencoding(),
- encoding_errors='htmlentityreplace')
+++ /dev/null
-# mako/filters.py
-# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
-#
-# This module is part of Mako and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-
-import re
-import codecs
-
-from mako.compat import quote_plus, unquote_plus, codepoint2name, \
- name2codepoint
-
-from mako import compat
-
-xml_escapes = {
- '&': '&amp;',
- '>': '&gt;',
- '<': '&lt;',
- '"': '&#34;', # also &quot; in html-only
- "'": '&#39;' # also &apos; in html-only
-}
-
-# XXX: &quot; is valid in HTML and XML
-# &apos; is not valid HTML, but is valid XML
-
-def legacy_html_escape(s):
- """legacy HTML escape for non-unicode mode."""
- s = s.replace("&", "&amp;")
- s = s.replace(">", "&gt;")
- s = s.replace("<", "&lt;")
- s = s.replace('"', "&#34;")
- s = s.replace("'", "&#39;")
- return s
-
-
-try:
- import markupsafe
- html_escape = markupsafe.escape
-except ImportError:
- html_escape = legacy_html_escape
-
-def xml_escape(string):
- return re.sub(r'([&<"\'>])', lambda m: xml_escapes[m.group()], string)
-
-def url_escape(string):
- # convert into a list of octets
- string = string.encode("utf8")
- return quote_plus(string)
-
-def legacy_url_escape(string):
- # convert into a list of octets
- return quote_plus(string)
-
-def url_unescape(string):
- text = unquote_plus(string)
- if not is_ascii_str(text):
- text = text.decode("utf8")
- return text
-
-def trim(string):
- return string.strip()
-
-
-class Decode(object):
- def __getattr__(self, key):
- def decode(x):
- if isinstance(x, compat.text_type):
- return x
- elif not isinstance(x, compat.binary_type):
- return decode(str(x))
- else:
- return compat.text_type(x, encoding=key)
- return decode
-decode = Decode()
-
-
-_ASCII_re = re.compile(r'\A[\x00-\x7f]*\Z')
-
-def is_ascii_str(text):
- return isinstance(text, str) and _ASCII_re.match(text)
-
-################################################################
-
-class XMLEntityEscaper(object):
- def __init__(self, codepoint2name, name2codepoint):
- self.codepoint2entity = dict([(c, compat.text_type('&%s;' % n))
- for c, n in codepoint2name.items()])
- self.name2codepoint = name2codepoint
-
- def escape_entities(self, text):
- """Replace characters with their character entity references.
-
- Only characters corresponding to a named entity are replaced.
- """
- return compat.text_type(text).translate(self.codepoint2entity)
-
- def __escape(self, m):
- codepoint = ord(m.group())
- try:
- return self.codepoint2entity[codepoint]
- except (KeyError, IndexError):
- return '&#x%X;' % codepoint
-
-
- __escapable = re.compile(r'["&<>]|[^\x00-\x7f]')
-
- def escape(self, text):
- """Replace characters with their character references.
-
- Replace characters by their named entity references.
- Non-ASCII characters, if they do not have a named entity reference,
- are replaced by numerical character references.
-
- The return value is guaranteed to be ASCII.
- """
- return self.__escapable.sub(self.__escape, compat.text_type(text)
- ).encode('ascii')
-
- # XXX: This regexp will not match all valid XML entity names__.
- # (It punts on details involving involving CombiningChars and Extenders.)
- #
- # .. __: http://www.w3.org/TR/2000/REC-xml-20001006#NT-EntityRef
- __characterrefs = re.compile(r'''& (?:
- \#(\d+)
- | \#x([\da-f]+)
- | ( (?!\d) [:\w] [-.:\w]+ )
- ) ;''',
- re.X | re.UNICODE)
-
- def __unescape(self, m):
- dval, hval, name = m.groups()
- if dval:
- codepoint = int(dval)
- elif hval:
- codepoint = int(hval, 16)
- else:
- codepoint = self.name2codepoint.get(name, 0xfffd)
- # U+FFFD = "REPLACEMENT CHARACTER"
- if codepoint < 128:
- return chr(codepoint)
- return chr(codepoint)
-
- def unescape(self, text):
- """Unescape character references.
-
- All character references (both entity references and numerical
- character references) are unescaped.
- """
- return self.__characterrefs.sub(self.__unescape, text)
-
-
-_html_entities_escaper = XMLEntityEscaper(codepoint2name, name2codepoint)
-
-html_entities_escape = _html_entities_escaper.escape_entities
-html_entities_unescape = _html_entities_escaper.unescape
-
-
-def htmlentityreplace_errors(ex):
- """An encoding error handler.
-
- This python `codecs`_ error handler replaces unencodable
- characters with HTML entities, or, if no HTML entity exists for
- the character, XML character references.
-
- >>> u'The cost was \u20ac12.'.encode('latin1', 'htmlentityreplace')
- 'The cost was &euro;12.'
- """
- if isinstance(ex, UnicodeEncodeError):
- # Handle encoding errors
- bad_text = ex.object[ex.start:ex.end]
- text = _html_entities_escaper.escape(bad_text)
- return (compat.text_type(text), ex.end)
- raise ex
-
-codecs.register_error('htmlentityreplace', htmlentityreplace_errors)
-
-
-# TODO: options to make this dynamic per-compilation will be added in a later
-# release
-DEFAULT_ESCAPES = {
- 'x': 'filters.xml_escape',
- 'h': 'filters.html_escape',
- 'u': 'filters.url_escape',
- 'trim': 'filters.trim',
- 'entity': 'filters.html_entities_escape',
- 'unicode': 'unicode',
- 'decode': 'decode',
- 'str': 'str',
- 'n': 'n'
-}
-
-if compat.py3k:
- DEFAULT_ESCAPES.update({
- 'unicode': 'str'
- })
-
-NON_UNICODE_ESCAPES = DEFAULT_ESCAPES.copy()
-NON_UNICODE_ESCAPES['h'] = 'filters.legacy_html_escape'
-NON_UNICODE_ESCAPES['u'] = 'filters.legacy_url_escape'
-
+++ /dev/null
-# mako/lexer.py
-# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
-#
-# This module is part of Mako and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""provides the Lexer class for parsing template strings into parse trees."""
-
-import re
-import codecs
-from mako import parsetree, exceptions, compat
-from mako.pygen import adjust_whitespace
-
-_regexp_cache = {}
-
-class Lexer(object):
- def __init__(self, text, filename=None,
- disable_unicode=False,
- input_encoding=None, preprocessor=None):
- self.text = text
- self.filename = filename
- self.template = parsetree.TemplateNode(self.filename)
- self.matched_lineno = 1
- self.matched_charpos = 0
- self.lineno = 1
- self.match_position = 0
- self.tag = []
- self.control_line = []
- self.ternary_stack = []
- self.disable_unicode = disable_unicode
- self.encoding = input_encoding
-
- if compat.py3k and disable_unicode:
- raise exceptions.UnsupportedError(
- "Mako for Python 3 does not "
- "support disabling Unicode")
-
- if preprocessor is None:
- self.preprocessor = []
- elif not hasattr(preprocessor, '__iter__'):
- self.preprocessor = [preprocessor]
- else:
- self.preprocessor = preprocessor
-
- @property
- def exception_kwargs(self):
- return {'source': self.text,
- 'lineno': self.matched_lineno,
- 'pos': self.matched_charpos,
- 'filename': self.filename}
-
- def match(self, regexp, flags=None):
- """compile the given regexp, cache the reg, and call match_reg()."""
-
- try:
- reg = _regexp_cache[(regexp, flags)]
- except KeyError:
- if flags:
- reg = re.compile(regexp, flags)
- else:
- reg = re.compile(regexp)
- _regexp_cache[(regexp, flags)] = reg
-
- return self.match_reg(reg)
-
- def match_reg(self, reg):
- """match the given regular expression object to the current text
- position.
-
- if a match occurs, update the current text and line position.
-
- """
-
- mp = self.match_position
-
- match = reg.match(self.text, self.match_position)
- if match:
- (start, end) = match.span()
- if end == start:
- self.match_position = end + 1
- else:
- self.match_position = end
- self.matched_lineno = self.lineno
- lines = re.findall(r"\n", self.text[mp:self.match_position])
- cp = mp - 1
- while (cp >= 0 and cp < self.textlength and self.text[cp] != '\n'):
- cp -= 1
- self.matched_charpos = mp - cp
- self.lineno += len(lines)
- #print "MATCHED:", match.group(0), "LINE START:",
- # self.matched_lineno, "LINE END:", self.lineno
- #print "MATCH:", regexp, "\n", self.text[mp : mp + 15], \
- # (match and "TRUE" or "FALSE")
- return match
-
- def parse_until_text(self, *text):
- startpos = self.match_position
- text_re = r'|'.join(text)
- brace_level = 0
- while True:
- match = self.match(r'#.*\n')
- if match:
- continue
- match = self.match(r'(\"\"\"|\'\'\'|\"|\')((?<!\\)\\\1|.)*?\1',
- re.S)
- if match:
- continue
- match = self.match(r'(%s)' % text_re)
- if match:
- if match.group(1) == '}' and brace_level > 0:
- brace_level -= 1
- continue
- return \
- self.text[startpos:
- self.match_position - len(match.group(1))],\
- match.group(1)
- match = self.match(r"(.*?)(?=\"|\'|#|%s)" % text_re, re.S)
- if match:
- brace_level += match.group(1).count('{')
- brace_level -= match.group(1).count('}')
- continue
- raise exceptions.SyntaxException(
- "Expected: %s" %
- ','.join(text),
- **self.exception_kwargs)
-
- def append_node(self, nodecls, *args, **kwargs):
- kwargs.setdefault('source', self.text)
- kwargs.setdefault('lineno', self.matched_lineno)
- kwargs.setdefault('pos', self.matched_charpos)
- kwargs['filename'] = self.filename
- node = nodecls(*args, **kwargs)
- if len(self.tag):
- self.tag[-1].nodes.append(node)
- else:
- self.template.nodes.append(node)
- # build a set of child nodes for the control line
- # (used for loop variable detection)
- # also build a set of child nodes on ternary control lines
- # (used for determining if a pass needs to be auto-inserted
- if self.control_line:
- control_frame = self.control_line[-1]
- control_frame.nodes.append(node)
- if not (isinstance(node, parsetree.ControlLine) and
- control_frame.is_ternary(node.keyword)):
- if self.ternary_stack and self.ternary_stack[-1]:
- self.ternary_stack[-1][-1].nodes.append(node)
- if isinstance(node, parsetree.Tag):
- if len(self.tag):
- node.parent = self.tag[-1]
- self.tag.append(node)
- elif isinstance(node, parsetree.ControlLine):
- if node.isend:
- self.control_line.pop()
- self.ternary_stack.pop()
- elif node.is_primary:
- self.control_line.append(node)
- self.ternary_stack.append([])
- elif self.control_line and \
- self.control_line[-1].is_ternary(node.keyword):
- self.ternary_stack[-1].append(node)
- elif self.control_line and \
- not self.control_line[-1].is_ternary(node.keyword):
- raise exceptions.SyntaxException(
- "Keyword '%s' not a legal ternary for keyword '%s'" %
- (node.keyword, self.control_line[-1].keyword),
- **self.exception_kwargs)
-
- _coding_re = re.compile(r'#.*coding[:=]\s*([-\w.]+).*\r?\n')
-
- def decode_raw_stream(self, text, decode_raw, known_encoding, filename):
- """given string/unicode or bytes/string, determine encoding
- from magic encoding comment, return body as unicode
- or raw if decode_raw=False
-
- """
- if isinstance(text, compat.text_type):
- m = self._coding_re.match(text)
- encoding = m and m.group(1) or known_encoding or 'ascii'
- return encoding, text
-
- if text.startswith(codecs.BOM_UTF8):
- text = text[len(codecs.BOM_UTF8):]
- parsed_encoding = 'utf-8'
- m = self._coding_re.match(text.decode('utf-8', 'ignore'))
- if m is not None and m.group(1) != 'utf-8':
- raise exceptions.CompileException(
- "Found utf-8 BOM in file, with conflicting "
- "magic encoding comment of '%s'" % m.group(1),
- text.decode('utf-8', 'ignore'),
- 0, 0, filename)
- else:
- m = self._coding_re.match(text.decode('utf-8', 'ignore'))
- if m:
- parsed_encoding = m.group(1)
- else:
- parsed_encoding = known_encoding or 'ascii'
-
- if decode_raw:
- try:
- text = text.decode(parsed_encoding)
- except UnicodeDecodeError:
- raise exceptions.CompileException(
- "Unicode decode operation of encoding '%s' failed" %
- parsed_encoding,
- text.decode('utf-8', 'ignore'),
- 0, 0, filename)
-
- return parsed_encoding, text
-
- def parse(self):
- self.encoding, self.text = self.decode_raw_stream(self.text,
- not self.disable_unicode,
- self.encoding,
- self.filename,)
-
- for preproc in self.preprocessor:
- self.text = preproc(self.text)
-
- # push the match marker past the
- # encoding comment.
- self.match_reg(self._coding_re)
-
- self.textlength = len(self.text)
-
- while (True):
- if self.match_position > self.textlength:
- break
-
- if self.match_end():
- break
- if self.match_expression():
- continue
- if self.match_control_line():
- continue
- if self.match_comment():
- continue
- if self.match_tag_start():
- continue
- if self.match_tag_end():
- continue
- if self.match_python_block():
- continue
- if self.match_text():
- continue
-
- if self.match_position > self.textlength:
- break
- raise exceptions.CompileException("assertion failed")
-
- if len(self.tag):
- raise exceptions.SyntaxException("Unclosed tag: <%%%s>" %
- self.tag[-1].keyword,
- **self.exception_kwargs)
- if len(self.control_line):
- raise exceptions.SyntaxException(
- "Unterminated control keyword: '%s'" %
- self.control_line[-1].keyword,
- self.text,
- self.control_line[-1].lineno,
- self.control_line[-1].pos, self.filename)
- return self.template
-
- def match_tag_start(self):
- match = self.match(r'''
- \<% # opening tag
-
- ([\w\.\:]+) # keyword
-
- ((?:\s+\w+|\s*=\s*|".*?"|'.*?')*) # attrname, = \
- # sign, string expression
-
- \s* # more whitespace
-
- (/)?> # closing
-
- ''',
-
- re.I | re.S | re.X)
-
- if match:
- keyword, attr, isend = match.groups()
- self.keyword = keyword
- attributes = {}
- if attr:
- for att in re.findall(
- r"\s*(\w+)\s*=\s*(?:'([^']*)'|\"([^\"]*)\")", attr):
- key, val1, val2 = att
- text = val1 or val2
- text = text.replace('\r\n', '\n')
- attributes[key] = text
- self.append_node(parsetree.Tag, keyword, attributes)
- if isend:
- self.tag.pop()
- else:
- if keyword == 'text':
- match = self.match(r'(.*?)(?=\</%text>)', re.S)
- if not match:
- raise exceptions.SyntaxException(
- "Unclosed tag: <%%%s>" %
- self.tag[-1].keyword,
- **self.exception_kwargs)
- self.append_node(parsetree.Text, match.group(1))
- return self.match_tag_end()
- return True
- else:
- return False
-
- def match_tag_end(self):
- match = self.match(r'\</%[\t ]*(.+?)[\t ]*>')
- if match:
- if not len(self.tag):
- raise exceptions.SyntaxException(
- "Closing tag without opening tag: </%%%s>" %
- match.group(1),
- **self.exception_kwargs)
- elif self.tag[-1].keyword != match.group(1):
- raise exceptions.SyntaxException(
- "Closing tag </%%%s> does not match tag: <%%%s>" %
- (match.group(1), self.tag[-1].keyword),
- **self.exception_kwargs)
- self.tag.pop()
- return True
- else:
- return False
-
- def match_end(self):
- match = self.match(r'\Z', re.S)
- if match:
- string = match.group()
- if string:
- return string
- else:
- return True
- else:
- return False
-
- def match_text(self):
- match = self.match(r"""
- (.*?) # anything, followed by:
- (
- (?<=\n)(?=[ \t]*(?=%|\#\#)) # an eval or line-based
- # comment preceded by a
- # consumed newline and whitespace
- |
- (?=\${) # an expression
- |
- (?=</?[%&]) # a substitution or block or call start or end
- # - don't consume
- |
- (\\\r?\n) # an escaped newline - throw away
- |
- \Z # end of string
- )""", re.X | re.S)
-
- if match:
- text = match.group(1)
- if text:
- self.append_node(parsetree.Text, text)
- return True
- else:
- return False
-
- def match_python_block(self):
- match = self.match(r"<%(!)?")
- if match:
- line, pos = self.matched_lineno, self.matched_charpos
- text, end = self.parse_until_text(r'%>')
- # the trailing newline helps
- # compiler.parse() not complain about indentation
- text = adjust_whitespace(text) + "\n"
- self.append_node(
- parsetree.Code,
- text,
- match.group(1) == '!', lineno=line, pos=pos)
- return True
- else:
- return False
-
- def match_expression(self):
- match = self.match(r"\${")
- if match:
- line, pos = self.matched_lineno, self.matched_charpos
- text, end = self.parse_until_text(r'\|', r'}')
- if end == '|':
- escapes, end = self.parse_until_text(r'}')
- else:
- escapes = ""
- text = text.replace('\r\n', '\n')
- self.append_node(
- parsetree.Expression,
- text, escapes.strip(),
- lineno=line, pos=pos)
- return True
- else:
- return False
-
- def match_control_line(self):
- match = self.match(
- r"(?<=^)[\t ]*(%(?!%)|##)[\t ]*((?:(?:\\r?\n)|[^\r\n])*)"
- r"(?:\r?\n|\Z)", re.M)
- if match:
- operator = match.group(1)
- text = match.group(2)
- if operator == '%':
- m2 = re.match(r'(end)?(\w+)\s*(.*)', text)
- if not m2:
- raise exceptions.SyntaxException(
- "Invalid control line: '%s'" %
- text,
- **self.exception_kwargs)
- isend, keyword = m2.group(1, 2)
- isend = (isend is not None)
-
- if isend:
- if not len(self.control_line):
- raise exceptions.SyntaxException(
- "No starting keyword '%s' for '%s'" %
- (keyword, text),
- **self.exception_kwargs)
- elif self.control_line[-1].keyword != keyword:
- raise exceptions.SyntaxException(
- "Keyword '%s' doesn't match keyword '%s'" %
- (text, self.control_line[-1].keyword),
- **self.exception_kwargs)
- self.append_node(parsetree.ControlLine, keyword, isend, text)
- else:
- self.append_node(parsetree.Comment, text)
- return True
- else:
- return False
-
- def match_comment(self):
- """matches the multiline version of a comment"""
- match = self.match(r"<%doc>(.*?)</%doc>", re.S)
- if match:
- self.append_node(parsetree.Comment, match.group(1))
- return True
- else:
- return False
-
+++ /dev/null
-# mako/lookup.py
-# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
-#
-# This module is part of Mako and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-import os, stat, posixpath, re
-from mako import exceptions, util
-from mako.template import Template
-
-try:
- import threading
-except:
- import dummy_threading as threading
-
-class TemplateCollection(object):
- """Represent a collection of :class:`.Template` objects,
- identifiable via URI.
-
- A :class:`.TemplateCollection` is linked to the usage of
- all template tags that address other templates, such
- as ``<%include>``, ``<%namespace>``, and ``<%inherit>``.
- The ``file`` attribute of each of those tags refers
- to a string URI that is passed to that :class:`.Template`
- object's :class:`.TemplateCollection` for resolution.
-
- :class:`.TemplateCollection` is an abstract class,
- with the usual default implementation being :class:`.TemplateLookup`.
-
- """
-
- def has_template(self, uri):
- """Return ``True`` if this :class:`.TemplateLookup` is
- capable of returning a :class:`.Template` object for the
- given ``uri``.
-
- :param uri: String URI of the template to be resolved.
-
- """
- try:
- self.get_template(uri)
- return True
- except exceptions.TemplateLookupException:
- return False
-
- def get_template(self, uri, relativeto=None):
- """Return a :class:`.Template` object corresponding to the given
- ``uri``.
-
- The default implementation raises
- :class:`.NotImplementedError`. Implementations should
- raise :class:`.TemplateLookupException` if the given ``uri``
- cannot be resolved.
-
- :param uri: String URI of the template to be resolved.
- :param relativeto: if present, the given ``uri`` is assumed to
- be relative to this URI.
-
- """
- raise NotImplementedError()
-
- def filename_to_uri(self, uri, filename):
- """Convert the given ``filename`` to a URI relative to
- this :class:`.TemplateCollection`."""
-
- return uri
-
- def adjust_uri(self, uri, filename):
- """Adjust the given ``uri`` based on the calling ``filename``.
-
- When this method is called from the runtime, the
- ``filename`` parameter is taken directly to the ``filename``
- attribute of the calling template. Therefore a custom
- :class:`.TemplateCollection` subclass can place any string
- identifier desired in the ``filename`` parameter of the
- :class:`.Template` objects it constructs and have them come back
- here.
-
- """
- return uri
-
-class TemplateLookup(TemplateCollection):
- """Represent a collection of templates that locates template source files
- from the local filesystem.
-
- The primary argument is the ``directories`` argument, the list of
- directories to search:
-
- .. sourcecode:: python
-
- lookup = TemplateLookup(["/path/to/templates"])
- some_template = lookup.get_template("/index.html")
-
- The :class:`.TemplateLookup` can also be given :class:`.Template` objects
- programatically using :meth:`.put_string` or :meth:`.put_template`:
-
- .. sourcecode:: python
-
- lookup = TemplateLookup()
- lookup.put_string("base.html", '''
- <html><body>${self.next()}</body></html>
- ''')
- lookup.put_string("hello.html", '''
- <%include file='base.html'/>
-
- Hello, world !
- ''')
-
-
- :param directories: A list of directory names which will be
- searched for a particular template URI. The URI is appended
- to each directory and the filesystem checked.
-
- :param collection_size: Approximate size of the collection used
- to store templates. If left at its default of ``-1``, the size
- is unbounded, and a plain Python dictionary is used to
- relate URI strings to :class:`.Template` instances.
- Otherwise, a least-recently-used cache object is used which
- will maintain the size of the collection approximately to
- the number given.
-
- :param filesystem_checks: When at its default value of ``True``,
- each call to :meth:`.TemplateLookup.get_template()` will
- compare the filesystem last modified time to the time in
- which an existing :class:`.Template` object was created.
- This allows the :class:`.TemplateLookup` to regenerate a
- new :class:`.Template` whenever the original source has
- been updated. Set this to ``False`` for a very minor
- performance increase.
-
- :param modulename_callable: A callable which, when present,
- is passed the path of the source file as well as the
- requested URI, and then returns the full path of the
- generated Python module file. This is used to inject
- alternate schemes for Python module location. If left at
- its default of ``None``, the built in system of generation
- based on ``module_directory`` plus ``uri`` is used.
-
- All other keyword parameters available for
- :class:`.Template` are mirrored here. When new
- :class:`.Template` objects are created, the keywords
- established with this :class:`.TemplateLookup` are passed on
- to each new :class:`.Template`.
-
- """
-
- def __init__(self,
- directories=None,
- module_directory=None,
- filesystem_checks=True,
- collection_size=-1,
- format_exceptions=False,
- error_handler=None,
- disable_unicode=False,
- bytestring_passthrough=False,
- output_encoding=None,
- encoding_errors='strict',
-
- cache_args=None,
- cache_impl='beaker',
- cache_enabled=True,
- cache_type=None,
- cache_dir=None,
- cache_url=None,
-
- modulename_callable=None,
- module_writer=None,
- default_filters=None,
- buffer_filters=(),
- strict_undefined=False,
- imports=None,
- future_imports=None,
- enable_loop=True,
- input_encoding=None,
- preprocessor=None,
- lexer_cls=None):
-
- self.directories = [posixpath.normpath(d) for d in
- util.to_list(directories, ())
- ]
- self.module_directory = module_directory
- self.modulename_callable = modulename_callable
- self.filesystem_checks = filesystem_checks
- self.collection_size = collection_size
-
- if cache_args is None:
- cache_args = {}
- # transfer deprecated cache_* args
- if cache_dir:
- cache_args.setdefault('dir', cache_dir)
- if cache_url:
- cache_args.setdefault('url', cache_url)
- if cache_type:
- cache_args.setdefault('type', cache_type)
-
- self.template_args = {
- 'format_exceptions':format_exceptions,
- 'error_handler':error_handler,
- 'disable_unicode':disable_unicode,
- 'bytestring_passthrough':bytestring_passthrough,
- 'output_encoding':output_encoding,
- 'cache_impl':cache_impl,
- 'encoding_errors':encoding_errors,
- 'input_encoding':input_encoding,
- 'module_directory':module_directory,
- 'module_writer':module_writer,
- 'cache_args':cache_args,
- 'cache_enabled':cache_enabled,
- 'default_filters':default_filters,
- 'buffer_filters':buffer_filters,
- 'strict_undefined':strict_undefined,
- 'imports':imports,
- 'future_imports':future_imports,
- 'enable_loop':enable_loop,
- 'preprocessor':preprocessor,
- 'lexer_cls':lexer_cls
- }
-
- if collection_size == -1:
- self._collection = {}
- self._uri_cache = {}
- else:
- self._collection = util.LRUCache(collection_size)
- self._uri_cache = util.LRUCache(collection_size)
- self._mutex = threading.Lock()
-
- def get_template(self, uri):
- """Return a :class:`.Template` object corresponding to the given
- ``uri``.
-
- .. note:: The ``relativeto`` argument is not supported here at the moment.
-
- """
-
- try:
- if self.filesystem_checks:
- return self._check(uri, self._collection[uri])
- else:
- return self._collection[uri]
- except KeyError:
- u = re.sub(r'^\/+', '', uri)
- for dir in self.directories:
- srcfile = posixpath.normpath(posixpath.join(dir, u))
- if os.path.isfile(srcfile):
- return self._load(srcfile, uri)
- else:
- raise exceptions.TopLevelLookupException(
- "Cant locate template for uri %r" % uri)
-
- def adjust_uri(self, uri, relativeto):
- """Adjust the given ``uri`` based on the given relative URI."""
-
- key = (uri, relativeto)
- if key in self._uri_cache:
- return self._uri_cache[key]
-
- if uri[0] != '/':
- if relativeto is not None:
- v = self._uri_cache[key] = posixpath.join(
- posixpath.dirname(relativeto), uri)
- else:
- v = self._uri_cache[key] = '/' + uri
- else:
- v = self._uri_cache[key] = uri
- return v
-
-
- def filename_to_uri(self, filename):
- """Convert the given ``filename`` to a URI relative to
- this :class:`.TemplateCollection`."""
-
- try:
- return self._uri_cache[filename]
- except KeyError:
- value = self._relativeize(filename)
- self._uri_cache[filename] = value
- return value
-
- def _relativeize(self, filename):
- """Return the portion of a filename that is 'relative'
- to the directories in this lookup.
-
- """
-
- filename = posixpath.normpath(filename)
- for dir in self.directories:
- if filename[0:len(dir)] == dir:
- return filename[len(dir):]
- else:
- return None
-
- def _load(self, filename, uri):
- self._mutex.acquire()
- try:
- try:
- # try returning from collection one
- # more time in case concurrent thread already loaded
- return self._collection[uri]
- except KeyError:
- pass
- try:
- if self.modulename_callable is not None:
- module_filename = self.modulename_callable(filename, uri)
- else:
- module_filename = None
- self._collection[uri] = template = Template(
- uri=uri,
- filename=posixpath.normpath(filename),
- lookup=self,
- module_filename=module_filename,
- **self.template_args)
- return template
- except:
- # if compilation fails etc, ensure
- # template is removed from collection,
- # re-raise
- self._collection.pop(uri, None)
- raise
- finally:
- self._mutex.release()
-
- def _check(self, uri, template):
- if template.filename is None:
- return template
-
- try:
- template_stat = os.stat(template.filename)
- if template.module._modified_time < \
- template_stat[stat.ST_MTIME]:
- self._collection.pop(uri, None)
- return self._load(template.filename, uri)
- else:
- return template
- except OSError:
- self._collection.pop(uri, None)
- raise exceptions.TemplateLookupException(
- "Cant locate template for uri %r" % uri)
-
-
- def put_string(self, uri, text):
- """Place a new :class:`.Template` object into this
- :class:`.TemplateLookup`, based on the given string of
- ``text``.
-
- """
- self._collection[uri] = Template(
- text,
- lookup=self,
- uri=uri,
- **self.template_args)
-
- def put_template(self, uri, template):
- """Place a new :class:`.Template` object into this
- :class:`.TemplateLookup`, based on the given
- :class:`.Template` object.
-
- """
- self._collection[uri] = template
-
+++ /dev/null
-# mako/parsetree.py
-# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
-#
-# This module is part of Mako and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""defines the parse tree components for Mako templates."""
-
-from mako import exceptions, ast, util, filters, compat
-import re
-
-class Node(object):
- """base class for a Node in the parse tree."""
-
- def __init__(self, source, lineno, pos, filename):
- self.source = source
- self.lineno = lineno
- self.pos = pos
- self.filename = filename
-
- @property
- def exception_kwargs(self):
- return {'source': self.source, 'lineno': self.lineno,
- 'pos': self.pos, 'filename': self.filename}
-
- def get_children(self):
- return []
-
- def accept_visitor(self, visitor):
- def traverse(node):
- for n in node.get_children():
- n.accept_visitor(visitor)
-
- method = getattr(visitor, "visit" + self.__class__.__name__, traverse)
- method(self)
-
-class TemplateNode(Node):
- """a 'container' node that stores the overall collection of nodes."""
-
- def __init__(self, filename):
- super(TemplateNode, self).__init__('', 0, 0, filename)
- self.nodes = []
- self.page_attributes = {}
-
- def get_children(self):
- return self.nodes
-
- def __repr__(self):
- return "TemplateNode(%s, %r)" % (
- util.sorted_dict_repr(self.page_attributes),
- self.nodes)
-
-class ControlLine(Node):
- """defines a control line, a line-oriented python line or end tag.
-
- e.g.::
-
- % if foo:
- (markup)
- % endif
-
- """
-
- has_loop_context = False
-
- def __init__(self, keyword, isend, text, **kwargs):
- super(ControlLine, self).__init__(**kwargs)
- self.text = text
- self.keyword = keyword
- self.isend = isend
- self.is_primary = keyword in ['for', 'if', 'while', 'try', 'with']
- self.nodes = []
- if self.isend:
- self._declared_identifiers = []
- self._undeclared_identifiers = []
- else:
- code = ast.PythonFragment(text, **self.exception_kwargs)
- self._declared_identifiers = code.declared_identifiers
- self._undeclared_identifiers = code.undeclared_identifiers
-
- def get_children(self):
- return self.nodes
-
- def declared_identifiers(self):
- return self._declared_identifiers
-
- def undeclared_identifiers(self):
- return self._undeclared_identifiers
-
- def is_ternary(self, keyword):
- """return true if the given keyword is a ternary keyword
- for this ControlLine"""
-
- return keyword in {
- 'if':set(['else', 'elif']),
- 'try':set(['except', 'finally']),
- 'for':set(['else'])
- }.get(self.keyword, [])
-
- def __repr__(self):
- return "ControlLine(%r, %r, %r, %r)" % (
- self.keyword,
- self.text,
- self.isend,
- (self.lineno, self.pos)
- )
-
-class Text(Node):
- """defines plain text in the template."""
-
- def __init__(self, content, **kwargs):
- super(Text, self).__init__(**kwargs)
- self.content = content
-
- def __repr__(self):
- return "Text(%r, %r)" % (self.content, (self.lineno, self.pos))
-
-class Code(Node):
- """defines a Python code block, either inline or module level.
-
- e.g.::
-
- inline:
- <%
- x = 12
- %>
-
- module level:
- <%!
- import logger
- %>
-
- """
-
- def __init__(self, text, ismodule, **kwargs):
- super(Code, self).__init__(**kwargs)
- self.text = text
- self.ismodule = ismodule
- self.code = ast.PythonCode(text, **self.exception_kwargs)
-
- def declared_identifiers(self):
- return self.code.declared_identifiers
-
- def undeclared_identifiers(self):
- return self.code.undeclared_identifiers
-
- def __repr__(self):
- return "Code(%r, %r, %r)" % (
- self.text,
- self.ismodule,
- (self.lineno, self.pos)
- )
-
-class Comment(Node):
- """defines a comment line.
-
- # this is a comment
-
- """
-
- def __init__(self, text, **kwargs):
- super(Comment, self).__init__(**kwargs)
- self.text = text
-
- def __repr__(self):
- return "Comment(%r, %r)" % (self.text, (self.lineno, self.pos))
-
-class Expression(Node):
- """defines an inline expression.
-
- ${x+y}
-
- """
-
- def __init__(self, text, escapes, **kwargs):
- super(Expression, self).__init__(**kwargs)
- self.text = text
- self.escapes = escapes
- self.escapes_code = ast.ArgumentList(escapes, **self.exception_kwargs)
- self.code = ast.PythonCode(text, **self.exception_kwargs)
-
- def declared_identifiers(self):
- return []
-
- def undeclared_identifiers(self):
- # TODO: make the "filter" shortcut list configurable at parse/gen time
- return self.code.undeclared_identifiers.union(
- self.escapes_code.undeclared_identifiers.difference(
- set(filters.DEFAULT_ESCAPES.keys())
- )
- ).difference(self.code.declared_identifiers)
-
- def __repr__(self):
- return "Expression(%r, %r, %r)" % (
- self.text,
- self.escapes_code.args,
- (self.lineno, self.pos)
- )
-
-class _TagMeta(type):
- """metaclass to allow Tag to produce a subclass according to
- its keyword"""
-
- _classmap = {}
-
- def __init__(cls, clsname, bases, dict):
- if getattr(cls, '__keyword__', None) is not None:
- cls._classmap[cls.__keyword__] = cls
- super(_TagMeta, cls).__init__(clsname, bases, dict)
-
- def __call__(cls, keyword, attributes, **kwargs):
- if ":" in keyword:
- ns, defname = keyword.split(':')
- return type.__call__(CallNamespaceTag, ns, defname,
- attributes, **kwargs)
-
- try:
- cls = _TagMeta._classmap[keyword]
- except KeyError:
- raise exceptions.CompileException(
- "No such tag: '%s'" % keyword,
- source=kwargs['source'],
- lineno=kwargs['lineno'],
- pos=kwargs['pos'],
- filename=kwargs['filename']
- )
- return type.__call__(cls, keyword, attributes, **kwargs)
-
-class Tag(compat.with_metaclass(_TagMeta, Node)):
- """abstract base class for tags.
-
- <%sometag/>
-
- <%someothertag>
- stuff
- </%someothertag>
-
- """
- __keyword__ = None
-
- def __init__(self, keyword, attributes, expressions,
- nonexpressions, required, **kwargs):
- """construct a new Tag instance.
-
- this constructor not called directly, and is only called
- by subclasses.
-
- :param keyword: the tag keyword
-
- :param attributes: raw dictionary of attribute key/value pairs
-
- :param expressions: a set of identifiers that are legal attributes,
- which can also contain embedded expressions
-
- :param nonexpressions: a set of identifiers that are legal
- attributes, which cannot contain embedded expressions
-
- :param \**kwargs:
- other arguments passed to the Node superclass (lineno, pos)
-
- """
- super(Tag, self).__init__(**kwargs)
- self.keyword = keyword
- self.attributes = attributes
- self._parse_attributes(expressions, nonexpressions)
- missing = [r for r in required if r not in self.parsed_attributes]
- if len(missing):
- raise exceptions.CompileException(
- "Missing attribute(s): %s" %
- ",".join([repr(m) for m in missing]),
- **self.exception_kwargs)
- self.parent = None
- self.nodes = []
-
- def is_root(self):
- return self.parent is None
-
- def get_children(self):
- return self.nodes
-
- def _parse_attributes(self, expressions, nonexpressions):
- undeclared_identifiers = set()
- self.parsed_attributes = {}
- for key in self.attributes:
- if key in expressions:
- expr = []
- for x in re.compile(r'(\${.+?})',
- re.S).split(self.attributes[key]):
- m = re.compile(r'^\${(.+?)}$', re.S).match(x)
- if m:
- code = ast.PythonCode(m.group(1).rstrip(),
- **self.exception_kwargs)
- # we aren't discarding "declared_identifiers" here,
- # which we do so that list comprehension-declared
- # variables aren't counted. As yet can't find a
- # condition that requires it here.
- undeclared_identifiers = \
- undeclared_identifiers.union(
- code.undeclared_identifiers)
- expr.append('(%s)' % m.group(1))
- else:
- if x:
- expr.append(repr(x))
- self.parsed_attributes[key] = " + ".join(expr) or repr('')
- elif key in nonexpressions:
- if re.search(r'\${.+?}', self.attributes[key]):
- raise exceptions.CompileException(
- "Attibute '%s' in tag '%s' does not allow embedded "
- "expressions" % (key, self.keyword),
- **self.exception_kwargs)
- self.parsed_attributes[key] = repr(self.attributes[key])
- else:
- raise exceptions.CompileException(
- "Invalid attribute for tag '%s': '%s'" %
- (self.keyword, key),
- **self.exception_kwargs)
- self.expression_undeclared_identifiers = undeclared_identifiers
-
- def declared_identifiers(self):
- return []
-
- def undeclared_identifiers(self):
- return self.expression_undeclared_identifiers
-
- def __repr__(self):
- return "%s(%r, %s, %r, %r)" % (self.__class__.__name__,
- self.keyword,
- util.sorted_dict_repr(self.attributes),
- (self.lineno, self.pos),
- self.nodes
- )
-
-class IncludeTag(Tag):
- __keyword__ = 'include'
-
- def __init__(self, keyword, attributes, **kwargs):
- super(IncludeTag, self).__init__(
- keyword,
- attributes,
- ('file', 'import', 'args'),
- (), ('file',), **kwargs)
- self.page_args = ast.PythonCode(
- "__DUMMY(%s)" % attributes.get('args', ''),
- **self.exception_kwargs)
-
- def declared_identifiers(self):
- return []
-
- def undeclared_identifiers(self):
- identifiers = self.page_args.undeclared_identifiers.\
- difference(set(["__DUMMY"])).\
- difference(self.page_args.declared_identifiers)
- return identifiers.union(super(IncludeTag, self).
- undeclared_identifiers())
-
-class NamespaceTag(Tag):
- __keyword__ = 'namespace'
-
- def __init__(self, keyword, attributes, **kwargs):
- super(NamespaceTag, self).__init__(
- keyword, attributes,
- ('file',),
- ('name','inheritable',
- 'import','module'),
- (), **kwargs)
-
- self.name = attributes.get('name', '__anon_%s' % hex(abs(id(self))))
- if not 'name' in attributes and not 'import' in attributes:
- raise exceptions.CompileException(
- "'name' and/or 'import' attributes are required "
- "for <%namespace>",
- **self.exception_kwargs)
- if 'file' in attributes and 'module' in attributes:
- raise exceptions.CompileException(
- "<%namespace> may only have one of 'file' or 'module'",
- **self.exception_kwargs
- )
-
- def declared_identifiers(self):
- return []
-
-class TextTag(Tag):
- __keyword__ = 'text'
-
- def __init__(self, keyword, attributes, **kwargs):
- super(TextTag, self).__init__(
- keyword,
- attributes, (),
- ('filter'), (), **kwargs)
- self.filter_args = ast.ArgumentList(
- attributes.get('filter', ''),
- **self.exception_kwargs)
-
- def undeclared_identifiers(self):
- return self.filter_args.\
- undeclared_identifiers.\
- difference(filters.DEFAULT_ESCAPES.keys()).union(
- self.expression_undeclared_identifiers
- )
-
-class DefTag(Tag):
- __keyword__ = 'def'
-
- def __init__(self, keyword, attributes, **kwargs):
- expressions = ['buffered', 'cached'] + [
- c for c in attributes if c.startswith('cache_')]
-
-
- super(DefTag, self).__init__(
- keyword,
- attributes,
- expressions,
- ('name', 'filter', 'decorator'),
- ('name',),
- **kwargs)
- name = attributes['name']
- if re.match(r'^[\w_]+$', name):
- raise exceptions.CompileException(
- "Missing parenthesis in %def",
- **self.exception_kwargs)
- self.function_decl = ast.FunctionDecl("def " + name + ":pass",
- **self.exception_kwargs)
- self.name = self.function_decl.funcname
- self.decorator = attributes.get('decorator', '')
- self.filter_args = ast.ArgumentList(
- attributes.get('filter', ''),
- **self.exception_kwargs)
-
- is_anonymous = False
- is_block = False
-
- @property
- def funcname(self):
- return self.function_decl.funcname
-
- def get_argument_expressions(self, **kw):
- return self.function_decl.get_argument_expressions(**kw)
-
- def declared_identifiers(self):
- return self.function_decl.allargnames
-
- def undeclared_identifiers(self):
- res = []
- for c in self.function_decl.defaults:
- res += list(ast.PythonCode(c, **self.exception_kwargs).
- undeclared_identifiers)
- return set(res).union(
- self.filter_args.\
- undeclared_identifiers.\
- difference(filters.DEFAULT_ESCAPES.keys())
- ).union(
- self.expression_undeclared_identifiers
- ).difference(
- self.function_decl.allargnames
- )
-
-class BlockTag(Tag):
- __keyword__ = 'block'
-
- def __init__(self, keyword, attributes, **kwargs):
- expressions = ['buffered', 'cached', 'args'] + [
- c for c in attributes if c.startswith('cache_')]
-
- super(BlockTag, self).__init__(
- keyword,
- attributes,
- expressions,
- ('name','filter', 'decorator'),
- (),
- **kwargs)
- name = attributes.get('name')
- if name and not re.match(r'^[\w_]+$',name):
- raise exceptions.CompileException(
- "%block may not specify an argument signature",
- **self.exception_kwargs)
- if not name and attributes.get('args', None):
- raise exceptions.CompileException(
- "Only named %blocks may specify args",
- **self.exception_kwargs
- )
- self.body_decl = ast.FunctionArgs(attributes.get('args', ''),
- **self.exception_kwargs)
-
- self.name = name
- self.decorator = attributes.get('decorator', '')
- self.filter_args = ast.ArgumentList(
- attributes.get('filter', ''),
- **self.exception_kwargs)
-
-
- is_block = True
-
- @property
- def is_anonymous(self):
- return self.name is None
-
- @property
- def funcname(self):
- return self.name or "__M_anon_%d" % (self.lineno, )
-
- def get_argument_expressions(self, **kw):
- return self.body_decl.get_argument_expressions(**kw)
-
- def declared_identifiers(self):
- return self.body_decl.allargnames
-
- def undeclared_identifiers(self):
- return (self.filter_args.\
- undeclared_identifiers.\
- difference(filters.DEFAULT_ESCAPES.keys())
- ).union(self.expression_undeclared_identifiers)
-
-
-
-class CallTag(Tag):
- __keyword__ = 'call'
-
- def __init__(self, keyword, attributes, **kwargs):
- super(CallTag, self).__init__(keyword, attributes,
- ('args'), ('expr',), ('expr',), **kwargs)
- self.expression = attributes['expr']
- self.code = ast.PythonCode(self.expression, **self.exception_kwargs)
- self.body_decl = ast.FunctionArgs(attributes.get('args', ''),
- **self.exception_kwargs)
-
- def declared_identifiers(self):
- return self.code.declared_identifiers.union(self.body_decl.allargnames)
-
- def undeclared_identifiers(self):
- return self.code.undeclared_identifiers.\
- difference(self.code.declared_identifiers)
-
-class CallNamespaceTag(Tag):
-
- def __init__(self, namespace, defname, attributes, **kwargs):
- super(CallNamespaceTag, self).__init__(
- namespace + ":" + defname,
- attributes,
- tuple(attributes.keys()) + ('args', ),
- (),
- (),
- **kwargs)
-
- self.expression = "%s.%s(%s)" % (
- namespace,
- defname,
- ",".join(["%s=%s" % (k, v) for k, v in
- self.parsed_attributes.items()
- if k != 'args'])
- )
- self.code = ast.PythonCode(self.expression, **self.exception_kwargs)
- self.body_decl = ast.FunctionArgs(
- attributes.get('args', ''),
- **self.exception_kwargs)
-
- def declared_identifiers(self):
- return self.code.declared_identifiers.union(self.body_decl.allargnames)
-
- def undeclared_identifiers(self):
- return self.code.undeclared_identifiers.\
- difference(self.code.declared_identifiers)
-
-class InheritTag(Tag):
- __keyword__ = 'inherit'
-
- def __init__(self, keyword, attributes, **kwargs):
- super(InheritTag, self).__init__(
- keyword, attributes,
- ('file',), (), ('file',), **kwargs)
-
-class PageTag(Tag):
- __keyword__ = 'page'
-
- def __init__(self, keyword, attributes, **kwargs):
- expressions = ['cached', 'args', 'expression_filter', 'enable_loop'] + [
- c for c in attributes if c.startswith('cache_')]
-
- super(PageTag, self).__init__(
- keyword,
- attributes,
- expressions,
- (),
- (),
- **kwargs)
- self.body_decl = ast.FunctionArgs(attributes.get('args', ''),
- **self.exception_kwargs)
- self.filter_args = ast.ArgumentList(
- attributes.get('expression_filter', ''),
- **self.exception_kwargs)
-
- def declared_identifiers(self):
- return self.body_decl.allargnames
-
-
+++ /dev/null
-# mako/pygen.py
-# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
-#
-# This module is part of Mako and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""utilities for generating and formatting literal Python code."""
-
-import re
-from mako import exceptions
-
-class PythonPrinter(object):
- def __init__(self, stream):
- # indentation counter
- self.indent = 0
-
- # a stack storing information about why we incremented
- # the indentation counter, to help us determine if we
- # should decrement it
- self.indent_detail = []
-
- # the string of whitespace multiplied by the indent
- # counter to produce a line
- self.indentstring = " "
-
- # the stream we are writing to
- self.stream = stream
-
- # current line number
- self.lineno = 1
-
- # a list of lines that represents a buffered "block" of code,
- # which can be later printed relative to an indent level
- self.line_buffer = []
-
- self.in_indent_lines = False
-
- self._reset_multi_line_flags()
-
- # mapping of generated python lines to template
- # source lines
- self.source_map = {}
-
- def _update_lineno(self, num):
- self.lineno += num
-
- def start_source(self, lineno):
- if self.lineno not in self.source_map:
- self.source_map[self.lineno] = lineno
-
- def write_blanks(self, num):
- self.stream.write("\n" * num)
- self._update_lineno(num)
-
- def write_indented_block(self, block):
- """print a line or lines of python which already contain indentation.
-
- The indentation of the total block of lines will be adjusted to that of
- the current indent level."""
- self.in_indent_lines = False
- for l in re.split(r'\r?\n', block):
- self.line_buffer.append(l)
- self._update_lineno(1)
-
- def writelines(self, *lines):
- """print a series of lines of python."""
- for line in lines:
- self.writeline(line)
-
- def writeline(self, line):
- """print a line of python, indenting it according to the current
- indent level.
-
- this also adjusts the indentation counter according to the
- content of the line.
-
- """
-
- if not self.in_indent_lines:
- self._flush_adjusted_lines()
- self.in_indent_lines = True
-
- if (line is None or
- re.match(r"^\s*#",line) or
- re.match(r"^\s*$", line)
- ):
- hastext = False
- else:
- hastext = True
-
- is_comment = line and len(line) and line[0] == '#'
-
- # see if this line should decrease the indentation level
- if (not is_comment and
- (not hastext or self._is_unindentor(line))
- ):
-
- if self.indent > 0:
- self.indent -= 1
- # if the indent_detail stack is empty, the user
- # probably put extra closures - the resulting
- # module won't compile.
- if len(self.indent_detail) == 0:
- raise exceptions.SyntaxException(
- "Too many whitespace closures")
- self.indent_detail.pop()
-
- if line is None:
- return
-
- # write the line
- self.stream.write(self._indent_line(line) + "\n")
- self._update_lineno(len(line.split("\n")))
-
- # see if this line should increase the indentation level.
- # note that a line can both decrease (before printing) and
- # then increase (after printing) the indentation level.
-
- if re.search(r":[ \t]*(?:#.*)?$", line):
- # increment indentation count, and also
- # keep track of what the keyword was that indented us,
- # if it is a python compound statement keyword
- # where we might have to look for an "unindent" keyword
- match = re.match(r"^\s*(if|try|elif|while|for|with)", line)
- if match:
- # it's a "compound" keyword, so we will check for "unindentors"
- indentor = match.group(1)
- self.indent += 1
- self.indent_detail.append(indentor)
- else:
- indentor = None
- # it's not a "compound" keyword, but let's also
- # test for valid Python keywords that might be indenting us,
- # else assume it's a non-indenting line
- m2 = re.match(r"^\s*(def|class|else|elif|except|finally)",
- line)
- if m2:
- self.indent += 1
- self.indent_detail.append(indentor)
-
- def close(self):
- """close this printer, flushing any remaining lines."""
- self._flush_adjusted_lines()
-
- def _is_unindentor(self, line):
- """return true if the given line is an 'unindentor',
- relative to the last 'indent' event received.
-
- """
-
- # no indentation detail has been pushed on; return False
- if len(self.indent_detail) == 0:
- return False
-
- indentor = self.indent_detail[-1]
-
- # the last indent keyword we grabbed is not a
- # compound statement keyword; return False
- if indentor is None:
- return False
-
- # if the current line doesn't have one of the "unindentor" keywords,
- # return False
- match = re.match(r"^\s*(else|elif|except|finally).*\:", line)
- if not match:
- return False
-
- # whitespace matches up, we have a compound indentor,
- # and this line has an unindentor, this
- # is probably good enough
- return True
-
- # should we decide that it's not good enough, here's
- # more stuff to check.
- #keyword = match.group(1)
-
- # match the original indent keyword
- #for crit in [
- # (r'if|elif', r'else|elif'),
- # (r'try', r'except|finally|else'),
- # (r'while|for', r'else'),
- #]:
- # if re.match(crit[0], indentor) and re.match(crit[1], keyword):
- # return True
-
- #return False
-
- def _indent_line(self, line, stripspace=''):
- """indent the given line according to the current indent level.
-
- stripspace is a string of space that will be truncated from the
- start of the line before indenting."""
-
- return re.sub(r"^%s" % stripspace, self.indentstring
- * self.indent, line)
-
- def _reset_multi_line_flags(self):
- """reset the flags which would indicate we are in a backslashed
- or triple-quoted section."""
-
- self.backslashed, self.triplequoted = False, False
-
- def _in_multi_line(self, line):
- """return true if the given line is part of a multi-line block,
- via backslash or triple-quote."""
-
- # we are only looking for explicitly joined lines here, not
- # implicit ones (i.e. brackets, braces etc.). this is just to
- # guard against the possibility of modifying the space inside of
- # a literal multiline string with unfortunately placed
- # whitespace
-
- current_state = (self.backslashed or self.triplequoted)
-
- if re.search(r"\\$", line):
- self.backslashed = True
- else:
- self.backslashed = False
-
- triples = len(re.findall(r"\"\"\"|\'\'\'", line))
- if triples == 1 or triples % 2 != 0:
- self.triplequoted = not self.triplequoted
-
- return current_state
-
- def _flush_adjusted_lines(self):
- stripspace = None
- self._reset_multi_line_flags()
-
- for entry in self.line_buffer:
- if self._in_multi_line(entry):
- self.stream.write(entry + "\n")
- else:
- entry = entry.expandtabs()
- if stripspace is None and re.search(r"^[ \t]*[^# \t]", entry):
- stripspace = re.match(r"^([ \t]*)", entry).group(1)
- self.stream.write(self._indent_line(entry, stripspace) + "\n")
-
- self.line_buffer = []
- self._reset_multi_line_flags()
-
-
-def adjust_whitespace(text):
- """remove the left-whitespace margin of a block of Python code."""
-
- state = [False, False]
- (backslashed, triplequoted) = (0, 1)
-
- def in_multi_line(line):
- start_state = (state[backslashed] or state[triplequoted])
-
- if re.search(r"\\$", line):
- state[backslashed] = True
- else:
- state[backslashed] = False
-
- def match(reg, t):
- m = re.match(reg, t)
- if m:
- return m, t[len(m.group(0)):]
- else:
- return None, t
-
- while line:
- if state[triplequoted]:
- m, line = match(r"%s" % state[triplequoted], line)
- if m:
- state[triplequoted] = False
- else:
- m, line = match(r".*?(?=%s|$)" % state[triplequoted], line)
- else:
- m, line = match(r'#', line)
- if m:
- return start_state
-
- m, line = match(r"\"\"\"|\'\'\'", line)
- if m:
- state[triplequoted] = m.group(0)
- continue
-
- m, line = match(r".*?(?=\"\"\"|\'\'\'|#|$)", line)
-
- return start_state
-
- def _indent_line(line, stripspace=''):
- return re.sub(r"^%s" % stripspace, '', line)
-
- lines = []
- stripspace = None
-
- for line in re.split(r'\r?\n', text):
- if in_multi_line(line):
- lines.append(line)
- else:
- line = line.expandtabs()
- if stripspace is None and re.search(r"^[ \t]*[^# \t]", line):
- stripspace = re.match(r"^([ \t]*)", line).group(1)
- lines.append(_indent_line(line, stripspace))
- return "\n".join(lines)
+++ /dev/null
-# mako/pyparser.py
-# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
-#
-# This module is part of Mako and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Handles parsing of Python code.
-
-Parsing to AST is done via _ast on Python > 2.5, otherwise the compiler
-module is used.
-"""
-
-from mako import exceptions, util, compat
-from mako.compat import arg_stringname
-import operator
-
-if compat.py3k:
- # words that cannot be assigned to (notably
- # smaller than the total keys in __builtins__)
- reserved = set(['True', 'False', 'None', 'print'])
-
- # the "id" attribute on a function node
- arg_id = operator.attrgetter('arg')
-else:
- # words that cannot be assigned to (notably
- # smaller than the total keys in __builtins__)
- reserved = set(['True', 'False', 'None'])
-
- # the "id" attribute on a function node
- arg_id = operator.attrgetter('id')
-
-import _ast
-util.restore__ast(_ast)
-from mako import _ast_util
-
-
-def parse(code, mode='exec', **exception_kwargs):
- """Parse an expression into AST"""
-
- try:
- return _ast_util.parse(code, '<unknown>', mode)
- except Exception:
- raise exceptions.SyntaxException(
- "(%s) %s (%r)" % (
- compat.exception_as().__class__.__name__,
- compat.exception_as(),
- code[0:50]
- ), **exception_kwargs)
-
-
-class FindIdentifiers(_ast_util.NodeVisitor):
-
- def __init__(self, listener, **exception_kwargs):
- self.in_function = False
- self.in_assign_targets = False
- self.local_ident_stack = set()
- self.listener = listener
- self.exception_kwargs = exception_kwargs
-
- def _add_declared(self, name):
- if not self.in_function:
- self.listener.declared_identifiers.add(name)
- else:
- self.local_ident_stack.add(name)
-
- def visit_ClassDef(self, node):
- self._add_declared(node.name)
-
- def visit_Assign(self, node):
-
- # flip around the visiting of Assign so the expression gets
- # evaluated first, in the case of a clause like "x=x+5" (x
- # is undeclared)
-
- self.visit(node.value)
- in_a = self.in_assign_targets
- self.in_assign_targets = True
- for n in node.targets:
- self.visit(n)
- self.in_assign_targets = in_a
-
- if compat.py3k:
-
- # ExceptHandler is in Python 2, but this block only works in
- # Python 3 (and is required there)
-
- def visit_ExceptHandler(self, node):
- if node.name is not None:
- self._add_declared(node.name)
- if node.type is not None:
- self.visit(node.type)
- for statement in node.body:
- self.visit(statement)
-
- def visit_Lambda(self, node, *args):
- self._visit_function(node, True)
-
- def visit_FunctionDef(self, node):
- self._add_declared(node.name)
- self._visit_function(node, False)
-
- def _expand_tuples(self, args):
- for arg in args:
- if isinstance(arg, _ast.Tuple):
- for n in arg.elts:
- yield n
- else:
- yield arg
-
- def _visit_function(self, node, islambda):
-
- # push function state onto stack. don't log any more
- # identifiers as "declared" until outside of the function,
- # but keep logging identifiers as "undeclared". track
- # argument names in each function header so they aren't
- # counted as "undeclared"
-
- inf = self.in_function
- self.in_function = True
-
- local_ident_stack = self.local_ident_stack
- self.local_ident_stack = local_ident_stack.union([
- arg_id(arg) for arg in self._expand_tuples(node.args.args)
- ])
- if islambda:
- self.visit(node.body)
- else:
- for n in node.body:
- self.visit(n)
- self.in_function = inf
- self.local_ident_stack = local_ident_stack
-
- def visit_For(self, node):
-
- # flip around visit
-
- self.visit(node.iter)
- self.visit(node.target)
- for statement in node.body:
- self.visit(statement)
- for statement in node.orelse:
- self.visit(statement)
-
- def visit_Name(self, node):
- if isinstance(node.ctx, _ast.Store):
- # this is equivalent to visit_AssName in
- # compiler
- self._add_declared(node.id)
- elif node.id not in reserved and node.id \
- not in self.listener.declared_identifiers and node.id \
- not in self.local_ident_stack:
- self.listener.undeclared_identifiers.add(node.id)
-
- def visit_Import(self, node):
- for name in node.names:
- if name.asname is not None:
- self._add_declared(name.asname)
- else:
- self._add_declared(name.name.split('.')[0])
-
- def visit_ImportFrom(self, node):
- for name in node.names:
- if name.asname is not None:
- self._add_declared(name.asname)
- else:
- if name.name == '*':
- raise exceptions.CompileException(
- "'import *' is not supported, since all identifier "
- "names must be explicitly declared. Please use the "
- "form 'from <modulename> import <name1>, <name2>, "
- "...' instead.", **self.exception_kwargs)
- self._add_declared(name.name)
-
-
-class FindTuple(_ast_util.NodeVisitor):
-
- def __init__(self, listener, code_factory, **exception_kwargs):
- self.listener = listener
- self.exception_kwargs = exception_kwargs
- self.code_factory = code_factory
-
- def visit_Tuple(self, node):
- for n in node.elts:
- p = self.code_factory(n, **self.exception_kwargs)
- self.listener.codeargs.append(p)
- self.listener.args.append(ExpressionGenerator(n).value())
- self.listener.declared_identifiers = \
- self.listener.declared_identifiers.union(
- p.declared_identifiers)
- self.listener.undeclared_identifiers = \
- self.listener.undeclared_identifiers.union(
- p.undeclared_identifiers)
-
-
-class ParseFunc(_ast_util.NodeVisitor):
-
- def __init__(self, listener, **exception_kwargs):
- self.listener = listener
- self.exception_kwargs = exception_kwargs
-
- def visit_FunctionDef(self, node):
- self.listener.funcname = node.name
-
- argnames = [arg_id(arg) for arg in node.args.args]
- if node.args.vararg:
- argnames.append(arg_stringname(node.args.vararg))
-
- if compat.py2k:
- # kw-only args don't exist in Python 2
- kwargnames = []
- else:
- kwargnames = [arg_id(arg) for arg in node.args.kwonlyargs]
- if node.args.kwarg:
- kwargnames.append(arg_stringname(node.args.kwarg))
- self.listener.argnames = argnames
- self.listener.defaults = node.args.defaults # ast
- self.listener.kwargnames = kwargnames
- if compat.py2k:
- self.listener.kwdefaults = []
- else:
- self.listener.kwdefaults = node.args.kw_defaults
- self.listener.varargs = node.args.vararg
- self.listener.kwargs = node.args.kwarg
-
-class ExpressionGenerator(object):
-
- def __init__(self, astnode):
- self.generator = _ast_util.SourceGenerator(' ' * 4)
- self.generator.visit(astnode)
-
- def value(self):
- return ''.join(self.generator.result)
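
FindIdentifiers above walks a parsed expression and sorts names into declared and undeclared sets on a listener object (normally one of the mako.ast wrappers). A rough sketch of that contract, with a hand-rolled listener standing in for the real one:

    from mako.pyparser import parse, FindIdentifiers

    class Listener(object):                       # stand-in for mako.ast.PythonCode
        def __init__(self):
            self.declared_identifiers = set()
            self.undeclared_identifiers = set()

    listener = Listener()
    FindIdentifiers(listener).visit(parse("x = y + 1", "exec"))
    # listener.declared_identifiers   -> {'x'}
    # listener.undeclared_identifiers -> {'y'}
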
+++ /dev/null
-# mako/runtime.py
-# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
-#
-# This module is part of Mako and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""provides runtime services for templates, including Context,
-Namespace, and various helper functions."""
-
-from mako import exceptions, util, compat
-from mako.compat import compat_builtins
-import sys
-
-
-class Context(object):
- """Provides runtime namespace, output buffer, and various
- callstacks for templates.
-
- See :ref:`runtime_toplevel` for detail on the usage of
- :class:`.Context`.
-
- """
-
- def __init__(self, buffer, **data):
- self._buffer_stack = [buffer]
-
- self._data = data
-
- self._kwargs = data.copy()
- self._with_template = None
- self._outputting_as_unicode = None
- self.namespaces = {}
-
- # "capture" function which proxies to the
- # generic "capture" function
- self._data['capture'] = compat.partial(capture, self)
-
- # "caller" stack used by def calls with content
- self.caller_stack = self._data['caller'] = CallerStack()
-
- def _set_with_template(self, t):
- self._with_template = t
- illegal_names = t.reserved_names.intersection(self._data)
- if illegal_names:
- raise exceptions.NameConflictError(
- "Reserved words passed to render(): %s" %
- ", ".join(illegal_names))
-
- @property
- def lookup(self):
- """Return the :class:`.TemplateLookup` associated
- with this :class:`.Context`.
-
- """
- return self._with_template.lookup
-
- @property
- def kwargs(self):
- """Return the dictionary of top level keyword arguments associated
- with this :class:`.Context`.
-
- This dictionary only includes the top-level arguments passed to
- :meth:`.Template.render`. It does not include names produced within
- the template execution such as local variable names or special names
- such as ``self``, ``next``, etc.
-
- The purpose of this dictionary is primarily for the case that
- a :class:`.Template` accepts arguments via its ``<%page>`` tag,
- which are normally expected to be passed via :meth:`.Template.render`,
- except the template is being called in an inheritance context,
- using the ``body()`` method. :attr:`.Context.kwargs` can then be
- used to propagate these arguments to the inheriting template::
-
- ${next.body(**context.kwargs)}
-
- """
- return self._kwargs.copy()
-
- def push_caller(self, caller):
- """Push a ``caller`` callable onto the callstack for
- this :class:`.Context`."""
-
- self.caller_stack.append(caller)
-
- def pop_caller(self):
- """Pop a ``caller`` callable onto the callstack for this
- :class:`.Context`."""
-
- del self.caller_stack[-1]
-
- def keys(self):
- """Return a list of all names established in this :class:`.Context`."""
-
- return list(self._data.keys())
-
- def __getitem__(self, key):
- if key in self._data:
- return self._data[key]
- else:
- return compat_builtins.__dict__[key]
-
- def _push_writer(self):
- """push a capturing buffer onto this Context and return
- the new writer function."""
-
- buf = util.FastEncodingBuffer()
- self._buffer_stack.append(buf)
- return buf.write
-
- def _pop_buffer_and_writer(self):
- """pop the most recent capturing buffer from this Context
- and return the current writer after the pop.
-
- """
-
- buf = self._buffer_stack.pop()
- return buf, self._buffer_stack[-1].write
-
- def _push_buffer(self):
- """push a capturing buffer onto this Context."""
-
- self._push_writer()
-
- def _pop_buffer(self):
- """pop the most recent capturing buffer from this Context."""
-
- return self._buffer_stack.pop()
-
- def get(self, key, default=None):
- """Return a value from this :class:`.Context`."""
-
- return self._data.get(key, compat_builtins.__dict__.get(key, default))
-
- def write(self, string):
- """Write a string to this :class:`.Context` object's
- underlying output buffer."""
-
- self._buffer_stack[-1].write(string)
-
- def writer(self):
- """Return the current writer function."""
-
- return self._buffer_stack[-1].write
-
- def _copy(self):
- c = Context.__new__(Context)
- c._buffer_stack = self._buffer_stack
- c._data = self._data.copy()
- c._kwargs = self._kwargs
- c._with_template = self._with_template
- c._outputting_as_unicode = self._outputting_as_unicode
- c.namespaces = self.namespaces
- c.caller_stack = self.caller_stack
- return c
-
- def _locals(self, d):
- """Create a new :class:`.Context` with a copy of this
- :class:`.Context`'s current state,
- updated with the given dictionary.
-
- The :attr:`.Context.kwargs` collection remains
- unaffected.
-
-
- """
-
- if not d:
- return self
- c = self._copy()
- c._data.update(d)
- return c
-
- def _clean_inheritance_tokens(self):
- """create a new copy of this :class:`.Context`. with
- tokens related to inheritance state removed."""
-
- c = self._copy()
- x = c._data
- x.pop('self', None)
- x.pop('parent', None)
- x.pop('next', None)
- return c
-
-class CallerStack(list):
- def __init__(self):
- self.nextcaller = None
-
- def __nonzero__(self):
- return self.__bool__()
-
- def __bool__(self):
- return len(self) and self._get_caller() and True or False
-
- def _get_caller(self):
- # this method can be removed once
- # codegen MAGIC_NUMBER moves past 7
- return self[-1]
-
- def __getattr__(self, key):
- return getattr(self._get_caller(), key)
-
- def _push_frame(self):
- frame = self.nextcaller or None
- self.append(frame)
- self.nextcaller = None
- return frame
-
- def _pop_frame(self):
- self.nextcaller = self.pop()
-
-
-class Undefined(object):
- """Represents an undefined value in a template.
-
- All template modules have a constant value
- ``UNDEFINED`` present which is an instance of this
- object.
-
- """
- def __str__(self):
- raise NameError("Undefined")
-
- def __nonzero__(self):
- return self.__bool__()
-
- def __bool__(self):
- return False
-
-UNDEFINED = Undefined()
-
-class LoopStack(object):
- """a stack for LoopContexts that implements the context manager protocol
- to automatically pop off the top of the stack on context exit
- """
-
- def __init__(self):
- self.stack = []
-
- def _enter(self, iterable):
- self._push(iterable)
- return self._top
-
- def _exit(self):
- self._pop()
- return self._top
-
- @property
- def _top(self):
- if self.stack:
- return self.stack[-1]
- else:
- return self
-
- def _pop(self):
- return self.stack.pop()
-
- def _push(self, iterable):
- new = LoopContext(iterable)
- if self.stack:
- new.parent = self.stack[-1]
- return self.stack.append(new)
-
- def __getattr__(self, key):
- raise exceptions.RuntimeException("No loop context is established")
-
- def __iter__(self):
- return iter(self._top)
-
-
-class LoopContext(object):
- """A magic loop variable.
- Automatically accessible in any ``% for`` block.
-
- See the section :ref:`loop_context` for usage
- notes.
-
- :attr:`parent` -> :class:`.LoopContext` or ``None``
- The parent loop, if one exists.
- :attr:`index` -> `int`
- The 0-based iteration count.
- :attr:`reverse_index` -> `int`
- The number of iterations remaining.
- :attr:`first` -> `bool`
- ``True`` on the first iteration, ``False`` otherwise.
- :attr:`last` -> `bool`
- ``True`` on the last iteration, ``False`` otherwise.
- :attr:`even` -> `bool`
- ``True`` when ``index`` is even.
- :attr:`odd` -> `bool`
- ``True`` when ``index`` is odd.
- """
-
- def __init__(self, iterable):
- self._iterable = iterable
- self.index = 0
- self.parent = None
-
- def __iter__(self):
- for i in self._iterable:
- yield i
- self.index += 1
-
- @util.memoized_instancemethod
- def __len__(self):
- return len(self._iterable)
-
- @property
- def reverse_index(self):
- return len(self) - self.index - 1
-
- @property
- def first(self):
- return self.index == 0
-
- @property
- def last(self):
- return self.index == len(self) - 1
-
- @property
- def even(self):
- return not self.odd
-
- @property
- def odd(self):
- return bool(self.index % 2)
-
- def cycle(self, *values):
- """Cycle through values as the loop progresses.
- """
- if not values:
- raise ValueError("You must provide values to cycle through")
- return values[self.index % len(values)]
-
-
-class _NSAttr(object):
- def __init__(self, parent):
- self.__parent = parent
- def __getattr__(self, key):
- ns = self.__parent
- while ns:
- if hasattr(ns.module, key):
- return getattr(ns.module, key)
- else:
- ns = ns.inherits
- raise AttributeError(key)
-
-class Namespace(object):
- """Provides access to collections of rendering methods, which
- can be local, from other templates, or from imported modules.
-
- To access a particular rendering method referenced by a
- :class:`.Namespace`, use plain attribute access:
-
- .. sourcecode:: mako
-
- ${some_namespace.foo(x, y, z)}
-
- :class:`.Namespace` also contains several built-in attributes
- described here.
-
- """
-
- def __init__(self, name, context,
- callables=None, inherits=None,
- populate_self=True, calling_uri=None):
- self.name = name
- self.context = context
- self.inherits = inherits
- if callables is not None:
- self.callables = dict([(c.__name__, c) for c in callables])
-
- callables = ()
-
- module = None
- """The Python module referenced by this :class:`.Namespace`.
-
- If the namespace references a :class:`.Template`, then
- this module is the equivalent of ``template.module``,
- i.e. the generated module for the template.
-
- """
-
- template = None
- """The :class:`.Template` object referenced by this
- :class:`.Namespace`, if any.
-
- """
-
- context = None
- """The :class:`.Context` object for this :class:`.Namespace`.
-
- Namespaces are often created with copies of contexts that
- contain slightly different data, particularly in inheritance
- scenarios. Using the :class:`.Context` off of a :class:`.Namespace` one
- can traverse an entire chain of templates that inherit from
- one-another.
-
- """
-
- filename = None
- """The path of the filesystem file used for this
- :class:`.Namespace`'s module or template.
-
- If this is a pure module-based
- :class:`.Namespace`, this evaluates to ``module.__file__``. If a
- template-based namespace, it evaluates to the original
- template file location.
-
- """
-
- uri = None
- """The URI for this :class:`.Namespace`'s template.
-
- I.e. whatever was sent to :meth:`.TemplateLookup.get_template()`.
-
- This is the equivalent of :attr:`.Template.uri`.
-
- """
-
- _templateuri = None
-
- @util.memoized_property
- def attr(self):
- """Access module level attributes by name.
-
- This accessor allows templates to supply "scalar"
- attributes which are particularly handy in inheritance
- relationships.
-
- .. seealso::
-
- :ref:`inheritance_attr`
-
- :ref:`namespace_attr_for_includes`
-
- """
- return _NSAttr(self)
-
- def get_namespace(self, uri):
- """Return a :class:`.Namespace` corresponding to the given ``uri``.
-
- If the given ``uri`` is a relative URI (i.e. it does not
- contain a leading slash ``/``), the ``uri`` is adjusted to
- be relative to the ``uri`` of the namespace itself. This
- method is therefore mostly useful off of the built-in
- ``local`` namespace, described in :ref:`namespace_local`.
-
- In most cases, a template wouldn't need this function, and
- should instead use the ``<%namespace>`` tag to load
- namespaces. However, since all ``<%namespace>`` tags are
- evaluated before the body of a template ever runs,
- this method can be used to locate namespaces using
- expressions that were generated within the body code of
- the template, or to conditionally use a particular
- namespace.
-
- """
- key = (self, uri)
- if key in self.context.namespaces:
- return self.context.namespaces[key]
- else:
- ns = TemplateNamespace(uri, self.context._copy(),
- templateuri=uri,
- calling_uri=self._templateuri)
- self.context.namespaces[key] = ns
- return ns
-
- def get_template(self, uri):
- """Return a :class:`.Template` from the given ``uri``.
-
- The ``uri`` resolution is relative to the ``uri`` of this
- :class:`.Namespace` object's :class:`.Template`.
-
- """
- return _lookup_template(self.context, uri, self._templateuri)
-
- def get_cached(self, key, **kwargs):
- """Return a value from the :class:`.Cache` referenced by this
- :class:`.Namespace` object's :class:`.Template`.
-
- The advantage to this method versus direct access to the
- :class:`.Cache` is that the configuration parameters
- declared in ``<%page>`` take effect here, thereby calling
- up the same configured backend as that configured
- by ``<%page>``.
-
- """
-
- return self.cache.get(key, **kwargs)
-
- @property
- def cache(self):
- """Return the :class:`.Cache` object referenced
- by this :class:`.Namespace` object's
- :class:`.Template`.
-
- """
- return self.template.cache
-
- def include_file(self, uri, **kwargs):
- """Include a file at the given ``uri``."""
-
- _include_file(self.context, uri, self._templateuri, **kwargs)
-
- def _populate(self, d, l):
- for ident in l:
- if ident == '*':
- for (k, v) in self._get_star():
- d[k] = v
- else:
- d[ident] = getattr(self, ident)
-
- def _get_star(self):
- if self.callables:
- for key in self.callables:
- yield (key, self.callables[key])
-
- def __getattr__(self, key):
- if key in self.callables:
- val = self.callables[key]
- elif self.inherits:
- val = getattr(self.inherits, key)
- else:
- raise AttributeError(
- "Namespace '%s' has no member '%s'" %
- (self.name, key))
- setattr(self, key, val)
- return val
-
-class TemplateNamespace(Namespace):
- """A :class:`.Namespace` specific to a :class:`.Template` instance."""
-
- def __init__(self, name, context, template=None, templateuri=None,
- callables=None, inherits=None,
- populate_self=True, calling_uri=None):
- self.name = name
- self.context = context
- self.inherits = inherits
- if callables is not None:
- self.callables = dict([(c.__name__, c) for c in callables])
-
- if templateuri is not None:
- self.template = _lookup_template(context, templateuri,
- calling_uri)
- self._templateuri = self.template.module._template_uri
- elif template is not None:
- self.template = template
- self._templateuri = template.module._template_uri
- else:
- raise TypeError("'template' argument is required.")
-
- if populate_self:
- lclcallable, lclcontext = \
- _populate_self_namespace(context, self.template,
- self_ns=self)
-
- @property
- def module(self):
- """The Python module referenced by this :class:`.Namespace`.
-
- If the namespace references a :class:`.Template`, then
- this module is the equivalent of ``template.module``,
- i.e. the generated module for the template.
-
- """
- return self.template.module
-
- @property
- def filename(self):
- """The path of the filesystem file used for this
- :class:`.Namespace`'s module or template.
- """
- return self.template.filename
-
- @property
- def uri(self):
- """The URI for this :class:`.Namespace`'s template.
-
- I.e. whatever was sent to :meth:`.TemplateLookup.get_template()`.
-
- This is the equivalent of :attr:`.Template.uri`.
-
- """
- return self.template.uri
-
- def _get_star(self):
- if self.callables:
- for key in self.callables:
- yield (key, self.callables[key])
- def get(key):
- callable_ = self.template._get_def_callable(key)
- return compat.partial(callable_, self.context)
- for k in self.template.module._exports:
- yield (k, get(k))
-
- def __getattr__(self, key):
- if key in self.callables:
- val = self.callables[key]
- elif self.template.has_def(key):
- callable_ = self.template._get_def_callable(key)
- val = compat.partial(callable_, self.context)
- elif self.inherits:
- val = getattr(self.inherits, key)
-
- else:
- raise AttributeError(
- "Namespace '%s' has no member '%s'" %
- (self.name, key))
- setattr(self, key, val)
- return val
-
-class ModuleNamespace(Namespace):
- """A :class:`.Namespace` specific to a Python module instance."""
-
- def __init__(self, name, context, module,
- callables=None, inherits=None,
- populate_self=True, calling_uri=None):
- self.name = name
- self.context = context
- self.inherits = inherits
- if callables is not None:
- self.callables = dict([(c.__name__, c) for c in callables])
-
- mod = __import__(module)
- for token in module.split('.')[1:]:
- mod = getattr(mod, token)
- self.module = mod
-
- @property
- def filename(self):
- """The path of the filesystem file used for this
- :class:`.Namespace`'s module or template.
- """
- return self.module.__file__
-
- def _get_star(self):
- if self.callables:
- for key in self.callables:
- yield (key, self.callables[key])
- for key in dir(self.module):
- if key[0] != '_':
- callable_ = getattr(self.module, key)
- if compat.callable(callable_):
- yield key, compat.partial(callable_, self.context)
-
-
- def __getattr__(self, key):
- if key in self.callables:
- val = self.callables[key]
- elif hasattr(self.module, key):
- callable_ = getattr(self.module, key)
- val = compat.partial(callable_, self.context)
- elif self.inherits:
- val = getattr(self.inherits, key)
- else:
- raise AttributeError(
- "Namespace '%s' has no member '%s'" %
- (self.name, key))
- setattr(self, key, val)
- return val
-
-def supports_caller(func):
- """Apply a caller_stack compatibility decorator to a plain
- Python function.
-
- See the example in :ref:`namespaces_python_modules`.
-
- """
-
- def wrap_stackframe(context, *args, **kwargs):
- context.caller_stack._push_frame()
- try:
- return func(context, *args, **kwargs)
- finally:
- context.caller_stack._pop_frame()
- return wrap_stackframe
-
-def capture(context, callable_, *args, **kwargs):
- """Execute the given template def, capturing the output into
- a buffer.
-
- See the example in :ref:`namespaces_python_modules`.
-
- """
-
- if not compat.callable(callable_):
- raise exceptions.RuntimeException(
- "capture() function expects a callable as "
- "its argument (i.e. capture(func, *args, **kwargs))"
- )
- context._push_buffer()
- try:
- callable_(*args, **kwargs)
- finally:
- buf = context._pop_buffer()
- return buf.getvalue()
-
-def _decorate_toplevel(fn):
- def decorate_render(render_fn):
- def go(context, *args, **kw):
- def y(*args, **kw):
- return render_fn(context, *args, **kw)
- try:
- y.__name__ = render_fn.__name__[7:]
- except TypeError:
- # < Python 2.4
- pass
- return fn(y)(context, *args, **kw)
- return go
- return decorate_render
-
-def _decorate_inline(context, fn):
- def decorate_render(render_fn):
- dec = fn(render_fn)
- def go(*args, **kw):
- return dec(context, *args, **kw)
- return go
- return decorate_render
-
-def _include_file(context, uri, calling_uri, **kwargs):
- """locate the template from the given uri and include it in
- the current output."""
-
- template = _lookup_template(context, uri, calling_uri)
- (callable_, ctx) = _populate_self_namespace(
- context._clean_inheritance_tokens(),
- template)
- callable_(ctx, **_kwargs_for_include(callable_, context._data, **kwargs))
-
-def _inherit_from(context, uri, calling_uri):
- """called by the _inherit method in template modules to set
- up the inheritance chain at the start of a template's
- execution."""
-
- if uri is None:
- return None
- template = _lookup_template(context, uri, calling_uri)
- self_ns = context['self']
- ih = self_ns
- while ih.inherits is not None:
- ih = ih.inherits
- lclcontext = context._locals({'next': ih})
- ih.inherits = TemplateNamespace("self:%s" % template.uri,
- lclcontext,
- template=template,
- populate_self=False)
- context._data['parent'] = lclcontext._data['local'] = ih.inherits
- callable_ = getattr(template.module, '_mako_inherit', None)
- if callable_ is not None:
- ret = callable_(template, lclcontext)
- if ret:
- return ret
-
- gen_ns = getattr(template.module, '_mako_generate_namespaces', None)
- if gen_ns is not None:
- gen_ns(context)
- return (template.callable_, lclcontext)
-
-def _lookup_template(context, uri, relativeto):
- lookup = context._with_template.lookup
- if lookup is None:
- raise exceptions.TemplateLookupException(
- "Template '%s' has no TemplateLookup associated" %
- context._with_template.uri)
- uri = lookup.adjust_uri(uri, relativeto)
- try:
- return lookup.get_template(uri)
- except exceptions.TopLevelLookupException:
- raise exceptions.TemplateLookupException(str(compat.exception_as()))
-
-def _populate_self_namespace(context, template, self_ns=None):
- if self_ns is None:
- self_ns = TemplateNamespace('self:%s' % template.uri,
- context, template=template,
- populate_self=False)
- context._data['self'] = context._data['local'] = self_ns
- if hasattr(template.module, '_mako_inherit'):
- ret = template.module._mako_inherit(template, context)
- if ret:
- return ret
- return (template.callable_, context)
-
-def _render(template, callable_, args, data, as_unicode=False):
- """create a Context and return the string
- output of the given template and template callable."""
-
- if as_unicode:
- buf = util.FastEncodingBuffer(as_unicode=True)
- elif template.bytestring_passthrough:
- buf = compat.StringIO()
- else:
- buf = util.FastEncodingBuffer(
- as_unicode=as_unicode,
- encoding=template.output_encoding,
- errors=template.encoding_errors)
- context = Context(buf, **data)
- context._outputting_as_unicode = as_unicode
- context._set_with_template(template)
-
- _render_context(template, callable_, context, *args,
- **_kwargs_for_callable(callable_, data))
- return context._pop_buffer().getvalue()
-
-def _kwargs_for_callable(callable_, data):
- argspec = compat.inspect_func_args(callable_)
- # for normal pages, **pageargs is usually present
- if argspec[2]:
- return data
-
- # for rendering defs from the top level, figure out the args
- namedargs = argspec[0] + [v for v in argspec[1:3] if v is not None]
- kwargs = {}
- for arg in namedargs:
- if arg != 'context' and arg in data and arg not in kwargs:
- kwargs[arg] = data[arg]
- return kwargs
-
-def _kwargs_for_include(callable_, data, **kwargs):
- argspec = compat.inspect_func_args(callable_)
- namedargs = argspec[0] + [v for v in argspec[1:3] if v is not None]
- for arg in namedargs:
- if arg != 'context' and arg in data and arg not in kwargs:
- kwargs[arg] = data[arg]
- return kwargs
-
-def _render_context(tmpl, callable_, context, *args, **kwargs):
- import mako.template as template
- # create polymorphic 'self' namespace for this
- # template with possibly updated context
- if not isinstance(tmpl, template.DefTemplate):
- # if main render method, call from the base of the inheritance stack
- (inherit, lclcontext) = _populate_self_namespace(context, tmpl)
- _exec_template(inherit, lclcontext, args=args, kwargs=kwargs)
- else:
- # otherwise, call the actual rendering method specified
- (inherit, lclcontext) = _populate_self_namespace(context, tmpl.parent)
- _exec_template(callable_, context, args=args, kwargs=kwargs)
-
-def _exec_template(callable_, context, args=None, kwargs=None):
- """execute a rendering callable given the callable, a
- Context, and optional explicit arguments
-
- the contextual Template will be located if it exists, and
- the error handling options specified on that Template will
- be interpreted here.
- """
- template = context._with_template
- if template is not None and \
- (template.format_exceptions or template.error_handler):
- try:
- callable_(context, *args, **kwargs)
- except Exception:
- _render_error(template, context, compat.exception_as())
- except:
- e = sys.exc_info()[0]
- _render_error(template, context, e)
- else:
- callable_(context, *args, **kwargs)
-
-def _render_error(template, context, error):
- if template.error_handler:
- result = template.error_handler(context, error)
- if not result:
- compat.reraise(*sys.exc_info())
- else:
- error_template = exceptions.html_error_template()
- if context._outputting_as_unicode:
- context._buffer_stack[:] = [
- util.FastEncodingBuffer(as_unicode=True)]
- else:
- context._buffer_stack[:] = [util.FastEncodingBuffer(
- error_template.output_encoding,
- error_template.encoding_errors)]
-
- context._set_with_template(error_template)
- error_template.render_context(context, error=error)
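
The LoopContext documented above is what templates see as the magic ``loop`` name inside ``% for`` blocks (when enable_loop is on, the default). A small usage sketch against the public Template API; the names list and template text are purely illustrative:

    from mako.template import Template

    t = Template(
        "% for name in names:\n"
        "${loop.index}/${loop.reverse_index} ${name}${' (last)' if loop.last else ''}\n"
        "% endfor\n")
    print(t.render(names=["swr", "llvmpipe"]))
    # 0/1 swr
    # 1/0 llvmpipe (last)
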
+++ /dev/null
-# mako/template.py
-# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
-#
-# This module is part of Mako and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Provides the Template class, a facade for parsing, generating and executing
-template strings, as well as template runtime operations."""
-
-from mako.lexer import Lexer
-from mako import runtime, util, exceptions, codegen, cache, compat
-import os
-import re
-import shutil
-import stat
-import sys
-import tempfile
-import types
-import weakref
-
-
-class Template(object):
- """Represents a compiled template.
-
- :class:`.Template` includes a reference to the original
- template source (via the :attr:`.source` attribute)
- as well as the source code of the
- generated Python module (i.e. the :attr:`.code` attribute),
- as well as a reference to an actual Python module.
-
- :class:`.Template` is constructed using either a literal string
- representing the template text, or a filename representing a filesystem
- path to a source file.
-
- :param text: textual template source. This argument is mutually
- exclusive versus the ``filename`` parameter.
-
- :param filename: filename of the source template. This argument is
- mutually exclusive versus the ``text`` parameter.
-
- :param buffer_filters: string list of filters to be applied
- to the output of ``%def``\ s which are buffered, cached, or otherwise
- filtered, after all filters
- defined with the ``%def`` itself have been applied. Allows the
- creation of default expression filters that let the output
- of return-valued ``%def``\ s "opt out" of that filtering via
- passing special attributes or objects.
-
- :param bytestring_passthrough: When ``True``, and ``output_encoding`` is
- set to ``None``, and :meth:`.Template.render` is used to render,
- the `StringIO` or `cStringIO` buffer will be used instead of the
- default "fast" buffer. This allows raw bytestrings in the
- output stream, such as in expressions, to pass straight
- through to the buffer. This flag is forced
- to ``True`` if ``disable_unicode`` is also configured.
-
- .. versionadded:: 0.4
- Added to provide the same behavior as that of the previous series.
-
- :param cache_args: Dictionary of cache configuration arguments that
- will be passed to the :class:`.CacheImpl`. See :ref:`caching_toplevel`.
-
- :param cache_dir:
-
- .. deprecated:: 0.6
- Use the ``'dir'`` argument in the ``cache_args`` dictionary.
- See :ref:`caching_toplevel`.
-
- :param cache_enabled: Boolean flag which enables caching of this
- template. See :ref:`caching_toplevel`.
-
- :param cache_impl: String name of a :class:`.CacheImpl` caching
- implementation to use. Defaults to ``'beaker'``.
-
- :param cache_type:
-
- .. deprecated:: 0.6
- Use the ``'type'`` argument in the ``cache_args`` dictionary.
- See :ref:`caching_toplevel`.
-
- :param cache_url:
-
- .. deprecated:: 0.6
- Use the ``'url'`` argument in the ``cache_args`` dictionary.
- See :ref:`caching_toplevel`.
-
- :param default_filters: List of string filter names that will
- be applied to all expressions. See :ref:`filtering_default_filters`.
-
- :param disable_unicode: Disables all awareness of Python Unicode
- objects. See :ref:`unicode_disabled`.
-
- :param enable_loop: When ``True``, enable the ``loop`` context variable.
- This can be set to ``False`` to support templates that may
- be making usage of the name "``loop``". Individual templates can
- re-enable the "loop" context by placing the directive
- ``enable_loop="True"`` inside the ``<%page>`` tag -- see
- :ref:`migrating_loop`.
-
- :param encoding_errors: Error parameter passed to ``encode()`` when
- string encoding is performed. See :ref:`usage_unicode`.
-
- :param error_handler: Python callable which is called whenever
- compile or runtime exceptions occur. The callable is passed
- the current context as well as the exception. If the
- callable returns ``True``, the exception is considered to
- be handled, else it is re-raised after the function
- completes. Is used to provide custom error-rendering
- functions.
-
- :param format_exceptions: if ``True``, exceptions which occur during
- the render phase of this template will be caught and
- formatted into an HTML error page, which then becomes the
- rendered result of the :meth:`.render` call. Otherwise,
- runtime exceptions are propagated outwards.
-
- :param imports: String list of Python statements, typically individual
- "import" lines, which will be placed into the module level
- preamble of all generated Python modules. See the example
- in :ref:`filtering_default_filters`.
-
- :param future_imports: String list of names to import from `__future__`.
- These will be concatenated into a comma-separated string and inserted
- into the beginning of the template, e.g. ``future_imports=['FOO',
- 'BAR']`` results in ``from __future__ import FOO, BAR``. If you're
- interested in using features like the new division operator, you must
- use future_imports to convey that to the renderer, as otherwise the
- import will not appear as the first executed statement in the generated
- code and will therefore not have the desired effect.
-
- :param input_encoding: Encoding of the template's source code. Can
- be used in lieu of the coding comment. See
- :ref:`usage_unicode` as well as :ref:`unicode_toplevel` for
- details on source encoding.
-
- :param lookup: a :class:`.TemplateLookup` instance that will be used
- for all file lookups via the ``<%namespace>``,
- ``<%include>``, and ``<%inherit>`` tags. See
- :ref:`usage_templatelookup`.
-
- :param module_directory: Filesystem location where generated
- Python module files will be placed.
-
- :param module_filename: Overrides the filename of the generated
- Python module file. For advanced usage only.
-
- :param module_writer: A callable which overrides how the Python
- module is written entirely. The callable is passed the
- encoded source content of the module and the destination
- path to be written to. The default behavior of module writing
- uses a tempfile in conjunction with a file move in order
- to make the operation atomic. So a user-defined module
- writing function that mimics the default behavior would be:
-
- .. sourcecode:: python
-
- import tempfile
- import os
- import shutil
-
- def module_writer(source, outputpath):
- (dest, name) = \\
- tempfile.mkstemp(
- dir=os.path.dirname(outputpath)
- )
-
- os.write(dest, source)
- os.close(dest)
- shutil.move(name, outputpath)
-
- from mako.template import Template
- mytemplate = Template(
- filename="index.html",
- module_directory="/path/to/modules",
- module_writer=module_writer
- )
-
- The function is provided for unusual configurations where
- certain platform-specific permissions or other special
- steps are needed.
-
- :param output_encoding: The encoding to use when :meth:`.render`
- is called.
- See :ref:`usage_unicode` as well as :ref:`unicode_toplevel`.
-
- :param preprocessor: Python callable which will be passed
- the full template source before it is parsed. The return
- result of the callable will be used as the template source
- code.
-
- :param lexer_cls: A :class:`.Lexer` class used to parse
- the template. The :class:`.Lexer` class is used by
- default.
-
- .. versionadded:: 0.7.4
-
- :param strict_undefined: Replaces the automatic usage of
- ``UNDEFINED`` for any undeclared variables not located in
- the :class:`.Context` with an immediate raise of
- ``NameError``. The advantage is immediate reporting of
- missing variables, with the offending name included in the error.
-
- .. versionadded:: 0.3.6
-
- :param uri: string URI or other identifier for this template.
- If not provided, the ``uri`` is generated from the filesystem
- path, or from the in-memory identity of a non-file-based
- template. The primary usage of the ``uri`` is to provide a key
- within :class:`.TemplateLookup`, as well as to generate the
- file path of the generated Python module file, if
- ``module_directory`` is specified.
-
- """
-
- lexer_cls = Lexer
-
- def __init__(self,
- text=None,
- filename=None,
- uri=None,
- format_exceptions=False,
- error_handler=None,
- lookup=None,
- output_encoding=None,
- encoding_errors='strict',
- module_directory=None,
- cache_args=None,
- cache_impl='beaker',
- cache_enabled=True,
- cache_type=None,
- cache_dir=None,
- cache_url=None,
- module_filename=None,
- input_encoding=None,
- disable_unicode=False,
- module_writer=None,
- bytestring_passthrough=False,
- default_filters=None,
- buffer_filters=(),
- strict_undefined=False,
- imports=None,
- future_imports=None,
- enable_loop=True,
- preprocessor=None,
- lexer_cls=None):
- if uri:
- self.module_id = re.sub(r'\W', "_", uri)
- self.uri = uri
- elif filename:
- self.module_id = re.sub(r'\W', "_", filename)
- drive, path = os.path.splitdrive(filename)
- path = os.path.normpath(path).replace(os.path.sep, "/")
- self.uri = path
- else:
- self.module_id = "memory:" + hex(id(self))
- self.uri = self.module_id
-
- u_norm = self.uri
- if u_norm.startswith("/"):
- u_norm = u_norm[1:]
- u_norm = os.path.normpath(u_norm)
- if u_norm.startswith(".."):
- raise exceptions.TemplateLookupException(
- "Template uri \"%s\" is invalid - "
- "it cannot be relative outside "
- "of the root path." % self.uri)
-
- self.input_encoding = input_encoding
- self.output_encoding = output_encoding
- self.encoding_errors = encoding_errors
- self.disable_unicode = disable_unicode
- self.bytestring_passthrough = bytestring_passthrough or disable_unicode
- self.enable_loop = enable_loop
- self.strict_undefined = strict_undefined
- self.module_writer = module_writer
-
- if compat.py3k and disable_unicode:
- raise exceptions.UnsupportedError(
- "Mako for Python 3 does not "
- "support disabling Unicode")
- elif output_encoding and disable_unicode:
- raise exceptions.UnsupportedError(
- "output_encoding must be set to "
- "None when disable_unicode is used.")
- if default_filters is None:
- if compat.py3k or self.disable_unicode:
- self.default_filters = ['str']
- else:
- self.default_filters = ['unicode']
- else:
- self.default_filters = default_filters
- self.buffer_filters = buffer_filters
-
- self.imports = imports
- self.future_imports = future_imports
- self.preprocessor = preprocessor
-
- if lexer_cls is not None:
- self.lexer_cls = lexer_cls
-
- # if plain text, compile code in memory only
- if text is not None:
- (code, module) = _compile_text(self, text, filename)
- self._code = code
- self._source = text
- ModuleInfo(module, None, self, filename, code, text)
- elif filename is not None:
- # if template filename and a module directory, load
- # a filesystem-based module file, generating if needed
- if module_filename is not None:
- path = module_filename
- elif module_directory is not None:
- path = os.path.abspath(
- os.path.join(
- os.path.normpath(module_directory),
- u_norm + ".py"
- )
- )
- else:
- path = None
- module = self._compile_from_file(path, filename)
- else:
- raise exceptions.RuntimeException(
- "Template requires text or filename")
-
- self.module = module
- self.filename = filename
- self.callable_ = self.module.render_body
- self.format_exceptions = format_exceptions
- self.error_handler = error_handler
- self.lookup = lookup
-
- self.module_directory = module_directory
-
- self._setup_cache_args(
- cache_impl, cache_enabled, cache_args,
- cache_type, cache_dir, cache_url
- )
-
-
- @util.memoized_property
- def reserved_names(self):
- if self.enable_loop:
- return codegen.RESERVED_NAMES
- else:
- return codegen.RESERVED_NAMES.difference(['loop'])
-
- def _setup_cache_args(self,
- cache_impl, cache_enabled, cache_args,
- cache_type, cache_dir, cache_url):
- self.cache_impl = cache_impl
- self.cache_enabled = cache_enabled
- if cache_args:
- self.cache_args = cache_args
- else:
- self.cache_args = {}
-
- # transfer deprecated cache_* args
- if cache_type:
- self.cache_args['type'] = cache_type
- if cache_dir:
- self.cache_args['dir'] = cache_dir
- if cache_url:
- self.cache_args['url'] = cache_url
-
- def _compile_from_file(self, path, filename):
- if path is not None:
- util.verify_directory(os.path.dirname(path))
- filemtime = os.stat(filename)[stat.ST_MTIME]
- if not os.path.exists(path) or \
- os.stat(path)[stat.ST_MTIME] < filemtime:
- data = util.read_file(filename)
- _compile_module_file(
- self,
- data,
- filename,
- path,
- self.module_writer)
- module = compat.load_module(self.module_id, path)
- del sys.modules[self.module_id]
- if module._magic_number != codegen.MAGIC_NUMBER:
- data = util.read_file(filename)
- _compile_module_file(
- self,
- data,
- filename,
- path,
- self.module_writer)
- module = compat.load_module(self.module_id, path)
- del sys.modules[self.module_id]
- ModuleInfo(module, path, self, filename, None, None)
- else:
- # template filename and no module directory, compile code
- # in memory
- data = util.read_file(filename)
- code, module = _compile_text(
- self,
- data,
- filename)
- self._source = None
- self._code = code
- ModuleInfo(module, None, self, filename, code, None)
- return module
-
- @property
- def source(self):
- """Return the template source code for this :class:`.Template`."""
-
- return _get_module_info_from_callable(self.callable_).source
-
- @property
- def code(self):
- """Return the module source code for this :class:`.Template`."""
-
- return _get_module_info_from_callable(self.callable_).code
-
- @util.memoized_property
- def cache(self):
- return cache.Cache(self)
-
- @property
- def cache_dir(self):
- return self.cache_args['dir']
- @property
- def cache_url(self):
- return self.cache_args['url']
- @property
- def cache_type(self):
- return self.cache_args['type']
-
- def render(self, *args, **data):
- """Render the output of this template as a string.
-
- If the template specifies an output encoding, the string
- will be encoded accordingly, else the output is raw (raw
- output uses `cStringIO` and can't handle multibyte
- characters). A :class:`.Context` object is created corresponding
- to the given data. Arguments that are explicitly declared
- by this template's internal rendering method are also
- pulled from the given ``*args``, ``**data`` members.
-
- """
- return runtime._render(self, self.callable_, args, data)
-
- def render_unicode(self, *args, **data):
- """Render the output of this template as a unicode object."""
-
- return runtime._render(self,
- self.callable_,
- args,
- data,
- as_unicode=True)
-
- def render_context(self, context, *args, **kwargs):
- """Render this :class:`.Template` with the given context.
-
- The data is written to the context's buffer.
-
- """
- if getattr(context, '_with_template', None) is None:
- context._set_with_template(self)
- runtime._render_context(self,
- self.callable_,
- context,
- *args,
- **kwargs)
-
- def has_def(self, name):
- return hasattr(self.module, "render_%s" % name)
-
- def get_def(self, name):
- """Return a def of this template as a :class:`.DefTemplate`."""
-
- return DefTemplate(self, getattr(self.module, "render_%s" % name))
-
- def _get_def_callable(self, name):
- return getattr(self.module, "render_%s" % name)
-
- @property
- def last_modified(self):
- return self.module._modified_time
-
-class ModuleTemplate(Template):
- """A Template which is constructed given an existing Python module.
-
- e.g.::
-
- t = Template("this is a template")
- f = file("mymodule.py", "w")
- f.write(t.code)
- f.close()
-
- import mymodule
-
- t = ModuleTemplate(mymodule)
- print t.render()
-
- """
-
- def __init__(self, module,
- module_filename=None,
- template=None,
- template_filename=None,
- module_source=None,
- template_source=None,
- output_encoding=None,
- encoding_errors='strict',
- disable_unicode=False,
- bytestring_passthrough=False,
- format_exceptions=False,
- error_handler=None,
- lookup=None,
- cache_args=None,
- cache_impl='beaker',
- cache_enabled=True,
- cache_type=None,
- cache_dir=None,
- cache_url=None,
- ):
- self.module_id = re.sub(r'\W', "_", module._template_uri)
- self.uri = module._template_uri
- self.input_encoding = module._source_encoding
- self.output_encoding = output_encoding
- self.encoding_errors = encoding_errors
- self.disable_unicode = disable_unicode
- self.bytestring_passthrough = bytestring_passthrough or disable_unicode
- self.enable_loop = module._enable_loop
-
- if compat.py3k and disable_unicode:
- raise exceptions.UnsupportedError(
- "Mako for Python 3 does not "
- "support disabling Unicode")
- elif output_encoding and disable_unicode:
- raise exceptions.UnsupportedError(
- "output_encoding must be set to "
- "None when disable_unicode is used.")
-
- self.module = module
- self.filename = template_filename
- ModuleInfo(module,
- module_filename,
- self,
- template_filename,
- module_source,
- template_source)
-
- self.callable_ = self.module.render_body
- self.format_exceptions = format_exceptions
- self.error_handler = error_handler
- self.lookup = lookup
- self._setup_cache_args(
- cache_impl, cache_enabled, cache_args,
- cache_type, cache_dir, cache_url
- )
-
-class DefTemplate(Template):
- """A :class:`.Template` which represents a callable def in a parent
- template."""
-
- def __init__(self, parent, callable_):
- self.parent = parent
- self.callable_ = callable_
- self.output_encoding = parent.output_encoding
- self.module = parent.module
- self.encoding_errors = parent.encoding_errors
- self.format_exceptions = parent.format_exceptions
- self.error_handler = parent.error_handler
- self.enable_loop = parent.enable_loop
- self.lookup = parent.lookup
- self.bytestring_passthrough = parent.bytestring_passthrough
-
- def get_def(self, name):
- return self.parent.get_def(name)
-
-class ModuleInfo(object):
- """Stores information about a module currently loaded into
- memory, provides reverse lookups of template source, module
- source code based on a module's identifier.
-
- """
- _modules = weakref.WeakValueDictionary()
-
- def __init__(self,
- module,
- module_filename,
- template,
- template_filename,
- module_source,
- template_source):
- self.module = module
- self.module_filename = module_filename
- self.template_filename = template_filename
- self.module_source = module_source
- self.template_source = template_source
- self._modules[module.__name__] = template._mmarker = self
- if module_filename:
- self._modules[module_filename] = self
-
- @classmethod
- def get_module_source_metadata(cls, module_source, full_line_map=False):
- source_map = re.search(
- r"__M_BEGIN_METADATA(.+?)__M_END_METADATA",
- module_source, re.S).group(1)
- source_map = compat.json.loads(source_map)
- source_map['line_map'] = dict((int(k), int(v))
- for k, v in source_map['line_map'].items())
- if full_line_map:
- f_line_map = source_map['full_line_map'] = []
- line_map = source_map['line_map']
-
- curr_templ_line = 1
- for mod_line in range(1, max(line_map)):
- if mod_line in line_map:
- curr_templ_line = line_map[mod_line]
- f_line_map.append(curr_templ_line)
- return source_map
-
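For context, the blob that get_module_source_metadata() parses sits at the bottom of each generated module between the two markers; a schematic (not verbatim) example is shown below, where line_map maps generated-module line numbers to template line numbers, matching the int()/int() conversion applied above:

    # Tail of a Mako-generated module (schematic, keys other than line_map
    # are illustrative):
    """
    __M_BEGIN_METADATA
    {"line_map": {"16": 1, "21": 4, "27": 9}, "uri": "hello.mako"}
    __M_END_METADATA
    """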
- @property
- def code(self):
- if self.module_source is not None:
- return self.module_source
- else:
- return util.read_python_file(self.module_filename)
-
- @property
- def source(self):
- if self.template_source is not None:
- if self.module._source_encoding and \
- not isinstance(self.template_source, compat.text_type):
- return self.template_source.decode(
- self.module._source_encoding)
- else:
- return self.template_source
- else:
- data = util.read_file(self.template_filename)
- if self.module._source_encoding:
- return data.decode(self.module._source_encoding)
- else:
- return data
-
-def _compile(template, text, filename, generate_magic_comment):
- lexer = template.lexer_cls(text,
- filename,
- disable_unicode=template.disable_unicode,
- input_encoding=template.input_encoding,
- preprocessor=template.preprocessor)
- node = lexer.parse()
- source = codegen.compile(node,
- template.uri,
- filename,
- default_filters=template.default_filters,
- buffer_filters=template.buffer_filters,
- imports=template.imports,
- future_imports=template.future_imports,
- source_encoding=lexer.encoding,
- generate_magic_comment=generate_magic_comment,
- disable_unicode=template.disable_unicode,
- strict_undefined=template.strict_undefined,
- enable_loop=template.enable_loop,
- reserved_names=template.reserved_names)
- return source, lexer
-
-def _compile_text(template, text, filename):
- identifier = template.module_id
- source, lexer = _compile(template, text, filename,
- generate_magic_comment=template.disable_unicode)
-
- cid = identifier
- if not compat.py3k and isinstance(cid, compat.text_type):
- cid = cid.encode()
- module = types.ModuleType(cid)
- code = compile(source, cid, 'exec')
-
- # this exec() works for 2.4->3.3.
- exec(code, module.__dict__, module.__dict__)
- return (source, module)
-
-def _compile_module_file(template, text, filename, outputpath, module_writer):
- source, lexer = _compile(template, text, filename,
- generate_magic_comment=True)
-
- if isinstance(source, compat.text_type):
- source = source.encode(lexer.encoding or 'ascii')
-
- if module_writer:
- module_writer(source, outputpath)
- else:
- # make tempfiles in the same location as the ultimate
- # location. this ensures they're on the same filesystem,
- # avoiding synchronization issues.
- (dest, name) = tempfile.mkstemp(dir=os.path.dirname(outputpath))
-
- os.write(dest, source)
- os.close(dest)
- shutil.move(name, outputpath)
-
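_compile_module_file() above either defers to a caller-supplied module_writer(source, outputpath) callable or does the tempfile-plus-move itself. A minimal custom writer following the same atomic-replace idea might look like this (a sketch, not part of Mako; the function name is hypothetical):

    import os
    import tempfile

    def my_module_writer(source, outputpath):
        # `source` arrives already encoded to bytes; stage it next to the
        # destination so the final rename stays on one filesystem, then
        # move it into place.
        fd, tmpname = tempfile.mkstemp(dir=os.path.dirname(outputpath))
        try:
            os.write(fd, source)
        finally:
            os.close(fd)
        os.rename(tmpname, outputpath)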
-def _get_module_info_from_callable(callable_):
- if compat.py3k:
- return _get_module_info(callable_.__globals__['__name__'])
- else:
- return _get_module_info(callable_.func_globals['__name__'])
-
-def _get_module_info(filename):
- return ModuleInfo._modules[filename]
-
+++ /dev/null
-# mako/util.py
-# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
-#
-# This module is part of Mako and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-import re
-import collections
-import codecs
-import os
-from mako import compat
-import operator
-
-def update_wrapper(decorated, fn):
- decorated.__wrapped__ = fn
- decorated.__name__ = fn.__name__
- return decorated
-
-
-class PluginLoader(object):
- def __init__(self, group):
- self.group = group
- self.impls = {}
-
- def load(self, name):
- if name in self.impls:
- return self.impls[name]()
- else:
- import pkg_resources
- for impl in pkg_resources.iter_entry_points(
- self.group,
- name):
- self.impls[name] = impl.load
- return impl.load()
- else:
- from mako import exceptions
- raise exceptions.RuntimeException(
- "Can't load plugin %s %s" %
- (self.group, name))
-
- def register(self, name, modulepath, objname):
- def load():
- mod = __import__(modulepath)
- for token in modulepath.split(".")[1:]:
- mod = getattr(mod, token)
- return getattr(mod, objname)
- self.impls[name] = load
-
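A short usage sketch for PluginLoader; the entry-point group and module names here are hypothetical, but register() and load() are called exactly as defined above:

    loader = PluginLoader("mako.cache")                    # entry-point group
    loader.register("memory", "myapp.cache_backends", "MemoryImpl")
    impl_class = loader.load("memory")                     # returns MemoryImpl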
-def verify_directory(dir):
- """create and/or verify a filesystem directory."""
-
- tries = 0
-
- while not os.path.exists(dir):
- try:
- tries += 1
- os.makedirs(dir, compat.octal("0775"))
- except:
- if tries > 5:
- raise
-
-def to_list(x, default=None):
- if x is None:
- return default
- if not isinstance(x, (list, tuple)):
- return [x]
- else:
- return x
-
-
-class memoized_property(object):
- """A read-only @property that is only evaluated once."""
- def __init__(self, fget, doc=None):
- self.fget = fget
- self.__doc__ = doc or fget.__doc__
- self.__name__ = fget.__name__
-
- def __get__(self, obj, cls):
- if obj is None:
- return self
- obj.__dict__[self.__name__] = result = self.fget(obj)
- return result
-
-class memoized_instancemethod(object):
- """Decorate a method memoize its return value.
-
- Best applied to no-arg methods: memoization is not sensitive to
- argument values, and will always return the same value even when
- called with different arguments.
-
- """
- def __init__(self, fget, doc=None):
- self.fget = fget
- self.__doc__ = doc or fget.__doc__
- self.__name__ = fget.__name__
-
- def __get__(self, obj, cls):
- if obj is None:
- return self
- def oneshot(*args, **kw):
- result = self.fget(obj, *args, **kw)
- memo = lambda *a, **kw: result
- memo.__name__ = self.__name__
- memo.__doc__ = self.__doc__
- obj.__dict__[self.__name__] = memo
- return result
- oneshot.__name__ = self.__name__
- oneshot.__doc__ = self.__doc__
- return oneshot
-
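A small sketch of how the two memoizing helpers above behave; note that memoized_instancemethod is only safe on methods whose result does not depend on their arguments, as the docstring warns:

    class Expensive(object):
        @memoized_property
        def config(self):
            print("loading config")        # printed on first access only
            return {"answer": 42}

        @memoized_instancemethod
        def everything(self):
            print("computing")             # printed on first call only
            return list(range(3))

    e = Expensive()
    e.config
    e.config            # "loading config" appeared once
    e.everything()
    e.everything()      # "computing" appeared once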
-class SetLikeDict(dict):
- """a dictionary that has some setlike methods on it"""
- def union(self, other):
- """produce a 'union' of this dict and another (at the key level).
-
- values in the second dict take precedence over that of the first"""
- x = SetLikeDict(**self)
- x.update(other)
- return x
-
-class FastEncodingBuffer(object):
- """a very rudimentary buffer that is faster than StringIO,
- but doesn't crash on unicode data the way cStringIO does."""
-
- def __init__(self, encoding=None, errors='strict', as_unicode=False):
- self.data = collections.deque()
- self.encoding = encoding
- if as_unicode:
- self.delim = compat.u('')
- else:
- self.delim = ''
- self.as_unicode = as_unicode
- self.errors = errors
- self.write = self.data.append
-
- def truncate(self):
- self.data = collections.deque()
- self.write = self.data.append
-
- def getvalue(self):
- if self.encoding:
- return self.delim.join(self.data).encode(self.encoding,
- self.errors)
- else:
- return self.delim.join(self.data)
-
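FastEncodingBuffer in one line of use: append unicode fragments, then join and encode once at the end (a sketch using the same compat helpers as the class itself):

    buf = FastEncodingBuffer(encoding="utf-8", as_unicode=True)
    buf.write(compat.u("fran"))
    buf.write(compat.u("\u00e7ais"))
    assert buf.getvalue() == compat.u("fran\u00e7ais").encode("utf-8")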
-class LRUCache(dict):
- """A dictionary-like object that stores a limited number of items,
- discarding lesser used items periodically.
-
- this is a rewrite of LRUCache from Myghty to use a periodic timestamp-based
- paradigm so that synchronization is not really needed. the size management
- is inexact.
- """
-
- class _Item(object):
- def __init__(self, key, value):
- self.key = key
- self.value = value
- self.timestamp = compat.time_func()
- def __repr__(self):
- return repr(self.value)
-
- def __init__(self, capacity, threshold=.5):
- self.capacity = capacity
- self.threshold = threshold
-
- def __getitem__(self, key):
- item = dict.__getitem__(self, key)
- item.timestamp = compat.time_func()
- return item.value
-
- def values(self):
- return [i.value for i in dict.values(self)]
-
- def setdefault(self, key, value):
- if key in self:
- return self[key]
- else:
- self[key] = value
- return value
-
- def __setitem__(self, key, value):
- item = dict.get(self, key)
- if item is None:
- item = self._Item(key, value)
- dict.__setitem__(self, key, item)
- else:
- item.value = value
- self._manage_size()
-
- def _manage_size(self):
- while len(self) > self.capacity + self.capacity * self.threshold:
- bytime = sorted(dict.values(self),
- key=operator.attrgetter('timestamp'), reverse=True)
- for item in bytime[self.capacity:]:
- try:
- del self[item.key]
- except KeyError:
- # if we couldn't find a key, most likely some other thread
- # broke in on us. loop around and try again
- break
-
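A quick sketch of the "inexact" sizing the LRUCache docstring describes: nothing is evicted until the size passes capacity * (1 + threshold), after which the least recently touched items are trimmed back down to capacity:

    cache = LRUCache(capacity=2, threshold=.5)
    for key in "abcd":
        cache[key] = key.upper()
    # The fourth insert pushes len() past 2 * 1.5 == 3, triggering
    # _manage_size(), which keeps only the most recently used entries.
    assert len(cache) == 2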
-# Regexp to match python magic encoding line
-_PYTHON_MAGIC_COMMENT_re = re.compile(
- r'[ \t\f]* \# .* coding[=:][ \t]*([-\w.]+)',
- re.VERBOSE)
-
-def parse_encoding(fp):
- """Deduce the encoding of a Python source file (binary mode) from magic
- comment.
-
- It does this in the same way as the `Python interpreter`__
-
- .. __: http://docs.python.org/ref/encodings.html
-
- The ``fp`` argument should be a seekable file object in binary mode.
- """
- pos = fp.tell()
- fp.seek(0)
- try:
- line1 = fp.readline()
- has_bom = line1.startswith(codecs.BOM_UTF8)
- if has_bom:
- line1 = line1[len(codecs.BOM_UTF8):]
-
- m = _PYTHON_MAGIC_COMMENT_re.match(line1.decode('ascii', 'ignore'))
- if not m:
- try:
- import parser
- parser.suite(line1.decode('ascii', 'ignore'))
- except (ImportError, SyntaxError):
- # Either it's a real syntax error, in which case the source
- # is not valid python source, or line2 is a continuation of
- # line1, in which case we don't want to scan line2 for a magic
- # comment.
- pass
- else:
- line2 = fp.readline()
- m = _PYTHON_MAGIC_COMMENT_re.match(
- line2.decode('ascii', 'ignore'))
-
- if has_bom:
- if m:
- raise SyntaxError("python refuses to compile code with both a UTF8" \
- " byte-order-mark and a magic encoding comment")
- return 'utf_8'
- elif m:
- return m.group(1)
- else:
- return None
- finally:
- fp.seek(pos)
-
-def sorted_dict_repr(d):
- """repr() a dictionary with the keys in order.
-
- Used by the lexer unit test to compare parse trees based on strings.
-
- """
- keys = list(d.keys())
- keys.sort()
- return "{" + ", ".join(["%r: %r" % (k, d[k]) for k in keys]) + "}"
-
-def restore__ast(_ast):
- """Attempt to restore the required classes to the _ast module if it
- appears to be missing them
- """
- if hasattr(_ast, 'AST'):
- return
- _ast.PyCF_ONLY_AST = 2 << 9
- m = compile("""\
-def foo(): pass
-class Bar(object): pass
-if False: pass
-baz = 'mako'
-1 + 2 - 3 * 4 / 5
-6 // 7 % 8 << 9 >> 10
-11 & 12 ^ 13 | 14
-15 and 16 or 17
--baz + (not +18) - ~17
-baz and 'foo' or 'bar'
-(mako is baz == baz) is not baz != mako
-mako > baz < mako >= baz <= mako
-mako in baz not in mako""", '<unknown>', 'exec', _ast.PyCF_ONLY_AST)
- _ast.Module = type(m)
-
- for cls in _ast.Module.__mro__:
- if cls.__name__ == 'mod':
- _ast.mod = cls
- elif cls.__name__ == 'AST':
- _ast.AST = cls
-
- _ast.FunctionDef = type(m.body[0])
- _ast.ClassDef = type(m.body[1])
- _ast.If = type(m.body[2])
-
- _ast.Name = type(m.body[3].targets[0])
- _ast.Store = type(m.body[3].targets[0].ctx)
- _ast.Str = type(m.body[3].value)
-
- _ast.Sub = type(m.body[4].value.op)
- _ast.Add = type(m.body[4].value.left.op)
- _ast.Div = type(m.body[4].value.right.op)
- _ast.Mult = type(m.body[4].value.right.left.op)
-
- _ast.RShift = type(m.body[5].value.op)
- _ast.LShift = type(m.body[5].value.left.op)
- _ast.Mod = type(m.body[5].value.left.left.op)
- _ast.FloorDiv = type(m.body[5].value.left.left.left.op)
-
- _ast.BitOr = type(m.body[6].value.op)
- _ast.BitXor = type(m.body[6].value.left.op)
- _ast.BitAnd = type(m.body[6].value.left.left.op)
-
- _ast.Or = type(m.body[7].value.op)
- _ast.And = type(m.body[7].value.values[0].op)
-
- _ast.Invert = type(m.body[8].value.right.op)
- _ast.Not = type(m.body[8].value.left.right.op)
- _ast.UAdd = type(m.body[8].value.left.right.operand.op)
- _ast.USub = type(m.body[8].value.left.left.op)
-
- _ast.Or = type(m.body[9].value.op)
- _ast.And = type(m.body[9].value.values[0].op)
-
- _ast.IsNot = type(m.body[10].value.ops[0])
- _ast.NotEq = type(m.body[10].value.ops[1])
- _ast.Is = type(m.body[10].value.left.ops[0])
- _ast.Eq = type(m.body[10].value.left.ops[1])
-
- _ast.Gt = type(m.body[11].value.ops[0])
- _ast.Lt = type(m.body[11].value.ops[1])
- _ast.GtE = type(m.body[11].value.ops[2])
- _ast.LtE = type(m.body[11].value.ops[3])
-
- _ast.In = type(m.body[12].value.ops[0])
- _ast.NotIn = type(m.body[12].value.ops[1])
-
-
-
-def read_file(path, mode='rb'):
- fp = open(path, mode)
- try:
- data = fp.read()
- return data
- finally:
- fp.close()
-
-def read_python_file(path):
- fp = open(path, "rb")
- try:
- encoding = parse_encoding(fp)
- data = fp.read()
- if encoding:
- data = data.decode(encoding)
- return data
- finally:
- fp.close()
-
+++ /dev/null
-/****************************************************************************
-* Copyright (C) 2016 Intel Corporation. All Rights Reserved.
-*
-* Permission is hereby granted, free of charge, to any person obtaining a
-* copy of this software and associated documentation files (the "Software"),
-* to deal in the Software without restriction, including without limitation
-* the rights to use, copy, modify, merge, publish, distribute, sublicense,
-* and/or sell copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following conditions:
-*
-* The above copyright notice and this permission notice (including the next
-* paragraph) shall be included in all copies or substantial portions of the
-* Software.
-*
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
-* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
-* IN THE SOFTWARE.
-*
-* @file ${filename}
-*
-* @brief Implementation for events. auto-generated file
-*
-* DO NOT EDIT
-*
-******************************************************************************/
-#include "common/os.h"
-#include "gen_ar_event.h"
-#include "gen_ar_eventhandler.h"
-
-using namespace ArchRast;
-% for name in protos['event_names']:
-
-void ${name}::Accept(EventHandler* pHandler)
-{
- pHandler->Handle(*this);
-}
-% endfor
+++ /dev/null
-/****************************************************************************
-* Copyright (C) 2016 Intel Corporation. All Rights Reserved.
-*
-* Permission is hereby granted, free of charge, to any person obtaining a
-* copy of this software and associated documentation files (the "Software"),
-* to deal in the Software without restriction, including without limitation
-* the rights to use, copy, modify, merge, publish, distribute, sublicense,
-* and/or sell copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following conditions:
-*
-* The above copyright notice and this permission notice (including the next
-* paragraph) shall be included in all copies or substantial portions of the
-* Software.
-*
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
-* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
-* IN THE SOFTWARE.
-*
-* @file ${filename}
-*
-* @brief Definitions for events. auto-generated file
-*
-* DO NOT EDIT
-*
-******************************************************************************/
-#pragma once
-
-#include "common/os.h"
-#include "core/state.h"
-
-namespace ArchRast
-{
-% for name in protos['enum_names']:
- enum ${name}
- {<% names = protos['enums'][name]['names'] %>
- % for i in range(len(names)):
- ${names[i].lstrip()}
- % endfor
- };
-% endfor
-
- //Forward decl
- class EventHandler;
-
- //////////////////////////////////////////////////////////////////////////
- /// Event - interface for handling events.
- //////////////////////////////////////////////////////////////////////////
- struct Event
- {
- Event() {}
- virtual ~Event() {}
-
- virtual void Accept(EventHandler* pHandler) = 0;
- };
-% for name in protos['event_names']:
-
- //////////////////////////////////////////////////////////////////////////
- /// ${name}Data
- //////////////////////////////////////////////////////////////////////////
-#pragma pack(push, 1)
- struct ${name}Data
- {<%
- field_names = protos['events'][name]['field_names']
- field_types = protos['events'][name]['field_types'] %>
- // Fields
- % for i in range(len(field_names)):
- ${field_types[i]} ${field_names[i]};
- % endfor
- };
-#pragma pack(pop)
-
- //////////////////////////////////////////////////////////////////////////
- /// ${name}
- //////////////////////////////////////////////////////////////////////////
- struct ${name} : Event
- {<%
- field_names = protos['events'][name]['field_names']
- field_types = protos['events'][name]['field_types'] %>
- ${name}Data data;
-
- // Constructor
- ${name}(
- % for i in range(len(field_names)):
- % if i < len(field_names)-1:
- ${field_types[i]} ${field_names[i]},
- % endif
- % if i == len(field_names)-1:
- ${field_types[i]} ${field_names[i]}
- % endif
- % endfor
- )
- {
- % for i in range(len(field_names)):
- data.${field_names[i]} = ${field_names[i]};
- % endfor
- }
-
- virtual void Accept(EventHandler* pHandler);
- };
-% endfor
-}
\ No newline at end of file
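The event templates above (and the handler templates that follow) are driven by a protos dictionary; its construction lives in the generator script, but the keys the templates dereference imply roughly this shape (names and values are illustrative only):

    protos = {
        "enum_names": ["AR_DRAW_TYPE"],
        "enums": {
            "AR_DRAW_TYPE": {"names": ["Instanced = 0,", "IndexedInstanced = 1,"]},
        },
        "event_names": ["DrawInstancedEvent"],
        "events": {
            "DrawInstancedEvent": {
                "field_names": ["drawId", "numVertices"],
                "field_types": ["uint32_t", "uint32_t"],
                "num_fields": 2,     # used by the file-handler template below
                "event_id": 1,       # likewise
            },
        },
    }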
+++ /dev/null
-/****************************************************************************
-* Copyright (C) 2016 Intel Corporation. All Rights Reserved.
-*
-* Permission is hereby granted, free of charge, to any person obtaining a
-* copy of this software and associated documentation files (the "Software"),
-* to deal in the Software without restriction, including without limitation
-* the rights to use, copy, modify, merge, publish, distribute, sublicense,
-* and/or sell copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following conditions:
-*
-* The above copyright notice and this permission notice (including the next
-* paragraph) shall be included in all copies or substantial portions of the
-* Software.
-*
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
-* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
-* IN THE SOFTWARE.
-*
-* @file ${filename}
-*
-* @brief Event handler interface. auto-generated file
-*
-* DO NOT EDIT
-*
-******************************************************************************/
-#pragma once
-
-#include "${event_header}"
-
-namespace ArchRast
-{
- //////////////////////////////////////////////////////////////////////////
- /// EventHandler - interface for handling events.
- //////////////////////////////////////////////////////////////////////////
- class EventHandler
- {
- public:
- EventHandler() {}
- virtual ~EventHandler() {}
-
- virtual void FlushDraw(uint32_t drawId) {}
-
-% for name in protos['event_names']:
- virtual void Handle(const ${name}& event) {}
-% endfor
- };
-}
+++ /dev/null
-/****************************************************************************
-* Copyright (C) 2016 Intel Corporation. All Rights Reserved.
-*
-* Permission is hereby granted, free of charge, to any person obtaining a
-* copy of this software and associated documentation files (the "Software"),
-* to deal in the Software without restriction, including without limitation
-* the rights to use, copy, modify, merge, publish, distribute, sublicense,
-* and/or sell copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following conditions:
-*
-* The above copyright notice and this permission notice (including the next
-* paragraph) shall be included in all copies or substantial portions of the
-* Software.
-*
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
-* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
-* IN THE SOFTWARE.
-*
-* @file ${filename}
-*
-* @brief Event handler interface. auto-generated file
-*
-* DO NOT EDIT
-*
-******************************************************************************/
-#pragma once
-
-#include "common/os.h"
-#include "${event_header}"
-#include <fstream>
-#include <sstream>
-
-namespace ArchRast
-{
- //////////////////////////////////////////////////////////////////////////
- /// EventHandlerFile - interface for handling events.
- //////////////////////////////////////////////////////////////////////////
- class EventHandlerFile : public EventHandler
- {
- public:
- EventHandlerFile(uint32_t id)
- : mBufOffset(0)
- {
-#if defined(_WIN32)
- DWORD pid = GetCurrentProcessId();
- TCHAR procname[MAX_PATH];
- GetModuleFileName(NULL, procname, MAX_PATH);
- const char* pBaseName = strrchr(procname, '\\');
- std::stringstream outDir;
- outDir << KNOB_DEBUG_OUTPUT_DIR << pBaseName << "_" << pid << std::ends;
- CreateDirectory(outDir.str().c_str(), NULL);
-
- char buf[255];
- // There could be multiple threads creating thread pools. We
- // want to make sure they are uniquely identified by adding in
- // the creator's thread id into the filename.
- sprintf(buf, "%s\\ar_event%d_%d.bin", outDir.str().c_str(), GetCurrentThreadId(), id);
- mFilename = std::string(buf);
-#else
- char buf[255];
- // There could be multiple threads creating thread pools. We
- // want to make sure they are uniquely identified by adding in
- // the creator's thread id into the filename.
- sprintf(buf, "%s/ar_event%d_%d.bin", "/tmp", GetCurrentThreadId(), id);
- mFilename = std::string(buf);
-#endif
- }
-
- virtual ~EventHandlerFile()
- {
- FlushBuffer();
- }
-
- //////////////////////////////////////////////////////////////////////////
- /// @brief Flush buffer to file.
- bool FlushBuffer()
- {
- if (mBufOffset > 0)
- {
- if (mBufOffset == mHeaderBufOffset)
- {
- // Nothing to flush. Only header has been generated.
- return false;
- }
-
- std::ofstream file;
- file.open(mFilename, std::ios::out | std::ios::app | std::ios::binary);
-
- if (!file.is_open())
- {
- SWR_INVALID("ArchRast: Could not open event file!");
- return false;
- }
-
- file.write((char*)mBuffer, mBufOffset);
- file.close();
-
- mBufOffset = 0;
- mHeaderBufOffset = 0; // Reset header offset so it's no longer considered.
- }
- return true;
- }
-
- //////////////////////////////////////////////////////////////////////////
- /// @brief Write event and its payload to the memory buffer.
- void Write(uint32_t eventId, const char* pBlock, uint32_t size)
- {
- if ((mBufOffset + size + sizeof(eventId)) > mBufferSize)
- {
- if (!FlushBuffer())
- {
- // Don't corrupt what's already in the buffer?
- /// @todo Maybe add corrupt marker to buffer here in case we can open file in future?
- return;
- }
- }
-
- memcpy(&mBuffer[mBufOffset], (char*)&eventId, sizeof(eventId));
- mBufOffset += sizeof(eventId);
- memcpy(&mBuffer[mBufOffset], pBlock, size);
- mBufOffset += size;
- }
-
-% for name in protos['event_names']:
- //////////////////////////////////////////////////////////////////////////
- /// @brief Handle ${name} event
- virtual void Handle(const ${name}& event)
- {
-% if protos['events'][name]['num_fields'] == 0:
- Write(${protos['events'][name]['event_id']}, (char*)&event.data, 0);
-% else:
- Write(${protos['events'][name]['event_id']}, (char*)&event.data, sizeof(event.data));
-%endif
- }
-% endfor
-
- //////////////////////////////////////////////////////////////////////////
- /// @brief Everything written to the buffer up to this point is the header.
- virtual void MarkHeader()
- {
- mHeaderBufOffset = mBufOffset;
- }
-
- std::string mFilename;
-
- static const uint32_t mBufferSize = 1024;
- uint8_t mBuffer[mBufferSize];
- uint32_t mBufOffset{0};
- uint32_t mHeaderBufOffset{0};
- };
-}
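Write() above emits a uint32 event id followed immediately by the packed event struct (the #pragma pack(1) in the event header means no padding), so a log produced by EventHandlerFile can be walked with a sketch like the one below. The id-to-format table is hypothetical and would come from the same event definitions the templates are generated from; little-endian layout is assumed.

    import struct

    # Hypothetical: packed payload format per event id.
    EVENT_FORMATS = {1: "<II", 2: "<IQ"}

    def read_ar_events(path):
        with open(path, "rb") as fp:
            data = fp.read()
        offset = 0
        while offset < len(data):
            (event_id,) = struct.unpack_from("<I", data, offset)
            offset += 4
            fmt = EVENT_FORMATS[event_id]
            fields = struct.unpack_from(fmt, data, offset)
            offset += struct.calcsize(fmt)
            yield event_id, fields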
+++ /dev/null
-/****************************************************************************
-* Copyright (C) 2017 Intel Corporation. All Rights Reserved.
-*
-* Permission is hereby granted, free of charge, to any person obtaining a
-* copy of this software and associated documentation files (the "Software"),
-* to deal in the Software without restriction, including without limitation
-* the rights to use, copy, modify, merge, publish, distribute, sublicense,
-* and/or sell copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following conditions:
-*
-* The above copyright notice and this permission notice (including the next
-* paragraph) shall be included in all copies or substantial portions of the
-* Software.
-*
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
-* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
-* IN THE SOFTWARE.
-*
-* @file BackendPixelRate${fileNum}.cpp
-*
-* @brief auto-generated file
-*
-* DO NOT EDIT
-*
-******************************************************************************/
-
-#include "core/backend.h"
-
-void InitBackendPixelRate${fileNum}()
-{
- %for func in funcList:
- ${func}
- %endfor
-}
+++ /dev/null
-<%
- max_len = 0
- for knob in knobs:
- if len(knob[0]) > max_len: max_len = len(knob[0])
- max_len += len('KNOB_ ')
- if max_len % 4: max_len += 4 - (max_len % 4)
-
- def space_knob(knob):
- knob_len = len('KNOB_' + knob)
- return ' '*(max_len - knob_len)
-
- def calc_max_name_len(choices_array):
- _max_len = 0
- for choice in choices_array:
- if len(choice['name']) > _max_len: _max_len = len(choice['name'])
-
- if _max_len % 4: _max_len += 4 - (_max_len % 4)
- return _max_len
-
- def space_name(name, max_len):
- name_len = len(name)
- return ' '*(max_len - name_len)
-
-
-%>/******************************************************************************
-*
-* Copyright 2015-2016
-* Intel Corporation
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-* http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*
-% if gen_header:
-* @file ${filename}.h
-% else:
-* @file ${filename}.cpp
-% endif
-*
-* @brief Dynamic Knobs for Core.
-*
-* ======================= AUTO GENERATED: DO NOT EDIT !!! ====================
-*
-******************************************************************************/
-%if gen_header:
-#pragma once
-#include <string>
-
-template <typename T>
-struct Knob
-{
- const T& Value() const { return m_Value; }
- const T& Value(const T& newValue) { m_Value = newValue; return Value(); }
-
-protected:
- Knob(const T& defaultValue) : m_Value(defaultValue) {}
-
-private:
- T m_Value;
-};
-
-#define DEFINE_KNOB(_name, _type, _default) \\
- struct Knob_##_name : Knob<_type> \\
- { \\
- Knob_##_name() : Knob<_type>(_default) { } \\
- static const char* Name() { return "KNOB_" #_name; } \\
- } _name;
-
-#define GET_KNOB(_name) g_GlobalKnobs._name.Value()
-#define SET_KNOB(_name, _newValue) g_GlobalKnobs._name.Value(_newValue)
-
-struct GlobalKnobs
-{
- % for knob in knobs:
- //-----------------------------------------------------------
- // KNOB_${knob[0]}
- //
- % for line in knob[1]['desc']:
- // ${line}
- % endfor
- % if knob[1].get('choices'):
- <%
- choices = knob[1].get('choices')
- _max_len = calc_max_name_len(choices) %>//
- % for i in range(len(choices)):
- // ${choices[i]['name']}${space_name(choices[i]['name'], _max_len)} = ${format(choices[i]['value'], '#010x')}
- % endfor
- % endif
- //
- % if knob[1]['type'] == 'std::string':
- DEFINE_KNOB(${knob[0]}, ${knob[1]['type']}, "${repr(knob[1]['default'])[1:-1]}");
- % else:
- DEFINE_KNOB(${knob[0]}, ${knob[1]['type']}, ${knob[1]['default']});
- % endif
-
- % endfor
- GlobalKnobs();
- std::string ToString(const char* optPerLinePrefix="");
-};
-extern GlobalKnobs g_GlobalKnobs;
-
-#undef DEFINE_KNOB
-
-% for knob in knobs:
-#define KNOB_${knob[0]}${space_knob(knob[0])} GET_KNOB(${knob[0]})
-% endfor
-
-% else:
-% for inc in includes:
-#include <${inc}>
-% endfor
-
-//========================================================
-// Static Data Members
-//========================================================
-GlobalKnobs g_GlobalKnobs;
-
-//========================================================
-// Knob Initialization
-//========================================================
-GlobalKnobs::GlobalKnobs()
-{
- % for knob in knobs:
- InitKnob(${knob[0]});
- % endfor
-}
-
-//========================================================
-// Knob Display (Convert to String)
-//========================================================
-std::string GlobalKnobs::ToString(const char* optPerLinePrefix)
-{
- std::basic_stringstream<char> str;
- str << std::showbase << std::setprecision(1) << std::fixed;
-
- if (optPerLinePrefix == nullptr) { optPerLinePrefix = ""; }
-
- % for knob in knobs:
- str << optPerLinePrefix << "KNOB_${knob[0]}:${space_knob(knob[0])}";
- % if knob[1]['type'] == 'bool':
- str << (KNOB_${knob[0]} ? "+\n" : "-\n");
- % elif knob[1]['type'] != 'float' and knob[1]['type'] != 'std::string':
- str << std::hex << std::setw(11) << std::left << KNOB_${knob[0]};
- str << std::dec << KNOB_${knob[0]} << "\n";
- % else:
- str << KNOB_${knob[0]} << "\n";
- % endif
- % endfor
- str << std::ends;
-
- return str.str();
-}
-
-% endif
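Finally, a sketch of how this knobs template might be rendered with Mako. The knob definition and include list are hypothetical, but the context names (knobs, gen_header, filename, includes) match what the template dereferences:

    from mako.template import Template

    knobs = [
        ("ENABLE_ASSERTS", {                 # hypothetical knob
            "type": "bool",
            "default": "false",
            "desc": ["Enable extra assertion checking."],
        }),
    ]

    tmpl = Template(filename="knobs.template")
    header = tmpl.render(knobs=knobs, gen_header=True,
                         filename="gen_knobs", includes=[])
    source = tmpl.render(knobs=knobs, gen_header=False,
                         filename="gen_knobs",
                         includes=["gen_knobs.h", "iomanip", "sstream"])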