forked from M-Labs/artiq
Merge branch 'new-py2llvm'
commit c14299dca8
@@ -5,6 +5,7 @@ __pycache__
 *.bin
 *.elf
 *.fbi
+*.pyc
 doc/manual/_build
 /build
 /dist
@@ -15,3 +16,10 @@ artiq/test/h5types.h5
 examples/master/results
 examples/master/dataset_db.pyon
 examples/sim/dataset_db.pyon
+Output/
+/lit-test/libartiq_support/libartiq_support.so
+
+# for developer convenience
+/test*.py
+/device_db.pyon
+/dataset_db.pyon
@@ -1,4 +1,4 @@
 [submodule "artiq/runtime/lwip"]
 	path = artiq/runtime/lwip
 	url = git://git.savannah.nongnu.org/lwip.git
 	ignore = untracked
@@ -1,6 +1,9 @@
 language: python
 python:
   - '3.5'
+branches:
+  only:
+    - master
 sudo: false
 env:
   global:
@@ -0,0 +1,2 @@
from .module import Module, Source
from .embedding import Stitcher
@@ -0,0 +1 @@
from .inline import inline
@@ -0,0 +1,80 @@
"""
:func:`inline` inlines a call instruction in ARTIQ IR.
The call instruction must have a statically known callee,
it must be second to last in the basic block, and the basic
block must have exactly one successor.
"""

from .. import types, builtins, iodelay, ir

def inline(call_insn):
    assert isinstance(call_insn, ir.Call)
    assert call_insn.static_target_function is not None
    assert len(call_insn.basic_block.successors()) == 1
    assert call_insn.basic_block.index(call_insn) == \
        len(call_insn.basic_block.instructions) - 2

    value_map = {}
    source_function = call_insn.static_target_function
    target_function = call_insn.basic_block.function
    target_predecessor = call_insn.basic_block
    target_successor = call_insn.basic_block.successors()[0]

    if builtins.is_none(source_function.type.ret):
        target_return_phi = None
    else:
        target_return_phi = target_successor.prepend(ir.Phi(source_function.type.ret))

    closure = target_predecessor.insert(ir.GetAttr(call_insn.target_function(), '__closure__'),
                                        before=call_insn)
    for actual_arg, formal_arg in zip([closure] + call_insn.arguments(),
                                      source_function.arguments):
        value_map[formal_arg] = actual_arg

    for source_block in source_function.basic_blocks:
        target_block = ir.BasicBlock([], "i." + source_block.name)
        target_function.add(target_block)
        value_map[source_block] = target_block

    def mapper(value):
        if isinstance(value, ir.Constant):
            return value
        else:
            return value_map[value]

    for source_insn in source_function.instructions():
        target_block = value_map[source_insn.basic_block]
        if isinstance(source_insn, ir.Return):
            if target_return_phi is not None:
                target_return_phi.add_incoming(mapper(source_insn.value()), target_block)
            target_insn = ir.Branch(target_successor)
        elif isinstance(source_insn, ir.Phi):
            target_insn = ir.Phi()
        elif isinstance(source_insn, ir.Delay):
            substs = source_insn.substs()
            mapped_substs = {var: value_map[substs[var]] for var in substs}
            const_substs = {var: iodelay.Const(mapped_substs[var].value)
                            for var in mapped_substs
                            if isinstance(mapped_substs[var], ir.Constant)}
            other_substs = {var: mapped_substs[var]
                            for var in mapped_substs
                            if not isinstance(mapped_substs[var], ir.Constant)}
            target_insn = ir.Delay(source_insn.expr.fold(const_substs), other_substs,
                                   value_map[source_insn.decomposition()],
                                   value_map[source_insn.target()])
        else:
            target_insn = source_insn.copy(mapper)
            target_insn.name = "i." + source_insn.name
        value_map[source_insn] = target_insn
        target_block.append(target_insn)

    for source_insn in source_function.instructions():
        if isinstance(source_insn, ir.Phi):
            target_insn = value_map[source_insn]
            for block, value in source_insn.incoming():
                target_insn.add_incoming(value_map[value], value_map[block])

    target_predecessor.terminator().replace_with(ir.Branch(value_map[source_function.entry()]))
    if target_return_phi is not None:
        call_insn.replace_all_uses_with(target_return_phi)
    call_insn.erase()
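For orientation, a small usage sketch (not part of the commit): it only checks the preconditions listed in the docstring before calling inline(). The import paths are assumptions about where these modules land in the tree.

    # Hypothetical helper: inline call_insn only when the documented
    # preconditions hold (static callee, second-to-last instruction,
    # exactly one successor block).
    from artiq.compiler import ir                          # assumed module layout
    from artiq.compiler.transforms.inline import inline    # assumed path

    def try_inline(call_insn):
        if not isinstance(call_insn, ir.Call) or call_insn.static_target_function is None:
            return False
        block = call_insn.basic_block
        if len(block.successors()) == 1 and \
                block.index(call_insn) == len(block.instructions) - 2:
            inline(call_insn)
            return True
        return False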
@@ -0,0 +1,2 @@
from .domination import DominatorTree
from .devirtualization import Devirtualization
@@ -0,0 +1,119 @@
"""
:class:`Devirtualization` performs method resolution at
compile time.

Devirtualization is implemented using a lattice
with three states: unknown → assigned once → diverges.
The lattice is computed individually for every
variable in scope as well as every
(instance type, field name) pair.
"""

from pythonparser import algorithm
from .. import asttyped, ir, types

def _advance(target_map, key, value):
    if key not in target_map:
        target_map[key] = value # unknown → assigned once
    else:
        target_map[key] = None  # assigned once → diverges

class FunctionResolver(algorithm.Visitor):
    def __init__(self, variable_map):
        self.variable_map = variable_map

        self.scope_map = dict()
        self.queue = []

        self.in_assign = False
        self.current_scopes = []

    def finalize(self):
        for thunk in self.queue:
            thunk()

    def visit_scope(self, node):
        self.current_scopes.append(node)
        self.generic_visit(node)
        self.current_scopes.pop()

    def visit_in_assign(self, node):
        self.in_assign = True
        self.visit(node)
        self.in_assign = False

    def visit_Assign(self, node):
        self.visit(node.value)
        self.visit_in_assign(node.targets)

    def visit_For(self, node):
        self.visit(node.iter)
        self.visit_in_assign(node.target)
        self.visit(node.body)
        self.visit(node.orelse)

    def visit_withitem(self, node):
        self.visit(node.context_expr)
        self.visit_in_assign(node.optional_vars)

    def visit_comprehension(self, node):
        self.visit(node.iter)
        self.visit_in_assign(node.target)
        self.visit(node.ifs)

    def visit_ModuleT(self, node):
        self.visit_scope(node)

    def visit_FunctionDefT(self, node):
        _advance(self.scope_map, (self.current_scopes[-1], node.name), node)
        self.visit_scope(node)

    def visit_NameT(self, node):
        if self.in_assign:
            # Just give up if we assign anything at all to a variable, and
            # assume it diverges.
            _advance(self.scope_map, (self.current_scopes[-1], node.id), None)
        else:
            # Look up the final value in scope_map and copy it into variable_map.
            keys = [(scope, node.id) for scope in reversed(self.current_scopes)]
            def thunk():
                for key in keys:
                    if key in self.scope_map:
                        self.variable_map[node] = self.scope_map[key]
                        return
            self.queue.append(thunk)

class MethodResolver(algorithm.Visitor):
    def __init__(self, variable_map, method_map):
        self.variable_map = variable_map
        self.method_map = method_map

    # embedding.Stitcher.finalize generates initialization statements
    # of form "constructor.meth = meth_body".
    def visit_Assign(self, node):
        if node.value not in self.variable_map:
            return

        value = self.variable_map[node.value]
        for target in node.targets:
            if isinstance(target, asttyped.AttributeT):
                if types.is_constructor(target.value.type):
                    instance_type = target.value.type.instance
                elif types.is_instance(target.value.type):
                    instance_type = target.value.type
                else:
                    continue
                _advance(self.method_map, (instance_type, target.attr), value)

class Devirtualization:
    def __init__(self):
        self.variable_map = dict()
        self.method_map = dict()

    def visit(self, node):
        function_resolver = FunctionResolver(self.variable_map)
        function_resolver.visit(node)
        function_resolver.finalize()

        method_resolver = MethodResolver(self.variable_map, self.method_map)
        method_resolver.visit(node)
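To make the unknown → assigned once → diverges lattice concrete, here is a self-contained sketch (not part of the commit) that mirrors _advance: the first binding of a key records the value, and any later binding collapses the entry to None, so resolution gives up on it.

    def _advance(target_map, key, value):
        if key not in target_map:
            target_map[key] = value   # unknown -> assigned once
        else:
            target_map[key] = None    # assigned once -> diverges

    scope_map = {}
    _advance(scope_map, ("module", "f"), "first definition of f")
    print(scope_map[("module", "f")])   # 'first definition of f'
    _advance(scope_map, ("module", "f"), "second definition of f")
    print(scope_map[("module", "f")])   # None: the binding diverges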
@@ -0,0 +1,137 @@
"""
:class:`DominatorTree` computes the dominance relation over
control flow graphs.

See http://www.cs.rice.edu/~keith/EMBED/dom.pdf.
"""

class GenericDominatorTree:
    def __init__(self):
        self._assign_names()
        self._compute()

    def _traverse_in_postorder(self):
        raise NotImplementedError

    def _prev_block_names(self, block):
        raise NotImplementedError

    def _assign_names(self):
        postorder = self._traverse_in_postorder()

        self._start_name = len(postorder) - 1
        self._block_of_name = postorder
        self._name_of_block = {}
        for block_name, block in enumerate(postorder):
            self._name_of_block[block] = block_name

    def _intersect(self, block_name_1, block_name_2):
        finger_1, finger_2 = block_name_1, block_name_2
        while finger_1 != finger_2:
            while finger_1 < finger_2:
                finger_1 = self._doms[finger_1]
            while finger_2 < finger_1:
                finger_2 = self._doms[finger_2]
        return finger_1

    def _compute(self):
        self._doms = {}

        # Start block dominates itself.
        self._doms[self._start_name] = self._start_name

        # We don't yet know what blocks dominate all other blocks.
        for block_name in range(self._start_name):
            self._doms[block_name] = None

        changed = True
        while changed:
            changed = False

            # For all blocks except start block, in reverse postorder...
            for block_name in reversed(range(self._start_name)):
                # Select a new immediate dominator from the blocks we have
                # already processed, and remember all others.
                # We've already processed at least one previous block because
                # of the graph traverse order.
                new_idom, prev_block_names = None, []
                for prev_block_name in self._prev_block_names(block_name):
                    if new_idom is None and self._doms[prev_block_name] is not None:
                        new_idom = prev_block_name
                    else:
                        prev_block_names.append(prev_block_name)

                # Find a common previous block
                for prev_block_name in prev_block_names:
                    if self._doms[prev_block_name] is not None:
                        new_idom = self._intersect(prev_block_name, new_idom)

                if self._doms[block_name] != new_idom:
                    self._doms[block_name] = new_idom
                    changed = True

    def immediate_dominator(self, block):
        return self._block_of_name[self._doms[self._name_of_block[block]]]

    def dominators(self, block):
        yield block

        block_name = self._name_of_block[block]
        while block_name != self._doms[block_name]:
            block_name = self._doms[block_name]
            yield self._block_of_name[block_name]

class DominatorTree(GenericDominatorTree):
    def __init__(self, function):
        self.function = function
        super().__init__()

    def _traverse_in_postorder(self):
        postorder = []

        visited = set()
        def visit(block):
            visited.add(block)
            for next_block in block.successors():
                if next_block not in visited:
                    visit(next_block)
            postorder.append(block)

        visit(self.function.entry())

        return postorder

    def _prev_block_names(self, block_name):
        for block in self._block_of_name[block_name].predecessors():
            yield self._name_of_block[block]

class PostDominatorTree(GenericDominatorTree):
    def __init__(self, function):
        self.function = function
        super().__init__()

    def _traverse_in_postorder(self):
        postorder = []

        visited = set()
        def visit(block):
            visited.add(block)
            for next_block in block.predecessors():
                if next_block not in visited:
                    visit(next_block)
            postorder.append(block)

        for block in self.function.basic_blocks:
            if not any(block.successors()):
                visit(block)

        postorder.append(None) # virtual exit block
        return postorder

    def _prev_block_names(self, block_name):
        succ_blocks = self._block_of_name[block_name].successors()
        if len(succ_blocks) > 0:
            for block in succ_blocks:
                yield self._name_of_block[block]
        else:
            yield self._start_name
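The _intersect walk is the heart of the Cooper-Harvey-Kennedy algorithm from the paper cited above. A self-contained sketch (not part of the commit) on a hand-written doms table shows two postorder names climbing the tree until they meet:

    # Diamond CFG named in postorder: exit=0, right=1, left=2, entry=3 (start).
    # doms[n] holds the postorder name of n's immediate dominator;
    # the start block names itself.
    doms = {3: 3, 2: 3, 1: 3, 0: 3}

    def intersect(doms, finger_1, finger_2):
        # Lower postorder names sit deeper in the tree, so always advance
        # the smaller finger towards the root until the fingers meet.
        while finger_1 != finger_2:
            while finger_1 < finger_2:
                finger_1 = doms[finger_1]
            while finger_2 < finger_1:
                finger_2 = doms[finger_2]
        return finger_1

    print(intersect(doms, 2, 1))  # 3: the entry block dominates both branches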
@@ -0,0 +1,99 @@
"""
The typedtree module exports the PythonParser AST enriched with
typing information.
"""

from pythonparser import ast

class commontyped(ast.commonloc):
    """A mixin for typed AST nodes."""

    _types = ("type",)

    def _reprfields(self):
        return self._fields + self._locs + self._types

class scoped(object):
    """
    :ivar typing_env: (dict with string keys and :class:`.types.Type` values)
        map of variable names to variable types
    :ivar globals_in_scope: (set of string keys)
        set of variables resolved as globals
    """

# Typed versions of untyped nodes
class argT(ast.arg, commontyped):
    pass

class ClassDefT(ast.ClassDef):
    _types = ("constructor_type",)
class FunctionDefT(ast.FunctionDef, scoped):
    _types = ("signature_type",)
class ModuleT(ast.Module, scoped):
    pass

class ExceptHandlerT(ast.ExceptHandler):
    _fields = ("filter", "name", "body") # rename ast.ExceptHandler.type to filter
    _types = ("name_type",)

class SliceT(ast.Slice, commontyped):
    pass

class AttributeT(ast.Attribute, commontyped):
    pass
class BinOpT(ast.BinOp, commontyped):
    pass
class BoolOpT(ast.BoolOp, commontyped):
    pass
class CallT(ast.Call, commontyped):
    """
    :ivar iodelay: (:class:`iodelay.Expr`)
    """
class CompareT(ast.Compare, commontyped):
    pass
class DictT(ast.Dict, commontyped):
    pass
class DictCompT(ast.DictComp, commontyped, scoped):
    pass
class EllipsisT(ast.Ellipsis, commontyped):
    pass
class GeneratorExpT(ast.GeneratorExp, commontyped, scoped):
    pass
class IfExpT(ast.IfExp, commontyped):
    pass
class LambdaT(ast.Lambda, commontyped, scoped):
    pass
class ListT(ast.List, commontyped):
    pass
class ListCompT(ast.ListComp, commontyped, scoped):
    pass
class NameT(ast.Name, commontyped):
    pass
class NameConstantT(ast.NameConstant, commontyped):
    pass
class NumT(ast.Num, commontyped):
    pass
class SetT(ast.Set, commontyped):
    pass
class SetCompT(ast.SetComp, commontyped, scoped):
    pass
class StrT(ast.Str, commontyped):
    pass
class StarredT(ast.Starred, commontyped):
    pass
class SubscriptT(ast.Subscript, commontyped):
    pass
class TupleT(ast.Tuple, commontyped):
    pass
class UnaryOpT(ast.UnaryOp, commontyped):
    pass
class YieldT(ast.Yield, commontyped):
    pass
class YieldFromT(ast.YieldFrom, commontyped):
    pass

# Novel typed nodes
class CoerceT(ast.expr, commontyped):
    _fields = ('value',) # other_value deliberately not in _fields
class QuoteT(ast.expr, commontyped):
    _fields = ('value',)
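A brief sketch (not part of the commit) of how a typed node is built, mirroring the way ASTSynthesizer.quote constructs numbers later in this commit; the import path and the loc=None placeholder are assumptions.

    from artiq.compiler import asttyped, builtins   # assumed module layout

    node = asttyped.NumT(n=42, ctx=None, type=builtins.TInt32(), loc=None)
    print(node.type)   # the ARTIQ type rides along with the pythonparser fields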
@@ -0,0 +1,245 @@
"""
The :mod:`builtins` module contains the builtin Python
and ARTIQ types, such as int or float.
"""

from collections import OrderedDict
from . import types

# Types

class TNone(types.TMono):
    def __init__(self):
        super().__init__("NoneType")

class TBool(types.TMono):
    def __init__(self):
        super().__init__("bool")

    @staticmethod
    def zero():
        return False

    @staticmethod
    def one():
        return True

class TInt(types.TMono):
    def __init__(self, width=None):
        if width is None:
            width = types.TVar()
        super().__init__("int", {"width": width})

    @staticmethod
    def zero():
        return 0

    @staticmethod
    def one():
        return 1

def TInt32():
    return TInt(types.TValue(32))

def TInt64():
    return TInt(types.TValue(64))

class TFloat(types.TMono):
    def __init__(self):
        super().__init__("float")

    @staticmethod
    def zero():
        return 0.0

    @staticmethod
    def one():
        return 1.0

class TStr(types.TMono):
    def __init__(self):
        super().__init__("str")

class TList(types.TMono):
    def __init__(self, elt=None):
        if elt is None:
            elt = types.TVar()
        super().__init__("list", {"elt": elt})

class TRange(types.TMono):
    def __init__(self, elt=None):
        if elt is None:
            elt = types.TVar()
        super().__init__("range", {"elt": elt})
        self.attributes = OrderedDict([
            ("start", elt),
            ("stop", elt),
            ("step", elt),
        ])

class TException(types.TMono):
    # All exceptions share the same internal layout:
    #  * Pointer to the unique global with the name of the exception (str)
    #    (which also serves as the EHABI type_info).
    #  * File, line and column where it was raised (str, int, int).
    #  * Message, which can contain substitutions {0}, {1} and {2} (str).
    #  * Three 64-bit integers, parameterizing the message (int(width=64)).

    # Keep this in sync with the function ARTIQIRGenerator.alloc_exn.
    attributes = OrderedDict([
        ("__name__", TStr()),
        ("__file__", TStr()),
        ("__line__", TInt(types.TValue(32))),
        ("__col__", TInt(types.TValue(32))),
        ("__func__", TStr()),
        ("__message__", TStr()),
        ("__param0__", TInt(types.TValue(64))),
        ("__param1__", TInt(types.TValue(64))),
        ("__param2__", TInt(types.TValue(64))),
    ])

    def __init__(self, name="Exception"):
        super().__init__(name)

def fn_bool():
    return types.TConstructor(TBool())

def fn_int():
    return types.TConstructor(TInt())

def fn_float():
    return types.TConstructor(TFloat())

def fn_str():
    return types.TConstructor(TStr())

def fn_list():
    return types.TConstructor(TList())

def fn_Exception():
    return types.TExceptionConstructor(TException("Exception"))

def fn_IndexError():
    return types.TExceptionConstructor(TException("IndexError"))

def fn_ValueError():
    return types.TExceptionConstructor(TException("ValueError"))

def fn_ZeroDivisionError():
    return types.TExceptionConstructor(TException("ZeroDivisionError"))

def fn_range():
    return types.TBuiltinFunction("range")

def fn_len():
    return types.TBuiltinFunction("len")

def fn_round():
    return types.TBuiltinFunction("round")

def fn_print():
    return types.TBuiltinFunction("print")

def fn_kernel():
    return types.TBuiltinFunction("kernel")

def fn_parallel():
    return types.TBuiltinFunction("parallel")

def fn_sequential():
    return types.TBuiltinFunction("sequential")

def fn_now():
    return types.TBuiltinFunction("now")

def fn_delay():
    return types.TBuiltinFunction("delay")

def fn_at():
    return types.TBuiltinFunction("at")

def fn_now_mu():
    return types.TBuiltinFunction("now_mu")

def fn_delay_mu():
    return types.TBuiltinFunction("delay_mu")

def fn_at_mu():
    return types.TBuiltinFunction("at_mu")

def fn_mu_to_seconds():
    return types.TBuiltinFunction("mu_to_seconds")

def fn_seconds_to_mu():
    return types.TBuiltinFunction("seconds_to_mu")

# Accessors

def is_none(typ):
    return types.is_mono(typ, "NoneType")

def is_bool(typ):
    return types.is_mono(typ, "bool")

def is_int(typ, width=None):
    if width is not None:
        return types.is_mono(typ, "int", width=width)
    else:
        return types.is_mono(typ, "int")

def get_int_width(typ):
    if is_int(typ):
        return types.get_value(typ.find()["width"])

def is_float(typ):
    return types.is_mono(typ, "float")

def is_str(typ):
    return types.is_mono(typ, "str")

def is_numeric(typ):
    typ = typ.find()
    return isinstance(typ, types.TMono) and \
        typ.name in ('int', 'float')

def is_list(typ, elt=None):
    if elt is not None:
        return types.is_mono(typ, "list", elt=elt)
    else:
        return types.is_mono(typ, "list")

def is_range(typ, elt=None):
    if elt is not None:
        return types.is_mono(typ, "range", {"elt": elt})
    else:
        return types.is_mono(typ, "range")

def is_exception(typ, name=None):
    if name is None:
        return isinstance(typ.find(), TException)
    else:
        return isinstance(typ.find(), TException) and \
            typ.name == name

def is_iterable(typ):
    typ = typ.find()
    return isinstance(typ, types.TMono) and \
        typ.name in ('list', 'range')

def get_iterable_elt(typ):
    if is_iterable(typ):
        return typ.find()["elt"].find()

def is_collection(typ):
    typ = typ.find()
    return isinstance(typ, types.TTuple) or \
        types.is_mono(typ, "list")

def is_allocated(typ):
    return not (is_none(typ) or is_bool(typ) or is_int(typ) or
                is_float(typ) or is_range(typ) or
                types._is_pointer(typ) or types.is_function(typ) or
                types.is_c_function(typ) or types.is_rpc_function(typ) or
                types.is_method(typ) or types.is_tuple(typ) or
                types.is_value(typ))
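A quick illustration (not part of the commit) of how the constructors and accessors above compose; the import path is an assumption about where this module lands in the tree.

    from artiq.compiler import builtins   # assumed module layout

    t = builtins.TInt32()                            # int with width pinned to 32
    print(builtins.is_int(t))                        # True
    print(builtins.get_int_width(t))                 # 32
    print(builtins.is_iterable(builtins.TList(t)))   # True: lists are iterable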
@@ -0,0 +1,676 @@
"""
The :class:`Stitcher` class allows one to transparently combine compiled
Python code and Python code executed on the host system: it resolves
the references to the host objects and translates the functions
annotated as ``@kernel`` when they are referenced.
"""

import sys, os, re, linecache, inspect, textwrap
from collections import OrderedDict, defaultdict

from pythonparser import ast, algorithm, source, diagnostic, parse_buffer
from pythonparser import lexer as source_lexer, parser as source_parser

from Levenshtein import jaro_winkler

from ..language import core as language_core
from . import types, builtins, asttyped, prelude
from .transforms import ASTTypedRewriter, Inferencer, IntMonomorphizer


class ObjectMap:
    def __init__(self):
        self.current_key = 0
        self.forward_map = {}
        self.reverse_map = {}

    def store(self, obj_ref):
        obj_id = id(obj_ref)
        if obj_id in self.reverse_map:
            return self.reverse_map[obj_id]

        self.current_key += 1
        self.forward_map[self.current_key] = obj_ref
        self.reverse_map[obj_id] = self.current_key
        return self.current_key

    def retrieve(self, obj_key):
        return self.forward_map[obj_key]

    def has_rpc(self):
        return any(filter(lambda x: inspect.isfunction(x) or inspect.ismethod(x),
                          self.forward_map.values()))

class ASTSynthesizer:
    def __init__(self, type_map, value_map, quote_function=None, expanded_from=None):
        self.source = ""
        self.source_buffer = source.Buffer(self.source, "<synthesized>")
        self.type_map, self.value_map = type_map, value_map
        self.quote_function = quote_function
        self.expanded_from = expanded_from

    def finalize(self):
        self.source_buffer.source = self.source
        return self.source_buffer

    def _add(self, fragment):
        range_from = len(self.source)
        self.source += fragment
        range_to = len(self.source)
        return source.Range(self.source_buffer, range_from, range_to,
                            expanded_from=self.expanded_from)

    def quote(self, value):
        """Construct an AST fragment equal to `value`."""
        if value is None:
            typ = builtins.TNone()
            return asttyped.NameConstantT(value=value, type=typ,
                                          loc=self._add(repr(value)))
        elif value is True or value is False:
            typ = builtins.TBool()
            return asttyped.NameConstantT(value=value, type=typ,
                                          loc=self._add(repr(value)))
        elif isinstance(value, (int, float)):
            if isinstance(value, int):
                typ = builtins.TInt()
            elif isinstance(value, float):
                typ = builtins.TFloat()
            return asttyped.NumT(n=value, ctx=None, type=typ,
                                 loc=self._add(repr(value)))
        elif isinstance(value, language_core.int):
            typ = builtins.TInt(width=types.TValue(value.width))
            return asttyped.NumT(n=int(value), ctx=None, type=typ,
                                 loc=self._add(repr(value)))
        elif isinstance(value, str):
            return asttyped.StrT(s=value, ctx=None, type=builtins.TStr(),
                                 loc=self._add(repr(value)))
        elif isinstance(value, list):
            begin_loc = self._add("[")
            elts = []
            for index, elt in enumerate(value):
                elts.append(self.quote(elt))
                if index < len(value) - 1:
                    self._add(", ")
            end_loc = self._add("]")
            return asttyped.ListT(elts=elts, ctx=None, type=builtins.TList(),
                                  begin_loc=begin_loc, end_loc=end_loc,
                                  loc=begin_loc.join(end_loc))
        elif inspect.isfunction(value) or inspect.ismethod(value):
            quote_loc = self._add('`')
            repr_loc = self._add(repr(value))
            unquote_loc = self._add('`')
            loc = quote_loc.join(unquote_loc)

            function_name, function_type = self.quote_function(value, self.expanded_from)
            return asttyped.NameT(id=function_name, ctx=None, type=function_type, loc=loc)
        else:
            quote_loc = self._add('`')
            repr_loc = self._add(repr(value))
            unquote_loc = self._add('`')
            loc = quote_loc.join(unquote_loc)

            if isinstance(value, type):
                typ = value
            else:
                typ = type(value)

            if typ in self.type_map:
                instance_type, constructor_type = self.type_map[typ]
            else:
                instance_type = types.TInstance("{}.{}".format(typ.__module__, typ.__qualname__),
                                                OrderedDict())
                instance_type.attributes['__objectid__'] = builtins.TInt(types.TValue(32))

                constructor_type = types.TConstructor(instance_type)
                constructor_type.attributes['__objectid__'] = builtins.TInt(types.TValue(32))
                instance_type.constructor = constructor_type

                self.type_map[typ] = instance_type, constructor_type

            if isinstance(value, type):
                self.value_map[constructor_type].append((value, loc))
                return asttyped.QuoteT(value=value, type=constructor_type,
                                       loc=loc)
            else:
                self.value_map[instance_type].append((value, loc))
                return asttyped.QuoteT(value=value, type=instance_type,
                                       loc=loc)

    def call(self, function_node, args, kwargs):
        """
        Construct an AST fragment calling a function specified by
        an AST node `function_node`, with given arguments.
        """
        arg_nodes = []
        kwarg_nodes = []
        kwarg_locs = []

        name_loc = self._add(function_node.name)
        begin_loc = self._add("(")
        for index, arg in enumerate(args):
            arg_nodes.append(self.quote(arg))
            if index < len(args) - 1:
                self._add(", ")
        if any(args) and any(kwargs):
            self._add(", ")
        for index, kw in enumerate(kwargs):
            arg_loc = self._add(kw)
            equals_loc = self._add("=")
            kwarg_locs.append((arg_loc, equals_loc))
            kwarg_nodes.append(self.quote(kwargs[kw]))
            if index < len(kwargs) - 1:
                self._add(", ")
        end_loc = self._add(")")

        return asttyped.CallT(
            func=asttyped.NameT(id=function_node.name, ctx=None,
                                type=function_node.signature_type,
                                loc=name_loc),
            args=arg_nodes,
            keywords=[ast.keyword(arg=kw, value=value,
                                  arg_loc=arg_loc, equals_loc=equals_loc,
                                  loc=arg_loc.join(value.loc))
                      for kw, value, (arg_loc, equals_loc)
                      in zip(kwargs, kwarg_nodes, kwarg_locs)],
            starargs=None, kwargs=None,
            type=types.TVar(), iodelay=None,
            begin_loc=begin_loc, end_loc=end_loc, star_loc=None, dstar_loc=None,
            loc=name_loc.join(end_loc))

    def assign_local(self, var_name, value):
        name_loc = self._add(var_name)
        _ = self._add(" ")
        equals_loc = self._add("=")
        _ = self._add(" ")
        value_node = self.quote(value)

        var_node = asttyped.NameT(id=var_name, ctx=None, type=value_node.type,
                                  loc=name_loc)

        return ast.Assign(targets=[var_node], value=value_node,
                          op_locs=[equals_loc], loc=name_loc.join(value_node.loc))

    def assign_attribute(self, obj, attr_name, value):
        obj_node = self.quote(obj)
        dot_loc = self._add(".")
        name_loc = self._add(attr_name)
        _ = self._add(" ")
        equals_loc = self._add("=")
        _ = self._add(" ")
        value_node = self.quote(value)

        attr_node = asttyped.AttributeT(value=obj_node, attr=attr_name, ctx=None,
                                        type=value_node.type,
                                        dot_loc=dot_loc, attr_loc=name_loc,
                                        loc=obj_node.loc.join(name_loc))

        return ast.Assign(targets=[attr_node], value=value_node,
                          op_locs=[equals_loc], loc=name_loc.join(value_node.loc))

class StitchingASTTypedRewriter(ASTTypedRewriter):
    def __init__(self, engine, prelude, globals, host_environment, quote):
        super().__init__(engine, prelude)
        self.globals = globals
        self.env_stack.append(self.globals)

        self.host_environment = host_environment
        self.quote = quote

    def visit_Name(self, node):
        typ = super()._try_find_name(node.id)
        if typ is not None:
            # Value from device environment.
            return asttyped.NameT(type=typ, id=node.id, ctx=node.ctx,
                                  loc=node.loc)
        else:
            # Try to find this value in the host environment and quote it.
            if node.id in self.host_environment:
                return self.quote(self.host_environment[node.id], node.loc)
            else:
                suggestion = self._most_similar_ident(node.id)
                if suggestion is not None:
                    diag = diagnostic.Diagnostic("fatal",
                        "name '{name}' is not bound to anything; did you mean '{suggestion}'?",
                        {"name": node.id, "suggestion": suggestion},
                        node.loc)
                    self.engine.process(diag)
                else:
                    diag = diagnostic.Diagnostic("fatal",
                        "name '{name}' is not bound to anything", {"name": node.id},
                        node.loc)
                    self.engine.process(diag)

    def _most_similar_ident(self, id):
        names = set()
        names.update(self.host_environment.keys())
        for typing_env in reversed(self.env_stack):
            names.update(typing_env.keys())

        sorted_names = sorted(names, key=lambda other: jaro_winkler(id, other), reverse=True)
        if len(sorted_names) > 0:
            if jaro_winkler(id, sorted_names[0]) > 0.0:
                return sorted_names[0]

class StitchingInferencer(Inferencer):
    def __init__(self, engine, value_map, quote):
        super().__init__(engine)
        self.value_map = value_map
        self.quote = quote

    def visit_AttributeT(self, node):
        self.generic_visit(node)
        object_type = node.value.type.find()

        # The inferencer can only observe types, not values; however,
        # when we work with host objects, we have to get the values
        # somewhere, since host interpreter does not have types.
        # Since we have categorized every host object we quoted according to
        # its type, we now interrogate every host object we have to ensure
        # that we can successfully serialize the value of the attribute we
        # are now adding at the code generation stage.
        #
        # FIXME: We perform exhaustive checks of every known host object every
        # time an attribute access is visited, which is potentially quadratic.
        # This is done because it is simpler than performing the checks only when:
        #   * a previously unknown attribute is encountered,
        #   * a previously unknown host object is encountered;
        # which would be the optimal solution.
        for object_value, object_loc in self.value_map[object_type]:
            if not hasattr(object_value, node.attr):
                note = diagnostic.Diagnostic("note",
                    "attribute accessed here", {},
                    node.loc)
                diag = diagnostic.Diagnostic("error",
                    "host object does not have an attribute '{attr}'",
                    {"attr": node.attr},
                    object_loc, notes=[note])
                self.engine.process(diag)
                return

            # Figure out what ARTIQ type does the value of the attribute have.
            # We do this by quoting it, as if to serialize. This has some
            # overhead (i.e. synthesizing a source buffer), but has the advantage
            # of having the host-to-ARTIQ mapping code in only one place and
            # also immediately getting proper diagnostics on type errors.
            attr_value = getattr(object_value, node.attr)
            if (inspect.ismethod(attr_value) and hasattr(attr_value.__func__, 'artiq_embedded')
                    and types.is_instance(object_type)):
                # In cases like:
                #     class c:
                #         @kernel
                #         def f(self): pass
                # we want f to be defined on the class, not on the instance.
                attributes = object_type.constructor.attributes
                attr_value = attr_value.__func__
            else:
                attributes = object_type.attributes

            ast = self.quote(attr_value, object_loc.expanded_from)

            def proxy_diagnostic(diag):
                note = diagnostic.Diagnostic("note",
                    "while inferring a type for an attribute '{attr}' of a host object",
                    {"attr": node.attr},
                    node.loc)
                diag.notes.append(note)

                self.engine.process(diag)

            proxy_engine = diagnostic.Engine()
            proxy_engine.process = proxy_diagnostic
            Inferencer(engine=proxy_engine).visit(ast)
            IntMonomorphizer(engine=proxy_engine).visit(ast)

            if node.attr not in attributes:
                # We just figured out what the type should be. Add it.
                attributes[node.attr] = ast.type
            elif attributes[node.attr] != ast.type:
                # Does this conflict with an earlier guess?
                printer = types.TypePrinter()
                diag = diagnostic.Diagnostic("error",
                    "host object has an attribute '{attr}' of type {typea}, which is"
                    " different from previously inferred type {typeb} for the same attribute",
                    {"typea": printer.name(ast.type),
                     "typeb": printer.name(attributes[node.attr]),
                     "attr": node.attr},
                    object_loc)
                self.engine.process(diag)

        super().visit_AttributeT(node)

class TypedtreeHasher(algorithm.Visitor):
    def generic_visit(self, node):
        def freeze(obj):
            if isinstance(obj, ast.AST):
                return self.visit(obj)
            elif isinstance(obj, types.Type):
                return hash(obj.find())
            else:
                # We don't care; only types change during inference.
                pass

        fields = node._fields
        if hasattr(node, '_types'):
            fields = fields + node._types
        return hash(tuple(freeze(getattr(node, field_name)) for field_name in fields))

class Stitcher:
    def __init__(self, engine=None):
        if engine is None:
            self.engine = diagnostic.Engine(all_errors_are_fatal=True)
        else:
            self.engine = engine

        self.name = ""
        self.typedtree = []
        self.inject_at = 0
        self.prelude = prelude.globals()
        self.globals = {}

        self.functions = {}

        self.object_map = ObjectMap()
        self.type_map = {}
        self.value_map = defaultdict(lambda: [])

    def stitch_call(self, function, args, kwargs):
        function_node = self._quote_embedded_function(function)
        self.typedtree.append(function_node)

        # We synthesize source code for the initial call so that
        # diagnostics would have something meaningful to display to the user.
        synthesizer = self._synthesizer()
        call_node = synthesizer.call(function_node, args, kwargs)
        synthesizer.finalize()
        self.typedtree.append(call_node)

    def finalize(self):
        inferencer = StitchingInferencer(engine=self.engine,
                                         value_map=self.value_map,
                                         quote=self._quote)
        hasher = TypedtreeHasher()

        # Iterate inference to fixed point.
        old_typedtree_hash = None
        while True:
            inferencer.visit(self.typedtree)
            typedtree_hash = hasher.visit(self.typedtree)

            if old_typedtree_hash == typedtree_hash:
                break
            old_typedtree_hash = typedtree_hash

        # For every host class we embed, add an appropriate constructor
        # as a global. This is necessary for method lookup, which uses
        # the getconstructor instruction.
        for instance_type, constructor_type in list(self.type_map.values()):
            # Do we have any direct reference to a constructor?
            if len(self.value_map[constructor_type]) > 0:
                # Yes, use it.
                constructor, _constructor_loc = self.value_map[constructor_type][0]
            else:
                # No, extract one from a reference to an instance.
                instance, _instance_loc = self.value_map[instance_type][0]
                constructor = type(instance)

            self.globals[constructor_type.name] = constructor_type

            synthesizer = self._synthesizer()
            ast = synthesizer.assign_local(constructor_type.name, constructor)
            synthesizer.finalize()
            self._inject(ast)

            for attr in constructor_type.attributes:
                if types.is_function(constructor_type.attributes[attr]):
                    synthesizer = self._synthesizer()
                    ast = synthesizer.assign_attribute(constructor, attr,
                                                       getattr(constructor, attr))
                    synthesizer.finalize()
                    self._inject(ast)

        # After we have found all functions, synthesize a module to hold them.
        source_buffer = source.Buffer("", "<synthesized>")
        self.typedtree = asttyped.ModuleT(
            typing_env=self.globals, globals_in_scope=set(),
            body=self.typedtree, loc=source.Range(source_buffer, 0, 0))

    def _inject(self, node):
        self.typedtree.insert(self.inject_at, node)
        self.inject_at += 1

    def _synthesizer(self, expanded_from=None):
        return ASTSynthesizer(expanded_from=expanded_from,
                              type_map=self.type_map,
                              value_map=self.value_map,
                              quote_function=self._quote_function)

    def _quote_embedded_function(self, function):
        if not hasattr(function, "artiq_embedded"):
            raise ValueError("{} is not an embedded function".format(repr(function)))

        # Extract function source.
        embedded_function = function.artiq_embedded.function
        source_code = inspect.getsource(embedded_function)
        filename = embedded_function.__code__.co_filename
        module_name = embedded_function.__globals__['__name__']
        first_line = embedded_function.__code__.co_firstlineno

        # Extract function environment.
        host_environment = dict()
        host_environment.update(embedded_function.__globals__)
        cells = embedded_function.__closure__
        cell_names = embedded_function.__code__.co_freevars
        host_environment.update({var: cells[index] for index, var in enumerate(cell_names)})

        # Find out how indented we are.
        initial_whitespace = re.search(r"^\s*", source_code).group(0)
        initial_indent = len(initial_whitespace.expandtabs())

        # Parse.
        source_buffer = source.Buffer(source_code, filename, first_line)
        lexer = source_lexer.Lexer(source_buffer, version=sys.version_info[0:2],
                                   diagnostic_engine=self.engine)
        lexer.indent = [(initial_indent,
                         source.Range(source_buffer, 0, len(initial_whitespace)),
                         initial_whitespace)]
        parser = source_parser.Parser(lexer, version=sys.version_info[0:2],
                                      diagnostic_engine=self.engine)
        function_node = parser.file_input().body[0]

        # Mangle the name, since we put everything into a single module.
        function_node.name = "{}.{}".format(module_name, function.__qualname__)

        # Normally, LocalExtractor would populate the typing environment
        # of the module with the function name. However, since we run
        # ASTTypedRewriter on the function node directly, we need to do it
        # explicitly.
        self.globals[function_node.name] = types.TVar()

        # Memoize the function before typing it to handle recursive
        # invocations.
        self.functions[function] = function_node.name

        # Rewrite into typed form.
        asttyped_rewriter = StitchingASTTypedRewriter(
            engine=self.engine, prelude=self.prelude,
            globals=self.globals, host_environment=host_environment,
            quote=self._quote)
        return asttyped_rewriter.visit(function_node)

    def _function_loc(self, function):
        filename = function.__code__.co_filename
        line = function.__code__.co_firstlineno
        name = function.__code__.co_name

        source_line = linecache.getline(filename, line)
        while source_line.lstrip().startswith("@"):
            line += 1
            source_line = linecache.getline(filename, line)

        if "<lambda>" in function.__qualname__:
            column = 0 # can't get column of lambda
        else:
            column = re.search("def", source_line).start(0)
        source_buffer = source.Buffer(source_line, filename, line)
        return source.Range(source_buffer, column, column)

    def _call_site_note(self, call_loc, is_syscall):
        if call_loc:
            if is_syscall:
                return [diagnostic.Diagnostic("note",
                    "in system call here", {},
                    call_loc)]
            else:
                return [diagnostic.Diagnostic("note",
                    "in function called remotely here", {},
                    call_loc)]
        else:
            return []

    def _extract_annot(self, function, annot, kind, call_loc, is_syscall):
        if not isinstance(annot, types.Type):
            diag = diagnostic.Diagnostic("error",
                "type annotation for {kind}, '{annot}', is not an ARTIQ type",
                {"kind": kind, "annot": repr(annot)},
                self._function_loc(function),
                notes=self._call_site_note(call_loc, is_syscall))
            self.engine.process(diag)

            return types.TVar()
        else:
            return annot

    def _type_of_param(self, function, loc, param, is_syscall):
        if param.annotation is not inspect.Parameter.empty:
            # Type specified explicitly.
            return self._extract_annot(function, param.annotation,
                                       "argument '{}'".format(param.name), loc,
                                       is_syscall)
        elif is_syscall:
            # Syscalls must be entirely annotated.
            diag = diagnostic.Diagnostic("error",
                "system call argument '{argument}' must have a type annotation",
                {"argument": param.name},
                self._function_loc(function),
                notes=self._call_site_note(loc, is_syscall))
            self.engine.process(diag)
        elif param.default is not inspect.Parameter.empty:
            # Try and infer the type from the default value.
            # This is tricky, because the default value might not have
            # a well-defined type in APython.
            # In this case, we bail out, but mention why we do it.
            ast = self._quote(param.default, None)

            def proxy_diagnostic(diag):
                note = diagnostic.Diagnostic("note",
                    "expanded from here while trying to infer a type for an"
                    " unannotated optional argument '{argument}' from its default value",
                    {"argument": param.name},
                    self._function_loc(function))
                diag.notes.append(note)

                note = self._call_site_note(loc, is_syscall)
                if note:
                    diag.notes += note

                self.engine.process(diag)

            proxy_engine = diagnostic.Engine()
            proxy_engine.process = proxy_diagnostic
            Inferencer(engine=proxy_engine).visit(ast)
            IntMonomorphizer(engine=proxy_engine).visit(ast)

            return ast.type
        else:
            # Let the rest of the program decide.
            return types.TVar()

    def _quote_foreign_function(self, function, loc, syscall):
        signature = inspect.signature(function)

        arg_types = OrderedDict()
        optarg_types = OrderedDict()
        for param in signature.parameters.values():
            if param.kind not in (inspect.Parameter.POSITIONAL_ONLY,
                                  inspect.Parameter.POSITIONAL_OR_KEYWORD):
                # We pretend we don't see *args, kwpostargs=..., **kwargs.
                # Since every method can be still invoked without any arguments
                # going into *args and the slots after it, this is always safe,
                # if sometimes constraining.
                #
                # Accepting POSITIONAL_ONLY is OK, because the compiler
                # desugars the keyword arguments into positional ones internally.
                continue

            if param.default is inspect.Parameter.empty:
                arg_types[param.name] = self._type_of_param(function, loc, param,
                                                            is_syscall=syscall is not None)
            elif syscall is None:
                optarg_types[param.name] = self._type_of_param(function, loc, param,
                                                               is_syscall=False)
            else:
                diag = diagnostic.Diagnostic("error",
                    "system call argument '{argument}' must not have a default value",
                    {"argument": param.name},
                    self._function_loc(function),
                    notes=self._call_site_note(loc, is_syscall=True))
                self.engine.process(diag)

        if signature.return_annotation is not inspect.Signature.empty:
            ret_type = self._extract_annot(function, signature.return_annotation,
                                           "return type", loc, is_syscall=syscall is not None)
        elif syscall is None:
            ret_type = builtins.TNone()
        else: # syscall is not None
            diag = diagnostic.Diagnostic("error",
                "system call must have a return type annotation", {},
                self._function_loc(function),
                notes=self._call_site_note(loc, is_syscall=True))
            self.engine.process(diag)
            ret_type = types.TVar()

        if syscall is None:
            function_type = types.TRPCFunction(arg_types, optarg_types, ret_type,
                                               service=self.object_map.store(function))
            function_name = "rpc${}".format(function_type.service)
        else:
            function_type = types.TCFunction(arg_types, ret_type,
                                             name=syscall)
            function_name = "ffi${}".format(function_type.name)

        self.globals[function_name] = function_type
        self.functions[function] = function_name

        return function_name, function_type

    def _quote_function(self, function, loc):
        if function in self.functions:
            function_name = self.functions[function]
            return function_name, self.globals[function_name]

        if hasattr(function, "artiq_embedded"):
            if function.artiq_embedded.function is not None:
                # Insert the typed AST for the new function and restart inference.
                # It doesn't really matter where we insert as long as it is before
                # the final call.
                function_node = self._quote_embedded_function(function)
                self._inject(function_node)
                return function_node.name, self.globals[function_node.name]
            elif function.artiq_embedded.syscall is not None:
                # Insert a storage-less global whose type instructs the compiler
                # to perform a system call instead of a regular call.
                return self._quote_foreign_function(function, loc,
                                                    syscall=function.artiq_embedded.syscall)
            else:
                assert False
        else:
            # Insert a storage-less global whose type instructs the compiler
            # to perform an RPC instead of a regular call.
            return self._quote_foreign_function(function, loc,
                                                syscall=None)

    def _quote(self, value, loc):
        synthesizer = self._synthesizer(loc)
        node = synthesizer.quote(value)
        synthesizer.finalize()
        return node
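A rough sketch (not part of the commit) of how the embedding entry points above are meant to be driven; the import path and the shape of the caller are assumptions.

    from artiq.compiler.embedding import Stitcher   # assumed path

    def embed(kernel_fn, *args, **kwargs):
        stitcher = Stitcher()
        stitcher.stitch_call(kernel_fn, args, kwargs)  # quote the function and its call site
        stitcher.finalize()                            # iterate inference to a fixed point
        # stitcher.typedtree is now a ModuleT for the rest of the compiler;
        # stitcher.object_map maps RPC service ids back to host objects.
        return stitcher.typedtree, stitcher.object_map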
@@ -0,0 +1,249 @@
"""
The :mod:`iodelay` module contains the classes describing
the statically inferred RTIO delay arising from executing
a function.
"""

from functools import reduce

class Expr:
    def __add__(lhs, rhs):
        assert isinstance(rhs, Expr)
        return Add(lhs, rhs)
    __iadd__ = __add__

    def __sub__(lhs, rhs):
        assert isinstance(rhs, Expr)
        return Sub(lhs, rhs)
    __isub__ = __sub__

    def __mul__(lhs, rhs):
        assert isinstance(rhs, Expr)
        return Mul(lhs, rhs)
    __imul__ = __mul__

    def __truediv__(lhs, rhs):
        assert isinstance(rhs, Expr)
        return TrueDiv(lhs, rhs)
    __itruediv__ = __truediv__

    def __floordiv__(lhs, rhs):
        assert isinstance(rhs, Expr)
        return FloorDiv(lhs, rhs)
    __ifloordiv__ = __floordiv__

    def __ne__(lhs, rhs):
        return not (lhs == rhs)

    def free_vars(self):
        return set()

    def fold(self, vars=None):
        return self

class Const(Expr):
    _priority = 1

    def __init__(self, value):
        assert isinstance(value, (int, float))
        self.value = value

    def __str__(self):
        return str(self.value)

    def __eq__(lhs, rhs):
        return rhs.__class__ == lhs.__class__ and lhs.value == rhs.value

    def eval(self, env):
        return self.value

class Var(Expr):
    _priority = 1

    def __init__(self, name):
        assert isinstance(name, str)
        self.name = name

    def __str__(self):
        return self.name

    def __eq__(lhs, rhs):
        return rhs.__class__ == lhs.__class__ and lhs.name == rhs.name

    def free_vars(self):
        return {self.name}

    def fold(self, vars=None):
        if vars is not None and self.name in vars:
            return vars[self.name]
        else:
            return self

class Conv(Expr):
    _priority = 1

    def __init__(self, operand, ref_period):
        assert isinstance(operand, Expr)
        assert isinstance(ref_period, float)
        self.operand, self.ref_period = operand, ref_period

    def __eq__(lhs, rhs):
        return rhs.__class__ == lhs.__class__ and \
            lhs.ref_period == rhs.ref_period and \
            lhs.operand == rhs.operand

    def free_vars(self):
        return self.operand.free_vars()

class MUToS(Conv):
    def __str__(self):
        return "mu->s({})".format(self.operand)

    def eval(self, env):
        return self.operand.eval(env) * self.ref_period

    def fold(self, vars=None):
        operand = self.operand.fold(vars)
        if isinstance(operand, Const):
            return Const(operand.value * self.ref_period)
        else:
            return MUToS(operand, ref_period=self.ref_period)

class SToMU(Conv):
    def __str__(self):
        return "s->mu({})".format(self.operand)

    def eval(self, env):
        return int(self.operand.eval(env) / self.ref_period)

    def fold(self, vars=None):
        operand = self.operand.fold(vars)
        if isinstance(operand, Const):
            return Const(int(operand.value / self.ref_period))
        else:
            return SToMU(operand, ref_period=self.ref_period)

class BinOp(Expr):
    def __init__(self, lhs, rhs):
        self.lhs, self.rhs = lhs, rhs

    def __str__(self):
        lhs = "({})".format(self.lhs) if self.lhs._priority > self._priority else str(self.lhs)
        rhs = "({})".format(self.rhs) if self.rhs._priority > self._priority else str(self.rhs)
        return "{} {} {}".format(lhs, self._symbol, rhs)

    def __eq__(lhs, rhs):
        return rhs.__class__ == lhs.__class__ and lhs.lhs == rhs.lhs and lhs.rhs == rhs.rhs

    def eval(self, env):
        return self.__class__._op(self.lhs.eval(env), self.rhs.eval(env))

    def free_vars(self):
        return self.lhs.free_vars() | self.rhs.free_vars()

    def _fold_binop(self, lhs, rhs):
        if isinstance(lhs, Const) and lhs.__class__ == rhs.__class__:
            return Const(self.__class__._op(lhs.value, rhs.value))
|
||||||
|
elif isinstance(lhs, (MUToS, SToMU)) and lhs.__class__ == rhs.__class__:
|
||||||
|
return lhs.__class__(self.__class__(lhs.operand, rhs.operand),
|
||||||
|
ref_period=lhs.ref_period).fold()
|
||||||
|
else:
|
||||||
|
return self.__class__(lhs, rhs)
|
||||||
|
|
||||||
|
def fold(self, vars=None):
|
||||||
|
return self._fold_binop(self.lhs.fold(vars), self.rhs.fold(vars))
|
||||||
|
|
||||||
|
class BinOpFixpoint(BinOp):
|
||||||
|
def _fold_binop(self, lhs, rhs):
|
||||||
|
if isinstance(lhs, Const) and lhs.value == self._fixpoint:
|
||||||
|
return rhs
|
||||||
|
elif isinstance(rhs, Const) and rhs.value == self._fixpoint:
|
||||||
|
return lhs
|
||||||
|
else:
|
||||||
|
return super()._fold_binop(lhs, rhs)
|
||||||
|
|
||||||
|
class Add(BinOpFixpoint):
|
||||||
|
_priority = 2
|
||||||
|
_symbol = "+"
|
||||||
|
_op = lambda a, b: a + b
|
||||||
|
_fixpoint = 0
|
||||||
|
|
||||||
|
class Mul(BinOpFixpoint):
|
||||||
|
_priority = 1
|
||||||
|
_symbol = "*"
|
||||||
|
_op = lambda a, b: a * b
|
||||||
|
_fixpoint = 1
|
||||||
|
|
||||||
|
class Sub(BinOp):
|
||||||
|
_priority = 2
|
||||||
|
_symbol = "-"
|
||||||
|
_op = lambda a, b: a - b
|
||||||
|
|
||||||
|
def _fold_binop(self, lhs, rhs):
|
||||||
|
if isinstance(rhs, Const) and rhs.value == 0:
|
||||||
|
return lhs
|
||||||
|
else:
|
||||||
|
return super()._fold_binop(lhs, rhs)
|
||||||
|
|
||||||
|
class Div(BinOp):
|
||||||
|
def _fold_binop(self, lhs, rhs):
|
||||||
|
if isinstance(rhs, Const) and rhs.value == 1:
|
||||||
|
return lhs
|
||||||
|
else:
|
||||||
|
return super()._fold_binop(lhs, rhs)
|
||||||
|
|
||||||
|
class TrueDiv(Div):
|
||||||
|
_priority = 1
|
||||||
|
_symbol = "/"
|
||||||
|
_op = lambda a, b: a / b if b != 0 else 0
|
||||||
|
|
||||||
|
class FloorDiv(Div):
|
||||||
|
_priority = 1
|
||||||
|
_symbol = "//"
|
||||||
|
_op = lambda a, b: a // b if b != 0 else 0
|
||||||
|
|
||||||
|
class Max(Expr):
|
||||||
|
_priority = 1
|
||||||
|
|
||||||
|
def __init__(self, operands):
|
||||||
|
assert isinstance(operands, list)
|
||||||
|
assert all([isinstance(operand, Expr) for operand in operands])
|
||||||
|
assert operands != []
|
||||||
|
self.operands = operands
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return "max({})".format(", ".join([str(operand) for operand in self.operands]))
|
||||||
|
|
||||||
|
def __eq__(lhs, rhs):
|
||||||
|
return rhs.__class__ == lhs.__class__ and lhs.operands == rhs.operands
|
||||||
|
|
||||||
|
def free_vars(self):
|
||||||
|
return reduce(lambda a, b: a | b, [operand.free_vars() for operand in self.operands])
|
||||||
|
|
||||||
|
def eval(self, env):
|
||||||
|
return max([operand.eval() for operand in self.operands])
|
||||||
|
|
||||||
|
def fold(self, vars=None):
|
||||||
|
consts, exprs = [], []
|
||||||
|
for operand in self.operands:
|
||||||
|
operand = operand.fold(vars)
|
||||||
|
if isinstance(operand, Const):
|
||||||
|
consts.append(operand.value)
|
||||||
|
elif operand not in exprs:
|
||||||
|
exprs.append(operand)
|
||||||
|
if len(consts) > 0:
|
||||||
|
exprs.append(Const(max(consts)))
|
||||||
|
if len(exprs) == 1:
|
||||||
|
return exprs[0]
|
||||||
|
else:
|
||||||
|
return Max(exprs)
|
||||||
|
|
||||||
|
def is_const(expr, value=None):
|
||||||
|
expr = expr.fold()
|
||||||
|
if value is None:
|
||||||
|
return isinstance(expr, Const)
|
||||||
|
else:
|
||||||
|
return isinstance(expr, Const) and expr.value == value
|
||||||
|
|
||||||
|
def is_zero(expr):
|
||||||
|
return is_const(expr, 0)
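A minimal usage sketch of the expression algebra above (not part of the diff; it assumes this module is importable as artiq.compiler.iodelay):

    from artiq.compiler import iodelay

    # Eight machine units per iteration, plus one microsecond converted to machine units.
    expr = iodelay.Var("n") * iodelay.Const(8) + \
           iodelay.SToMU(iodelay.Const(1e-6), ref_period=1e-6)
    print(expr)               # n * 8 + s->mu(1e-06)
    print(expr.free_vars())   # {'n'}

    folded = expr.fold({"n": iodelay.Const(4)})
    assert iodelay.is_const(folded, 33)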
|
File diff suppressed because it is too large
|
@@ -0,0 +1,94 @@
|
||||||
|
"""
|
||||||
|
The :class:`Module` class encapsulates a single Python module,
|
||||||
|
which corresponds to a single ARTIQ translation unit (one LLVM
|
||||||
|
bitcode file and one object file, unless LTO is used).
|
||||||
|
A :class:`Module` can be created from a typed AST.
|
||||||
|
|
||||||
|
The :class:`Source` class parses a single source file or
|
||||||
|
string and infers types for it using a trivial :mod:`prelude`.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
from pythonparser import source, diagnostic, parse_buffer
|
||||||
|
from . import prelude, types, transforms, analyses, validators
|
||||||
|
|
||||||
|
class Source:
|
||||||
|
def __init__(self, source_buffer, engine=None):
|
||||||
|
if engine is None:
|
||||||
|
self.engine = diagnostic.Engine(all_errors_are_fatal=True)
|
||||||
|
else:
|
||||||
|
self.engine = engine
|
||||||
|
|
||||||
|
self.object_map = None
|
||||||
|
|
||||||
|
self.name, _ = os.path.splitext(os.path.basename(source_buffer.name))
|
||||||
|
|
||||||
|
asttyped_rewriter = transforms.ASTTypedRewriter(engine=engine,
|
||||||
|
prelude=prelude.globals())
|
||||||
|
inferencer = transforms.Inferencer(engine=engine)
|
||||||
|
|
||||||
|
self.parsetree, self.comments = parse_buffer(source_buffer, engine=engine)
|
||||||
|
self.typedtree = asttyped_rewriter.visit(self.parsetree)
|
||||||
|
self.globals = asttyped_rewriter.globals
|
||||||
|
inferencer.visit(self.typedtree)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_string(cls, source_string, name="input.py", first_line=1, engine=None):
|
||||||
|
return cls(source.Buffer(source_string + "\n", name, first_line), engine=engine)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_filename(cls, filename, engine=None):
|
||||||
|
with open(filename) as f:
|
||||||
|
return cls(source.Buffer(f.read(), filename, 1), engine=engine)
|
||||||
|
|
||||||
|
class Module:
|
||||||
|
def __init__(self, src, ref_period=1e-6):
|
||||||
|
self.engine = src.engine
|
||||||
|
self.object_map = src.object_map
|
||||||
|
|
||||||
|
int_monomorphizer = transforms.IntMonomorphizer(engine=self.engine)
|
||||||
|
inferencer = transforms.Inferencer(engine=self.engine)
|
||||||
|
monomorphism_validator = validators.MonomorphismValidator(engine=self.engine)
|
||||||
|
escape_validator = validators.EscapeValidator(engine=self.engine)
|
||||||
|
iodelay_estimator = transforms.IODelayEstimator(engine=self.engine,
|
||||||
|
ref_period=ref_period)
|
||||||
|
artiq_ir_generator = transforms.ARTIQIRGenerator(engine=self.engine,
|
||||||
|
module_name=src.name,
|
||||||
|
ref_period=ref_period)
|
||||||
|
dead_code_eliminator = transforms.DeadCodeEliminator(engine=self.engine)
|
||||||
|
local_access_validator = validators.LocalAccessValidator(engine=self.engine)
|
||||||
|
devirtualization = analyses.Devirtualization()
|
||||||
|
interleaver = transforms.Interleaver(engine=self.engine)
|
||||||
|
|
||||||
|
self.name = src.name
|
||||||
|
self.globals = src.globals
|
||||||
|
int_monomorphizer.visit(src.typedtree)
|
||||||
|
inferencer.visit(src.typedtree)
|
||||||
|
monomorphism_validator.visit(src.typedtree)
|
||||||
|
escape_validator.visit(src.typedtree)
|
||||||
|
iodelay_estimator.visit_fixpoint(src.typedtree)
|
||||||
|
devirtualization.visit(src.typedtree)
|
||||||
|
self.artiq_ir = artiq_ir_generator.visit(src.typedtree)
|
||||||
|
artiq_ir_generator.annotate_calls(devirtualization)
|
||||||
|
dead_code_eliminator.process(self.artiq_ir)
|
||||||
|
local_access_validator.process(self.artiq_ir)
|
||||||
|
interleaver.process(self.artiq_ir)
|
||||||
|
|
||||||
|
def build_llvm_ir(self, target):
|
||||||
|
"""Compile the module to LLVM IR for the specified target."""
|
||||||
|
llvm_ir_generator = transforms.LLVMIRGenerator(engine=self.engine,
|
||||||
|
module_name=self.name, target=target,
|
||||||
|
object_map=self.object_map)
|
||||||
|
return llvm_ir_generator.process(self.artiq_ir)
|
||||||
|
|
||||||
|
def entry_point(self):
|
||||||
|
"""Return the name of the function that is the entry point of this module."""
|
||||||
|
if self.name != "":
|
||||||
|
return self.name + ".__modinit__"
|
||||||
|
else:
|
||||||
|
return "__modinit__"
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
printer = types.TypePrinter()
|
||||||
|
globals = ["%s: %s" % (var, printer.name(self.globals[var])) for var in self.globals]
|
||||||
|
return "<artiq.compiler.Module %s {\n %s\n}>" % (repr(self.name), ",\n ".join(globals))
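For orientation, a sketch of how these classes are driven (the helper tools later in this diff do the same; the artiq.compiler.targets import path is an assumption based on this layout):

    from artiq.compiler import Module, Source
    from artiq.compiler.targets import NativeTarget

    src = Source.from_string("def f():\n    return 2 + 2\n", name="example.py")
    mod = Module(src)            # runs inference, the validators and the ARTIQ IR passes
    print(repr(mod))             # module-level names with their inferred types
    print(mod.entry_point())     # example.__modinit__
    llmod = mod.build_llvm_ir(NativeTarget())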
|
|
@@ -0,0 +1,44 @@
"""
The :mod:`prelude` module contains the initial global environment
in which ARTIQ kernels are evaluated.
"""

from . import builtins

def globals():
    return {
        # Value constructors
        "bool": builtins.fn_bool(),
        "int": builtins.fn_int(),
        "float": builtins.fn_float(),
        "list": builtins.fn_list(),
        "range": builtins.fn_range(),

        # Exception constructors
        "Exception": builtins.fn_Exception(),
        "IndexError": builtins.fn_IndexError(),
        "ValueError": builtins.fn_ValueError(),
        "ZeroDivisionError": builtins.fn_ZeroDivisionError(),

        # Built-in Python functions
        "len": builtins.fn_len(),
        "round": builtins.fn_round(),
        "print": builtins.fn_print(),

        # ARTIQ decorators
        "kernel": builtins.fn_kernel(),

        # ARTIQ context managers
        "parallel": builtins.fn_parallel(),
        "sequential": builtins.fn_sequential(),

        # ARTIQ time management functions
        "now": builtins.fn_now(),
        "delay": builtins.fn_delay(),
        "at": builtins.fn_at(),
        "now_mu": builtins.fn_now_mu(),
        "delay_mu": builtins.fn_delay_mu(),
        "at_mu": builtins.fn_at_mu(),
        "mu_to_seconds": builtins.fn_mu_to_seconds(),
        "seconds_to_mu": builtins.fn_seconds_to_mu(),
    }
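These bindings are the entire global environment a kernel starts with, so kernel code can refer to them without any import; as a sketch (not part of the diff):

    @kernel
    def pulse():
        with parallel:
            delay(10e-6)
            delay_mu(seconds_to_mu(10e-6))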
|
|
@@ -0,0 +1,169 @@
|
||||||
|
import os, sys, tempfile, subprocess
|
||||||
|
from artiq.compiler import types
|
||||||
|
from llvmlite_artiq import ir as ll, binding as llvm
|
||||||
|
|
||||||
|
llvm.initialize()
|
||||||
|
llvm.initialize_all_targets()
|
||||||
|
llvm.initialize_all_asmprinters()
|
||||||
|
|
||||||
|
class RunTool:
|
||||||
|
def __init__(self, pattern, **tempdata):
|
||||||
|
self.files = []
|
||||||
|
self.pattern = pattern
|
||||||
|
self.tempdata = tempdata
|
||||||
|
|
||||||
|
def maketemp(self, data):
|
||||||
|
f = tempfile.NamedTemporaryFile()
|
||||||
|
f.write(data)
|
||||||
|
f.flush()
|
||||||
|
self.files.append(f)
|
||||||
|
return f
|
||||||
|
|
||||||
|
def __enter__(self):
|
||||||
|
tempfiles = {}
|
||||||
|
tempnames = {}
|
||||||
|
for key in self.tempdata:
|
||||||
|
tempfiles[key] = self.maketemp(self.tempdata[key])
|
||||||
|
tempnames[key] = tempfiles[key].name
|
||||||
|
|
||||||
|
cmdline = []
|
||||||
|
for argument in self.pattern:
|
||||||
|
cmdline.append(argument.format(**tempnames))
|
||||||
|
|
||||||
|
process = subprocess.Popen(cmdline, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||||
|
stdout, stderr = process.communicate()
|
||||||
|
if process.returncode != 0:
|
||||||
|
raise Exception("{} invocation failed: {}".
|
||||||
|
format(cmdline[0], stderr.decode('utf-8')))
|
||||||
|
|
||||||
|
tempfiles["__stdout__"] = stdout.decode('utf-8')
|
||||||
|
return tempfiles
|
||||||
|
|
||||||
|
def __exit__(self, exc_typ, exc_value, exc_trace):
|
||||||
|
for f in self.files:
|
||||||
|
f.close()
|
||||||
|
|
||||||
|
class Target:
|
||||||
|
"""
|
||||||
|
A description of the target environment where the binaries
|
||||||
|
generated by the ARTIQ compiler will be deployed.
|
||||||
|
|
||||||
|
:var triple: (string)
|
||||||
|
LLVM target triple, e.g. ``"or1k"``
|
||||||
|
:var data_layout: (string)
|
||||||
|
LLVM target data layout, e.g. ``"E-m:e-p:32:32-i64:32-f64:32-v64:32-v128:32-a:0:32-n32"``
|
||||||
|
:var features: (list of string)
|
||||||
|
LLVM target CPU features, e.g. ``["mul", "div", "ffl1"]``
|
||||||
|
:var print_function: (string)
|
||||||
|
Name of a formatted print function (with the signature of ``printf``)
|
||||||
|
provided by the target, e.g. ``"printf"``.
|
||||||
|
"""
|
||||||
|
triple = "unknown"
|
||||||
|
data_layout = ""
|
||||||
|
features = []
|
||||||
|
print_function = "printf"
|
||||||
|
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self.llcontext = ll.Context()
|
||||||
|
|
||||||
|
def compile(self, module):
|
||||||
|
"""Compile the module to a relocatable object for this target."""
|
||||||
|
|
||||||
|
if os.getenv("ARTIQ_DUMP_SIG"):
|
||||||
|
print("====== MODULE_SIGNATURE DUMP ======", file=sys.stderr)
|
||||||
|
print(module, file=sys.stderr)
|
||||||
|
|
||||||
|
if os.getenv("ARTIQ_DUMP_IR"):
|
||||||
|
print("====== ARTIQ IR DUMP ======", file=sys.stderr)
|
||||||
|
type_printer = types.TypePrinter()
|
||||||
|
for function in module.artiq_ir:
|
||||||
|
print(function.as_entity(type_printer), file=sys.stderr)
|
||||||
|
|
||||||
|
llmod = module.build_llvm_ir(self)
|
||||||
|
llparsedmod = llvm.parse_assembly(str(llmod))
|
||||||
|
llparsedmod.verify()
|
||||||
|
|
||||||
|
if os.getenv("ARTIQ_DUMP_LLVM"):
|
||||||
|
print("====== LLVM IR DUMP ======", file=sys.stderr)
|
||||||
|
print(str(llparsedmod), file=sys.stderr)
|
||||||
|
|
||||||
|
llpassmgrbuilder = llvm.create_pass_manager_builder()
|
||||||
|
llpassmgrbuilder.opt_level = 2 # -O2
|
||||||
|
llpassmgrbuilder.size_level = 1 # -Os
|
||||||
|
|
||||||
|
llpassmgr = llvm.create_module_pass_manager()
|
||||||
|
llpassmgrbuilder.populate(llpassmgr)
|
||||||
|
llpassmgr.run(llparsedmod)
|
||||||
|
|
||||||
|
if os.getenv("ARTIQ_DUMP_LLVM"):
|
||||||
|
print("====== LLVM IR DUMP (OPTIMIZED) ======", file=sys.stderr)
|
||||||
|
print(str(llparsedmod), file=sys.stderr)
|
||||||
|
|
||||||
|
lltarget = llvm.Target.from_triple(self.triple)
|
||||||
|
llmachine = lltarget.create_target_machine(
|
||||||
|
features=",".join(["+{}".format(f) for f in self.features]),
|
||||||
|
reloc="pic", codemodel="default")
|
||||||
|
|
||||||
|
if os.getenv("ARTIQ_DUMP_ASSEMBLY"):
|
||||||
|
print("====== ASSEMBLY DUMP ======", file=sys.stderr)
|
||||||
|
print(llmachine.emit_assembly(llparsedmod), file=sys.stderr)
|
||||||
|
|
||||||
|
return llmachine.emit_object(llparsedmod)
|
||||||
|
|
||||||
|
def link(self, objects, init_fn):
|
||||||
|
"""Link the relocatable objects into a shared library for this target."""
|
||||||
|
with RunTool([self.triple + "-ld", "-shared", "--eh-frame-hdr", "-init", init_fn] +
|
||||||
|
["{{obj{}}}".format(index) for index in range(len(objects))] +
|
||||||
|
["-o", "{output}"],
|
||||||
|
output=b"",
|
||||||
|
**{"obj{}".format(index): obj for index, obj in enumerate(objects)}) \
|
||||||
|
as results:
|
||||||
|
library = results["output"].read()
|
||||||
|
|
||||||
|
if os.getenv("ARTIQ_DUMP_ELF"):
|
||||||
|
shlib_temp = tempfile.NamedTemporaryFile(suffix=".so", delete=False)
|
||||||
|
shlib_temp.write(library)
|
||||||
|
shlib_temp.close()
|
||||||
|
print("====== SHARED LIBRARY DUMP ======", file=sys.stderr)
|
||||||
|
print("Shared library dumped as {}".format(shlib_temp.name), file=sys.stderr)
|
||||||
|
|
||||||
|
return library
|
||||||
|
|
||||||
|
def compile_and_link(self, modules):
|
||||||
|
return self.link([self.compile(module) for module in modules],
|
||||||
|
init_fn=modules[0].entry_point())
|
||||||
|
|
||||||
|
def strip(self, library):
|
||||||
|
with RunTool([self.triple + "-strip", "--strip-debug", "{library}", "-o", "{output}"],
|
||||||
|
library=library, output=b"") \
|
||||||
|
as results:
|
||||||
|
return results["output"].read()
|
||||||
|
|
||||||
|
def symbolize(self, library, addresses):
|
||||||
|
# Addresses point one instruction past the jump; offset them back by 1.
|
||||||
|
offset_addresses = [hex(addr - 1) for addr in addresses]
|
||||||
|
with RunTool([self.triple + "-addr2line", "--functions", "--inlines",
|
||||||
|
"--exe={library}"] + offset_addresses,
|
||||||
|
library=library) \
|
||||||
|
as results:
|
||||||
|
lines = results["__stdout__"].rstrip().split("\n")
|
||||||
|
backtrace = []
|
||||||
|
for function_name, location, address in zip(lines[::2], lines[1::2], addresses):
|
||||||
|
filename, line = location.rsplit(":", 1)
|
||||||
|
if filename == "??":
|
||||||
|
continue
|
||||||
|
# can't get column out of addr2line D:
|
||||||
|
backtrace.append((filename, int(line), -1, function_name, address))
|
||||||
|
return backtrace
|
||||||
|
|
||||||
|
class NativeTarget(Target):
|
||||||
|
def __init__(self):
|
||||||
|
super().__init__()
|
||||||
|
self.triple = llvm.get_default_triple()
|
||||||
|
|
||||||
|
class OR1KTarget(Target):
|
||||||
|
triple = "or1k-linux"
|
||||||
|
data_layout = "E-m:e-p:32:32-i64:32-f64:32-v64:32-v128:32-a:0:32-n32"
|
||||||
|
features = ["mul", "div", "ffl1", "cmov", "addc"]
|
||||||
|
print_function = "lognonl"
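Putting the pieces together (this mirrors the compile and benchmark tools below; "experiment.py" is a placeholder filename, and the or1k-linux binutils must be on PATH):

    from artiq.compiler import Module, Source
    from artiq.compiler.targets import OR1KTarget

    modules = [Module(Source.from_filename("experiment.py"))]
    shlib = OR1KTarget().compile_and_link(modules)   # bytes of an ELF shared object
    with open("experiment.so", "wb") as f:
        f.write(shlib)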
|
|
@@ -0,0 +1,21 @@
import time, cProfile as profile, pstats

def benchmark(f, name):
    profiler = profile.Profile()
    profiler.enable()

    start = time.perf_counter()
    end = 0
    runs = 0
    while end - start < 5 or runs < 10:
        f()
        runs += 1
        end = time.perf_counter()

    profiler.create_stats()

    print("{} {} runs: {:.2f}s, {:.2f}ms/run".format(
        runs, name, end - start, (end - start) / runs * 1000))

    stats = pstats.Stats(profiler)
    stats.strip_dirs().sort_stats('time').print_stats(10)
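benchmark() keeps calling f until at least five seconds have elapsed and at least ten runs have completed, then prints the total and per-run time followed by the ten hottest profile entries. It is used like this in the tools below:

    benchmark(lambda: OR1KTarget().compile_and_link([module]),
              "LLVM optimization and linking")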
|
|
@@ -0,0 +1,35 @@
|
||||||
|
import sys, os
|
||||||
|
|
||||||
|
from artiq.master.databases import DeviceDB
|
||||||
|
from artiq.master.worker_db import DeviceManager
|
||||||
|
|
||||||
|
from artiq.coredevice.core import Core, CompileError
|
||||||
|
|
||||||
|
def main():
|
||||||
|
if len(sys.argv) > 1 and sys.argv[1] == "+compile":
|
||||||
|
del sys.argv[1]
|
||||||
|
compile_only = True
|
||||||
|
else:
|
||||||
|
compile_only = False
|
||||||
|
|
||||||
|
ddb_path = os.path.join(os.path.dirname(sys.argv[1]), "device_db.pyon")
|
||||||
|
dmgr = DeviceManager(DeviceDB(ddb_path))
|
||||||
|
|
||||||
|
with open(sys.argv[1]) as f:
|
||||||
|
testcase_code = compile(f.read(), f.name, "exec")
|
||||||
|
testcase_vars = {'__name__': 'testbench', 'dmgr': dmgr}
|
||||||
|
exec(testcase_code, testcase_vars)
|
||||||
|
|
||||||
|
try:
|
||||||
|
core = dmgr.get("core")
|
||||||
|
if compile_only:
|
||||||
|
core.compile(testcase_vars["entrypoint"], (), {})
|
||||||
|
else:
|
||||||
|
core.run(testcase_vars["entrypoint"], (), {})
|
||||||
|
print(core.comm.get_log())
|
||||||
|
core.comm.clear_log()
|
||||||
|
except CompileError as error:
|
||||||
|
print("\n".join(error.__cause__.diagnostic.render(only_line=True)))
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
|
@@ -0,0 +1,83 @@
|
||||||
|
import sys, fileinput, os
|
||||||
|
from pythonparser import source, diagnostic, algorithm, parse_buffer
|
||||||
|
from .. import prelude, types
|
||||||
|
from ..transforms import ASTTypedRewriter, Inferencer, IntMonomorphizer
|
||||||
|
|
||||||
|
class Printer(algorithm.Visitor):
|
||||||
|
"""
|
||||||
|
:class:`Printer` prints ``:`` and the node type after every typed node,
|
||||||
|
and ``->`` and the node type before the colon in a function definition.
|
||||||
|
|
||||||
|
In almost all cases (except function definition) this does not result
|
||||||
|
in valid Python syntax.
|
||||||
|
|
||||||
|
:ivar rewriter: (:class:`pythonparser.source.Rewriter`) rewriter instance
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, buf):
|
||||||
|
self.rewriter = source.Rewriter(buf)
|
||||||
|
self.type_printer = types.TypePrinter()
|
||||||
|
|
||||||
|
def rewrite(self):
|
||||||
|
return self.rewriter.rewrite()
|
||||||
|
|
||||||
|
def visit_FunctionDefT(self, node):
|
||||||
|
super().generic_visit(node)
|
||||||
|
|
||||||
|
self.rewriter.insert_before(node.colon_loc,
|
||||||
|
"->{}".format(self.type_printer.name(node.return_type)))
|
||||||
|
|
||||||
|
def visit_ExceptHandlerT(self, node):
|
||||||
|
super().generic_visit(node)
|
||||||
|
|
||||||
|
if node.name_loc:
|
||||||
|
self.rewriter.insert_after(node.name_loc,
|
||||||
|
":{}".format(self.type_printer.name(node.name_type)))
|
||||||
|
|
||||||
|
def generic_visit(self, node):
|
||||||
|
super().generic_visit(node)
|
||||||
|
|
||||||
|
if hasattr(node, "type"):
|
||||||
|
self.rewriter.insert_after(node.loc,
|
||||||
|
":{}".format(self.type_printer.name(node.type)))
|
||||||
|
|
||||||
|
def main():
|
||||||
|
if len(sys.argv) > 1 and sys.argv[1] == "+mono":
|
||||||
|
del sys.argv[1]
|
||||||
|
monomorphize = True
|
||||||
|
else:
|
||||||
|
monomorphize = False
|
||||||
|
|
||||||
|
if len(sys.argv) > 1 and sys.argv[1] == "+diag":
|
||||||
|
del sys.argv[1]
|
||||||
|
def process_diagnostic(diag):
|
||||||
|
print("\n".join(diag.render(only_line=True)))
|
||||||
|
if diag.level == "fatal":
|
||||||
|
exit()
|
||||||
|
else:
|
||||||
|
def process_diagnostic(diag):
|
||||||
|
print("\n".join(diag.render()))
|
||||||
|
if diag.level in ("fatal", "error"):
|
||||||
|
exit(1)
|
||||||
|
|
||||||
|
engine = diagnostic.Engine()
|
||||||
|
engine.process = process_diagnostic
|
||||||
|
|
||||||
|
buf = source.Buffer("".join(fileinput.input()).expandtabs(),
|
||||||
|
os.path.basename(fileinput.filename()))
|
||||||
|
parsed, comments = parse_buffer(buf, engine=engine)
|
||||||
|
typed = ASTTypedRewriter(engine=engine, prelude=prelude.globals()).visit(parsed)
|
||||||
|
Inferencer(engine=engine).visit(typed)
|
||||||
|
if monomorphize:
|
||||||
|
IntMonomorphizer(engine=engine).visit(typed)
|
||||||
|
Inferencer(engine=engine).visit(typed)
|
||||||
|
|
||||||
|
printer = Printer(buf)
|
||||||
|
printer.visit(typed)
|
||||||
|
for comment in comments:
|
||||||
|
if comment.text.find("CHECK") >= 0:
|
||||||
|
printer.rewriter.remove(comment.loc)
|
||||||
|
print(printer.rewrite().source)
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
|
@@ -0,0 +1,19 @@
import sys, fileinput
from pythonparser import diagnostic
from .. import Module, Source

def main():
    def process_diagnostic(diag):
        print("\n".join(diag.render()))
        if diag.level in ("fatal", "error"):
            exit(1)

    engine = diagnostic.Engine()
    engine.process = process_diagnostic

    mod = Module(Source.from_string("".join(fileinput.input()).expandtabs(), engine=engine))
    for fn in mod.artiq_ir:
        print(fn)

if __name__ == "__main__":
    main()
|
|
@@ -0,0 +1,35 @@
|
||||||
|
import os, sys, fileinput, ctypes
|
||||||
|
from pythonparser import diagnostic
|
||||||
|
from llvmlite_artiq import binding as llvm
|
||||||
|
from .. import Module, Source
|
||||||
|
from ..targets import NativeTarget
|
||||||
|
|
||||||
|
def main():
|
||||||
|
libartiq_support = os.getenv('LIBARTIQ_SUPPORT')
|
||||||
|
if libartiq_support is not None:
|
||||||
|
llvm.load_library_permanently(libartiq_support)
|
||||||
|
|
||||||
|
def process_diagnostic(diag):
|
||||||
|
print("\n".join(diag.render()))
|
||||||
|
if diag.level in ("fatal", "error"):
|
||||||
|
exit(1)
|
||||||
|
|
||||||
|
engine = diagnostic.Engine()
|
||||||
|
engine.process = process_diagnostic
|
||||||
|
|
||||||
|
source = "".join(fileinput.input())
|
||||||
|
source = source.replace("#ARTIQ#", "")
|
||||||
|
mod = Module(Source.from_string(source.expandtabs(), engine=engine))
|
||||||
|
|
||||||
|
target = NativeTarget()
|
||||||
|
llmod = mod.build_llvm_ir(target)
|
||||||
|
llparsedmod = llvm.parse_assembly(str(llmod))
|
||||||
|
llparsedmod.verify()
|
||||||
|
|
||||||
|
llmachine = llvm.Target.from_triple(target.triple).create_target_machine()
|
||||||
|
lljit = llvm.create_mcjit_compiler(llparsedmod, llmachine)
|
||||||
|
llmain = lljit.get_pointer_to_global(llparsedmod.get_function(llmod.name + ".__modinit__"))
|
||||||
|
ctypes.CFUNCTYPE(None)(llmain)()
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
|
@@ -0,0 +1,30 @@
|
||||||
|
import sys, fileinput
|
||||||
|
from pythonparser import diagnostic
|
||||||
|
from llvmlite_artiq import ir as ll
|
||||||
|
from .. import Module, Source
|
||||||
|
from ..targets import NativeTarget
|
||||||
|
|
||||||
|
def main():
|
||||||
|
def process_diagnostic(diag):
|
||||||
|
print("\n".join(diag.render()))
|
||||||
|
if diag.level in ("fatal", "error"):
|
||||||
|
exit(1)
|
||||||
|
|
||||||
|
engine = diagnostic.Engine()
|
||||||
|
engine.process = process_diagnostic
|
||||||
|
|
||||||
|
mod = Module(Source.from_string("".join(fileinput.input()).expandtabs(), engine=engine))
|
||||||
|
|
||||||
|
target = NativeTarget()
|
||||||
|
llmod = mod.build_llvm_ir(target=target)
|
||||||
|
|
||||||
|
# Add main so that the result can be executed with lli
|
||||||
|
llmain = ll.Function(llmod, ll.FunctionType(ll.VoidType(), []), "main")
|
||||||
|
llbuilder = ll.IRBuilder(llmain.append_basic_block("entry"))
|
||||||
|
llbuilder.call(llmod.get_global(llmod.name + ".__modinit__"), [])
|
||||||
|
llbuilder.ret_void()
|
||||||
|
|
||||||
|
print(llmod)
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
|
@@ -0,0 +1,37 @@
|
||||||
|
import sys, os
|
||||||
|
from pythonparser import diagnostic
|
||||||
|
from .. import Module, Source
|
||||||
|
from ..targets import OR1KTarget
|
||||||
|
from . import benchmark
|
||||||
|
|
||||||
|
def main():
|
||||||
|
if not len(sys.argv) == 2:
|
||||||
|
print("Expected exactly one module filename", file=sys.stderr)
|
||||||
|
exit(1)
|
||||||
|
|
||||||
|
def process_diagnostic(diag):
|
||||||
|
print("\n".join(diag.render()), file=sys.stderr)
|
||||||
|
if diag.level in ("fatal", "error"):
|
||||||
|
exit(1)
|
||||||
|
|
||||||
|
engine = diagnostic.Engine()
|
||||||
|
engine.process = process_diagnostic
|
||||||
|
|
||||||
|
# Make sure everything's valid
|
||||||
|
filename = sys.argv[1]
|
||||||
|
with open(filename) as f:
|
||||||
|
code = f.read()
|
||||||
|
source = Source.from_string(code, filename, engine=engine)
|
||||||
|
module = Module(source)
|
||||||
|
|
||||||
|
benchmark(lambda: Source.from_string(code, filename),
|
||||||
|
"ARTIQ parsing and inference")
|
||||||
|
|
||||||
|
benchmark(lambda: Module(source),
|
||||||
|
"ARTIQ transforms and validators")
|
||||||
|
|
||||||
|
benchmark(lambda: OR1KTarget().compile_and_link([module]),
|
||||||
|
"LLVM optimization and linking")
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
|
@@ -0,0 +1,52 @@
|
||||||
|
import sys, os
|
||||||
|
from pythonparser import diagnostic
|
||||||
|
from ...protocols.file_db import FlatFileDB
|
||||||
|
from ...master.worker_db import DeviceManager
|
||||||
|
from .. import Module
|
||||||
|
from ..embedding import Stitcher
|
||||||
|
from ..targets import OR1KTarget
|
||||||
|
from . import benchmark
|
||||||
|
|
||||||
|
def main():
|
||||||
|
if not len(sys.argv) == 2:
|
||||||
|
print("Expected exactly one module filename", file=sys.stderr)
|
||||||
|
exit(1)
|
||||||
|
|
||||||
|
def process_diagnostic(diag):
|
||||||
|
print("\n".join(diag.render()), file=sys.stderr)
|
||||||
|
if diag.level in ("fatal", "error"):
|
||||||
|
exit(1)
|
||||||
|
|
||||||
|
engine = diagnostic.Engine()
|
||||||
|
engine.process = process_diagnostic
|
||||||
|
|
||||||
|
with open(sys.argv[1]) as f:
|
||||||
|
testcase_code = compile(f.read(), f.name, "exec")
|
||||||
|
testcase_vars = {'__name__': 'testbench'}
|
||||||
|
exec(testcase_code, testcase_vars)
|
||||||
|
|
||||||
|
ddb_path = os.path.join(os.path.dirname(sys.argv[1]), "ddb.pyon")
|
||||||
|
dmgr = DeviceManager(FlatFileDB(ddb_path))
|
||||||
|
|
||||||
|
def embed():
|
||||||
|
experiment = testcase_vars["Benchmark"](dmgr)
|
||||||
|
|
||||||
|
stitcher = Stitcher()
|
||||||
|
stitcher.stitch_call(experiment.run, (experiment,), {})
|
||||||
|
stitcher.finalize()
|
||||||
|
return stitcher
|
||||||
|
|
||||||
|
stitcher = embed()
|
||||||
|
module = Module(stitcher)
|
||||||
|
|
||||||
|
benchmark(lambda: embed(),
|
||||||
|
"ARTIQ embedding")
|
||||||
|
|
||||||
|
benchmark(lambda: Module(stitcher),
|
||||||
|
"ARTIQ transforms and validators")
|
||||||
|
|
||||||
|
benchmark(lambda: OR1KTarget().compile_and_link([module]),
|
||||||
|
"LLVM optimization and linking")
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
|
@@ -0,0 +1,30 @@
|
||||||
|
import sys, os
|
||||||
|
from pythonparser import diagnostic
|
||||||
|
from .. import Module, Source
|
||||||
|
from ..targets import OR1KTarget
|
||||||
|
|
||||||
|
def main():
|
||||||
|
if not len(sys.argv) > 1:
|
||||||
|
print("Expected at least one module filename", file=sys.stderr)
|
||||||
|
exit(1)
|
||||||
|
|
||||||
|
def process_diagnostic(diag):
|
||||||
|
print("\n".join(diag.render()), file=sys.stderr)
|
||||||
|
if diag.level in ("fatal", "error"):
|
||||||
|
exit(1)
|
||||||
|
|
||||||
|
engine = diagnostic.Engine()
|
||||||
|
engine.process = process_diagnostic
|
||||||
|
|
||||||
|
modules = []
|
||||||
|
for filename in sys.argv[1:]:
|
||||||
|
modules.append(Module(Source.from_filename(filename, engine=engine)))
|
||||||
|
|
||||||
|
llobj = OR1KTarget().compile_and_link(modules)
|
||||||
|
|
||||||
|
basename, ext = os.path.splitext(sys.argv[-1])
|
||||||
|
with open(basename + ".so", "wb") as f:
|
||||||
|
f.write(llobj)
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
|
@@ -0,0 +1,43 @@
|
||||||
|
import sys, fileinput
|
||||||
|
from pythonparser import diagnostic
|
||||||
|
from .. import types, iodelay, Module, Source
|
||||||
|
|
||||||
|
def main():
|
||||||
|
if len(sys.argv) > 1 and sys.argv[1] == "+diag":
|
||||||
|
del sys.argv[1]
|
||||||
|
diag = True
|
||||||
|
def process_diagnostic(diag):
|
||||||
|
print("\n".join(diag.render(only_line=True)))
|
||||||
|
if diag.level == "fatal":
|
||||||
|
exit()
|
||||||
|
else:
|
||||||
|
diag = False
|
||||||
|
def process_diagnostic(diag):
|
||||||
|
print("\n".join(diag.render(colored=True)))
|
||||||
|
if diag.level in ("fatal", "error"):
|
||||||
|
exit(1)
|
||||||
|
|
||||||
|
if len(sys.argv) > 1 and sys.argv[1] == "+delay":
|
||||||
|
del sys.argv[1]
|
||||||
|
force_delays = True
|
||||||
|
else:
|
||||||
|
force_delays = False
|
||||||
|
|
||||||
|
engine = diagnostic.Engine()
|
||||||
|
engine.process = process_diagnostic
|
||||||
|
|
||||||
|
try:
|
||||||
|
mod = Module(Source.from_string("".join(fileinput.input()).expandtabs(), engine=engine))
|
||||||
|
|
||||||
|
if force_delays:
|
||||||
|
for var in mod.globals:
|
||||||
|
typ = mod.globals[var].find()
|
||||||
|
if types.is_function(typ) and types.is_indeterminate_delay(typ.delay):
|
||||||
|
process_diagnostic(typ.delay.find().cause)
|
||||||
|
|
||||||
|
print(repr(mod))
|
||||||
|
except:
|
||||||
|
if not diag: raise
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
|
@@ -0,0 +1,8 @@
from .asttyped_rewriter import ASTTypedRewriter
from .inferencer import Inferencer
from .int_monomorphizer import IntMonomorphizer
from .iodelay_estimator import IODelayEstimator
from .artiq_ir_generator import ARTIQIRGenerator
from .dead_code_eliminator import DeadCodeEliminator
from .llvm_ir_generator import LLVMIRGenerator
from .interleaver import Interleaver
|
File diff suppressed because it is too large
|
@@ -0,0 +1,496 @@
|
||||||
|
"""
|
||||||
|
:class:`ASTTypedRewriter` rewrites a parsetree (:mod:`pythonparser.ast`)
|
||||||
|
to a typedtree (:mod:`..asttyped`).
|
||||||
|
"""
|
||||||
|
|
||||||
|
from collections import OrderedDict
|
||||||
|
from pythonparser import ast, algorithm, diagnostic
|
||||||
|
from .. import asttyped, types, builtins
|
||||||
|
|
||||||
|
# This visitor will be called for every node with a scope,
|
||||||
|
# i.e.: class, function, comprehension, lambda
|
||||||
|
class LocalExtractor(algorithm.Visitor):
|
||||||
|
def __init__(self, env_stack, engine):
|
||||||
|
super().__init__()
|
||||||
|
self.env_stack = env_stack
|
||||||
|
self.engine = engine
|
||||||
|
|
||||||
|
self.in_root = False
|
||||||
|
self.in_assign = False
|
||||||
|
self.typing_env = OrderedDict()
|
||||||
|
|
||||||
|
# which names are global have to be recorded in the current scope
|
||||||
|
self.global_ = set()
|
||||||
|
|
||||||
|
# which names are nonlocal only affects whether the current scope
|
||||||
|
# gets a new binding or not, so we throw this away
|
||||||
|
self.nonlocal_ = set()
|
||||||
|
|
||||||
|
# parameters can't be declared as global or nonlocal
|
||||||
|
self.params = set()
|
||||||
|
|
||||||
|
def visit_in_assign(self, node, in_assign):
|
||||||
|
try:
|
||||||
|
old_in_assign, self.in_assign = self.in_assign, in_assign
|
||||||
|
return self.visit(node)
|
||||||
|
finally:
|
||||||
|
self.in_assign = old_in_assign
|
||||||
|
|
||||||
|
def visit_Assign(self, node):
|
||||||
|
self.visit(node.value)
|
||||||
|
self.visit_in_assign(node.targets, in_assign=True)
|
||||||
|
|
||||||
|
def visit_For(self, node):
|
||||||
|
self.visit(node.iter)
|
||||||
|
self.visit_in_assign(node.target, in_assign=True)
|
||||||
|
self.visit(node.body)
|
||||||
|
self.visit(node.orelse)
|
||||||
|
|
||||||
|
def visit_withitem(self, node):
|
||||||
|
self.visit(node.context_expr)
|
||||||
|
self.visit_in_assign(node.optional_vars, in_assign=True)
|
||||||
|
|
||||||
|
def visit_comprehension(self, node):
|
||||||
|
self.visit(node.iter)
|
||||||
|
self.visit_in_assign(node.target, in_assign=True)
|
||||||
|
self.visit(node.ifs)
|
||||||
|
|
||||||
|
def visit_generator(self, node):
|
||||||
|
if self.in_root:
|
||||||
|
return
|
||||||
|
self.in_root = True
|
||||||
|
self.visit(list(reversed(node.generators)))
|
||||||
|
self.visit(node.elt)
|
||||||
|
|
||||||
|
visit_ListComp = visit_generator
|
||||||
|
visit_SetComp = visit_generator
|
||||||
|
visit_GeneratorExp = visit_generator
|
||||||
|
|
||||||
|
def visit_DictComp(self, node):
|
||||||
|
if self.in_root:
|
||||||
|
return
|
||||||
|
self.in_root = True
|
||||||
|
self.visit(list(reversed(node.generators)))
|
||||||
|
self.visit(node.key)
|
||||||
|
self.visit(node.value)
|
||||||
|
|
||||||
|
def visit_root(self, node):
|
||||||
|
if self.in_root:
|
||||||
|
return
|
||||||
|
self.in_root = True
|
||||||
|
self.generic_visit(node)
|
||||||
|
|
||||||
|
visit_Module = visit_root # don't look at inner scopes
|
||||||
|
visit_ClassDef = visit_root
|
||||||
|
visit_Lambda = visit_root
|
||||||
|
|
||||||
|
def visit_FunctionDef(self, node):
|
||||||
|
if self.in_root:
|
||||||
|
self._assignable(node.name)
|
||||||
|
self.visit_root(node)
|
||||||
|
|
||||||
|
def _assignable(self, name):
|
||||||
|
assert name is not None
|
||||||
|
if name not in self.typing_env and name not in self.nonlocal_:
|
||||||
|
self.typing_env[name] = types.TVar()
|
||||||
|
|
||||||
|
def visit_arg(self, node):
|
||||||
|
if node.arg in self.params:
|
||||||
|
diag = diagnostic.Diagnostic("error",
|
||||||
|
"duplicate parameter '{name}'", {"name": node.arg},
|
||||||
|
node.loc)
|
||||||
|
self.engine.process(diag)
|
||||||
|
return
|
||||||
|
self._assignable(node.arg)
|
||||||
|
self.params.add(node.arg)
|
||||||
|
|
||||||
|
def visit_Name(self, node):
|
||||||
|
if self.in_assign:
|
||||||
|
# code like:
|
||||||
|
# x = 1
|
||||||
|
# def f():
|
||||||
|
# x = 1
|
||||||
|
# creates a new binding for x in f's scope
|
||||||
|
self._assignable(node.id)
|
||||||
|
|
||||||
|
def visit_Attribute(self, node):
|
||||||
|
self.visit_in_assign(node.value, in_assign=False)
|
||||||
|
|
||||||
|
def visit_Subscript(self, node):
|
||||||
|
self.visit_in_assign(node.value, in_assign=False)
|
||||||
|
self.visit_in_assign(node.slice, in_assign=False)
|
||||||
|
|
||||||
|
def _check_not_in(self, name, names, curkind, newkind, loc):
|
||||||
|
if name in names:
|
||||||
|
diag = diagnostic.Diagnostic("error",
|
||||||
|
"name '{name}' cannot be {curkind} and {newkind} simultaneously",
|
||||||
|
{"name": name, "curkind": curkind, "newkind": newkind}, loc)
|
||||||
|
self.engine.process(diag)
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def visit_Global(self, node):
|
||||||
|
for name, loc in zip(node.names, node.name_locs):
|
||||||
|
if self._check_not_in(name, self.nonlocal_, "nonlocal", "global", loc) or \
|
||||||
|
self._check_not_in(name, self.params, "a parameter", "global", loc):
|
||||||
|
continue
|
||||||
|
|
||||||
|
self.global_.add(name)
|
||||||
|
if len(self.env_stack) == 1:
|
||||||
|
self._assignable(name) # already in global scope
|
||||||
|
else:
|
||||||
|
if name not in self.env_stack[1]:
|
||||||
|
self.env_stack[1][name] = types.TVar()
|
||||||
|
self.typing_env[name] = self.env_stack[1][name]
|
||||||
|
|
||||||
|
def visit_Nonlocal(self, node):
|
||||||
|
for name, loc in zip(node.names, node.name_locs):
|
||||||
|
if self._check_not_in(name, self.global_, "global", "nonlocal", loc) or \
|
||||||
|
self._check_not_in(name, self.params, "a parameter", "nonlocal", loc):
|
||||||
|
continue
|
||||||
|
|
||||||
|
# nonlocal does not search prelude and global scopes
|
||||||
|
found = False
|
||||||
|
for outer_env in reversed(self.env_stack[2:]):
|
||||||
|
if name in outer_env:
|
||||||
|
found = True
|
||||||
|
break
|
||||||
|
if not found:
|
||||||
|
diag = diagnostic.Diagnostic("error",
|
||||||
|
"cannot declare name '{name}' as nonlocal: it is not bound in any outer scope",
|
||||||
|
{"name": name},
|
||||||
|
loc, [node.keyword_loc])
|
||||||
|
self.engine.process(diag)
|
||||||
|
continue
|
||||||
|
|
||||||
|
self.nonlocal_.add(name)
|
||||||
|
|
||||||
|
def visit_ExceptHandler(self, node):
|
||||||
|
self.visit(node.type)
|
||||||
|
if node.name is not None:
|
||||||
|
self._assignable(node.name)
|
||||||
|
for stmt in node.body:
|
||||||
|
self.visit(stmt)
|
||||||
|
|
||||||
|
|
||||||
|
class ASTTypedRewriter(algorithm.Transformer):
|
||||||
|
"""
|
||||||
|
:class:`ASTTypedRewriter` converts an untyped AST to a typed AST
|
||||||
|
where all type fields of non-literals are filled with fresh type variables,
|
||||||
|
and type fields of literals are filled with corresponding types.
|
||||||
|
|
||||||
|
:class:`ASTTypedRewriter` also discovers the scope of variable bindings
|
||||||
|
via :class:`LocalExtractor`.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, engine, prelude):
|
||||||
|
self.engine = engine
|
||||||
|
self.globals = None
|
||||||
|
self.env_stack = [prelude]
|
||||||
|
self.in_class = None
|
||||||
|
|
||||||
|
def _try_find_name(self, name):
|
||||||
|
for typing_env in reversed(self.env_stack):
|
||||||
|
if name in typing_env:
|
||||||
|
return typing_env[name]
|
||||||
|
|
||||||
|
def _find_name(self, name, loc):
|
||||||
|
if self.in_class is not None:
|
||||||
|
typ = self.in_class.constructor_type.attributes.get(name)
|
||||||
|
if typ is not None:
|
||||||
|
return typ
|
||||||
|
|
||||||
|
typ = self._try_find_name(name)
|
||||||
|
if typ is not None:
|
||||||
|
return typ
|
||||||
|
|
||||||
|
diag = diagnostic.Diagnostic("fatal",
|
||||||
|
"undefined variable '{name}'", {"name":name}, loc)
|
||||||
|
self.engine.process(diag)
|
||||||
|
|
||||||
|
# Visitors that replace node with a typed node
|
||||||
|
#
|
||||||
|
def visit_Module(self, node):
|
||||||
|
extractor = LocalExtractor(env_stack=self.env_stack, engine=self.engine)
|
||||||
|
extractor.visit(node)
|
||||||
|
|
||||||
|
node = asttyped.ModuleT(
|
||||||
|
typing_env=extractor.typing_env, globals_in_scope=extractor.global_,
|
||||||
|
body=node.body, loc=node.loc)
|
||||||
|
self.globals = node.typing_env
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.env_stack.append(node.typing_env)
|
||||||
|
return self.generic_visit(node)
|
||||||
|
finally:
|
||||||
|
self.env_stack.pop()
|
||||||
|
|
||||||
|
def visit_FunctionDef(self, node):
|
||||||
|
extractor = LocalExtractor(env_stack=self.env_stack, engine=self.engine)
|
||||||
|
extractor.visit(node)
|
||||||
|
|
||||||
|
node = asttyped.FunctionDefT(
|
||||||
|
typing_env=extractor.typing_env, globals_in_scope=extractor.global_,
|
||||||
|
signature_type=self._find_name(node.name, node.name_loc), return_type=types.TVar(),
|
||||||
|
name=node.name, args=node.args, returns=node.returns,
|
||||||
|
body=node.body, decorator_list=node.decorator_list,
|
||||||
|
keyword_loc=node.keyword_loc, name_loc=node.name_loc,
|
||||||
|
arrow_loc=node.arrow_loc, colon_loc=node.colon_loc, at_locs=node.at_locs,
|
||||||
|
loc=node.loc)
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.env_stack.append(node.typing_env)
|
||||||
|
return self.generic_visit(node)
|
||||||
|
finally:
|
||||||
|
self.env_stack.pop()
|
||||||
|
|
||||||
|
def visit_ClassDef(self, node):
|
||||||
|
if any(node.bases) or any(node.keywords) or \
|
||||||
|
node.starargs is not None or node.kwargs is not None:
|
||||||
|
diag = diagnostic.Diagnostic("error",
|
||||||
|
"inheritance is not supported", {},
|
||||||
|
node.lparen_loc.join(node.rparen_loc))
|
||||||
|
self.engine.process(diag)
|
||||||
|
|
||||||
|
for child in node.body:
|
||||||
|
if isinstance(child, (ast.Assign, ast.FunctionDef, ast.Pass)):
|
||||||
|
continue
|
||||||
|
|
||||||
|
diag = diagnostic.Diagnostic("fatal",
|
||||||
|
"class body must contain only assignments and function definitions", {},
|
||||||
|
child.loc)
|
||||||
|
self.engine.process(diag)
|
||||||
|
|
||||||
|
if node.name in self.env_stack[-1]:
|
||||||
|
diag = diagnostic.Diagnostic("fatal",
|
||||||
|
"variable '{name}' is already defined", {"name":node.name}, node.name_loc)
|
||||||
|
self.engine.process(diag)
|
||||||
|
|
||||||
|
extractor = LocalExtractor(env_stack=self.env_stack, engine=self.engine)
|
||||||
|
extractor.visit(node)
|
||||||
|
|
||||||
|
# Now we create two types.
|
||||||
|
# The first type is the type of instances created by the constructor.
|
||||||
|
# Its attributes are those of the class environment, but wrapped
|
||||||
|
# appropriately so that they are linked to the class from which they
|
||||||
|
# originate.
|
||||||
|
instance_type = types.TInstance(node.name, OrderedDict())
|
||||||
|
|
||||||
|
# The second type is the type of the constructor itself (in other words,
|
||||||
|
# the class object): it is simply a singleton type that has the class
|
||||||
|
# environment as attributes.
|
||||||
|
constructor_type = types.TConstructor(instance_type)
|
||||||
|
constructor_type.attributes = extractor.typing_env
|
||||||
|
instance_type.constructor = constructor_type
|
||||||
|
|
||||||
|
self.env_stack[-1][node.name] = constructor_type
|
||||||
|
|
||||||
|
node = asttyped.ClassDefT(
|
||||||
|
constructor_type=constructor_type,
|
||||||
|
name=node.name,
|
||||||
|
bases=self.visit(node.bases), keywords=self.visit(node.keywords),
|
||||||
|
starargs=self.visit(node.starargs), kwargs=self.visit(node.kwargs),
|
||||||
|
body=node.body,
|
||||||
|
decorator_list=self.visit(node.decorator_list),
|
||||||
|
keyword_loc=node.keyword_loc, name_loc=node.name_loc,
|
||||||
|
lparen_loc=node.lparen_loc, star_loc=node.star_loc,
|
||||||
|
dstar_loc=node.dstar_loc, rparen_loc=node.rparen_loc,
|
||||||
|
colon_loc=node.colon_loc, at_locs=node.at_locs,
|
||||||
|
loc=node.loc)
|
||||||
|
|
||||||
|
try:
|
||||||
|
old_in_class, self.in_class = self.in_class, node
|
||||||
|
return self.generic_visit(node)
|
||||||
|
finally:
|
||||||
|
self.in_class = old_in_class
|
||||||
|
|
||||||
|
def visit_arg(self, node):
|
||||||
|
return asttyped.argT(type=self._find_name(node.arg, node.loc),
|
||||||
|
arg=node.arg, annotation=self.visit(node.annotation),
|
||||||
|
arg_loc=node.arg_loc, colon_loc=node.colon_loc, loc=node.loc)
|
||||||
|
|
||||||
|
def visit_Num(self, node):
|
||||||
|
if isinstance(node.n, int):
|
||||||
|
typ = builtins.TInt()
|
||||||
|
elif isinstance(node.n, float):
|
||||||
|
typ = builtins.TFloat()
|
||||||
|
else:
|
||||||
|
diag = diagnostic.Diagnostic("fatal",
|
||||||
|
"numeric type {type} is not supported", {"type": node.n.__class__.__name__},
|
||||||
|
node.loc)
|
||||||
|
self.engine.process(diag)
|
||||||
|
return asttyped.NumT(type=typ,
|
||||||
|
n=node.n, loc=node.loc)
|
||||||
|
|
||||||
|
def visit_Str(self, node):
|
||||||
|
return asttyped.StrT(type=builtins.TStr(),
|
||||||
|
s=node.s,
|
||||||
|
begin_loc=node.begin_loc, end_loc=node.end_loc, loc=node.loc)
|
||||||
|
|
||||||
|
def visit_Name(self, node):
|
||||||
|
return asttyped.NameT(type=self._find_name(node.id, node.loc),
|
||||||
|
id=node.id, ctx=node.ctx, loc=node.loc)
|
||||||
|
|
||||||
|
def visit_NameConstant(self, node):
|
||||||
|
if node.value is True or node.value is False:
|
||||||
|
typ = builtins.TBool()
|
||||||
|
elif node.value is None:
|
||||||
|
typ = builtins.TNone()
|
||||||
|
return asttyped.NameConstantT(type=typ, value=node.value, loc=node.loc)
|
||||||
|
|
||||||
|
def visit_Tuple(self, node):
|
||||||
|
node = self.generic_visit(node)
|
||||||
|
return asttyped.TupleT(type=types.TTuple([x.type for x in node.elts]),
|
||||||
|
elts=node.elts, ctx=node.ctx, loc=node.loc)
|
||||||
|
|
||||||
|
def visit_List(self, node):
|
||||||
|
node = self.generic_visit(node)
|
||||||
|
node = asttyped.ListT(type=builtins.TList(),
|
||||||
|
elts=node.elts, ctx=node.ctx,
|
||||||
|
begin_loc=node.begin_loc, end_loc=node.end_loc, loc=node.loc)
|
||||||
|
return self.visit(node)
|
||||||
|
|
||||||
|
def visit_Attribute(self, node):
|
||||||
|
node = self.generic_visit(node)
|
||||||
|
node = asttyped.AttributeT(type=types.TVar(),
|
||||||
|
value=node.value, attr=node.attr, ctx=node.ctx,
|
||||||
|
dot_loc=node.dot_loc, attr_loc=node.attr_loc, loc=node.loc)
|
||||||
|
return self.visit(node)
|
||||||
|
|
||||||
|
def visit_Slice(self, node):
|
||||||
|
node = self.generic_visit(node)
|
||||||
|
node = asttyped.SliceT(type=types.TVar(),
|
||||||
|
lower=node.lower, upper=node.upper, step=node.step,
|
||||||
|
bound_colon_loc=node.bound_colon_loc,
|
||||||
|
step_colon_loc=node.step_colon_loc,
|
||||||
|
loc=node.loc)
|
||||||
|
return self.visit(node)
|
||||||
|
|
||||||
|
def visit_Subscript(self, node):
|
||||||
|
node = self.generic_visit(node)
|
||||||
|
node = asttyped.SubscriptT(type=types.TVar(),
|
||||||
|
value=node.value, slice=node.slice, ctx=node.ctx,
|
||||||
|
begin_loc=node.begin_loc, end_loc=node.end_loc, loc=node.loc)
|
||||||
|
return self.visit(node)
|
||||||
|
|
||||||
|
def visit_BoolOp(self, node):
|
||||||
|
node = self.generic_visit(node)
|
||||||
|
node = asttyped.BoolOpT(type=types.TVar(),
|
||||||
|
op=node.op, values=node.values,
|
||||||
|
op_locs=node.op_locs, loc=node.loc)
|
||||||
|
return self.visit(node)
|
||||||
|
|
||||||
|
def visit_UnaryOp(self, node):
|
||||||
|
node = self.generic_visit(node)
|
||||||
|
node = asttyped.UnaryOpT(type=types.TVar(),
|
||||||
|
op=node.op, operand=node.operand,
|
||||||
|
loc=node.loc)
|
||||||
|
return self.visit(node)
|
||||||
|
|
||||||
|
def visit_BinOp(self, node):
|
||||||
|
node = self.generic_visit(node)
|
||||||
|
node = asttyped.BinOpT(type=types.TVar(),
|
||||||
|
left=node.left, op=node.op, right=node.right,
|
||||||
|
loc=node.loc)
|
||||||
|
return self.visit(node)
|
||||||
|
|
||||||
|
def visit_Compare(self, node):
|
||||||
|
node = self.generic_visit(node)
|
||||||
|
node = asttyped.CompareT(type=types.TVar(),
|
||||||
|
left=node.left, ops=node.ops, comparators=node.comparators,
|
||||||
|
loc=node.loc)
|
||||||
|
return self.visit(node)
|
||||||
|
|
||||||
|
def visit_IfExp(self, node):
|
||||||
|
node = self.generic_visit(node)
|
||||||
|
node = asttyped.IfExpT(type=types.TVar(),
|
||||||
|
test=node.test, body=node.body, orelse=node.orelse,
|
||||||
|
if_loc=node.if_loc, else_loc=node.else_loc, loc=node.loc)
|
||||||
|
return self.visit(node)
|
||||||
|
|
||||||
|
def visit_ListComp(self, node):
|
||||||
|
extractor = LocalExtractor(env_stack=self.env_stack, engine=self.engine)
|
||||||
|
extractor.visit(node)
|
||||||
|
|
||||||
|
node = asttyped.ListCompT(
|
||||||
|
typing_env=extractor.typing_env, globals_in_scope=extractor.global_,
|
||||||
|
type=types.TVar(),
|
||||||
|
elt=node.elt, generators=node.generators,
|
||||||
|
begin_loc=node.begin_loc, end_loc=node.end_loc, loc=node.loc)
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.env_stack.append(node.typing_env)
|
||||||
|
return self.generic_visit(node)
|
||||||
|
finally:
|
||||||
|
self.env_stack.pop()
|
||||||
|
|
||||||
|
def visit_Call(self, node):
|
||||||
|
node = self.generic_visit(node)
|
||||||
|
node = asttyped.CallT(type=types.TVar(), iodelay=None,
|
||||||
|
func=node.func, args=node.args, keywords=node.keywords,
|
||||||
|
starargs=node.starargs, kwargs=node.kwargs,
|
||||||
|
star_loc=node.star_loc, dstar_loc=node.dstar_loc,
|
||||||
|
begin_loc=node.begin_loc, end_loc=node.end_loc, loc=node.loc)
|
||||||
|
return node
|
||||||
|
|
||||||
|
def visit_Lambda(self, node):
|
||||||
|
extractor = LocalExtractor(env_stack=self.env_stack, engine=self.engine)
|
||||||
|
extractor.visit(node)
|
||||||
|
|
||||||
|
node = asttyped.LambdaT(
|
||||||
|
typing_env=extractor.typing_env, globals_in_scope=extractor.global_,
|
||||||
|
type=types.TVar(),
|
||||||
|
args=node.args, body=node.body,
|
||||||
|
lambda_loc=node.lambda_loc, colon_loc=node.colon_loc, loc=node.loc)
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.env_stack.append(node.typing_env)
|
||||||
|
return self.generic_visit(node)
|
||||||
|
finally:
|
||||||
|
self.env_stack.pop()
|
||||||
|
|
||||||
|
def visit_ExceptHandler(self, node):
|
||||||
|
node = self.generic_visit(node)
|
||||||
|
if node.name is not None:
|
||||||
|
name_type = self._find_name(node.name, node.name_loc)
|
||||||
|
else:
|
||||||
|
name_type = types.TVar()
|
||||||
|
node = asttyped.ExceptHandlerT(
|
||||||
|
name_type=name_type,
|
||||||
|
filter=node.type, name=node.name, body=node.body,
|
||||||
|
except_loc=node.except_loc, as_loc=node.as_loc, name_loc=node.name_loc,
|
||||||
|
colon_loc=node.colon_loc, loc=node.loc)
|
||||||
|
return node
|
||||||
|
|
||||||
|
def visit_Raise(self, node):
|
||||||
|
node = self.generic_visit(node)
|
||||||
|
if node.cause:
|
||||||
|
diag = diagnostic.Diagnostic("error",
|
||||||
|
"'raise from' syntax is not supported", {},
|
||||||
|
node.from_loc)
|
||||||
|
self.engine.process(diag)
|
||||||
|
return node
|
||||||
|
|
||||||
|
# Unsupported visitors
|
||||||
|
#
|
||||||
|
def visit_unsupported(self, node):
|
||||||
|
diag = diagnostic.Diagnostic("fatal",
|
||||||
|
"this syntax is not supported", {},
|
||||||
|
node.loc)
|
||||||
|
self.engine.process(diag)
|
||||||
|
|
||||||
|
# expr
|
||||||
|
visit_Dict = visit_unsupported
|
||||||
|
visit_DictComp = visit_unsupported
|
||||||
|
visit_Ellipsis = visit_unsupported
|
||||||
|
visit_GeneratorExp = visit_unsupported
|
||||||
|
visit_Set = visit_unsupported
|
||||||
|
visit_SetComp = visit_unsupported
|
||||||
|
visit_Starred = visit_unsupported
|
||||||
|
visit_Yield = visit_unsupported
|
||||||
|
visit_YieldFrom = visit_unsupported
|
||||||
|
|
||||||
|
# stmt
|
||||||
|
visit_Delete = visit_unsupported
|
||||||
|
visit_Import = visit_unsupported
|
||||||
|
visit_ImportFrom = visit_unsupported
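Sketch of how the rewriter is driven (this is what Source.__init__ and the typing tool above do; the import paths are assumptions based on this layout):

    from pythonparser import source, diagnostic, parse_buffer
    from artiq.compiler import prelude
    from artiq.compiler.transforms import ASTTypedRewriter, Inferencer

    engine = diagnostic.Engine(all_errors_are_fatal=True)
    buf = source.Buffer("x = 1\n", "example.py")
    parsetree, comments = parse_buffer(buf, engine=engine)
    typedtree = ASTTypedRewriter(engine=engine, prelude=prelude.globals()).visit(parsetree)
    Inferencer(engine=engine).visit(typedtree)   # fills in the fresh type variables in place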
|
|
@@ -0,0 +1,43 @@
|
||||||
|
"""
|
||||||
|
:class:`DeadCodeEliminator` is a very simple dead code elimination
|
||||||
|
transform: it only removes basic blocks with no predecessors.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from .. import ir
|
||||||
|
|
||||||
|
class DeadCodeEliminator:
|
||||||
|
def __init__(self, engine):
|
||||||
|
self.engine = engine
|
||||||
|
|
||||||
|
def process(self, functions):
|
||||||
|
for func in functions:
|
||||||
|
self.process_function(func)
|
||||||
|
|
||||||
|
def process_function(self, func):
|
||||||
|
for block in list(func.basic_blocks):
|
||||||
|
if not any(block.predecessors()) and block != func.entry():
|
||||||
|
for use in set(block.uses):
|
||||||
|
if isinstance(use, ir.SetLocal):
|
||||||
|
use.erase()
|
||||||
|
self.remove_block(block)
|
||||||
|
|
||||||
|
def remove_block(self, block):
|
||||||
|
# block.uses are updated while iterating
|
||||||
|
for use in set(block.uses):
|
||||||
|
if isinstance(use, ir.Phi):
|
||||||
|
use.remove_incoming_block(block)
|
||||||
|
if not any(use.operands):
|
||||||
|
self.remove_instruction(use)
|
||||||
|
else:
|
||||||
|
assert False
|
||||||
|
|
||||||
|
block.erase()
|
||||||
|
|
||||||
|
def remove_instruction(self, insn):
|
||||||
|
for use in set(insn.uses):
|
||||||
|
if isinstance(use, ir.Phi):
|
||||||
|
use.remove_incoming_value(insn)
|
||||||
|
if not any(use.operands):
|
||||||
|
self.remove_instruction(use)
|
||||||
|
|
||||||
|
insn.erase()
|
File diff suppressed because it is too large
|
@@ -0,0 +1,28 @@
"""
:class:`IntMonomorphizer` collapses the integer literals of undetermined
width to 32 bits, assuming they fit into 32 bits, or 64 bits if they
do not.
"""

from pythonparser import algorithm, diagnostic
from .. import types, builtins

class IntMonomorphizer(algorithm.Visitor):
    def __init__(self, engine):
        self.engine = engine

    def visit_NumT(self, node):
        if builtins.is_int(node.type):
            if types.is_var(node.type["width"]):
                if -2**31 < node.n < 2**31-1:
                    width = 32
                elif -2**63 < node.n < 2**63-1:
                    width = 64
                else:
                    diag = diagnostic.Diagnostic("error",
                        "integer literal out of range for a signed 64-bit value", {},
                        node.loc)
                    self.engine.process(diag)
                    return

                node.type["width"].unify(types.TValue(width))
|
|
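
The width rule applied by visit_NumT above can be restated on its own; this is a hedged, standalone sketch (plain Python, not part of the commit):

    # Standalone restatement of the width rule for integer literals of
    # undetermined width.
    def literal_width(n):
        if -2**31 <= n <= 2**31 - 1:
            return 32
        elif -2**63 <= n <= 2**63 - 1:
            return 64
        else:
            raise ValueError("integer literal out of range for a signed 64-bit value")

    assert literal_width(1) == 32
    assert literal_width(2**40) == 64
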

@@ -0,0 +1,161 @@
"""
:class:`Interleaver` reorders requests to the RTIO core so that
timestamps are always monotonically nondecreasing.
"""

from pythonparser import diagnostic

from .. import types, builtins, ir, iodelay
from ..analyses import domination
from ..algorithms import inline

def delay_free_subgraph(root, limit):
    visited = set()
    queue = root.successors()
    while len(queue) > 0:
        block = queue.pop()
        visited.add(block)

        if block is limit:
            continue

        if isinstance(block.terminator(), ir.Delay):
            return False

        for successor in block.successors():
            if successor not in visited:
                queue.append(successor)

    return True

def iodelay_of_block(block):
    terminator = block.terminator()
    if isinstance(terminator, ir.Delay):
        # We should be able to fold everything without free variables.
        folded_expr = terminator.expr.fold()
        assert iodelay.is_const(folded_expr)
        return folded_expr.value
    else:
        return 0

def is_pure_delay(insn):
    return isinstance(insn, ir.Builtin) and insn.op in ("delay", "delay_mu")

def is_impure_delay_block(block):
    terminator = block.terminator()
    return isinstance(terminator, ir.Delay) and \
        not is_pure_delay(terminator.decomposition())

class Interleaver:
    def __init__(self, engine):
        self.engine = engine

    def process(self, functions):
        for func in functions:
            self.process_function(func)

    def process_function(self, func):
        for insn in func.instructions():
            if isinstance(insn, ir.Delay):
                if any(insn.expr.free_vars()):
                    # If a function has free variables in delay expressions,
                    # that means its IO delay depends on arguments.
                    # Do not change such functions in any way so that it will
                    # be successfully inlined and then removed by DCE.
                    return

        postdom_tree = None
        for insn in func.instructions():
            if not isinstance(insn, ir.Parallel):
                continue

            # Lazily compute dominators.
            if postdom_tree is None:
                postdom_tree = domination.PostDominatorTree(func)

            interleave_until = postdom_tree.immediate_dominator(insn.basic_block)
            assert (interleave_until is not None) # no nonlocal flow in `with parallel`

            target_block = insn.basic_block
            target_time = 0
            source_blocks = insn.basic_block.successors()
            source_times = [0 for _ in source_blocks]

            while len(source_blocks) > 0:
                def time_after_block(pair):
                    index, block = pair
                    return source_times[index] + iodelay_of_block(block)

                # Always prefer impure blocks (with calls) to pure blocks, because
                # impure blocks may expand with smaller delays appearing, and in
                # case of a tie, if a pure block is preferred, this would violate
                # the timeline monotonicity.
                # Enumerate over source_blocks so that `index` stays valid for
                # source_times and for the bookkeeping below.
                available_source_blocks = [pair for pair in enumerate(source_blocks)
                                           if is_impure_delay_block(pair[1])]
                if not available_source_blocks:
                    available_source_blocks = list(enumerate(source_blocks))

                index, source_block = min(available_source_blocks, key=time_after_block)
                source_block_delay = iodelay_of_block(source_block)

                new_target_time = source_times[index] + source_block_delay
                target_time_delta = new_target_time - target_time
                assert target_time_delta >= 0

                target_terminator = target_block.terminator()
                if isinstance(target_terminator, ir.Parallel):
                    target_terminator.replace_with(ir.Branch(source_block))
                else:
                    assert isinstance(target_terminator, (ir.Delay, ir.Branch))
                    target_terminator.set_target(source_block)

                source_terminator = source_block.terminator()

                if not isinstance(source_terminator, ir.Delay):
                    source_terminator.replace_with(ir.Branch(source_terminator.target()))
                else:
                    old_decomp = source_terminator.decomposition()
                    if is_pure_delay(old_decomp):
                        if target_time_delta > 0:
                            new_decomp_expr = ir.Constant(int(target_time_delta), builtins.TInt64())
                            new_decomp = ir.Builtin("delay_mu", [new_decomp_expr], builtins.TNone())
                            new_decomp.loc = old_decomp.loc

                            source_terminator.basic_block.insert(new_decomp, before=source_terminator)
                            source_terminator.expr = iodelay.Const(target_time_delta)
                            source_terminator.set_decomposition(new_decomp)
                        else:
                            source_terminator.replace_with(ir.Branch(source_terminator.target()))
                        old_decomp.erase()
                    else: # It's a call.
                        need_to_inline = len(source_blocks) > 1
                        if need_to_inline:
                            if old_decomp.static_target_function is None:
                                diag = diagnostic.Diagnostic("fatal",
                                    "it is not possible to interleave this function call within "
                                    "a 'with parallel:' statement because the compiler could not "
                                    "prove that the same function would always be called", {},
                                    old_decomp.loc)
                                self.engine.process(diag)

                            inline(old_decomp)
                            postdom_tree = domination.PostDominatorTree(func)
                            continue
                        elif target_time_delta > 0:
                            source_terminator.expr = iodelay.Const(target_time_delta)
                        else:
                            source_terminator.replace_with(ir.Branch(source_terminator.target()))

                target_block = source_block
                target_time = new_target_time

                new_source_block = postdom_tree.immediate_dominator(source_block)
                assert (new_source_block is not None)
                assert delay_free_subgraph(source_block, new_source_block)

                if new_source_block == interleave_until:
                    # We're finished with this branch.
                    del source_blocks[index]
                    del source_times[index]
                else:
                    source_blocks[index] = new_source_block
                    source_times[index] = new_target_time
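
The selection loop above always advances the branch whose cursor would end up earliest. A hedged, standalone restatement of that scheduling idea (plain Python over lists of delay durations, not ARTIQ IR) looks like this:

    # Standalone restatement (not part of the commit): at each step, advance the
    # branch with the earliest resulting time, so the merged timeline never
    # moves backwards.
    def merge_timelines(branches):
        # branches: one list of delay durations (in machine units) per parallel branch
        times = [0] * len(branches)
        positions = [0] * len(branches)
        merged = []
        while any(positions[i] < len(branches[i]) for i in range(len(branches))):
            candidates = [i for i in range(len(branches)) if positions[i] < len(branches[i])]
            i = min(candidates, key=lambda i: times[i] + branches[i][positions[i]])
            times[i] += branches[i][positions[i]]
            positions[i] += 1
            merged.append((i, times[i]))
        return merged

    assert merge_timelines([[10, 10], [5, 25]]) == [(1, 5), (0, 10), (0, 20), (1, 30)]
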

@@ -0,0 +1,286 @@
"""
:class:`IODelayEstimator` calculates the amount of time
elapsed from the point of view of the RTIO core for
every function.
"""

from pythonparser import ast, algorithm, diagnostic
from .. import types, iodelay, builtins, asttyped

class _UnknownDelay(Exception):
    pass

class _IndeterminateDelay(Exception):
    def __init__(self, cause):
        self.cause = cause

class IODelayEstimator(algorithm.Visitor):
    def __init__(self, engine, ref_period):
        self.engine = engine
        self.ref_period = ref_period
        self.changed = False
        self.current_delay = iodelay.Const(0)
        self.current_args = None
        self.current_goto = None
        self.current_return = None

    def evaluate(self, node, abort):
        if isinstance(node, asttyped.NumT):
            return iodelay.Const(node.n)
        elif isinstance(node, asttyped.CoerceT):
            return self.evaluate(node.value, abort)
        elif isinstance(node, asttyped.NameT):
            if self.current_args is None:
                note = diagnostic.Diagnostic("note",
                    "this variable is not an argument", {},
                    node.loc)
                abort([note])
            elif node.id in [arg.arg for arg in self.current_args.args]:
                return iodelay.Var(node.id)
            else:
                notes = [
                    diagnostic.Diagnostic("note",
                        "this variable is not an argument of the innermost function", {},
                        node.loc),
                    diagnostic.Diagnostic("note",
                        "only these arguments are in scope of analysis", {},
                        self.current_args.loc)
                ]
                abort(notes)
        elif isinstance(node, asttyped.BinOpT):
            lhs = self.evaluate(node.left, abort)
            rhs = self.evaluate(node.right, abort)
            if isinstance(node.op, ast.Add):
                return lhs + rhs
            elif isinstance(node.op, ast.Sub):
                return lhs - rhs
            elif isinstance(node.op, ast.Mult):
                return lhs * rhs
            elif isinstance(node.op, ast.Div):
                return lhs / rhs
            elif isinstance(node.op, ast.FloorDiv):
                return lhs // rhs
            else:
                note = diagnostic.Diagnostic("note",
                    "this operator is not supported", {},
                    node.op.loc)
                abort([note])
        else:
            note = diagnostic.Diagnostic("note",
                "this expression is not supported", {},
                node.loc)
            abort([note])

    def abort(self, message, loc, notes=[]):
        diag = diagnostic.Diagnostic("error", message, {}, loc, notes=notes)
        raise _IndeterminateDelay(diag)

    def visit_fixpoint(self, node):
        while True:
            self.changed = False
            self.visit(node)
            if not self.changed:
                return

    def visit_ModuleT(self, node):
        try:
            for stmt in node.body:
                try:
                    self.visit(stmt)
                except _UnknownDelay:
                    pass # more luck next time?
        except _IndeterminateDelay:
            pass # we don't care; module-level code is never interleaved

    def visit_function(self, args, body, typ, loc):
        old_args, self.current_args = self.current_args, args
        old_return, self.current_return = self.current_return, None
        old_delay, self.current_delay = self.current_delay, iodelay.Const(0)
        try:
            self.visit(body)
            if not iodelay.is_zero(self.current_delay) and self.current_return is not None:
                self.abort("only return statement at the end of the function "
                           "can be interleaved", self.current_return.loc)

            delay = types.TFixedDelay(self.current_delay.fold())
        except _IndeterminateDelay as error:
            delay = types.TIndeterminateDelay(error.cause)
        self.current_delay = old_delay
        self.current_return = old_return
        self.current_args = old_args

        if types.is_indeterminate_delay(delay) and types.is_indeterminate_delay(typ.delay):
            # Both delays indeterminate; no point in unifying since that will
            # replace the lazy and more specific error with an eager and more generic
            # error (unification error of delay(?) with delay(?), which is useless).
            return

        try:
            old_delay = typ.delay.find()
            typ.delay.unify(delay)
            if typ.delay.find() != old_delay:
                self.changed = True
        except types.UnificationError as e:
            printer = types.TypePrinter()
            diag = diagnostic.Diagnostic("fatal",
                "delay {delaya} was inferred for this function, but its delay is already "
                "constrained externally to {delayb}",
                {"delaya": printer.name(delay), "delayb": printer.name(typ.delay)},
                loc)
            self.engine.process(diag)

    def visit_FunctionDefT(self, node):
        self.visit(node.args.defaults)
        self.visit(node.args.kw_defaults)

        # We can only handle return in tail position.
        if isinstance(node.body[-1], ast.Return):
            body = node.body[:-1]
        else:
            body = node.body
        self.visit_function(node.args, body, node.signature_type.find(), node.loc)

    def visit_LambdaT(self, node):
        self.visit_function(node.args, node.body, node.type.find(), node.loc)

    def get_iterable_length(self, node):
        def abort(notes):
            self.abort("for statement cannot be interleaved because "
                       "trip count is indeterminate",
                       node.loc, notes)

        def evaluate(node):
            return self.evaluate(node, abort)

        if isinstance(node, asttyped.CallT) and types.is_builtin(node.func.type, "range"):
            range_min, range_max, range_step = iodelay.Const(0), None, iodelay.Const(1)
            if len(node.args) == 3:
                range_min, range_max, range_step = map(evaluate, node.args)
            elif len(node.args) == 2:
                range_min, range_max = map(evaluate, node.args)
            elif len(node.args) == 1:
                range_max, = map(evaluate, node.args)
            return (range_max - range_min) // range_step
        else:
            note = diagnostic.Diagnostic("note",
                "this value is not a constant range literal", {},
                node.loc)
            abort([note])

    def visit_For(self, node):
        self.visit(node.iter)

        old_goto, self.current_goto = self.current_goto, None
        old_delay, self.current_delay = self.current_delay, iodelay.Const(0)
        self.visit(node.body)
        if iodelay.is_zero(self.current_delay):
            self.current_delay = old_delay
        else:
            if self.current_goto is not None:
                self.abort("loop trip count is indeterminate because of control flow",
                           self.current_goto.loc)

            trip_count = self.get_iterable_length(node.iter)
            self.current_delay = old_delay + self.current_delay * trip_count
        self.current_goto = old_goto

        self.visit(node.orelse)

    def visit_goto(self, node):
        self.current_goto = node

    visit_Break = visit_goto
    visit_Continue = visit_goto

    def visit_control_flow(self, kind, node):
        old_delay, self.current_delay = self.current_delay, iodelay.Const(0)
        self.generic_visit(node)
        if not iodelay.is_zero(self.current_delay):
            self.abort("{} cannot be interleaved".format(kind), node.loc)
        self.current_delay = old_delay

    visit_While = lambda self, node: self.visit_control_flow("while statement", node)
    visit_If = lambda self, node: self.visit_control_flow("if statement", node)
    visit_IfExpT = lambda self, node: self.visit_control_flow("if expression", node)
    visit_Try = lambda self, node: self.visit_control_flow("try statement", node)

    def visit_Return(self, node):
        self.current_return = node

    def visit_With(self, node):
        self.visit(node.items)

        context_expr = node.items[0].context_expr
        if len(node.items) == 1 and types.is_builtin(context_expr.type, "parallel"):
            try:
                delays = []
                for stmt in node.body:
                    old_delay, self.current_delay = self.current_delay, iodelay.Const(0)
                    self.visit(stmt)
                    delays.append(self.current_delay)
                    self.current_delay = old_delay

                if any(delays):
                    self.current_delay += iodelay.Max(delays)
            except _IndeterminateDelay as error:
                # Interleave failures inside `with` statements are hard failures,
                # since, unlike a function body that might never be called, the
                # code inside a `with` statement will actually execute.
                self.engine.process(error.cause)

        elif len(node.items) == 1 and types.is_builtin(context_expr.type, "sequential"):
            self.visit(node.body)
        else:
            self.abort("with statement cannot be interleaved", node.loc)

    def visit_CallT(self, node):
        typ = node.func.type.find()
        def abort(notes):
            self.abort("this call cannot be interleaved because "
                       "an argument cannot be statically evaluated",
                       node.loc, notes)

        if types.is_builtin(typ, "delay"):
            value = self.evaluate(node.args[0], abort=abort)
            call_delay = iodelay.SToMU(value, ref_period=self.ref_period)
        elif types.is_builtin(typ, "delay_mu"):
            value = self.evaluate(node.args[0], abort=abort)
            call_delay = value
        elif not types.is_builtin(typ):
            if types.is_function(typ):
                offset = 0
            elif types.is_method(typ):
                offset = 1
                typ = types.get_method_function(typ)
            else:
                assert False

            delay = typ.find().delay.find()
            if types.is_var(delay):
                raise _UnknownDelay()
            elif delay.is_indeterminate():
                note = diagnostic.Diagnostic("note",
                    "function called here", {},
                    node.loc)
                cause = delay.cause
                cause = diagnostic.Diagnostic(cause.level, cause.reason, cause.arguments,
                                              cause.location, cause.highlights,
                                              cause.notes + [note])
                raise _IndeterminateDelay(cause)
            elif delay.is_fixed():
                args = {}
                for kw_node in node.keywords:
                    args[kw_node.arg] = kw_node.value
                for arg_name, arg_node in zip(list(typ.args)[offset:], node.args):
                    args[arg_name] = arg_node

                free_vars = delay.duration.free_vars()
                call_delay = delay.duration.fold(
                    { arg: self.evaluate(args[arg], abort=abort) for arg in free_vars })
            else:
                assert False
        else:
            call_delay = iodelay.Const(0)

        self.current_delay += call_delay
        node.iodelay = call_delay
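
The composition rules the estimator applies can be summarized outside the visitor; this is a hedged, standalone sketch (plain Python, not part of the commit): delays of statements in sequence add up, while the branches of a `with parallel:` block contribute their maximum.

    def sequential_delay(delays_mu):
        # statements executed one after another
        return sum(delays_mu)

    def parallel_delay(delays_mu):
        # branches of a `with parallel:` block
        return max(delays_mu) if delays_mu else 0

    assert sequential_delay([100, 50]) == 150
    assert parallel_delay([100, 50]) == 100
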
File diff suppressed because it is too large

@@ -0,0 +1,657 @@
"""
The :mod:`types` module contains the classes describing the types
in :mod:`asttyped`.
"""

import string
from collections import OrderedDict
from pythonparser import diagnostic
from . import iodelay


class UnificationError(Exception):
    def __init__(self, typea, typeb):
        self.typea, self.typeb = typea, typeb


def genalnum():
    ident = ["a"]
    while True:
        yield "".join(ident)
        pos = len(ident) - 1
        while pos >= 0:
            cur_n = string.ascii_lowercase.index(ident[pos])
            if cur_n < 25:
                ident[pos] = string.ascii_lowercase[cur_n + 1]
                break
            else:
                ident[pos] = "a"
                pos -= 1
        if pos < 0:
            ident = ["a"] + ident
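
genalnum() yields an endless stream of identifiers "a", "b", ..., "z", "aa", "ab", and so on. A hedged reference reimplementation (illustrative only, using itertools) produces the same sequence:

    from itertools import count, islice, product
    from string import ascii_lowercase

    def genalnum_reference():
        # same sequence as genalnum() above: 'a'..'z', then 'aa', 'ab', ...
        for length in count(1):
            for letters in product(ascii_lowercase, repeat=length):
                yield "".join(letters)

    assert list(islice(genalnum_reference(), 28))[-3:] == ["z", "aa", "ab"]
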
def _freeze(dict_):
    return tuple((key, dict_[key]) for key in dict_)

def _map_find(elts):
    if isinstance(elts, list):
        return [x.find() for x in elts]
    elif isinstance(elts, dict):
        return {k: elts[k].find() for k in elts}
    else:
        assert False


class Type(object):
    def __str__(self):
        return TypePrinter().name(self)

class TVar(Type):
    """
    A type variable.

    In effect, the classic union-find data structure is intrusively
    folded into this class.
    """

    def __init__(self):
        self.parent = self

    def find(self):
        if self.parent is self:
            return self
        else:
            root = self.parent.find()
            self.parent = root # path compression
            return root

    def unify(self, other):
        other = other.find()

        if self.parent is self:
            self.parent = other
        else:
            self.find().unify(other)

    def fold(self, accum, fn):
        if self.parent is self:
            return fn(accum, self)
        else:
            return self.find().fold(accum, fn)

    def __repr__(self):
        if self.parent is self:
            return "<artiq.compiler.types.TVar %d>" % id(self)
        else:
            return repr(self.find())

    # __eq__ and __hash__ are not overridden and default to
    # comparison by identity. Use .find() explicitly before
    # any lookups or comparisons.
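
A minimal sketch of the intrusive union-find above (illustrative only; it assumes the TVar class just defined is in scope):

    a, b = TVar(), TVar()
    a.unify(b)
    assert a.find() is b.find()   # both now resolve to the same representative
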
class TMono(Type):
    """
    A monomorphic type, possibly parametric.

    :class:`TMono` is supposed to be subclassed by builtin types,
    unlike all other :class:`Type` descendants. Similarly,
    instances of :class:`TMono` should never be allocated directly,
    as that will break the type-sniffing code in :mod:`builtins`.
    """

    attributes = OrderedDict()

    def __init__(self, name, params={}):
        assert isinstance(params, (dict, OrderedDict))
        self.name, self.params = name, OrderedDict(sorted(params.items()))

    def find(self):
        return self

    def unify(self, other):
        if isinstance(other, TMono) and self.name == other.name:
            assert self.params.keys() == other.params.keys()
            for param in self.params:
                self.params[param].unify(other.params[param])
        elif isinstance(other, TVar):
            other.unify(self)
        else:
            raise UnificationError(self, other)

    def fold(self, accum, fn):
        for param in self.params:
            accum = self.params[param].fold(accum, fn)
        return fn(accum, self)

    def __repr__(self):
        return "artiq.compiler.types.TMono(%s, %s)" % (repr(self.name), repr(self.params))

    def __getitem__(self, param):
        return self.params[param]

    def __eq__(self, other):
        return isinstance(other, TMono) and \
                self.name == other.name and \
                _map_find(self.params) == _map_find(other.params)

    def __ne__(self, other):
        return not (self == other)

    def __hash__(self):
        return hash((self.name, _freeze(self.params)))

class TTuple(Type):
    """
    A tuple type.

    :ivar elts: (list of :class:`Type`) elements
    """

    attributes = OrderedDict()

    def __init__(self, elts=[]):
        self.elts = elts

    def find(self):
        return self

    def unify(self, other):
        if isinstance(other, TTuple) and len(self.elts) == len(other.elts):
            for selfelt, otherelt in zip(self.elts, other.elts):
                selfelt.unify(otherelt)
        elif isinstance(other, TVar):
            other.unify(self)
        else:
            raise UnificationError(self, other)

    def fold(self, accum, fn):
        for elt in self.elts:
            accum = elt.fold(accum, fn)
        return fn(accum, self)

    def __repr__(self):
        return "artiq.compiler.types.TTuple(%s)" % repr(self.elts)

    def __eq__(self, other):
        return isinstance(other, TTuple) and \
                _map_find(self.elts) == _map_find(other.elts)

    def __ne__(self, other):
        return not (self == other)

class _TPointer(TMono):
    def __init__(self):
        super().__init__("pointer")

class TFunction(Type):
    """
    A function type.

    :ivar args: (:class:`collections.OrderedDict` of string to :class:`Type`)
        mandatory arguments
    :ivar optargs: (:class:`collections.OrderedDict` of string to :class:`Type`)
        optional arguments
    :ivar ret: (:class:`Type`)
        return type
    :ivar delay: (:class:`Type`)
        RTIO delay
    """

    attributes = OrderedDict([
        ('__code__', _TPointer()),
        ('__closure__', _TPointer()),
    ])

    def __init__(self, args, optargs, ret):
        assert isinstance(args, OrderedDict)
        assert isinstance(optargs, OrderedDict)
        assert isinstance(ret, Type)
        self.args, self.optargs, self.ret = args, optargs, ret
        self.delay = TVar()

    def arity(self):
        return len(self.args) + len(self.optargs)

    def find(self):
        return self

    def unify(self, other):
        if isinstance(other, TFunction) and \
                self.args.keys() == other.args.keys() and \
                self.optargs.keys() == other.optargs.keys():
            for selfarg, otherarg in zip(list(self.args.values()) + list(self.optargs.values()),
                                         list(other.args.values()) + list(other.optargs.values())):
                selfarg.unify(otherarg)
            self.ret.unify(other.ret)
            self.delay.unify(other.delay)
        elif isinstance(other, TVar):
            other.unify(self)
        else:
            raise UnificationError(self, other)

    def fold(self, accum, fn):
        for arg in self.args:
            accum = self.args[arg].fold(accum, fn)
        for optarg in self.optargs:
            accum = self.optargs[optarg].fold(accum, fn)
        accum = self.ret.fold(accum, fn)
        return fn(accum, self)

    def __repr__(self):
        return "artiq.compiler.types.TFunction({}, {}, {})".format(
            repr(self.args), repr(self.optargs), repr(self.ret))

    def __eq__(self, other):
        return isinstance(other, TFunction) and \
                _map_find(self.args) == _map_find(other.args) and \
                _map_find(self.optargs) == _map_find(other.optargs)

    def __ne__(self, other):
        return not (self == other)

    def __hash__(self):
        return hash((_freeze(self.args), _freeze(self.optargs), self.ret))

class TRPCFunction(TFunction):
    """
    A function type of a remote function.

    :ivar service: (int) RPC service number
    """

    attributes = OrderedDict()

    def __init__(self, args, optargs, ret, service):
        super().__init__(args, optargs, ret)
        self.service = service
        self.delay = TFixedDelay(iodelay.Const(0))

    def unify(self, other):
        if isinstance(other, TRPCFunction) and \
                self.service == other.service:
            super().unify(other)
        elif isinstance(other, TVar):
            other.unify(self)
        else:
            raise UnificationError(self, other)

class TCFunction(TFunction):
    """
    A function type of a runtime-provided C function.

    :ivar name: (str) C function name
    """

    attributes = OrderedDict()

    def __init__(self, args, ret, name):
        super().__init__(args, OrderedDict(), ret)
        self.name = name
        self.delay = TFixedDelay(iodelay.Const(0))

    def unify(self, other):
        if isinstance(other, TCFunction) and \
                self.name == other.name:
            super().unify(other)
        elif isinstance(other, TVar):
            other.unify(self)
        else:
            raise UnificationError(self, other)

class TBuiltin(Type):
    """
    An instance of builtin type. Every instance of a builtin
    type is treated specially according to its name.
    """

    def __init__(self, name):
        assert isinstance(name, str)
        self.name = name
        self.attributes = OrderedDict()

    def find(self):
        return self

    def unify(self, other):
        if self != other:
            raise UnificationError(self, other)

    def fold(self, accum, fn):
        return fn(accum, self)

    def __repr__(self):
        return "artiq.compiler.types.{}({})".format(type(self).__name__, repr(self.name))

    def __eq__(self, other):
        return isinstance(other, TBuiltin) and \
                self.name == other.name

    def __ne__(self, other):
        return not (self == other)

    def __hash__(self):
        return hash(self.name)

class TBuiltinFunction(TBuiltin):
    """
    A type of a builtin function.
    """

class TConstructor(TBuiltin):
    """
    A type of a constructor of a class, e.g. ``list``.
    Note that this is not the same as the type of an instance of
    the class, which is ``TMono("list", ...)`` (or a descendant).

    :ivar instance: (:class:`Type`)
        the type of the instance created by this constructor
    """

    def __init__(self, instance):
        assert isinstance(instance, TMono)
        super().__init__(instance.name)
        self.instance = instance

class TExceptionConstructor(TConstructor):
    """
    A type of a constructor of an exception, e.g. ``Exception``.
    Note that this is not the same as the type of an instance of
    the class, which is ``TMono("Exception", ...)``.
    """

class TInstance(TMono):
    """
    A type of an instance of a user-defined class.

    :ivar constructor: (:class:`TConstructor`)
        the type of the constructor with which this instance
        was created
    """

    def __init__(self, name, attributes):
        assert isinstance(attributes, OrderedDict)
        super().__init__(name)
        self.attributes = attributes

    def __repr__(self):
        return "artiq.compiler.types.TInstance({}, {})".format(
            repr(self.name), repr(self.attributes))

class TMethod(TMono):
    """
    A type of a method.
    """

    def __init__(self, self_type, function_type):
        super().__init__("method", {"self": self_type, "fn": function_type})
        self.attributes = OrderedDict([
            ("__func__", function_type),
            ("__self__", self_type),
        ])

class TValue(Type):
    """
    A type-level value (such as the integer denoting the width of
    a generic integer type).
    """

    def __init__(self, value):
        self.value = value

    def find(self):
        return self

    def unify(self, other):
        if isinstance(other, TVar):
            other.unify(self)
        elif self != other:
            raise UnificationError(self, other)

    def fold(self, accum, fn):
        return fn(accum, self)

    def __repr__(self):
        return "artiq.compiler.types.TValue(%s)" % repr(self.value)

    def __eq__(self, other):
        return isinstance(other, TValue) and \
                self.value == other.value

    def __ne__(self, other):
        return not (self == other)

class TDelay(Type):
    """
    The type-level representation of IO delay.
    """

    def __init__(self, duration, cause):
        assert duration is None or isinstance(duration, iodelay.Expr)
        assert cause is None or isinstance(cause, diagnostic.Diagnostic)
        assert (not (duration and cause)) and (duration or cause)
        self.duration, self.cause = duration, cause

    def is_fixed(self):
        return self.duration is not None

    def is_indeterminate(self):
        return self.cause is not None

    def find(self):
        return self

    def unify(self, other):
        other = other.find()

        if self.is_fixed() and other.is_fixed() and \
                self.duration.fold() == other.duration.fold():
            pass
        elif isinstance(other, TVar):
            other.unify(self)
        else:
            raise UnificationError(self, other)

    def fold(self, accum, fn):
        # delay types do not participate in folding
        pass

    def __eq__(self, other):
        return isinstance(other, TDelay) and \
                (self.duration == other.duration and \
                 self.cause == other.cause)

    def __ne__(self, other):
        return not (self == other)

    def __repr__(self):
        if self.duration is None:
            return "<{}.TIndeterminateDelay>".format(__name__)
        elif self.cause is None:
            return "{}.TFixedDelay({})".format(__name__, self.duration)
        else:
            assert False

def TIndeterminateDelay(cause):
    return TDelay(None, cause)

def TFixedDelay(duration):
    return TDelay(duration, None)


def is_var(typ):
    return isinstance(typ.find(), TVar)

def is_mono(typ, name=None, **params):
    typ = typ.find()
    params_match = True
    for param in params:
        if param not in typ.params:
            return False
        params_match = params_match and \
            typ.params[param].find() == params[param].find()
    return isinstance(typ, TMono) and \
        (name is None or (typ.name == name and params_match))

def is_polymorphic(typ):
    return typ.fold(False, lambda accum, typ: accum or is_var(typ))

def is_tuple(typ, elts=None):
    typ = typ.find()
    if elts:
        return isinstance(typ, TTuple) and \
            elts == typ.elts
    else:
        return isinstance(typ, TTuple)

def _is_pointer(typ):
    return isinstance(typ.find(), _TPointer)

def is_function(typ):
    return isinstance(typ.find(), TFunction)

def is_rpc_function(typ):
    return isinstance(typ.find(), TRPCFunction)

def is_c_function(typ):
    return isinstance(typ.find(), TCFunction)

def is_builtin(typ, name=None):
    typ = typ.find()
    if name is None:
        return isinstance(typ, TBuiltin)
    else:
        return isinstance(typ, TBuiltin) and \
            typ.name == name

def is_constructor(typ, name=None):
    typ = typ.find()
    if name is not None:
        return isinstance(typ, TConstructor) and \
            typ.name == name
    else:
        return isinstance(typ, TConstructor)

def is_exn_constructor(typ, name=None):
    typ = typ.find()
    if name is not None:
        return isinstance(typ, TExceptionConstructor) and \
            typ.name == name
    else:
        return isinstance(typ, TExceptionConstructor)

def is_instance(typ, name=None):
    typ = typ.find()
    if name is not None:
        return isinstance(typ, TInstance) and \
            typ.name == name
    else:
        return isinstance(typ, TInstance)

def is_method(typ):
    return isinstance(typ.find(), TMethod)

def get_method_self(typ):
    if is_method(typ):
        return typ.find().params["self"]

def get_method_function(typ):
    if is_method(typ):
        return typ.find().params["fn"]

def is_value(typ):
    return isinstance(typ.find(), TValue)

def get_value(typ):
    typ = typ.find()
    if isinstance(typ, TVar):
        return None
    elif isinstance(typ, TValue):
        return typ.value
    else:
        assert False

def is_delay(typ):
    return isinstance(typ.find(), TDelay)

def is_fixed_delay(typ):
    return is_delay(typ) and typ.find().is_fixed()

def is_indeterminate_delay(typ):
    return is_delay(typ) and typ.find().is_indeterminate()
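
A hedged sketch of how these predicates are meant to be queried (illustrative only; it uses the classes defined earlier in this file):

    int32 = TMono("int", {"width": TValue(32)})
    lst = TMono("list", {"elt": int32})
    assert is_mono(lst, "list") and is_mono(lst, "list", elt=int32)
    assert not is_var(lst) and is_value(lst["elt"]["width"])
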
class TypePrinter(object):
    """
    A class that prints types using Python-like syntax and gives
    type variables sequential alphabetic names.
    """

    def __init__(self):
        self.gen = genalnum()
        self.map = {}
        self.recurse_guard = set()

    def name(self, typ):
        typ = typ.find()
        if isinstance(typ, TVar):
            if typ not in self.map:
                self.map[typ] = "'%s" % next(self.gen)
            return self.map[typ]
        elif isinstance(typ, TInstance):
            if typ in self.recurse_guard:
                return "<instance {}>".format(typ.name)
            else:
                self.recurse_guard.add(typ)
                attrs = ", ".join(["{}: {}".format(attr, self.name(typ.attributes[attr]))
                                   for attr in typ.attributes])
                return "<instance {} {{{}}}>".format(typ.name, attrs)
        elif isinstance(typ, TMono):
            if typ.params == {}:
                return typ.name
            else:
                return "%s(%s)" % (typ.name, ", ".join(
                    ["%s=%s" % (k, self.name(typ.params[k])) for k in typ.params]))
        elif isinstance(typ, TTuple):
            if len(typ.elts) == 1:
                return "(%s,)" % self.name(typ.elts[0])
            else:
                return "(%s)" % ", ".join(list(map(self.name, typ.elts)))
        elif isinstance(typ, (TFunction, TRPCFunction, TCFunction)):
            args = []
            args += [ "%s:%s" % (arg, self.name(typ.args[arg])) for arg in typ.args]
            args += ["?%s:%s" % (arg, self.name(typ.optargs[arg])) for arg in typ.optargs]
            signature = "(%s)->%s" % (", ".join(args), self.name(typ.ret))

            delay = typ.delay.find()
            if isinstance(delay, TVar):
                signature += " delay({})".format(self.name(delay))
            elif not (delay.is_fixed() and iodelay.is_zero(delay.duration)):
                signature += " " + self.name(delay)

            if isinstance(typ, TRPCFunction):
                return "rpc({}) {}".format(typ.service, signature)
            if isinstance(typ, TCFunction):
                return "ffi({}) {}".format(repr(typ.name), signature)
            elif isinstance(typ, TFunction):
                return signature
        elif isinstance(typ, TBuiltinFunction):
            return "<function {}>".format(typ.name)
        elif isinstance(typ, (TConstructor, TExceptionConstructor)):
            if typ in self.recurse_guard:
                return "<constructor {}>".format(typ.name)
            else:
                self.recurse_guard.add(typ)
                attrs = ", ".join(["{}: {}".format(attr, self.name(typ.attributes[attr]))
                                   for attr in typ.attributes])
                return "<constructor {} {{{}}}>".format(typ.name, attrs)
        elif isinstance(typ, TValue):
            return repr(typ.value)
        elif isinstance(typ, TDelay):
            if typ.is_fixed():
                return "delay({} mu)".format(typ.duration)
            elif typ.is_indeterminate():
                return "delay(?)"
            else:
                assert False
        else:
            assert False
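
A hedged usage sketch for the printer above (illustrative only; the exact rendering, including the type-variable name picked for the delay, is an assumption):

    from collections import OrderedDict
    fn = TFunction(OrderedDict(x=TMono("int", {"width": TValue(32)})),
                   OrderedDict(), TTuple([]))
    print(TypePrinter().name(fn))   # e.g. "(x:int(width=32))->() delay('a)"
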

@@ -0,0 +1,3 @@
from .monomorphism import MonomorphismValidator
from .escape import EscapeValidator
from .local_access import LocalAccessValidator

@@ -0,0 +1,311 @@
"""
:class:`EscapeValidator` verifies that no mutable data escapes
the region of its allocation.
"""

import functools
from pythonparser import algorithm, diagnostic
from .. import asttyped, types, builtins

def has_region(typ):
    return typ.fold(False, lambda accum, typ: accum or builtins.is_allocated(typ))

class Region:
    """
    A last-in-first-out allocation region. It is tied to lexical scoping
    and is represented internally simply by a source range.

    :ivar range: (:class:`pythonparser.source.Range` or None)
    """

    def __init__(self, source_range=None):
        self.range = source_range

    def present(self):
        return bool(self.range)

    def includes(self, other):
        assert self.range
        assert self.range.source_buffer == other.range.source_buffer

        return self.range.begin_pos <= other.range.begin_pos and \
            self.range.end_pos >= other.range.end_pos

    def intersects(self, other):
        assert self.range
        assert self.range.source_buffer == other.range.source_buffer

        return (self.range.begin_pos <= other.range.begin_pos <= self.range.end_pos and \
                other.range.end_pos > self.range.end_pos) or \
               (other.range.begin_pos <= self.range.begin_pos <= other.range.end_pos and \
                self.range.end_pos > other.range.end_pos)

    def contract(self, other):
        if not self.range:
            self.range = other.range

    def outlives(lhs, rhs):
        if lhs is None: # lhs lives forever
            return True
        elif rhs is None: # rhs lives forever, lhs does not
            return False
        else:
            assert not lhs.intersects(rhs)
            return lhs.includes(rhs)

    def __repr__(self):
        return "Region({})".format(repr(self.range))
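
A hedged illustration of what outlives() means (not part of the commit; the real ranges are pythonparser.source.Range objects, faked here with a namedtuple exposing only the attributes Region reads):

    from collections import namedtuple
    FakeRange = namedtuple("FakeRange", "source_buffer begin_pos end_pos")
    buf = object()
    outer, inner = Region(FakeRange(buf, 0, 100)), Region(FakeRange(buf, 10, 20))
    assert Region.outlives(None, inner)    # the global region lives forever
    assert Region.outlives(outer, inner)   # an enclosing scope outlives a nested one
    assert not Region.outlives(inner, outer)
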
class RegionOf(algorithm.Visitor):
    """
    Visit an expression and return the list of regions that must
    be alive for the expression to execute.
    """

    def __init__(self, env_stack, youngest_region):
        self.env_stack, self.youngest_region = env_stack, youngest_region

    # Liveness determined by assignments
    def visit_NameT(self, node):
        # First, look at stack regions
        for region in reversed(self.env_stack[1:]):
            if node.id in region:
                return region[node.id]

        # Then, look at the global region of this module
        if node.id in self.env_stack[0]:
            return None

        assert False

    # Value lives as long as the current scope, if it's mutable,
    # or else forever
    def visit_sometimes_allocating(self, node):
        if has_region(node.type):
            return self.youngest_region
        else:
            return None

    visit_BinOpT = visit_sometimes_allocating
    visit_CallT = visit_sometimes_allocating

    # Value lives as long as the object/container, if it's mutable,
    # or else forever
    def visit_accessor(self, node):
        if has_region(node.type):
            return self.visit(node.value)
        else:
            return None

    visit_AttributeT = visit_accessor
    visit_SubscriptT = visit_accessor

    # Value lives as long as the shortest living operand
    def visit_selecting(self, nodes):
        regions = [self.visit(node) for node in nodes]
        regions = list(filter(lambda x: x, regions))
        if any(regions):
            regions.sort(key=functools.cmp_to_key(Region.outlives), reverse=True)
            return regions[0]
        else:
            return None

    def visit_BoolOpT(self, node):
        return self.visit_selecting(node.values)

    def visit_IfExpT(self, node):
        return self.visit_selecting([node.body, node.orelse])

    def visit_TupleT(self, node):
        return self.visit_selecting(node.elts)

    # Value lives as long as the current scope
    def visit_allocating(self, node):
        return self.youngest_region

    visit_DictT = visit_allocating
    visit_DictCompT = visit_allocating
    visit_GeneratorExpT = visit_allocating
    visit_LambdaT = visit_allocating
    visit_ListT = visit_allocating
    visit_ListCompT = visit_allocating
    visit_SetT = visit_allocating
    visit_SetCompT = visit_allocating

    # Value lives forever
    def visit_immutable(self, node):
        assert not has_region(node.type)
        return None

    visit_NameConstantT = visit_immutable
    visit_NumT = visit_immutable
    visit_EllipsisT = visit_immutable
    visit_UnaryOpT = visit_immutable
    visit_CompareT = visit_immutable

    # Value is mutable, but still lives forever
    def visit_StrT(self, node):
        return None

    # Not implemented
    def visit_unimplemented(self, node):
        assert False

    visit_StarredT = visit_unimplemented
    visit_YieldT = visit_unimplemented
    visit_YieldFromT = visit_unimplemented


class AssignedNamesOf(algorithm.Visitor):
    """
    Visit an expression and return the list of names that appear
    on the lhs of assignment, directly or through an accessor.
    """

    def visit_NameT(self, node):
        return [node]

    def visit_accessor(self, node):
        return self.visit(node.value)

    visit_AttributeT = visit_accessor
    visit_SubscriptT = visit_accessor

    def visit_sequence(self, node):
        return functools.reduce(list.__add__, map(self.visit, node.elts))

    visit_TupleT = visit_sequence
    visit_ListT = visit_sequence

    def visit_StarredT(self, node):
        assert False


class EscapeValidator(algorithm.Visitor):
    def __init__(self, engine):
        self.engine = engine
        self.youngest_region = None
        self.env_stack = []
        self.youngest_env = None

    def _region_of(self, expr):
        return RegionOf(self.env_stack, self.youngest_region).visit(expr)

    def _names_of(self, expr):
        return AssignedNamesOf().visit(expr)

    def _diagnostics_for(self, region, loc, descr="the value of the expression"):
        if region:
            return [
                diagnostic.Diagnostic("note",
                    "{descr} is alive from this point...", {"descr": descr},
                    region.range.begin()),
                diagnostic.Diagnostic("note",
                    "... to this point", {},
                    region.range.end())
            ]
        else:
            return [
                diagnostic.Diagnostic("note",
                    "{descr} is alive forever", {"descr": descr},
                    loc)
            ]

    def visit_in_region(self, node, region, typing_env):
        try:
            old_youngest_region = self.youngest_region
            self.youngest_region = region

            old_youngest_env = self.youngest_env
            self.youngest_env = {}

            for name in typing_env:
                if has_region(typing_env[name]):
                    self.youngest_env[name] = Region(None) # not yet known
                else:
                    self.youngest_env[name] = None # lives forever
            self.env_stack.append(self.youngest_env)

            self.generic_visit(node)
        finally:
            self.env_stack.pop()
            self.youngest_env = old_youngest_env
            self.youngest_region = old_youngest_region

    def visit_ModuleT(self, node):
        self.visit_in_region(node, None, node.typing_env)

    def visit_FunctionDefT(self, node):
        self.youngest_env[node.name] = self.youngest_region
        self.visit_in_region(node, Region(node.loc), node.typing_env)

    def visit_ClassDefT(self, node):
        self.youngest_env[node.name] = self.youngest_region
        self.visit_in_region(node, Region(node.loc), node.constructor_type.attributes)

    # Only three ways for a pointer to escape:
    # * Assigning or op-assigning it (we ensure an outlives relationship)
    # * Returning it (we only allow returning values that live forever)
    # * Raising it (we forbid allocating exceptions that refer to mutable data)¹
    #
    # Literals don't count: a constructed object is always
    # outlived by all its constituents.
    # Closures don't count: see above.
    # Calling functions doesn't count: arguments never outlive
    # the function body.
    #
    # ¹Strings are currently never allocated with a limited lifetime,
    # and exceptions can only refer to strings, so we don't actually check
    # this property. But we will need to, if string operations are ever added.

    def visit_assignment(self, target, value, is_aug_assign=False):
        target_region = self._region_of(target)
        value_region = self._region_of(value) if not is_aug_assign else self.youngest_region

        # If this is a variable, we might need to contract the live range.
        if value_region is not None:
            for name in self._names_of(target):
                region = self._region_of(name)
                if region is not None:
                    region.contract(value_region)

        # The assigned value should outlive the assignee
        if not Region.outlives(value_region, target_region):
            if is_aug_assign:
                target_desc = "the assignment target, allocated here,"
            else:
                target_desc = "the assignment target"
            note = diagnostic.Diagnostic("note",
                "this expression has type {type}",
                {"type": types.TypePrinter().name(value.type)},
                value.loc)
            diag = diagnostic.Diagnostic("error",
                "the assigned value does not outlive the assignment target", {},
                value.loc, [target.loc],
                notes=self._diagnostics_for(target_region, target.loc,
                                            target_desc) +
                      self._diagnostics_for(value_region, value.loc,
                                            "the assigned value"))
            self.engine.process(diag)

    def visit_Assign(self, node):
        for target in node.targets:
            self.visit_assignment(target, node.value)

    def visit_AugAssign(self, node):
        if builtins.is_allocated(node.target.type):
            # If the target is mutable, op-assignment will allocate
            # in the youngest region.
            self.visit_assignment(node.target, node.value, is_aug_assign=True)

    def visit_Return(self, node):
        region = self._region_of(node.value)
        if region:
            note = diagnostic.Diagnostic("note",
                "this expression has type {type}",
                {"type": types.TypePrinter().name(node.value.type)},
                node.value.loc)
            diag = diagnostic.Diagnostic("error",
                "cannot return a mutable value that does not live forever", {},
                node.value.loc, notes=self._diagnostics_for(region, node.value.loc) + [note])
            self.engine.process(diag)
@ -0,0 +1,175 @@
|
||||||
|
"""
|
||||||
|
:class:`LocalAccessValidator` verifies that local variables
|
||||||
|
are not accessed before being used.
|
||||||
|
"""
|
||||||
|

from functools import reduce
from pythonparser import diagnostic
from .. import ir, analyses

def is_special_variable(name):
    return "$" in name

class LocalAccessValidator:
    def __init__(self, engine):
        self.engine = engine

    def process(self, functions):
        for func in functions:
            self.process_function(func)

    def process_function(self, func):
        # Find all environments and closures allocated in this func.
        environments, closures = [], []
        for insn in func.instructions():
            if isinstance(insn, ir.Alloc) and ir.is_environment(insn.type):
                environments.append(insn)
            elif isinstance(insn, ir.Closure):
                closures.append(insn)

        # Compute initial state of interesting environments.
        # Environments consisting only of internal variables (containing a ".")
        # are ignored.
        initial_state = {}
        for env in environments:
            env_state = {var: False for var in env.type.params if "." not in var}
            if any(env_state):
                initial_state[env] = env_state

        # Traverse the acyclic graph made of basic blocks and forward edges only,
        # while updating the environment state.
        domtree = analyses.DominatorTree(func)
        state = {}
        def traverse(block):
            # Have we computed the state of this block already?
            if block in state:
                return state[block]

            # No! Which forward edges lead to this block?
            # If we dominate a predecessor, it's a back edge instead.
            forward_edge_preds = [pred for pred in block.predecessors()
                                  if block not in domtree.dominators(pred)]

            # Figure out what the state is before the leader
            # instruction of this block.
            pred_states = [traverse(pred) for pred in forward_edge_preds]
            block_state = {}
            if len(pred_states) > 1:
                for env in initial_state:
                    # The variable has to be initialized in all predecessors
                    # in order to be initialized in this block.
                    def merge_state(a, b):
                        return {var: a[var] and b[var] for var in a}
                    block_state[env] = reduce(merge_state,
                                              [state[env] for state in pred_states])
            elif len(pred_states) == 1:
                # The state is the same as at the terminator of predecessor.
                # We'll mutate it, so copy.
                pred_state = pred_states[0]
                for env in initial_state:
                    env_state = pred_state[env]
                    block_state[env] = {var: env_state[var] for var in env_state}
            else:
                # This is the entry block.
                for env in initial_state:
                    env_state = initial_state[env]
                    block_state[env] = {var: env_state[var] for var in env_state}

            # Update the state based on block contents, while validating
            # that no access to uninitialized variables will be done.
            for insn in block.instructions:
                def pred_at_fault(env, var_name):
                    # Find out where the uninitialized state comes from.
                    for pred, pred_state in zip(forward_edge_preds, pred_states):
                        if not pred_state[env][var_name]:
                            return pred

                    # It's the entry block and it was never initialized.
                    return None

                set_local_in_this_frame = False
                if (isinstance(insn, (ir.SetLocal, ir.GetLocal)) and
                        not is_special_variable(insn.var_name)):
                    env, var_name = insn.environment(), insn.var_name

                    # Make sure that the variable is defined in the scope of this function.
                    if env in block_state and var_name in block_state[env]:
                        if isinstance(insn, ir.SetLocal):
                            # We've just initialized it.
                            block_state[env][var_name] = True
                            set_local_in_this_frame = True
                        else: # isinstance(insn, ir.GetLocal)
                            if not block_state[env][var_name]:
                                # Oops, accessing it uninitialized.
                                self._uninitialized_access(insn, var_name,
                                                           pred_at_fault(env, var_name))

                closures_to_check = []

                if (isinstance(insn, (ir.SetLocal, ir.SetAttr, ir.SetElem)) and
                        not set_local_in_this_frame):
                    # Closures may escape via these mechanisms and be invoked elsewhere.
                    if isinstance(insn.value(), ir.Closure):
                        closures_to_check.append(insn.value())

                if isinstance(insn, (ir.Call, ir.Invoke)):
                    # We can't always trace the flow of closures from point of
                    # definition to point of call; however, we know that, by transitiveness
                    # of this analysis, only closures defined in this function can contain
                    # uninitialized variables.
                    #
                    # Thus, enumerate the closures, and check all of them during any operation
                    # that may eventually result in the closure being called.
                    closures_to_check = closures

                for closure in closures_to_check:
                    env = closure.environment()
                    # Make sure this environment has any interesting variables.
                    if env in block_state:
                        for var_name in block_state[env]:
                            if not block_state[env][var_name] and not is_special_variable(var_name):
                                # A closure would capture this variable while it is not always
                                # initialized. Note that this check is transitive.
                                self._uninitialized_access(closure, var_name,
                                                           pred_at_fault(env, var_name))

            # Save the state.
            state[block] = block_state

            return block_state

        for block in func.basic_blocks:
            traverse(block)

    def _uninitialized_access(self, insn, var_name, pred_at_fault):
        if pred_at_fault is not None:
            uninitialized_loc = None
            for pred_insn in reversed(pred_at_fault.instructions):
                if pred_insn.loc is not None:
                    uninitialized_loc = pred_insn.loc.begin()
                    break
            assert uninitialized_loc is not None

            note = diagnostic.Diagnostic("note",
                "variable is not initialized when control flows from this point", {},
                uninitialized_loc)
        else:
            note = None

        if note is not None:
            notes = [note]
        else:
            notes = []

        if isinstance(insn, ir.Closure):
            diag = diagnostic.Diagnostic("error",
                "variable '{name}' can be captured in a closure uninitialized here",
                {"name": var_name},
                insn.loc, notes=notes)
        else:
            diag = diagnostic.Diagnostic("error",
                "variable '{name}' is not always initialized here",
                {"name": var_name},
                insn.loc, notes=notes)

        self.engine.process(diag)
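# Illustrative sketch, not from the patch, of code this validator rejects:
# `x` is assigned on only one path, so at the merge point the per-environment
# state marks it possibly uninitialized and the read below is diagnosed as
# "variable 'x' is not always initialized here".
from artiq.language.core import kernel

@kernel
def branchy(flag):
    if flag:
        x = 1
    return x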
@@ -0,0 +1,39 @@
"""
:class:`MonomorphismValidator` verifies that all type variables have been
elided, which is necessary for code generation.
"""

from pythonparser import algorithm, diagnostic
from .. import asttyped, types, builtins

class MonomorphismValidator(algorithm.Visitor):
    def __init__(self, engine):
        self.engine = engine

    def visit_FunctionDefT(self, node):
        super().generic_visit(node)

        return_type = node.signature_type.find().ret
        if types.is_polymorphic(return_type):
            note = diagnostic.Diagnostic("note",
                "the function has return type {type}",
                {"type": types.TypePrinter().name(return_type)},
                node.name_loc)
            diag = diagnostic.Diagnostic("error",
                "the return type of this function cannot be fully inferred", {},
                node.name_loc, notes=[note])
            self.engine.process(diag)

    def generic_visit(self, node):
        super().generic_visit(node)

        if isinstance(node, asttyped.commontyped):
            if types.is_polymorphic(node.type):
                note = diagnostic.Diagnostic("note",
                    "the expression has type {type}",
                    {"type": types.TypePrinter().name(node.type)},
                    node.loc)
                diag = diagnostic.Diagnostic("error",
                    "the type of this expression cannot be fully inferred", {},
                    node.loc, notes=[note])
                self.engine.process(diag)
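# Illustrative sketch, not from the patch: the element type of `xs` is never
# constrained, so it remains a type variable and this validator reports
# "the type of this expression cannot be fully inferred".
from artiq.language.core import kernel

@kernel
def ambiguous():
    xs = []
    return len(xs)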
@ -3,28 +3,16 @@ from operator import itemgetter
|
||||||
|
|
||||||
class Comm:
|
class Comm:
|
||||||
def __init__(self, dmgr):
|
def __init__(self, dmgr):
|
||||||
pass
|
super().__init__()
|
||||||
|
|
||||||
def switch_clock(self, external):
|
def switch_clock(self, external):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def load(self, kcode):
|
def load(self, kernel_library):
|
||||||
print("================")
|
pass
|
||||||
print(" LLVM IR")
|
|
||||||
print("================")
|
|
||||||
print(kcode)
|
|
||||||
|
|
||||||
def run(self, kname):
|
def run(self):
|
||||||
print("RUN: "+kname)
|
pass
|
||||||
|
|
||||||
def serve(self, rpc_map, exception_map):
|
def serve(self, object_map, symbolizer):
|
||||||
print("================")
|
pass
|
||||||
print(" RPC map")
|
|
||||||
print("================")
|
|
||||||
for k, v in sorted(rpc_map.items(), key=itemgetter(0)):
|
|
||||||
print(str(k)+" -> "+str(v))
|
|
||||||
print("================")
|
|
||||||
print(" Exception map")
|
|
||||||
print("================")
|
|
||||||
for k, v in sorted(exception_map.items(), key=itemgetter(0)):
|
|
||||||
print(str(k)+" -> "+str(v))
|
|
||||||
|
|
|
@ -1,11 +1,10 @@
|
||||||
import struct
|
import struct
|
||||||
import logging
|
import logging
|
||||||
|
import traceback
|
||||||
from enum import Enum
|
from enum import Enum
|
||||||
from fractions import Fraction
|
from fractions import Fraction
|
||||||
|
|
||||||
from artiq.coredevice import runtime_exceptions
|
|
||||||
from artiq.language import core as core_language
|
from artiq.language import core as core_language
|
||||||
from artiq.coredevice.rpc_wrapper import RPCWrapper
|
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
@ -13,22 +12,26 @@ logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
class _H2DMsgType(Enum):
|
class _H2DMsgType(Enum):
|
||||||
LOG_REQUEST = 1
|
LOG_REQUEST = 1
|
||||||
IDENT_REQUEST = 2
|
LOG_CLEAR = 2
|
||||||
SWITCH_CLOCK = 3
|
|
||||||
|
|
||||||
LOAD_OBJECT = 4
|
IDENT_REQUEST = 3
|
||||||
RUN_KERNEL = 5
|
SWITCH_CLOCK = 4
|
||||||
|
|
||||||
RPC_REPLY = 6
|
LOAD_LIBRARY = 5
|
||||||
|
RUN_KERNEL = 6
|
||||||
|
|
||||||
FLASH_READ_REQUEST = 7
|
RPC_REPLY = 7
|
||||||
FLASH_WRITE_REQUEST = 8
|
RPC_EXCEPTION = 8
|
||||||
FLASH_ERASE_REQUEST = 9
|
|
||||||
FLASH_REMOVE_REQUEST = 10
|
FLASH_READ_REQUEST = 9
|
||||||
|
FLASH_WRITE_REQUEST = 10
|
||||||
|
FLASH_ERASE_REQUEST = 11
|
||||||
|
FLASH_REMOVE_REQUEST = 12
|
||||||
|
|
||||||
|
|
||||||
class _D2HMsgType(Enum):
|
class _D2HMsgType(Enum):
|
||||||
LOG_REPLY = 1
|
LOG_REPLY = 1
|
||||||
|
|
||||||
IDENT_REPLY = 2
|
IDENT_REPLY = 2
|
||||||
CLOCK_SWITCH_COMPLETED = 3
|
CLOCK_SWITCH_COMPLETED = 3
|
||||||
CLOCK_SWITCH_FAILED = 4
|
CLOCK_SWITCH_FAILED = 4
|
||||||
|
@ -50,9 +53,16 @@ class _D2HMsgType(Enum):
|
||||||
class UnsupportedDevice(Exception):
|
class UnsupportedDevice(Exception):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
class RPCReturnValueError(ValueError):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
class CommGeneric:
|
class CommGeneric:
|
||||||
# methods for derived classes to implement
|
def __init__(self):
|
||||||
|
self._read_type = self._write_type = None
|
||||||
|
self._read_length = 0
|
||||||
|
self._write_buffer = []
|
||||||
|
|
||||||
def open(self):
|
def open(self):
|
||||||
"""Opens the communication channel.
|
"""Opens the communication channel.
|
||||||
Must do nothing if already opened."""
|
Must do nothing if already opened."""
|
||||||
|
@ -72,175 +82,412 @@ class CommGeneric:
|
||||||
"""Writes exactly length bytes to the communication channel.
|
"""Writes exactly length bytes to the communication channel.
|
||||||
The channel is assumed to be opened."""
|
The channel is assumed to be opened."""
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
|
#
|
||||||
|
# Reader interface
|
||||||
#
|
#
|
||||||
|
|
||||||
def _read_header(self):
|
def _read_header(self):
|
||||||
self.open()
|
self.open()
|
||||||
|
|
||||||
|
if self._read_length > 0:
|
||||||
|
raise IOError("Read underrun ({} bytes remaining)".
|
||||||
|
format(self._read_length))
|
||||||
|
|
||||||
|
# Wait for a synchronization sequence, 5a 5a 5a 5a.
|
||||||
sync_count = 0
|
sync_count = 0
|
||||||
while sync_count < 4:
|
while sync_count < 4:
|
||||||
(c, ) = struct.unpack("B", self.read(1))
|
(sync_byte, ) = struct.unpack("B", self.read(1))
|
||||||
if c == 0x5a:
|
if sync_byte == 0x5a:
|
||||||
sync_count += 1
|
sync_count += 1
|
||||||
else:
|
else:
|
||||||
sync_count = 0
|
sync_count = 0
|
||||||
length = struct.unpack(">l", self.read(4))[0]
|
|
||||||
if not length: # inband connection close
|
|
||||||
raise OSError("Connection closed")
|
|
||||||
tyv = struct.unpack("B", self.read(1))[0]
|
|
||||||
ty = _D2HMsgType(tyv)
|
|
||||||
logger.debug("receiving message: type=%r length=%d", ty, length)
|
|
||||||
return length, ty
|
|
||||||
|
|
||||||
def _write_header(self, length, ty):
|
# Read message header.
|
||||||
|
(self._read_length, ) = struct.unpack(">l", self.read(4))
|
||||||
|
if not self._read_length: # inband connection close
|
||||||
|
raise OSError("Connection closed")
|
||||||
|
|
||||||
|
(raw_type, ) = struct.unpack("B", self.read(1))
|
||||||
|
self._read_type = _D2HMsgType(raw_type)
|
||||||
|
|
||||||
|
if self._read_length < 9:
|
||||||
|
raise IOError("Read overrun in message header ({} remaining)".
|
||||||
|
format(self._read_length))
|
||||||
|
self._read_length -= 9
|
||||||
|
|
||||||
|
logger.debug("receiving message: type=%r length=%d",
|
||||||
|
self._read_type, self._read_length)
|
||||||
|
|
||||||
|
def _read_expect(self, ty):
|
||||||
|
if self._read_type != ty:
|
||||||
|
raise IOError("Incorrect reply from device: {} (expected {})".
|
||||||
|
format(self._read_type, ty))
|
||||||
|
|
||||||
|
def _read_empty(self, ty):
|
||||||
|
self._read_header()
|
||||||
|
self._read_expect(ty)
|
||||||
|
|
||||||
|
def _read_chunk(self, length):
|
||||||
|
if self._read_length < length:
|
||||||
|
raise IOError("Read overrun while trying to read {} bytes ({} remaining)"
|
||||||
|
" in packet {}".
|
||||||
|
format(length, self._read_length, self._read_type))
|
||||||
|
|
||||||
|
self._read_length -= length
|
||||||
|
return self.read(length)
|
||||||
|
|
||||||
|
def _read_int8(self):
|
||||||
|
(value, ) = struct.unpack("B", self._read_chunk(1))
|
||||||
|
return value
|
||||||
|
|
||||||
|
def _read_int32(self):
|
||||||
|
(value, ) = struct.unpack(">l", self._read_chunk(4))
|
||||||
|
return value
|
||||||
|
|
||||||
|
def _read_int64(self):
|
||||||
|
(value, ) = struct.unpack(">q", self._read_chunk(8))
|
||||||
|
return value
|
||||||
|
|
||||||
|
def _read_float64(self):
|
||||||
|
(value, ) = struct.unpack(">d", self._read_chunk(8))
|
||||||
|
return value
|
||||||
|
|
||||||
|
def _read_bytes(self):
|
||||||
|
return self._read_chunk(self._read_int32())
|
||||||
|
|
||||||
|
def _read_string(self):
|
||||||
|
return self._read_bytes()[:-1].decode('utf-8')
|
||||||
|
|
||||||
|
#
|
||||||
|
# Writer interface
|
||||||
|
#
|
||||||
|
|
||||||
|
def _write_header(self, ty):
|
||||||
self.open()
|
self.open()
|
||||||
logger.debug("sending message: type=%r length=%d", ty, length)
|
|
||||||
self.write(struct.pack(">ll", 0x5a5a5a5a, length))
|
logger.debug("preparing to send message: type=%r", ty)
|
||||||
if ty is not None:
|
self._write_type = ty
|
||||||
self.write(struct.pack("B", ty.value))
|
self._write_buffer = []
|
||||||
|
|
||||||
|
def _write_flush(self):
|
||||||
|
# Calculate message size.
|
||||||
|
length = sum([len(chunk) for chunk in self._write_buffer])
|
||||||
|
logger.debug("sending message: type=%r length=%d", self._write_type, length)
|
||||||
|
|
||||||
|
# Write synchronization sequence, header and body.
|
||||||
|
self.write(struct.pack(">llB", 0x5a5a5a5a,
|
||||||
|
9 + length, self._write_type.value))
|
||||||
|
for chunk in self._write_buffer:
|
||||||
|
self.write(chunk)
|
||||||
|
|
||||||
|
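# Standalone sketch of the framing shared by _write_flush() and _read_header():
# a 0x5a5a5a5a sync word, a 32-bit total length that includes the 9-byte
# header, one type byte, then the payload. The helper names are invented for
# the example; only the struct formats used in the code above are assumed.
import struct

SYNC = 0x5a5a5a5a
HEADER_SIZE = 9  # 4 sync bytes + 4 length bytes + 1 type byte

def frame_message(type_value, payload):
    return struct.pack(">llB", SYNC, HEADER_SIZE + len(payload), type_value) + payload

def parse_frame(data):
    sync, length, type_value = struct.unpack(">llB", data[:HEADER_SIZE])
    assert sync == SYNC
    return type_value, data[HEADER_SIZE:length]

# Round trip of a message carrying three payload bytes.
assert parse_frame(frame_message(5, b"\x01\x02\x03")) == (5, b"\x01\x02\x03")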
def _write_empty(self, ty):
|
||||||
|
self._write_header(ty)
|
||||||
|
self._write_flush()
|
||||||
|
|
||||||
|
def _write_chunk(self, chunk):
|
||||||
|
self._write_buffer.append(chunk)
|
||||||
|
|
||||||
|
def _write_int8(self, value):
|
||||||
|
self._write_buffer.append(struct.pack("B", value))
|
||||||
|
|
||||||
|
def _write_int32(self, value):
|
||||||
|
self._write_buffer.append(struct.pack(">l", value))
|
||||||
|
|
||||||
|
def _write_int64(self, value):
|
||||||
|
self._write_buffer.append(struct.pack(">q", value))
|
||||||
|
|
||||||
|
def _write_float64(self, value):
|
||||||
|
self._write_buffer.append(struct.pack(">d", value))
|
||||||
|
|
||||||
|
def _write_bytes(self, value):
|
||||||
|
self._write_int32(len(value))
|
||||||
|
self._write_buffer.append(value)
|
||||||
|
|
||||||
|
def _write_string(self, value):
|
||||||
|
self._write_bytes(value.encode("utf-8") + b"\0")
|
||||||
|
|
||||||
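# Hedged summary of the scalar encodings used by the reader/writer helpers
# above: int8 -> "B", int32 -> ">l", int64 -> ">q", float64 -> ">d"; bytes are
# length-prefixed and strings are UTF-8 with a trailing NUL. The helper names
# below are invented for the sketch.
import struct

def encode_string(value):
    data = value.encode("utf-8") + b"\0"
    return struct.pack(">l", len(data)) + data

def decode_string(buffer):
    (length, ) = struct.unpack(">l", buffer[:4])
    return buffer[4:4 + length][:-1].decode("utf-8")

assert decode_string(encode_string("idle kernel")) == "idle kernel"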
|
#
|
||||||
|
# Exported APIs
|
||||||
|
#
|
||||||
|
|
||||||
def reset_session(self):
|
def reset_session(self):
|
||||||
self._write_header(0, None)
|
self.write(struct.pack(">ll", 0x5a5a5a5a, 0))
|
||||||
|
|
||||||
def check_ident(self):
|
def check_ident(self):
|
||||||
self._write_header(9, _H2DMsgType.IDENT_REQUEST)
|
self._write_empty(_H2DMsgType.IDENT_REQUEST)
|
||||||
_, ty = self._read_header()
|
|
||||||
if ty != _D2HMsgType.IDENT_REPLY:
|
self._read_header()
|
||||||
raise IOError("Incorrect reply from device: {}".format(ty))
|
self._read_expect(_D2HMsgType.IDENT_REPLY)
|
||||||
(reply, ) = struct.unpack("B", self.read(1))
|
runtime_id = self._read_chunk(4)
|
||||||
runtime_id = chr(reply)
|
if runtime_id != b"AROR":
|
||||||
for i in range(3):
|
|
||||||
(reply, ) = struct.unpack("B", self.read(1))
|
|
||||||
runtime_id += chr(reply)
|
|
||||||
if runtime_id != "AROR":
|
|
||||||
raise UnsupportedDevice("Unsupported runtime ID: {}"
|
raise UnsupportedDevice("Unsupported runtime ID: {}"
|
||||||
.format(runtime_id))
|
.format(runtime_id))
|
||||||
|
|
||||||
def switch_clock(self, external):
|
def switch_clock(self, external):
|
||||||
self._write_header(10, _H2DMsgType.SWITCH_CLOCK)
|
self._write_header(_H2DMsgType.SWITCH_CLOCK)
|
||||||
self.write(struct.pack("B", int(external)))
|
self._write_int8(external)
|
||||||
_, ty = self._read_header()
|
self._write_flush()
|
||||||
if ty != _D2HMsgType.CLOCK_SWITCH_COMPLETED:
|
|
||||||
raise IOError("Incorrect reply from device: {}".format(ty))
|
|
||||||
|
|
||||||
def load(self, kcode):
|
self._read_empty(_D2HMsgType.CLOCK_SWITCH_COMPLETED)
|
||||||
self._write_header(len(kcode) + 9, _H2DMsgType.LOAD_OBJECT)
|
|
||||||
self.write(kcode)
|
|
||||||
_, ty = self._read_header()
|
|
||||||
if ty != _D2HMsgType.LOAD_COMPLETED:
|
|
||||||
raise IOError("Incorrect reply from device: "+str(ty))
|
|
||||||
|
|
||||||
def run(self, kname):
|
|
||||||
self._write_header(len(kname) + 9, _H2DMsgType.RUN_KERNEL)
|
|
||||||
self.write(bytes(kname, "ascii"))
|
|
||||||
logger.debug("running kernel: %s", kname)
|
|
||||||
|
|
||||||
def flash_storage_read(self, key):
|
|
||||||
self._write_header(9+len(key), _H2DMsgType.FLASH_READ_REQUEST)
|
|
||||||
self.write(key)
|
|
||||||
length, ty = self._read_header()
|
|
||||||
if ty != _D2HMsgType.FLASH_READ_REPLY:
|
|
||||||
raise IOError("Incorrect reply from device: {}".format(ty))
|
|
||||||
value = self.read(length - 9)
|
|
||||||
return value
|
|
||||||
|
|
||||||
def flash_storage_write(self, key, value):
|
|
||||||
self._write_header(9+len(key)+1+len(value),
|
|
||||||
_H2DMsgType.FLASH_WRITE_REQUEST)
|
|
||||||
self.write(key)
|
|
||||||
self.write(b"\x00")
|
|
||||||
self.write(value)
|
|
||||||
_, ty = self._read_header()
|
|
||||||
if ty != _D2HMsgType.FLASH_OK_REPLY:
|
|
||||||
if ty == _D2HMsgType.FLASH_ERROR_REPLY:
|
|
||||||
raise IOError("Flash storage is full")
|
|
||||||
else:
|
|
||||||
raise IOError("Incorrect reply from device: {}".format(ty))
|
|
||||||
|
|
||||||
def flash_storage_erase(self):
|
|
||||||
self._write_header(9, _H2DMsgType.FLASH_ERASE_REQUEST)
|
|
||||||
_, ty = self._read_header()
|
|
||||||
if ty != _D2HMsgType.FLASH_OK_REPLY:
|
|
||||||
raise IOError("Incorrect reply from device: {}".format(ty))
|
|
||||||
|
|
||||||
def flash_storage_remove(self, key):
|
|
||||||
self._write_header(9+len(key), _H2DMsgType.FLASH_REMOVE_REQUEST)
|
|
||||||
self.write(key)
|
|
||||||
_, ty = self._read_header()
|
|
||||||
if ty != _D2HMsgType.FLASH_OK_REPLY:
|
|
||||||
raise IOError("Incorrect reply from device: {}".format(ty))
|
|
||||||
|
|
||||||
def _receive_rpc_value(self, type_tag):
|
|
||||||
if type_tag == "n":
|
|
||||||
return None
|
|
||||||
if type_tag == "b":
|
|
||||||
return bool(struct.unpack("B", self.read(1))[0])
|
|
||||||
if type_tag == "i":
|
|
||||||
return struct.unpack(">l", self.read(4))[0]
|
|
||||||
if type_tag == "I":
|
|
||||||
return struct.unpack(">q", self.read(8))[0]
|
|
||||||
if type_tag == "f":
|
|
||||||
return struct.unpack(">d", self.read(8))[0]
|
|
||||||
if type_tag == "F":
|
|
||||||
n, d = struct.unpack(">qq", self.read(16))
|
|
||||||
return Fraction(n, d)
|
|
||||||
|
|
||||||
def _receive_rpc_values(self):
|
|
||||||
r = []
|
|
||||||
while True:
|
|
||||||
type_tag = chr(struct.unpack("B", self.read(1))[0])
|
|
||||||
if type_tag == "\x00":
|
|
||||||
return r
|
|
||||||
elif type_tag == "l":
|
|
||||||
elt_type_tag = chr(struct.unpack("B", self.read(1))[0])
|
|
||||||
length = struct.unpack(">l", self.read(4))[0]
|
|
||||||
r.append([self._receive_rpc_value(elt_type_tag)
|
|
||||||
for i in range(length)])
|
|
||||||
else:
|
|
||||||
r.append(self._receive_rpc_value(type_tag))
|
|
||||||
|
|
||||||
def _serve_rpc(self, rpc_wrapper, rpc_map, user_exception_map):
|
|
||||||
rpc_num = struct.unpack(">l", self.read(4))[0]
|
|
||||||
args = self._receive_rpc_values()
|
|
||||||
logger.debug("rpc service: %d %r", rpc_num, args)
|
|
||||||
eid, r = rpc_wrapper.run_rpc(
|
|
||||||
user_exception_map, rpc_map[rpc_num], args)
|
|
||||||
self._write_header(9+2*4, _H2DMsgType.RPC_REPLY)
|
|
||||||
self.write(struct.pack(">ll", eid, r))
|
|
||||||
logger.debug("rpc service: %d %r == %r (eid %d)", rpc_num, args,
|
|
||||||
r, eid)
|
|
||||||
|
|
||||||
def _serve_exception(self, rpc_wrapper, user_exception_map):
|
|
||||||
eid, p0, p1, p2 = struct.unpack(">lqqq", self.read(4+3*8))
|
|
||||||
rpc_wrapper.filter_rpc_exception(eid)
|
|
||||||
if eid < core_language.first_user_eid:
|
|
||||||
exception = runtime_exceptions.exception_map[eid]
|
|
||||||
raise exception(self.core, p0, p1, p2)
|
|
||||||
else:
|
|
||||||
exception = user_exception_map[eid]
|
|
||||||
raise exception
|
|
||||||
|
|
||||||
def serve(self, rpc_map, user_exception_map):
|
|
||||||
rpc_wrapper = RPCWrapper()
|
|
||||||
while True:
|
|
||||||
_, ty = self._read_header()
|
|
||||||
if ty == _D2HMsgType.RPC_REQUEST:
|
|
||||||
self._serve_rpc(rpc_wrapper, rpc_map, user_exception_map)
|
|
||||||
elif ty == _D2HMsgType.KERNEL_EXCEPTION:
|
|
||||||
self._serve_exception(rpc_wrapper, user_exception_map)
|
|
||||||
elif ty == _D2HMsgType.KERNEL_FINISHED:
|
|
||||||
return
|
|
||||||
else:
|
|
||||||
raise IOError("Incorrect request from device: "+str(ty))
|
|
||||||
|
|
||||||
def get_log(self):
|
def get_log(self):
|
||||||
self._write_header(9, _H2DMsgType.LOG_REQUEST)
|
self._write_empty(_H2DMsgType.LOG_REQUEST)
|
||||||
length, ty = self._read_header()
|
|
||||||
if ty != _D2HMsgType.LOG_REPLY:
|
self._read_header()
|
||||||
raise IOError("Incorrect request from device: "+str(ty))
|
self._read_expect(_D2HMsgType.LOG_REPLY)
|
||||||
r = ""
|
return self._read_chunk(self._read_length).decode('utf-8')
|
||||||
for i in range(length - 9):
|
|
||||||
c = struct.unpack("B", self.read(1))[0]
|
def clear_log(self):
|
||||||
if c:
|
self._write_empty(_H2DMsgType.LOG_CLEAR)
|
||||||
r += chr(c)
|
|
||||||
return r
|
self._read_empty(_D2HMsgType.LOG_REPLY)
|
||||||
|
|
||||||
|
def flash_storage_read(self, key):
|
||||||
|
self._write_header(_H2DMsgType.FLASH_READ_REQUEST)
|
||||||
|
self._write_string(key)
|
||||||
|
self._write_flush()
|
||||||
|
|
||||||
|
self._read_header()
|
||||||
|
self._read_expect(_D2HMsgType.FLASH_READ_REPLY)
|
||||||
|
return self._read_chunk(self._read_length)
|
||||||
|
|
||||||
|
def flash_storage_write(self, key, value):
|
||||||
|
self._write_header(_H2DMsgType.FLASH_WRITE_REQUEST)
|
||||||
|
self._write_string(key)
|
||||||
|
self._write_bytes(value)
|
||||||
|
self._write_flush()
|
||||||
|
|
||||||
|
self._read_header()
|
||||||
|
if self._read_type == _D2HMsgType.FLASH_ERROR_REPLY:
|
||||||
|
raise IOError("Flash storage is full")
|
||||||
|
else:
|
||||||
|
self._read_expect(_D2HMsgType.FLASH_OK_REPLY)
|
||||||
|
|
||||||
|
def flash_storage_erase(self):
|
||||||
|
self._write_empty(_H2DMsgType.FLASH_ERASE_REQUEST)
|
||||||
|
|
||||||
|
self._read_empty(_D2HMsgType.FLASH_OK_REPLY)
|
||||||
|
|
||||||
|
def flash_storage_remove(self, key):
|
||||||
|
self._write_header(_H2DMsgType.FLASH_REMOVE_REQUEST)
|
||||||
|
self._write_string(key)
|
||||||
|
self._write_flush()
|
||||||
|
|
||||||
|
self._read_empty(_D2HMsgType.FLASH_OK_REPLY)
|
||||||
|
|
||||||
|
def load(self, kernel_library):
|
||||||
|
self._write_header(_H2DMsgType.LOAD_LIBRARY)
|
||||||
|
self._write_chunk(kernel_library)
|
||||||
|
self._write_flush()
|
||||||
|
|
||||||
|
self._read_empty(_D2HMsgType.LOAD_COMPLETED)
|
||||||
|
|
||||||
|
def run(self):
|
||||||
|
self._write_empty(_H2DMsgType.RUN_KERNEL)
|
||||||
|
logger.debug("running kernel")
|
||||||
|
|
||||||
|
_rpc_sentinel = object()
|
||||||
|
|
||||||
|
# See session.c:{send,receive}_rpc_value and llvm_ir_generator.py:_rpc_tag.
|
||||||
|
def _receive_rpc_value(self, object_map):
|
||||||
|
tag = chr(self._read_int8())
|
||||||
|
if tag == "\x00":
|
||||||
|
return self._rpc_sentinel
|
||||||
|
elif tag == "t":
|
||||||
|
length = self._read_int8()
|
||||||
|
return tuple(self._receive_rpc_value(object_map) for _ in range(length))
|
||||||
|
elif tag == "n":
|
||||||
|
return None
|
||||||
|
elif tag == "b":
|
||||||
|
return bool(self._read_int8())
|
||||||
|
elif tag == "i":
|
||||||
|
return self._read_int32()
|
||||||
|
elif tag == "I":
|
||||||
|
return self._read_int64()
|
||||||
|
elif tag == "f":
|
||||||
|
return self._read_float64()
|
||||||
|
elif tag == "F":
|
||||||
|
numerator = self._read_int64()
|
||||||
|
denominator = self._read_int64()
|
||||||
|
return Fraction(numerator, denominator)
|
||||||
|
elif tag == "s":
|
||||||
|
return self._read_string()
|
||||||
|
elif tag == "l":
|
||||||
|
length = self._read_int32()
|
||||||
|
return [self._receive_rpc_value(object_map) for _ in range(length)]
|
||||||
|
elif tag == "r":
|
||||||
|
start = self._receive_rpc_value(object_map)
|
||||||
|
stop = self._receive_rpc_value(object_map)
|
||||||
|
step = self._receive_rpc_value(object_map)
|
||||||
|
return range(start, stop, step)
|
||||||
|
elif tag == "o":
|
||||||
|
present = self._read_int8()
|
||||||
|
if present:
|
||||||
|
return self._receive_rpc_value(object_map)
|
||||||
|
elif tag == "O":
|
||||||
|
return object_map.retrieve(self._read_int32())
|
||||||
|
else:
|
||||||
|
raise IOError("Unknown RPC value tag: {}".format(repr(tag)))
|
||||||
|
|
||||||
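# Standalone sketch of the self-describing value encoding read by
# _receive_rpc_value(): every value starts with its own tag byte, and list
# elements each carry their own tag. Only the subset of tags shown above is
# handled; the helper name and the in-memory stream are invented.
import io
import struct

def receive_value(stream):
    tag = chr(struct.unpack("B", stream.read(1))[0])
    if tag == "n":
        return None
    elif tag == "b":
        return bool(struct.unpack("B", stream.read(1))[0])
    elif tag == "i":
        return struct.unpack(">l", stream.read(4))[0]
    elif tag == "I":
        return struct.unpack(">q", stream.read(8))[0]
    elif tag == "f":
        return struct.unpack(">d", stream.read(8))[0]
    elif tag == "l":
        length = struct.unpack(">l", stream.read(4))[0]
        return [receive_value(stream) for _ in range(length)]
    raise IOError("unknown tag {}".format(tag))

# A list of two 32-bit integers as the device would serialize it.
wire = b"l" + struct.pack(">l", 2) + b"i" + struct.pack(">l", 10) + b"i" + struct.pack(">l", 20)
assert receive_value(io.BytesIO(wire)) == [10, 20]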
|
def _receive_rpc_args(self, object_map):
|
||||||
|
args = []
|
||||||
|
while True:
|
||||||
|
value = self._receive_rpc_value(object_map)
|
||||||
|
if value is self._rpc_sentinel:
|
||||||
|
return args
|
||||||
|
args.append(value)
|
||||||
|
|
||||||
|
def _skip_rpc_value(self, tags):
|
||||||
|
tag = tags.pop(0)
|
||||||
|
if tag == "t":
|
||||||
|
length = tags.pop(0)
|
||||||
|
for _ in range(length):
|
||||||
|
self._skip_rpc_value(tags)
|
||||||
|
elif tag == "l":
|
||||||
|
self._skip_rpc_value(tags)
|
||||||
|
elif tag == "r":
|
||||||
|
self._skip_rpc_value(tags)
|
||||||
|
else:
|
||||||
|
pass
|
||||||
|
|
||||||
|
def _send_rpc_value(self, tags, value, root, function):
|
||||||
|
def check(cond, expected):
|
||||||
|
if not cond:
|
||||||
|
raise RPCReturnValueError(
|
||||||
|
"type mismatch: cannot serialize {value} as {type}"
|
||||||
|
" ({function} has returned {root})".format(
|
||||||
|
value=repr(value), type=expected(),
|
||||||
|
function=function, root=root))
|
||||||
|
|
||||||
|
tag = chr(tags.pop(0))
|
||||||
|
if tag == "t":
|
||||||
|
length = tags.pop(0)
|
||||||
|
check(isinstance(value, tuple) and length == len(value),
|
||||||
|
lambda: "tuple of {}".format(length))
|
||||||
|
for elt in value:
|
||||||
|
self._send_rpc_value(tags, elt, root, function)
|
||||||
|
elif tag == "n":
|
||||||
|
check(value is None,
|
||||||
|
lambda: "None")
|
||||||
|
elif tag == "b":
|
||||||
|
check(isinstance(value, bool),
|
||||||
|
lambda: "bool")
|
||||||
|
self._write_int8(value)
|
||||||
|
elif tag == "i":
|
||||||
|
check(isinstance(value, int) and (-2**31 < value < 2**31-1),
|
||||||
|
lambda: "32-bit int")
|
||||||
|
self._write_int32(value)
|
||||||
|
elif tag == "I":
|
||||||
|
check(isinstance(value, int) and (-2**63 < value < 2**63-1),
|
||||||
|
lambda: "64-bit int")
|
||||||
|
self._write_int64(value)
|
||||||
|
elif tag == "f":
|
||||||
|
check(isinstance(value, float),
|
||||||
|
lambda: "float")
|
||||||
|
self._write_float64(value)
|
||||||
|
elif tag == "F":
|
||||||
|
check(isinstance(value, Fraction) and
|
||||||
|
(-2**63 < value.numerator < 2**63-1) and
|
||||||
|
(-2**63 < value.denominator < 2**63-1),
|
||||||
|
lambda: "64-bit Fraction")
|
||||||
|
self._write_int64(value.numerator)
|
||||||
|
self._write_int64(value.denominator)
|
||||||
|
elif tag == "s":
|
||||||
|
check(isinstance(value, str) and "\x00" not in value,
|
||||||
|
lambda: "str")
|
||||||
|
self._write_string(value)
|
||||||
|
elif tag == "l":
|
||||||
|
check(isinstance(value, list),
|
||||||
|
lambda: "list")
|
||||||
|
self._write_int32(len(value))
|
||||||
|
for elt in value:
|
||||||
|
tags_copy = bytearray(tags)
|
||||||
|
self._send_rpc_value(tags_copy, elt, root, function)
|
||||||
|
self._skip_rpc_value(tags)
|
||||||
|
elif tag == "r":
|
||||||
|
check(isinstance(value, range),
|
||||||
|
lambda: "range")
|
||||||
|
tags_copy = bytearray(tags)
|
||||||
|
self._send_rpc_value(tags_copy, value.start, root, function)
|
||||||
|
tags_copy = bytearray(tags)
|
||||||
|
self._send_rpc_value(tags_copy, value.stop, root, function)
|
||||||
|
tags_copy = bytearray(tags)
|
||||||
|
self._send_rpc_value(tags_copy, value.step, root, function)
|
||||||
|
tags = tags_copy
|
||||||
|
else:
|
||||||
|
raise IOError("Unknown RPC value tag: {}".format(repr(tag)))
|
||||||
|
|
||||||
|
def _serve_rpc(self, object_map):
|
||||||
|
service = self._read_int32()
|
||||||
|
args = self._receive_rpc_args(object_map)
|
||||||
|
return_tags = self._read_bytes()
|
||||||
|
logger.debug("rpc service: %d %r -> %s", service, args, return_tags)
|
||||||
|
|
||||||
|
try:
|
||||||
|
result = object_map.retrieve(service)(*args)
|
||||||
|
logger.debug("rpc service: %d %r == %r", service, args, result)
|
||||||
|
|
||||||
|
self._write_header(_H2DMsgType.RPC_REPLY)
|
||||||
|
self._write_bytes(return_tags)
|
||||||
|
self._send_rpc_value(bytearray(return_tags), result, result,
|
||||||
|
object_map.retrieve(service))
|
||||||
|
self._write_flush()
|
||||||
|
except core_language.ARTIQException as exn:
|
||||||
|
logger.debug("rpc service: %d %r ! %r", service, args, exn)
|
||||||
|
|
||||||
|
self._write_header(_H2DMsgType.RPC_EXCEPTION)
|
||||||
|
self._write_string(exn.name)
|
||||||
|
self._write_string(exn.message)
|
||||||
|
for index in range(3):
|
||||||
|
self._write_int64(exn.param[index])
|
||||||
|
|
||||||
|
self._write_string(exn.filename)
|
||||||
|
self._write_int32(exn.line)
|
||||||
|
self._write_int32(exn.column)
|
||||||
|
self._write_string(exn.function)
|
||||||
|
|
||||||
|
self._write_flush()
|
||||||
|
except Exception as exn:
|
||||||
|
logger.debug("rpc service: %d %r ! %r", service, args, exn)
|
||||||
|
|
||||||
|
self._write_header(_H2DMsgType.RPC_EXCEPTION)
|
||||||
|
self._write_string(type(exn).__name__)
|
||||||
|
self._write_string(str(exn))
|
||||||
|
for index in range(3):
|
||||||
|
self._write_int64(0)
|
||||||
|
|
||||||
|
(_, (filename, line, function, _), ) = traceback.extract_tb(exn.__traceback__, 2)
|
||||||
|
self._write_string(filename)
|
||||||
|
self._write_int32(line)
|
||||||
|
self._write_int32(-1) # column not known
|
||||||
|
self._write_string(function)
|
||||||
|
|
||||||
|
self._write_flush()
|
||||||
|
|
||||||
|
def _serve_exception(self, symbolizer):
|
||||||
|
name = self._read_string()
|
||||||
|
message = self._read_string()
|
||||||
|
params = [self._read_int64() for _ in range(3)]
|
||||||
|
|
||||||
|
filename = self._read_string()
|
||||||
|
line = self._read_int32()
|
||||||
|
column = self._read_int32()
|
||||||
|
function = self._read_string()
|
||||||
|
|
||||||
|
backtrace = [self._read_int32() for _ in range(self._read_int32())]
|
||||||
|
|
||||||
|
traceback = list(reversed(symbolizer(backtrace))) + \
|
||||||
|
[(filename, line, column, function, None)]
|
||||||
|
raise core_language.ARTIQException(name, message, params, traceback)
|
||||||
|
|
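# Sketch of the call shape serve() expects from `symbolizer`, judging from
# _serve_exception above: it maps a list of device backtrace addresses to
# (filename, line, column, function, address) tuples, which are reversed and
# prepended to the frame decoded from the exception record. Dummy values only.
def dummy_symbolizer(backtrace):
    return [("<unknown>", 0, -1, "<unknown>", address) for address in backtrace]

frames = list(reversed(dummy_symbolizer([0x1000, 0x2000])))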
||||||
|
def serve(self, object_map, symbolizer):
|
||||||
|
while True:
|
||||||
|
self._read_header()
|
||||||
|
if self._read_type == _D2HMsgType.RPC_REQUEST:
|
||||||
|
self._serve_rpc(object_map)
|
||||||
|
elif self._read_type == _D2HMsgType.KERNEL_EXCEPTION:
|
||||||
|
self._serve_exception(symbolizer)
|
||||||
|
else:
|
||||||
|
self._read_expect(_D2HMsgType.KERNEL_FINISHED)
|
||||||
|
return
|
||||||
|
|
|
@ -10,6 +10,7 @@ logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
class Comm(CommGeneric):
|
class Comm(CommGeneric):
|
||||||
def __init__(self, dmgr, serial_dev, baud_rate=115200):
|
def __init__(self, dmgr, serial_dev, baud_rate=115200):
|
||||||
|
super().__init__()
|
||||||
self.serial_dev = serial_dev
|
self.serial_dev = serial_dev
|
||||||
self.baud_rate = baud_rate
|
self.baud_rate = baud_rate
|
||||||
|
|
||||||
|
@ -27,10 +28,10 @@ class Comm(CommGeneric):
|
||||||
del self.port
|
del self.port
|
||||||
|
|
||||||
def read(self, length):
|
def read(self, length):
|
||||||
r = bytes()
|
result = bytes()
|
||||||
while len(r) < length:
|
while len(result) < length:
|
||||||
r += self.port.read(length - len(r))
|
result += self.port.read(length - len(result))
|
||||||
return r
|
return result
|
||||||
|
|
||||||
def write(self, data):
|
def write(self, data):
|
||||||
remaining = len(data)
|
remaining = len(data)
|
||||||
|
|
|
@ -26,6 +26,7 @@ def set_keepalive(sock, after_idle, interval, max_fails):
|
||||||
|
|
||||||
class Comm(CommGeneric):
|
class Comm(CommGeneric):
|
||||||
def __init__(self, dmgr, host, port=1381):
|
def __init__(self, dmgr, host, port=1381):
|
||||||
|
super().__init__()
|
||||||
self.host = host
|
self.host = host
|
||||||
self.port = port
|
self.port = port
|
||||||
|
|
||||||
|
|
|
@ -1,49 +1,37 @@
|
||||||
import os
|
import os
|
||||||
|
|
||||||
|
from pythonparser import diagnostic
|
||||||
|
|
||||||
from artiq.language.core import *
|
from artiq.language.core import *
|
||||||
from artiq.language.units import ns
|
from artiq.language.types import *
|
||||||
|
from artiq.language.units import *
|
||||||
|
|
||||||
from artiq.transforms.inline import inline
|
from artiq.compiler import Stitcher, Module
|
||||||
from artiq.transforms.quantize_time import quantize_time
|
from artiq.compiler.targets import OR1KTarget
|
||||||
from artiq.transforms.remove_inter_assigns import remove_inter_assigns
|
|
||||||
from artiq.transforms.fold_constants import fold_constants
|
|
||||||
from artiq.transforms.remove_dead_code import remove_dead_code
|
|
||||||
from artiq.transforms.unroll_loops import unroll_loops
|
|
||||||
from artiq.transforms.interleave import interleave
|
|
||||||
from artiq.transforms.lower_time import lower_time
|
|
||||||
from artiq.transforms.unparse import unparse
|
|
||||||
|
|
||||||
from artiq.coredevice.runtime import Runtime
|
# Import for side effects (creating the exception classes).
|
||||||
|
from artiq.coredevice import exceptions
|
||||||
from artiq.py2llvm import get_runtime_binary
|
|
||||||
|
|
||||||
|
|
||||||
def _announce_unparse(label, node):
|
class CompileError(Exception):
|
||||||
print("*** Unparsing: "+label)
|
def __init__(self, diagnostic):
|
||||||
print(unparse(node))
|
self.diagnostic = diagnostic
|
||||||
|
|
||||||
|
def render_string(self, colored=False):
|
||||||
|
def shorten_path(path):
|
||||||
|
return path.replace(os.path.normpath(os.path.join(__file__, "..", "..")), "<artiq>")
|
||||||
|
lines = [shorten_path(path) for path in self.diagnostic.render(colored=colored)]
|
||||||
|
return "\n".join(lines)
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
# Prepend a newline so that the message shows up on a line after the
# exception class name printed by Python.
|
||||||
|
return "\n" + self.render_string(colored=True)
|
||||||
|
|
||||||
|
|
||||||
def _make_debug_unparse(final):
|
@syscall
|
||||||
try:
|
def rtio_get_counter() -> TInt64:
|
||||||
env = os.environ["ARTIQ_UNPARSE"]
|
raise NotImplementedError("syscall not simulated")
|
||||||
except KeyError:
|
|
||||||
env = ""
|
|
||||||
selected_labels = set(env.split())
|
|
||||||
if "all" in selected_labels:
|
|
||||||
return _announce_unparse
|
|
||||||
else:
|
|
||||||
if "final" in selected_labels:
|
|
||||||
selected_labels.add(final)
|
|
||||||
|
|
||||||
def _filtered_unparse(label, node):
|
|
||||||
if label in selected_labels:
|
|
||||||
_announce_unparse(label, node)
|
|
||||||
return _filtered_unparse
|
|
||||||
|
|
||||||
|
|
||||||
def _no_debug_unparse(label, node):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class Core:
|
class Core:
|
||||||
"""Core device driver.
|
"""Core device driver.
|
||||||
|
@ -66,79 +54,46 @@ class Core:
|
||||||
self.first_run = True
|
self.first_run = True
|
||||||
self.core = self
|
self.core = self
|
||||||
self.comm.core = self
|
self.comm.core = self
|
||||||
self.runtime = Runtime()
|
|
||||||
|
|
||||||
def transform_stack(self, func_def, rpc_map, exception_map,
|
def compile(self, function, args, kwargs, with_attr_writeback=True):
|
||||||
debug_unparse=_no_debug_unparse):
|
try:
|
||||||
remove_inter_assigns(func_def)
|
engine = diagnostic.Engine(all_errors_are_fatal=True)
|
||||||
debug_unparse("remove_inter_assigns_1", func_def)
|
|
||||||
|
|
||||||
quantize_time(func_def, self.ref_period)
|
stitcher = Stitcher(engine=engine)
|
||||||
debug_unparse("quantize_time", func_def)
|
stitcher.stitch_call(function, args, kwargs)
|
||||||
|
stitcher.finalize()
|
||||||
|
|
||||||
fold_constants(func_def)
|
module = Module(stitcher, ref_period=self.ref_period)
|
||||||
debug_unparse("fold_constants_1", func_def)
|
target = OR1KTarget()
|
||||||
|
|
||||||
unroll_loops(func_def, 500)
|
library = target.compile_and_link([module])
|
||||||
debug_unparse("unroll_loops", func_def)
|
stripped_library = target.strip(library)
|
||||||
|
|
||||||
interleave(func_def)
|
return stitcher.object_map, stripped_library, \
|
||||||
debug_unparse("interleave", func_def)
|
lambda addresses: target.symbolize(library, addresses)
|
||||||
|
except diagnostic.Error as error:
|
||||||
|
raise CompileError(error.diagnostic) from error
|
||||||
|
|
||||||
lower_time(func_def)
|
def run(self, function, args, kwargs):
|
||||||
debug_unparse("lower_time", func_def)
|
object_map, kernel_library, symbolizer = self.compile(function, args, kwargs)
|
||||||
|
|
||||||
remove_inter_assigns(func_def)
|
|
||||||
debug_unparse("remove_inter_assigns_2", func_def)
|
|
||||||
|
|
||||||
fold_constants(func_def)
|
|
||||||
debug_unparse("fold_constants_2", func_def)
|
|
||||||
|
|
||||||
remove_dead_code(func_def)
|
|
||||||
debug_unparse("remove_dead_code_1", func_def)
|
|
||||||
|
|
||||||
remove_inter_assigns(func_def)
|
|
||||||
debug_unparse("remove_inter_assigns_3", func_def)
|
|
||||||
|
|
||||||
fold_constants(func_def)
|
|
||||||
debug_unparse("fold_constants_3", func_def)
|
|
||||||
|
|
||||||
remove_dead_code(func_def)
|
|
||||||
debug_unparse("remove_dead_code_2", func_def)
|
|
||||||
|
|
||||||
def compile(self, k_function, k_args, k_kwargs, with_attr_writeback=True):
|
|
||||||
debug_unparse = _make_debug_unparse("remove_dead_code_2")
|
|
||||||
|
|
||||||
func_def, rpc_map, exception_map = inline(
|
|
||||||
self, k_function, k_args, k_kwargs, with_attr_writeback)
|
|
||||||
debug_unparse("inline", func_def)
|
|
||||||
self.transform_stack(func_def, rpc_map, exception_map, debug_unparse)
|
|
||||||
|
|
||||||
binary = get_runtime_binary(self.runtime, func_def)
|
|
||||||
|
|
||||||
return binary, rpc_map, exception_map
|
|
||||||
|
|
||||||
def run(self, k_function, k_args, k_kwargs):
|
|
||||||
if self.first_run:
|
if self.first_run:
|
||||||
self.comm.check_ident()
|
self.comm.check_ident()
|
||||||
self.comm.switch_clock(self.external_clock)
|
self.comm.switch_clock(self.external_clock)
|
||||||
|
self.first_run = False
|
||||||
|
|
||||||
binary, rpc_map, exception_map = self.compile(
|
self.comm.load(kernel_library)
|
||||||
k_function, k_args, k_kwargs)
|
self.comm.run()
|
||||||
self.comm.load(binary)
|
self.comm.serve(object_map, symbolizer)
|
||||||
self.comm.run(k_function.__name__)
|
|
||||||
self.comm.serve(rpc_map, exception_map)
|
|
||||||
self.first_run = False
|
|
||||||
|
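# Hedged sketch of how the new pipeline is driven from the host: calling a
# @kernel method enters Core.run(), which compiles through Stitcher, Module and
# OR1KTarget and then drives comm.load()/run()/serve(). The class below and its
# constructor are invented; devices normally come from the device database.
from artiq.language.core import kernel

class Blink:
    def __init__(self, core, ttl):
        self.core = core
        self.ttl = ttl

    @kernel
    def run(self):
        self.core.break_realtime()
        self.ttl.set_o(True)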
|
||||||
@kernel
|
@kernel
|
||||||
def get_rtio_counter_mu(self):
|
def get_rtio_counter_mu(self):
|
||||||
"""Return the current value of the hardware RTIO counter."""
|
return rtio_get_counter()
|
||||||
return syscall("rtio_get_counter")
|
|
||||||
|
|
||||||
@kernel
|
@kernel
|
||||||
def break_realtime(self):
|
def break_realtime(self):
|
||||||
"""Set the timeline to the current value of the hardware RTIO counter
|
"""Set the timeline to the current value of the hardware RTIO counter
|
||||||
plus a margin of 125000 machine units."""
|
plus a margin of 125000 machine units."""
|
||||||
min_now = syscall("rtio_get_counter") + 125000
|
min_now = rtio_get_counter() + 125000
|
||||||
if now_mu() < min_now:
|
if now_mu() < min_now:
|
||||||
at_mu(min_now)
|
at_mu(min_now)
|
||||||
|
|
|
@ -1,4 +1,5 @@
|
||||||
from artiq.language.core import *
|
from artiq.language.core import *
|
||||||
|
from artiq.language.types import *
|
||||||
from artiq.language.units import *
|
from artiq.language.units import *
|
||||||
|
|
||||||
|
|
||||||
|
@ -9,6 +10,24 @@ PHASE_MODE_ABSOLUTE = 1
|
||||||
PHASE_MODE_TRACKING = 2
|
PHASE_MODE_TRACKING = 2
|
||||||
|
|
||||||
|
|
||||||
|
@syscall
|
||||||
|
def dds_init(time_mu: TInt64, channel: TInt32) -> TNone:
|
||||||
|
raise NotImplementedError("syscall not simulated")
|
||||||
|
|
||||||
|
@syscall
|
||||||
|
def dds_batch_enter(time_mu: TInt64) -> TNone:
|
||||||
|
raise NotImplementedError("syscall not simulated")
|
||||||
|
|
||||||
|
@syscall
|
||||||
|
def dds_batch_exit() -> TNone:
|
||||||
|
raise NotImplementedError("syscall not simulated")
|
||||||
|
|
||||||
|
@syscall
|
||||||
|
def dds_set(time_mu: TInt64, channel: TInt32, ftw: TInt32,
|
||||||
|
pow: TInt32, phase_mode: TInt32) -> TNone:
|
||||||
|
raise NotImplementedError("syscall not simulated")
|
||||||
|
|
||||||
|
|
||||||
class _BatchContextManager:
|
class _BatchContextManager:
|
||||||
def __init__(self, dds_bus):
|
def __init__(self, dds_bus):
|
||||||
self.dds_bus = dds_bus
|
self.dds_bus = dds_bus
|
||||||
|
@ -37,13 +56,13 @@ class DDSBus:
|
||||||
|
|
||||||
The time of execution of the DDS commands is the time of entering the
|
The time of execution of the DDS commands is the time of entering the
|
||||||
batch (as closely as hardware permits)."""
|
batch (as closely as hardware permits)."""
|
||||||
syscall("dds_batch_enter", now_mu())
|
dds_batch_enter(now_mu())
|
||||||
|
|
||||||
@kernel
|
@kernel
|
||||||
def batch_exit(self):
|
def batch_exit(self):
|
||||||
"""Ends a DDS command batch. All buffered DDS commands are issued
|
"""Ends a DDS command batch. All buffered DDS commands are issued
|
||||||
on the bus."""
|
on the bus."""
|
||||||
syscall("dds_batch_exit")
|
dds_batch_exit()
|
||||||
|
|
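# Hedged usage sketch for the batch syscalls above: DDSBus typically exposes a
# context manager (a `batch` attribute backed by _BatchContextManager), so
# kernels group programming commands like this. Device names are invented.
from artiq.language.core import kernel
from artiq.language.units import MHz

class SetTwoChannels:
    def __init__(self, core, dds_bus, dds0, dds1):
        self.core = core
        self.dds_bus = dds_bus
        self.dds0 = dds0
        self.dds1 = dds1

    @kernel
    def run(self):
        # Both commands take effect at the time the batch was entered,
        # as closely as the hardware permits.
        with self.dds_bus.batch:
            self.dds0.set(120*MHz)
            self.dds1.set(200*MHz)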
||||||
|
|
||||||
class _DDSGeneric:
|
class _DDSGeneric:
|
||||||
|
@ -105,7 +124,7 @@ class _DDSGeneric:
|
||||||
"""Resets and initializes the DDS channel.
|
"""Resets and initializes the DDS channel.
|
||||||
|
|
||||||
The runtime does this for all channels upon core device startup."""
|
The runtime does this for all channels upon core device startup."""
|
||||||
syscall("dds_init", now_mu(), self.channel)
|
dds_init(now_mu(), self.channel)
|
||||||
|
|
||||||
@kernel
|
@kernel
|
||||||
def set_phase_mode(self, phase_mode):
|
def set_phase_mode(self, phase_mode):
|
||||||
|
@ -144,8 +163,7 @@ class _DDSGeneric:
|
||||||
"""
|
"""
|
||||||
if phase_mode == _PHASE_MODE_DEFAULT:
|
if phase_mode == _PHASE_MODE_DEFAULT:
|
||||||
phase_mode = self.phase_mode
|
phase_mode = self.phase_mode
|
||||||
syscall("dds_set", now_mu(), self.channel, frequency,
|
dds_set(now_mu(), self.channel, frequency, phase, phase_mode, amplitude)
|
||||||
phase, phase_mode, amplitude)
|
|
||||||
|
|
||||||
@kernel
|
@kernel
|
||||||
def set(self, frequency, phase=0.0, phase_mode=_PHASE_MODE_DEFAULT,
|
def set(self, frequency, phase=0.0, phase_mode=_PHASE_MODE_DEFAULT,
|
||||||
|
|
|
@@ -0,0 +1,41 @@
from artiq.language.core import ARTIQException

class ZeroDivisionError(ARTIQException):
    """Python's :class:`ZeroDivisionError`, mirrored in ARTIQ."""

class ValueError(ARTIQException):
    """Python's :class:`ValueError`, mirrored in ARTIQ."""

class IndexError(ARTIQException):
    """Python's :class:`IndexError`, mirrored in ARTIQ."""

class InternalError(ARTIQException):
    """Raised when the runtime encounters an internal error condition."""

class RTIOUnderflow(ARTIQException):
    """Raised when the CPU fails to submit a RTIO event early enough
    (with respect to the event's timestamp).

    The offending event is discarded and the RTIO core keeps operating.
    """

class RTIOSequenceError(ARTIQException):
    """Raised when an event is submitted on a given channel with a timestamp
    not larger than the previous one.

    The offending event is discarded and the RTIO core keeps operating.
    """

class RTIOOverflow(ARTIQException):
    """Raised when at least one event could not be registered into the RTIO
    input FIFO because it was full (CPU not reading fast enough).

    This does not interrupt operations further than cancelling the current
    read attempt and discarding some events. Reading can be reattempted after
    the exception is caught, and events will be partially retrieved.
    """

class DDSBatchError(ARTIQException):
    """Raised when attempting to start a DDS batch while already in a batch,
    or when too many commands are batched.
    """
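# Hedged sketch of the usual recovery pattern for RTIOUnderflow: the offending
# event was dropped, so the kernel moves the timeline past the current RTIO
# counter and continues. The container class and device names are invented.
from artiq.language.core import kernel
from artiq.language.units import us
from artiq.coredevice.exceptions import RTIOUnderflow

class PulseWithRecovery:
    def __init__(self, core, ttl):
        self.core = core
        self.ttl = ttl

    @kernel
    def run(self):
        try:
            self.ttl.pulse(2*us)
        except RTIOUnderflow:
            self.core.break_realtime()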
@ -1,40 +0,0 @@
|
||||||
from artiq.coredevice.runtime_exceptions import exception_map, _RPCException
|
|
||||||
|
|
||||||
|
|
||||||
def _lookup_exception(d, e):
|
|
||||||
for eid, exception in d.items():
|
|
||||||
if isinstance(e, exception):
|
|
||||||
return eid
|
|
||||||
return 0
|
|
||||||
|
|
||||||
|
|
||||||
class RPCWrapper:
|
|
||||||
def __init__(self):
|
|
||||||
self.last_exception = None
|
|
||||||
|
|
||||||
def run_rpc(self, user_exception_map, fn, args):
|
|
||||||
eid = 0
|
|
||||||
r = None
|
|
||||||
|
|
||||||
try:
|
|
||||||
r = fn(*args)
|
|
||||||
except Exception as e:
|
|
||||||
eid = _lookup_exception(user_exception_map, e)
|
|
||||||
if not eid:
|
|
||||||
eid = _lookup_exception(exception_map, e)
|
|
||||||
if eid:
|
|
||||||
self.last_exception = None
|
|
||||||
else:
|
|
||||||
self.last_exception = e
|
|
||||||
eid = _RPCException.eid
|
|
||||||
|
|
||||||
if r is None:
|
|
||||||
r = 0
|
|
||||||
else:
|
|
||||||
r = int(r)
|
|
||||||
|
|
||||||
return eid, r
|
|
||||||
|
|
||||||
def filter_rpc_exception(self, eid):
|
|
||||||
if eid == _RPCException.eid:
|
|
||||||
raise self.last_exception
|
|
|
@ -1,212 +1,13 @@
|
||||||
import os
|
import os
|
||||||
|
|
||||||
import llvmlite_artiq.ir as ll
|
class SourceLoader:
|
||||||
import llvmlite_artiq.binding as llvm
|
def __init__(self, runtime_root):
|
||||||
|
self.runtime_root = runtime_root
|
||||||
|
|
||||||
from artiq.py2llvm import base_types, fractions, lists
|
def get_source(self, filename):
|
||||||
from artiq.language import units
|
print(os.path.join(self.runtime_root, filename))
|
||||||
|
with open(os.path.join(self.runtime_root, filename)) as f:
|
||||||
|
return f.read()
|
||||||
|
|
||||||
|
artiq_root = os.path.join(os.path.dirname(__file__), "..", "..")
|
||||||
llvm.initialize()
|
source_loader = SourceLoader(os.path.join(artiq_root, "soc", "runtime"))
|
||||||
llvm.initialize_all_targets()
|
|
||||||
llvm.initialize_all_asmprinters()
|
|
||||||
|
|
||||||
_syscalls = {
|
|
||||||
"now_init": "n:I",
|
|
||||||
"now_save": "I:n",
|
|
||||||
"watchdog_set": "i:i",
|
|
||||||
"watchdog_clear": "i:n",
|
|
||||||
"rtio_get_counter": "n:I",
|
|
||||||
"ttl_set_o": "Iib:n",
|
|
||||||
"ttl_set_oe": "Iib:n",
|
|
||||||
"ttl_set_sensitivity": "Iii:n",
|
|
||||||
"ttl_get": "iI:I",
|
|
||||||
"ttl_clock_set": "Iii:n",
|
|
||||||
"dds_init": "Ii:n",
|
|
||||||
"dds_batch_enter": "I:n",
|
|
||||||
"dds_batch_exit": "n:n",
|
|
||||||
"dds_set": "Iiiiii:n",
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def _chr_to_type(c):
|
|
||||||
if c == "n":
|
|
||||||
return ll.VoidType()
|
|
||||||
if c == "b":
|
|
||||||
return ll.IntType(1)
|
|
||||||
if c == "i":
|
|
||||||
return ll.IntType(32)
|
|
||||||
if c == "I":
|
|
||||||
return ll.IntType(64)
|
|
||||||
raise ValueError
|
|
||||||
|
|
||||||
|
|
||||||
def _str_to_functype(s):
|
|
||||||
assert(s[-2] == ":")
|
|
||||||
type_ret = _chr_to_type(s[-1])
|
|
||||||
type_args = [_chr_to_type(c) for c in s[:-2] if c != "n"]
|
|
||||||
return ll.FunctionType(type_ret, type_args)
|
|
||||||
|
|
||||||
|
|
||||||
def _chr_to_value(c):
|
|
||||||
if c == "n":
|
|
||||||
return base_types.VNone()
|
|
||||||
if c == "b":
|
|
||||||
return base_types.VBool()
|
|
||||||
if c == "i":
|
|
||||||
return base_types.VInt()
|
|
||||||
if c == "I":
|
|
||||||
return base_types.VInt(64)
|
|
||||||
raise ValueError
|
|
||||||
|
|
||||||
|
|
||||||
def _value_to_str(v):
|
|
||||||
if isinstance(v, base_types.VNone):
|
|
||||||
return "n"
|
|
||||||
if isinstance(v, base_types.VBool):
|
|
||||||
return "b"
|
|
||||||
if isinstance(v, base_types.VInt):
|
|
||||||
if v.nbits == 32:
|
|
||||||
return "i"
|
|
||||||
if v.nbits == 64:
|
|
||||||
return "I"
|
|
||||||
raise ValueError
|
|
||||||
if isinstance(v, base_types.VFloat):
|
|
||||||
return "f"
|
|
||||||
if isinstance(v, fractions.VFraction):
|
|
||||||
return "F"
|
|
||||||
if isinstance(v, lists.VList):
|
|
||||||
return "l" + _value_to_str(v.el_type)
|
|
||||||
raise ValueError
|
|
||||||
|
|
||||||
|
|
||||||
class LinkInterface:
|
|
||||||
def init_module(self, module):
|
|
||||||
self.module = module
|
|
||||||
llvm_module = self.module.llvm_module
|
|
||||||
|
|
||||||
# RPC
|
|
||||||
func_type = ll.FunctionType(ll.IntType(32), [ll.IntType(32)],
|
|
||||||
var_arg=1)
|
|
||||||
self.rpc = ll.Function(llvm_module, func_type, "__syscall_rpc")
|
|
||||||
|
|
||||||
# syscalls
|
|
||||||
self.syscalls = dict()
|
|
||||||
for func_name, func_type_str in _syscalls.items():
|
|
||||||
func_type = _str_to_functype(func_type_str)
|
|
||||||
self.syscalls[func_name] = ll.Function(
|
|
||||||
llvm_module, func_type, "__syscall_" + func_name)
|
|
||||||
|
|
||||||
# exception handling
|
|
||||||
func_type = ll.FunctionType(ll.IntType(32),
|
|
||||||
[ll.PointerType(ll.IntType(8))])
|
|
||||||
self.eh_setjmp = ll.Function(llvm_module, func_type,
|
|
||||||
"__eh_setjmp")
|
|
||||||
self.eh_setjmp.attributes.add("nounwind")
|
|
||||||
self.eh_setjmp.attributes.add("returns_twice")
|
|
||||||
|
|
||||||
func_type = ll.FunctionType(ll.PointerType(ll.IntType(8)), [])
|
|
||||||
self.eh_push = ll.Function(llvm_module, func_type, "__eh_push")
|
|
||||||
|
|
||||||
func_type = ll.FunctionType(ll.VoidType(), [ll.IntType(32)])
|
|
||||||
self.eh_pop = ll.Function(llvm_module, func_type, "__eh_pop")
|
|
||||||
|
|
||||||
func_type = ll.FunctionType(ll.IntType(32), [])
|
|
||||||
self.eh_getid = ll.Function(llvm_module, func_type, "__eh_getid")
|
|
||||||
|
|
||||||
func_type = ll.FunctionType(ll.VoidType(), [ll.IntType(32)])
|
|
||||||
self.eh_raise = ll.Function(llvm_module, func_type, "__eh_raise")
|
|
||||||
self.eh_raise.attributes.add("noreturn")
|
|
||||||
|
|
||||||
def _build_rpc(self, args, builder):
|
|
||||||
r = base_types.VInt()
|
|
||||||
if builder is not None:
|
|
||||||
new_args = []
|
|
||||||
new_args.append(args[0].auto_load(builder)) # RPC number
|
|
||||||
for arg in args[1:]:
|
|
||||||
# type tag
|
|
||||||
arg_type_str = _value_to_str(arg)
|
|
||||||
arg_type_int = 0
|
|
||||||
for c in reversed(arg_type_str):
|
|
||||||
arg_type_int <<= 8
|
|
||||||
arg_type_int |= ord(c)
|
|
||||||
new_args.append(ll.Constant(ll.IntType(32), arg_type_int))
|
|
||||||
|
|
||||||
# pointer to value
|
|
||||||
if not isinstance(arg, base_types.VNone):
|
|
||||||
if isinstance(arg.llvm_value.type, ll.PointerType):
|
|
||||||
new_args.append(arg.llvm_value)
|
|
||||||
else:
|
|
||||||
arg_ptr = arg.new()
|
|
||||||
arg_ptr.alloca(builder)
|
|
||||||
arg_ptr.auto_store(builder, arg.llvm_value)
|
|
||||||
new_args.append(arg_ptr.llvm_value)
|
|
||||||
# end marker
|
|
||||||
new_args.append(ll.Constant(ll.IntType(32), 0))
|
|
||||||
r.auto_store(builder, builder.call(self.rpc, new_args))
|
|
||||||
return r
|
|
||||||
|
|
||||||
def _build_regular_syscall(self, syscall_name, args, builder):
|
|
||||||
r = _chr_to_value(_syscalls[syscall_name][-1])
|
|
||||||
if builder is not None:
|
|
||||||
args = [arg.auto_load(builder) for arg in args]
|
|
||||||
r.auto_store(builder, builder.call(self.syscalls[syscall_name],
|
|
||||||
args))
|
|
||||||
return r
|
|
||||||
|
|
||||||
def build_syscall(self, syscall_name, args, builder):
|
|
||||||
if syscall_name == "rpc":
|
|
||||||
return self._build_rpc(args, builder)
|
|
||||||
else:
|
|
||||||
return self._build_regular_syscall(syscall_name, args, builder)
|
|
||||||
|
|
||||||
def build_catch(self, builder):
|
|
||||||
jmpbuf = builder.call(self.eh_push, [])
|
|
||||||
exception_occured = builder.call(self.eh_setjmp, [jmpbuf])
|
|
||||||
return builder.icmp_signed("!=",
|
|
||||||
exception_occured,
|
|
||||||
ll.Constant(ll.IntType(32), 0))
|
|
||||||
|
|
||||||
def build_pop(self, builder, levels):
|
|
||||||
builder.call(self.eh_pop, [ll.Constant(ll.IntType(32), levels)])
|
|
||||||
|
|
||||||
def build_getid(self, builder):
|
|
||||||
return builder.call(self.eh_getid, [])
|
|
||||||
|
|
||||||
def build_raise(self, builder, eid):
|
|
||||||
builder.call(self.eh_raise, [eid])
|
|
||||||
|
|
||||||
|
|
||||||
def _debug_dump_obj(obj):
|
|
||||||
try:
|
|
||||||
env = os.environ["ARTIQ_DUMP_OBJECT"]
|
|
||||||
except KeyError:
|
|
||||||
return
|
|
||||||
|
|
||||||
for i in range(1000):
|
|
||||||
filename = "{}_{:03d}.elf".format(env, i)
|
|
||||||
try:
|
|
||||||
f = open(filename, "xb")
|
|
||||||
except FileExistsError:
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
f.write(obj)
|
|
||||||
f.close()
|
|
||||||
return
|
|
||||||
raise IOError
|
|
||||||
|
|
||||||
|
|
||||||
class Runtime(LinkInterface):
|
|
||||||
def __init__(self):
|
|
||||||
self.cpu_type = "or1k"
|
|
||||||
# allow 1ms for all initial DDS programming
|
|
||||||
self.warmup_time = 1*units.ms
|
|
||||||
|
|
||||||
def emit_object(self):
|
|
||||||
tm = llvm.Target.from_triple(self.cpu_type).create_target_machine()
|
|
||||||
obj = tm.emit_object(self.module.llvm_module_ref)
|
|
||||||
_debug_dump_obj(obj)
|
|
||||||
return obj
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return "<Runtime {}>".format(self.cpu_type)
|
|
||||||
|
|
|
@@ -1,85 +0,0 @@
-import inspect
-
-from artiq.language.core import RuntimeException
-
-
-# Must be kept in sync with soc/runtime/exceptions.h
-
-class InternalError(RuntimeException):
-    """Raised when the runtime encounters an internal error condition."""
-    eid = 1
-
-
-class _RPCException(RuntimeException):
-    eid = 2
-
-
-class RTIOUnderflow(RuntimeException):
-    """Raised when the CPU fails to submit a RTIO event early enough
-    (with respect to the event's timestamp).
-
-    The offending event is discarded and the RTIO core keeps operating.
-    """
-    eid = 3
-
-    def __str__(self):
-        return "at {} on channel {}, violation {}".format(
-            self.p0*self.core.ref_period,
-            self.p1,
-            (self.p2 - self.p0)*self.core.ref_period)
-
-
-class RTIOSequenceError(RuntimeException):
-    """Raised when an event is submitted on a given channel with a timestamp
-    not larger than the previous one.
-
-    The offending event is discarded and the RTIO core keeps operating.
-    """
-    eid = 4
-
-    def __str__(self):
-        return "at {} on channel {}".format(self.p0*self.core.ref_period,
-                                            self.p1)
-
-
-class RTIOCollisionError(RuntimeException):
-    """Raised when an event is submitted on a given channel with the same
-    coarse timestamp as the previous one but with a different fine timestamp.
-
-    Coarse timestamps correspond to the RTIO system clock (typically around
-    125MHz) whereas fine timestamps correspond to the RTIO SERDES clock
-    (typically around 1GHz).
-
-    The offending event is discarded and the RTIO core keeps operating.
-    """
-    eid = 5
-
-    def __str__(self):
-        return "at {} on channel {}".format(self.p0*self.core.ref_period,
-                                            self.p1)
-
-
-class RTIOOverflow(RuntimeException):
-    """Raised when at least one event could not be registered into the RTIO
-    input FIFO because it was full (CPU not reading fast enough).
-
-    This does not interrupt operations further than cancelling the current
-    read attempt and discarding some events. Reading can be reattempted after
-    the exception is caught, and events will be partially retrieved.
-    """
-    eid = 6
-
-    def __str__(self):
-        return "on channel {}".format(self.p0)
-
-
-class DDSBatchError(RuntimeException):
-    """Raised when attempting to start a DDS batch while already in a batch,
-    or when too many commands are batched.
-    """
-    eid = 7
-
-
-exception_map = {e.eid: e for e in globals().values()
-                 if inspect.isclass(e)
-                 and issubclass(e, RuntimeException)
-                 and hasattr(e, "eid")}
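The exception_map above keyed the old runtime's numeric exception identifiers to host classes. A minimal sketch of how such a map can be used to re-raise a device-reported eid on the host; the helper below is hypothetical, not the actual comm code, and the constructor arguments follow the removed RuntimeException.__init__(core, p0, p1, p2):

def raise_encoded_exception(exception_map, eid, core, p0, p1, p2):
    # Fall back to InternalError (eid 1) when the number is not recognized.
    cls = exception_map.get(eid, exception_map[1])
    raise cls(core, p0, p1, p2)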
@@ -1,4 +1,26 @@
 from artiq.language.core import *
+from artiq.language.types import *


+@syscall
+def ttl_set_o(time_mu: TInt64, channel: TInt32, enabled: TBool) -> TNone:
+    raise NotImplementedError("syscall not simulated")

+@syscall
+def ttl_set_oe(time_mu: TInt64, channel: TInt32, enabled: TBool) -> TNone:
+    raise NotImplementedError("syscall not simulated")

+@syscall
+def ttl_set_sensitivity(time_mu: TInt64, channel: TInt32, sensitivity: TInt32) -> TNone:
+    raise NotImplementedError("syscall not simulated")

+@syscall
+def ttl_get(channel: TInt32, time_limit_mu: TInt64) -> TInt64:
+    raise NotImplementedError("syscall not simulated")

+@syscall
+def ttl_clock_set(time_mu: TInt64, channel: TInt32, ftw: TInt32) -> TNone:
+    raise NotImplementedError("syscall not simulated")


 class TTLOut:

@@ -13,18 +35,18 @@ class TTLOut:
         self.channel = channel

         # in RTIO cycles
-        self.o_previous_timestamp = int64(0)
+        self.o_previous_timestamp = int(0, width=64)

     @kernel
     def set_o(self, o):
-        syscall("ttl_set_o", now_mu(), self.channel, o)
+        ttl_set_o(now_mu(), self.channel, o)
         self.o_previous_timestamp = now_mu()

     @kernel
     def sync(self):
         """Busy-wait until all programmed level switches have been
         effected."""
-        while syscall("rtio_get_counter") < self.o_previous_timestamp:
+        while self.core.get_rtio_counter_mu() < self.o_previous_timestamp:
             pass

     @kernel

@@ -76,12 +98,12 @@ class TTLInOut:
         self.channel = channel

         # in RTIO cycles
-        self.o_previous_timestamp = int64(0)
-        self.i_previous_timestamp = int64(0)
+        self.o_previous_timestamp = int(0, width=64)
+        self.i_previous_timestamp = int(0, width=64)

     @kernel
     def set_oe(self, oe):
-        syscall("ttl_set_oe", now_mu(), self.channel, oe)
+        ttl_set_oe(now_mu(), self.channel, oe)

     @kernel
     def output(self):

@@ -95,14 +117,14 @@ class TTLInOut:

     @kernel
     def set_o(self, o):
-        syscall("ttl_set_o", now_mu(), self.channel, o)
+        ttl_set_o(now_mu(), self.channel, o)
         self.o_previous_timestamp = now_mu()

     @kernel
     def sync(self):
         """Busy-wait until all programmed level switches have been
         effected."""
-        while syscall("rtio_get_counter") < self.o_previous_timestamp:
+        while self.core.get_rtio_counter_mu() < self.o_previous_timestamp:
             pass

     @kernel

@@ -137,7 +159,7 @@ class TTLInOut:

     @kernel
     def _set_sensitivity(self, value):
-        syscall("ttl_set_sensitivity", now_mu(), self.channel, value)
+        ttl_set_sensitivity(now_mu(), self.channel, value)
         self.i_previous_timestamp = now_mu()

     @kernel

@@ -193,8 +215,7 @@ class TTLInOut:
         """Poll the RTIO input during all the previously programmed gate
         openings, and returns the number of registered events."""
         count = 0
-        while syscall("ttl_get", self.channel,
-                      self.i_previous_timestamp) >= 0:
+        while ttl_get(self.channel, self.i_previous_timestamp) >= 0:
             count += 1
         return count

@@ -205,7 +226,7 @@ class TTLInOut:

         If the gate is permanently closed, returns a negative value.
         """
-        return syscall("ttl_get", self.channel, self.i_previous_timestamp)
+        return ttl_get(self.channel, self.i_previous_timestamp)


 class TTLClockGen:

@@ -221,7 +242,7 @@ class TTLClockGen:
         self.channel = channel

         # in RTIO cycles
-        self.previous_timestamp = int64(0)
+        self.previous_timestamp = int(0, width=64)
         self.acc_width = 24

     @portable

@@ -256,7 +277,7 @@ class TTLClockGen:
        that are not powers of two cause jitter of one RTIO clock cycle at the
        output.
        """
-        syscall("ttl_clock_set", now_mu(), self.channel, frequency)
+        ttl_clock_set(now_mu(), self.channel, frequency)
         self.previous_timestamp = now_mu()

     @kernel

@@ -273,5 +294,5 @@ class TTLClockGen:
     def sync(self):
         """Busy-wait until all programmed frequency switches and stops have
         been effected."""
-        while syscall("rtio_get_counter") < self.o_previous_timestamp:
+        while self.core.get_rtio_counter_mu() < self.o_previous_timestamp:
             pass
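A condensed sketch of how the rewritten driver is used from kernel code; device construction is omitted and the constructor shown here is illustrative, not the real driver signature:

from artiq.language.core import kernel, delay
from artiq.language.units import us

class Flash:
    def __init__(self, core, ttl):
        self.core = core        # core device driver, used by the @kernel wrapper
        self.ttl = ttl          # a TTLOut instance as defined above

    @kernel
    def run(self):
        self.ttl.set_o(True)    # schedules an RTIO event through the ttl_set_o syscall
        delay(10*us)
        self.ttl.set_o(False)
        self.ttl.sync()         # busy-waits on get_rtio_counter_mu()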
@@ -1,10 +1,10 @@
 #!/usr/bin/env python3.5

-import logging
-import argparse
+import sys, logging, argparse

 from artiq.master.databases import DeviceDB, DatasetDB
 from artiq.master.worker_db import DeviceManager, DatasetManager
+from artiq.coredevice.core import CompileError
 from artiq.tools import *


@@ -40,34 +40,35 @@ def main():
     dataset_mgr = DatasetManager(DatasetDB(args.dataset_db))

     try:
-        module = file_import(args.file)
+        module = file_import(args.file, prefix="artiq_run_")
         exp = get_experiment(module, args.experiment)
         arguments = parse_arguments(args.arguments)
         exp_inst = exp(device_mgr, dataset_mgr, **arguments)

-        if (not hasattr(exp.run, "k_function_info")
-                or not exp.run.k_function_info):
+        if not hasattr(exp.run, "artiq_embedded"):
             raise ValueError("Experiment entry point must be a kernel")
-        core_name = exp.run.k_function_info.core_name
+        core_name = exp.run.artiq_embedded.core_name
         core = getattr(exp_inst, core_name)

-        binary, rpc_map, _ = core.compile(exp.run.k_function_info.k_function,
-                                          [exp_inst], {},
-                                          with_attr_writeback=False)
+        object_map, kernel_library, symbolizer = \
+            core.compile(exp.run, [exp_inst], {},
+                         with_attr_writeback=False)
+    except CompileError as error:
+        print(error.render_string(colored=True), file=sys.stderr)
+        return
     finally:
         device_mgr.close_devices()

-    if rpc_map:
+    if object_map.has_rpc():
         raise ValueError("Experiment must not use RPC")

     output = args.output
     if output is None:
-        output = args.file
-        if output.endswith(".py"):
-            output = output[:-3]
-        output += ".elf"
+        basename, ext = os.path.splitext(args.file)
+        output = "{}.elf".format(basename)

     with open(output, "wb") as f:
-        f.write(binary)
+        f.write(kernel_library)

 if __name__ == "__main__":
     main()
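The new default-output logic derives "<basename>.elf" from the input path instead of string-chopping a ".py" suffix; a quick illustration (file names made up):

import os.path

def default_output(path):
    basename, ext = os.path.splitext(path)   # "repo/exp.py" -> ("repo/exp", ".py")
    return "{}.elf".format(basename)

assert default_output("repository/flopping_f.py") == "repository/flopping_f.elf"
assert default_output("flopping_f") == "flopping_f.elf"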
@@ -26,7 +26,7 @@ def get_argparser():
     # Configuration Read command
     p_read = subparsers.add_parser("cfg-read",
                                    help="read key from core device config")
-    p_read.add_argument("key", type=to_bytes,
+    p_read.add_argument("key", type=str,
                         help="key to be read from core device config")

     # Configuration Write command

@@ -34,11 +34,11 @@ def get_argparser():
                                     help="write key-value records to core "
                                          "device config")
     p_write.add_argument("-s", "--string", nargs=2, action="append",
-                         default=[], metavar=("KEY", "STRING"), type=to_bytes,
+                         default=[], metavar=("KEY", "STRING"), type=str,
                          help="key-value records to be written to core device "
                               "config")
     p_write.add_argument("-f", "--file", nargs=2, action="append",
-                         type=to_bytes, default=[],
+                         type=str, default=[],
                          metavar=("KEY", "FILENAME"),
                          help="key and file whose content to be written to "
                               "core device config")

@@ -47,7 +47,7 @@ def get_argparser():
     p_delete = subparsers.add_parser("cfg-delete",
                                      help="delete key from core device config")
     p_delete.add_argument("key", nargs=argparse.REMAINDER,
-                          default=[], type=to_bytes,
+                          default=[], type=str,
                           help="key to be deleted from core device config")

     # Configuration Erase command

@@ -61,9 +61,10 @@ def main():
     device_mgr = DeviceManager(DeviceDB(args.device_db))
     try:
         comm = device_mgr.get("comm")
+        comm.check_ident()

         if args.action == "log":
-            print(comm.get_log())
+            print(comm.get_log(), end='')
         elif args.action == "cfg-read":
             value = comm.flash_storage_read(args.key)
             if not value:

@@ -72,7 +73,7 @@ def main():
            print(value)
         elif args.action == "cfg-write":
             for key, value in args.string:
-                comm.flash_storage_write(key, value)
+                comm.flash_storage_write(key, value.encode("utf-8"))
             for key, filename in args.file:
                 with open(filename, "rb") as fi:
                     comm.flash_storage_write(key, fi.read())
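Keys and values now stay as str in argparse and are only turned into bytes at the point of writing; a small illustration of that boundary (the key/value pair is made up):

key, value = "mac", "00:23:ab:fa:01:02"
payload = value.encode("utf-8")           # what flash_storage_write() receives
assert isinstance(value, str) and isinstance(payload, bytes)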
@@ -12,9 +12,11 @@ import h5py
 from artiq.language.environment import EnvExperiment
 from artiq.master.databases import DeviceDB, DatasetDB
 from artiq.master.worker_db import DeviceManager, DatasetManager
+from artiq.coredevice.core import CompileError
+from artiq.compiler.embedding import ObjectMap
+from artiq.compiler.targets import OR1KTarget
 from artiq.tools import *


 logger = logging.getLogger(__name__)


@@ -25,9 +27,13 @@ class ELFRunner(EnvExperiment):

     def run(self):
         with open(self.file, "rb") as f:
-            self.core.comm.load(f.read())
-        self.core.comm.run("run")
-        self.core.comm.serve(dict(), dict())
+            kernel_library = f.read()
+
+        target = OR1KTarget()
+        self.core.comm.load(kernel_library)
+        self.core.comm.run()
+        self.core.comm.serve(ObjectMap(),
+            lambda addresses: target.symbolize(kernel_library, addresses))


 class DummyScheduler:

@@ -92,7 +98,7 @@ def _build_experiment(device_mgr, dataset_mgr, args):
                              "for ELF kernels")
         return ELFRunner(device_mgr, dataset_mgr, file=args.file)
     else:
-        module = file_import(args.file)
+        module = file_import(args.file, prefix="artiq_run_")
         file = args.file
     else:
         module = sys.modules["__main__"]

@@ -122,6 +128,9 @@ def run(with_file=False):
         exp_inst.prepare()
         exp_inst.run()
         exp_inst.analyze()
+    except CompileError as error:
+        print(error.render_string(colored=True), file=sys.stderr)
+        return
     finally:
         device_mgr.close_devices()
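serve() is handed a callable that closes over the loaded library and maps raw return addresses back to source locations. An equivalent, slightly more explicit formulation of that closure (reusing the names from the snippet above) could use functools.partial:

import functools

# Behaves like: lambda addresses: target.symbolize(kernel_library, addresses)
symbolizer = functools.partial(target.symbolize, kernel_library)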
@@ -131,12 +131,7 @@ trce -v 12 -fastpaths -tsi {build_name}.tsi -o {build_name}.twr {build_name}.ncd
 """
         platform.add_extension(nist_qc1.papilio_adapter_io)

-        self.submodules.leds = gpio.GPIOOut(Cat(
-            platform.request("user_led", 0),
-            platform.request("user_led", 1),
-            platform.request("user_led", 2),
-            platform.request("user_led", 3),
-        ))
+        self.submodules.leds = gpio.GPIOOut(platform.request("user_led", 4))

         self.comb += [
             platform.request("ttl_l_tx_en").eq(1),

@@ -173,9 +168,10 @@ trce -v 12 -fastpaths -tsi {build_name}.tsi -o {build_name}.twr {build_name}.ncd
         self.submodules += phy
         rtio_channels.append(rtio.Channel.from_phy(phy, ofifo_depth=4))

-        phy = ttl_simple.Output(platform.request("user_led", 4))
-        self.submodules += phy
-        rtio_channels.append(rtio.Channel.from_phy(phy, ofifo_depth=4))
+        for led_number in range(4):
+            phy = ttl_simple.Output(platform.request("user_led", led_number))
+            self.submodules += phy
+            rtio_channels.append(rtio.Channel.from_phy(phy, ofifo_depth=4))

         self.add_constant("RTIO_REGULAR_TTL_COUNT", len(rtio_channels))
@@ -1,7 +1,8 @@
 # Copyright (C) 2014, 2015 Robert Jordens <jordens@gmail.com>

-from artiq.language import core, environment, units, scan
+from artiq.language import core, types, environment, units, scan
 from artiq.language.core import *
+from artiq.language.types import *
 from artiq.language.environment import *
 from artiq.language.units import *
 from artiq.language.scan import *

@@ -9,6 +10,7 @@ from artiq.language.scan import *

 __all__ = []
 __all__.extend(core.__all__)
+__all__.extend(types.__all__)
 __all__.extend(environment.__all__)
 __all__.extend(units.__all__)
 __all__.extend(scan.__all__)
@@ -2,92 +2,168 @@
 Core ARTIQ extensions to the Python language.
 """

+import os, linecache, re
 from collections import namedtuple
 from functools import wraps

+# for runtime files in backtraces
+from artiq.coredevice.runtime import source_loader

-__all__ = ["int64", "round64", "TerminationRequested",
-           "kernel", "portable",
-           "set_time_manager", "set_syscall_manager", "set_watchdog_factory",
-           "RuntimeException", "EncodedException"]
+__all__ = ["host_int", "int",
+           "kernel", "portable", "syscall",
+           "set_time_manager", "set_watchdog_factory",
+           "ARTIQException",
+           "TerminationRequested"]

 # global namespace for kernels
-kernel_globals = ("sequential", "parallel",
+kernel_globals = (
+    "sequential", "parallel",
     "delay_mu", "now_mu", "at_mu", "delay",
     "seconds_to_mu", "mu_to_seconds",
-    "syscall", "watchdog")
+    "watchdog"
+)
 __all__.extend(kernel_globals)

+host_int = int

-class int64(int):
-    """64-bit integers for static compilation.
+class int:
+    """
+    Arbitrary-precision integers for static compilation.

-    When this class is used instead of Python's ``int``, the static compiler
-    stores the corresponding variable on 64 bits instead of 32.
+    The static compiler does not use unlimited-precision integers,
+    like Python normally does, because of their unbounded memory requirements.
+    Instead, it allows to choose a bit width (usually 32 or 64) at compile-time,
+    and all computations follow wrap-around semantics on overflow.

-    When used in the interpreter, it behaves as ``int`` and the results of
-    integer operations involving it are also ``int64`` (which matches the
-    size promotion rules of the static compiler). This way, it is possible to
-    specify 64-bit size annotations on constants that are passed to the
-    kernels.
+    This class implements the same semantics on the host.

-    Example:
+    For example:

-    >>> a = int64(1)
-    >>> b = int64(3) + 2
-    >>> isinstance(a, int64)
+    >>> a = int(1, width=64)
+    >>> b = int(3, width=64) + 2
+    >>> isinstance(a, int)
     True
-    >>> isinstance(b, int64)
+    >>> isinstance(b, int)
     True
     >>> a + b
-    6
+    int(6, width=64)
+    >>> int(10, width=32) + 0x7fffffff
+    int(9, width=32)
+    >>> int(0x80000000)
+    int(-2147483648, width=32)
     """
-    pass

+    __slots__ = ['_value', '_width']

-def _make_int64_op_method(int_method):
-    def method(self, *args):
-        r = int_method(self, *args)
-        if isinstance(r, int):
-            r = int64(r)
-        return r
-    return method
+    def __new__(cls, value, width=32):
+        if isinstance(value, int):
+            return value
+        else:
+            sign_bit = 2 ** (width - 1)
+            value = host_int(value)
+            if value & sign_bit:
+                value = -1 & ~sign_bit + (value & (sign_bit - 1)) + 1
+            else:
+                value &= sign_bit - 1

-for _op_name in ("neg", "pos", "abs", "invert", "round",
-                 "add", "radd", "sub", "rsub", "mul", "rmul", "pow", "rpow",
-                 "lshift", "rlshift", "rshift", "rrshift",
-                 "and", "rand", "xor", "rxor", "or", "ror",
-                 "floordiv", "rfloordiv", "mod", "rmod"):
-    _method_name = "__" + _op_name + "__"
-    _orig_method = getattr(int, _method_name)
-    setattr(int64, _method_name, _make_int64_op_method(_orig_method))
+            self = super().__new__(cls)
+            self._value = value
+            self._width = width
+            return self

-for _op_name in ("add", "sub", "mul", "floordiv", "mod",
-                 "pow", "lshift", "rshift", "lshift",
-                 "and", "xor", "or"):
-    _op_method = getattr(int, "__" + _op_name + "__")
-    setattr(int64, "__i" + _op_name + "__", _make_int64_op_method(_op_method))
+    @property
+    def width(self):
+        return self._width
+
+    def __int__(self):
+        return self._value
+
+    def __float__(self):
+        return float(self._value)
+
+    def __str__(self):
+        return str(self._value)
+
+    def __repr__(self):
+        return "int({}, width={})".format(self._value, self._width)
+
+    def _unaryop(lower_fn):
+        def operator(self):
+            return int(lower_fn(self._value), self._width)
+        return operator
+
+    __neg__ = _unaryop(host_int.__neg__)
+    __pos__ = _unaryop(host_int.__pos__)
+    __abs__ = _unaryop(host_int.__abs__)
+    __invert__ = _unaryop(host_int.__invert__)
+    __round__ = _unaryop(host_int.__round__)
+
+    def _binaryop(lower_fn, rlower_fn=None):
+        def operator(self, other):
+            if isinstance(other, host_int):
+                return int(lower_fn(self._value, other), self._width)
+            elif isinstance(other, int):
+                width = self._width if self._width > other._width else other._width
+                return int(lower_fn(self._value, other._value), width)
+            elif rlower_fn:
+                return getattr(other, rlower_fn)(self._value)
+            else:
+                return NotImplemented
+        return operator
+
+    __add__ = __iadd__ = _binaryop(host_int.__add__, "__radd__")
+    __sub__ = __isub__ = _binaryop(host_int.__sub__, "__rsub__")
+    __mul__ = __imul__ = _binaryop(host_int.__mul__, "__rmul__")
+    __truediv__ = __itruediv__ = _binaryop(host_int.__truediv__, "__rtruediv__")
+    __floordiv__ = __ifloordiv__ = _binaryop(host_int.__floordiv__, "__rfloordiv__")
+    __mod__ = __imod__ = _binaryop(host_int.__mod__, "__rmod__")
+    __pow__ = __ipow__ = _binaryop(host_int.__pow__, "__rpow__")
+
+    __radd__ = _binaryop(host_int.__radd__, "__add__")
+    __rsub__ = _binaryop(host_int.__rsub__, "__sub__")
+    __rmul__ = _binaryop(host_int.__rmul__, "__mul__")
+    __rfloordiv__ = _binaryop(host_int.__rfloordiv__, "__floordiv__")
+    __rtruediv__ = _binaryop(host_int.__rtruediv__, "__truediv__")
+    __rmod__ = _binaryop(host_int.__rmod__, "__mod__")
+    __rpow__ = _binaryop(host_int.__rpow__, "__pow__")
+
+    __lshift__ = __ilshift__ = _binaryop(host_int.__lshift__)
+    __rshift__ = __irshift__ = _binaryop(host_int.__rshift__)
+    __and__ = __iand__ = _binaryop(host_int.__and__)
+    __or__ = __ior__ = _binaryop(host_int.__or__)
+    __xor__ = __ixor__ = _binaryop(host_int.__xor__)
+
+    __rlshift__ = _binaryop(host_int.__rlshift__)
+    __rrshift__ = _binaryop(host_int.__rrshift__)
+    __rand__ = _binaryop(host_int.__rand__)
+    __ror__ = _binaryop(host_int.__ror__)
+    __rxor__ = _binaryop(host_int.__rxor__)
+
+    def _compareop(lower_fn, rlower_fn):
+        def operator(self, other):
+            if isinstance(other, host_int):
+                return lower_fn(self._value, other)
+            elif isinstance(other, int):
+                return lower_fn(self._value, other._value)
+            else:
+                return getattr(other, rlower_fn)(self._value)
+        return operator
+
+    __eq__ = _compareop(host_int.__eq__, "__ne__")
+    __ne__ = _compareop(host_int.__ne__, "__eq__")
+    __gt__ = _compareop(host_int.__gt__, "__le__")
+    __ge__ = _compareop(host_int.__ge__, "__lt__")
+    __lt__ = _compareop(host_int.__lt__, "__ge__")
+    __le__ = _compareop(host_int.__le__, "__gt__")


-def round64(x):
-    """Rounds to a 64-bit integer.
-
-    This function is equivalent to ``int64(round(x))`` but, when targeting
-    static compilation, prevents overflow when the rounded value is too large
-    to fit in a 32-bit integer.
-    """
-    return int64(round(x))
-
-
-class TerminationRequested(Exception):
-    """Raised by ``pause`` when the user has requested termination."""
-    pass
-
-
-_KernelFunctionInfo = namedtuple("_KernelFunctionInfo", "core_name k_function")
+_ARTIQEmbeddedInfo = namedtuple("_ARTIQEmbeddedInfo",
+                                "core_name function syscall")


 def kernel(arg):
-    """This decorator marks an object's method for execution on the core
+    """
+    This decorator marks an object's method for execution on the core
     device.

     When a decorated method is called from the Python interpreter, the ``core``
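A short host-side illustration of the wrap-around and width-promotion behaviour implemented by __new__ and _binaryop above (a hedged sketch; the variable names are arbitrary):

from artiq.language.core import int, host_int

a = int(0x7fffffff, width=32)    # most positive 32-bit value
print(repr(a + 1))               # int(-2147483648, width=32): overflow wraps around
b = int(1, width=64) + a
print(b.width)                   # 64: mixed widths promote to the wider operand
print(host_int(a) + 1)           # 2147483648: plain Python int, no wrap-around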
@@ -106,26 +182,20 @@ def kernel(arg):
     specifies the name of the attribute to use as core device driver.
     """
     if isinstance(arg, str):
-        def real_decorator(k_function):
-            @wraps(k_function)
-            def run_on_core(exp, *k_args, **k_kwargs):
-                return getattr(exp, arg).run(k_function,
-                                             ((exp,) + k_args), k_kwargs)
-            run_on_core.k_function_info = _KernelFunctionInfo(
-                core_name=arg, k_function=k_function)
+        def inner_decorator(function):
+            @wraps(function)
+            def run_on_core(self, *k_args, **k_kwargs):
+                return getattr(self, arg).run(run_on_core, ((self,) + k_args), k_kwargs)
+            run_on_core.artiq_embedded = _ARTIQEmbeddedInfo(
+                core_name=arg, function=function, syscall=None)
             return run_on_core
-        return real_decorator
+        return inner_decorator
     else:
-        @wraps(arg)
-        def run_on_core(exp, *k_args, **k_kwargs):
-            return exp.core.run(arg, ((exp,) + k_args), k_kwargs)
-        run_on_core.k_function_info = _KernelFunctionInfo(
-            core_name="core", k_function=arg)
-        return run_on_core
+        return kernel("core")(arg)


-def portable(f):
-    """This decorator marks a function for execution on the same device as its
+def portable(function):
+    """
+    This decorator marks a function for execution on the same device as its
     caller.

     In other words, a decorated function called from the interpreter on the
@@ -133,8 +203,30 @@ def portable(f):
     core device). A decorated function called from a kernel will be executed
     on the core device (no RPC).
     """
-    f.k_function_info = _KernelFunctionInfo(core_name="", k_function=f)
-    return f
+    function.artiq_embedded = \
+        _ARTIQEmbeddedInfo(core_name=None, function=function, syscall=None)
+    return function
+
+
+def syscall(arg):
+    """
+    This decorator marks a function as a system call. When executed on a core
+    device, a C function with the provided name (or the same name as
+    the Python function, if not provided) will be called. When executed on
+    host, the Python function will be called as usual.
+
+    Every argument and the return value must be annotated with ARTIQ types.
+
+    Only drivers should normally define syscalls.
+    """
+    if isinstance(arg, str):
+        def inner_decorator(function):
+            function.artiq_embedded = \
+                _ARTIQEmbeddedInfo(core_name=None, function=None,
+                                   syscall=function.__name__)
+            return function
+        return inner_decorator
+    else:
+        return syscall(arg.__name__)(arg)


 class _DummyTimeManager:
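Combined with the annotations from artiq.language.types, drivers declare typed syscalls and kernels along these lines; the function and class names below are made up, and only the decorator pattern mirrors the ttl driver shown earlier in this commit:

from artiq.language.core import kernel, syscall, now_mu
from artiq.language.types import TInt32, TInt64, TNone

@syscall
def example_set(time_mu: TInt64, channel: TInt32) -> TNone:
    # Host fallback; on the core device this resolves to a runtime C function.
    raise NotImplementedError("syscall not simulated")

class ExampleDriver:
    def __init__(self, core, channel):
        self.core = core          # illustrative constructor, not a real driver signature
        self.channel = channel

    @kernel
    def poke(self):
        example_set(now_mu(), self.channel)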
@@ -163,22 +255,6 @@ def set_time_manager(time_manager):
     _time_manager = time_manager


-class _DummySyscallManager:
-    def do(self, *args):
-        raise NotImplementedError(
-            "Attempted to interpret kernel without a syscall manager")
-
-_syscall_manager = _DummySyscallManager()
-
-
-def set_syscall_manager(syscall_manager):
-    """Set the system call manager used for simulating the core device's
-    runtime in the Python interpreter.
-    """
-    global _syscall_manager
-    _syscall_manager = syscall_manager
-
-
 class _Sequential:
     """In a sequential block, statements are executed one after another, with
     the time increasing as one moves down the statement list."""
@@ -251,17 +327,6 @@ def mu_to_seconds(mu, core=None):
     return mu*core.ref_period


-def syscall(*args):
-    """Invokes a service of the runtime.
-
-    Kernels use this function to interface to the outside world: program RTIO
-    events, make RPCs, etc.
-
-    Only drivers should normally use ``syscall``.
-    """
-    return _syscall_manager.do(*args)
-
-
 class _DummyWatchdog:
     def __init__(self, timeout):
         pass
@@ -286,32 +351,70 @@ def watchdog(timeout):
     return _watchdog_factory(timeout)


-_encoded_exceptions = dict()
-
-
-def EncodedException(eid):
-    """Represents exceptions on the core device, which are identified
-    by a single number."""
-    try:
-        return _encoded_exceptions[eid]
-    except KeyError:
-        class EncodedException(Exception):
-            def __init__(self):
-                Exception.__init__(self, eid)
-        _encoded_exceptions[eid] = EncodedException
-        return EncodedException
-
-
-class RuntimeException(Exception):
-    """Base class for all exceptions used by the device runtime.
-    Those exceptions are defined in ``artiq.coredevice.runtime_exceptions``.
-    """
-    def __init__(self, core, p0, p1, p2):
-        Exception.__init__(self)
-        self.core = core
-        self.p0 = p0
-        self.p1 = p1
-        self.p2 = p2
-
-
-first_user_eid = 1024
+class TerminationRequested(Exception):
+    """Raised by ``pause`` when the user has requested termination."""
+    pass
+
+
+class ARTIQException(Exception):
+    """Base class for exceptions raised or passed through the core device."""
+
+    # Try and create an instance of the specific class, if one exists.
+    def __new__(cls, name, message, params, traceback):
+        def find_subclass(cls):
+            if cls.__name__ == name:
+                return cls
+            else:
+                for subclass in cls.__subclasses__():
+                    cls = find_subclass(subclass)
+                    if cls is not None:
+                        return cls
+
+        more_specific_cls = find_subclass(cls)
+        if more_specific_cls is None:
+            more_specific_cls = cls
+
+        exn = Exception.__new__(more_specific_cls)
+        exn.__init__(name, message, params, traceback)
+        return exn
+
+    def __init__(self, name, message, params, traceback):
+        Exception.__init__(self, name, message, *params)
+        self.name, self.message, self.params = name, message, params
+        self.traceback = list(traceback)
+
+    def __str__(self):
+        lines = []
+
+        if type(self).__name__ == self.name:
+            lines.append(self.message.format(*self.params))
+        else:
+            lines.append("({}) {}".format(self.name, self.message.format(*self.params)))
+
+        lines.append("Core Device Traceback (most recent call last):")
+        for (filename, line, column, function, address) in self.traceback:
+            stub_globals = {"__name__": filename, "__loader__": source_loader}
+            source_line = linecache.getline(filename, line, stub_globals)
+            indentation = re.search(r"^\s*", source_line).end()
+
+            if address is None:
+                formatted_address = ""
+            else:
+                formatted_address = " (RA=0x{:x})".format(address)
+
+            filename = filename.replace(os.path.normpath(os.path.join(os.path.dirname(__file__),
+                                                                      "..")), "<artiq>")
+            if column == -1:
+                lines.append(" File \"{file}\", line {line}, in {function}{address}".
+                             format(file=filename, line=line, function=function,
+                                    address=formatted_address))
+                lines.append(" {}".format(source_line.strip() if source_line else "<unknown>"))
+            else:
+                lines.append(" File \"{file}\", line {line}, column {column},"
+                             " in {function}{address}".
+                             format(file=filename, line=line, column=column + 1,
+                                    function=function, address=formatted_address))
+                lines.append(" {}".format(source_line.strip() if source_line else "<unknown>"))
+                lines.append(" {}^".format(" " * (column - indentation)))
+
+        return "\n".join(lines)
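Because __new__ walks ARTIQException's subclass tree by name, a host-side class only has to share its name with the device-reported one to be picked; a small sketch with a fabricated exception name, message and (empty) traceback:

from artiq.language.core import ARTIQException

class ExampleDeviceError(ARTIQException):
    pass

exn = ARTIQException("ExampleDeviceError", "value {0} out of range", (42,), [])
assert type(exn) is ExampleDeviceError      # matched by find_subclass()
print(exn)                                  # "value 42 out of range" plus the traceback header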
@@ -0,0 +1,19 @@
+"""
+Values representing ARTIQ types, to be used in function type
+annotations.
+"""
+
+from artiq.compiler import types, builtins
+
+__all__ = ["TNone", "TBool", "TInt32", "TInt64", "TFloat",
+           "TStr", "TList", "TRange32", "TRange64"]
+
+TNone = builtins.TNone()
+TBool = builtins.TBool()
+TInt32 = builtins.TInt(types.TValue(32))
+TInt64 = builtins.TInt(types.TValue(64))
+TFloat = builtins.TFloat()
+TStr = builtins.TStr()
+TList = builtins.TList
+TRange32 = builtins.TRange(builtins.TInt(types.TValue(32)))
+TRange64 = builtins.TRange(builtins.TInt(types.TValue(64)))
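TList is exported as the builtins class itself rather than a prebuilt instance, so the element type is supplied at the annotation site. A sketch of an annotated host stub; the function name is invented, and the TList(TInt64) call assumes artiq.compiler.builtins.TList accepts an element type argument:

from artiq.language.core import syscall
from artiq.language.types import TList, TInt32, TInt64, TNone

@syscall
def example_record(timestamps: TList(TInt64), channel: TInt32) -> TNone:
    raise NotImplementedError("syscall not simulated")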
@@ -1,6 +0,0 @@
-from artiq.py2llvm.module import Module
-
-
-def get_runtime_binary(runtime, func_def):
-    module = Module(runtime)
-    module.compile_function(func_def, dict())
-    return module.emit_object()
@ -1,539 +0,0 @@
|
||||||
import ast
|
|
||||||
|
|
||||||
import llvmlite_artiq.ir as ll
|
|
||||||
|
|
||||||
from artiq.py2llvm import values, base_types, fractions, lists, iterators
|
|
||||||
from artiq.py2llvm.tools import is_terminated
|
|
||||||
|
|
||||||
|
|
||||||
_ast_unops = {
|
|
||||||
ast.Invert: "o_inv",
|
|
||||||
ast.Not: "o_not",
|
|
||||||
ast.UAdd: "o_pos",
|
|
||||||
ast.USub: "o_neg"
|
|
||||||
}
|
|
||||||
|
|
||||||
_ast_binops = {
|
|
||||||
ast.Add: values.operators.add,
|
|
||||||
ast.Sub: values.operators.sub,
|
|
||||||
ast.Mult: values.operators.mul,
|
|
||||||
ast.Div: values.operators.truediv,
|
|
||||||
ast.FloorDiv: values.operators.floordiv,
|
|
||||||
ast.Mod: values.operators.mod,
|
|
||||||
ast.Pow: values.operators.pow,
|
|
||||||
ast.LShift: values.operators.lshift,
|
|
||||||
ast.RShift: values.operators.rshift,
|
|
||||||
ast.BitOr: values.operators.or_,
|
|
||||||
ast.BitXor: values.operators.xor,
|
|
||||||
ast.BitAnd: values.operators.and_
|
|
||||||
}
|
|
||||||
|
|
||||||
_ast_cmps = {
|
|
||||||
ast.Eq: values.operators.eq,
|
|
||||||
ast.NotEq: values.operators.ne,
|
|
||||||
ast.Lt: values.operators.lt,
|
|
||||||
ast.LtE: values.operators.le,
|
|
||||||
ast.Gt: values.operators.gt,
|
|
||||||
ast.GtE: values.operators.ge
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
class Visitor:
|
|
||||||
def __init__(self, runtime, ns, builder=None):
|
|
||||||
self.runtime = runtime
|
|
||||||
self.ns = ns
|
|
||||||
self.builder = builder
|
|
||||||
self._break_stack = []
|
|
||||||
self._continue_stack = []
|
|
||||||
self._active_exception_stack = []
|
|
||||||
self._exception_level_stack = [0]
|
|
||||||
|
|
||||||
# builder can be None for visit_expression
|
|
||||||
def visit_expression(self, node):
|
|
||||||
method = "_visit_expr_" + node.__class__.__name__
|
|
||||||
try:
|
|
||||||
visitor = getattr(self, method)
|
|
||||||
except AttributeError:
|
|
||||||
raise NotImplementedError("Unsupported node '{}' in expression"
|
|
||||||
.format(node.__class__.__name__))
|
|
||||||
return visitor(node)
|
|
||||||
|
|
||||||
def _visit_expr_Name(self, node):
|
|
||||||
try:
|
|
||||||
r = self.ns[node.id]
|
|
||||||
except KeyError:
|
|
||||||
raise NameError("Name '{}' is not defined".format(node.id))
|
|
||||||
return r
|
|
||||||
|
|
||||||
def _visit_expr_NameConstant(self, node):
|
|
||||||
v = node.value
|
|
||||||
if v is None:
|
|
||||||
r = base_types.VNone()
|
|
||||||
elif isinstance(v, bool):
|
|
||||||
r = base_types.VBool()
|
|
||||||
else:
|
|
||||||
raise NotImplementedError
|
|
||||||
if self.builder is not None:
|
|
||||||
r.set_const_value(self.builder, v)
|
|
||||||
return r
|
|
||||||
|
|
||||||
def _visit_expr_Num(self, node):
|
|
||||||
n = node.n
|
|
||||||
if isinstance(n, int):
|
|
||||||
if abs(n) < 2**31:
|
|
||||||
r = base_types.VInt()
|
|
||||||
else:
|
|
||||||
r = base_types.VInt(64)
|
|
||||||
elif isinstance(n, float):
|
|
||||||
r = base_types.VFloat()
|
|
||||||
else:
|
|
||||||
raise NotImplementedError
|
|
||||||
if self.builder is not None:
|
|
||||||
r.set_const_value(self.builder, n)
|
|
||||||
return r
|
|
||||||
|
|
||||||
def _visit_expr_UnaryOp(self, node):
|
|
||||||
value = self.visit_expression(node.operand)
|
|
||||||
return getattr(value, _ast_unops[type(node.op)])(self.builder)
|
|
||||||
|
|
||||||
def _visit_expr_BinOp(self, node):
|
|
||||||
return _ast_binops[type(node.op)](self.visit_expression(node.left),
|
|
||||||
self.visit_expression(node.right),
|
|
||||||
self.builder)
|
|
||||||
|
|
||||||
def _visit_expr_BoolOp(self, node):
|
|
||||||
if self.builder is not None:
|
|
||||||
initial_block = self.builder.basic_block
|
|
||||||
function = initial_block.function
|
|
||||||
merge_block = function.append_basic_block("b_merge")
|
|
||||||
|
|
||||||
test_blocks = []
|
|
||||||
test_values = []
|
|
||||||
for i, value in enumerate(node.values):
|
|
||||||
if self.builder is not None:
|
|
||||||
test_block = function.append_basic_block("b_{}_test".format(i))
|
|
||||||
test_blocks.append(test_block)
|
|
||||||
self.builder.position_at_end(test_block)
|
|
||||||
test_values.append(self.visit_expression(value))
|
|
||||||
|
|
||||||
result = test_values[0].new()
|
|
||||||
for value in test_values[1:]:
|
|
||||||
result.merge(value)
|
|
||||||
|
|
||||||
if self.builder is not None:
|
|
||||||
self.builder.position_at_end(initial_block)
|
|
||||||
result.alloca(self.builder, "b_result")
|
|
||||||
self.builder.branch(test_blocks[0])
|
|
||||||
|
|
||||||
next_test_blocks = test_blocks[1:]
|
|
||||||
next_test_blocks.append(None)
|
|
||||||
for block, next_block, value in zip(test_blocks,
|
|
||||||
next_test_blocks,
|
|
||||||
test_values):
|
|
||||||
self.builder.position_at_end(block)
|
|
||||||
bval = value.o_bool(self.builder)
|
|
||||||
result.auto_store(self.builder,
|
|
||||||
value.auto_load(self.builder))
|
|
||||||
if next_block is None:
|
|
||||||
self.builder.branch(merge_block)
|
|
||||||
else:
|
|
||||||
if isinstance(node.op, ast.Or):
|
|
||||||
self.builder.cbranch(bval.auto_load(self.builder),
|
|
||||||
merge_block,
|
|
||||||
next_block)
|
|
||||||
elif isinstance(node.op, ast.And):
|
|
||||||
self.builder.cbranch(bval.auto_load(self.builder),
|
|
||||||
next_block,
|
|
||||||
merge_block)
|
|
||||||
else:
|
|
||||||
raise NotImplementedError
|
|
||||||
self.builder.position_at_end(merge_block)
|
|
||||||
|
|
||||||
return result
|
|
||||||
|
|
||||||
def _visit_expr_Compare(self, node):
|
|
||||||
comparisons = []
|
|
||||||
old_comparator = self.visit_expression(node.left)
|
|
||||||
for op, comparator_a in zip(node.ops, node.comparators):
|
|
||||||
comparator = self.visit_expression(comparator_a)
|
|
||||||
comparison = _ast_cmps[type(op)](old_comparator, comparator,
|
|
||||||
self.builder)
|
|
||||||
comparisons.append(comparison)
|
|
||||||
old_comparator = comparator
|
|
||||||
r = comparisons[0]
|
|
||||||
for comparison in comparisons[1:]:
|
|
||||||
r = values.operators.and_(r, comparison)
|
|
||||||
return r
|
|
||||||
|
|
||||||
def _visit_expr_Call(self, node):
|
|
||||||
fn = node.func.id
|
|
||||||
if fn in {"bool", "int", "int64", "round", "round64", "float", "len"}:
|
|
||||||
value = self.visit_expression(node.args[0])
|
|
||||||
return getattr(value, "o_" + fn)(self.builder)
|
|
||||||
elif fn == "Fraction":
|
|
||||||
r = fractions.VFraction()
|
|
||||||
if self.builder is not None:
|
|
||||||
numerator = self.visit_expression(node.args[0])
|
|
||||||
denominator = self.visit_expression(node.args[1])
|
|
||||||
r.set_value_nd(self.builder, numerator, denominator)
|
|
||||||
return r
|
|
||||||
elif fn == "range":
|
|
||||||
return iterators.IRange(
|
|
||||||
self.builder,
|
|
||||||
[self.visit_expression(arg) for arg in node.args])
|
|
||||||
elif fn == "syscall":
|
|
||||||
return self.runtime.build_syscall(
|
|
||||||
node.args[0].s,
|
|
||||||
[self.visit_expression(expr) for expr in node.args[1:]],
|
|
||||||
self.builder)
|
|
||||||
else:
|
|
||||||
raise NameError("Function '{}' is not defined".format(fn))
|
|
||||||
|
|
||||||
def _visit_expr_Attribute(self, node):
|
|
||||||
value = self.visit_expression(node.value)
|
|
||||||
return value.o_getattr(node.attr, self.builder)
|
|
||||||
|
|
||||||
def _visit_expr_List(self, node):
|
|
||||||
elts = [self.visit_expression(elt) for elt in node.elts]
|
|
||||||
if elts:
|
|
||||||
el_type = elts[0].new()
|
|
||||||
for elt in elts[1:]:
|
|
||||||
el_type.merge(elt)
|
|
||||||
else:
|
|
||||||
el_type = base_types.VNone()
|
|
||||||
count = len(elts)
|
|
||||||
r = lists.VList(el_type, count)
|
|
||||||
r.elts = elts
|
|
||||||
return r
|
|
||||||
|
|
||||||
def _visit_expr_ListComp(self, node):
|
|
||||||
if len(node.generators) != 1:
|
|
||||||
raise NotImplementedError
|
|
||||||
generator = node.generators[0]
|
|
||||||
if not isinstance(generator, ast.comprehension):
|
|
||||||
raise NotImplementedError
|
|
||||||
if not isinstance(generator.target, ast.Name):
|
|
||||||
raise NotImplementedError
|
|
||||||
target = generator.target.id
|
|
||||||
if not isinstance(generator.iter, ast.Call):
|
|
||||||
raise NotImplementedError
|
|
||||||
if not isinstance(generator.iter.func, ast.Name):
|
|
||||||
raise NotImplementedError
|
|
||||||
if generator.iter.func.id != "range":
|
|
||||||
raise NotImplementedError
|
|
||||||
if len(generator.iter.args) != 1:
|
|
||||||
raise NotImplementedError
|
|
||||||
if not isinstance(generator.iter.args[0], ast.Num):
|
|
||||||
raise NotImplementedError
|
|
||||||
count = generator.iter.args[0].n
|
|
||||||
|
|
||||||
# Prevent incorrect use of the generator target, if it is defined in
|
|
||||||
# the local function namespace.
|
|
||||||
if target in self.ns:
|
|
||||||
old_target_val = self.ns[target]
|
|
||||||
del self.ns[target]
|
|
||||||
else:
|
|
||||||
old_target_val = None
|
|
||||||
elt = self.visit_expression(node.elt)
|
|
||||||
if old_target_val is not None:
|
|
||||||
self.ns[target] = old_target_val
|
|
||||||
|
|
||||||
el_type = elt.new()
|
|
||||||
r = lists.VList(el_type, count)
|
|
||||||
r.elt = elt
|
|
||||||
return r
|
|
||||||
|
|
||||||
def _visit_expr_Subscript(self, node):
|
|
||||||
value = self.visit_expression(node.value)
|
|
||||||
if isinstance(node.slice, ast.Index):
|
|
||||||
index = self.visit_expression(node.slice.value)
|
|
||||||
else:
|
|
||||||
raise NotImplementedError
|
|
||||||
return value.o_subscript(index, self.builder)
|
|
||||||
|
|
||||||
def visit_statements(self, stmts):
|
|
||||||
for node in stmts:
|
|
||||||
node_type = node.__class__.__name__
|
|
||||||
method = "_visit_stmt_" + node_type
|
|
||||||
try:
|
|
||||||
visitor = getattr(self, method)
|
|
||||||
except AttributeError:
|
|
||||||
raise NotImplementedError("Unsupported node '{}' in statement"
|
|
||||||
.format(node_type))
|
|
||||||
visitor(node)
|
|
||||||
if node_type in ("Return", "Break", "Continue"):
|
|
||||||
break
|
|
||||||
|
|
||||||
def _bb_terminated(self):
|
|
||||||
return is_terminated(self.builder.basic_block)
|
|
||||||
|
|
||||||
def _visit_stmt_Assign(self, node):
|
|
||||||
val = self.visit_expression(node.value)
|
|
||||||
if isinstance(node.value, ast.List):
|
|
||||||
if len(node.targets) > 1:
|
|
||||||
raise NotImplementedError
|
|
||||||
target = self.visit_expression(node.targets[0])
|
|
||||||
target.set_count(self.builder, val.alloc_count)
|
|
||||||
for i, elt in enumerate(val.elts):
|
|
||||||
idx = base_types.VInt()
|
|
||||||
idx.set_const_value(self.builder, i)
|
|
||||||
target.o_subscript(idx, self.builder).set_value(self.builder,
|
|
||||||
elt)
|
|
||||||
elif isinstance(node.value, ast.ListComp):
|
|
||||||
if len(node.targets) > 1:
|
|
||||||
raise NotImplementedError
|
|
||||||
target = self.visit_expression(node.targets[0])
|
|
||||||
target.set_count(self.builder, val.alloc_count)
|
|
||||||
|
|
||||||
i = base_types.VInt()
|
|
||||||
i.alloca(self.builder)
|
|
||||||
i.auto_store(self.builder, ll.Constant(ll.IntType(32), 0))
|
|
||||||
|
|
||||||
function = self.builder.basic_block.function
|
|
||||||
copy_block = function.append_basic_block("ai_copy")
|
|
||||||
end_block = function.append_basic_block("ai_end")
|
|
||||||
self.builder.branch(copy_block)
|
|
||||||
|
|
||||||
self.builder.position_at_end(copy_block)
|
|
||||||
target.o_subscript(i, self.builder).set_value(self.builder,
|
|
||||||
val.elt)
|
|
||||||
i.auto_store(self.builder, self.builder.add(
|
|
||||||
i.auto_load(self.builder),
|
|
||||||
ll.Constant(ll.IntType(32), 1)))
|
|
||||||
cont = self.builder.icmp_signed(
|
|
||||||
"<", i.auto_load(self.builder),
|
|
||||||
ll.Constant(ll.IntType(32), val.alloc_count))
|
|
||||||
self.builder.cbranch(cont, copy_block, end_block)
|
|
||||||
|
|
||||||
self.builder.position_at_end(end_block)
|
|
||||||
else:
|
|
||||||
for target in node.targets:
|
|
||||||
target = self.visit_expression(target)
|
|
||||||
target.set_value(self.builder, val)
|
|
||||||
|
|
||||||
def _visit_stmt_AugAssign(self, node):
|
|
||||||
target = self.visit_expression(node.target)
|
|
||||||
right = self.visit_expression(node.value)
|
|
||||||
val = _ast_binops[type(node.op)](target, right, self.builder)
|
|
||||||
target.set_value(self.builder, val)
|
|
||||||
|
|
||||||
def _visit_stmt_Expr(self, node):
|
|
||||||
self.visit_expression(node.value)
|
|
||||||
|
|
||||||
def _visit_stmt_If(self, node):
|
|
||||||
function = self.builder.basic_block.function
|
|
||||||
then_block = function.append_basic_block("i_then")
|
|
||||||
else_block = function.append_basic_block("i_else")
|
|
||||||
merge_block = function.append_basic_block("i_merge")
|
|
||||||
|
|
||||||
condition = self.visit_expression(node.test).o_bool(self.builder)
|
|
||||||
self.builder.cbranch(condition.auto_load(self.builder),
|
|
||||||
then_block, else_block)
|
|
||||||
|
|
||||||
self.builder.position_at_end(then_block)
|
|
||||||
self.visit_statements(node.body)
|
|
||||||
if not self._bb_terminated():
|
|
||||||
self.builder.branch(merge_block)
|
|
||||||
|
|
||||||
self.builder.position_at_end(else_block)
|
|
||||||
self.visit_statements(node.orelse)
|
|
||||||
if not self._bb_terminated():
|
|
||||||
self.builder.branch(merge_block)
|
|
||||||
|
|
||||||
self.builder.position_at_end(merge_block)
|
|
||||||
|
|
||||||
def _enter_loop_body(self, break_block, continue_block):
|
|
||||||
self._break_stack.append(break_block)
|
|
||||||
self._continue_stack.append(continue_block)
|
|
||||||
self._exception_level_stack.append(0)
|
|
||||||
|
|
||||||
def _leave_loop_body(self):
|
|
||||||
self._exception_level_stack.pop()
|
|
||||||
self._continue_stack.pop()
|
|
||||||
self._break_stack.pop()
|
|
||||||
|
|
||||||
def _visit_stmt_While(self, node):
|
|
||||||
function = self.builder.basic_block.function
|
|
||||||
|
|
||||||
body_block = function.append_basic_block("w_body")
|
|
||||||
else_block = function.append_basic_block("w_else")
|
|
||||||
condition = self.visit_expression(node.test).o_bool(self.builder)
|
|
||||||
self.builder.cbranch(
|
|
||||||
condition.auto_load(self.builder), body_block, else_block)
|
|
||||||
|
|
||||||
continue_block = function.append_basic_block("w_continue")
|
|
||||||
merge_block = function.append_basic_block("w_merge")
|
|
||||||
self.builder.position_at_end(body_block)
|
|
||||||
self._enter_loop_body(merge_block, continue_block)
|
|
||||||
self.visit_statements(node.body)
|
|
||||||
self._leave_loop_body()
|
|
||||||
if not self._bb_terminated():
|
|
||||||
self.builder.branch(continue_block)
|
|
||||||
|
|
||||||
self.builder.position_at_end(continue_block)
|
|
||||||
condition = self.visit_expression(node.test).o_bool(self.builder)
|
|
||||||
self.builder.cbranch(
|
|
||||||
condition.auto_load(self.builder), body_block, merge_block)
|
|
||||||
|
|
||||||
self.builder.position_at_end(else_block)
|
|
||||||
self.visit_statements(node.orelse)
|
|
||||||
if not self._bb_terminated():
|
|
||||||
self.builder.branch(merge_block)
|
|
||||||
|
|
||||||
self.builder.position_at_end(merge_block)
|
|
||||||
|
|
||||||
def _visit_stmt_For(self, node):
|
|
||||||
function = self.builder.basic_block.function
|
|
||||||
|
|
||||||
it = self.visit_expression(node.iter)
|
|
||||||
target = self.visit_expression(node.target)
|
|
||||||
itval = it.get_value_ptr()
|
|
||||||
|
|
||||||
body_block = function.append_basic_block("f_body")
|
|
||||||
else_block = function.append_basic_block("f_else")
|
|
||||||
cont = it.o_next(self.builder)
|
|
||||||
self.builder.cbranch(
|
|
||||||
cont.auto_load(self.builder), body_block, else_block)
|
|
||||||
|
|
||||||
continue_block = function.append_basic_block("f_continue")
|
|
||||||
merge_block = function.append_basic_block("f_merge")
|
|
||||||
self.builder.position_at_end(body_block)
|
|
||||||
target.set_value(self.builder, itval)
|
|
||||||
self._enter_loop_body(merge_block, continue_block)
|
|
||||||
self.visit_statements(node.body)
|
|
||||||
self._leave_loop_body()
|
|
||||||
        if not self._bb_terminated():
            self.builder.branch(continue_block)

        self.builder.position_at_end(continue_block)
        cont = it.o_next(self.builder)
        self.builder.cbranch(
            cont.auto_load(self.builder), body_block, merge_block)

        self.builder.position_at_end(else_block)
        self.visit_statements(node.orelse)
        if not self._bb_terminated():
            self.builder.branch(merge_block)

        self.builder.position_at_end(merge_block)

    def _break_loop_body(self, target_block):
        exception_levels = self._exception_level_stack[-1]
        if exception_levels:
            self.runtime.build_pop(self.builder, exception_levels)
        self.builder.branch(target_block)

    def _visit_stmt_Break(self, node):
        self._break_loop_body(self._break_stack[-1])

    def _visit_stmt_Continue(self, node):
        self._break_loop_body(self._continue_stack[-1])

    def _visit_stmt_Return(self, node):
        if node.value is None:
            val = base_types.VNone()
        else:
            val = self.visit_expression(node.value)
        exception_levels = sum(self._exception_level_stack)
        if exception_levels:
            self.runtime.build_pop(self.builder, exception_levels)
        if isinstance(val, base_types.VNone):
            self.builder.ret_void()
        else:
            self.builder.ret(val.auto_load(self.builder))

    def _visit_stmt_Pass(self, node):
        pass

    def _visit_stmt_Raise(self, node):
        if self._active_exception_stack:
            finally_block, propagate, propagate_eid = (
                self._active_exception_stack[-1])
            self.builder.store(ll.Constant(ll.IntType(1), 1), propagate)
            if node.exc is not None:
                eid = ll.Constant(ll.IntType(32), node.exc.args[0].n)
                self.builder.store(eid, propagate_eid)
            self.builder.branch(finally_block)
        else:
            eid = ll.Constant(ll.IntType(32), node.exc.args[0].n)
            self.runtime.build_raise(self.builder, eid)

    def _handle_exception(self, function, finally_block,
                          propagate, propagate_eid, handlers):
        eid = self.runtime.build_getid(self.builder)
        self._active_exception_stack.append(
            (finally_block, propagate, propagate_eid))
        self.builder.store(ll.Constant(ll.IntType(1), 1), propagate)
        self.builder.store(eid, propagate_eid)

        for handler in handlers:
            handled_exc_block = function.append_basic_block("try_exc_h")
            cont_exc_block = function.append_basic_block("try_exc_c")
            if handler.type is None:
                self.builder.branch(handled_exc_block)
            else:
                if isinstance(handler.type, ast.Tuple):
                    match = self.builder.icmp_signed(
                        "==", eid,
                        ll.Constant(ll.IntType(32),
                                    handler.type.elts[0].args[0].n))
                    for elt in handler.type.elts[1:]:
                        match = self.builder.or_(
                            match,
                            self.builder.icmp_signed(
                                "==", eid,
                                ll.Constant(ll.IntType(32), elt.args[0].n)))
                else:
                    match = self.builder.icmp_signed(
                        "==", eid,
                        ll.Constant(ll.IntType(32), handler.type.args[0].n))
                self.builder.cbranch(match, handled_exc_block, cont_exc_block)
            self.builder.position_at_end(handled_exc_block)
            self.builder.store(ll.Constant(ll.IntType(1), 0), propagate)
            self.visit_statements(handler.body)
            if not self._bb_terminated():
                self.builder.branch(finally_block)
            self.builder.position_at_end(cont_exc_block)
        self.builder.branch(finally_block)

        self._active_exception_stack.pop()

    def _visit_stmt_Try(self, node):
        function = self.builder.basic_block.function
        noexc_block = function.append_basic_block("try_noexc")
        exc_block = function.append_basic_block("try_exc")
        finally_block = function.append_basic_block("try_finally")

        propagate = self.builder.alloca(ll.IntType(1),
                                        name="propagate")
        self.builder.store(ll.Constant(ll.IntType(1), 0), propagate)
        propagate_eid = self.builder.alloca(ll.IntType(32),
                                            name="propagate_eid")
        exception_occured = self.runtime.build_catch(self.builder)
        self.builder.cbranch(exception_occured, exc_block, noexc_block)

        self.builder.position_at_end(noexc_block)
        self._exception_level_stack[-1] += 1
        self.visit_statements(node.body)
        self._exception_level_stack[-1] -= 1
        if not self._bb_terminated():
            self.runtime.build_pop(self.builder, 1)
            self.visit_statements(node.orelse)
            if not self._bb_terminated():
                self.builder.branch(finally_block)
        self.builder.position_at_end(exc_block)
        self._handle_exception(function, finally_block,
                               propagate, propagate_eid, node.handlers)

        propagate_block = function.append_basic_block("try_propagate")
        merge_block = function.append_basic_block("try_merge")
        self.builder.position_at_end(finally_block)
        self.visit_statements(node.finalbody)
        if not self._bb_terminated():
            self.builder.cbranch(
                self.builder.load(propagate),
                propagate_block, merge_block)
        self.builder.position_at_end(propagate_block)
        self.runtime.build_raise(self.builder, self.builder.load(propagate_eid))
        self.builder.branch(merge_block)
        self.builder.position_at_end(merge_block)
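The deleted code generator above lowers try/except/finally with two stack slots, propagate and propagate_eid: a handler whose exception id matches the current one clears propagate, and the finally block re-raises only while it is still set. A minimal Python sketch of the runtime behaviour this lowering emulates follows; the function names, the use of RuntimeError as a stand-in for a kernel exception, and the handler tuple shape are all illustrative, not part of the commit.

def run_try(body, handlers, finalbody, current_eid):
    # handlers: list of (set of eids or None, handler callable); None catches everything
    propagate, propagate_eid = False, None
    try:
        body()
    except RuntimeError:                    # stand-in for a raised kernel exception
        propagate, propagate_eid = True, current_eid
        for eids, handler in handlers:
            if eids is None or current_eid in eids:
                propagate = False           # matching handler consumes the exception
                handler()
                break
    finalbody()                             # finally always runs
    if propagate:
        raise RuntimeError(propagate_eid)   # re-raise past this frame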
@ -1,321 +0,0 @@
|
||||||
import llvmlite_artiq.ir as ll
|
|
||||||
|
|
||||||
from artiq.py2llvm.values import VGeneric
|
|
||||||
|
|
||||||
|
|
||||||
class VNone(VGeneric):
|
|
||||||
def get_llvm_type(self):
|
|
||||||
return ll.VoidType()
|
|
||||||
|
|
||||||
def alloca(self, builder, name):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def set_const_value(self, builder, v):
|
|
||||||
assert v is None
|
|
||||||
|
|
||||||
def set_value(self, builder, other):
|
|
||||||
if not isinstance(other, VNone):
|
|
||||||
raise TypeError
|
|
||||||
|
|
||||||
def o_bool(self, builder):
|
|
||||||
r = VBool()
|
|
||||||
if builder is not None:
|
|
||||||
r.set_const_value(builder, False)
|
|
||||||
return r
|
|
||||||
|
|
||||||
def o_not(self, builder):
|
|
||||||
r = VBool()
|
|
||||||
if builder is not None:
|
|
||||||
r.set_const_value(builder, True)
|
|
||||||
return r
|
|
||||||
|
|
||||||
|
|
||||||
class VInt(VGeneric):
|
|
||||||
def __init__(self, nbits=32):
|
|
||||||
VGeneric.__init__(self)
|
|
||||||
self.nbits = nbits
|
|
||||||
|
|
||||||
def get_llvm_type(self):
|
|
||||||
return ll.IntType(self.nbits)
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return "<VInt:{}>".format(self.nbits)
|
|
||||||
|
|
||||||
def same_type(self, other):
|
|
||||||
return isinstance(other, VInt) and other.nbits == self.nbits
|
|
||||||
|
|
||||||
def merge(self, other):
|
|
||||||
if isinstance(other, VInt) and not isinstance(other, VBool):
|
|
||||||
if other.nbits > self.nbits:
|
|
||||||
self.nbits = other.nbits
|
|
||||||
else:
|
|
||||||
raise TypeError("Incompatible types: {} and {}"
|
|
||||||
.format(repr(self), repr(other)))
|
|
||||||
|
|
||||||
def set_value(self, builder, n):
|
|
||||||
self.auto_store(
|
|
||||||
builder, n.o_intx(self.nbits, builder).auto_load(builder))
|
|
||||||
|
|
||||||
def set_const_value(self, builder, n):
|
|
||||||
self.auto_store(builder, ll.Constant(self.get_llvm_type(), n))
|
|
||||||
|
|
||||||
def o_bool(self, builder, inv=False):
|
|
||||||
r = VBool()
|
|
||||||
if builder is not None:
|
|
||||||
r.auto_store(
|
|
||||||
builder, builder.icmp_signed(
|
|
||||||
"==" if inv else "!=",
|
|
||||||
self.auto_load(builder),
|
|
||||||
ll.Constant(self.get_llvm_type(), 0)))
|
|
||||||
return r
|
|
||||||
|
|
||||||
def o_float(self, builder):
|
|
||||||
r = VFloat()
|
|
||||||
if builder is not None:
|
|
||||||
if isinstance(self, VBool):
|
|
||||||
cf = builder.uitofp
|
|
||||||
else:
|
|
||||||
cf = builder.sitofp
|
|
||||||
r.auto_store(builder, cf(self.auto_load(builder),
|
|
||||||
r.get_llvm_type()))
|
|
||||||
return r
|
|
||||||
|
|
||||||
def o_not(self, builder):
|
|
||||||
return self.o_bool(builder, inv=True)
|
|
||||||
|
|
||||||
def o_neg(self, builder):
|
|
||||||
r = VInt(self.nbits)
|
|
||||||
if builder is not None:
|
|
||||||
r.auto_store(
|
|
||||||
builder, builder.mul(
|
|
||||||
self.auto_load(builder),
|
|
||||||
ll.Constant(self.get_llvm_type(), -1)))
|
|
||||||
return r
|
|
||||||
|
|
||||||
def o_intx(self, target_bits, builder):
|
|
||||||
r = VInt(target_bits)
|
|
||||||
if builder is not None:
|
|
||||||
if self.nbits == target_bits:
|
|
||||||
r.auto_store(
|
|
||||||
builder, self.auto_load(builder))
|
|
||||||
if self.nbits > target_bits:
|
|
||||||
r.auto_store(
|
|
||||||
builder, builder.trunc(self.auto_load(builder),
|
|
||||||
r.get_llvm_type()))
|
|
||||||
if self.nbits < target_bits:
|
|
||||||
if isinstance(self, VBool):
|
|
||||||
ef = builder.zext
|
|
||||||
else:
|
|
||||||
ef = builder.sext
|
|
||||||
r.auto_store(
|
|
||||||
builder, ef(self.auto_load(builder),
|
|
||||||
r.get_llvm_type()))
|
|
||||||
return r
|
|
||||||
o_roundx = o_intx
|
|
||||||
|
|
||||||
def o_truediv(self, other, builder):
|
|
||||||
if isinstance(other, VInt):
|
|
||||||
left = self.o_float(builder)
|
|
||||||
right = other.o_float(builder)
|
|
||||||
return left.o_truediv(right, builder)
|
|
||||||
else:
|
|
||||||
return NotImplemented
|
|
||||||
|
|
||||||
def _make_vint_binop_method(builder_name, bool_op):
|
|
||||||
def binop_method(self, other, builder):
|
|
||||||
if isinstance(other, VInt):
|
|
||||||
target_bits = max(self.nbits, other.nbits)
|
|
||||||
if not bool_op and target_bits == 1:
|
|
||||||
target_bits = 32
|
|
||||||
if bool_op and target_bits == 1:
|
|
||||||
r = VBool()
|
|
||||||
else:
|
|
||||||
r = VInt(target_bits)
|
|
||||||
if builder is not None:
|
|
||||||
left = self.o_intx(target_bits, builder)
|
|
||||||
right = other.o_intx(target_bits, builder)
|
|
||||||
bf = getattr(builder, builder_name)
|
|
||||||
r.auto_store(
|
|
||||||
builder, bf(left.auto_load(builder),
|
|
||||||
right.auto_load(builder)))
|
|
||||||
return r
|
|
||||||
else:
|
|
||||||
return NotImplemented
|
|
||||||
return binop_method
|
|
||||||
|
|
||||||
for _method_name, _builder_name, _bool_op in (("o_add", "add", False),
|
|
||||||
("o_sub", "sub", False),
|
|
||||||
("o_mul", "mul", False),
|
|
||||||
("o_floordiv", "sdiv", False),
|
|
||||||
("o_mod", "srem", False),
|
|
||||||
("o_and", "and_", True),
|
|
||||||
("o_xor", "xor", True),
|
|
||||||
("o_or", "or_", True)):
|
|
||||||
setattr(VInt, _method_name, _make_vint_binop_method(_builder_name, _bool_op))
|
|
||||||
|
|
||||||
|
|
||||||
def _make_vint_cmp_method(icmp_val):
|
|
||||||
def cmp_method(self, other, builder):
|
|
||||||
if isinstance(other, VInt):
|
|
||||||
r = VBool()
|
|
||||||
if builder is not None:
|
|
||||||
target_bits = max(self.nbits, other.nbits)
|
|
||||||
left = self.o_intx(target_bits, builder)
|
|
||||||
right = other.o_intx(target_bits, builder)
|
|
||||||
r.auto_store(
|
|
||||||
builder,
|
|
||||||
builder.icmp_signed(
|
|
||||||
icmp_val, left.auto_load(builder),
|
|
||||||
right.auto_load(builder)))
|
|
||||||
return r
|
|
||||||
else:
|
|
||||||
return NotImplemented
|
|
||||||
return cmp_method
|
|
||||||
|
|
||||||
for _method_name, _icmp_val in (("o_eq", "=="),
|
|
||||||
("o_ne", "!="),
|
|
||||||
("o_lt", "<"),
|
|
||||||
("o_le", "<="),
|
|
||||||
("o_gt", ">"),
|
|
||||||
("o_ge", ">=")):
|
|
||||||
setattr(VInt, _method_name, _make_vint_cmp_method(_icmp_val))
|
|
||||||
|
|
||||||
|
|
||||||
class VBool(VInt):
|
|
||||||
def __init__(self):
|
|
||||||
VInt.__init__(self, 1)
|
|
||||||
|
|
||||||
__repr__ = VGeneric.__repr__
|
|
||||||
same_type = VGeneric.same_type
|
|
||||||
merge = VGeneric.merge
|
|
||||||
|
|
||||||
def set_const_value(self, builder, b):
|
|
||||||
VInt.set_const_value(self, builder, int(b))
|
|
||||||
|
|
||||||
|
|
||||||
class VFloat(VGeneric):
|
|
||||||
def get_llvm_type(self):
|
|
||||||
return ll.DoubleType()
|
|
||||||
|
|
||||||
def set_value(self, builder, v):
|
|
||||||
if not isinstance(v, VFloat):
|
|
||||||
raise TypeError
|
|
||||||
self.auto_store(builder, v.auto_load(builder))
|
|
||||||
|
|
||||||
def set_const_value(self, builder, n):
|
|
||||||
self.auto_store(builder, ll.Constant(self.get_llvm_type(), n))
|
|
||||||
|
|
||||||
def o_float(self, builder):
|
|
||||||
r = VFloat()
|
|
||||||
if builder is not None:
|
|
||||||
r.auto_store(builder, self.auto_load(builder))
|
|
||||||
return r
|
|
||||||
|
|
||||||
def o_bool(self, builder, inv=False):
|
|
||||||
r = VBool()
|
|
||||||
if builder is not None:
|
|
||||||
r.auto_store(
|
|
||||||
builder, builder.fcmp_ordered(
|
|
||||||
"==" if inv else "!=",
|
|
||||||
self.auto_load(builder),
|
|
||||||
ll.Constant(self.get_llvm_type(), 0.0)))
|
|
||||||
return r
|
|
||||||
|
|
||||||
def o_not(self, builder):
|
|
||||||
return self.o_bool(builder, True)
|
|
||||||
|
|
||||||
def o_neg(self, builder):
|
|
||||||
r = VFloat()
|
|
||||||
if builder is not None:
|
|
||||||
r.auto_store(
|
|
||||||
builder, builder.fmul(
|
|
||||||
self.auto_load(builder),
|
|
||||||
ll.Constant(self.get_llvm_type(), -1.0)))
|
|
||||||
return r
|
|
||||||
|
|
||||||
def o_intx(self, target_bits, builder):
|
|
||||||
r = VInt(target_bits)
|
|
||||||
if builder is not None:
|
|
||||||
r.auto_store(builder, builder.fptosi(self.auto_load(builder),
|
|
||||||
r.get_llvm_type()))
|
|
||||||
return r
|
|
||||||
|
|
||||||
def o_roundx(self, target_bits, builder):
|
|
||||||
r = VInt(target_bits)
|
|
||||||
if builder is not None:
|
|
||||||
function = builder.basic_block.function
|
|
||||||
neg_block = function.append_basic_block("fr_neg")
|
|
||||||
merge_block = function.append_basic_block("fr_merge")
|
|
||||||
|
|
||||||
half = VFloat()
|
|
||||||
half.alloca(builder, "half")
|
|
||||||
half.set_const_value(builder, 0.5)
|
|
||||||
|
|
||||||
condition = builder.fcmp_ordered(
|
|
||||||
"<",
|
|
||||||
self.auto_load(builder),
|
|
||||||
ll.Constant(self.get_llvm_type(), 0.0))
|
|
||||||
builder.cbranch(condition, neg_block, merge_block)
|
|
||||||
|
|
||||||
builder.position_at_end(neg_block)
|
|
||||||
half.set_const_value(builder, -0.5)
|
|
||||||
builder.branch(merge_block)
|
|
||||||
|
|
||||||
builder.position_at_end(merge_block)
|
|
||||||
s = builder.fadd(self.auto_load(builder), half.auto_load(builder))
|
|
||||||
r.auto_store(builder, builder.fptosi(s, r.get_llvm_type()))
|
|
||||||
return r
|
|
||||||
|
|
||||||
def o_floordiv(self, other, builder):
|
|
||||||
return self.o_truediv(other, builder).o_int64(builder).o_float(builder)
|
|
||||||
|
|
||||||
def _make_vfloat_binop_method(builder_name, reverse):
|
|
||||||
def binop_method(self, other, builder):
|
|
||||||
if not hasattr(other, "o_float"):
|
|
||||||
return NotImplemented
|
|
||||||
r = VFloat()
|
|
||||||
if builder is not None:
|
|
||||||
left = self.o_float(builder)
|
|
||||||
right = other.o_float(builder)
|
|
||||||
if reverse:
|
|
||||||
left, right = right, left
|
|
||||||
bf = getattr(builder, builder_name)
|
|
||||||
r.auto_store(
|
|
||||||
builder, bf(left.auto_load(builder),
|
|
||||||
right.auto_load(builder)))
|
|
||||||
return r
|
|
||||||
return binop_method
|
|
||||||
|
|
||||||
for _method_name, _builder_name in (("add", "fadd"),
|
|
||||||
("sub", "fsub"),
|
|
||||||
("mul", "fmul"),
|
|
||||||
("truediv", "fdiv")):
|
|
||||||
setattr(VFloat, "o_" + _method_name,
|
|
||||||
_make_vfloat_binop_method(_builder_name, False))
|
|
||||||
setattr(VFloat, "or_" + _method_name,
|
|
||||||
_make_vfloat_binop_method(_builder_name, True))
|
|
||||||
|
|
||||||
|
|
||||||
def _make_vfloat_cmp_method(fcmp_val):
|
|
||||||
def cmp_method(self, other, builder):
|
|
||||||
if not hasattr(other, "o_float"):
|
|
||||||
return NotImplemented
|
|
||||||
r = VBool()
|
|
||||||
if builder is not None:
|
|
||||||
left = self.o_float(builder)
|
|
||||||
right = other.o_float(builder)
|
|
||||||
r.auto_store(
|
|
||||||
builder,
|
|
||||||
builder.fcmp_ordered(
|
|
||||||
fcmp_val, left.auto_load(builder),
|
|
||||||
right.auto_load(builder)))
|
|
||||||
return r
|
|
||||||
return cmp_method
|
|
||||||
|
|
||||||
for _method_name, _fcmp_val in (("o_eq", "=="),
|
|
||||||
("o_ne", "!="),
|
|
||||||
("o_lt", "<"),
|
|
||||||
("o_le", "<="),
|
|
||||||
("o_gt", ">"),
|
|
||||||
("o_ge", ">=")):
|
|
||||||
setattr(VFloat, _method_name, _make_vfloat_cmp_method(_fcmp_val))
|
|
|
@@ -1,74 +0,0 @@
import ast
from copy import deepcopy

from artiq.py2llvm.ast_body import Visitor
from artiq.py2llvm import base_types


class _TypeScanner(ast.NodeVisitor):
    def __init__(self, env, ns):
        self.exprv = Visitor(env, ns)

    def _update_target(self, target, val):
        ns = self.exprv.ns
        if isinstance(target, ast.Name):
            if target.id in ns:
                ns[target.id].merge(val)
            else:
                ns[target.id] = deepcopy(val)
        elif isinstance(target, ast.Subscript):
            target = target.value
            levels = 0
            while isinstance(target, ast.Subscript):
                target = target.value
                levels += 1
            if isinstance(target, ast.Name):
                target_value = ns[target.id]
                for i in range(levels):
                    target_value = target_value.o_subscript(None, None)
                target_value.merge_subscript(val)
            else:
                raise NotImplementedError
        else:
            raise NotImplementedError

    def visit_Assign(self, node):
        val = self.exprv.visit_expression(node.value)
        for target in node.targets:
            self._update_target(target, val)

    def visit_AugAssign(self, node):
        val = self.exprv.visit_expression(ast.BinOp(
            op=node.op, left=node.target, right=node.value))
        self._update_target(node.target, val)

    def visit_For(self, node):
        it = self.exprv.visit_expression(node.iter)
        self._update_target(node.target, it.get_value_ptr())
        self.generic_visit(node)

    def visit_Return(self, node):
        if node.value is None:
            val = base_types.VNone()
        else:
            val = self.exprv.visit_expression(node.value)
        ns = self.exprv.ns
        if "return" in ns:
            ns["return"].merge(val)
        else:
            ns["return"] = deepcopy(val)


def infer_function_types(env, node, param_types):
    ns = deepcopy(param_types)
    ts = _TypeScanner(env, ns)
    ts.visit(node)
    while True:
        prev_ns = deepcopy(ns)
        ts = _TypeScanner(env, ns)
        ts.visit(node)
        if all(v.same_type(prev_ns[k]) for k, v in ns.items()):
            # no more promotions - completed
            if "return" not in ns:
                ns["return"] = base_types.VNone()
            return ns
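infer_function_types above re-scans the function until no namespace entry changes type, i.e. it computes a fixed point over the type-merging (promotion) rules. A stripped-down sketch of that iteration pattern, using plain dictionaries instead of the V* value classes (the helper name and the toy widening rule are illustrative only):

import copy

def run_to_fixed_point(step, state):
    # re-apply `step` until the state stops changing, the way
    # infer_function_types re-scans the function body above
    while True:
        previous = copy.deepcopy(state)
        state = step(state)
        if state == previous:
            return state

# toy example: widen every entry until it reaches at least 64 bits
widen = lambda ns: {k: max(v, 64) for k, v in ns.items()}
assert run_to_fixed_point(widen, {"a": 32, "return": 64}) == {"a": 64, "return": 64}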
@ -1,51 +0,0 @@
|
||||||
from artiq.py2llvm.values import operators
|
|
||||||
from artiq.py2llvm.base_types import VInt
|
|
||||||
|
|
||||||
class IRange:
|
|
||||||
def __init__(self, builder, args):
|
|
||||||
minimum, step = None, None
|
|
||||||
if len(args) == 1:
|
|
||||||
maximum = args[0]
|
|
||||||
elif len(args) == 2:
|
|
||||||
minimum, maximum = args
|
|
||||||
else:
|
|
||||||
minimum, maximum, step = args
|
|
||||||
if minimum is None:
|
|
||||||
minimum = VInt()
|
|
||||||
if builder is not None:
|
|
||||||
minimum.set_const_value(builder, 0)
|
|
||||||
if step is None:
|
|
||||||
step = VInt()
|
|
||||||
if builder is not None:
|
|
||||||
step.set_const_value(builder, 1)
|
|
||||||
|
|
||||||
self._counter = minimum.new()
|
|
||||||
self._counter.merge(maximum)
|
|
||||||
self._counter.merge(step)
|
|
||||||
self._minimum = self._counter.new()
|
|
||||||
self._maximum = self._counter.new()
|
|
||||||
self._step = self._counter.new()
|
|
||||||
|
|
||||||
if builder is not None:
|
|
||||||
self._minimum.alloca(builder, "irange_min")
|
|
||||||
self._maximum.alloca(builder, "irange_max")
|
|
||||||
self._step.alloca(builder, "irange_step")
|
|
||||||
self._counter.alloca(builder, "irange_count")
|
|
||||||
|
|
||||||
self._minimum.set_value(builder, minimum)
|
|
||||||
self._maximum.set_value(builder, maximum)
|
|
||||||
self._step.set_value(builder, step)
|
|
||||||
|
|
||||||
counter_init = operators.sub(self._minimum, self._step, builder)
|
|
||||||
self._counter.set_value(builder, counter_init)
|
|
||||||
|
|
||||||
# must be a pointer value that can be dereferenced anytime
|
|
||||||
# to get the current value of the iterator
|
|
||||||
def get_value_ptr(self):
|
|
||||||
return self._counter
|
|
||||||
|
|
||||||
def o_next(self, builder):
|
|
||||||
self._counter.set_value(
|
|
||||||
builder,
|
|
||||||
operators.add(self._counter, self._step, builder))
|
|
||||||
return operators.lt(self._counter, self._maximum, builder)
|
|
|
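IRange models Python's range() in generated code by initializing the counter one step below the minimum and having o_next() add the step back before each comparison against the maximum. The same protocol in plain Python, for reference (a hedged sketch, not code from the commit):

class PyIRange:
    def __init__(self, minimum, maximum, step=1):
        self.maximum, self.step = maximum, step
        self.counter = minimum - step   # pre-decremented, as in IRange.__init__

    def get_value(self):
        return self.counter             # counterpart of get_value_ptr()

    def next(self):
        self.counter += self.step       # counterpart of o_next()
        return self.counter < self.maximum

r, out = PyIRange(0, 5), []
while r.next():
    out.append(r.get_value())
assert out == [0, 1, 2, 3, 4]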
@@ -1,72 +0,0 @@
import llvmlite_artiq.ir as ll

from artiq.py2llvm.values import VGeneric
from artiq.py2llvm.base_types import VInt, VNone


class VList(VGeneric):
    def __init__(self, el_type, alloc_count):
        VGeneric.__init__(self)
        self.el_type = el_type
        self.alloc_count = alloc_count

    def get_llvm_type(self):
        count = 0 if self.alloc_count is None else self.alloc_count
        if isinstance(self.el_type, VNone):
            return ll.LiteralStructType([ll.IntType(32)])
        else:
            return ll.LiteralStructType([
                ll.IntType(32), ll.ArrayType(self.el_type.get_llvm_type(),
                                             count)])

    def __repr__(self):
        return "<VList:{} x{}>".format(
            repr(self.el_type),
            "?" if self.alloc_count is None else self.alloc_count)

    def same_type(self, other):
        return (isinstance(other, VList)
                and self.el_type.same_type(other.el_type))

    def merge(self, other):
        if isinstance(other, VList):
            if self.alloc_count:
                if other.alloc_count:
                    self.el_type.merge(other.el_type)
                    if self.alloc_count < other.alloc_count:
                        self.alloc_count = other.alloc_count
            else:
                self.el_type = other.el_type.new()
                self.alloc_count = other.alloc_count
        else:
            raise TypeError("Incompatible types: {} and {}"
                            .format(repr(self), repr(other)))

    def merge_subscript(self, other):
        self.el_type.merge(other)

    def set_count(self, builder, count):
        count_ptr = builder.gep(self.llvm_value, [
            ll.Constant(ll.IntType(32), 0),
            ll.Constant(ll.IntType(32), 0)])
        builder.store(ll.Constant(ll.IntType(32), count), count_ptr)

    def o_len(self, builder):
        r = VInt()
        if builder is not None:
            count_ptr = builder.gep(self.llvm_value, [
                ll.Constant(ll.IntType(32), 0),
                ll.Constant(ll.IntType(32), 0)])
            r.auto_store(builder, builder.load(count_ptr))
        return r

    def o_subscript(self, index, builder):
        r = self.el_type.new()
        if builder is not None and not isinstance(r, VNone):
            index = index.o_int(builder).auto_load(builder)
            ssa_r = builder.gep(self.llvm_value, [
                ll.Constant(ll.IntType(32), 0),
                ll.Constant(ll.IntType(32), 1),
                index])
            r.auto_store(builder, ssa_r)
        return r
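VList lays a list out as an LLVM struct of a 32-bit element count followed by a fixed-capacity array, i.e. { i32, [alloc_count x T] }. A ctypes sketch of the equivalent in-memory layout for alloc_count=4 and T=i32 (the structure name and values are illustrative, and this assumes the natural alignment of i32 matches between ctypes and the target):

import ctypes

class I32List4(ctypes.Structure):
    # mirrors ll.LiteralStructType([i32, [4 x i32]]) from VList.get_llvm_type()
    _fields_ = [("count", ctypes.c_int32),
                ("elements", ctypes.c_int32 * 4)]

lst = I32List4(count=2, elements=(ctypes.c_int32 * 4)(10, 20, 0, 0))
assert [lst.elements[i] for i in range(lst.count)] == [10, 20]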
@@ -1,62 +0,0 @@
import llvmlite_artiq.ir as ll
import llvmlite_artiq.binding as llvm

from artiq.py2llvm import infer_types, ast_body, base_types, fractions, tools


class Module:
    def __init__(self, runtime=None):
        self.llvm_module = ll.Module("main")
        self.runtime = runtime

        if self.runtime is not None:
            self.runtime.init_module(self)
        fractions.init_module(self)

    def finalize(self):
        self.llvm_module_ref = llvm.parse_assembly(str(self.llvm_module))
        pmb = llvm.create_pass_manager_builder()
        pmb.opt_level = 2
        pm = llvm.create_module_pass_manager()
        pmb.populate(pm)
        pm.run(self.llvm_module_ref)

    def get_ee(self):
        self.finalize()
        tm = llvm.Target.from_default_triple().create_target_machine()
        ee = llvm.create_mcjit_compiler(self.llvm_module_ref, tm)
        ee.finalize_object()
        return ee

    def emit_object(self):
        self.finalize()
        return self.runtime.emit_object()

    def compile_function(self, func_def, param_types):
        ns = infer_types.infer_function_types(self.runtime, func_def, param_types)
        retval = ns["return"]

        function_type = ll.FunctionType(retval.get_llvm_type(),
            [ns[arg.arg].get_llvm_type() for arg in func_def.args.args])
        function = ll.Function(self.llvm_module, function_type, func_def.name)
        bb = function.append_basic_block("entry")
        builder = ll.IRBuilder()
        builder.position_at_end(bb)

        for arg_ast, arg_llvm in zip(func_def.args.args, function.args):
            arg_llvm.name = arg_ast.arg
        for k, v in ns.items():
            v.alloca(builder, k)
        for arg_ast, arg_llvm in zip(func_def.args.args, function.args):
            ns[arg_ast.arg].auto_store(builder, arg_llvm)

        visitor = ast_body.Visitor(self.runtime, ns, builder)
        visitor.visit_statements(func_def.body)

        if not tools.is_terminated(builder.basic_block):
            if isinstance(retval, base_types.VNone):
                builder.ret_void()
            else:
                builder.ret(retval.auto_load(builder))

        return function, retval
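Module.get_ee() drives the usual llvmlite flow: the textual IR is re-parsed, optimized with a pass manager, then JIT-compiled with MCJIT. A hedged sketch of how a compiled function can be invoked through ctypes, written against upstream llvmlite rather than the llvmlite_artiq fork used here; the IR module and the "answer" function are made up for illustration:

import ctypes
import llvmlite.binding as llvm

llvm.initialize()
llvm.initialize_native_target()
llvm.initialize_native_asmprinter()

# stands in for a finished py2llvm Module; str(self.llvm_module) would be
# the textual IR in the deleted code above
llvm_ir = r"""
define i32 @answer() {
  ret i32 42
}
"""
mod_ref = llvm.parse_assembly(llvm_ir)
tm = llvm.Target.from_default_triple().create_target_machine()
ee = llvm.create_mcjit_compiler(mod_ref, tm)
ee.finalize_object()

answer = ctypes.CFUNCTYPE(ctypes.c_int32)(ee.get_function_address("answer"))
assert answer() == 42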
@@ -1,5 +0,0 @@
import llvmlite_artiq.ir as ll

def is_terminated(basic_block):
    return (basic_block.instructions
            and isinstance(basic_block.instructions[-1], ll.Terminator))
@@ -1,94 +0,0 @@
from types import SimpleNamespace
from copy import copy

import llvmlite_artiq.ir as ll


class VGeneric:
    def __init__(self):
        self.llvm_value = None

    def new(self):
        r = copy(self)
        r.llvm_value = None
        return r

    def __repr__(self):
        return "<" + self.__class__.__name__ + ">"

    def same_type(self, other):
        return isinstance(other, self.__class__)

    def merge(self, other):
        if not self.same_type(other):
            raise TypeError("Incompatible types: {} and {}"
                            .format(repr(self), repr(other)))

    def auto_load(self, builder):
        if isinstance(self.llvm_value.type, ll.PointerType):
            return builder.load(self.llvm_value)
        else:
            return self.llvm_value

    def auto_store(self, builder, llvm_value):
        if self.llvm_value is None:
            self.llvm_value = llvm_value
        elif isinstance(self.llvm_value.type, ll.PointerType):
            builder.store(llvm_value, self.llvm_value)
        else:
            raise RuntimeError(
                "Attempted to set LLVM SSA value multiple times")

    def alloca(self, builder, name=""):
        if self.llvm_value is not None:
            raise RuntimeError("Attempted to alloca existing LLVM value "+name)
        self.llvm_value = builder.alloca(self.get_llvm_type(), name=name)

    def o_int(self, builder):
        return self.o_intx(32, builder)

    def o_int64(self, builder):
        return self.o_intx(64, builder)

    def o_round(self, builder):
        return self.o_roundx(32, builder)

    def o_round64(self, builder):
        return self.o_roundx(64, builder)


def _make_binary_operator(op_name):
    def op(l, r, builder):
        try:
            opf = getattr(l, "o_" + op_name)
        except AttributeError:
            result = NotImplemented
        else:
            result = opf(r, builder)
        if result is NotImplemented:
            try:
                ropf = getattr(r, "or_" + op_name)
            except AttributeError:
                result = NotImplemented
            else:
                result = ropf(l, builder)
            if result is NotImplemented:
                raise TypeError(
                    "Unsupported operand types for {}: {} and {}"
                    .format(op_name, type(l).__name__, type(r).__name__))
        return result
    return op


def _make_operators():
    d = dict()
    for op_name in ("add", "sub", "mul",
                    "truediv", "floordiv", "mod",
                    "pow", "lshift", "rshift", "xor",
                    "eq", "ne", "lt", "le", "gt", "ge"):
        d[op_name] = _make_binary_operator(op_name)
    d["and_"] = _make_binary_operator("and")
    d["or_"] = _make_binary_operator("or")
    return SimpleNamespace(**d)

operators = _make_operators()
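The operators namespace above implements Python-style binary dispatch for the value classes: try o_<op> on the left operand, fall back to the reflected or_<op> on the right, and raise TypeError if both return NotImplemented. The same pattern in isolation, without an LLVM builder (class and function names are illustrative):

def dispatch(op_name, l, r):
    # forward method first, then reflected method, mirroring _make_binary_operator
    result = getattr(l, "o_" + op_name, lambda _r: NotImplemented)(r)
    if result is NotImplemented:
        result = getattr(r, "or_" + op_name, lambda _l: NotImplemented)(l)
    if result is NotImplemented:
        raise TypeError("Unsupported operand types for {}: {} and {}"
                        .format(op_name, type(l).__name__, type(r).__name__))
    return result

class Metres:
    def __init__(self, value):
        self.value = value
    def o_add(self, other):
        return Metres(self.value + other.value) if isinstance(other, Metres) else NotImplemented

assert dispatch("add", Metres(1), Metres(2)).value == 3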
@@ -1,5 +1,5 @@
 import inspect
-import ast
+from pythonparser import parse, ast

 import llvmlite_artiq.ir as ll

@@ -18,7 +18,7 @@ def _gcd(a, b):


 def init_module(module):
-    func_def = ast.parse(inspect.getsource(_gcd)).body[0]
+    func_def = parse(inspect.getsource(_gcd)).body[0]
     function, _ = module.compile_function(func_def,
                                           {"a": VInt(64), "b": VInt(64)})
     function.linkage = "internal"
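The fractions helper is switched from the stdlib ast module to pythonparser, which the new compiler uses throughout; in both cases the entry point is parsing the function's own source, fetched with inspect, and taking the first body node. A stdlib-only sketch of that getsource-and-parse step (pythonparser's parse() is used the same way in the hunk above, but the snippet sticks to ast to stay self-contained):

import ast
import inspect

def _gcd(a, b):
    while a:
        a, b = b % a, a
    return b

func_def = ast.parse(inspect.getsource(_gcd)).body[0]
assert isinstance(func_def, ast.FunctionDef) and func_def.name == "_gcd"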
@ -0,0 +1,169 @@
|
||||||
|
import unittest
|
||||||
|
from pythonparser import parse, ast
|
||||||
|
import inspect
|
||||||
|
from fractions import Fraction
|
||||||
|
from ctypes import CFUNCTYPE, c_int, c_int32, c_int64, c_double
|
||||||
|
import struct
|
||||||
|
|
||||||
|
import llvmlite_or1k.binding as llvm
|
||||||
|
|
||||||
|
from artiq.language.core import int64
|
||||||
|
from artiq.py2llvm.infer_types import infer_function_types
|
||||||
|
from artiq.py2llvm import base_types, lists
|
||||||
|
from artiq.py2llvm.module import Module
|
||||||
|
|
||||||
|
def simplify_encode(a, b):
|
||||||
|
f = Fraction(a, b)
|
||||||
|
return f.numerator*1000 + f.denominator
|
||||||
|
|
||||||
|
|
||||||
|
def frac_arith_encode(op, a, b, c, d):
|
||||||
|
if op == 0:
|
||||||
|
f = Fraction(a, b) - Fraction(c, d)
|
||||||
|
elif op == 1:
|
||||||
|
f = Fraction(a, b) + Fraction(c, d)
|
||||||
|
elif op == 2:
|
||||||
|
f = Fraction(a, b) * Fraction(c, d)
|
||||||
|
else:
|
||||||
|
f = Fraction(a, b) / Fraction(c, d)
|
||||||
|
return f.numerator*1000 + f.denominator
|
||||||
|
|
||||||
|
|
||||||
|
def frac_arith_encode_int(op, a, b, x):
|
||||||
|
if op == 0:
|
||||||
|
f = Fraction(a, b) - x
|
||||||
|
elif op == 1:
|
||||||
|
f = Fraction(a, b) + x
|
||||||
|
elif op == 2:
|
||||||
|
f = Fraction(a, b) * x
|
||||||
|
else:
|
||||||
|
f = Fraction(a, b) / x
|
||||||
|
return f.numerator*1000 + f.denominator
|
||||||
|
|
||||||
|
|
||||||
|
def frac_arith_encode_int_rev(op, a, b, x):
|
||||||
|
if op == 0:
|
||||||
|
f = x - Fraction(a, b)
|
||||||
|
elif op == 1:
|
||||||
|
f = x + Fraction(a, b)
|
||||||
|
elif op == 2:
|
||||||
|
f = x * Fraction(a, b)
|
||||||
|
else:
|
||||||
|
f = x / Fraction(a, b)
|
||||||
|
return f.numerator*1000 + f.denominator
|
||||||
|
|
||||||
|
|
||||||
|
def frac_arith_float(op, a, b, x):
|
||||||
|
if op == 0:
|
||||||
|
return Fraction(a, b) - x
|
||||||
|
elif op == 1:
|
||||||
|
return Fraction(a, b) + x
|
||||||
|
elif op == 2:
|
||||||
|
return Fraction(a, b) * x
|
||||||
|
else:
|
||||||
|
return Fraction(a, b) / x
|
||||||
|
|
||||||
|
|
||||||
|
def frac_arith_float_rev(op, a, b, x):
|
||||||
|
if op == 0:
|
||||||
|
return x - Fraction(a, b)
|
||||||
|
elif op == 1:
|
||||||
|
return x + Fraction(a, b)
|
||||||
|
elif op == 2:
|
||||||
|
return x * Fraction(a, b)
|
||||||
|
else:
|
||||||
|
return x / Fraction(a, b)
|
||||||
|
|
||||||
|
|
||||||
|
class CodeGenCase(unittest.TestCase):
|
||||||
|
def test_frac_simplify(self):
|
||||||
|
simplify_encode_c = CompiledFunction(
|
||||||
|
simplify_encode, {"a": base_types.VInt(), "b": base_types.VInt()})
|
||||||
|
for a in _test_range():
|
||||||
|
for b in _test_range():
|
||||||
|
self.assertEqual(
|
||||||
|
simplify_encode_c(a, b), simplify_encode(a, b))
|
||||||
|
|
||||||
|
def _test_frac_arith(self, op):
|
||||||
|
frac_arith_encode_c = CompiledFunction(
|
||||||
|
frac_arith_encode, {
|
||||||
|
"op": base_types.VInt(),
|
||||||
|
"a": base_types.VInt(), "b": base_types.VInt(),
|
||||||
|
"c": base_types.VInt(), "d": base_types.VInt()})
|
||||||
|
for a in _test_range():
|
||||||
|
for b in _test_range():
|
||||||
|
for c in _test_range():
|
||||||
|
for d in _test_range():
|
||||||
|
self.assertEqual(
|
||||||
|
frac_arith_encode_c(op, a, b, c, d),
|
||||||
|
frac_arith_encode(op, a, b, c, d))
|
||||||
|
|
||||||
|
def test_frac_add(self):
|
||||||
|
self._test_frac_arith(0)
|
||||||
|
|
||||||
|
def test_frac_sub(self):
|
||||||
|
self._test_frac_arith(1)
|
||||||
|
|
||||||
|
def test_frac_mul(self):
|
||||||
|
self._test_frac_arith(2)
|
||||||
|
|
||||||
|
def test_frac_div(self):
|
||||||
|
self._test_frac_arith(3)
|
||||||
|
|
||||||
|
def _test_frac_arith_int(self, op, rev):
|
||||||
|
f = frac_arith_encode_int_rev if rev else frac_arith_encode_int
|
||||||
|
f_c = CompiledFunction(f, {
|
||||||
|
"op": base_types.VInt(),
|
||||||
|
"a": base_types.VInt(), "b": base_types.VInt(),
|
||||||
|
"x": base_types.VInt()})
|
||||||
|
for a in _test_range():
|
||||||
|
for b in _test_range():
|
||||||
|
for x in _test_range():
|
||||||
|
self.assertEqual(
|
||||||
|
f_c(op, a, b, x),
|
||||||
|
f(op, a, b, x))
|
||||||
|
|
||||||
|
def test_frac_add_int(self):
|
||||||
|
self._test_frac_arith_int(0, False)
|
||||||
|
self._test_frac_arith_int(0, True)
|
||||||
|
|
||||||
|
def test_frac_sub_int(self):
|
||||||
|
self._test_frac_arith_int(1, False)
|
||||||
|
self._test_frac_arith_int(1, True)
|
||||||
|
|
||||||
|
def test_frac_mul_int(self):
|
||||||
|
self._test_frac_arith_int(2, False)
|
||||||
|
self._test_frac_arith_int(2, True)
|
||||||
|
|
||||||
|
def test_frac_div_int(self):
|
||||||
|
self._test_frac_arith_int(3, False)
|
||||||
|
self._test_frac_arith_int(3, True)
|
||||||
|
|
||||||
|
def _test_frac_arith_float(self, op, rev):
|
||||||
|
f = frac_arith_float_rev if rev else frac_arith_float
|
||||||
|
f_c = CompiledFunction(f, {
|
||||||
|
"op": base_types.VInt(),
|
||||||
|
"a": base_types.VInt(), "b": base_types.VInt(),
|
||||||
|
"x": base_types.VFloat()})
|
||||||
|
for a in _test_range():
|
||||||
|
for b in _test_range():
|
||||||
|
for x in _test_range():
|
||||||
|
self.assertAlmostEqual(
|
||||||
|
f_c(op, a, b, x/2),
|
||||||
|
f(op, a, b, x/2))
|
||||||
|
|
||||||
|
def test_frac_add_float(self):
|
||||||
|
self._test_frac_arith_float(0, False)
|
||||||
|
self._test_frac_arith_float(0, True)
|
||||||
|
|
||||||
|
def test_frac_sub_float(self):
|
||||||
|
self._test_frac_arith_float(1, False)
|
||||||
|
self._test_frac_arith_float(1, True)
|
||||||
|
|
||||||
|
def test_frac_mul_float(self):
|
||||||
|
self._test_frac_arith_float(2, False)
|
||||||
|
self._test_frac_arith_float(2, True)
|
||||||
|
|
||||||
|
def test_frac_div_float(self):
|
||||||
|
self._test_frac_arith_float(3, False)
|
||||||
|
self._test_frac_arith_float(3, True)
|
|
@@ -0,0 +1,43 @@
    def visit_With(self, node):
        self.generic_visit(node)
        if (isinstance(node.items[0].context_expr, ast.Call)
                and node.items[0].context_expr.func.id == "watchdog"):

            idname = "__watchdog_id_" + str(self.watchdog_id_counter)
            self.watchdog_id_counter += 1

            time = ast.BinOp(left=node.items[0].context_expr.args[0],
                             op=ast.Mult(),
                             right=ast.Num(1000))
            time_int = ast.Call(
                func=ast.Name("round", ast.Load()),
                args=[time],
                keywords=[], starargs=None, kwargs=None)
            syscall_set = ast.Call(
                func=ast.Name("syscall", ast.Load()),
                args=[ast.Str("watchdog_set"), time_int],
                keywords=[], starargs=None, kwargs=None)
            stmt_set = ast.copy_location(
                ast.Assign(targets=[ast.Name(idname, ast.Store())],
                           value=syscall_set),
                node)

            syscall_clear = ast.Call(
                func=ast.Name("syscall", ast.Load()),
                args=[ast.Str("watchdog_clear"),
                      ast.Name(idname, ast.Load())],
                keywords=[], starargs=None, kwargs=None)
            stmt_clear = ast.copy_location(ast.Expr(syscall_clear), node)

            node.items[0] = ast.withitem(
                context_expr=ast.Name(id="sequential",
                                      ctx=ast.Load()),
                optional_vars=None)
            node.body = [
                stmt_set,
                ast.Try(body=node.body,
                        handlers=[],
                        orelse=[],
                        finalbody=[stmt_clear])
            ]
        return node
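The visit_With transform above rewrites "with watchdog(t):" into a watchdog_set syscall, a try/finally around the original body, and a watchdog_clear syscall, so the watchdog is always disarmed even when the body raises. What the transformed code behaves like at runtime, sketched with a host-side context manager; the syscall stub below is hypothetical and only prints instead of trapping into the kernel:

from contextlib import contextmanager

def syscall(name, *args):
    print("syscall:", name, args)   # stand-in for the kernel syscall
    return 0                        # watchdog_set would return a watchdog id

@contextmanager
def watchdog(t):
    wid = syscall("watchdog_set", round(t * 1000))   # seconds -> milliseconds, as in the AST above
    try:
        yield
    finally:
        syscall("watchdog_clear", wid)

with watchdog(0.5):
    pass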
@@ -3,10 +3,18 @@ include $(MISOC_DIRECTORY)/software/common.mak

 PYTHON ?= python3.5

-OBJECTS := isr.o flash_storage.o clock.o rtiocrg.o elf_loader.o services.o session.o log.o test_mode.o kloader.o bridge_ctl.o mailbox.o ksupport_data.o net_server.o moninj.o main.o
-OBJECTS_KSUPPORT := ksupport.o exception_jmp.o exceptions.o mailbox.o bridge.o rtio.o ttl.o dds.o
+OBJECTS := isr.o clock.o rtiocrg.o flash_storage.o mailbox.o \
+	session.o log.o moninj.o net_server.o bridge_ctl.o \
+	ksupport_data.o kloader.o test_mode.o main.o
+OBJECTS_KSUPPORT := ksupport.o artiq_personality.o mailbox.o \
+	bridge.o rtio.o ttl.o dds.o

-CFLAGS += -I$(LIBLWIP_DIRECTORY)/../lwip/src/include -I$(LIBLWIP_DIRECTORY) -I.
+CFLAGS += -I$(MISOC_DIRECTORY)/software/include/dyld \
+	-I$(LIBDYLD_DIRECTORY)/include \
+	-I$(LIBUNWIND_DIRECTORY) \
+	-I$(LIBUNWIND_DIRECTORY)/../unwinder/include \
+	-I$(LIBLWIP_DIRECTORY)/../lwip/src/include \
+	-I$(LIBLWIP_DIRECTORY)

 all: runtime.bin runtime.fbi

@@ -19,7 +27,7 @@ all: runtime.bin runtime.fbi

 runtime.elf: $(OBJECTS)
 	$(LD) $(LDFLAGS) \
-		-T $(RUNTIME_DIRECTORY)/linker.ld \
+		-T $(RUNTIME_DIRECTORY)/runtime.ld \
 		-N -o $@ \
 		../libbase/crt0-$(CPU).o \
 		$(OBJECTS) \

@@ -31,28 +39,21 @@ runtime.elf: $(OBJECTS)

 ksupport.elf: $(OBJECTS_KSUPPORT)
 	$(LD) $(LDFLAGS) \
+		--eh-frame-hdr \
 		-T $(RUNTIME_DIRECTORY)/ksupport.ld \
 		-N -o $@ \
 		../libbase/crt0-$(CPU).o \
 		$^ \
+		-L../libbase \
 		-L../libcompiler_rt \
-		-lcompiler_rt
+		-L../libunwind \
+		-L../libdyld \
+		-lbase -lcompiler_rt -lunwind -ldyld
 	@chmod -x $@

-ksupport_data.o: ksupport.bin
+ksupport_data.o: ksupport.elf
 	$(LD) -r -b binary -o $@ $<

-service_table.h: ksupport.elf $(RUNTIME_DIRECTORY)/gen_service_table.py
-	@echo " GEN " $@ && $(PYTHON) $(RUNTIME_DIRECTORY)/gen_service_table.py ksupport.elf > $@
-
-$(RUNTIME_DIRECTORY)/services.c: service_table.h
-
-services.o: $(RUNTIME_DIRECTORY)/services.c service_table.h
-	$(compile)
-
-main.o: $(RUNTIME_DIRECTORY)/main.c
-	$(compile)
-
 %.o: $(RUNTIME_DIRECTORY)/%.c
 	$(compile)

@@ -62,6 +63,6 @@ main.o: $(RUNTIME_DIRECTORY)/main.c
 clean:
 	$(RM) $(OBJECTS) $(OBJECTS_KSUPPORT)
 	$(RM) runtime.elf runtime.bin runtime.fbi .*~ *~
-	$(RM) service_table.h ksupport.elf ksupport.bin
+	$(RM) ksupport.elf ksupport.bin

-.PHONY: all clean main.o
+.PHONY: all clean
@ -0,0 +1,464 @@
|
||||||
|
#include <stdint.h>
|
||||||
|
#include <stdlib.h>
|
||||||
|
#include <string.h>
|
||||||
|
#include <stdio.h>
|
||||||
|
#include <unwind.h>
|
||||||
|
#include "artiq_personality.h"
|
||||||
|
|
||||||
|
/* Logging */
|
||||||
|
|
||||||
|
#ifndef NDEBUG
|
||||||
|
#define EH_LOG0(fmt) fprintf(stderr, "%s: " fmt "\n", __func__)
|
||||||
|
#define EH_LOG(fmt, ...) fprintf(stderr, "%s: " fmt "\n", __func__, __VA_ARGS__)
|
||||||
|
#else
|
||||||
|
#define EH_LOG0(fmt)
|
||||||
|
#define EH_LOG(fmt, ...)
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#define EH_FAIL(err) \
|
||||||
|
do { \
|
||||||
|
fprintf(stderr, "%s fatal: %s\n", __func__, err); \
|
||||||
|
abort(); \
|
||||||
|
} while(0)
|
||||||
|
|
||||||
|
#define EH_ASSERT(expr) \
|
||||||
|
if(!(expr)) EH_FAIL(#expr)
|
||||||
|
|
||||||
|
/* DWARF format handling */
|
||||||
|
|
||||||
|
enum {
|
||||||
|
DW_EH_PE_absptr = 0x00,
|
||||||
|
DW_EH_PE_uleb128 = 0x01,
|
||||||
|
DW_EH_PE_udata2 = 0x02,
|
||||||
|
DW_EH_PE_udata4 = 0x03,
|
||||||
|
DW_EH_PE_udata8 = 0x04,
|
||||||
|
DW_EH_PE_sleb128 = 0x09,
|
||||||
|
DW_EH_PE_sdata2 = 0x0A,
|
||||||
|
DW_EH_PE_sdata4 = 0x0B,
|
||||||
|
DW_EH_PE_sdata8 = 0x0C,
|
||||||
|
DW_EH_PE_pcrel = 0x10,
|
||||||
|
DW_EH_PE_textrel = 0x20,
|
||||||
|
DW_EH_PE_datarel = 0x30,
|
||||||
|
DW_EH_PE_funcrel = 0x40,
|
||||||
|
DW_EH_PE_aligned = 0x50,
|
||||||
|
DW_EH_PE_indirect = 0x80,
|
||||||
|
DW_EH_PE_omit = 0xFF
|
||||||
|
};
|
||||||
|
|
||||||
|
// Read a uleb128 encoded value and advance pointer
|
||||||
|
// See Variable Length Data in: http://dwarfstd.org/Dwarf3.pdf
|
||||||
|
static uintptr_t readULEB128(const uint8_t **data) {
|
||||||
|
uintptr_t result = 0;
|
||||||
|
uintptr_t shift = 0;
|
||||||
|
unsigned char byte;
|
||||||
|
const uint8_t *p = *data;
|
||||||
|
|
||||||
|
do {
|
||||||
|
byte = *p++;
|
||||||
|
result |= (byte & 0x7f) << shift;
|
||||||
|
shift += 7;
|
||||||
|
}
|
||||||
|
while (byte & 0x80);
|
||||||
|
|
||||||
|
*data = p;
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read a sleb128 encoded value and advance pointer
|
||||||
|
// See Variable Length Data in: http://dwarfstd.org/Dwarf3.pdf
|
||||||
|
static uintptr_t readSLEB128(const uint8_t **data) {
|
||||||
|
uintptr_t result = 0;
|
||||||
|
uintptr_t shift = 0;
|
||||||
|
unsigned char byte;
|
||||||
|
const uint8_t *p = *data;
|
||||||
|
|
||||||
|
do {
|
||||||
|
byte = *p++;
|
||||||
|
result |= (byte & 0x7f) << shift;
|
||||||
|
shift += 7;
|
||||||
|
}
|
||||||
|
while (byte & 0x80);
|
||||||
|
|
||||||
|
*data = p;
|
||||||
|
|
||||||
|
if ((byte & 0x40) && (shift < (sizeof(result) << 3))) {
|
||||||
|
result |= (~0 << shift);
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
static unsigned getEncodingSize(uint8_t Encoding) {
|
||||||
|
if (Encoding == DW_EH_PE_omit)
|
||||||
|
return 0;
|
||||||
|
|
||||||
|
switch (Encoding & 0x0F) {
|
||||||
|
case DW_EH_PE_absptr:
|
||||||
|
return sizeof(uintptr_t);
|
||||||
|
case DW_EH_PE_udata2:
|
||||||
|
return sizeof(uint16_t);
|
||||||
|
case DW_EH_PE_udata4:
|
||||||
|
return sizeof(uint32_t);
|
||||||
|
case DW_EH_PE_udata8:
|
||||||
|
return sizeof(uint64_t);
|
||||||
|
case DW_EH_PE_sdata2:
|
||||||
|
return sizeof(int16_t);
|
||||||
|
case DW_EH_PE_sdata4:
|
||||||
|
return sizeof(int32_t);
|
||||||
|
case DW_EH_PE_sdata8:
|
||||||
|
return sizeof(int64_t);
|
||||||
|
default:
|
||||||
|
// not supported
|
||||||
|
abort();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read a pointer encoded value and advance pointer
|
||||||
|
// See Variable Length Data in: http://dwarfstd.org/Dwarf3.pdf
|
||||||
|
static uintptr_t readEncodedPointer(const uint8_t **data, uint8_t encoding) {
|
||||||
|
uintptr_t result = 0;
|
||||||
|
const uint8_t *p = *data;
|
||||||
|
|
||||||
|
if (encoding == DW_EH_PE_omit)
|
||||||
|
return(result);
|
||||||
|
|
||||||
|
// first get value
|
||||||
|
switch (encoding & 0x0F) {
|
||||||
|
case DW_EH_PE_absptr:
|
||||||
|
memcpy(&result, p, sizeof(uintptr_t));
|
||||||
|
p += sizeof(uintptr_t);
|
||||||
|
break;
|
||||||
|
case DW_EH_PE_uleb128:
|
||||||
|
result = readULEB128(&p);
|
||||||
|
break;
|
||||||
|
// Note: This case has not been tested
|
||||||
|
case DW_EH_PE_sleb128:
|
||||||
|
result = readSLEB128(&p);
|
||||||
|
break;
|
||||||
|
case DW_EH_PE_udata2:
|
||||||
|
{
|
||||||
|
uint16_t valu16;
|
||||||
|
memcpy(&valu16, p, sizeof(uint16_t));
|
||||||
|
result = valu16;
|
||||||
|
}
|
||||||
|
p += sizeof(uint16_t);
|
||||||
|
break;
|
||||||
|
case DW_EH_PE_udata4:
|
||||||
|
{
|
||||||
|
uint32_t valu32;
|
||||||
|
memcpy(&valu32, p, sizeof(uint32_t));
|
||||||
|
result = valu32;
|
||||||
|
}
|
||||||
|
p += sizeof(uint32_t);
|
||||||
|
break;
|
||||||
|
case DW_EH_PE_udata8:
|
||||||
|
{
|
||||||
|
uint64_t valu64;
|
||||||
|
memcpy(&valu64, p, sizeof(uint64_t));
|
||||||
|
result = valu64;
|
||||||
|
}
|
||||||
|
p += sizeof(uint64_t);
|
||||||
|
break;
|
||||||
|
case DW_EH_PE_sdata2:
|
||||||
|
{
|
||||||
|
int16_t val16;
|
||||||
|
memcpy(&val16, p, sizeof(int16_t));
|
||||||
|
result = val16;
|
||||||
|
}
|
||||||
|
p += sizeof(int16_t);
|
||||||
|
break;
|
||||||
|
case DW_EH_PE_sdata4:
|
||||||
|
{
|
||||||
|
int32_t val32;
|
||||||
|
memcpy(&val32, p, sizeof(int32_t));
|
||||||
|
result = val32;
|
||||||
|
}
|
||||||
|
p += sizeof(int32_t);
|
||||||
|
break;
|
||||||
|
case DW_EH_PE_sdata8:
|
||||||
|
{
|
||||||
|
int64_t val64;
|
||||||
|
memcpy(&val64, p, sizeof(int64_t));
|
||||||
|
result = val64;
|
||||||
|
}
|
||||||
|
p += sizeof(int64_t);
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
// not supported
|
||||||
|
abort();
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
// then add relative offset
|
||||||
|
switch (encoding & 0x70) {
|
||||||
|
case DW_EH_PE_absptr:
|
||||||
|
// do nothing
|
||||||
|
break;
|
||||||
|
case DW_EH_PE_pcrel:
|
||||||
|
result += (uintptr_t)(*data);
|
||||||
|
break;
|
||||||
|
case DW_EH_PE_textrel:
|
||||||
|
case DW_EH_PE_datarel:
|
||||||
|
case DW_EH_PE_funcrel:
|
||||||
|
case DW_EH_PE_aligned:
|
||||||
|
default:
|
||||||
|
// not supported
|
||||||
|
abort();
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
// then apply indirection
|
||||||
|
if (encoding & DW_EH_PE_indirect) {
|
||||||
|
result = *((uintptr_t*)result);
|
||||||
|
}
|
||||||
|
|
||||||
|
*data = p;
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/* Raising */
|
||||||
|
|
||||||
|
#define ARTIQ_EXCEPTION_CLASS 0x4152545141525451LL // 'ARTQARTQ'
|
||||||
|
|
||||||
|
static void __artiq_cleanup(_Unwind_Reason_Code reason, struct _Unwind_Exception *exc);
|
||||||
|
static _Unwind_Reason_Code __artiq_uncaught_exception(
|
||||||
|
int version, _Unwind_Action actions, uint64_t exceptionClass,
|
||||||
|
struct _Unwind_Exception *exceptionObject, struct _Unwind_Context *context,
|
||||||
|
void *stop_parameter);
|
||||||
|
|
||||||
|
struct artiq_raised_exception {
|
||||||
|
struct _Unwind_Exception unwind;
|
||||||
|
struct artiq_exception artiq;
|
||||||
|
int handled;
|
||||||
|
struct artiq_backtrace_item backtrace[1024];
|
||||||
|
size_t backtrace_size;
|
||||||
|
};
|
||||||
|
|
||||||
|
static struct artiq_raised_exception inflight;
|
||||||
|
|
||||||
|
void __artiq_raise(struct artiq_exception *artiq_exn) {
|
||||||
|
EH_LOG("===> raise (name=%s, msg=%s, params=[%lld,%lld,%lld])",
|
||||||
|
artiq_exn->name, artiq_exn->message,
|
||||||
|
(long long int)artiq_exn->param[0],
|
||||||
|
(long long int)artiq_exn->param[1],
|
||||||
|
(long long int)artiq_exn->param[2]);
|
||||||
|
|
||||||
|
memmove(&inflight.artiq, artiq_exn, sizeof(struct artiq_exception));
|
||||||
|
inflight.unwind.exception_class = ARTIQ_EXCEPTION_CLASS;
|
||||||
|
inflight.unwind.exception_cleanup = &__artiq_cleanup;
|
||||||
|
inflight.handled = 0;
|
||||||
|
inflight.backtrace_size = 0;
|
||||||
|
|
||||||
|
_Unwind_Reason_Code result = _Unwind_RaiseException(&inflight.unwind);
|
||||||
|
EH_ASSERT((result == _URC_END_OF_STACK) &&
|
||||||
|
"Unexpected error during unwinding");
|
||||||
|
|
||||||
|
// If we're here, there are no handlers, only cleanups.
|
||||||
|
// Force unwinding anyway; we shall stop at nothing except the end of stack.
|
||||||
|
result = _Unwind_ForcedUnwind(&inflight.unwind, &__artiq_uncaught_exception,
|
||||||
|
NULL);
|
||||||
|
EH_FAIL("_Unwind_ForcedUnwind should not return");
|
||||||
|
}
|
||||||
|
|
||||||
|
void __artiq_reraise() {
|
||||||
|
if(inflight.handled) {
|
||||||
|
EH_LOG0("===> reraise");
|
||||||
|
__artiq_raise(&inflight.artiq);
|
||||||
|
} else {
|
||||||
|
EH_LOG0("===> resume");
|
||||||
|
EH_ASSERT((inflight.artiq.typeinfo != 0) &&
|
||||||
|
"Need an exception to reraise");
|
||||||
|
_Unwind_Resume(&inflight.unwind);
|
||||||
|
abort();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Unwinding */
|
||||||
|
|
||||||
|
// The code below does not refer to the `inflight` global.
|
||||||
|
|
||||||
|
static void __artiq_cleanup(_Unwind_Reason_Code reason, struct _Unwind_Exception *exc) {
|
||||||
|
EH_LOG0("===> cleanup");
|
||||||
|
struct artiq_raised_exception *inflight = (struct artiq_raised_exception*) exc;
|
||||||
|
// The in-flight exception is statically allocated, so we don't need to free it.
|
||||||
|
// But, we clear it to mark it as processed.
|
||||||
|
memset(&inflight->artiq, 0, sizeof(struct artiq_exception));
|
||||||
|
}
|
||||||
|
|
||||||
|
static _Unwind_Reason_Code __artiq_uncaught_exception(
|
||||||
|
int version, _Unwind_Action actions, uint64_t exceptionClass,
|
||||||
|
struct _Unwind_Exception *exceptionObject, struct _Unwind_Context *context,
|
||||||
|
void *stop_parameter) {
|
||||||
|
struct artiq_raised_exception *inflight =
|
||||||
|
(struct artiq_raised_exception*)exceptionObject;
|
||||||
|
EH_ASSERT(inflight->backtrace_size <
|
||||||
|
sizeof(inflight->backtrace) / sizeof(inflight->backtrace[0]) &&
|
||||||
|
"Out of space for backtrace");
|
||||||
|
|
||||||
|
uintptr_t pc = _Unwind_GetIP(context);
|
||||||
|
uintptr_t funcStart = _Unwind_GetRegionStart(context);
|
||||||
|
uintptr_t pcOffset = pc - funcStart;
|
||||||
|
EH_LOG("===> uncaught (pc=%p+%p)", (void*)funcStart, (void*)pcOffset);
|
||||||
|
|
||||||
|
inflight->backtrace[inflight->backtrace_size].function = funcStart;
|
||||||
|
inflight->backtrace[inflight->backtrace_size].offset = pcOffset;
|
||||||
|
++inflight->backtrace_size;
|
||||||
|
|
||||||
|
if(actions & _UA_END_OF_STACK) {
|
||||||
|
EH_LOG0("end of stack");
|
||||||
|
__artiq_terminate(&inflight->artiq, inflight->backtrace, inflight->backtrace_size);
|
||||||
|
} else {
|
||||||
|
EH_LOG0("continue");
|
||||||
|
return _URC_NO_REASON;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
_Unwind_Reason_Code __artiq_personality(
|
||||||
|
int version, _Unwind_Action actions, uint64_t exceptionClass,
|
||||||
|
struct _Unwind_Exception *exceptionObject, struct _Unwind_Context *context);
|
||||||
|
_Unwind_Reason_Code __artiq_personality(
|
||||||
|
int version, _Unwind_Action actions, uint64_t exceptionClass,
|
||||||
|
struct _Unwind_Exception *exceptionObject, struct _Unwind_Context *context) {
|
||||||
|
EH_LOG("===> entry (actions =%s%s%s%s; class=%08lx; object=%p, context=%p)",
|
||||||
|
(actions & _UA_SEARCH_PHASE ? " search" : ""),
|
||||||
|
(actions & _UA_CLEANUP_PHASE ? " cleanup" : ""),
|
||||||
|
(actions & _UA_HANDLER_FRAME ? " handler" : ""),
|
||||||
|
(actions & _UA_FORCE_UNWIND ? " force-unwind" : ""),
|
||||||
|
exceptionClass, exceptionObject, context);
|
||||||
|
EH_ASSERT((exceptionClass == ARTIQ_EXCEPTION_CLASS) &&
|
||||||
|
"Foreign exceptions are not supported");
|
||||||
|
|
||||||
|
struct artiq_raised_exception *inflight =
|
||||||
|
(struct artiq_raised_exception*)exceptionObject;
|
||||||
|
EH_LOG("=> exception name=%s",
|
||||||
|
inflight->artiq.name);
|
||||||
|
|
||||||
|
// Get a pointer to LSDA. If there's no LSDA, this function doesn't
|
||||||
|
// actually handle any exceptions.
|
||||||
|
const uint8_t *lsda = (const uint8_t*) _Unwind_GetLanguageSpecificData(context);
|
||||||
|
if(lsda == NULL)
|
||||||
|
return _URC_CONTINUE_UNWIND;
|
||||||
|
|
||||||
|
EH_LOG("lsda=%p", lsda);
|
||||||
|
|
||||||
|
// Get the current instruction pointer and offset it before next
|
||||||
|
// instruction in the current frame which threw the exception.
|
||||||
|
uintptr_t pc = _Unwind_GetIP(context) - 1;
|
||||||
|
|
||||||
|
// Get beginning of the current frame's code.
|
||||||
|
uintptr_t funcStart = _Unwind_GetRegionStart(context);
|
||||||
|
uintptr_t pcOffset = pc - funcStart;
|
||||||
|
|
||||||
|
EH_LOG("=> pc=%p (%p+%p)", (void*)pc, (void*)funcStart, (void*)pcOffset);
|
||||||
|
|
||||||
|
// Parse LSDA header.
|
||||||
|
uint8_t lpStartEncoding = *lsda++;
|
||||||
|
if (lpStartEncoding != DW_EH_PE_omit) {
|
||||||
|
readEncodedPointer(&lsda, lpStartEncoding);
|
||||||
|
}
|
||||||
|
|
||||||
|
uint8_t ttypeEncoding = *lsda++;
|
||||||
|
const uint8_t *classInfo = NULL;
|
||||||
|
if (ttypeEncoding != DW_EH_PE_omit) {
|
||||||
|
// Calculate type info locations in emitted dwarf code which
|
||||||
|
// were flagged by type info arguments to llvm.eh.selector
|
||||||
|
// intrinsic
|
||||||
|
uintptr_t classInfoOffset = readULEB128(&lsda);
|
||||||
|
classInfo = lsda + classInfoOffset;
|
||||||
|
EH_LOG("classInfo=%p", classInfo);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Walk call-site table looking for range that includes current PC.
|
||||||
|
uint8_t callSiteEncoding = *lsda++;
|
||||||
|
uint32_t callSiteTableLength = readULEB128(&lsda);
|
||||||
|
const uint8_t *callSiteTableStart = lsda;
|
||||||
|
const uint8_t *callSiteTableEnd = callSiteTableStart + callSiteTableLength;
|
||||||
|
const uint8_t *actionTableStart = callSiteTableEnd;
|
||||||
|
const uint8_t *callSitePtr = callSiteTableStart;
|
||||||
|
|
||||||
|
while(callSitePtr < callSiteTableEnd) {
|
||||||
|
uintptr_t start = readEncodedPointer(&callSitePtr,
|
||||||
|
callSiteEncoding);
|
||||||
|
uintptr_t length = readEncodedPointer(&callSitePtr,
|
||||||
|
callSiteEncoding);
|
||||||
|
uintptr_t landingPad = readEncodedPointer(&callSitePtr,
|
||||||
|
callSiteEncoding);
|
||||||
|
uintptr_t actionValue = readULEB128(&callSitePtr);
|
||||||
|
|
||||||
|
EH_LOG("call site (start=+%p, len=%d, landingPad=+%p, actionValue=%d)",
|
||||||
|
(void*)start, (int)length, (void*)landingPad, (int)actionValue);
|
||||||
|
|
||||||
|
if(landingPad == 0) {
|
||||||
|
EH_LOG0("no landing pad, skipping");
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if((start <= pcOffset) && (pcOffset < (start + length))) {
|
||||||
|
EH_LOG0("=> call site matches pc");
|
||||||
|
|
||||||
|
int exceptionMatched = 0;
|
||||||
|
if(actionValue) {
|
||||||
|
const uint8_t *actionEntry = actionTableStart + (actionValue - 1);
|
||||||
|
EH_LOG("actionEntry=%p", actionEntry);
|
||||||
|
|
||||||
|
for(;;) {
|
||||||
|
// Each emitted DWARF action corresponds to a 2 tuple of
|
||||||
|
// type info address offset, and action offset to the next
|
||||||
|
// emitted action.
|
||||||
|
intptr_t typeInfoOffset = readSLEB128(&actionEntry);
|
||||||
|
const uint8_t *tempActionEntry = actionEntry;
|
||||||
|
intptr_t actionOffset = readSLEB128(&tempActionEntry);
|
||||||
|
EH_LOG("typeInfoOffset=%p actionOffset=%p",
|
||||||
|
(void*)typeInfoOffset, (void*)actionOffset);
|
||||||
|
EH_ASSERT((typeInfoOffset >= 0) && "Filter clauses are not supported");
|
||||||
|
|
||||||
|
unsigned encodingSize = getEncodingSize(ttypeEncoding);
|
||||||
|
const uint8_t *typeInfoPtrPtr = classInfo - typeInfoOffset * encodingSize;
|
||||||
|
uintptr_t typeInfoPtr = readEncodedPointer(&typeInfoPtrPtr, ttypeEncoding);
|
||||||
|
EH_LOG("encodingSize=%u typeInfoPtrPtr=%p typeInfoPtr=%p",
|
||||||
|
encodingSize, typeInfoPtrPtr, (void*)typeInfoPtr);
|
||||||
|
EH_LOG("typeInfo=%s", (char*)typeInfoPtr);
|
||||||
|
|
||||||
|
if(typeInfoPtr == 0 || inflight->artiq.typeinfo == typeInfoPtr) {
|
||||||
|
EH_LOG0("matching action found");
|
||||||
|
exceptionMatched = 1;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!actionOffset)
|
||||||
|
break;
|
||||||
|
|
||||||
|
actionEntry += actionOffset;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if(!(actions & _UA_SEARCH_PHASE)) {
|
||||||
|
EH_LOG0("=> jumping to landing pad");
|
||||||
|
|
||||||
|
if(actions & _UA_HANDLER_FRAME)
|
||||||
|
inflight->handled = 1;
|
||||||
|
|
||||||
|
_Unwind_SetGR(context, __builtin_eh_return_data_regno(0),
|
||||||
|
(uintptr_t)exceptionObject);
|
||||||
|
_Unwind_SetGR(context, __builtin_eh_return_data_regno(1),
|
||||||
|
(uintptr_t)&inflight->artiq);
|
||||||
|
_Unwind_SetIP(context, funcStart + landingPad);
|
||||||
|
|
||||||
|
return _URC_INSTALL_CONTEXT;
|
||||||
|
} else if(exceptionMatched) {
|
||||||
|
EH_LOG0("=> handler found");
|
||||||
|
|
||||||
|
return _URC_HANDLER_FOUND;
|
||||||
|
} else {
|
||||||
|
EH_LOG0("=> handler not found");
|
||||||
|
|
||||||
|
return _URC_CONTINUE_UNWIND;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return _URC_CONTINUE_UNWIND;
|
||||||
|
}
|
|
@@ -0,0 +1,59 @@
+#ifndef __ARTIQ_PERSONALITY_H
+#define __ARTIQ_PERSONALITY_H
+
+#include <stdint.h>
+#include <stddef.h>
+
+struct artiq_exception {
+    union {
+        uintptr_t typeinfo;
+        const char *name;
+    };
+    const char *file;
+    int32_t line;
+    int32_t column;
+    const char *function;
+    const char *message;
+    int64_t param[3];
+};
+
+struct artiq_backtrace_item {
+    intptr_t function;
+    intptr_t offset;
+};
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* Provided by the runtime */
+void __artiq_raise(struct artiq_exception *artiq_exn)
+        __attribute__((noreturn));
+void __artiq_reraise(void)
+        __attribute__((noreturn));
+
+#define artiq_raise_from_c(exnname, exnmsg, exnparam0, exnparam1, exnparam2) \
+    do { \
+        struct artiq_exception exn = { \
+            .name = exnname, \
+            .message = exnmsg, \
+            .param = { exnparam0, exnparam1, exnparam2 }, \
+            .file = __FILE__, \
+            .line = __LINE__, \
+            .column = -1, \
+            .function = __func__, \
+        }; \
+        __artiq_raise(&exn); \
+    } while(0)
+
+/* Called by the runtime */
+void __artiq_terminate(struct artiq_exception *artiq_exn,
+                       struct artiq_backtrace_item *backtrace,
+                       size_t backtrace_size)
+        __attribute__((noreturn));
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* __ARTIQ_PERSONALITY_H */
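This header lets plain C runtime code raise ARTIQ exceptions: artiq_raise_from_c builds an artiq_exception with __FILE__, __LINE__ and __func__ filled in via designated initializers and hands it to __artiq_raise. The same pattern can be exercised off-target; in the sketch below the raise function is a printing stub and every name other than the struct layout is an assumption for illustration only.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Same layout as struct artiq_exception in the header above. */
struct artiq_exception {
    union {
        uintptr_t typeinfo;
        const char *name;
    };
    const char *file;
    int32_t line;
    int32_t column;
    const char *function;
    const char *message;
    int64_t param[3];
};

/* Stand-in for the runtime's __artiq_raise(): just report and abort. */
static void example_raise(struct artiq_exception *exn)
{
    fprintf(stderr, "%s(%lld, %lld, %lld) at %s:%d, in %s(): %s\n",
            exn->name,
            (long long)exn->param[0], (long long)exn->param[1],
            (long long)exn->param[2],
            exn->file, (int)exn->line, exn->function, exn->message);
    exit(1);
}

/* Same shape as artiq_raise_from_c, but pointed at the stub above. */
#define example_raise_from_c(exnname, exnmsg, p0, p1, p2) \
    do { \
        struct artiq_exception exn = { \
            .name = exnname, \
            .message = exnmsg, \
            .param = { p0, p1, p2 }, \
            .file = __FILE__, \
            .line = __LINE__, \
            .column = -1, \
            .function = __func__, \
        }; \
        example_raise(&exn); \
    } while(0)

int main(void)
{
    example_raise_from_c("DDSBatchError", "DDS batch error", 0, 0, 0);
    return 0;
}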
@@ -38,7 +38,7 @@ static void send_ready(void)
     struct msg_base msg;
 
     msg.type = MESSAGE_TYPE_BRG_READY;
     mailbox_send_and_wait(&msg);
 }
 
 void bridge_main(void)
@@ -1,7 +1,7 @@
 #include <generated/csr.h>
 #include <stdio.h>
 
-#include "exceptions.h"
+#include "artiq_personality.h"
 #include "rtio.h"
 #include "log.h"
 #include "dds.h"
@@ -177,7 +177,7 @@ static struct dds_set_params batch[DDS_MAX_BATCH];
 void dds_batch_enter(long long int timestamp)
 {
     if(batch_mode)
-        exception_raise(EID_DDS_BATCH_ERROR);
+        artiq_raise_from_c("DDSBatchError", "DDS batch error", 0, 0, 0);
     batch_mode = 1;
     batch_count = 0;
     batch_ref_time = timestamp;
@@ -189,7 +189,7 @@ void dds_batch_exit(void)
     int i;
 
     if(!batch_mode)
-        exception_raise(EID_DDS_BATCH_ERROR);
+        artiq_raise_from_c("DDSBatchError", "DDS batch error", 0, 0, 0);
     rtio_chan_sel_write(RTIO_DDS_CHANNEL);
     /* + FUD time */
     now = batch_ref_time - batch_count*(DURATION_PROGRAM + DURATION_WRITE);
@@ -207,7 +207,7 @@ void dds_set(long long int timestamp, int channel,
 {
     if(batch_mode) {
         if(batch_count >= DDS_MAX_BATCH)
-            exception_raise(EID_DDS_BATCH_ERROR);
+            artiq_raise_from_c("DDSBatchError", "DDS batch error", 0, 0, 0);
         /* timestamp parameter ignored (determined by batch) */
         batch[batch_count].channel = channel;
         batch[batch_count].ftw = ftw;
@@ -1,240 +0,0 @@
-#include <string.h>
-
-#include "log.h"
-#include "elf_loader.h"
-
-#define EI_NIDENT 16
-
-struct elf32_ehdr {
-    unsigned char ident[EI_NIDENT];  /* ident bytes */
-    unsigned short type;             /* file type */
-    unsigned short machine;          /* target machine */
-    unsigned int version;            /* file version */
-    unsigned int entry;              /* start address */
-    unsigned int phoff;              /* phdr file offset */
-    unsigned int shoff;              /* shdr file offset */
-    unsigned int flags;              /* file flags */
-    unsigned short ehsize;           /* sizeof ehdr */
-    unsigned short phentsize;        /* sizeof phdr */
-    unsigned short phnum;            /* number phdrs */
-    unsigned short shentsize;        /* sizeof shdr */
-    unsigned short shnum;            /* number shdrs */
-    unsigned short shstrndx;         /* shdr string index */
-} __attribute__((packed));
-
-static const unsigned char elf_magic_header[] = {
-    0x7f, 0x45, 0x4c, 0x46,  /* 0x7f, 'E', 'L', 'F' */
-    0x01,                    /* Only 32-bit objects. */
-    0x02,                    /* Only big-endian. */
-    0x01,                    /* Only ELF version 1. */
-};
-
-#define ET_NONE 0  /* Unknown type. */
-#define ET_REL  1  /* Relocatable. */
-#define ET_EXEC 2  /* Executable. */
-#define ET_DYN  3  /* Shared object. */
-#define ET_CORE 4  /* Core file. */
-
-#define EM_OR1K 0x005c
-
-struct elf32_shdr {
-    unsigned int name;       /* section name */
-    unsigned int type;       /* SHT_... */
-    unsigned int flags;      /* SHF_... */
-    unsigned int addr;       /* virtual address */
-    unsigned int offset;     /* file offset */
-    unsigned int size;       /* section size */
-    unsigned int link;       /* misc info */
-    unsigned int info;       /* misc info */
-    unsigned int addralign;  /* memory alignment */
-    unsigned int entsize;    /* entry size if table */
-} __attribute__((packed));
-
-struct elf32_name {
-    char name[12];
-} __attribute__((packed));
-
-struct elf32_rela {
-    unsigned int offset;  /* Location to be relocated. */
-    unsigned int info;    /* Relocation type and symbol index. */
-    int addend;           /* Addend. */
-} __attribute__((packed));
-
-#define ELF32_R_SYM(info) ((info) >> 8)
-#define ELF32_R_TYPE(info) ((unsigned char)(info))
-
-#define R_OR1K_INSN_REL_26 6
-
-struct elf32_sym {
-    unsigned int name;     /* String table index of name. */
-    unsigned int value;    /* Symbol value. */
-    unsigned int size;     /* Size of associated object. */
-    unsigned char info;    /* Type and binding information. */
-    unsigned char other;   /* Reserved (not used). */
-    unsigned short shndx;  /* Section index of symbol. */
-} __attribute__((packed));
-
-#define STT_NOTYPE  0
-#define STT_OBJECT  1
-#define STT_FUNC    2
-#define STT_SECTION 3
-#define STT_FILE    4
-
-#define ELF32_ST_TYPE(info) ((info) & 0x0f)
-
-
-#define SANITIZE_OFFSET_SIZE(offset, size) \
-    if(offset > 0x10000000) { \
-        log("Incorrect offset in ELF data"); \
-        return 0; \
-    } \
-    if((offset + size) > elf_length) { \
-        log("Attempted to access past the end of ELF data"); \
-        return 0; \
-    }
-
-#define GET_POINTER_SAFE(target, target_type, offset) \
-    SANITIZE_OFFSET_SIZE(offset, sizeof(target_type)); \
-    target = (target_type *)((char *)elf_data + offset)
-
-void *find_symbol(const struct symbol *symbols, const char *name)
-{
-    int i;
-
-    i = 0;
-    while((symbols[i].name != NULL) && (strcmp(symbols[i].name, name) != 0))
-        i++;
-    return symbols[i].target;
-}
-
-static int fixup(void *dest, int dest_length, struct elf32_rela *rela, void *target)
-{
-    int type, offset;
-    unsigned int *_dest = dest;
-    unsigned int *_target = target;
-
-    type = ELF32_R_TYPE(rela->info);
-    offset = rela->offset/4;
-    if(type == R_OR1K_INSN_REL_26) {
-        int val;
-
-        val = _target - (_dest + offset);
-        _dest[offset] = (_dest[offset] & 0xfc000000) | (val & 0x03ffffff);
-    } else
-        log("Unsupported relocation type: %d", type);
-    return 1;
-}
-
-int load_elf(symbol_resolver resolver, symbol_callback callback, void *elf_data, int elf_length, void *dest, int dest_length)
-{
-    struct elf32_ehdr *ehdr;
-    struct elf32_shdr *strtable;
-    unsigned int shdrptr;
-    int i;
-
-    unsigned int textoff, textsize;
-    unsigned int textrelaoff, textrelasize;
-    unsigned int symtaboff, symtabsize;
-    unsigned int strtaboff, strtabsize;
-
-
-    /* validate ELF */
-    GET_POINTER_SAFE(ehdr, struct elf32_ehdr, 0);
-    if(memcmp(ehdr->ident, elf_magic_header, sizeof(elf_magic_header)) != 0) {
-        log("Incorrect ELF header");
-        return 0;
-    }
-    if(ehdr->type != ET_REL) {
-        log("ELF is not relocatable");
-        return 0;
-    }
-    if(ehdr->machine != EM_OR1K) {
-        log("ELF is for a different machine");
-        return 0;
-    }
-
-    /* extract section info */
-    GET_POINTER_SAFE(strtable, struct elf32_shdr, ehdr->shoff + ehdr->shentsize*ehdr->shstrndx);
-    textoff = textsize = 0;
-    textrelaoff = textrelasize = 0;
-    symtaboff = symtabsize = 0;
-    strtaboff = strtabsize = 0;
-    shdrptr = ehdr->shoff;
-    for(i=0;i<ehdr->shnum;i++) {
-        struct elf32_shdr *shdr;
-        struct elf32_name *name;
-
-        GET_POINTER_SAFE(shdr, struct elf32_shdr, shdrptr);
-        GET_POINTER_SAFE(name, struct elf32_name, strtable->offset + shdr->name);
-
-        if(strncmp(name->name, ".text", 5) == 0) {
-            textoff = shdr->offset;
-            textsize = shdr->size;
-        } else if(strncmp(name->name, ".rela.text", 10) == 0) {
-            textrelaoff = shdr->offset;
-            textrelasize = shdr->size;
-        } else if(strncmp(name->name, ".symtab", 7) == 0) {
-            symtaboff = shdr->offset;
-            symtabsize = shdr->size;
-        } else if(strncmp(name->name, ".strtab", 7) == 0) {
-            strtaboff = shdr->offset;
-            strtabsize = shdr->size;
-        }
-
-        shdrptr += ehdr->shentsize;
-    }
-    SANITIZE_OFFSET_SIZE(textoff, textsize);
-    SANITIZE_OFFSET_SIZE(textrelaoff, textrelasize);
-    SANITIZE_OFFSET_SIZE(symtaboff, symtabsize);
-    SANITIZE_OFFSET_SIZE(strtaboff, strtabsize);
-
-    /* load .text section */
-    if(textsize > dest_length) {
-        log(".text section is too large");
-        return 0;
-    }
-    memcpy(dest, (char *)elf_data + textoff, textsize);
-
-    /* process .text relocations */
-    for(i=0;i<textrelasize;i+=sizeof(struct elf32_rela)) {
-        struct elf32_rela *rela;
-        struct elf32_sym *sym;
-
-        GET_POINTER_SAFE(rela, struct elf32_rela, textrelaoff + i);
-        GET_POINTER_SAFE(sym, struct elf32_sym, symtaboff + sizeof(struct elf32_sym)*ELF32_R_SYM(rela->info));
-        if(sym->name != 0) {
-            char *name;
-            void *target;
-
-            name = (char *)elf_data + strtaboff + sym->name;
-            target = resolver(name);
-            if(target == NULL) {
-                log("Undefined symbol: %s", name);
-                return 0;
-            }
-            if(!fixup(dest, dest_length, rela, target))
-                return 0;
-        } else {
-            log("Unsupported relocation");
-            return 0;
-        }
-    }
-
-    /* list provided functions via callback */
-    for(i=0;i<symtabsize;i+=sizeof(struct elf32_sym)) {
-        struct elf32_sym *sym;
-
-        GET_POINTER_SAFE(sym, struct elf32_sym, symtaboff + i);
-        if((ELF32_ST_TYPE(sym->info) == STT_FUNC) && (sym->name != 0)) {
-            char *name;
-            void *target;
-
-            name = (char *)elf_data + strtaboff + sym->name;
-            target = (char *)dest + sym->value;
-            if(!callback(name, target))
-                return 0;
-        }
-    }
-
-    return 1;
-}
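The deleted loader's fixup() handles R_OR1K_INSN_REL_26 relocations by computing the word distance between the relocation site and the target and splicing it into the low 26 bits of the instruction word, keeping the 6-bit opcode. The arithmetic can be shown on its own; the sketch below patches a fake instruction word in a local array and is only tied to the 6/26-bit split, not to any real OpenRISC encoding details.

#include <stdio.h>

/* Patch the low 26 bits of insn[offset] with the word distance to target,
 * keeping the top 6 opcode bits - the same splice the deleted fixup() did. */
static void patch_rel26(unsigned int *insn, int offset, unsigned int *target)
{
    int val = target - (insn + offset);   /* distance in 32-bit words */
    insn[offset] = (insn[offset] & 0xfc000000) | (val & 0x03ffffff);
}

int main(void)
{
    unsigned int code[8] = { 0 };

    code[1] = 0x04000000;           /* pretend opcode in the top 6 bits */
    patch_rel26(code, 1, &code[5]);  /* "branch" forward by 4 words */

    printf("patched word: 0x%08x (displacement %u)\n",
           code[1], code[1] & 0x03ffffff);
    return 0;
}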
@@ -1,16 +0,0 @@
-#ifndef __ELF_LOADER_H
-#define __ELF_LOADER_H
-
-struct symbol {
-    char *name;
-    void *target;
-};
-
-typedef void * (*symbol_resolver)(const char *);
-typedef int (*symbol_callback)(const char *, void *);
-
-void *find_symbol(const struct symbol *symbols, const char *name);
-/* elf_data must be aligned on a 32-bit boundary */
-int load_elf(symbol_resolver resolver, symbol_callback callback, void *elf_data, int elf_length, void *dest, int dest_length);
-
-#endif /* __ELF_LOADER_H */
@@ -1,37 +0,0 @@
-.global exception_setjmp
-.type exception_setjmp, @function
-exception_setjmp:
-    l.sw 0(r3), r1
-    l.sw 4(r3), r2
-    l.sw 8(r3), r9
-    l.sw 12(r3), r10
-    l.sw 16(r3), r14
-    l.sw 20(r3), r16
-    l.sw 24(r3), r18
-    l.sw 28(r3), r20
-    l.sw 32(r3), r22
-    l.sw 36(r3), r24
-    l.sw 40(r3), r26
-    l.sw 44(r3), r28
-    l.sw 48(r3), r30
-    l.jr r9
-    l.ori r11, r0, 0
-
-.global exception_longjmp
-.type exception_longjmp, @function
-exception_longjmp:
-    l.lwz r1, 0(r3)
-    l.lwz r2, 4(r3)
-    l.lwz r9, 8(r3)
-    l.lwz r10, 12(r3)
-    l.lwz r14, 16(r3)
-    l.lwz r16, 20(r3)
-    l.lwz r18, 24(r3)
-    l.lwz r20, 28(r3)
-    l.lwz r22, 32(r3)
-    l.lwz r24, 36(r3)
-    l.lwz r26, 40(r3)
-    l.lwz r28, 44(r3)
-    l.lwz r30, 48(r3)
-    l.jr r9
-    l.ori r11, r0, 1
@@ -1,58 +0,0 @@
-#include <generated/csr.h>
-
-#include "log.h"
-#include "exceptions.h"
-
-#define MAX_EXCEPTION_CONTEXTS 64
-
-struct exception_context {
-    void *jb[13];
-};
-
-static struct exception_context exception_contexts[MAX_EXCEPTION_CONTEXTS];
-static int ec_top;
-static int stored_id;
-static long long int stored_params[3];
-
-void *exception_push(void)
-{
-    if(ec_top >= MAX_EXCEPTION_CONTEXTS)
-        exception_raise(EID_INTERNAL_ERROR);
-    return exception_contexts[ec_top++].jb;
-}
-
-void exception_pop(int levels)
-{
-    ec_top -= levels;
-}
-
-int exception_getid(long long int *eparams)
-{
-    int i;
-
-    if(eparams)
-        for(i=0;i<3;i++)
-            eparams[i] = stored_params[i];
-    return stored_id;
-}
-
-void exception_raise(int id)
-{
-    exception_raise_params(id, 0, 0, 0);
-}
-
-void exception_raise_params(int id,
-    long long int p0, long long int p1,
-    long long int p2)
-{
-    if(ec_top > 0) {
-        stored_id = id;
-        stored_params[0] = p0;
-        stored_params[1] = p1;
-        stored_params[2] = p2;
-        exception_longjmp(exception_contexts[--ec_top].jb);
-    } else {
-        log("ERROR: uncaught exception, ID=%d\n", id);
-        while(1);
-    }
-}
@@ -1,26 +0,0 @@
-#ifndef __EXCEPTIONS_H
-#define __EXCEPTIONS_H
-
-enum {
-    EID_NONE = 0,
-    EID_INTERNAL_ERROR = 1,
-    EID_RPC_EXCEPTION = 2,
-    EID_RTIO_UNDERFLOW = 3,
-    EID_RTIO_SEQUENCE_ERROR = 4,
-    EID_RTIO_COLLISION_ERROR = 5,
-    EID_RTIO_OVERFLOW = 6,
-    EID_DDS_BATCH_ERROR = 7
-};
-
-int exception_setjmp(void *jb) __attribute__((returns_twice));
-void exception_longjmp(void *jb) __attribute__((noreturn));
-
-void *exception_push(void);
-void exception_pop(int levels);
-int exception_getid(long long int *eparams);
-void exception_raise(int id) __attribute__((noreturn));
-void exception_raise_params(int id,
-    long long int p0, long long int p1,
-    long long int p2) __attribute__((noreturn));
-
-#endif /* __EXCEPTIONS_H */
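The two deleted files above implement the old exception scheme that this commit retires: a stack of saved register contexts, exception_push()/exception_setjmp() to open a handler, and exception_raise() to longjmp back to the innermost one with a stored error id. The same control flow can be sketched with the portable setjmp.h primitives; the stack depth and the error id below are arbitrary example values, not the runtime's.

#include <setjmp.h>
#include <stdio.h>

#define MAX_CONTEXTS 8

static jmp_buf contexts[MAX_CONTEXTS];
static int ctx_top;
static int stored_id;

/* Open a handler scope; returns the jump buffer to pass to setjmp(). */
static jmp_buf *exception_push(void)
{
    return &contexts[ctx_top++];
}

static void exception_pop(int levels)
{
    ctx_top -= levels;
}

/* Unwind to the innermost handler, carrying an error id. */
static void exception_raise(int id)
{
    stored_id = id;
    longjmp(contexts[--ctx_top], 1);
}

static void do_work(void)
{
    exception_raise(3);   /* e.g. an "RTIO underflow"-style error id */
}

int main(void)
{
    jmp_buf *jb = exception_push();
    if(setjmp(*jb)) {
        printf("caught exception id %d\n", stored_id);
    } else {
        do_work();
        exception_pop(1);
        printf("finished without exception\n");
    }
    return 0;
}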
@@ -115,7 +115,8 @@ static int is_empty(struct record *record)
     return record->value_len == 0;
 }
 
-static int key_exists(char *buff, char *key, char *end, char accept_empty, struct record *found_record)
+static int key_exists(char *buff, const char *key, char *end, char accept_empty,
+                      struct record *found_record)
 {
     struct iter_state is;
     struct record iter_record;
@@ -170,7 +171,7 @@ static char check_for_empty_records(char *buff)
     return 0;
 }
 
-static unsigned int try_to_flush_duplicates(char *new_key, unsigned int buf_len)
+static unsigned int try_to_flush_duplicates(const char *new_key, unsigned int buf_len)
 {
     unsigned int key_size, new_record_size, ret = 0, can_rollback = 0;
     struct record record, previous_record;
@@ -210,7 +211,8 @@ static unsigned int try_to_flush_duplicates(char *new_key, unsigned int buf_len)
     return ret;
 }
 
-static void write_at_offset(char *key, void *buffer, int buf_len, unsigned int sector_offset)
+static void write_at_offset(const char *key, const void *buffer,
+                            int buf_len, unsigned int sector_offset)
 {
     int key_len = strlen(key) + 1;
     unsigned int record_size = key_len + buf_len + sizeof(record_size);
@@ -223,7 +225,7 @@ static void write_at_offset(char *key, void *buffer, int buf_len, unsigned int sector_offset)
 }
 
 
-int fs_write(char *key, void *buffer, unsigned int buf_len)
+int fs_write(const char *key, const void *buffer, unsigned int buf_len)
 {
     struct record record;
     unsigned int key_size = strlen(key) + 1;
@@ -269,7 +271,7 @@ void fs_erase(void)
     flush_cpu_dcache();
 }
 
-unsigned int fs_read(char *key, void *buffer, unsigned int buf_len, unsigned int *remain)
+unsigned int fs_read(const char *key, void *buffer, unsigned int buf_len, unsigned int *remain)
 {
     unsigned int read_length = 0;
     struct iter_state is;
@@ -295,7 +297,7 @@ unsigned int fs_read(char *key, void *buffer, unsigned int buf_len, unsigned int *remain)
     return read_length;
 }
 
-void fs_remove(char *key)
+void fs_remove(const char *key)
 {
     fs_write(key, NULL, 0);
 }
@@ -5,9 +5,9 @@
 #ifndef __FLASH_STORAGE_H
 #define __FLASH_STORAGE_H
 
-void fs_remove(char *key);
+void fs_remove(const char *key);
 void fs_erase(void);
-int fs_write(char *key, void *buffer, unsigned int buflen);
-unsigned int fs_read(char *key, void *buffer, unsigned int buflen, unsigned int *remain);
+int fs_write(const char *key, const void *buffer, unsigned int buflen);
+unsigned int fs_read(const char *key, void *buffer, unsigned int buflen, unsigned int *remain);
 
 #endif /* __FLASH_STORAGE_H */
@@ -1,65 +0,0 @@
-#!/usr/bin/env python3.5
-
-import sys
-
-from elftools.elf.elffile import ELFFile
-
-
-services = [
-    ("syscalls", [
-        ("now_init", "now_init"),
-        ("now_save", "now_save"),
-
-        ("watchdog_set", "watchdog_set"),
-        ("watchdog_clear", "watchdog_clear"),
-
-        ("rpc", "rpc"),
-
-        ("rtio_get_counter", "rtio_get_counter"),
-
-        ("ttl_set_o", "ttl_set_o"),
-        ("ttl_set_oe", "ttl_set_oe"),
-        ("ttl_set_sensitivity", "ttl_set_sensitivity"),
-        ("ttl_get", "ttl_get"),
-        ("ttl_clock_set", "ttl_clock_set"),
-
-        ("dds_init", "dds_init"),
-        ("dds_batch_enter", "dds_batch_enter"),
-        ("dds_batch_exit", "dds_batch_exit"),
-        ("dds_set", "dds_set"),
-    ]),
-
-    ("eh", [
-        ("setjmp", "exception_setjmp"),
-        ("push", "exception_push"),
-        ("pop", "exception_pop"),
-        ("getid", "exception_getid"),
-        ("raise", "exception_raise"),
-    ])
-]
-
-
-def print_service_table(ksupport_elf_filename):
-    with open(ksupport_elf_filename, "rb") as f:
-        elf = ELFFile(f)
-        symtab = elf.get_section_by_name(b".symtab")
-        symbols = {symbol.name: symbol.entry.st_value
-                   for symbol in symtab.iter_symbols()}
-    for name, contents in services:
-        print("static const struct symbol {}[] = {{".format(name))
-        for name, value in contents:
-            print(" {{\"{}\", (void *)0x{:08x}}},"
-                  .format(name, symbols[bytes(value, "ascii")]))
-        print(" {NULL, NULL}")
-        print("};")
-
-
-def main():
-    if len(sys.argv) == 2:
-        print_service_table(sys.argv[1])
-    else:
-        print("Incorrect number of command line arguments")
-        sys.exit(1)
-
-if __name__ == "__main__":
-    main()
@@ -1,126 +1,109 @@
 #include <string.h>
 #include <generated/csr.h>
 
+#include <dyld.h>
+
+#include "kloader.h"
 #include "log.h"
 #include "clock.h"
 #include "flash_storage.h"
 #include "mailbox.h"
 #include "messages.h"
-#include "elf_loader.h"
-#include "services.h"
-#include "kloader.h"
 
-static struct symbol symtab[128];
-static int _symtab_count;
-static char _symtab_strings[128*16];
-static char *_symtab_strptr;
-
-static void symtab_init(void)
+static void start_kernel_cpu(struct msg_load_request *msg)
 {
-    memset(symtab, 0, sizeof(symtab));
-    _symtab_count = 0;
-    _symtab_strptr = _symtab_strings;
-}
+    // Stop kernel CPU before messing with its code.
+    kernel_cpu_reset_write(1);
 
-static int symtab_add(const char *name, void *target)
-{
-    if(_symtab_count >= sizeof(symtab)/sizeof(symtab[0])) {
-        log("Too many provided symbols in object");
-        symtab_init();
-        return 0;
-    }
-    symtab[_symtab_count].name = _symtab_strptr;
-    symtab[_symtab_count].target = target;
-    _symtab_count++;
+    // Load kernel support code.
+    extern void _binary_ksupport_elf_start, _binary_ksupport_elf_end;
+    memcpy((void *)(KERNELCPU_EXEC_ADDRESS - KSUPPORT_HEADER_SIZE),
+           &_binary_ksupport_elf_start,
+           &_binary_ksupport_elf_end - &_binary_ksupport_elf_start);
 
-    while(1) {
-        if(_symtab_strptr >= &_symtab_strings[sizeof(_symtab_strings)]) {
-            log("Provided symbol string table overflow");
-            symtab_init();
-            return 0;
-        }
-        *_symtab_strptr = *name;
-        _symtab_strptr++;
-        if(*name == 0)
-            break;
-        name++;
-    }
-
-    return 1;
-}
-
-int kloader_load(void *buffer, int length)
-{
-    if(!kernel_cpu_reset_read()) {
-        log("BUG: attempted to load while kernel CPU running");
-        return 0;
-    }
-    symtab_init();
-    return load_elf(
-        resolve_service_symbol, symtab_add,
-        buffer, length, (void *)KERNELCPU_PAYLOAD_ADDRESS, 4*1024*1024);
-}
-
-kernel_function kloader_find(const char *name)
-{
-    return find_symbol(symtab, name);
-}
-
-extern char _binary_ksupport_bin_start;
-extern char _binary_ksupport_bin_end;
-
-static void start_kernel_cpu(void *addr)
-{
-    memcpy((void *)KERNELCPU_EXEC_ADDRESS, &_binary_ksupport_bin_start,
-           &_binary_ksupport_bin_end - &_binary_ksupport_bin_start);
-    mailbox_acknowledge();
-    mailbox_send(addr);
+    // Start kernel CPU.
+    mailbox_send(msg);
     kernel_cpu_reset_write(0);
 }
 
-void kloader_start_bridge(void)
+void kloader_start_bridge()
 {
     start_kernel_cpu(NULL);
 }
 
-void kloader_start_user_kernel(kernel_function k)
+static int load_or_start_kernel(const void *library, int run_kernel)
+{
+    static struct dyld_info library_info;
+    struct msg_load_request request = {
+        .library      = library,
+        .library_info = &library_info,
+        .run_kernel   = run_kernel,
+    };
+    start_kernel_cpu(&request);
+
+    struct msg_load_reply *reply = mailbox_wait_and_receive();
+    mailbox_acknowledge();
+
+    if(reply->type != MESSAGE_TYPE_LOAD_REPLY) {
+        log("BUG: unexpected reply to load/run request");
+        return 0;
+    }
+
+    if(reply->error != NULL) {
+        log("cannot load kernel: %s", reply->error);
+        return 0;
+    }
+
+    return 1;
+}
+
+int kloader_load_library(const void *library)
 {
     if(!kernel_cpu_reset_read()) {
-        log("BUG: attempted to start kernel CPU while already running (user kernel)");
-        return;
+        log("BUG: attempted to load kernel library while kernel CPU is running");
+        return 0;
     }
-    start_kernel_cpu((void *)k);
+
+    return load_or_start_kernel(library, 0);
+}
+
+void kloader_filter_backtrace(struct artiq_backtrace_item *backtrace,
+                              size_t *backtrace_size) {
+    struct artiq_backtrace_item *cursor = backtrace;
+
+    // Remove all backtrace items belonging to ksupport and subtract
+    // shared object base from the addresses.
+    for(int i = 0; i < *backtrace_size; i++) {
+        if(backtrace[i].function > KERNELCPU_PAYLOAD_ADDRESS) {
+            backtrace[i].function -= KERNELCPU_PAYLOAD_ADDRESS;
+            *cursor++ = backtrace[i];
+        }
+    }
+
+    *backtrace_size = cursor - backtrace;
+}
+
+void kloader_start_kernel()
+{
+    load_or_start_kernel(NULL, 1);
 }
 
 static int kloader_start_flash_kernel(char *key)
 {
-    char buffer[32*1024];
-    unsigned int len, remain;
-    kernel_function k;
-
-    if(!kernel_cpu_reset_read()) {
-        log("BUG: attempted to start kernel CPU while already running (%s)", key);
-        return 0;
-    }
 #if (defined CSR_SPIFLASH_BASE && defined SPIFLASH_PAGE_SIZE)
-    len = fs_read(key, buffer, sizeof(buffer), &remain);
-    if(len <= 0)
+    char buffer[32*1024];
+    unsigned int length, remain;
+
+    length = fs_read(key, buffer, sizeof(buffer), &remain);
+    if(length <= 0)
         return 0;
 
     if(remain) {
-        log("ERROR: %s too long", key);
+        log("ERROR: kernel %s is too large", key);
        return 0;
     }
-    if(!kloader_load(buffer, len)) {
-        log("ERROR: failed to load ELF binary (%s)", key);
-        return 0;
-    }
-    k = kloader_find("run");
-    if(!k) {
-        log("ERROR: failed to find entry point for ELF kernel (%s)", key);
-        return 0;
-    }
-    start_kernel_cpu((void *)k);
-    return 1;
+
+    return load_or_start_kernel(buffer, 1);
 #else
     return 0;
 #endif
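kloader_filter_backtrace() above compacts the backtrace in place: a write cursor trails the read index, frames below the kernel payload base are dropped, the surviving frames are rebased, and the count is recomputed from the cursor. The pattern is a generic in-place filter; the sketch below applies it to a plain array of addresses with a made-up base constant rather than the runtime's KERNELCPU_PAYLOAD_ADDRESS.

#include <stddef.h>
#include <stdio.h>

#define PAYLOAD_BASE 0x1000u   /* example base address, not the runtime's */

/* Keep only entries above the base, rebase them, shrink the array in place. */
static void filter_backtrace(unsigned int *items, size_t *count)
{
    unsigned int *cursor = items;

    for(size_t i = 0; i < *count; i++) {
        if(items[i] > PAYLOAD_BASE) {
            *cursor++ = items[i] - PAYLOAD_BASE;
        }
    }
    *count = cursor - items;
}

int main(void)
{
    unsigned int bt[] = { 0x0040, 0x1208, 0x0080, 0x1890 };
    size_t n = sizeof(bt)/sizeof(bt[0]);

    filter_backtrace(bt, &n);
    for(size_t i = 0; i < n; i++)
        printf("frame %zu: +0x%x\n", i, bt[i]);
    return 0;
}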
@@ -145,7 +128,7 @@ void kloader_stop(void)
 int kloader_validate_kpointer(void *p)
 {
     unsigned int v = (unsigned int)p;
-    if((v < 0x40400000) || (v > (0x4fffffff - 1024*1024))) {
+    if((v < KERNELCPU_EXEC_ADDRESS) || (v > KERNELCPU_LAST_ADDRESS)) {
         log("Received invalid pointer from kernel CPU: 0x%08x", v);
         return 0;
     }
@@ -195,7 +178,11 @@ void kloader_service_essential_kmsg(void)
         case MESSAGE_TYPE_LOG: {
             struct msg_log *msg = (struct msg_log *)umsg;
 
-            log_va(msg->fmt, msg->args);
+            if(msg->no_newline) {
+                lognonl_va(msg->fmt, msg->args);
+            } else {
+                log_va(msg->fmt, msg->args);
+            }
             mailbox_acknowledge();
             break;
         }
@@ -1,20 +1,23 @@
 #ifndef __KLOADER_H
 #define __KLOADER_H
 
-#define KERNELCPU_EXEC_ADDRESS    0x40400000
-#define KERNELCPU_PAYLOAD_ADDRESS 0x40408000
+#include "artiq_personality.h"
+
+#define KERNELCPU_EXEC_ADDRESS    0x40400000
+#define KERNELCPU_PAYLOAD_ADDRESS 0x40420000
+#define KERNELCPU_LAST_ADDRESS    (0x4fffffff - 1024*1024)
+#define KSUPPORT_HEADER_SIZE      0x80
 
 extern long long int now;
 
-typedef void (*kernel_function)(void);
-
-int kloader_load(void *buffer, int length);
-kernel_function kloader_find(const char *name);
+int kloader_load_library(const void *code);
+void kloader_filter_backtrace(struct artiq_backtrace_item *backtrace,
+                              size_t *backtrace_size);
 
 void kloader_start_bridge(void);
 int kloader_start_startup_kernel(void);
 int kloader_start_idle_kernel(void);
-void kloader_start_user_kernel(kernel_function k);
+void kloader_start_kernel(void);
 void kloader_stop(void);
 
 int kloader_validate_kpointer(void *p);
@@ -1,67 +1,253 @@
 #include <stdarg.h>
+#include <string.h>
+#include <stdio.h>
 
-#include "exceptions.h"
-#include "bridge.h"
+#include <link.h>
+#include <dlfcn.h>
+#include <dyld.h>
+#include <unwind.h>
+
+#include "ksupport.h"
+#include "kloader.h"
 #include "mailbox.h"
 #include "messages.h"
-#include "rtio.h"
+#include "bridge.h"
+#include "artiq_personality.h"
+#include "ttl.h"
 #include "dds.h"
+#include "rtio.h"
 
-/* for the prototypes for watchdog_set() and watchdog_clear() */
-#include "clock.h"
-/* for the prototype for rpc() */
-#include "session.h"
-/* for the prototype for log() */
-#include "log.h"
+void ksupport_abort(void);
 
-void exception_handler(unsigned long vect, unsigned long *sp);
-void exception_handler(unsigned long vect, unsigned long *sp)
+int64_t now;
+
+/* compiler-rt symbols */
+extern void __divsi3, __modsi3, __ledf2, __gedf2, __unorddf2, __eqdf2, __ltdf2,
+    __nedf2, __gtdf2, __negsf2, __negdf2, __addsf3, __subsf3, __mulsf3,
+    __divsf3, __lshrdi3, __muldi3, __divdi3, __ashldi3, __ashrdi3,
+    __udivmoddi4, __floatsisf, __floatunsisf, __fixsfsi, __fixunssfsi,
+    __adddf3, __subdf3, __muldf3, __divdf3, __floatsidf, __floatunsidf,
+    __floatdidf, __fixdfsi, __fixdfdi, __fixunsdfsi, __clzsi2, __ctzsi2,
+    __udivdi3, __umoddi3, __moddi3;
+
+/* artiq_personality symbols */
+extern void __artiq_personality;
+
+struct symbol {
+    const char *name;
+    void *addr;
+};
+
+static const struct symbol runtime_exports[] = {
+    /* compiler-rt */
+    {"__divsi3", &__divsi3},
+    {"__modsi3", &__modsi3},
+    {"__ledf2", &__ledf2},
+    {"__gedf2", &__gedf2},
+    {"__unorddf2", &__unorddf2},
+    {"__eqdf2", &__eqdf2},
+    {"__ltdf2", &__ltdf2},
+    {"__nedf2", &__nedf2},
+    {"__gtdf2", &__gtdf2},
+    {"__negsf2", &__negsf2},
+    {"__negdf2", &__negdf2},
+    {"__addsf3", &__addsf3},
+    {"__subsf3", &__subsf3},
+    {"__mulsf3", &__mulsf3},
+    {"__divsf3", &__divsf3},
+    {"__lshrdi3", &__lshrdi3},
+    {"__muldi3", &__muldi3},
+    {"__divdi3", &__divdi3},
+    {"__ashldi3", &__ashldi3},
+    {"__ashrdi3", &__ashrdi3},
+    {"__udivmoddi4", &__udivmoddi4},
+    {"__floatsisf", &__floatsisf},
+    {"__floatunsisf", &__floatunsisf},
+    {"__fixsfsi", &__fixsfsi},
+    {"__fixunssfsi", &__fixunssfsi},
+    {"__adddf3", &__adddf3},
+    {"__subdf3", &__subdf3},
+    {"__muldf3", &__muldf3},
+    {"__divdf3", &__divdf3},
+    {"__floatsidf", &__floatsidf},
+    {"__floatunsidf", &__floatunsidf},
+    {"__floatdidf", &__floatdidf},
+    {"__fixdfsi", &__fixdfsi},
+    {"__fixdfdi", &__fixdfdi},
+    {"__fixunsdfsi", &__fixunsdfsi},
+    {"__clzsi2", &__clzsi2},
+    {"__ctzsi2", &__ctzsi2},
+    {"__udivdi3", &__udivdi3},
+    {"__umoddi3", &__umoddi3},
+    {"__moddi3", &__moddi3},
+
+    /* exceptions */
+    {"_Unwind_Resume", &_Unwind_Resume},
+    {"__artiq_personality", &__artiq_personality},
+    {"__artiq_raise", &__artiq_raise},
+    {"__artiq_reraise", &__artiq_reraise},
+    {"abort", &ksupport_abort},
+
+    /* proxified syscalls */
+    {"now", &now},
+
+    {"watchdog_set", &watchdog_set},
+    {"watchdog_clear", &watchdog_clear},
+
+    {"log", &log},
+    {"lognonl", &lognonl},
+    {"send_rpc", &send_rpc},
+    {"recv_rpc", &recv_rpc},
+
+    /* direct syscalls */
+    {"rtio_get_counter", &rtio_get_counter},
+
+    {"ttl_set_o", &ttl_set_o},
+    {"ttl_set_oe", &ttl_set_oe},
+    {"ttl_set_sensitivity", &ttl_set_sensitivity},
+    {"ttl_get", &ttl_get},
+    {"ttl_clock_set", &ttl_clock_set},
+
+    {"dds_init", &dds_init},
+    {"dds_batch_enter", &dds_batch_enter},
+    {"dds_batch_exit", &dds_batch_exit},
+    {"dds_set", &dds_set},
+
+    /* end */
+    {NULL, NULL}
+};
+
+/* called by libunwind */
+int fprintf(FILE *stream, const char *fmt, ...)
 {
-    struct msg_exception msg;
+    struct msg_log request;
 
-    msg.type = MESSAGE_TYPE_EXCEPTION;
-    msg.eid = EID_INTERNAL_ERROR;
-    msg.eparams[0] = 256;
-    msg.eparams[1] = 256;
-    msg.eparams[2] = 256;
-    mailbox_send_and_wait(&msg);
-    while(1);
+    request.type = MESSAGE_TYPE_LOG;
+    request.fmt = fmt;
+    request.no_newline = 1;
+    va_start(request.args, fmt);
+    mailbox_send_and_wait(&request);
+    va_end(request.args);
+
+    return 0;
 }
 
-typedef void (*kernel_function)(void);
+/* called by libunwind */
+int dladdr (const void *address, Dl_info *info) {
+    /* we don't try to resolve names */
+    return 0;
+}
+
+/* called by libunwind */
+int dl_iterate_phdr (int (*callback) (struct dl_phdr_info *, size_t, void *), void *data) {
+    Elf32_Ehdr *ehdr;
+    struct dl_phdr_info phdr_info;
+    int retval;
+
+    ehdr = (Elf32_Ehdr *)(KERNELCPU_EXEC_ADDRESS - KSUPPORT_HEADER_SIZE);
+    phdr_info = (struct dl_phdr_info){
+        .dlpi_addr  = 0, /* absolutely linked */
+        .dlpi_name  = "<ksupport>",
+        .dlpi_phdr  = (Elf32_Phdr*) ((intptr_t)ehdr + ehdr->e_phoff),
+        .dlpi_phnum = ehdr->e_phnum,
+    };
+    retval = callback(&phdr_info, sizeof(phdr_info), data);
+    if(retval)
+        return retval;
+
+    ehdr = (Elf32_Ehdr *)KERNELCPU_PAYLOAD_ADDRESS;
+    phdr_info = (struct dl_phdr_info){
+        .dlpi_addr  = KERNELCPU_PAYLOAD_ADDRESS,
+        .dlpi_name  = "<kernel>",
+        .dlpi_phdr  = (Elf32_Phdr*) ((intptr_t)ehdr + ehdr->e_phoff),
+        .dlpi_phnum = ehdr->e_phnum,
+    };
+    retval = callback(&phdr_info, sizeof(phdr_info), data);
+    return retval;
+}
+
+static Elf32_Addr resolve_runtime_export(const char *name) {
+    const struct symbol *sym = runtime_exports;
+    while(sym->name) {
+        if(!strcmp(sym->name, name))
+            return (Elf32_Addr)sym->addr;
+        ++sym;
+    }
+    return 0;
+}
+
+void exception_handler(unsigned long vect, unsigned long *regs,
+                       unsigned long pc, unsigned long ea);
+void exception_handler(unsigned long vect, unsigned long *regs,
+                       unsigned long pc, unsigned long ea)
+{
+    artiq_raise_from_c("InternalError",
+                       "Hardware exception {0} at PC {1}, EA {2}",
+                       vect, pc, ea);
+}
 
 int main(void);
 int main(void)
 {
-    kernel_function k;
-    void *jb;
+    struct msg_load_request *request = mailbox_receive();
+    struct msg_load_reply load_reply = {
+        .type = MESSAGE_TYPE_LOAD_REPLY,
+        .error = NULL
+    };
 
-    k = mailbox_receive();
-
-    if(k == NULL)
+    if(request == NULL) {
         bridge_main();
-    else {
-        jb = exception_push();
-        if(exception_setjmp(jb)) {
-            struct msg_exception msg;
-
-            msg.type = MESSAGE_TYPE_EXCEPTION;
-            msg.eid = exception_getid(msg.eparams);
-            mailbox_send_and_wait(&msg);
-        } else {
-            struct msg_base msg;
-
-            k();
-            exception_pop(1);
-
-            msg.type = MESSAGE_TYPE_FINISHED;
-            mailbox_send_and_wait(&msg);
-        }
+        while(1);
     }
+
+    if(request->library != NULL) {
+        if(!dyld_load(request->library, KERNELCPU_PAYLOAD_ADDRESS,
+                      resolve_runtime_export, request->library_info,
+                      &load_reply.error)) {
+            mailbox_send(&load_reply);
+            while(1);
+        }
+    }
+
+    if(request->run_kernel) {
+        void (*kernel_init)() = request->library_info->init;
+
+        mailbox_send_and_wait(&load_reply);
+
+        now = now_init();
+        kernel_init();
+        now_save(now);
+
+        struct msg_base finished_reply;
+        finished_reply.type = MESSAGE_TYPE_FINISHED;
+        mailbox_send_and_wait(&finished_reply);
+    } else {
+        mailbox_send(&load_reply);
+    }
+
     while(1);
 }
 
-long long int now_init(void);
+/* called from __artiq_personality */
+void __artiq_terminate(struct artiq_exception *artiq_exn,
+                       struct artiq_backtrace_item *backtrace,
+                       size_t backtrace_size) {
+    struct msg_exception msg;
+
+    msg.type = MESSAGE_TYPE_EXCEPTION;
+    msg.exception = artiq_exn;
+    msg.backtrace = backtrace;
+    msg.backtrace_size = backtrace_size;
+    mailbox_send(&msg);
+
+    while(1);
+}
+
+void ksupport_abort() {
+    artiq_raise_from_c("InternalError", "abort() called; check device log for details",
+                       0, 0, 0);
+}
+
 long long int now_init(void)
 {
     struct msg_base request;
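resolve_runtime_export() above is a plain linear scan of the NULL-terminated runtime_exports table: the dynamic linker asks for each name the kernel library imports and gets back an address inside ksupport, or 0 if nothing exports it. The lookup itself is ordinary C and can be shown standalone; the table contents below are placeholders, not the real export list.

#include <stdio.h>
#include <string.h>

struct symbol {
    const char *name;
    void *addr;
};

static int example_a, example_b;   /* stand-ins for exported objects */

/* NULL-terminated export table, same shape as runtime_exports. */
static const struct symbol exports[] = {
    {"example_a", &example_a},
    {"example_b", &example_b},
    {NULL, NULL}
};

/* Linear scan; returns NULL when the name is not exported. */
static void *resolve(const char *name)
{
    for(const struct symbol *sym = exports; sym->name; sym++) {
        if(!strcmp(sym->name, name))
            return sym->addr;
    }
    return NULL;
}

int main(void)
{
    printf("example_b -> %p\n", resolve("example_b"));
    printf("missing   -> %p\n", resolve("missing"));
    return 0;
}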
@@ -72,8 +258,11 @@ long long int now_init(void)
     mailbox_send_and_wait(&request);
 
     reply = mailbox_wait_and_receive();
-    if(reply->type != MESSAGE_TYPE_NOW_INIT_REPLY)
-        exception_raise_params(EID_INTERNAL_ERROR, 1, 0, 0);
+    if(reply->type != MESSAGE_TYPE_NOW_INIT_REPLY) {
+        log("Malformed MESSAGE_TYPE_NOW_INIT_REQUEST reply type %d",
+            reply->type);
+        while(1);
+    }
     now = reply->now;
     mailbox_acknowledge();
 
@@ -85,7 +274,6 @@ long long int now_init(void)
     return now;
 }
 
-void now_save(long long int now);
 void now_save(long long int now)
 {
     struct msg_now_save request;
@@ -106,8 +294,11 @@ int watchdog_set(int ms)
     mailbox_send_and_wait(&request);
 
     reply = mailbox_wait_and_receive();
-    if(reply->type != MESSAGE_TYPE_WATCHDOG_SET_REPLY)
-        exception_raise_params(EID_INTERNAL_ERROR, 2, 0, 0);
+    if(reply->type != MESSAGE_TYPE_WATCHDOG_SET_REPLY) {
+        log("Malformed MESSAGE_TYPE_WATCHDOG_SET_REQUEST reply type %d",
+            reply->type);
+        while(1);
+    }
     id = reply->id;
     mailbox_acknowledge();
 
|
||||||
mailbox_send_and_wait(&request);
|
mailbox_send_and_wait(&request);
|
||||||
}
|
}
|
||||||
|
|
||||||
int rpc(int rpc_num, ...)
|
void send_rpc(int service, const char *tag, ...)
|
||||||
{
|
{
|
||||||
struct msg_rpc_request request;
|
struct msg_rpc_send request;
|
||||||
struct msg_rpc_reply *reply;
|
|
||||||
int eid, retval;
|
|
||||||
|
|
||||||
request.type = MESSAGE_TYPE_RPC_REQUEST;
|
request.type = MESSAGE_TYPE_RPC_SEND;
|
||||||
request.rpc_num = rpc_num;
|
request.service = service;
|
||||||
va_start(request.args, rpc_num);
|
request.tag = tag;
|
||||||
|
va_start(request.args, tag);
|
||||||
mailbox_send_and_wait(&request);
|
mailbox_send_and_wait(&request);
|
||||||
va_end(request.args);
|
va_end(request.args);
|
||||||
|
}
|
||||||
|
|
||||||
|
int recv_rpc(void *slot) {
|
||||||
|
struct msg_rpc_recv_request request;
|
||||||
|
struct msg_rpc_recv_reply *reply;
|
||||||
|
|
||||||
|
request.type = MESSAGE_TYPE_RPC_RECV_REQUEST;
|
||||||
|
request.slot = slot;
|
||||||
|
mailbox_send_and_wait(&request);
|
||||||
|
|
||||||
reply = mailbox_wait_and_receive();
|
reply = mailbox_wait_and_receive();
|
||||||
if(reply->type != MESSAGE_TYPE_RPC_REPLY)
|
if(reply->type != MESSAGE_TYPE_RPC_RECV_REPLY) {
|
||||||
exception_raise_params(EID_INTERNAL_ERROR, 3, 0, 0);
|
log("Malformed MESSAGE_TYPE_RPC_RECV_REQUEST reply type %d",
|
||||||
eid = reply->eid;
|
reply->type);
|
||||||
retval = reply->retval;
|
while(1);
|
||||||
mailbox_acknowledge();
|
}
|
||||||
|
|
||||||
if(eid != EID_NONE)
|
if(reply->exception) {
|
||||||
exception_raise(eid);
|
struct artiq_exception exception;
|
||||||
return retval;
|
memcpy(&exception, reply->exception,
|
||||||
|
sizeof(struct artiq_exception));
|
||||||
|
mailbox_acknowledge();
|
||||||
|
__artiq_raise(&exception);
|
||||||
|
} else {
|
||||||
|
int alloc_size = reply->alloc_size;
|
||||||
|
mailbox_acknowledge();
|
||||||
|
return alloc_size;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
void lognonl(const char *fmt, ...)
|
||||||
|
{
|
||||||
|
struct msg_log request;
|
||||||
|
|
||||||
|
request.type = MESSAGE_TYPE_LOG;
|
||||||
|
request.fmt = fmt;
|
||||||
|
request.no_newline = 1;
|
||||||
|
va_start(request.args, fmt);
|
||||||
|
mailbox_send_and_wait(&request);
|
||||||
|
va_end(request.args);
|
||||||
}
|
}
|
||||||
|
|
||||||
void log(const char *fmt, ...)
|
void log(const char *fmt, ...)
|
||||||
|
@@ -153,6 +372,7 @@ void log(const char *fmt, ...)
 
     request.type = MESSAGE_TYPE_LOG;
     request.fmt = fmt;
+    request.no_newline = 0;
     va_start(request.args, fmt);
     mailbox_send_and_wait(&request);
     va_end(request.args);
@@ -0,0 +1,13 @@
+#ifndef __KSTARTUP_H
+#define __KSTARTUP_H
+
+long long int now_init(void);
+void now_save(long long int now);
+int watchdog_set(int ms);
+void watchdog_clear(int id);
+void send_rpc(int service, const char *tag, ...);
+int recv_rpc(void *slot);
+void lognonl(const char *fmt, ...);
+void log(const char *fmt, ...);
+
+#endif /* __KSTARTUP_H */
@@ -4,10 +4,10 @@ ENTRY(_start)
 INCLUDE generated/regions.ld
 
 /* First 4M of main memory are reserved for runtime code/data
- * then comes kernel memory. First 32K of kernel memory are for support code.
+ * then comes kernel memory. First 128K of kernel memory are for support code.
  */
 MEMORY {
-    ksupport : ORIGIN = 0x40400000, LENGTH = 0x8000
+    ksupport (RWX) : ORIGIN = 0x40400000, LENGTH = 0x20000
 }
 
 /* On AMP systems, kernel stack is at the end of main RAM,
@@ -15,6 +15,13 @@ MEMORY {
  */
 PROVIDE(_fstack = 0x40000000 + LENGTH(main_ram) - 1024*1024 - 4);
 
+/* Force ld to make the ELF header as loadable. */
+PHDRS
+{
+    text PT_LOAD FILEHDR PHDRS;
+    eh_frame PT_GNU_EH_FRAME;
+}
+
 SECTIONS
 {
     .text :
@@ -22,7 +29,7 @@ SECTIONS
         _ftext = .;
         *(.text .stub .text.* .gnu.linkonce.t.*)
         _etext = .;
-    } > ksupport
+    } :text
 
     .rodata :
     {
@@ -33,6 +40,16 @@ SECTIONS
         _erodata = .;
     } > ksupport
 
+    .eh_frame :
+    {
+        *(.eh_frame)
+    } :text
+
+    .eh_frame_hdr :
+    {
+        *(.eh_frame_hdr)
+    } :text :eh_frame
+
     .data :
     {
         . = ALIGN(4);
@@ -41,7 +58,7 @@ SECTIONS
         *(.data1)
         *(.sdata .sdata.* .gnu.linkonce.s.*)
         _edata = .;
-    } > ksupport
+    }
 
     .bss :
     {
@@ -57,5 +74,5 @@ SECTIONS
         _ebss = .;
         . = ALIGN(8);
         _heapstart = .;
-    } > ksupport
+    }
 }
@@ -1,5 +1,7 @@
 #include <stdarg.h>
 #include <stdio.h>
+#include <string.h>
+#include <console.h>
 
 #include <generated/csr.h>
 
@@ -8,7 +10,7 @@
 static int buffer_index;
 static char buffer[LOG_BUFFER_SIZE];
 
-void log_va(const char *fmt, va_list args)
+void lognonl_va(const char *fmt, va_list args)
 {
     char outbuf[256];
     int i, len;
@@ -18,16 +20,29 @@ void log_va(const char *fmt, va_list args)
         buffer[buffer_index] = outbuf[i];
         buffer_index = (buffer_index + 1) % LOG_BUFFER_SIZE;
     }
-    buffer[buffer_index] = '\n';
-    buffer_index = (buffer_index + 1) % LOG_BUFFER_SIZE;
 
 #ifdef CSR_ETHMAC_BASE
     /* Since main comms are over ethernet, the serial port
      * is free for us to use. */
-    puts(outbuf);
+    putsnonl(outbuf);
 #endif
 }
 
+void lognonl(const char *fmt, ...)
+{
+    va_list args;
+
+    va_start(args, fmt);
+    lognonl_va(fmt, args);
+    va_end(args);
+}
+
+void log_va(const char *fmt, va_list args)
+{
+    lognonl_va(fmt, args);
+    lognonl("\n");
+}
+
 void log(const char *fmt, ...)
 {
     va_list args;
@@ -41,9 +56,14 @@ void log_get(char *outbuf)
 {
     int i, j;
 
-    j = buffer_index + 1;
-    for(i=0;i<LOG_BUFFER_SIZE;i++) {
+    j = buffer_index;
+    for(i = 0; i < LOG_BUFFER_SIZE; i++) {
         outbuf[i] = buffer[j];
         j = (j + 1) % LOG_BUFFER_SIZE;
    }
 }
+
+void log_clear()
+{
+    memset(buffer, 0, sizeof(buffer));
+}
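The reworked log.c keeps a fixed circular buffer: lognonl_va() formats into a scratch buffer and copies bytes in modulo LOG_BUFFER_SIZE, and log_get() replays the ring starting at the current write index so the oldest bytes come out first. A cut-down version of that ring, with a deliberately tiny size and without the mailbox or console paths, might look like the following sketch.

#include <stdarg.h>
#include <stdio.h>
#include <string.h>

#define BUF_SIZE 16   /* tiny size so wrap-around is easy to see */

static int buffer_index;
static char buffer[BUF_SIZE];

static void log_nonl(const char *fmt, ...)
{
    char outbuf[64];
    int len;
    va_list args;

    va_start(args, fmt);
    len = vsnprintf(outbuf, sizeof(outbuf), fmt, args);
    va_end(args);
    if(len > (int)sizeof(outbuf) - 1)
        len = sizeof(outbuf) - 1;

    for(int i = 0; i < len; i++) {
        buffer[buffer_index] = outbuf[i];
        buffer_index = (buffer_index + 1) % BUF_SIZE;
    }
}

/* Copy the ring starting at the write index: oldest data first. */
static void log_get(char *outbuf)
{
    int j = buffer_index;
    for(int i = 0; i < BUF_SIZE; i++) {
        outbuf[i] = buffer[j];
        j = (j + 1) % BUF_SIZE;
    }
}

int main(void)
{
    char dump[BUF_SIZE + 1] = { 0 };

    for(int i = 0; i < 6; i++)
        log_nonl("m%d ", i);   /* 18 bytes total, forces one wrap */

    log_get(dump);
    printf("ring contents: \"%s\"\n", dump);
    return 0;
}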
@@ -5,9 +5,13 @@
 
 #define LOG_BUFFER_SIZE 4096
 
+void lognonl_va(const char *fmt, va_list args);
+void lognonl(const char *fmt, ...);
+
 void log_va(const char *fmt, va_list args);
 void log(const char *fmt, ...);
 
 void log_get(char *outbuf);
+void log_clear(void);
 
 #endif /* __LOG_H */
@@ -182,12 +182,13 @@ static void serial_service(void)
 
     session_poll((void **)&txdata, &txlen);
     if(txlen > 0) {
-        for(i=0;i<txlen;i++)
+        for(i = 0; i < txlen; i++)
             uart_write(txdata[i]);
-        session_ack_data(txlen);
-        session_ack_mem(txlen);
-    } else if(txlen < 0)
+        session_ack_consumed(txlen);
+        session_ack_sent(txlen);
+    } else if(txlen < 0) {
         reset_serial_session(1);
+    }
 }
 
 static void regular_main(void)
@@ -2,8 +2,10 @@
 #define __MESSAGES_H
 
 #include <stdarg.h>
+#include <stddef.h>
 
 enum {
+    MESSAGE_TYPE_LOAD_REPLY,
     MESSAGE_TYPE_NOW_INIT_REQUEST,
     MESSAGE_TYPE_NOW_INIT_REPLY,
     MESSAGE_TYPE_NOW_SAVE,
@@ -12,8 +14,9 @@ enum {
     MESSAGE_TYPE_WATCHDOG_SET_REQUEST,
     MESSAGE_TYPE_WATCHDOG_SET_REPLY,
     MESSAGE_TYPE_WATCHDOG_CLEAR,
-    MESSAGE_TYPE_RPC_REQUEST,
-    MESSAGE_TYPE_RPC_REPLY,
+    MESSAGE_TYPE_RPC_SEND,
+    MESSAGE_TYPE_RPC_RECV_REQUEST,
+    MESSAGE_TYPE_RPC_RECV_REPLY,
     MESSAGE_TYPE_LOG,
 
     MESSAGE_TYPE_BRG_READY,
@@ -33,6 +36,17 @@ struct msg_base {
 
 /* kernel messages */
 
+struct msg_load_request {
+    const void *library;
+    struct dyld_info *library_info;
+    int run_kernel;
+};
+
+struct msg_load_reply {
+    int type;
+    const char *error;
+};
+
 struct msg_now_init_reply {
     int type;
     long long int now;
@@ -45,8 +59,9 @@ struct msg_now_save {
 
 struct msg_exception {
     int type;
-    int eid;
-    long long int eparams[3];
+    struct artiq_exception *exception;
+    struct artiq_backtrace_item *backtrace;
+    size_t backtrace_size;
 };
 
 struct msg_watchdog_set_request {
@@ -64,21 +79,28 @@ struct msg_watchdog_clear {
     int id;
 };
 
-struct msg_rpc_request {
+struct msg_rpc_send {
     int type;
-    int rpc_num;
+    int service;
+    const char *tag;
     va_list args;
 };
 
-struct msg_rpc_reply {
+struct msg_rpc_recv_request {
     int type;
-    int eid;
-    int retval;
+    void *slot;
+};
+
+struct msg_rpc_recv_reply {
+    int type;
+    int alloc_size;
+    struct artiq_exception *exception;
 };
 
 struct msg_log {
     int type;
     const char *fmt;
+    int no_newline;
     va_list args;
 };
 
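Every mailbox message in messages.h starts with an int type field, so either CPU can receive a generic pointer, inspect type, and cast to the specific struct msg_*. That convention is easy to demonstrate off-target; the sketch below fakes the mailbox with a plain pointer hand-off and uses invented message names rather than the real ones.

#include <stdio.h>

enum {
    EXAMPLE_TYPE_BASE,
    EXAMPLE_TYPE_NOW_INIT_REPLY,
};

/* Every message starts with the same first member... */
struct example_msg_base {
    int type;
};

/* ...so specific messages can be recovered by casting on 'type'. */
struct example_msg_now_init_reply {
    int type;
    long long int now;
};

static void handle(struct example_msg_base *umsg)
{
    switch(umsg->type) {
    case EXAMPLE_TYPE_NOW_INIT_REPLY: {
        struct example_msg_now_init_reply *msg =
            (struct example_msg_now_init_reply *)umsg;
        printf("now = %lld\n", msg->now);
        break;
    }
    default:
        printf("unhandled message type %d\n", umsg->type);
    }
}

int main(void)
{
    struct example_msg_now_init_reply reply = {
        .type = EXAMPLE_TYPE_NOW_INIT_REPLY,
        .now = 1234,
    };

    /* Stand-in for mailbox_send()/mailbox_receive(): pass the pointer directly. */
    handle((struct example_msg_base *)&reply);
    return 0;
}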
@@ -91,7 +91,7 @@ static err_t net_server_recv(void *arg, struct tcp_pcb *pcb, struct pbuf *p, err_t err)
 
 static err_t net_server_sent(void *arg, struct tcp_pcb *pcb, u16_t len)
 {
-    session_ack_mem(len);
+    session_ack_sent(len);
     return ERR_OK;
 }
 
@@ -208,7 +208,7 @@ void net_server_service(void)
             if(len > sndbuf)
                 len = sndbuf;
             tcp_write(active_pcb, data, len, 0);
-            session_ack_data(len);
+            session_ack_consumed(len);
         }
         if(len < 0)
             net_server_close(active_cs, active_pcb);
@@ -1,6 +1,5 @@
 #include <generated/csr.h>
 
-#include "exceptions.h"
 #include "rtio.h"
 
 void rtio_init(void)
@@ -22,17 +21,20 @@ void rtio_process_exceptional_status(int status, long long int timestamp, int channel)
     while(rtio_o_status_read() & RTIO_O_STATUS_FULL);
     if(status & RTIO_O_STATUS_UNDERFLOW) {
         rtio_o_underflow_reset_write(1);
-        exception_raise_params(EID_RTIO_UNDERFLOW,
+        artiq_raise_from_c("RTIOUnderflow",
+            "RTIO underflow at {0} mu, channel {1}, counter {2}",
             timestamp, channel, rtio_get_counter());
     }
     if(status & RTIO_O_STATUS_SEQUENCE_ERROR) {
         rtio_o_sequence_error_reset_write(1);
-        exception_raise_params(EID_RTIO_SEQUENCE_ERROR,
+        artiq_raise_from_c("RTIOSequenceError",
+            "RTIO sequence error at {0} mu, channel {1}",
            timestamp, channel, 0);
     }
     if(status & RTIO_O_STATUS_COLLISION_ERROR) {
         rtio_o_collision_error_reset_write(1);
-        exception_raise_params(EID_RTIO_COLLISION_ERROR,
+        artiq_raise_from_c("RTIOCollisionError",
+            "RTIO collision error at {0} mu, channel {1}",
             timestamp, channel, 0);
     }
 }
@@ -2,6 +2,7 @@
 #define __RTIO_H
 
 #include <generated/csr.h>
+#include "artiq_personality.h"
 
 #define RTIO_O_STATUS_FULL 1
 #define RTIO_O_STATUS_UNDERFLOW 2
@@ -10,6 +10,13 @@ MEMORY {
     runtime : ORIGIN = 0x40000000, LENGTH = 0x400000 /* 4M */
 }
 
+/* First 4M of main memory are reserved for runtime code/data
+ * then comes kernel memory. First 32K of kernel memory are for support code.
+ */
+MEMORY {
+    kernel : ORIGIN = 0x40400000, LENGTH = 0x8000
+}
+
 /* Kernel memory space start right after the runtime,
  * and ends before the runtime stack.
  * Runtime stack is always at the end of main_ram.
@@ -17,6 +24,11 @@ MEMORY {
  */
 PROVIDE(_fstack = 0x40000000 + LENGTH(main_ram) - 4);
 
+/* On AMP systems, kernel stack is at the end of main RAM,
+ * before the runtime stack. Leave 1M for runtime stack.
+ */
+PROVIDE(_kernel_fstack = 0x40000000 + LENGTH(main_ram) - 1024*1024 - 4);
+
 SECTIONS
 {
     .text :
@@ -58,6 +70,12 @@ SECTIONS
         . = ALIGN(4);
         _ebss = .;
         . = ALIGN(8);
-        _heapstart = .;
     } > runtime
 
+    /DISCARD/ :
+    {
+        *(.eh_frame)
+    }
+
+    _heapstart = .;
 }
Some files were not shown because too many files have changed in this diff.