INSTRUCTION stringlengths 1 8.43k | RESPONSE stringlengths 75 104k |
|---|---|
Hydrate Generated Python AST nodes with line numbers and column offsets if they exist in the node environment. | def _ast_with_loc(
py_ast: GeneratedPyAST, env: NodeEnv, include_dependencies: bool = False
) -> GeneratedPyAST:
"""Hydrate Generated Python AST nodes with line numbers and column offsets
if they exist in the node environment."""
if env.line is not None:
py_ast.node.lineno = env.line
if include_dependencies:
for dep in py_ast.dependencies:
dep.lineno = env.line
if env.col is not None:
py_ast.node.col_offset = env.col
if include_dependencies:
for dep in py_ast.dependencies:
dep.col_offset = env.col
return py_ast |
Wrap a generator function in a decorator to supply line and column information to the returned Python AST node. Dependency nodes will not be hydrated; functions whose returns need dependency nodes to be hydrated should use _with_ast_loc_deps below. | def _with_ast_loc(f):
"""Wrap a generator function in a decorator to supply line and column
information to the returned Python AST node. Dependency nodes will not
be hydrated, functions whose returns need dependency nodes to be
hydrated should use `_with_ast_loc_deps` below."""
@wraps(f)
def with_lineno_and_col(
ctx: GeneratorContext, node: Node, *args, **kwargs
) -> GeneratedPyAST:
py_ast = f(ctx, node, *args, **kwargs)
return _ast_with_loc(py_ast, node.env)
return with_lineno_and_col |
Wrap a generator function in a decorator to supply line and column information to the returned Python AST node and dependency nodes. | def _with_ast_loc_deps(f):
"""Wrap a generator function in a decorator to supply line and column
information to the returned Python AST node and dependency nodes.
Dependency nodes should likely only be included if they are new nodes
created in the same function wrapped by this function. Otherwise, dependencies
returned from e.g. calling `gen_py_ast` should be assumed to already have
their location information hydrated."""
@wraps(f)
def with_lineno_and_col(
ctx: GeneratorContext, node: Node, *args, **kwargs
) -> GeneratedPyAST:
py_ast = f(ctx, node, *args, **kwargs)
return _ast_with_loc(py_ast, node.env, include_dependencies=True)
return with_lineno_and_col |
Return True if the Var holds a value which should be compiled to a dynamic Var access. | def _is_dynamic(v: Var) -> bool:
"""Return True if the Var holds a value which should be compiled to a dynamic
Var access."""
return (
Maybe(v.meta)
.map(lambda m: m.get(SYM_DYNAMIC_META_KEY, None)) # type: ignore
.or_else_get(False)
) |
Return True if the Var can be redefined. | def _is_redefable(v: Var) -> bool:
"""Return True if the Var can be redefined."""
return (
Maybe(v.meta)
.map(lambda m: m.get(SYM_REDEF_META_KEY, None)) # type: ignore
.or_else_get(False)
) |
Transform non-statements into ast.Expr nodes so they can stand alone as statements. | def statementize(e: ast.AST) -> ast.AST:
"""Transform non-statements into ast.Expr nodes so they can
stand alone as statements."""
# noinspection PyPep8
if isinstance(
e,
(
ast.Assign,
ast.AnnAssign,
ast.AugAssign,
ast.Expr,
ast.Raise,
ast.Assert,
ast.Pass,
ast.Import,
ast.ImportFrom,
ast.If,
ast.For,
ast.While,
ast.Continue,
ast.Break,
ast.Try,
ast.ExceptHandler,
ast.With,
ast.FunctionDef,
ast.Return,
ast.Yield,
ast.YieldFrom,
ast.Global,
ast.ClassDef,
ast.AsyncFunctionDef,
ast.AsyncFor,
ast.AsyncWith,
),
):
return e
return ast.Expr(value=e) |
Given a series of expression AST nodes, create a function AST node with the given name that can be called and will return the result of the final expression in the input body nodes. | def expressionize(
body: GeneratedPyAST,
fn_name: str,
args: Optional[Iterable[ast.arg]] = None,
vargs: Optional[ast.arg] = None,
) -> ast.FunctionDef:
"""Given a series of expression AST nodes, create a function AST node
with the given name that can be called and will return the result of
the final expression in the input body nodes.
This helps to fix the impedance mismatch of Python, which includes
statements and expressions, and Lisps, which have only expressions.
"""
args = Maybe(args).or_else_get([])
body_nodes: List[ast.AST] = list(map(statementize, body.dependencies))
body_nodes.append(ast.Return(value=body.node))
return ast.FunctionDef(
name=fn_name,
args=ast.arguments(
args=args,
kwarg=None,
vararg=vargs,
kwonlyargs=[],
defaults=[],
kw_defaults=[],
),
body=body_nodes,
decorator_list=[],
returns=None,
) |
Return True if the compiler should emit a warning about this name being redefined. | def __should_warn_on_redef(
ctx: GeneratorContext, defsym: sym.Symbol, safe_name: str, def_meta: lmap.Map
) -> bool:
"""Return True if the compiler should emit a warning about this name being redefined."""
no_warn_on_redef = def_meta.entry(SYM_NO_WARN_ON_REDEF_META_KEY, False)
if no_warn_on_redef:
return False
elif safe_name in ctx.current_ns.module.__dict__:
return True
elif defsym in ctx.current_ns.interns:
var = ctx.current_ns.find(defsym)
assert var is not None, f"Var {defsym} cannot be none here"
if var.meta is not None and var.meta.entry(SYM_REDEF_META_KEY):
return False
elif var.is_bound:
return True
else:
return False
else:
return False |
Return a Python AST Node for a def expression. | def _def_to_py_ast( # pylint: disable=too-many-branches
ctx: GeneratorContext, node: Def
) -> GeneratedPyAST:
"""Return a Python AST Node for a `def` expression."""
assert node.op == NodeOp.DEF
defsym = node.name
is_defn = False
if node.init is not None:
# Since Python function definitions always take the form `def name(...):`,
# it is redundant to assign them to the their final name after they have
# been defined under a private alias. This codepath generates `defn`
# declarations by directly generating the Python `def` with the correct
# function name and short-circuiting the default double-declaration.
if node.init.op == NodeOp.FN:
assert isinstance(node.init, Fn)
def_ast = _fn_to_py_ast(ctx, node.init, def_name=defsym.name)
is_defn = True
elif (
node.init.op == NodeOp.WITH_META
and isinstance(node.init, WithMeta)
and node.init.expr.op == NodeOp.FN
):
assert isinstance(node.init, WithMeta)
def_ast = _with_meta_to_py_ast(ctx, node.init, def_name=defsym.name)
is_defn = True
else:
def_ast = gen_py_ast(ctx, node.init)
else:
def_ast = GeneratedPyAST(node=ast.NameConstant(None))
ns_name = ast.Call(func=_NEW_SYM_FN_NAME, args=[_NS_VAR_NAME], keywords=[])
def_name = ast.Call(func=_NEW_SYM_FN_NAME, args=[ast.Str(defsym.name)], keywords=[])
safe_name = munge(defsym.name)
assert node.meta is not None, "Meta should always be attached to Def nodes"
def_meta = node.meta.form
assert isinstance(def_meta, lmap.Map), "Meta should always be a map"
# If the Var is marked as dynamic, we need to generate a keyword argument
# for the generated Python code to set the Var as dynamic
is_dynamic = def_meta.entry(SYM_DYNAMIC_META_KEY, False)
dynamic_kwarg = (
[ast.keyword(arg="dynamic", value=ast.NameConstant(is_dynamic))]
if is_dynamic
else []
)
# Warn if this symbol is potentially being redefined
if __should_warn_on_redef(ctx, defsym, safe_name, def_meta):
logger.warning(
f"redefining local Python name '{safe_name}' in module '{ctx.current_ns.module.__name__}'"
)
meta_ast = gen_py_ast(ctx, node.meta)
# For defn style def generation, we specifically need to generate the
# global declaration prior to emitting the Python `def` otherwise the
# Python compiler will throw an exception during compilation
# complaining that we assign the value prior to global declaration.
if is_defn:
def_dependencies = list(
chain(
[] if node.top_level else [ast.Global(names=[safe_name])],
def_ast.dependencies,
[] if meta_ast is None else meta_ast.dependencies,
)
)
else:
def_dependencies = list(
chain(
def_ast.dependencies,
[] if node.top_level else [ast.Global(names=[safe_name])],
[
ast.Assign(
targets=[ast.Name(id=safe_name, ctx=ast.Store())],
value=def_ast.node,
)
],
[] if meta_ast is None else meta_ast.dependencies,
)
)
return GeneratedPyAST(
node=ast.Call(
func=_INTERN_VAR_FN_NAME,
args=[ns_name, def_name, ast.Name(id=safe_name, ctx=ast.Load())],
keywords=list(
chain(
dynamic_kwarg,
[]
if meta_ast is None
else [ast.keyword(arg="meta", value=meta_ast.node)],
)
),
),
dependencies=def_dependencies,
) |
Return a Python AST Node for a deftype* expression. | def _deftype_to_py_ast( # pylint: disable=too-many-branches
ctx: GeneratorContext, node: DefType
) -> GeneratedPyAST:
"""Return a Python AST Node for a `deftype*` expression."""
assert node.op == NodeOp.DEFTYPE
type_name = munge(node.name)
ctx.symbol_table.new_symbol(sym.symbol(node.name), type_name, LocalType.DEFTYPE)
bases = []
for base in node.interfaces:
base_node = gen_py_ast(ctx, base)
assert (
count(base_node.dependencies) == 0
), "Class and host form nodes do not have dependencies"
bases.append(base_node.node)
decorator = ast.Call(
func=_ATTR_CLASS_DECORATOR_NAME,
args=[],
keywords=[
ast.keyword(arg="cmp", value=ast.NameConstant(False)),
ast.keyword(arg="frozen", value=ast.NameConstant(node.is_frozen)),
ast.keyword(arg="slots", value=ast.NameConstant(True)),
],
)
with ctx.new_symbol_table(node.name):
type_nodes = []
for field in node.fields:
safe_field = munge(field.name)
type_nodes.append(
ast.Assign(
targets=[ast.Name(id=safe_field, ctx=ast.Store())],
value=ast.Call(func=_ATTRIB_FIELD_FN_NAME, args=[], keywords=[]),
)
)
ctx.symbol_table.new_symbol(sym.symbol(field.name), safe_field, field.local)
type_deps: List[ast.AST] = []
for method in node.methods:
type_ast = __deftype_method_to_py_ast(ctx, method)
type_nodes.append(type_ast.node)
type_deps.extend(type_ast.dependencies)
return GeneratedPyAST(
node=ast.Name(id=type_name, ctx=ast.Load()),
dependencies=list(
chain(
type_deps,
[
ast.ClassDef(
name=type_name,
bases=bases,
keywords=[],
body=type_nodes,
decorator_list=[decorator],
)
],
)
),
) |
Return a Python AST Node for a do expression. | def _do_to_py_ast(ctx: GeneratorContext, node: Do) -> GeneratedPyAST:
"""Return a Python AST Node for a `do` expression."""
assert node.op == NodeOp.DO
assert not node.is_body
body_ast = GeneratedPyAST.reduce(
*map(partial(gen_py_ast, ctx), chain(node.statements, [node.ret]))
)
fn_body_ast: List[ast.AST] = []
do_result_name = genname(_DO_PREFIX)
fn_body_ast.extend(map(statementize, body_ast.dependencies))
fn_body_ast.append(
ast.Assign(
targets=[ast.Name(id=do_result_name, ctx=ast.Store())], value=body_ast.node
)
)
return GeneratedPyAST(
node=ast.Name(id=do_result_name, ctx=ast.Load()), dependencies=fn_body_ast
) |
Return AST elements generated from reducing a synthetic Lisp :do node (e.g. a :do node which acts as a body for another node). | def _synthetic_do_to_py_ast(ctx: GeneratorContext, node: Do) -> GeneratedPyAST:
"""Return AST elements generated from reducing a synthetic Lisp :do node
(e.g. a :do node which acts as a body for another node)."""
assert node.op == NodeOp.DO
assert node.is_body
# TODO: investigate how to handle recur in node.ret
return GeneratedPyAST.reduce(
*map(partial(gen_py_ast, ctx), chain(node.statements, [node.ret]))
) |
Generate a safe Python function name from a function name symbol. If no symbol is provided, generate a name with a default prefix. | def __fn_name(s: Optional[str]) -> str:
"""Generate a safe Python function name from a function name symbol.
If no symbol is provided, generate a name with a default prefix."""
return genname("__" + munge(Maybe(s).or_else_get(_FN_PREFIX))) |
Generate a list of Python AST nodes from function method parameters. | def __fn_args_to_py_ast(
ctx: GeneratorContext, params: Iterable[Binding], body: Do
) -> Tuple[List[ast.arg], Optional[ast.arg], List[ast.AST]]:
"""Generate a list of Python AST nodes from function method parameters."""
fn_args, varg = [], None
fn_body_ast: List[ast.AST] = []
for binding in params:
assert binding.init is None, ":fn nodes cannot have bindint :inits"
assert varg is None, "Must have at most one variadic arg"
arg_name = genname(munge(binding.name))
if not binding.is_variadic:
fn_args.append(ast.arg(arg=arg_name, annotation=None))
ctx.symbol_table.new_symbol(
sym.symbol(binding.name), arg_name, LocalType.ARG
)
else:
varg = ast.arg(arg=arg_name, annotation=None)
safe_local = genname(munge(binding.name))
fn_body_ast.append(
ast.Assign(
targets=[ast.Name(id=safe_local, ctx=ast.Store())],
value=ast.Call(
func=_COLLECT_ARGS_FN_NAME,
args=[ast.Name(id=arg_name, ctx=ast.Load())],
keywords=[],
),
)
)
ctx.symbol_table.new_symbol(
sym.symbol(binding.name), safe_local, LocalType.ARG
)
body_ast = _synthetic_do_to_py_ast(ctx, body)
fn_body_ast.extend(map(statementize, body_ast.dependencies))
fn_body_ast.append(ast.Return(value=body_ast.node))
return fn_args, varg, fn_body_ast |
Return a Python AST node for a function with a single arity. | def __single_arity_fn_to_py_ast(
ctx: GeneratorContext,
node: Fn,
method: FnMethod,
def_name: Optional[str] = None,
meta_node: Optional[MetaNode] = None,
) -> GeneratedPyAST:
"""Return a Python AST node for a function with a single arity."""
assert node.op == NodeOp.FN
assert method.op == NodeOp.FN_METHOD
lisp_fn_name = node.local.name if node.local is not None else None
py_fn_name = __fn_name(lisp_fn_name) if def_name is None else munge(def_name)
py_fn_node = ast.AsyncFunctionDef if node.is_async else ast.FunctionDef
with ctx.new_symbol_table(py_fn_name), ctx.new_recur_point(
method.loop_id, RecurType.FN, is_variadic=node.is_variadic
):
# Allow named anonymous functions to recursively call themselves
if lisp_fn_name is not None:
ctx.symbol_table.new_symbol(
sym.symbol(lisp_fn_name), py_fn_name, LocalType.FN
)
fn_args, varg, fn_body_ast = __fn_args_to_py_ast(
ctx, method.params, method.body
)
meta_deps, meta_decorators = __fn_meta(ctx, meta_node)
return GeneratedPyAST(
node=ast.Name(id=py_fn_name, ctx=ast.Load()),
dependencies=list(
chain(
meta_deps,
[
py_fn_node(
name=py_fn_name,
args=ast.arguments(
args=fn_args,
kwarg=None,
vararg=varg,
kwonlyargs=[],
defaults=[],
kw_defaults=[],
),
body=fn_body_ast,
decorator_list=list(
chain(
meta_decorators,
[_BASILISP_FN_FN_NAME],
[_TRAMPOLINE_FN_NAME]
if ctx.recur_point.has_recur
else [],
)
),
returns=None,
)
],
)
),
) |
Return the Python AST nodes for an argument-length dispatch function for multi-arity functions. | def __multi_arity_dispatch_fn( # pylint: disable=too-many-arguments,too-many-locals
ctx: GeneratorContext,
name: str,
arity_map: Mapping[int, str],
default_name: Optional[str] = None,
max_fixed_arity: Optional[int] = None,
meta_node: Optional[MetaNode] = None,
is_async: bool = False,
) -> GeneratedPyAST:
"""Return the Python AST nodes for a argument-length dispatch function
for multi-arity functions.
def fn(*args):
nargs = len(args)
method = __fn_dispatch_map.get(nargs)
if method:
return method(*args)
# Only if default
if nargs > max_fixed_arity:
return default(*args)
raise RuntimeError
"""
dispatch_map_name = f"{name}_dispatch_map"
dispatch_keys, dispatch_vals = [], []
for k, v in arity_map.items():
dispatch_keys.append(ast.Num(k))
dispatch_vals.append(ast.Name(id=v, ctx=ast.Load()))
# Async functions should return await, otherwise just return
handle_return = __handle_async_return if is_async else __handle_return
nargs_name = genname("nargs")
method_name = genname("method")
body = [
ast.Assign(
targets=[ast.Name(id=nargs_name, ctx=ast.Store())],
value=ast.Call(
func=ast.Name(id="len", ctx=ast.Load()),
args=[ast.Name(id=_MULTI_ARITY_ARG_NAME, ctx=ast.Load())],
keywords=[],
),
),
ast.Assign(
targets=[ast.Name(id=method_name, ctx=ast.Store())],
value=ast.Call(
func=ast.Attribute(
value=ast.Name(id=dispatch_map_name, ctx=ast.Load()),
attr="get",
ctx=ast.Load(),
),
args=[ast.Name(id=nargs_name, ctx=ast.Load())],
keywords=[],
),
),
ast.If(
test=ast.Compare(
left=ast.NameConstant(None),
ops=[ast.IsNot()],
comparators=[ast.Name(id=method_name, ctx=ast.Load())],
),
body=[
handle_return(
ast.Call(
func=ast.Name(id=method_name, ctx=ast.Load()),
args=[
ast.Starred(
value=ast.Name(
id=_MULTI_ARITY_ARG_NAME, ctx=ast.Load()
),
ctx=ast.Load(),
)
],
keywords=[],
)
)
],
orelse=[]
if default_name is None
else [
ast.If(
test=ast.Compare(
left=ast.Name(id=nargs_name, ctx=ast.Load()),
ops=[ast.GtE()],
comparators=[ast.Num(max_fixed_arity)],
),
body=[
handle_return(
ast.Call(
func=ast.Name(id=default_name, ctx=ast.Load()),
args=[
ast.Starred(
value=ast.Name(
id=_MULTI_ARITY_ARG_NAME, ctx=ast.Load()
),
ctx=ast.Load(),
)
],
keywords=[],
)
)
],
orelse=[],
)
],
),
ast.Raise(
exc=ast.Call(
func=_load_attr("basilisp.lang.runtime.RuntimeException"),
args=[
ast.Str(f"Wrong number of args passed to function: {name}"),
ast.Name(id=nargs_name, ctx=ast.Load()),
],
keywords=[],
),
cause=None,
),
]
py_fn_node = ast.AsyncFunctionDef if is_async else ast.FunctionDef
meta_deps, meta_decorators = __fn_meta(ctx, meta_node)
return GeneratedPyAST(
node=ast.Name(id=name, ctx=ast.Load()),
dependencies=chain(
[
ast.Assign(
targets=[ast.Name(id=dispatch_map_name, ctx=ast.Store())],
value=ast.Dict(keys=dispatch_keys, values=dispatch_vals),
)
],
meta_deps,
[
py_fn_node(
name=name,
args=ast.arguments(
args=[],
kwarg=None,
vararg=ast.arg(arg=_MULTI_ARITY_ARG_NAME, annotation=None),
kwonlyargs=[],
defaults=[],
kw_defaults=[],
),
body=body,
decorator_list=list(chain(meta_decorators, [_BASILISP_FN_FN_NAME])),
returns=None,
)
],
),
) |
Return a Python AST node for a function with multiple arities. | def __multi_arity_fn_to_py_ast( # pylint: disable=too-many-locals
ctx: GeneratorContext,
node: Fn,
methods: Collection[FnMethod],
def_name: Optional[str] = None,
meta_node: Optional[MetaNode] = None,
) -> GeneratedPyAST:
"""Return a Python AST node for a function with multiple arities."""
assert node.op == NodeOp.FN
assert all([method.op == NodeOp.FN_METHOD for method in methods])
lisp_fn_name = node.local.name if node.local is not None else None
py_fn_name = __fn_name(lisp_fn_name) if def_name is None else munge(def_name)
py_fn_node = ast.AsyncFunctionDef if node.is_async else ast.FunctionDef
arity_to_name = {}
rest_arity_name: Optional[str] = None
fn_defs = []
for method in methods:
arity_name = f"{py_fn_name}__arity{'_rest' if method.is_variadic else method.fixed_arity}"
if method.is_variadic:
rest_arity_name = arity_name
else:
arity_to_name[method.fixed_arity] = arity_name
with ctx.new_symbol_table(arity_name), ctx.new_recur_point(
method.loop_id, RecurType.FN, is_variadic=node.is_variadic
):
# Allow named anonymous functions to recursively call themselves
if lisp_fn_name is not None:
ctx.symbol_table.new_symbol(
sym.symbol(lisp_fn_name), py_fn_name, LocalType.FN
)
fn_args, varg, fn_body_ast = __fn_args_to_py_ast(
ctx, method.params, method.body
)
fn_defs.append(
py_fn_node(
name=arity_name,
args=ast.arguments(
args=fn_args,
kwarg=None,
vararg=varg,
kwonlyargs=[],
defaults=[],
kw_defaults=[],
),
body=fn_body_ast,
decorator_list=[_TRAMPOLINE_FN_NAME]
if ctx.recur_point.has_recur
else [],
returns=None,
)
)
dispatch_fn_ast = __multi_arity_dispatch_fn(
ctx,
py_fn_name,
arity_to_name,
default_name=rest_arity_name,
max_fixed_arity=node.max_fixed_arity,
meta_node=meta_node,
is_async=node.is_async,
)
return GeneratedPyAST(
node=dispatch_fn_ast.node,
dependencies=list(chain(fn_defs, dispatch_fn_ast.dependencies)),
) |
Return a Python AST Node for a fn expression. | def _fn_to_py_ast(
ctx: GeneratorContext,
node: Fn,
def_name: Optional[str] = None,
meta_node: Optional[MetaNode] = None,
) -> GeneratedPyAST:
"""Return a Python AST Node for a `fn` expression."""
assert node.op == NodeOp.FN
if len(node.methods) == 1:
return __single_arity_fn_to_py_ast(
ctx, node, next(iter(node.methods)), def_name=def_name, meta_node=meta_node
)
else:
return __multi_arity_fn_to_py_ast(
ctx, node, node.methods, def_name=def_name, meta_node=meta_node
) |
Generate custom if nodes to handle recur bodies. | def __if_body_to_py_ast(
ctx: GeneratorContext, node: Node, result_name: str
) -> GeneratedPyAST:
"""Generate custom `if` nodes to handle `recur` bodies.
Recur nodes can appear in the then and else expressions of `if` forms.
Recur nodes generate Python `continue` statements, which we would otherwise
attempt to insert directly into an expression. Python will complain if
it finds a statement in an expression AST slot, so we special case the
recur handling here."""
if node.op == NodeOp.RECUR and ctx.recur_point.type == RecurType.LOOP:
assert isinstance(node, Recur)
return _recur_to_py_ast(ctx, node)
elif node.op == NodeOp.DO:
assert isinstance(node, Do)
if_body = _synthetic_do_to_py_ast(ctx, node.assoc(is_body=True))
return GeneratedPyAST(
node=ast.Assign(
targets=[ast.Name(id=result_name, ctx=ast.Store())], value=if_body.node
),
dependencies=list(map(statementize, if_body.dependencies)),
)
else:
py_ast = gen_py_ast(ctx, node)
return GeneratedPyAST(
node=ast.Assign(
targets=[ast.Name(id=result_name, ctx=ast.Store())], value=py_ast.node
),
dependencies=py_ast.dependencies,
) |
Generate an intermediate if statement which assigns to a temporary variable which is returned as the expression value at the end of evaluation. | def _if_to_py_ast(ctx: GeneratorContext, node: If) -> GeneratedPyAST:
"""Generate an intermediate if statement which assigns to a temporary
variable, which is returned as the expression value at the end of
evaluation.
Every expression in Basilisp is true if it is not the literal values nil
or false. This function compiles direct checks for the test value against
the Python values None and False to accommodate this behavior.
Note that the if and else bodies are switched in compilation so that we
can perform a short-circuit or comparison, rather than exhaustively checking
for both false and nil each time."""
assert node.op == NodeOp.IF
test_ast = gen_py_ast(ctx, node.test)
result_name = genname(_IF_RESULT_PREFIX)
then_ast = __if_body_to_py_ast(ctx, node.then, result_name)
else_ast = __if_body_to_py_ast(ctx, node.else_, result_name)
test_name = genname(_IF_TEST_PREFIX)
test_assign = ast.Assign(
targets=[ast.Name(id=test_name, ctx=ast.Store())], value=test_ast.node
)
ifstmt = ast.If(
test=ast.BoolOp(
op=ast.Or(),
values=[
ast.Compare(
left=ast.NameConstant(None),
ops=[ast.Is()],
comparators=[ast.Name(id=test_name, ctx=ast.Load())],
),
ast.Compare(
left=ast.NameConstant(False),
ops=[ast.Is()],
comparators=[ast.Name(id=test_name, ctx=ast.Load())],
),
],
),
values=[],
body=list(map(statementize, chain(else_ast.dependencies, [else_ast.node]))),
orelse=list(map(statementize, chain(then_ast.dependencies, [then_ast.node]))),
)
return GeneratedPyAST(
node=ast.Name(id=result_name, ctx=ast.Load()),
dependencies=list(chain(test_ast.dependencies, [test_assign, ifstmt])),
) |
Return a Python AST node for a Basilisp import* expression. | def _import_to_py_ast(ctx: GeneratorContext, node: Import) -> GeneratedPyAST:
"""Return a Python AST node for a Basilisp `import*` expression."""
assert node.op == NodeOp.IMPORT
last = None
deps: List[ast.AST] = []
for alias in node.aliases:
safe_name = munge(alias.name)
try:
module = importlib.import_module(safe_name)
if alias.alias is not None:
ctx.add_import(sym.symbol(alias.name), module, sym.symbol(alias.alias))
else:
ctx.add_import(sym.symbol(alias.name), module)
except ModuleNotFoundError as e:
raise ImportError(
f"Python module '{alias.name}' not found", node.form, node
) from e
py_import_alias = (
munge(alias.alias)
if alias.alias is not None
else safe_name.split(".", maxsplit=1)[0]
)
deps.append(
ast.Assign(
targets=[ast.Name(id=py_import_alias, ctx=ast.Store())],
value=ast.Call(
func=_load_attr("builtins.__import__"),
args=[ast.Str(safe_name)],
keywords=[],
),
)
)
last = ast.Name(id=py_import_alias, ctx=ast.Load())
# Note that we add this import to the live running system in the above
# calls to `ctx.add_import`, however, since we compile and cache Python
# bytecode, we need to generate calls to `add_import` for the running
# namespace so when this code is reloaded from the cache, the runtime
# is correctly configured.
deps.append(
ast.Call(
func=_load_attr(f"{_NS_VAR_VALUE}.add_import"),
args=[
ast.Call(
func=_NEW_SYM_FN_NAME, args=[ast.Str(safe_name)], keywords=[]
),
last,
],
keywords=[],
)
)
assert last is not None, "import* node must have at least one import"
return GeneratedPyAST(node=last, dependencies=deps) |
Return a Python AST Node for a Basilisp function invocation. | def _invoke_to_py_ast(ctx: GeneratorContext, node: Invoke) -> GeneratedPyAST:
"""Return a Python AST Node for a Basilisp function invocation."""
assert node.op == NodeOp.INVOKE
fn_ast = gen_py_ast(ctx, node.fn)
args_deps, args_nodes = _collection_ast(ctx, node.args)
return GeneratedPyAST(
node=ast.Call(func=fn_ast.node, args=list(args_nodes), keywords=[]),
dependencies=list(chain(fn_ast.dependencies, args_deps)),
) |
Return a Python AST Node for a let* expression. | def _let_to_py_ast(ctx: GeneratorContext, node: Let) -> GeneratedPyAST:
"""Return a Python AST Node for a `let*` expression."""
assert node.op == NodeOp.LET
with ctx.new_symbol_table("let"):
let_body_ast: List[ast.AST] = []
for binding in node.bindings:
init_node = binding.init
assert init_node is not None
init_ast = gen_py_ast(ctx, init_node)
binding_name = genname(munge(binding.name))
let_body_ast.extend(init_ast.dependencies)
let_body_ast.append(
ast.Assign(
targets=[ast.Name(id=binding_name, ctx=ast.Store())],
value=init_ast.node,
)
)
ctx.symbol_table.new_symbol(
sym.symbol(binding.name), binding_name, LocalType.LET
)
let_result_name = genname("let_result")
body_ast = _synthetic_do_to_py_ast(ctx, node.body)
let_body_ast.extend(map(statementize, body_ast.dependencies))
let_body_ast.append(
ast.Assign(
targets=[ast.Name(id=let_result_name, ctx=ast.Store())],
value=body_ast.node,
)
)
return GeneratedPyAST(
node=ast.Name(id=let_result_name, ctx=ast.Load()), dependencies=let_body_ast
) |
Return a Python AST Node for a loop* expression. | def _loop_to_py_ast(ctx: GeneratorContext, node: Loop) -> GeneratedPyAST:
"""Return a Python AST Node for a `loop*` expression."""
assert node.op == NodeOp.LOOP
with ctx.new_symbol_table("loop"):
binding_names = []
init_bindings: List[ast.AST] = []
for binding in node.bindings:
init_node = binding.init
assert init_node is not None
init_ast = gen_py_ast(ctx, init_node)
init_bindings.extend(init_ast.dependencies)
binding_name = genname(munge(binding.name))
binding_names.append(binding_name)
init_bindings.append(
ast.Assign(
targets=[ast.Name(id=binding_name, ctx=ast.Store())],
value=init_ast.node,
)
)
ctx.symbol_table.new_symbol(
sym.symbol(binding.name), binding_name, LocalType.LOOP
)
loop_result_name = genname("loop")
with ctx.new_recur_point(
node.loop_id, RecurType.LOOP, binding_names=binding_names
):
loop_body_ast: List[ast.AST] = []
body_ast = _synthetic_do_to_py_ast(ctx, node.body)
loop_body_ast.extend(body_ast.dependencies)
loop_body_ast.append(
ast.Assign(
targets=[ast.Name(id=loop_result_name, ctx=ast.Store())],
value=body_ast.node,
)
)
loop_body_ast.append(ast.Break())
return GeneratedPyAST(
node=_load_attr(loop_result_name),
dependencies=list(
chain(
[
ast.Assign(
targets=[
ast.Name(id=loop_result_name, ctx=ast.Store())
],
value=ast.NameConstant(None),
)
],
init_bindings,
[
ast.While(
test=ast.NameConstant(True),
body=loop_body_ast,
orelse=[],
)
],
)
),
) |
Return a Python AST Node for a quote expression. | def _quote_to_py_ast(ctx: GeneratorContext, node: Quote) -> GeneratedPyAST:
"""Return a Python AST Node for a `quote` expression."""
assert node.op == NodeOp.QUOTE
return _const_node_to_py_ast(ctx, node.expr) |
Return a Python AST node for recur occurring inside a fn*. | def __fn_recur_to_py_ast(ctx: GeneratorContext, node: Recur) -> GeneratedPyAST:
"""Return a Python AST node for `recur` occurring inside a `fn*`."""
assert node.op == NodeOp.RECUR
assert ctx.recur_point.is_variadic is not None
recur_nodes: List[ast.AST] = []
recur_deps: List[ast.AST] = []
for expr in node.exprs:
expr_ast = gen_py_ast(ctx, expr)
recur_nodes.append(expr_ast.node)
recur_deps.extend(expr_ast.dependencies)
return GeneratedPyAST(
node=ast.Call(
func=_TRAMPOLINE_ARGS_FN_NAME,
args=list(
chain([ast.NameConstant(ctx.recur_point.is_variadic)], recur_nodes)
),
keywords=[],
),
dependencies=recur_deps,
) |
Return a Python AST node for recur occurring inside a deftype* method. | def __deftype_method_recur_to_py_ast(
ctx: GeneratorContext, node: Recur
) -> GeneratedPyAST:
"""Return a Python AST node for `recur` occurring inside a `deftype*` method."""
assert node.op == NodeOp.RECUR
recur_nodes: List[ast.AST] = []
recur_deps: List[ast.AST] = []
for expr in node.exprs:
expr_ast = gen_py_ast(ctx, expr)
recur_nodes.append(expr_ast.node)
recur_deps.extend(expr_ast.dependencies)
this_entry = ctx.symbol_table.find_symbol(ctx.current_this)
assert this_entry is not None, "Field type local must have this"
return GeneratedPyAST(
node=ast.Call(
func=_TRAMPOLINE_ARGS_FN_NAME,
args=list(
chain(
[
ast.NameConstant(ctx.recur_point.is_variadic),
ast.Name(id=this_entry.munged, ctx=ast.Load()),
],
recur_nodes,
)
),
keywords=[],
),
dependencies=recur_deps,
) |
Return a Python AST node for recur occurring inside a loop. | def __loop_recur_to_py_ast(ctx: GeneratorContext, node: Recur) -> GeneratedPyAST:
"""Return a Python AST node for `recur` occurring inside a `loop`."""
assert node.op == NodeOp.RECUR
recur_deps: List[ast.AST] = []
recur_targets: List[ast.Name] = []
recur_exprs: List[ast.AST] = []
for name, expr in zip(ctx.recur_point.binding_names, node.exprs):
expr_ast = gen_py_ast(ctx, expr)
recur_deps.extend(expr_ast.dependencies)
recur_targets.append(ast.Name(id=name, ctx=ast.Store()))
recur_exprs.append(expr_ast.node)
if len(recur_targets) == 1:
assert len(recur_exprs) == 1
recur_deps.append(ast.Assign(targets=recur_targets, value=recur_exprs[0]))
else:
recur_deps.append(
ast.Assign(
targets=[ast.Tuple(elts=recur_targets, ctx=ast.Store())],
value=ast.Tuple(elts=recur_exprs, ctx=ast.Load()),
)
)
recur_deps.append(ast.Continue())
return GeneratedPyAST(node=ast.NameConstant(None), dependencies=recur_deps) |
Return a Python AST Node for a recur expression. | def _recur_to_py_ast(ctx: GeneratorContext, node: Recur) -> GeneratedPyAST:
"""Return a Python AST Node for a `recur` expression.
Note that `recur` nodes can only legally appear in two AST locations:
(1) in :then or :else expressions in :if nodes, and
(2) in :ret expressions in :do nodes
As such, both of these handlers special case the recur construct, as it
is the only case in which the code generator emits a statement rather than
an expression."""
assert node.op == NodeOp.RECUR
assert ctx.recur_point is not None, "Must have set a recur point to recur"
handle_recur = _RECUR_TYPE_HANDLER.get(ctx.recur_point.type)
assert (
handle_recur is not None
), f"No recur point handler defined for {ctx.recur_point.type}"
ctx.recur_point.has_recur = True
return handle_recur(ctx, node) |
Return a Python AST Node for a set! expression. | def _set_bang_to_py_ast(ctx: GeneratorContext, node: SetBang) -> GeneratedPyAST:
"""Return a Python AST Node for a `set!` expression."""
assert node.op == NodeOp.SET_BANG
val_temp_name = genname("set_bang_val")
val_ast = gen_py_ast(ctx, node.val)
target = node.target
assert isinstance(
target, (HostField, Local, VarRef)
), f"invalid set! target type {type(target)}"
if isinstance(target, HostField):
target_ast = _interop_prop_to_py_ast(ctx, target, is_assigning=True)
elif isinstance(target, VarRef):
target_ast = _var_sym_to_py_ast(ctx, target, is_assigning=True)
elif isinstance(target, Local):
target_ast = _local_sym_to_py_ast(ctx, target, is_assigning=True)
else: # pragma: no cover
raise GeneratorException(
f"invalid set! target type {type(target)}", lisp_ast=target
)
return GeneratedPyAST(
node=ast.Name(id=val_temp_name, ctx=ast.Load()),
dependencies=list(
chain(
val_ast.dependencies,
[
ast.Assign(
targets=[ast.Name(id=val_temp_name, ctx=ast.Store())],
value=val_ast.node,
)
],
target_ast.dependencies,
[ast.Assign(targets=[target_ast.node], value=val_ast.node)],
)
),
) |
Return a Python AST Node for a throw expression. | def _throw_to_py_ast(ctx: GeneratorContext, node: Throw) -> GeneratedPyAST:
"""Return a Python AST Node for a `throw` expression."""
assert node.op == NodeOp.THROW
throw_fn = genname(_THROW_PREFIX)
exc_ast = gen_py_ast(ctx, node.exception)
raise_body = ast.Raise(exc=exc_ast.node, cause=None)
return GeneratedPyAST(
node=ast.Call(func=ast.Name(id=throw_fn, ctx=ast.Load()), args=[], keywords=[]),
dependencies=[
ast.FunctionDef(
name=throw_fn,
args=ast.arguments(
args=[],
kwarg=None,
vararg=None,
kwonlyargs=[],
defaults=[],
kw_defaults=[],
),
body=list(chain(exc_ast.dependencies, [raise_body])),
decorator_list=[],
returns=None,
)
],
) |
Return a Python AST Node for a try expression. | def _try_to_py_ast(ctx: GeneratorContext, node: Try) -> GeneratedPyAST:
"""Return a Python AST Node for a `try` expression."""
assert node.op == NodeOp.TRY
try_expr_name = genname("try_expr")
body_ast = _synthetic_do_to_py_ast(ctx, node.body)
catch_handlers = list(
map(partial(__catch_to_py_ast, ctx, try_expr_name=try_expr_name), node.catches)
)
finallys: List[ast.AST] = []
if node.finally_ is not None:
finally_ast = _synthetic_do_to_py_ast(ctx, node.finally_)
finallys.extend(map(statementize, finally_ast.dependencies))
finallys.append(statementize(finally_ast.node))
return GeneratedPyAST(
node=ast.Name(id=try_expr_name, ctx=ast.Load()),
dependencies=[
ast.Try(
body=list(
chain(
body_ast.dependencies,
[
ast.Assign(
targets=[ast.Name(id=try_expr_name, ctx=ast.Store())],
value=body_ast.node,
)
],
)
),
handlers=catch_handlers,
orelse=[],
finalbody=finallys,
)
],
) |
Generate a Python AST node for accessing a locally defined Python variable. | def _local_sym_to_py_ast(
ctx: GeneratorContext, node: Local, is_assigning: bool = False
) -> GeneratedPyAST:
"""Generate a Python AST node for accessing a locally defined Python variable."""
assert node.op == NodeOp.LOCAL
sym_entry = ctx.symbol_table.find_symbol(sym.symbol(node.name))
assert sym_entry is not None
if node.local == LocalType.FIELD:
this_entry = ctx.symbol_table.find_symbol(ctx.current_this)
assert this_entry is not None, "Field type local must have this"
return GeneratedPyAST(
node=_load_attr(
f"{this_entry.munged}.{sym_entry.munged}",
ctx=ast.Store() if is_assigning else ast.Load(),
)
)
else:
return GeneratedPyAST(
node=ast.Name(
id=sym_entry.munged, ctx=ast.Store() if is_assigning else ast.Load()
)
) |
Generate Var.find calls for the named symbol. | def __var_find_to_py_ast(
var_name: str, ns_name: str, py_var_ctx: ast.AST
) -> GeneratedPyAST:
"""Generate Var.find calls for the named symbol."""
return GeneratedPyAST(
node=ast.Attribute(
value=ast.Call(
func=_FIND_VAR_FN_NAME,
args=[
ast.Call(
func=_NEW_SYM_FN_NAME,
args=[ast.Str(var_name)],
keywords=[ast.keyword(arg="ns", value=ast.Str(ns_name))],
)
],
keywords=[],
),
attr="value",
ctx=py_var_ctx,
)
) |
def _var_sym_to_py_ast(
    ctx: GeneratorContext, node: VarRef, is_assigning: bool = False
) -> GeneratedPyAST:
    """Generate a Python AST node for accessing a Var.
    If the Var is marked as :dynamic or :redef or the compiler option
    USE_VAR_INDIRECTION is active, do not compile to a direct access.
    If the corresponding function name is not defined in a Python module,
    no direct variable access is possible and Var.find indirection must be
    used."""
    assert node.op == NodeOp.VAR
    var = node.var
    ns = var.ns
    ns_name = ns.name
    ns_module = ns.module
    safe_ns = munge(ns_name)
    var_name = var.name.name
    py_var_ctx = ast.Store() if is_assigning else ast.Load()
    # Return the actual var, rather than its value if requested
    if node.return_var:
        return GeneratedPyAST(
            node=ast.Call(
                func=_FIND_VAR_FN_NAME,
                args=[
                    ast.Call(
                        func=_NEW_SYM_FN_NAME,
                        args=[ast.Str(var_name)],
                        keywords=[ast.keyword(arg="ns", value=ast.Str(ns_name))],
                    )
                ],
                keywords=[],
            )
        )
    # Check if we should use Var indirection
    if ctx.use_var_indirection or _is_dynamic(var) or _is_redefable(var):
        return __var_find_to_py_ast(var_name, ns_name, py_var_ctx)
    # Otherwise, try to direct-link it like a Python variable
    # Try without allowing builtins first
    safe_name = munge(var_name)
    if safe_name not in ns_module.__dict__:
        # Try allowing builtins
        safe_name = munge(var_name, allow_builtins=True)
    if safe_name in ns_module.__dict__:
        # Same-namespace Vars are referenced by bare name; otherwise
        # qualify with the munged namespace name
        if ns is ctx.current_ns:
            return GeneratedPyAST(node=ast.Name(id=safe_name, ctx=py_var_ctx))
        return GeneratedPyAST(node=_load_attr(f"{safe_ns}.{safe_name}", ctx=py_var_ctx))
    # Fall back to indirection when no direct link could be established
    if ctx.warn_on_var_indirection:
        logger.warning(f"could not resolve a direct link to Var '{var_name}'")
    return __var_find_to_py_ast(var_name, ns_name, py_var_ctx)
def _interop_call_to_py_ast(ctx: GeneratorContext, node: HostCall) -> GeneratedPyAST:
    """Generate a Python AST node for Python interop method calls.

    Compiles `(.method target & args)` to `target.method(*args)`. The
    method name is munged with builtins allowed since host methods may
    legitimately shadow Python builtin names."""
    assert node.op == NodeOp.HOST_CALL
    target_ast = gen_py_ast(ctx, node.target)
    args_deps, args_nodes = _collection_ast(ctx, node.args)
    return GeneratedPyAST(
        node=ast.Call(
            func=ast.Attribute(
                value=target_ast.node,
                attr=munge(node.method, allow_builtins=True),
                ctx=ast.Load(),
            ),
            args=list(args_nodes),
            keywords=[],
        ),
        # Target and argument dependency statements must precede the call
        dependencies=list(chain(target_ast.dependencies, args_deps)),
    )
def _interop_prop_to_py_ast(
    ctx: GeneratorContext, node: HostField, is_assigning: bool = False
) -> GeneratedPyAST:
    """Generate a Python AST node for Python interop property access.

    Compiles `(.-field target)` to `target.field`; a Store context is
    used when the access is an assignment target."""
    assert node.op == NodeOp.HOST_FIELD
    target_ast = gen_py_ast(ctx, node.target)
    return GeneratedPyAST(
        node=ast.Attribute(
            value=target_ast.node,
            attr=munge(node.field),
            ctx=ast.Store() if is_assigning else ast.Load(),
        ),
        dependencies=target_ast.dependencies,
    )
def _maybe_class_to_py_ast(_: GeneratorContext, node: MaybeClass) -> GeneratedPyAST:
    """Generate a Python AST node for accessing a potential Python module
    variable name."""
    assert node.op == NodeOp.MAYBE_CLASS
    return GeneratedPyAST(
        node=ast.Name(
            # Prefer the compiler's module alias (if one exists) over the
            # raw class name
            id=Maybe(_MODULE_ALIASES.get(node.class_)).or_else_get(node.class_),
            ctx=ast.Load(),
        )
    )
def _maybe_host_form_to_py_ast(
    _: GeneratorContext, node: MaybeHostForm
) -> GeneratedPyAST:
    """Generate a Python AST node for accessing a potential Python module
    variable name with a namespace.

    Compiles `module/field` to a `module.field` attribute access,
    substituting the compiler's module alias when one is registered."""
    assert node.op == NodeOp.MAYBE_HOST_FORM
    return GeneratedPyAST(
        node=_load_attr(
            f"{Maybe(_MODULE_ALIASES.get(node.class_)).or_else_get(node.class_)}.{node.field}"
        )
    )
def _with_meta_to_py_ast(
    ctx: GeneratorContext, node: WithMeta, **kwargs
) -> GeneratedPyAST:
    """Generate a Python AST node for a :with-meta Lisp AST node by
    dispatching to the handler registered for the wrapped expression's op,
    passing the metadata node along as `meta_node`."""
    assert node.op == NodeOp.WITH_META
    handle_expr = _WITH_META_EXPR_HANDLER.get(node.expr.op)
    assert (
        handle_expr is not None
    ), "No expression handler for with-meta child node type"
    return handle_expr(ctx, node.expr, meta_node=node.meta, **kwargs)
def _const_val_to_py_ast(ctx: GeneratorContext, form: LispForm) -> GeneratedPyAST:
    """Generate Python AST nodes for constant Lisp forms.
    Nested values in collections for :const nodes are not parsed, so recursive
    structures need to call into this function to generate Python AST nodes for
    nested elements. For top-level :const Lisp AST nodes, see
    `_const_node_to_py_ast`."""
    handle_value = _CONST_VALUE_HANDLERS.get(type(form))
    # Seq types are open-ended, so they cannot be enumerated in the
    # type-keyed handler map; fall back to an isinstance check
    if handle_value is None and isinstance(form, ISeq):
        handle_value = _const_seq_to_py_ast  # type: ignore
    assert handle_value is not None, "A type handler must be defined for constants"
    return handle_value(ctx, form)
def _collection_literal_to_py_ast(
    ctx: GeneratorContext, form: Iterable[LispForm]
) -> Iterable[GeneratedPyAST]:
    """Turn a quoted collection literal of Lisp forms into Python AST nodes.
    This function can only handle constant values. It does not call back into
    the generic AST generators, so only constant values will be generated down
    this path."""
    yield from map(partial(_const_val_to_py_ast, ctx), form)
def _const_node_to_py_ast(ctx: GeneratorContext, lisp_ast: Const) -> GeneratedPyAST:
    """Generate Python AST nodes for a :const Lisp AST node.
    Nested values in collections for :const nodes are not parsed. Consequently,
    this function cannot be called recursively for those nested values. Instead,
    call `_const_val_to_py_ast` on nested values."""
    assert lisp_ast.op == NodeOp.CONST
    # Dispatch on the node's declared constant type tag, not the Python
    # type of the value
    node_type = lisp_ast.type
    handle_const_node = _CONSTANT_HANDLER.get(node_type)
    assert handle_const_node is not None, f"No :const AST type handler for {node_type}"
    node_val = lisp_ast.val
    return handle_const_node(ctx, node_val)
def gen_py_ast(ctx: GeneratorContext, lisp_ast: Node) -> GeneratedPyAST:
    """Take a Lisp AST node as an argument and produce zero or more Python
    AST nodes.
    This is the primary entrypoint for generating AST nodes from Lisp
    syntax. It may be called recursively to compile child forms."""
    op: NodeOp = lisp_ast.op
    assert op is not None, "Lisp AST nodes must have an :op key"
    # Dispatch to the per-op handler table; every op must be registered
    handle_node = _NODE_HANDLERS.get(op)
    assert (
        handle_node is not None
    ), f"Lisp AST nodes :op has no handler defined for op {op}"
    return handle_node(ctx, lisp_ast)
def _module_imports(ctx: GeneratorContext) -> Iterable[ast.Import]:
    """Generate the Python Import AST node for importing all required
    language support modules."""
    # Yield `import basilisp` so code attempting to call fully qualified
    # `basilisp.lang...` modules don't result in compiler errors
    yield ast.Import(names=[ast.alias(name="basilisp", asname=None)])
    for imp in ctx.imports:
        name = imp.key.name
        # Apply the compiler's short alias for the module where one exists
        alias = _MODULE_ALIASES.get(name, None)
        yield ast.Import(names=[ast.alias(name=name, asname=alias)])
def _from_module_import() -> ast.ImportFrom:
    """Generate the Python `from ... import ...` AST node which imports
    the `Var` runtime support type under its compiler alias."""
    aliased_var = ast.alias(name="Var", asname=_VAR_ALIAS)
    return ast.ImportFrom(
        module="basilisp.lang.runtime", names=[aliased_var], level=0
    )
def _ns_var(
    py_ns_var: str = _NS_VAR, lisp_ns_var: str = LISP_NS_VAR, lisp_ns_ns: str = CORE_NS
) -> ast.Assign:
    """Assign a Python variable named `ns_var` to the value of the current
    namespace.

    Emits `py_ns_var = Var.find(symbol(lisp_ns_var, ns=lisp_ns_ns))` as
    part of the module preamble."""
    return ast.Assign(
        targets=[ast.Name(id=py_ns_var, ctx=ast.Store())],
        value=ast.Call(
            func=_FIND_VAR_FN_NAME,
            args=[
                ast.Call(
                    func=_NEW_SYM_FN_NAME,
                    args=[ast.Str(lisp_ns_var)],
                    keywords=[ast.keyword(arg="ns", value=ast.Str(lisp_ns_ns))],
                )
            ],
            keywords=[],
        ),
    )
def py_module_preamble(ctx: GeneratorContext,) -> GeneratedPyAST:
    """Bootstrap a new module with imports and other boilerplate.

    The preamble consists of required module imports, the `Var` runtime
    import, and the `*ns*` variable assignment; the node itself is a bare
    `None` constant."""
    preamble: List[ast.AST] = []
    preamble.extend(_module_imports(ctx))
    preamble.append(_from_module_import())
    preamble.append(_ns_var())
    return GeneratedPyAST(node=ast.NameConstant(None), dependencies=preamble)
def warn_on_var_indirection(self) -> bool:
    """If True, warn when a Var reference cannot be direct linked (iff
    use_var_indirection is False).

    Warnings are suppressed entirely when use_var_indirection is active,
    since indirection is then deliberate rather than a fallback."""
    return not self.use_var_indirection and self._opts.entry(
        WARN_ON_VAR_INDIRECTION, True
    )
def set(members: Iterable[T], meta=None) -> Set[T]:  # pylint:disable=redefined-builtin
    """Creates a new set from an iterable of members, with optional metadata."""
    return Set(pset(members), meta=meta)
def s(*members: T, meta=None) -> Set[T]:
    """Creates a new set from members given as positional arguments."""
    return Set(pset(members), meta=meta)
Return a list of body nodes trimming out unreachable code ( any statements appearing after break continue and return nodes ). | def _filter_dead_code(nodes: Iterable[ast.AST]) -> List[ast.AST]:
"""Return a list of body nodes, trimming out unreachable code (any
statements appearing after `break`, `continue`, and `return` nodes)."""
new_nodes: List[ast.AST] = []
for node in nodes:
if isinstance(node, (ast.Break, ast.Continue, ast.Return)):
new_nodes.append(node)
break
new_nodes.append(node)
return new_nodes |
def visit_ExceptHandler(self, node: ast.ExceptHandler) -> Optional[ast.AST]:
    """Eliminate dead code from except handler bodies."""
    # Visit children first so nested bodies are already trimmed
    new_node = self.generic_visit(node)
    assert isinstance(new_node, ast.ExceptHandler)
    return ast.copy_location(
        ast.ExceptHandler(
            type=new_node.type,
            name=new_node.name,
            body=_filter_dead_code(new_node.body),
        ),
        new_node,
    )
def visit_Expr(self, node: ast.Expr) -> Optional[ast.Expr]:
    """Eliminate no-op constant expressions which are in the tree
    as standalone statements."""
    # A bare constant or name as a statement has no effect; returning
    # None removes the statement from the parent's body entirely.
    no_op_types = (
        ast.Constant,  # type: ignore
        ast.Name,
        ast.NameConstant,
        ast.Num,
        ast.Str,
    )
    return None if isinstance(node.value, no_op_types) else node
def visit_FunctionDef(self, node: ast.FunctionDef) -> Optional[ast.AST]:
    """Eliminate dead code from function bodies."""
    # Visit children first so nested bodies are already trimmed
    new_node = self.generic_visit(node)
    assert isinstance(new_node, ast.FunctionDef)
    return ast.copy_location(
        ast.FunctionDef(
            name=new_node.name,
            args=new_node.args,
            body=_filter_dead_code(new_node.body),
            decorator_list=new_node.decorator_list,
            returns=new_node.returns,
        ),
        new_node,
    )
def visit_If(self, node: ast.If) -> Optional[ast.AST]:
    """Eliminate dead code from if/elif bodies."""
    # Visit children first so nested bodies are already trimmed
    new_node = self.generic_visit(node)
    assert isinstance(new_node, ast.If)
    return ast.copy_location(
        ast.If(
            test=new_node.test,
            body=_filter_dead_code(new_node.body),
            orelse=_filter_dead_code(new_node.orelse),
        ),
        new_node,
    )
def visit_While(self, node: ast.While) -> Optional[ast.AST]:
    """Eliminate dead code from while bodies."""
    # Visit children first so nested bodies are already trimmed
    new_node = self.generic_visit(node)
    assert isinstance(new_node, ast.While)
    return ast.copy_location(
        ast.While(
            test=new_node.test,
            body=_filter_dead_code(new_node.body),
            orelse=_filter_dead_code(new_node.orelse),
        ),
        new_node,
    )
def visit_Try(self, node: ast.Try) -> Optional[ast.AST]:
    """Eliminate dead code from try bodies (handlers are trimmed by
    visit_ExceptHandler during the generic visit)."""
    new_node = self.generic_visit(node)
    assert isinstance(new_node, ast.Try)
    return ast.copy_location(
        ast.Try(
            body=_filter_dead_code(new_node.body),
            handlers=new_node.handlers,
            orelse=_filter_dead_code(new_node.orelse),
            finalbody=_filter_dead_code(new_node.finalbody),
        ),
        new_node,
    )
Create a new empty Basilisp Python module. Modules are created for each Namespace when it is created. | def _new_module(name: str, doc=None) -> types.ModuleType:
"""Create a new empty Basilisp Python module.
Modules are created for each Namespace when it is created."""
mod = types.ModuleType(name, doc=doc)
mod.__loader__ = None
mod.__package__ = None
mod.__spec__ = None
mod.__basilisp_bootstrapped__ = False # type: ignore
return mod |
def first(o):
    """If o is a ISeq, return the first element from o. If o is None, return
    None. Otherwise, coerces o to a Seq and returns the first."""
    if o is None:
        return None
    if isinstance(o, ISeq):
        return o.first
    seq = to_seq(o)
    return None if seq is None else seq.first
def rest(o) -> Optional[ISeq]:
    """If o is a ISeq, return the elements after the first in o. If o is None,
    returns an empty seq. Otherwise, coerces o to a seq and returns the rest."""
    if o is None:
        return None
    if isinstance(o, ISeq):
        s = o.rest
        # Normalize a nil rest to the canonical empty seq
        if s is None:
            return lseq.EMPTY
        return s
    n = to_seq(o)
    if n is None:
        return lseq.EMPTY
    return n.rest
def nthrest(coll, i: int):
    """Returns the nth rest sequence of coll, or coll if i is 0.
    Returns None if coll is None."""
    # Walk the rests iteratively rather than recursively to avoid
    # stack growth on large i.
    while coll is not None and i != 0:
        i -= 1
        coll = rest(coll)
    return coll
def nthnext(coll, i: int) -> Optional[ISeq]:
    """Returns the nth next sequence of coll.
    Returns None if coll is None or is exhausted before i steps."""
    while True:
        if coll is None:
            return None
        if i == 0:
            # Coerce the final value so the return is always a seq or None
            return to_seq(coll)
        i -= 1
        coll = next_(coll)
def cons(o, seq) -> ISeq:
    """Creates a new sequence where o is the first element and seq is the rest.
    If seq is None, return a list containing o. If seq is not a ISeq, attempt
    to coerce it to a ISeq and then cons o onto the resulting sequence."""
    if seq is None:
        return llist.l(o)
    if isinstance(seq, ISeq):
        return seq.cons(o)
    # Fall back to a single-element list when seq cannot be coerced
    return Maybe(to_seq(seq)).map(lambda s: s.cons(o)).or_else(lambda: llist.l(o))
def to_seq(o) -> Optional[ISeq]:
    """Coerce the argument o to a ISeq. If o is None, return None.
    Empty seqs are also normalized to None via _seq_or_nil."""
    if o is None:
        return None
    if isinstance(o, ISeq):
        return _seq_or_nil(o)
    if isinstance(o, ISeqable):
        return _seq_or_nil(o.seq())
    # Last resort: treat o as a plain Python iterable
    return _seq_or_nil(lseq.sequence(o))
def concat(*seqs) -> ISeq:
    """Concatenate the sequences given by seqs into a single ISeq.
    Returns the empty seq when no elements remain after coercion."""
    coerced = filter(None, map(to_seq, seqs))
    result = lseq.sequence(itertools.chain.from_iterable(coerced))
    return lseq.EMPTY if result is None else result
def apply(f, args):
    """Apply function f to the arguments provided.

    The last argument must always be coercible to a Seq. Intermediate
    arguments are not modified.

    For example:
        (apply max [1 2 3])   ;=> 3
        (apply max 4 [1 2 3]) ;=> 4"""
    final = list(args[:-1])
    try:
        last = args[-1]
    except TypeError as e:
        # args was sliceable but not indexable; there is no final seq to
        # splice in, so call with what we have. (Previously this path fell
        # through and raised NameError on the unbound `last`.)
        logger.debug("Ignored %s: %s", type(e).__name__, e)
        return f(*final)
    s = to_seq(last)
    if s is not None:
        final.extend(s)
    return f(*final)
def apply_kw(f, args):
    """Apply function f to the arguments provided.

    The last argument must always be coercible to a Mapping. Intermediate
    arguments are not modified.

    For example:
        (apply builtins/dict {:a 1} {:b 2}) ;=> #py {:a 1 :b 2}
        (apply builtins/dict {:a 1} {:a 2}) ;=> #py {:a 2}"""
    final = list(args[:-1])
    try:
        last = args[-1]
    except TypeError as e:
        # args was sliceable but not indexable; there are no kwargs to
        # apply, so call with positional args only. (Previously this path
        # fell through and raised NameError on the unbound `kwargs`.)
        logger.debug("Ignored %s: %s", type(e).__name__, e)
        return f(*final)
    kwargs = to_py(last, lambda kw: munge(kw.name, allow_builtins=True))
    return f(*final, **kwargs)
def nth(coll, i, notfound=__nth_sentinel):
    """Returns the ith element of coll (0-indexed), if it exists.
    None otherwise. If i is out of bounds, throws an IndexError unless
    notfound is specified."""
    if coll is None:
        return None
    # Fast path: direct indexing for subscriptable collections
    try:
        return coll[i]
    except IndexError as ex:
        if notfound is not __nth_sentinel:
            return notfound
        raise ex
    except TypeError as ex:
        # Log these at TRACE so they don't gum up the DEBUG logs since most
        # cases where this exception occurs are not bugs.
        logger.log(TRACE, "Ignored %s: %s", type(ex).__name__, ex)
        # Slow path: coll is not subscriptable; scan it linearly
        try:
            for j, e in enumerate(coll):
                if i == j:
                    return e
            if notfound is not __nth_sentinel:
                return notfound
            raise IndexError(f"Index {i} out of bounds")
        except TypeError:
            # Not iterable either; fall through to the type error below
            pass
        raise TypeError(f"nth not supported on object of type {type(coll)}")
def assoc(m, *kvs):
    """Associate keys to values in associative data structure m. If m is None,
    returns a new Map with key-values kvs.

    Raises TypeError if m is neither None nor an IAssociative."""
    if m is None:
        return lmap.Map.empty().assoc(*kvs)
    if isinstance(m, IAssociative):
        return m.assoc(*kvs)
    raise TypeError(
        f"Object of type {type(m)} does not implement Associative interface"
    )
def update(m, k, f, *args):
    """Updates the value for key k in associative data structure m with the return value from
    calling f(old_v, *args). If m is None, use an empty map. If k is not in m, old_v will be
    None.

    Raises TypeError if m is neither None nor an IAssociative."""
    if m is None:
        return lmap.Map.empty().assoc(k, f(None, *args))
    if isinstance(m, IAssociative):
        old_v = m.entry(k)
        new_v = f(old_v, *args)
        return m.assoc(k, new_v)
    raise TypeError(
        f"Object of type {type(m)} does not implement Associative interface"
    )
def conj(coll, *xs):
    """Conjoin xs to collection. New elements may be added in different positions
    depending on the type of coll. conj returns the same type as coll. If coll
    is None, return a list with xs conjoined.

    Raises TypeError if coll is neither None nor an IPersistentCollection."""
    if coll is None:
        l = llist.List.empty()
        return l.cons(*xs)
    if isinstance(coll, IPersistentCollection):
        return coll.cons(*xs)
    raise TypeError(
        f"Object of type {type(coll)} does not implement Collection interface"
    )
def partial(f, *args):
    """Return a function which is the partial application of f with args.

    The returned function prepends args to whatever arguments it is
    called with and delegates to f."""
    @functools.wraps(f)
    def partial_f(*inner_args):
        combined = args + inner_args
        return f(*combined)
    return partial_f
def deref(o, timeout_s=None, timeout_val=None):
    """Dereference a Deref object and return its contents.
    If o is an object implementing IBlockingDeref and timeout_s and
    timeout_val are supplied, deref will wait at most timeout_s seconds,
    returning timeout_val if timeout_s seconds elapse and o has not
    returned.

    Raises TypeError if o implements neither interface."""
    # NOTE(review): IDeref is checked first, so an object implementing both
    # interfaces will never see the timeout arguments — confirm the
    # interface hierarchy makes these cases disjoint.
    if isinstance(o, IDeref):
        return o.deref()
    elif isinstance(o, IBlockingDeref):
        return o.deref(timeout_s, timeout_val)
    raise TypeError(f"Object of type {type(o)} cannot be dereferenced")
def equals(v1, v2) -> bool:
    """Compare two objects by value.

    Unlike Python's ``==``, this function never treats 1 as equal to True
    or 0 as equal to False: booleans and None are compared by identity.
    Every other pair is compared with Python's equality operator."""
    for v in (v1, v2):
        if v is None or isinstance(v, bool):
            return v1 is v2
    return v1 == v2
def divide(x: LispNumber, y: LispNumber) -> LispNumber:
    """Division reducer.

    Dividing two ints yields an exact Fraction (Lisp ratio semantics);
    any other combination falls back to Python true division."""
    both_ints = isinstance(x, int) and isinstance(y, int)
    return Fraction(x, y) if both_ints else x / y
def sort(coll, f=None) -> Optional[ISeq]:
    """Return a sorted sequence of the elements in coll. If a comparator
    function f is provided, compare elements in coll using f.

    A two-argument Lisp comparator is adapted to a sort key via
    cmp_to_key; when f is None the key passed to sorted is None."""
    return to_seq(sorted(coll, key=Maybe(f).map(functools.cmp_to_key).value))
def contains(coll, k):
    """Return true if coll contains the key k.

    Associative collections use their own contains method; everything
    else falls back to Python's `in` operator."""
    if isinstance(coll, IAssociative):
        return coll.contains(k)
    return k in coll
def get(m, k, default=None):
    """Return the value of k in m. Return default if k not found in m."""
    if isinstance(m, IAssociative):
        return m.entry(k, default=default)
    # Fall back to Python subscripting; missing keys, bad indices, and
    # unsubscriptable objects all yield the default rather than raising
    try:
        return m[k]
    except (KeyError, IndexError, TypeError) as e:
        logger.debug("Ignored %s: %s", type(e).__name__, e)
        return default
def to_lisp(o, keywordize_keys: bool = True):
    """Recursively convert Python collections into Lisp collections.
    Non-collection values are returned unchanged."""
    if not isinstance(o, (dict, frozenset, list, set, tuple)):
        return o
    else:  # pragma: no cover
        return _to_lisp_backup(o, keywordize_keys=keywordize_keys)
def to_py(o, keyword_fn: Callable[[kw.Keyword], Any] = _kw_name):
    """Recursively convert Lisp collections into Python collections.

    keyword_fn converts Keyword map keys into their Python representation.
    Non-collection values are returned unchanged."""
    if isinstance(o, ISeq):
        return _to_py_list(o, keyword_fn=keyword_fn)
    elif not isinstance(
        o, (IPersistentList, IPersistentMap, IPersistentSet, IPersistentVector)
    ):
        return o
    else:  # pragma: no cover
        return _to_py_backup(o, keyword_fn=keyword_fn)
def lrepr(o, human_readable: bool = False) -> str:
    """Produce a string representation of an object. If human_readable is False,
    the string representation of Lisp objects is something that can be read back
    in by the reader as the same object."""
    core_ns = Namespace.get(sym.symbol(CORE_NS))
    assert core_ns is not None
    # Thread the current values of the core print-control dynamic Vars
    # (*print-dup*, *print-length*, etc.) into the printer
    return lobj.lrepr(
        o,
        human_readable=human_readable,
        print_dup=core_ns.find(sym.symbol(_PRINT_DUP_VAR_NAME)).value,  # type: ignore
        print_length=core_ns.find(  # type: ignore
            sym.symbol(_PRINT_LENGTH_VAR_NAME)
        ).value,
        print_level=core_ns.find(  # type: ignore
            sym.symbol(_PRINT_LEVEL_VAR_NAME)
        ).value,
        print_meta=core_ns.find(sym.symbol(_PRINT_META_VAR_NAME)).value,  # type: ignore
        print_readably=core_ns.find(  # type: ignore
            sym.symbol(_PRINT_READABLY_VAR_NAME)
        ).value,
    )
def repl_complete(text: str, state: int) -> Optional[str]:
    """Completer function for Python's readline/libedit implementation.

    Returns the state-th completion for text, or None once completions
    are exhausted (readline calls with increasing state until it gets
    None back)."""
    # Can't complete Keywords, Numerals
    if __NOT_COMPLETEABLE.match(text):
        return None
    elif text.startswith(":"):
        completions = kw.complete(text)
    else:
        ns = get_current_ns()
        completions = ns.complete(text)
    if completions is None:
        return None
    try:
        return list(completions)[state]
    except IndexError:
        # state walked past the last completion; signal "no more"
        # instead of leaking IndexError out of the completer
        return None
def _collect_args(args) -> ISeq:
    """Collect Python starred arguments into a Basilisp list.

    Raises TypeError for anything other than a tuple, since CPython
    always passes *args as a tuple."""
    if isinstance(args, tuple):
        return llist.list(args)
    raise TypeError("Python variadic arguments should always be a tuple")
def _trampoline(f):
    """Trampoline a function repeatedly until it is finished recurring to help
    avoid stack growth.

    The wrapped function signals a recur by returning a _TrampolineArgs
    instance carrying the next call's arguments; any other return value
    is final."""
    @functools.wraps(f)
    def trampoline(*args, **kwargs):
        ret = f(*args, **kwargs)
        while isinstance(ret, _TrampolineArgs):
            ret = f(*ret.args, **ret.kwargs)
        return ret
    return trampoline
Decorator to set attributes on a function. Returns the original function after setting the attributes named by the keyword arguments. | def _with_attrs(**kwargs):
"""Decorator to set attributes on a function. Returns the original
function after setting the attributes named by the keyword arguments."""
def decorator(f):
for k, v in kwargs.items():
setattr(f, k, v)
return f
return decorator |
def _fn_with_meta(f, meta: Optional[lmap.Map]):
    """Return a new function with the given meta. If the function f already
    has a meta map, then merge the new meta into the existing meta map.

    Raises TypeError if meta is not a Map. Coroutine functions are
    wrapped in an async wrapper so awaitability is preserved."""
    if not isinstance(meta, lmap.Map):
        raise TypeError("meta must be a map")
    if inspect.iscoroutinefunction(f):
        @functools.wraps(f)
        async def wrapped_f(*args, **kwargs):
            return await f(*args, **kwargs)
    else:
        @functools.wraps(f)  # type: ignore
        def wrapped_f(*args, **kwargs):
            return f(*args, **kwargs)
    # Merge with any existing meta map rather than replacing it
    wrapped_f.meta = (  # type: ignore
        f.meta.update(meta)
        if hasattr(f, "meta") and isinstance(f.meta, lmap.Map)
        else meta
    )
    wrapped_f.with_meta = partial(_fn_with_meta, wrapped_f)  # type: ignore
    return wrapped_f
def _basilisp_fn(f):
    """Create a Basilisp function, setting meta and supplying a with_meta
    method implementation.

    Marks the function with _basilisp_fn so the runtime can distinguish
    Basilisp functions from plain Python callables."""
    assert not hasattr(f, "meta")
    f._basilisp_fn = True
    f.meta = None
    f.with_meta = partial(_fn_with_meta, f)
    return f
def init_ns_var(which_ns: str = CORE_NS, ns_var_name: str = NS_VAR_NAME) -> Var:
    """Initialize the dynamic `*ns*` variable in the Namespace `which_ns`,
    interning it as a dynamic Var whose root value is that Namespace."""
    core_sym = sym.Symbol(which_ns)
    core_ns = Namespace.get_or_create(core_sym)
    ns_var = Var.intern(core_sym, sym.Symbol(ns_var_name), core_ns, dynamic=True)
    logger.debug(f"Created namespace variable {sym.symbol(ns_var_name, ns=which_ns)}")
    return ns_var
def set_current_ns(
    ns_name: str,
    module: types.ModuleType = None,
    ns_var_name: str = NS_VAR_NAME,
    ns_var_ns: str = NS_VAR_NS,
) -> Var:
    """Set the value of the dynamic variable `*ns*` in the current thread.

    The namespace is created if it does not already exist. Raises
    RuntimeException if the `*ns*` Var itself is not bound."""
    symbol = sym.Symbol(ns_name)
    ns = Namespace.get_or_create(symbol, module=module)
    ns_var_sym = sym.Symbol(ns_var_name, ns=ns_var_ns)
    ns_var = Maybe(Var.find(ns_var_sym)).or_else_raise(
        lambda: RuntimeException(
            f"Dynamic Var {sym.Symbol(ns_var_name, ns=ns_var_ns)} not bound!"
        )
    )
    # Push a thread-local binding rather than mutating the root value
    ns_var.push_bindings(ns)
    logger.debug(f"Setting {ns_var_sym} to {ns}")
    return ns_var
def ns_bindings(
    ns_name: str,
    module: types.ModuleType = None,
    ns_var_name: str = NS_VAR_NAME,
    ns_var_ns: str = NS_VAR_NS,
):
    """Context manager for temporarily changing the value of basilisp.core/*ns*.

    The namespace is created if it does not already exist. Raises
    RuntimeException if the `*ns*` Var itself is not bound."""
    symbol = sym.Symbol(ns_name)
    ns = Namespace.get_or_create(symbol, module=module)
    ns_var_sym = sym.Symbol(ns_var_name, ns=ns_var_ns)
    ns_var = Maybe(Var.find(ns_var_sym)).or_else_raise(
        lambda: RuntimeException(
            f"Dynamic Var {sym.Symbol(ns_var_name, ns=ns_var_ns)} not bound!"
        )
    )
    logger.debug(f"Binding {ns_var_sym} to {ns}")
    # Push outside the try: if push_bindings raises, there is nothing to
    # pop, and the finally block must not pop someone else's binding.
    ns_var.push_bindings(ns)
    try:
        yield ns_var.value
    finally:
        ns_var.pop_bindings()
        logger.debug(f"Reset bindings for {ns_var_sym} to {ns_var.value}")
def remove_ns_bindings(ns_var_name: str = NS_VAR_NAME, ns_var_ns: str = NS_VAR_NS):
    """Context manager to pop the most recent bindings for basilisp.core/*ns* after
    completion of the code under management.

    Raises RuntimeException if the `*ns*` Var is not bound."""
    ns_var_sym = sym.Symbol(ns_var_name, ns=ns_var_ns)
    ns_var = Maybe(Var.find(ns_var_sym)).or_else_raise(
        lambda: RuntimeException(
            f"Dynamic Var {sym.Symbol(ns_var_name, ns=ns_var_ns)} not bound!"
        )
    )
    try:
        yield
    finally:
        # Pop unconditionally, even when the body raised
        ns_var.pop_bindings()
        logger.debug(f"Reset bindings for {ns_var_sym} to {ns_var.value}")
def get_current_ns(
    ns_var_name: str = NS_VAR_NAME, ns_var_ns: str = NS_VAR_NS
) -> Namespace:
    """Get the value of the dynamic variable `*ns*` in the current thread.

    Raises RuntimeException if the `*ns*` Var is not bound."""
    ns_sym = sym.Symbol(ns_var_name, ns=ns_var_ns)
    ns: Namespace = Maybe(Var.find(ns_sym)).map(lambda v: v.value).or_else_raise(
        lambda: RuntimeException(f"Dynamic Var {ns_sym} not bound!")
    )
    return ns
def resolve_alias(s: sym.Symbol, ns: Optional[Namespace] = None) -> sym.Symbol:
    """Resolve the aliased symbol in the current namespace (or the given
    namespace), returning a fully namespace-qualified symbol.

    Special forms resolve to themselves. Unresolvable symbols are
    qualified with the target namespace's own name."""
    if s in _SPECIAL_FORMS:
        return s
    ns = Maybe(ns).or_else(get_current_ns)
    if s.ns is not None:
        # Qualified symbol: expand a namespace alias if one is registered
        aliased_ns = ns.get_alias(sym.symbol(s.ns))
        if aliased_ns is not None:
            return sym.symbol(s.name, aliased_ns.name)
        else:
            return s
    else:
        # Bare symbol: resolve through the namespace's Var mappings
        which_var = ns.find(sym.symbol(s.name))
        if which_var is not None:
            return sym.symbol(which_var.name.name, which_var.ns.name)
        else:
            return sym.symbol(s.name, ns=ns.name)
def resolve_var(s: sym.Symbol, ns: Optional[Namespace] = None) -> Optional[Var]:
    """Resolve the aliased symbol to a Var from the specified
    namespace, or the current namespace if none is specified.
    Returns None if no such Var exists."""
    return Var.find(resolve_alias(s, ns))
def add_generated_python(
    generated_python: str,
    var_name: str = _GENERATED_PYTHON_VAR_NAME,
    which_ns: Optional[str] = None,
) -> None:
    """Add generated Python code to a dynamic variable in which_ns,
    appending to (not replacing) any previously accumulated code.

    The Var is interned lazily (private, dynamic, empty string root)
    the first time code is added for a namespace."""
    if which_ns is None:
        which_ns = get_current_ns().name
    ns_sym = sym.Symbol(var_name, ns=which_ns)
    v = Maybe(Var.find(ns_sym)).or_else(
        lambda: Var.intern(
            sym.symbol(which_ns),  # type: ignore
            sym.symbol(var_name),
            "",
            dynamic=True,
            meta=lmap.map({_PRIVATE_META_KEY: True}),
        )
    )
    v.value = v.value + generated_python
def print_generated_python(
    var_name: str = _PRINT_GENERATED_PY_VAR_NAME, core_ns_name: str = CORE_NS
) -> bool:
    """Return the value of the `*print-generated-python*` dynamic variable.

    Raises RuntimeException if the Var is not bound."""
    ns_sym = sym.Symbol(var_name, ns=core_ns_name)
    return (
        Maybe(Var.find(ns_sym))
        .map(lambda v: v.value)
        .or_else_raise(lambda: RuntimeException(f"Dynamic Var {ns_sym} not bound!"))
    )
def bootstrap(ns_var_name: str = NS_VAR_NAME, core_ns_name: str = CORE_NS) -> None:
    """Bootstrap the environment with functions that are are difficult to
    express with the very minimal lisp environment.

    Interns `in-ns`, the `unquote`/`unquote-splicing` placeholder Vars,
    and the dynamic Vars controlling code generation and printing."""
    core_ns_sym = sym.symbol(core_ns_name)
    ns_var_sym = sym.symbol(ns_var_name, ns=core_ns_name)
    __NS = Maybe(Var.find(ns_var_sym)).or_else_raise(
        lambda: RuntimeException(f"Dynamic Var {ns_var_sym} not bound!")
    )
    def in_ns(s: sym.Symbol):
        # Switch *ns* to the named namespace, creating it on demand
        ns = Namespace.get_or_create(s)
        __NS.value = ns
        return ns
    # unquote/unquote-splicing are resolved by the reader; they only need
    # to exist as (unbound) Vars
    Var.intern_unbound(core_ns_sym, sym.symbol("unquote"))
    Var.intern_unbound(core_ns_sym, sym.symbol("unquote-splicing"))
    Var.intern(
        core_ns_sym, sym.symbol("in-ns"), in_ns, meta=lmap.map({_REDEF_META_KEY: True})
    )
    Var.intern(
        core_ns_sym,
        sym.symbol(_PRINT_GENERATED_PY_VAR_NAME),
        False,
        dynamic=True,
        meta=lmap.map({_PRIVATE_META_KEY: True}),
    )
    Var.intern(
        core_ns_sym,
        sym.symbol(_GENERATED_PYTHON_VAR_NAME),
        "",
        dynamic=True,
        meta=lmap.map({_PRIVATE_META_KEY: True}),
    )
    # Dynamic Vars for controlling printing
    Var.intern(
        core_ns_sym, sym.symbol(_PRINT_DUP_VAR_NAME), lobj.PRINT_DUP, dynamic=True
    )
    Var.intern(
        core_ns_sym, sym.symbol(_PRINT_LENGTH_VAR_NAME), lobj.PRINT_LENGTH, dynamic=True
    )
    Var.intern(
        core_ns_sym, sym.symbol(_PRINT_LEVEL_VAR_NAME), lobj.PRINT_LEVEL, dynamic=True
    )
    Var.intern(
        core_ns_sym, sym.symbol(_PRINT_META_VAR_NAME), lobj.PRINT_META, dynamic=True
    )
    Var.intern(
        core_ns_sym,
        sym.symbol(_PRINT_READABLY_VAR_NAME),
        lobj.PRINT_READABLY,
        dynamic=True,
    )
def intern(
    ns: sym.Symbol, name: sym.Symbol, val, dynamic: bool = False, meta=None
) -> "Var":
    """Intern the value bound to the symbol `name` in namespace `ns`,
    creating the namespace if it does not exist. The Var's root value
    is set to val."""
    var_ns = Namespace.get_or_create(ns)
    var = var_ns.intern(name, Var(var_ns, name, dynamic=dynamic, meta=meta))
    var.root = val
    return var
def intern_unbound(
    ns: sym.Symbol, name: sym.Symbol, dynamic: bool = False, meta=None
) -> "Var":
    """Create a new unbound `Var` instance to the symbol `name` in namespace `ns`,
    creating the namespace if it does not exist. No root value is set."""
    var_ns = Namespace.get_or_create(ns)
    return var_ns.intern(name, Var(var_ns, name, dynamic=dynamic, meta=meta))
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.