code
string | signature
string | docstring
string | loss_without_docstring
float64 | loss_with_docstring
float64 | factor
float64 |
|---|---|---|---|---|---|
# If this is "import module", fromlist is None.
# If this this is "from module import a, b fromlist will be ('a', 'b').
fromlist = stack.pop().arg
# level argument to __import__. Should be 0, 1, or 2.
level = stack.pop().arg
module = instr.arg
if fromlist is None: # Regular import.
attr_loads = _pop_import_LOAD_ATTRs(module, queue)
store = queue.popleft()
# There are two cases where we should emit an alias:
# import a as <anything but a>
# import a.b.c as <anything (including a)>
if attr_loads or module.split('.')[0] != store.arg:
asname = store.arg
else:
asname = None
body.append(
ast.Import(
names=[
ast.alias(
name=module,
asname=(asname),
),
],
level=level,
),
)
return
elif fromlist == ('*',): # From module import *.
expect(queue.popleft(), instrs.IMPORT_STAR, "after IMPORT_NAME")
body.append(
ast.ImportFrom(
module=module,
names=[ast.alias(name='*', asname=None)],
level=level,
),
)
return
# Consume a pair of IMPORT_FROM, STORE_NAME instructions for each entry in
# fromlist.
names = list(map(make_importfrom_alias(queue, body, context), fromlist))
body.append(ast.ImportFrom(module=module, names=names, level=level))
# Remove the final POP_TOP of the imported module.
expect(queue.popleft(), instrs.POP_TOP, "after 'from import'")
|
def _process_instr_import_name(instr, queue, stack, body, context)
|
Process an IMPORT_NAME instruction.
Side Effects
------------
Pops two instructions from `stack`.
Consumes instructions from `queue` to the end of the import statement.
Appends an ast.Import or ast.ImportFrom node to `body`.
| 4.968962
| 4.881598
| 1.017897
|
popped = popwhile(is_a(instrs.LOAD_ATTR), queue, side='left')
if popped:
expected = module_name.split('.', maxsplit=1)[1]
actual = '.'.join(map(op.attrgetter('arg'), popped))
if expected != actual:
raise DecompilationError(
"Decompiling import of module %s, but LOAD_ATTRS imply %s" % (
expected, actual,
)
)
return popped
|
def _pop_import_LOAD_ATTRs(module_name, queue)
|
Pop LOAD_ATTR instructions for an import of the form::
import a.b.c.d as d
which should generate bytecode like this::
1 0 LOAD_CONST 0 (0)
3 LOAD_CONST 1 (None)
6 IMPORT_NAME 0 (a.b.c.d)
9 LOAD_ATTR 1 (b)
12 LOAD_ATTR 2 (c)
15 LOAD_ATTR 3 (d)
18 STORE_NAME 3 (d)
| 6.624353
| 8.55617
| 0.774219
|
import_from, store = queue.popleft(), queue.popleft()
expect(import_from, instrs.IMPORT_FROM, "after IMPORT_NAME")
if not import_from.arg == name:
raise DecompilationError(
"IMPORT_FROM name mismatch. Expected %r, but got %s." % (
name, import_from,
)
)
return ast.alias(
name=name,
asname=store.arg if store.arg != name else None,
)
|
def make_importfrom_alias(queue, body, context, name)
|
Make an ast.alias node for the names list of an ast.ImportFrom.
Parameters
----------
queue : deque
Instruction Queue
body : list
Current body.
context : DecompilationContext
name : str
Expected name of the IMPORT_FROM node to be popped.
Returns
-------
alias : ast.alias
Side Effects
------------
Consumes IMPORT_FROM and STORE_NAME instructions from queue.
| 5.380363
| 4.482185
| 1.200388
|
assert stack, "Empty stack before MAKE_FUNCTION."
prev = stack[-1]
expect(prev, instrs.LOAD_CONST, "before MAKE_FUNCTION")
stack.append(instr)
if is_lambda_name(prev.arg):
return
return context.update(
make_function_context=MakeFunctionContext(
closure=isinstance(instr, instrs.MAKE_CLOSURE),
)
)
|
def _make_function(instr, queue, stack, body, context)
|
Set a make_function_context, then push onto the stack.
| 7.924286
| 6.388716
| 1.240357
|
# The RHS expression is on the stack; decompile it before the targets.
value = make_expr(stack)
# Make assignment targets.
# If there are multiple assignments (e.g. 'a = b = c'),
# each LHS expression except the last is preceded by a DUP_TOP instruction.
# Thus, we make targets until we don't see a DUP_TOP, and then make one
# more.
targets = []
while isinstance(instr, instrs.DUP_TOP):
    targets.append(make_assign_target(queue.popleft(), queue, stack))
    instr = queue.popleft()
targets.append(make_assign_target(instr, queue, stack))
return ast.Assign(targets=targets, value=value)
|
def make_assignment(instr, queue, stack)
|
Make an ast.Assign node.
| 4.91423
| 4.827687
| 1.017926
|
# Pop everything up to (but not including) the jump target of SETUP_WITH;
# those instructions form the with-block body.
# NOTE(review): this applies `op.is_not` to a single argument, so `op` is
# presumably a curried operator module (stdlib operator.is_not is binary)
# -- confirm which `op` this file imports.
body_instrs = popwhile(op.is_not(setup_with_instr.arg), queue, side='left')
# Last two instructions should always be POP_BLOCK, LOAD_CONST(None).
# These don't correspond to anything in the AST, so remove them here.
load_none = body_instrs.pop()
expect(load_none, instrs.LOAD_CONST, "at end of with-block")
pop_block = body_instrs.pop()
expect(pop_block, instrs.POP_BLOCK, "at end of with-block")
if load_none.arg is not None:
    raise DecompilationError(
        "Expected LOAD_CONST(None), but got "
        "%r instead" % (load_none)
    )
# Target of the setup_with should be a WITH_CLEANUP instruction followed by
# an END_FINALLY. Neither of these correspond to anything in the AST.
with_cleanup = queue.popleft()
expect(with_cleanup, instrs.WITH_CLEANUP, "at end of with-block")
end_finally = queue.popleft()
expect(end_finally, instrs.END_FINALLY, "at end of with-block")
return body_instrs
|
def pop_with_body_instrs(setup_with_instr, queue)
|
Pop instructions from `queue` that form the body of a with block.
| 3.898503
| 3.93956
| 0.989578
|
context_expr = make_expr(stack)
# This is a POP_TOP for just "with <expr>:".
# This is a STORE_NAME(name) for "with <expr> as <name>:".
as_instr = queue.popleft()
if isinstance(as_instr, (instrs.STORE_FAST,
instrs.STORE_NAME,
instrs.STORE_DEREF,
instrs.STORE_GLOBAL)):
return ast.withitem(
context_expr=context_expr,
optional_vars=make_assign_target(as_instr, queue, stack),
)
elif isinstance(as_instr, instrs.POP_TOP):
return ast.withitem(context_expr=context_expr, optional_vars=None)
else:
raise DecompilationError(
"Don't know how to make withitem from %s" % as_instr,
)
|
def make_withitem(queue, stack)
|
Make an ast.withitem node.
| 3.330421
| 3.215477
| 1.035747
|
# Instructions from start until GET_ITER are the builders for the iterator
# expression.
iterator_expr = make_expr(
    popwhile(not_a(instrs.GET_ITER), loop_body_instrs, side='left')
)
# Next is the GET_ITER instruction, which we don't need.
loop_body_instrs.popleft()
# Next is FOR_ITER, which is the jump target for Continue nodes.
top_of_loop = loop_body_instrs.popleft()
# This can be a STORE_* or an UNPACK_SEQUENCE followed by some number of
# stores.  A fresh, empty stack is passed for the target builders.
target = make_assign_target(
    loop_body_instrs.popleft(),
    loop_body_instrs,
    stack=[],
)
body, orelse_body = make_loop_body_and_orelse(
    top_of_loop, loop_body_instrs, else_body_instrs, context
)
return ast.For(
    target=target,
    iter=iterator_expr,
    body=body,
    orelse=orelse_body,
)
|
def make_for_loop(loop_body_instrs, else_body_instrs, context)
|
Make an ast.For node.
| 5.431411
| 5.119592
| 1.060907
|
# Remove the JUMP_ABSOLUTE and POP_BLOCK instructions at the bottom of the
# loop.
body_instrs.pop()
body_instrs.pop()
body = instrs_to_body(body_instrs, context.update(top_of_loop=top_of_loop))
if else_instrs:
else_body = instrs_to_body(else_instrs, context)
else:
else_body = []
return body, else_body
|
def make_loop_body_and_orelse(top_of_loop, body_instrs, else_instrs, context)
|
Make body and orelse lists for a for/while loop whose first instruction is
`top_of_loop`.
Parameters
----------
top_of_loop : Instruction
The first instruction of the loop. For a for-loop, this should always be
a FOR_ITER. For a while loop, it's the first instruction of the stack
builders for the loop test expression.
body_instrs : deque
Queue of Instructions that form the body of the loop. The last two
elements of body_instrs should be a JUMP_ABSOLUTE to `top_of_loop` and
a POP_BLOCK.
else_instrs : deque
Queue of Instructions that form the else block of the loop. Should be
an empty deque if there is no else block.
context : DecompilationContext
Returns
-------
body : list[ast.AST]
List of ast nodes forming the loop body.
orelse_body : list[ast.AST]
List of ast nodes forming the else-block body.
| 2.724198
| 2.521681
| 1.08031
|
# Loop entry point; forwarded to make_loop_body_and_orelse as the jump
# target for Continue handling.
top_of_loop = test_and_body_instrs[0]
# The popped elements are the stack_builders for the loop test expression.
# The top of the loop_body_instrs is either a POP_JUMP_IF_TRUE or a
# POP_JUMP_IF_FALSE.
test, body_instrs = make_while_loop_test_expr(test_and_body_instrs)
body, orelse_body = make_loop_body_and_orelse(
    top_of_loop, body_instrs, else_body_instrs, context,
)
# while-else blocks are not yet supported or handled.
return ast.While(test=test, body=body, orelse=orelse_body)
|
def make_while_loop(test_and_body_instrs, else_body_instrs, context)
|
Make an ast.While node.
Parameters
----------
test_and_body_instrs : deque
Queue of instructions forming the loop test expression and body.
else_body_instrs : deque
Queue of instructions forming the else block of the loop.
context : DecompilationContext
| 4.350267
| 4.278111
| 1.016866
|
# Grab everything from left side of the queue until the jump target of
# SETUP_LOOP.
# NOTE(review): single-argument `op.is_not` implies a curried operator
# module; stdlib operator.is_not is binary -- confirm.
body = popwhile(op.is_not(setup_loop_instr.arg), queue, side='left')
# Anything after the last POP_BLOCK instruction is the else-block.
else_body = popwhile(not_a(instrs.POP_BLOCK), body, side='right')
# A well-formed loop body must end with JUMP_ABSOLUTE(top), POP_BLOCK.
jump_to_top, pop_block = body[-2], body[-1]
if not isinstance(jump_to_top, instrs.JUMP_ABSOLUTE):
    raise DecompilationError(
        "Penultimate instruction of loop body is "
        "%s, not JUMP_ABSOLUTE." % jump_to_top,
    )
if not isinstance(pop_block, instrs.POP_BLOCK):
    raise DecompilationError(
        "Last instruction of loop body is "
        "%s, not pop_block." % pop_block,
    )
# The back-jump's target tells us the loop kind: jumping to a FOR_ITER
# means a for-loop, anything else is a while-loop.
loop_expr = jump_to_top.arg
if isinstance(loop_expr, instrs.FOR_ITER):
    return 'for', body, else_body
return 'while', body, else_body
|
def pop_loop_instrs(setup_loop_instr, queue)
|
Determine whether setup_loop_instr is setting up a for-loop or a
while-loop. Then pop the loop instructions from queue.
The easiest way to tell the difference is to look at the target of the
JUMP_ABSOLUTE instruction at the end of the loop. If it jumps to a
FOR_ITER, then this is a for-loop. Otherwise it's a while-loop.
The jump we want to inspect is the first JUMP_ABSOLUTE instruction prior to
the jump target of `setup_loop_instr`.
Parameters
----------
setup_loop_instr : instructions.SETUP_LOOP
First instruction of the loop being parsed.
queue : collections.deque
Queue of unprocessed instructions.
Returns
-------
loop_type : str, {'for', 'while'}
The kind of loop being constructed.
loop_instrs : deque
The instructions forming body of the loop.
else_instrs : deque
The instructions forming the else-block of the loop.
Side Effects
------------
Pops all returned instructions from `queue`.
| 4.482406
| 4.292095
| 1.04434
|
# Dispatch to the type-specific handler first.
base_expr = _make_expr_internal(toplevel, stack_builders)
if not toplevel._next_target_of:
    # Nothing jumps to `toplevel`, so it cannot be the tail of a
    # short-circuiting (and/or) expression.
    return base_expr
subexprs = deque([base_expr])
ops = deque([])
# Each jump targeting `toplevel` contributes one operand of a boolean
# expression; collect operands and their and/or ops right-to-left.
while stack_builders and stack_builders[-1] in toplevel._next_target_of:
    jump = stack_builders.pop()
    if not isinstance(jump, _BOOLOP_JUMP_TYPES):
        raise DecompilationError(
            "Don't know how to decompile %s inside expression." % jump,
        )
    subexprs.appendleft(make_expr(stack_builders))
    ops.appendleft(_BOOLOP_JUMP_TO_AST_OP[type(jump)]())
if len(subexprs) <= 1:
    # _next_target_of was non-empty, so at least one jump had to match.
    raise DecompilationError(
        "Expected at least one JUMP instruction before expression."
    )
return normalize_boolop(make_boolop(subexprs, ops))
|
def _make_expr(toplevel, stack_builders)
|
Override the single-dispatched make_expr with wrapper logic for handling
short-circuiting expressions.
| 4.679602
| 4.70329
| 0.994963
|
# Right-fold: the first op combines the first expr with the BoolOp built
# from the remaining exprs/ops.
if len(op_types) > 1:
    return ast.BoolOp(
        op=op_types.popleft(),
        values=[exprs.popleft(), make_boolop(exprs, op_types)],
    )
# Base case: exactly one op left must combine exactly two exprs.
assert len(exprs) == 2
return ast.BoolOp(op=op_types.popleft(), values=list(exprs))
|
def make_boolop(exprs, op_types)
|
Parameters
----------
exprs : deque
op_types : deque[{ast.And, ast.Or}]
| 2.463527
| 2.253188
| 1.093352
|
optype = expr.op
newvalues = []
for subexpr in expr.values:
if not isinstance(subexpr, ast.BoolOp):
newvalues.append(subexpr)
elif type(subexpr.op) != type(optype):
newvalues.append(normalize_boolop(subexpr))
else:
# Normalize subexpression, then inline its values into the
# top-level subexpr.
newvalues.extend(normalize_boolop(subexpr).values)
return ast.BoolOp(op=optype, values=newvalues)
|
def normalize_boolop(expr)
|
Normalize a boolop by folding together nested And/Or exprs.
| 3.266417
| 3.084426
| 1.059003
|
out = []
for _ in range(count):
value = make_expr(stack_builders)
load_kwname = stack_builders.pop()
if not isinstance(load_kwname, instrs.LOAD_CONST):
raise DecompilationError(
"Expected a LOAD_CONST, but got %r" % load_kwname
)
if not isinstance(load_kwname.arg, str):
raise DecompilationError(
"Expected LOAD_CONST of a str, but got %r." % load_kwname,
)
out.append(ast.keyword(arg=load_kwname.arg, value=value))
out.reverse()
return out
|
def make_call_keywords(stack_builders, count)
|
Make the keywords entry for an ast.Call node.
| 2.995651
| 2.917457
| 1.026802
|
out = [make_expr(stack_builders) for _ in range(count)]
out.reverse()
return out
|
def make_call_positionals(stack_builders, count)
|
Make the args entry for an ast.Call node.
| 5.132488
| 4.280443
| 1.199055
|
exprs = [make_expr(stack_builders) for _ in range(count)]
# Elements come off the stack in reverse source order (last element
# first), so reverse to restore left-to-right order.
exprs.reverse()
return exprs
|
def make_exprs(stack_builders, count)
|
Make elements of set/list/tuple literal.
| 4.681255
| 4.180583
| 1.119761
|
# A BUILD_MAP with a nonzero arg should have gone through the STORE_MAP
# path; reaching here with entries is a decompiler invariant violation.
if toplevel.arg:
    raise DecompilationError(
        "make_expr() called with nonzero BUILD_MAP arg %d" % toplevel.arg
    )
if stack_builders:
    raise DecompilationError(
        "Unexpected stack_builders for BUILD_MAP(0): %s" % stack_builders
    )
# An empty dict literal: {}.
return ast.Dict(keys=[], values=[])
|
def _make_expr_empty_dict(toplevel, stack_builders)
|
This should only be hit for empty dicts. Anything else should hit the
STORE_MAP handler instead.
| 6.106355
| 5.43839
| 1.122824
|
assert isinstance(stack_builders[-1], instrs.STORE_MAP)
to_consume = 0
for instr in reversed(stack_builders):
if isinstance(instr, instrs.STORE_MAP):
# NOTE: This branch should always be hit on the first iteration.
to_consume += 1
elif isinstance(instr, instrs.BUILD_MAP):
to_consume -= instr.arg
if to_consume <= 0:
return instr
else:
raise DecompilationError(
"Couldn't find BUILD_MAP for last element of %s." % stack_builders
)
|
def find_build_map(stack_builders)
|
Find the BUILD_MAP instruction for which the last element of
``stack_builders`` is a store.
| 4.503932
| 3.778753
| 1.191909
|
keys = []
values = []
for _ in range(build_instr.arg):
popped = builders.pop()
if not isinstance(popped, instrs.STORE_MAP):
raise DecompilationError(
"Expected a STORE_MAP but got %s" % popped
)
keys.append(make_expr(builders))
values.append(make_expr(builders))
# Keys and values are emitted in reverse order of how they appear in the
# AST.
keys.reverse()
values.reverse()
return keys, values
|
def _make_dict_elems(build_instr, builders)
|
Return a list of keys and a list of values for the dictionary literal
generated by ``build_instr``.
| 4.20325
| 3.741064
| 1.123544
|
if not any(isinstance(elt, ast.Slice) for elt in node.elts):
return ast.Index(value=node)
return ast.ExtSlice(
[
# Wrap non-Slice nodes in Index nodes.
elt if isinstance(elt, ast.Slice) else ast.Index(value=elt)
for elt in node.elts
]
)
|
def normalize_tuple_slice(node)
|
Normalize an ast.Tuple node representing the internals of a slice.
Returns the node wrapped in an ast.Index.
Returns an ExtSlice node built from the tuple elements if there are any
slices.
| 3.451707
| 2.902321
| 1.189292
|
def _handler(toplevel, stack_builders):
right = make_expr(stack_builders)
left = make_expr(stack_builders)
return ast.BinOp(left=left, op=nodetype(), right=right)
return _handler
|
def _binop_handler(nodetype)
|
Factory function for binary operator handlers.
| 5.10007
| 4.83387
| 1.05507
|
# Decorator applications appear as trailing CALL_FUNCTIONs; peel them off
# so the tail of function_builders is LOAD_CONST(code), LOAD_CONST(name),
# MAKE_FUNCTION/MAKE_CLOSURE.
decorator_calls = deque()
while isinstance(function_builders[-1], instrs.CALL_FUNCTION):
    decorator_calls.appendleft(function_builders.pop())
*builders, load_code_instr, load_name_instr, make_function_instr = (
    function_builders
)
_check_make_function_instrs(
    load_code_instr, load_name_instr, make_function_instr,
)
co = load_code_instr.arg
name = load_name_instr.arg
args, kwonly, varargs, varkwargs = paramnames(co)
# Convert default and annotation builders to AST nodes.
defaults, kw_defaults, annotations = make_defaults_and_annotations(
    make_function_instr,
    builders,
)
# Convert decorator function builders. The stack is in reverse order.
decorators = [make_expr(builders) for _ in decorator_calls]
decorators.reverse()
if closure:
    # There should be a tuple of closure cells still on the stack here.
    # These don't appear in the AST, but we need to consume them to ensure
    # correctness down the line.
    closure_cells = make_closure_cells(builders)  # noqa
# We should have consumed all our builders by this point.
if builders:
    raise DecompilationError(
        "Unexpected leftover builders for %s: %s." % (
            make_function_instr, builders
        )
    )
# NOTE(review): 'body_code' is not a standard ast.FunctionDef field; it is
# presumably consumed by a later project-specific pass -- confirm.
return ast.FunctionDef(
    body_code=co,
    name=name.split('.')[-1],
    args=make_function_arguments(
        args,
        kwonly,
        varargs,
        varkwargs,
        defaults,
        kw_defaults,
        annotations,
    ),
    body=pycode_to_body(co, DecompilationContext(in_function_block=True)),
    decorator_list=decorators,
    returns=annotations.get('return'),
)
|
def make_function(function_builders, *, closure)
|
Construct a FunctionDef AST node from a sequence of the form:
LOAD_CLOSURE, N times (when handling MAKE_CLOSURE)
BUILD_TUPLE(N) (when handling MAKE_CLOSURE)
<decorator builders> (optional)
<default builders>, (optional)
<annotation builders> (optional)
LOAD_CONST(<tuple of annotated names>) (optional)
LOAD_CONST(code),
LOAD_CONST(name),
MAKE_FUNCTION | MAKE_CLOSURE
<decorator calls> (optional)
| 4.803061
| 4.720316
| 1.017529
|
# Assemble the ast.arguments node. Annotations are looked up by name, so
# unannotated parameters come back as None.
return ast.arguments(
    args=[ast.arg(arg=a, annotation=annotations.get(a)) for a in args],
    kwonlyargs=[
        ast.arg(arg=a, annotation=annotations.get(a)) for a in kwonly
    ],
    defaults=defaults,
    # kw_defaults must align index-for-index with kwonlyargs; dict.get
    # yields None for kwonly args without a default.
    kw_defaults=list(map(kw_defaults.get, kwonly)),
    vararg=None if varargs is None else ast.arg(
        arg=varargs, annotation=annotations.get(varargs),
    ),
    kwarg=None if varkwargs is None else ast.arg(
        arg=varkwargs, annotation=annotations.get(varkwargs)
    ),
)
|
def make_function_arguments(args,
kwonly,
varargs,
varkwargs,
defaults,
kw_defaults,
annotations)
|
Make an ast.arguments from the args parsed out of a code object.
| 1.786047
| 1.726726
| 1.034355
|
globals_ = sorted(set(
i.arg for i in code_instrs if isinstance(i, instrs.STORE_GLOBAL)
))
nonlocals = sorted(set(
i.arg for i in code_instrs
if isinstance(i, instrs.STORE_DEREF) and i.vartype == 'free'
))
out = []
if globals_:
out.append(ast.Global(names=globals_))
if nonlocals:
out.append(ast.Nonlocal(names=nonlocals))
return out
|
def make_global_and_nonlocal_decls(code_instrs)
|
Find all STORE_GLOBAL and STORE_DEREF instructions in `instrs` and convert
them into a canonical list of `ast.Global` and `ast.Nonlocal` declarations.
| 2.86683
| 2.488163
| 1.152188
|
# Integer counts.
n_defaults, n_kwonlydefaults, n_annotations = unpack_make_function_arg(
make_function_instr.arg
)
if n_annotations:
# TOS should be a tuple of annotation names.
load_annotation_names = builders.pop()
annotations = dict(zip(
reversed(load_annotation_names.arg),
(make_expr(builders) for _ in range(n_annotations - 1))
))
else:
annotations = {}
kwonlys = {}
while n_kwonlydefaults:
default_expr = make_expr(builders)
key_instr = builders.pop()
if not isinstance(key_instr, instrs.LOAD_CONST):
raise DecompilationError(
"kwonlydefault key is not a LOAD_CONST: %s" % key_instr
)
if not isinstance(key_instr.arg, str):
raise DecompilationError(
"kwonlydefault key builder is not a "
"'LOAD_CONST of a string: %s" % key_instr
)
kwonlys[key_instr.arg] = default_expr
n_kwonlydefaults -= 1
defaults = make_exprs(builders, n_defaults)
return defaults, kwonlys, annotations
|
def make_defaults_and_annotations(make_function_instr, builders)
|
Get the AST expressions corresponding to the defaults, kwonly defaults, and
annotations for a function created by `make_function_instr`.
| 3.489999
| 3.392494
| 1.028741
|
# Validate load_code_instr.
if not isinstance(load_code_instr, instrs.LOAD_CONST):
    raise TypeError(
        "make_function expected 'load_code_instr` to be a "
        "LOAD_CONST, but got %s" % load_code_instr,
    )
if not isinstance(load_code_instr.arg, types.CodeType):
    raise TypeError(
        "make_function expected load_code_instr "
        "to load a code object, but got %s" % load_code_instr.arg,
    )
# Validate load_name_instr
if not isinstance(load_name_instr, instrs.LOAD_CONST):
    # Fixed: this message previously interpolated load_code_instr,
    # reporting the wrong instruction.
    raise TypeError(
        "make_function expected 'load_name_instr` to be a "
        "LOAD_CONST, but got %s" % load_name_instr,
    )
if not isinstance(load_name_instr.arg, str):
    raise TypeError(
        "make_function expected load_name_instr "
        "to load a string, but got %r instead" % load_name_instr.arg
    )
# This is an endswith rather than '==' because the arg is the
# fully-qualified name.
is_lambda = is_lambda_name(load_name_instr.arg)
if expect_lambda and not is_lambda:
    raise ValueError(
        "Expected to make a function named <lambda>, but "
        "got %r instead." % load_name_instr.arg
    )
if not expect_lambda and is_lambda:
    raise ValueError("Unexpectedly received lambda function.")
# Validate make_function_instr
if not isinstance(make_function_instr, (instrs.MAKE_FUNCTION,
                                        instrs.MAKE_CLOSURE)):
    # Fixed: missing space between "MAKE_CLOSURE" and "instruction" in
    # the concatenated message.
    raise TypeError(
        "make_function expected a MAKE_FUNCTION or MAKE_CLOSURE "
        "instruction, but got %s instead." % make_function_instr
    )
|
def _check_make_function_instrs(load_code_instr,
load_name_instr,
make_function_instr,
*,
expect_lambda=False)
|
Validate the instructions passed to a make_function call.
| 2.137845
| 2.087266
| 1.024232
|
needed = instr.stack_effect
if needed >= 0:
raise DecompilationError(
"%s is does not have a negative stack effect" % instr
)
for popcount, to_pop in enumerate(reversed(stack), start=1):
needed += to_pop.stack_effect
if not needed:
break
else:
raise DecompilationError(
"Reached end of stack without finding inputs to %s" % instr,
)
popped = stack[-popcount:]
stack[:] = stack[:-popcount]
return popped
|
def pop_arguments(instr, stack)
|
Pop instructions off `stack` until we pop all instructions that will
produce values popped by `instr`.
| 5.361593
| 5.041705
| 1.063448
|
# The only acceptable leftover is a single LOAD_CONST(None) -- the value
# fed to the implicit RETURN_VALUE at the end of a module/class block.
if (len(stack) != 1
        or not isinstance(stack[0], instrs.LOAD_CONST)
        or stack[0].arg is not None):
    raise DecompilationError(
        "Reached end of non-function code "
        "block with unexpected stack: %s." % stack
    )
|
def _check_stack_for_module_return(stack)
|
Verify that the stack is in the expected state before the dummy
RETURN_VALUE instruction of a module or class.
| 6.188246
| 6.101007
| 1.014299
|
if not isinstance(instr, expected):
raise DecompilationError(
"Expected a {expected} instruction {context}. Got {instr}.".format(
instr=instr, expected=expected, context=context,
)
)
return instr
|
def expect(instr, expected, context)
|
Check that an instruction is of the expected type.
| 4.318481
| 3.61372
| 1.195024
|
if side not in ('left', 'right'):
raise ValueError("`side` must be one of 'left' or 'right'")
out = deque()
if side == 'left':
popnext = queue.popleft
pushnext = out.append
nextidx = 0
else:
popnext = queue.pop
pushnext = out.appendleft
nextidx = -1
while queue:
if not cond(queue[nextidx]):
break
pushnext(popnext())
return out
|
def popwhile(cond, queue, *, side)
|
Pop elements off a queue while `cond(nextelem)` is True.
Parameters
----------
cond : predicate
queue : deque
side : {'left', 'right'}
Returns
-------
popped : deque
Examples
--------
>>> from collections import deque
>>> d = deque([1, 2, 3, 2, 1])
>>> popwhile(lambda x: x < 3, d, side='left')
deque([1, 2])
>>> d
deque([3, 2, 1])
>>> popwhile(lambda x: x < 3, d, side='right')
deque([2, 1])
>>> d
deque([3])
| 2.590564
| 3.073323
| 0.84292
|
typename = type_.__name__
if typename.endswith('x'):
typename += 'es'
elif not typename.endswith('s'):
typename += 's'
if __doc__ is None:
__doc__ = _format_constant_docstring(type_)
return type(
"overloaded_" + typename,
(_ConstantTransformerBase,), {
'_type': type_,
'__doc__': __doc__,
},
)
|
def overloaded_constants(type_, __doc__=None)
|
A factory for transformers that apply functions to literals.
Parameters
----------
type_ : type
The type to overload.
__doc__ : str, optional
Docstring for the generated transformer.
Returns
-------
transformer : subclass of CodeTransformer
A new code transformer class that will overload the provided
literal types.
| 3.85053
| 3.94956
| 0.974926
|
typename = type_.__name__
instrname = 'BUILD_' + typename.upper()
dict_ = OrderedDict(
__doc__=dedent(
.format(name=instrname)
)
)
try:
build_instr = getattr(instructions, instrname)
except AttributeError:
raise TypeError("type %s is not buildable" % typename)
if add_name is not None:
try:
add_instr = getattr(
instructions,
'_'.join((typename, add_name)).upper(),
)
except AttributeError:
TypeError("type %s is not addable" % typename)
dict_['_start_comprehension'] = pattern(
build_instr, matchany[var], add_instr,
)(_start_comprehension)
dict_['_return_value'] = pattern(
instructions.RETURN_VALUE, startcodes=(IN_COMPREHENSION,),
)(_return_value)
else:
add_instr = None
dict_['_build'] = pattern(build_instr)(_build)
if not typename.endswith('s'):
typename = typename + 's'
return type(
'overloaded_' + typename,
(overloaded_constants(type_),),
dict_,
)
|
def overloaded_build(type_, add_name=None)
|
Factory for constant transformers that apply to a given
build instruction.
Parameters
----------
type_ : type
The object type to overload the construction of. This must be one of
"buildable" types, or types with a "BUILD_*" instruction.
add_name : str, optional
The suffix of the instruction that adds elements to the collection.
For example: 'add' or 'append'
Returns
-------
transformer : subclass of CodeTransformer
A new code transformer class that will overload the provided
literal types.
| 4.982973
| 5.047547
| 0.987207
|
flags = co.co_flags
varnames = co.co_varnames
argcount, kwonlyargcount = co.co_argcount, co.co_kwonlyargcount
total = argcount + kwonlyargcount
args = varnames[:argcount]
kwonlyargs = varnames[argcount:total]
varargs, varkwargs = None, None
if flags & Flag.CO_VARARGS:
varargs = varnames[total]
total += 1
if flags & Flag.CO_VARKEYWORDS:
varkwargs = varnames[total]
return args, kwonlyargs, varargs, varkwargs
|
def paramnames(co)
|
Get the parameter names from a pycode object.
Returns a 4-tuple of (args, kwonlyargs, varargs, varkwargs).
varargs and varkwargs will be None if the function doesn't take *args or
**kwargs, respectively.
| 2.145183
| 2.050498
| 1.046176
|
len_cellvars = len(cellvars)
if arg < len_cellvars:
return cellvars[arg]
return freevars[arg - len_cellvars]
|
def _freevar_argname(arg, cellvars, freevars)
|
Get the name of the variable manipulated by a 'uses_free' instruction.
Parameters
----------
arg : int
The raw argument to a uses_free instruction that we want to resolve to
a name.
cellvars : list[str]
The co_cellvars of the function for which we want to resolve `arg`.
freevars : list[str]
The co_freevars of the function for which we want to resolve `arg`.
Notes
-----
From https://docs.python.org/3.5/library/dis.html#opcode-LOAD_CLOSURE:
The name of the variable is co_cellvars[i] if i is less than the length
of co_cellvars. Otherwise it is co_freevars[i - len(co_cellvars)]
| 2.709688
| 3.220615
| 0.841357
|
# Forward the keyword-friendly parameters to the positional-only CodeType
# constructor, in its declared order.
# NOTE(review): this order matches the pre-3.8 CodeType signature (no
# posonlyargcount slot) -- confirm the supported Python versions.
return CodeType(
    argcount,
    kwonlyargcount,
    nlocals,
    stacksize,
    flags,
    codestring,
    constants,
    names,
    varnames,
    filename,
    name,
    firstlineno,
    lnotab,
    freevars,
    cellvars,
)
|
def pycode(argcount,
kwonlyargcount,
nlocals,
stacksize,
flags,
codestring,
constants,
names,
varnames,
filename,
name,
firstlineno,
lnotab,
freevars=(),
cellvars=())
|
types.CodeType constructor that accepts keyword arguments.
See Also
--------
types.CodeType
| 1.451939
| 1.866715
| 0.777804
|
# Make it sparse to instrs[n] is the instruction at bytecode[n]
sparse_instrs = tuple(
_sparse_args(
Instruction.from_opcode(
b.opcode,
Instruction._no_arg if b.arg is None else _RawArg(b.arg),
) for b in Bytecode(co)
),
)
for idx, instr in enumerate(sparse_instrs):
if instr is None:
# The sparse value
continue
if instr.absjmp:
instr.arg = sparse_instrs[instr.arg]
elif instr.reljmp:
instr.arg = sparse_instrs[instr.arg + idx + argsize + 1]
elif isinstance(instr, LOAD_CONST):
instr.arg = co.co_consts[instr.arg]
elif instr.uses_name:
instr.arg = co.co_names[instr.arg]
elif instr.uses_varname:
instr.arg = co.co_varnames[instr.arg]
elif instr.uses_free:
instr.arg = _freevar_argname(
instr.arg,
co.co_freevars,
co.co_cellvars,
)
elif instr.have_arg and isinstance(instr.arg, _RawArg):
instr.arg = int(instr.arg)
flags = Flag.unpack(co.co_flags)
has_vargs = flags['CO_VARARGS']
has_kwargs = flags['CO_VARKEYWORDS']
# Here we convert the varnames format into our argnames format.
paramnames = co.co_varnames[
:(co.co_argcount +
co.co_kwonlyargcount +
has_vargs +
has_kwargs)
]
# We start with the positional arguments.
new_paramnames = list(paramnames[:co.co_argcount])
# Add *args next.
if has_vargs:
new_paramnames.append('*' + paramnames[-1 - has_kwargs])
# Add positional only arguments next.
new_paramnames.extend(paramnames[
co.co_argcount:co.co_argcount + co.co_kwonlyargcount
])
# Add **kwargs last.
if has_kwargs:
new_paramnames.append('**' + paramnames[-1])
return cls(
filter(bool, sparse_instrs),
argnames=new_paramnames,
cellvars=co.co_cellvars,
freevars=co.co_freevars,
name=co.co_name,
filename=co.co_filename,
firstlineno=co.co_firstlineno,
lnotab={
lno: sparse_instrs[off] for off, lno in findlinestarts(co)
},
flags=flags,
)
|
def from_pycode(cls, co)
|
Create a Code object from a python code object.
Parameters
----------
co : CodeType
The python code object.
Returns
-------
code : Code
The codetransformer Code object.
| 3.374465
| 3.38179
| 0.997834
|
# Cache the name tables; .index() below resolves each arg back to an
# integer offset for the emitted bytecode.
consts = self.consts
names = self.names
varnames = self.varnames
freevars = self.freevars
cellvars = self.cellvars
bc = bytearray()
for instr in self.instrs:
    bc.append(instr.opcode)  # Write the opcode byte.
    if isinstance(instr, LOAD_CONST):
        # Resolve the constant index.
        bc.extend(consts.index(instr.arg).to_bytes(argsize, 'little'))
    elif instr.uses_name:
        # Resolve the name index.
        bc.extend(names.index(instr.arg).to_bytes(argsize, 'little'))
    elif instr.uses_varname:
        # Resolve the local variable index.
        bc.extend(
            varnames.index(instr.arg).to_bytes(argsize, 'little'),
        )
    elif instr.uses_free:
        # uses_free is really "uses freevars **or** cellvars".
        try:
            # look for the name in cellvars
            bc.extend(
                cellvars.index(instr.arg).to_bytes(argsize, 'little'),
            )
        except ValueError:
            # fall back to freevars, incrementing the length of
            # cellvars.
            bc.extend(
                (freevars.index(instr.arg) + len(cellvars)).to_bytes(
                    argsize,
                    'little',
                )
            )
    elif instr.absjmp:
        # Resolve the absolute jump target.
        bc.extend(
            self.bytecode_offset(instr.arg).to_bytes(
                argsize,
                'little',
            ),
        )
    elif instr.reljmp:
        # Resolve the relative jump target.
        # We do this by subtracting the current instruction's
        # sparse index from the sparse index of the argument.
        # We then subtract argsize + 1 to account for the bytes the
        # current instruction takes up.
        bytecode_offset = self.bytecode_offset
        bc.extend((
            bytecode_offset(instr.arg) -
            bytecode_offset(instr) -
            argsize -
            1
        ).to_bytes(argsize, 'little',))
    elif instr.have_arg:
        # Write any other arg here.
        bc.extend(instr.arg.to_bytes(argsize, 'little'))
    elif WORDCODE:
        # with wordcode, all instructions are padded to 2 bytes
        bc.append(0)
# Rebuild the CodeType from the assembled bytecode and cached tables.
return CodeType(
    self.argcount,
    self.kwonlyargcount,
    len(varnames),
    self.stacksize,
    self.py_flags,
    bytes(bc),
    consts,
    names,
    varnames,
    self.filename,
    self.name,
    self.firstlineno,
    self.py_lnotab,
    freevars,
    cellvars,
)
|
def to_pycode(self)
|
Create a python code object from the more abstract
codetransfomer.Code object.
Returns
-------
co : CodeType
The python code object.
| 3.38934
| 3.303401
| 1.026015
|
def consts(self):
    """The constants referenced in this code object."""
    # Constants are not required to be hashable, so deduplicate with a
    # list membership test rather than a set.
    seen = []
    for instruction in self.instrs:
        if isinstance(instruction, LOAD_CONST):
            value = instruction.arg
            if value not in seen:
                seen.append(value)
    return tuple(seen)
| 4.375765
| 3.948324
| 1.108259
|
def names(self):
    """The names referenced in this code object.

    Names come from instructions like LOAD_GLOBAL or STORE_ATTR
    where the name of the global or attribute is needed at runtime.
    """
    # Collect unique names, then sort so the order is stable between
    # calls.
    unique = set()
    for instruction in self.instrs:
        if instruction.uses_name:
            unique.add(instruction.arg)
    return tuple(sorted(unique))
| 14.740063
| 11.485235
| 1.283392
|
def varnames(self):
    """The names of all of the local variables in this code object."""
    args = self._argnames
    # Locals that are not arguments, deduplicated and sorted so the
    # order is stable between calls; arguments always come first.
    extras = sorted({
        instruction.arg
        for instruction in self.instrs
        if instruction.uses_varname and instruction.arg not in args
    })
    return args + tuple(extras)
| 9.573593
| 9.090487
| 1.053144
|
def py_lnotab(self):
    """The encoded lnotab that python uses to compute when lines start.

    Note
    ----
    See Objects/lnotab_notes.txt in the cpython source for more details.
    """
    reverse_lnotab = reverse_dict(self.lnotab)
    py_lnotab = []
    prev_instr = 0
    prev_lno = self.firstlineno
    for addr, instr in enumerate(_sparse_args(self.instrs)):
        lno = reverse_lnotab.get(instr)
        if lno is None:
            # This instruction does not begin a new source line.
            continue
        delta = lno - prev_lno
        py_lnotab.append(addr - prev_instr)
        # Each lnotab pair can only encode a bounded line increment;
        # larger jumps are emitted as (0, increment) continuation pairs.
        py_lnotab.append(min(delta, max_lnotab_increment))
        delta -= max_lnotab_increment
        while delta > 0:
            py_lnotab.append(0)
            py_lnotab.append(min(delta, max_lnotab_increment))
            delta -= max_lnotab_increment
        prev_lno = lno
        prev_instr = addr
    return bytes(py_lnotab)
| 2.796721
| 2.809515
| 0.995446
|
def stacksize(self):
    """The maximum amount of stack space used by this code object."""
    # Track the running stack depth and remember the highest point;
    # the depth starts (and may end) at zero.
    depth = 0
    highest = 0
    for instruction in self.instrs:
        depth += instruction.stack_effect
        if depth > highest:
            highest = depth
    return highest
| 11.980453
| 10.839035
| 1.105306
|
def initialize_slot(obj, name, value):
    """Initalize an unitialized slot to a value.

    If there is already a value for this slot, this is a nop.

    Parameters
    ----------
    obj : immutable
        An immutable object.
    name : str
        The name of the slot to initialize.
    value : any
        The value to initialize the slot to.
    """
    # Only write when the slot has never been set; object_setattr
    # bypasses the instance's own (frozen) __setattr__.
    if hasattr(obj, name):
        return
    object_setattr(obj, name, value)
| 4.979186
| 7.701674
| 0.646507
|
def _create_init(name, slots, defaults):
    """Create the __init__ function for an immutable object.

    Parameters
    ----------
    name : str
        The name of the immutable class.
    slots : iterable of str
        The __slots__ field from the class.
    defaults : dict or None
        The default values for the arguments to __init__.

    Returns
    -------
    init : callable
        The __init__ function for the new immutable class.
    """
    if any(s.startswith('__') for s in slots):
        raise TypeError(
            "immutable classes may not have slots that start with '__'",
        )

    # If we have no defaults, ignore all of this.
    kwdefaults = None
    if defaults is not None:
        hit_default = False
        _defaults = []  # positional defaults
        kwdefaults = {}  # kwonly defaults
        kwdefs = False
        # NOTE(review): nesting below is reconstructed from a
        # whitespace-stripped dump — confirm against the original source.
        for s in slots:
            if s not in defaults and hit_default:
                raise SyntaxError(
                    'non-default argument follows default argument'
                )
            if not kwdefs:
                try:
                    # Try to grab the next default.
                    # Pop so that we know they were all consumed when we
                    # are done.
                    _defaults.append(defaults.pop(s))
                except KeyError:
                    # Not in the dict, we haven't hit any defaults yet.
                    pass
                else:
                    # We are now consuming default arguments.
                    hit_default = True
                if s.startswith('*'):
                    if s in defaults:
                        raise TypeError(
                            'cannot set default for var args or var kwargs',
                        )
                    if not s.startswith('**'):
                        kwdefs = True
            else:
                kwdefaults[s] = defaults.pop(s)
        if defaults:
            # We didn't consume all of the defaults.
            raise TypeError(
                'default value for non-existent argument%s: %s' % (
                    's' if len(defaults) > 1 else '',
                    ', '.join(starmap('{0}={1!r}'.format, defaults.items())),
                )
            )
        # cast back to tuples
        defaults = tuple(_defaults)

    if not slots:
        return _no_arg_init, ()

    ns = {'__initialize_slot': initialize_slot}
    # filter out lone star
    slotnames = tuple(filter(None, (s.strip('*') for s in slots)))
    # We are using exec here so that we can later inspect the call signature
    # of the __init__. This makes the positional vs keywords work as intended.
    # This is totally reasonable, no h8 m8!
    exec(
        'def __init__(_{name}__self, {args}): \n    {assign}'.format(
            name=name,
            args=', '.join(slots),
            assign='\n    '.join(
                map(
                    '__initialize_slot(_{1}__self, "{0}", {0})'.format,
                    slotnames,
                    repeat(name),
                ),
            ),
        ),
        ns,
    )
    init = ns['__init__']
    init.__defaults__ = defaults
    init.__kwdefaults__ = kwdefaults
    return init, slotnames
| 4.664849
| 1.006524
|
def _wrapinit(init):
    """Wrap an existing initialize function by thawing self for the duration
    of the init.

    Parameters
    ----------
    init : callable
        The user-provided init.

    Returns
    -------
    wrapped : callable
        The wrapped init method.
    """
    try:
        spec = getfullargspec(init)
    except TypeError:
        # we cannot preserve the type signature.
        def __init__(*args, **kwargs):
            self = args[0]
            __setattr__._initializing.add(self)
            init(*args, **kwargs)
            __setattr__._initializing.remove(self)
            _check_missing_slots(self)
        return __init__

    args = spec.args
    varargs = spec.varargs
    if not (args or varargs):
        raise TypeError(
            "%r must accept at least one positional argument for 'self'" %
            getattr(init, '__qualname__', getattr(init, '__name__', init)),
        )
    if not args:
        self = '%s[0]' % varargs
        forward = argspec = '*' + varargs
    else:
        self = args[0]
        forward = argspec = ', '.join(args)
    if args and varargs:
        forward = '%s, *%s' % (forward, spec.varargs)
        argspec = '%s, *%s' % (argspec, spec.varargs)
    if spec.kwonlyargs:
        forward = '%s, %s' % (
            forward,
            ', '.join(map('{0}={0}'.format, spec.kwonlyargs))
        )
        argspec = '%s,%s%s' % (
            argspec,
            '*, ' if not spec.varargs else '',
            ', '.join(spec.kwonlyargs),
        )
    if spec.varkw:
        forward = '%s, **%s' % (forward, spec.varkw)
        argspec = '%s, **%s' % (argspec, spec.varkw)

    ns = {
        '__init': init,
        '__initializing': __setattr__._initializing,
        '__check_missing_slots': _check_missing_slots,
    }
    # NOTE(review): the triple-quoted template below was stripped from the
    # dump this file was recovered from (leaving a bare ``.format(``); it is
    # reconstructed here to match the wrapper built in the except-branch
    # above — confirm against the original source.
    exec(
        dedent(
            """\
            def __init__({argspec}):
                __initializing.add({self})
                __init({forward})
                __initializing.remove({self})
                __check_missing_slots({self})
            """.format(
                argspec=argspec,
                self=self,
                forward=forward,
            ),
        ),
        ns,
    )
    __init__ = ns['__init__']
    __init__.__defaults__ = spec.defaults
    __init__.__kwdefaults__ = spec.kwonlydefaults
    __init__.__annotations__ = spec.annotations
    return __init__
| 2.798479
| 2.844894
| 0.983685
|
missing_slots = tuple(
filter(lambda s: not hasattr(ob, s), ob.__slots__),
)
if missing_slots:
raise TypeError(
'not all slots initialized in __init__, missing: {0}'.format(
missing_slots,
),
)
|
def _check_missing_slots(ob)
|
Check that all slots have been initialized when a custom __init__ method
is provided.
Parameters
----------
ob : immutable
The instance that was just initialized.
Raises
------
TypeError
Raised when the instance has not set values that are named in the
__slots__.
| 4.008318
| 4.235313
| 0.946404
|
def scanl(f, n, ns):
    """Reduce ns by f starting with n yielding each intermediate value.

    ``tuple(scanl(f, n, ns))[-1] == reduce(f, ns, n)``

    Parameters
    ----------
    f : callable
        A binary function.
    n : any
        The starting value.
    ns : iterable of any
        The iterable to scan over.

    Yields
    ------
    p : any
        The value of reduce(f, ns[:idx]) where idx is the current index.

    Examples
    --------
    >>> import operator as op
    >>> tuple(scanl(op.add, 0, (1, 2, 3, 4)))
    (0, 1, 3, 6, 10)
    """
    accumulated = n
    yield accumulated
    for element in ns:
        accumulated = f(accumulated, element)
        yield accumulated
| 4.247955
| 12.549454
| 0.338497
|
def ffill(iterable):
    """Forward fill non None values in some iterable.

    Parameters
    ----------
    iterable : iterable
        The iterable to forward fill.

    Yields
    ------
    e : any
        The last non None value or None if there has not been a non None
        value.
    """
    # Starting ``previous`` at None (instead of priming with next())
    # handles the empty iterable without error: the original raised
    # StopIteration out of the generator, which PEP 479 turns into a
    # RuntimeError on Python 3.7+.  For non-empty input the behavior is
    # unchanged — the first element is yielded as-is (None included).
    previous = None
    for e in iterable:
        if e is None:
            yield previous
        else:
            previous = e
            yield e
| 3.370605
| 3.629889
| 0.92857
|
def flatten(seq, *, recurse_types=(tuple, list, set, frozenset)):
    """Convert a (possibly nested) iterator into a flattened iterator.

    Parameters
    ----------
    seq : iterable
        The sequence to flatten.
    recurse_types, optional
        Types to recursively flatten.
        Defaults to (tuple, list, set, frozenset).

    >>> list(flatten((1, (2, 3), ((4,), 5))))
    [1, 2, 3, 4, 5]
    >>> list(flatten(["abc", "def"], recurse_types=(str,)))
    ['a', 'b', 'c', 'd', 'e', 'f']
    """
    for item in seq:
        if not isinstance(item, recurse_types):
            yield item
        else:
            # The recursive call deliberately uses the *default*
            # recurse_types: forwarding a type like ``str`` would recurse
            # forever on its single characters.
            yield from flatten(item)
| 2.638075
| 4.035169
| 0.653771
|
def types(self):
    """Tuple containing types transformed by this transformer."""
    # bytes first, then str, matching the transformer's flag order.
    flags = (
        (bytes, self._transform_bytes),
        (str, self._transform_str),
    )
    return tuple(type_ for type_, enabled in flags if enabled)
| 4.64981
| 3.509135
| 1.325059
|
def _transform_constant_sequence(self, seq):
    """Transform a frozenset or tuple."""
    should_transform = is_a(self.types)

    if not any(filter(should_transform, flatten(seq))):
        # Tuple doesn't contain any transformable strings. Ignore.
        yield LOAD_CONST(seq)
        return

    for const in seq:
        if should_transform(const):
            yield from self.transform_stringlike(const)
        elif isinstance(const, (tuple, frozenset)):
            # Nested sequences are transformed recursively.
            yield from self._transform_constant_sequence(const)
        else:
            yield LOAD_CONST(const)

    if isinstance(seq, tuple):
        yield BUILD_TUPLE(len(seq))
    else:
        assert isinstance(seq, frozenset)
        # Build a tuple on the stack, then call frozenset() on it.
        yield BUILD_TUPLE(len(seq))
        yield LOAD_CONST(frozenset)
        yield ROT_TWO()
        yield CALL_FUNCTION(1)
| 3.927214
| 3.54094
| 1.109088
|
def transform_stringlike(self, const):
    """Yield instructions to process a str or bytes constant."""
    yield LOAD_CONST(const)
    # str and bytes are disjoint, so branch order does not matter.
    if isinstance(const, str):
        yield from self.str_instrs
    elif isinstance(const, bytes):
        yield from self.bytes_instrs
| 5.832575
| 3.338758
| 1.746929
|
def _mk_call_init(class_):
    """Create an __init__ function for a call type instruction.

    Parameters
    ----------
    class_ : type
        The type to bind the function to.

    Returns
    -------
    __init__ : callable
        The __init__ method for the class.
    """
    def __init__(self, packed=no_default, *, positional=0, keyword=0):
        if packed is no_default:
            # Pack the two byte-sized counts into one little-endian arg.
            arg = int.from_bytes(bytes((positional, keyword)), 'little')
        elif not positional and not keyword:
            arg = packed
        else:
            raise TypeError('cannot specify packed and unpacked arguments')
        # Unpack back so both views of the argument stay consistent.
        self.positional, self.keyword = arg.to_bytes(2, 'little')
        super(class_, self).__init__(arg)
    return __init__
| 4.536759
| 4.693193
| 0.966668
|
def steal(self, instr):
    """Steal the jump index off of `instr`.

    This makes anything that would have jumped to `instr` jump to
    this Instruction instead.

    Parameters
    ----------
    instr : Instruction
        The instruction to steal the jump sources from.

    Returns
    -------
    self : Instruction
        The instruction that owns this method.

    Notes
    -----
    This mutates self and ``instr`` inplace.
    """
    instr._stolen_by = self
    # Redirect every jump that targeted ``instr`` at us, then take
    # ownership of its target set, leaving ``instr`` with an empty one.
    for jump in instr._target_of:
        jump.arg = self
    self._target_of, instr._target_of = instr._target_of, set()
    return self
| 5.883859
| 6.384315
| 0.921612
|
def from_opcode(cls, opcode, arg=_no_arg):
    """Create an instruction from an opcode and raw argument.

    Parameters
    ----------
    opcode : int
        Opcode for the instruction to create.
    arg : int, optional
        The argument for the instruction.

    Returns
    -------
    intsr : Instruction
        An instance of the instruction named by ``opcode``.
    """
    # Build the concrete Instruction subclass through the metaclass
    # (type(cls)) using the opcode's name, then instantiate it with the
    # raw argument.
    return type(cls)(opname[opcode], (cls,), {}, opcode=opcode)(arg)
| 11.692113
| 30.092634
| 0.388537
|
def stack_effect(self):
    """The net effect of executing this instruction on the interpreter
    stack.

    Instructions that pop values off the stack have negative stack effect
    equal to the number of popped values.

    Instructions that push values onto the stack have positive stack effect
    equal to the number of popped values.

    Examples
    --------
    - LOAD_{FAST,NAME,GLOBAL,DEREF} push one value onto the stack.
      They have a stack_effect of 1.
    - POP_JUMP_IF_{TRUE,FALSE} always pop one value off the stack.
      They have a stack effect of -1.
    - BINARY_* instructions pop two instructions off the stack, apply a
      binary operator, and push the resulting value onto the stack.
      They have a stack effect of -1 (-2 values consumed + 1 value pushed).
    """
    if self.opcode == NOP.opcode:  # noqa
        # dis.stack_effect is broken here
        return 0
    # The ``stack_effect`` called below is the module-level helper
    # (presumably dis.stack_effect), not this property.
    return stack_effect(
        self.opcode,
        *((self.arg if isinstance(self.arg, int) else 0,)
          if self.have_arg else ())
    )
| 8.9178
| 9.375915
| 0.951139
|
def equiv(self, instr):
    """Check equivalence of instructions. This checks against the types
    and the arguments of the instructions

    Parameters
    ----------
    instr : Instruction
        The instruction to check against.

    Returns
    -------
    is_equiv : bool
        If the instructions are equivalent.

    Notes
    -----
    This is a separate concept from instruction identity. Two separate
    instructions can be equivalent without being the same exact instance.
    This means that two equivalent instructions can be at different points
    in the bytecode or be targeted by different jumps.
    """
    same_type = type(self) == type(instr)
    return same_type and self.arg == instr.arg
| 5.185937
| 9.670628
| 0.536257
|
def create_environment(home_dir, site_packages=False, clear=False,
                       unzip_setuptools=False,
                       prompt=None, search_dirs=None, download=False,
                       no_setuptools=False, no_pip=False, no_wheel=False,
                       symlink=True):
    """
    Creates a new environment in ``home_dir``.

    If ``site_packages`` is true, then the global ``site-packages/``
    directory will be on the path.

    If ``clear`` is true (default False) then the environment will
    first be cleared.
    """
    home_dir, lib_dir, inc_dir, bin_dir = path_locations(home_dir)

    # Install the interpreter itself and note its absolute path.
    py_executable = os.path.abspath(install_python(
        home_dir, lib_dir, inc_dir, bin_dir,
        site_packages=site_packages, clear=clear, symlink=symlink))

    install_distutils(home_dir)

    # Collect the bootstrap packages that were not explicitly disabled.
    to_install = []

    if not no_setuptools:
        to_install.append('setuptools')

    if not no_pip:
        to_install.append('pip')

    if not no_wheel:
        to_install.append('wheel')

    if to_install:
        install_wheel(
            to_install,
            py_executable,
            search_dirs,
            download=download,
        )

    install_activate(home_dir, bin_dir, prompt)

    install_python_config(home_dir, bin_dir, prompt)
| 2.654989
| 3.04274
| 0.872565
|
def path_locations(home_dir):
    """Return the path locations for the environment (where libraries are,
    where scripts go, etc)"""
    home_dir = os.path.abspath(home_dir)
    # XXX: We'd use distutils.sysconfig.get_python_inc/lib but its
    # prefix arg is broken: http://bugs.python.org/issue3386
    if is_win:
        # Windows has lots of problems with executables with spaces in
        # the name; this function will remove them (using the ~1
        # format):
        mkdir(home_dir)
        if ' ' in home_dir:
            import ctypes
            GetShortPathName = ctypes.windll.kernel32.GetShortPathNameW
            size = max(len(home_dir)+1, 256)
            buf = ctypes.create_unicode_buffer(size)
            try:
                # Python 2 compatibility: prefer unicode when available.
                u = unicode
            except NameError:
                u = str
            ret = GetShortPathName(u(home_dir), buf, size)
            if not ret:
                print('Error: the path "%s" has a space in it' % home_dir)
                print('We could not determine the short pathname for it.')
                print('Exiting.')
                sys.exit(3)
            home_dir = str(buf.value)
        lib_dir = join(home_dir, 'Lib')
        inc_dir = join(home_dir, 'Include')
        bin_dir = join(home_dir, 'Scripts')
    if is_jython:
        lib_dir = join(home_dir, 'Lib')
        inc_dir = join(home_dir, 'Include')
        bin_dir = join(home_dir, 'bin')
    elif is_pypy:
        lib_dir = home_dir
        inc_dir = join(home_dir, 'include')
        bin_dir = join(home_dir, 'bin')
    elif not is_win:
        lib_dir = join(home_dir, 'lib', py_version)
        inc_dir = join(home_dir, 'include', py_version + abiflags)
        bin_dir = join(home_dir, 'bin')
    return home_dir, lib_dir, inc_dir, bin_dir
| 2.787617
| 2.753282
| 1.01247
|
def copy_tcltk(src, dest, symlink):
    """copy tcl/tk libraries on Windows (issue #93)"""
    # Tcl/Tk 8.5 ships with Python 2, 8.6 with Python 3.
    libver = '8.5' if majver == 2 else '8.6'
    for component in ('tcl', 'tk'):
        subpath = '/tcl/' + component + libver
        copyfileordir(src + subpath, dest + subpath, symlink)
| 4.037339
| 3.811589
| 1.059227
|
def fix_lib64(lib_dir, symlink=True):
    """Some platforms (particularly Gentoo on x64) put things in lib64/pythonX.Y
    instead of lib/pythonX.Y.  If this is such a platform we'll just create a
    symlink so lib64 points to lib"""
    # PyPy's library path scheme is not affected by this.
    # Return early or we will die on the following assert.
    if is_pypy:
        logger.debug('PyPy detected, skipping lib64 symlinking')
        return
    # Check we have a lib64 library path
    if not [p for p in distutils.sysconfig.get_config_vars().values()
            if isinstance(p, basestring) and 'lib64' in p]:
        return

    logger.debug('This system uses lib64; symlinking lib64 to lib')

    assert os.path.basename(lib_dir) == 'python%s' % sys.version[:3], (
        "Unexpected python lib dir: %r" % lib_dir)
    lib_parent = os.path.dirname(lib_dir)
    top_level = os.path.dirname(lib_parent)
    lib_dir = os.path.join(top_level, 'lib')
    lib64_link = os.path.join(top_level, 'lib64')
    assert os.path.basename(lib_parent) == 'lib', (
        "Unexpected parent dir: %r" % lib_parent)
    # Nothing to do if the link (or a real dir) is already present.
    if os.path.lexists(lib64_link):
        return
    if symlink:
        os.symlink('lib', lib64_link)
    else:
        copyfile('lib', lib64_link)
| 3.197391
| 3.131577
| 1.021016
|
def mach_o_change(path, what, value):
    """Replace a given name (what) in any LC_LOAD_DYLIB command found in
    the given binary with a new name (value), provided it's shorter."""

    def do_macho(file, bits, endian):
        # Read Mach-O header (the magic number is assumed read by the caller)
        cputype, cpusubtype, filetype, ncmds, sizeofcmds, flags = read_data(file, endian, 6)
        # 64-bits header has one more field.
        if bits == 64:
            read_data(file, endian)
        # The header is followed by ncmds commands
        for n in range(ncmds):
            where = file.tell()
            # Read command header
            cmd, cmdsize = read_data(file, endian, 2)
            if cmd == LC_LOAD_DYLIB:
                # The first data field in LC_LOAD_DYLIB commands is the
                # offset of the name, starting from the beginning of the
                # command.
                name_offset = read_data(file, endian)
                file.seek(where + name_offset, os.SEEK_SET)
                # Read the NUL terminated string
                load = file.read(cmdsize - name_offset).decode()
                load = load[:load.index('\0')]
                # If the string is what is being replaced, overwrite it.
                if load == what:
                    file.seek(where + name_offset, os.SEEK_SET)
                    file.write(value.encode() + '\0'.encode())
            # Seek to the next command
            file.seek(where + cmdsize, os.SEEK_SET)

    def do_file(file, offset=0, size=maxint):
        file = fileview(file, offset, size)
        # Read magic number
        magic = read_data(file, BIG_ENDIAN)
        if magic == FAT_MAGIC:
            # Fat binaries contain nfat_arch Mach-O binaries
            nfat_arch = read_data(file, BIG_ENDIAN)
            for n in range(nfat_arch):
                # Read arch header
                cputype, cpusubtype, offset, size, align = read_data(file, BIG_ENDIAN, 5)
                do_file(file, offset, size)
        elif magic == MH_MAGIC:
            do_macho(file, 32, BIG_ENDIAN)
        elif magic == MH_CIGAM:
            do_macho(file, 32, LITTLE_ENDIAN)
        elif magic == MH_MAGIC_64:
            do_macho(file, 64, BIG_ENDIAN)
        elif magic == MH_CIGAM_64:
            do_macho(file, 64, LITTLE_ENDIAN)

    # An in-place overwrite only works when the new name fits.
    assert(len(what) >= len(value))

    with open(path, 'r+b') as f:
        do_file(f)
| 2.856771
| 2.84902
| 1.002721
|
def _get_bin_dir(self):
    """Normaly we have a ...env/bin/ dir.
    But under Windows we have ...env/Scripts/
    But not PyPy2 under Windows, see:
    https://bitbucket.org/pypy/pypy/issues/2125/tcl-doesnt-work-inside-a-virtualenv-on#comment-21247266
    So just try to test via os.path.isdir()
    """
    for candidate in ("bin", "Scripts"):
        path = os.path.join(self.abs_home_dir, candidate)
        if not os.path.isdir(path):
            continue
        print("bin dir: %r" % path)
        return path
    raise RuntimeError("Can't find 'bin/Scripts' dir in: %r" % self.abs_home_dir)
| 2.900696
| 2.497663
| 1.161364
|
def _get_python_cmd(self):
    """return the python executable in the virtualenv.
    Try first sys.executable but use fallbacks.
    """
    candidates = ["pypy.exe", "python.exe", "python"]
    current = sys.executable
    if current is not None:
        # Prefer the basename of the running interpreter.
        candidates.insert(0, os.path.split(current)[1])
    return self._get_bin_file(*candidates)
| 4.225902
| 3.673676
| 1.15032
|
def hex_repr(d):
    """
    >>> hex_repr({"A":0x1,"B":0xabc})
    'A=$01 B=$0abc'
    """
    def _render(item):
        key, value = item
        # Integers are shown in the project's hex notation.
        rendered = nice_hex(value) if isinstance(value, int) else value
        return "%s=%s" % (key, rendered)

    return " ".join(_render(item) for item in sorted(d.items()))
| 2.276849
| 2.273761
| 1.001358
|
def _run_dragonpy_cli(self, *args):
    """Run DragonPy cli with given args.
    Add "--verbosity" from GUI.
    """
    verbosity = self.frame_settings.var_verbosity.get()
    verbosity_no = VERBOSITY_DICT2[verbosity]
    log.debug("Verbosity: %i (%s)" % (verbosity_no, verbosity))

    # Prepend the GUI-selected verbosity to the caller's arguments.
    args = (
        "--verbosity", "%s" % verbosity_no
        # "--log_list",
        # "--log",
        # "dragonpy.components.cpu6809,40",
        # "dragonpy.Dragon32.MC6821_PIA,50",
    ) + args
    click.echo("\n")
    run_dragonpy(*args, verbose=True)
| 9.535669
| 8.48685
| 1.123582
|
def _run_command(self, command):
    """Run DragonPy cli with given command like "run" or "editor"

    Add "--machine" from GUI.
    "--verbosity" will also be set, later.
    """
    machine_name = self.frame_run_buttons.var_machine.get()
    self._run_dragonpy_cli("--machine", machine_name, command)
| 20.831667
| 7.580562
| 2.748037
|
def int_to_padded_hex_byte(integer):
    """Convert an int to a 0-padded hex byte string
    example: 65 == 41, 10 == 0A
    Returns: The hex byte as string (ex: "0C")
    """
    # Take everything after the 'x' of hex()'s output, e.g. '0x41' -> '41'.
    hex_digits = hex(integer).partition('x')[2].upper()
    if len(hex_digits) == 1:
        hex_digits = '0' + hex_digits
    return hex_digits
| 2.982563
| 3.277024
| 0.910144
|
def compute_srec_checksum(srec):
    """Compute the checksum byte of a given S-Record

    Sums every byte after the record-type field and returns the one's
    complement of the least significant byte of that sum.

    Returns: The checksum as an int (ex: 0x0C)
    """
    # Skip the leading record-type field ("S1", "S9", ...); everything
    # after it (count, address, data) is summed byte by byte.
    data = srec[2:]
    total = 0
    for position in range(0, len(data), 2):
        total += int(data[position:position + 2], 16)
    # One's complement of the least significant byte.  This replaces the
    # original hex-string slicing trick ('0x3' -> 'x3' -> '03'), which
    # produced exactly ``total & 0xff`` for every non-negative sum.
    return (~total) & 0xff
| 4.453402
| 4.488832
| 0.992107
|
def validate_srec_checksum(srec):
    """Validate if the checksum of the supplied s-record is valid
    Returns: True if valid, False if not
    """
    stated = int(srec[-2:], 16)
    # Recompute over the record without its trailing checksum byte.
    computed = compute_srec_checksum(srec[:-2])
    return computed == stated
| 3.705643
| 4.315817
| 0.858619
|
def get_readable_string(integer):
    r"""
    Convert an integer to a readable 2-character representation. This is useful for reversing
    examples: 41 == ".A", 13 == "\n", 20 (space) == "__"
    Returns a readable 2-char representation of an int.
    """
    # NOTE(review): the mappings for 10 and 13 look swapped relative to
    # ASCII (10 is LF '\n', 13 is CR '\r'), but they are preserved here
    # to keep the documented behavior — confirm intent before changing.
    specials = {
        9: "\\t",
        10: "\\r",
        13: "\\n",
        32: '__',  # space
    }
    if integer in specials:
        return specials[integer]
    if 33 <= integer <= 126:  # readable ascii
        return chr(integer) + '.'
    # Everything else falls back to a padded hex byte.
    return int_to_padded_hex_byte(integer)
| 3.665711
| 3.389064
| 1.081629
|
def offset_byte_in_data(target_data, offset, target_byte_pos, readable = False, wraparound = False):
    """Offset a given byte in the provided data payload (kind of rot(x))
    readable will return a human-readable representation of the byte+offset
    wraparound will wrap around 255 to 0 (ex: 257 = 2)
    Returns: the offseted byte
    """
    start = target_byte_pos * 2
    end = start + 2
    new_value = int(target_data[start:end], 16) + offset

    # Optionally wrap values that overflowed a single byte.
    if wraparound and new_value > 255:
        new_value -= 256

    # Pick a readable rendering only for in-range byte values.
    if readable and 0 < new_value < 256:
        replacement = get_readable_string(new_value)
    else:
        replacement = int_to_padded_hex_byte(new_value)

    return ''.join([target_data[:start], replacement, target_data[end:]])
| 2.662272
| 2.792365
| 0.953411
|
def offset_data(data_section, offset, readable = False, wraparound = False):
    """Offset the whole data section.
    see offset_byte_in_data for more information
    Returns: the entire data section + offset on each byte
    """
    # ``//`` (floor division) keeps this working on Python 3 too: with
    # ``/`` the range() call would receive a float and raise TypeError.
    # On Python 2 int/int already floored, so behavior is unchanged.
    for pos in range(0, len(data_section) // 2):
        data_section = offset_byte_in_data(data_section, offset, pos, readable, wraparound)
    return data_section
| 4.040765
| 3.745106
| 1.078945
|
def parse_srec(srec):
    """Extract the data portion of a given S-Record (without checksum)
    Returns: the record type, the lenght of the data section, the write
    address, the data itself and the checksum
    """
    record_type = srec[:2]
    data_len = srec[2:4]
    # Address width (in hex characters) depends on the record type.
    addr_chars = __ADDR_LEN.get(record_type) * 2
    addr_end = 4 + addr_chars
    addr = srec[4:addr_end]
    data = srec[addr_end:-2]
    checksum = srec[-2:]
    return record_type, data_len, addr, data, checksum
| 2.773612
| 2.4357
| 1.138733
|
def convert(source_file, destination_file, cfg):
    """
    convert in every way.

    Dispatches on the lower-cased file extensions of source and
    destination; each side must be one of .wav, .cas or .bas.
    (Note: this module is Python 2 code — see the print statement.)
    """
    source_ext = os.path.splitext(source_file)[1]
    source_ext = source_ext.lower()
    dest_ext = os.path.splitext(destination_file)[1]
    dest_ext = dest_ext.lower()

    if source_ext not in (".wav", ".cas", ".bas"):
        raise AssertionError(
            "Source file type %r not supported." % repr(source_ext)
        )
    if dest_ext not in (".wav", ".cas", ".bas"):
        raise AssertionError(
            "Destination file type %r not supported." % repr(dest_ext)
        )

    print "Convert %s -> %s" % (source_ext, dest_ext)

    c = Cassette(cfg)

    # Load from whichever format the source is in...
    if source_ext == ".wav":
        c.add_from_wav(source_file)
    elif source_ext == ".cas":
        c.add_from_cas(source_file)
    elif source_ext == ".bas":
        c.add_from_bas(source_file)
    else:
        raise RuntimeError # Should never happen

    c.print_debug_info()

    # ...then write out in the requested destination format.
    if dest_ext == ".wav":
        c.write_wave(destination_file)
    elif dest_ext == ".cas":
        c.write_cas(destination_file)
    elif dest_ext == ".bas":
        c.write_bas(destination_file)
    else:
        raise RuntimeError
| 1.925962
| 1.907241
| 1.009816
|
def get_initial_RAM(self):
    """
    init the Dragon RAM
    See: http://archive.worldofdragon.org/phpBB3/viewtopic.php?f=5&t=4444
    """
    # Alternating blocks of four 0xff bytes and four 0x00 bytes.
    pattern = [0xff] * 4 + [0x00] * 4
    mem = []
    for _ in xrange(self.RAM_SIZE // 8):
        mem.extend(pattern)
    return mem
| 4.213889
| 3.622597
| 1.163223
|
def save_position(self):
    """save cursor and scroll position"""
    # save text cursor position:
    self.old_text_pos = self.index(tkinter.INSERT)
    # save scroll position (fractions of the first/last visible lines):
    self.old_first, self.old_last = self.yview()
| 6.239195
| 4.33355
| 1.439742
|
def restore_position(self):
    """restore cursor and scroll position"""
    # restore text cursor position:
    self.mark_set(tkinter.INSERT, self.old_text_pos)
    # restore scroll position (only the top fraction is needed):
    self.yview_moveto(self.old_first)
| 6.080189
| 4.953213
| 1.227524
|
def __update_interval(self):
    """highlight the current line"""
    # Re-apply the highlight, then reschedule ourselves every 250 ms.
    self.update()
    self.after_id = self.text.after(250, self.__update_interval)
| 5.455336
| 4.411507
| 1.236615
|
def update(self, event=None, force=False):
    """highlight the current line"""
    # The row part of a tkinter index like "12.0" is the line number.
    current_row = self.text.index(tkinter.INSERT).split(".")[0]
    tag = self.tag_current_line.id
    # Clear the old highlight everywhere, then tag the current line.
    self.text.tag_remove(tag, "1.0", "end")
    self.text.tag_add(tag, "%s.0" % current_row, "%s.0+1lines" % current_row)
| 3.182359
| 2.869415
| 1.109062
|
def download(self):
    """Request url and return his content
    The Requested content will be cached into the default temp directory.
    """
    if os.path.isfile(self.archive_path):
        # Cache hit: reuse the previously downloaded archive.
        print("Use %r" % self.archive_path)
        with open(self.archive_path, "rb") as f:
            content = f.read()
    else:
        print("Request: %r..." % self.URL)
        # Warning: HTTPS requests do not do any verification of the server's certificate.
        f = urlopen(self.URL)
        content = f.read()
        with open(self.archive_path, "wb") as out_file:
            out_file.write(content)

    # Check SHA hash:
    current_sha1 = hashlib.sha1(content).hexdigest()
    assert current_sha1 == self.DOWNLOAD_SHA1, "Download sha1 value is wrong! SHA1 is: %r" % current_sha1
    print("Download SHA1: %r, ok." % current_sha1)
| 3.462604
| 3.302927
| 1.048344
|
def paste_clipboard(self, event):
    """Send the clipboard content as user input to the CPU."""
    log.critical("paste clipboard")
    content = self.root.clipboard_get()
    # Feed the clipboard line by line, terminating each with CR.
    for line in content.splitlines():
        log.critical("paste line: %s", repr(line))
        self.add_user_input(line + "\r")
| 4.406759
| 3.848598
| 1.14503
|
def display_callback(self, cpu_cycles, op_address, address, value):
    """called via memory write_byte_middleware"""
    # Forward the write to the display, then pass the value through
    # unchanged so the memory write proceeds normally.
    self.display.write_byte(cpu_cycles, op_address, address, value)
    return value
| 4.495347
| 3.264349
| 1.377103
|
def event_key_pressed(self, event):
    """So a "invert shift" for user inputs:
    Convert all lowercase letters to uppercase and vice versa.
    """
    pressed = event.char
    if not pressed:
        return
    if pressed in string.ascii_letters:
        pressed = invert_shift(pressed)
    self.user_input_queue.put(pressed)
    # Swallow the event — don't insert the char in the text widget,
    # because it will be echoed back from the machine!
    return "break"
| 7.786783
| 6.143513
| 1.267481
|
def fix_virtualenv_tkinter():
    r"""work-a-round for tkinter under windows in a virtualenv:
    "TclError: Can't find a usable init.tcl..."
    Known bug, see: https://github.com/pypa/virtualenv/issues/93

    There are "fix tk" file here:
        C:\Python27\Lib\lib-tk\FixTk.py
        C:\Python34\Lib\tkinter\_fix.py
    These modules will be automatic imported by tkinter import.

    The fix set theses environment variables:
        TCL_LIBRARY C:\Python27\tcl\tcl8.5
        TIX_LIBRARY C:\Python27\tcl\tix8.4.3
        TK_LIBRARY C:\Python27\tcl\tk8.5
        TCL_LIBRARY C:\Python34\tcl\tcl8.6
        TIX_LIBRARY C:\Python34\tcl\tix8.4.3
        TK_LIBRARY C:\Python34\tcl\tk8.6
    but only if:
        os.path.exists(os.path.join(sys.prefix,"tcl"))

    And the virtualenv activate script will change the sys.prefix
    to the current env. So we temporary change it back to sys.real_prefix
    and import the fix module.
    If the fix module was imported before, then we reload it.
    """
    if "TCL_LIBRARY" in os.environ:
        # Fix not needed (e.g. virtualenv issues #93 fixed?)
        return

    if not hasattr(sys, "real_prefix"):
        # we are not in a activated virtualenv
        return

    if sys.version_info[0] == 2:
        # Python v2
        virtualprefix = sys.prefix
        sys.prefix = sys.real_prefix
        import FixTk
        if "TCL_LIBRARY" not in os.environ:
            # Importing alone didn't set the env vars: force re-run.
            reload(FixTk)
        sys.prefix = virtualprefix
    else:
        # Python v3
        virtualprefix = sys.base_prefix
        sys.base_prefix = sys.real_prefix
        from tkinter import _fix
        if "TCL_LIBRARY" not in os.environ:
            from imp import reload
            reload(_fix)
        sys.base_prefix = virtualprefix
| 3.981778
| 3.285268
| 1.21201
|
def internal_reset(self):
    """internal state reset.
    used e.g. in unittests
    """
    log.critical("PIA internal_reset()")
    # Reset the keyboard-scan bookkeeping to its initial state.
    self.empty_key_toggle = True
    self.current_input_char = None
    self.input_repead = 0
| 19.925467
| 19.146305
| 1.040695
|
def read_PIA0_A_data(self, cpu_cycles, op_address, address):
    """read from 0xff00 -> PIA 0 A side Data reg.

    bit 7 | PA7 | joystick comparison input
    bit 6 | PA6 | keyboard matrix row 7
    bit 5 | PA5 | keyboard matrix row 6
    bit 4 | PA4 | keyboard matrix row 5
    bit 3 | PA3 | keyboard matrix row 4 & left joystick switch 2
    bit 2 | PA2 | keyboard matrix row 3 & right joystick switch 2
    bit 1 | PA1 | keyboard matrix row 2 & left joystick switch 1
    bit 0 | PA0 | keyboard matrix row 1 & right joystick switch 1
    """
    pia0b = self.pia_0_B_data.value # $ff02

    # FIXME: Find a way to handle CoCo and Dragon in the same way!
    if self.cfg.CONFIG_NAME == COCO2B:
        # CoCo: pace key delivery with a simple scan counter.
        # log.critical("\t count: %i", self.input_repead)
        if self.input_repead == 7:
            try:
                self.current_input_char = self.user_input_queue.get_nowait()
            except queue.Empty:
                self.current_input_char = None
            else:
                log.critical(
                    "\tget new key from queue: %s", repr(self.current_input_char))
        elif self.input_repead == 18:
            # log.critical("\tForce send 'no key pressed'")
            self.current_input_char = None
        elif self.input_repead > 20:
            self.input_repead = 0

        self.input_repead += 1
    else:  # Dragon
        if pia0b == self.cfg.PIA0B_KEYBOARD_START: # FIXME
            if self.empty_key_toggle:
                # Work-a-round for "poor" dragon keyboard scan routine:
                # The scan routine in ROM ignores key pressed directly behind
                # one another if they are in the same row!
                # See "Inside the Dragon" book, page 203 ;)
                #
                # Here with the empty_key_toggle, we always send a "no key pressed"
                # after every key press back and then we send the next key from
                # the self.user_input_queue
                #
                # TODO: We can check the row of the previous key press and only
                # force a 'no key pressed' if the row is the same
                self.empty_key_toggle = False
                self.current_input_char = None
                # log.critical("\tForce send 'no key pressed'")
            else:
                try:
                    self.current_input_char = self.user_input_queue.get_nowait()
                except queue.Empty:
                    # log.critical("\tinput_queue is empty"))
                    self.current_input_char = None
                else:
                    # log.critical("\tget new key from queue: %s", repr(self.current_input_char))
                    self.empty_key_toggle = True

    if self.current_input_char is None:
        # log.critical("\tno key pressed")
        result = 0xff
        self.empty_key_toggle = False
    else:
        # log.critical("\tsend %s", repr(self.current_input_char))
        result = self.cfg.pia_keymatrix_result(
            self.current_input_char, pia0b)

    # if not is_bit_set(pia0b, bit=7):
    # bit 7 | PA7 | joystick comparison input
    # result = clear_bit(result, bit=7)

    # if self.current_input_char is not None:
    # log.critical(
    # "%04x| read $%04x ($ff02 is $%02x %s) send $%02x %s back\t|%s",
    # op_address, address,
    # pia0b, '{0:08b}'.format(pia0b),
    # result, '{0:08b}'.format(result),
    # self.cfg.mem_info.get_shortest(op_address)
    # )
    return result
| 4.881718
| 4.531848
| 1.077202
|
log.error("%04x| write $%02x (%s) to $%04x -> PIA 0 A side Data reg.\t|%s",
op_address, value, byte2bit_string(value), address,
self.cfg.mem_info.get_shortest(op_address)
)
self.pia_0_A_register.set(value)
|
def write_PIA0_A_data(self, cpu_cycles, op_address, address, value)
|
write to 0xff00 -> PIA 0 A side Data reg.
| 13.225353
| 7.713168
| 1.714646
|
value = 0xb3
log.error(
"%04x| read $%04x (PIA 0 A side Control reg.) send $%02x (%s) back.\t|%s",
op_address, address, value, byte2bit_string(value),
self.cfg.mem_info.get_shortest(op_address)
)
return value
|
def read_PIA0_A_control(self, cpu_cycles, op_address, address)
|
read from 0xff01 -> PIA 0 A side control register
| 15.125402
| 12.230441
| 1.236701
|
log.error(
"%04x| write $%02x (%s) to $%04x -> PIA 0 A side Control reg.\t|%s",
op_address, value, byte2bit_string(value), address,
self.cfg.mem_info.get_shortest(op_address)
)
if not is_bit_set(value, bit=2):
self.pia_0_A_register.select_pdr()
else:
self.pia_0_A_register.deselect_pdr()
|
def write_PIA0_A_control(self, cpu_cycles, op_address, address, value)
|
write to 0xff01 -> PIA 0 A side control register
TODO: Handle IRQ
bit 7 | IRQ 1 (HSYNC) flag
bit 6 | IRQ 2 flag(not used)
bit 5 | Control line 2 (CA2) is an output = 1
bit 4 | Control line 2 (CA2) set by bit 3 = 1
bit 3 | select line LSB of analog multiplexor (MUX): 0 = control line 2 LO / 1 = control line 2 HI
bit 2 | set data direction: 0 = $FF00 is DDR / 1 = $FF00 is normal data lines
bit 1 | control line 1 (CA1): IRQ polarity 0 = IRQ on HI to LO / 1 = IRQ on LO to HI
bit 0 | HSYNC IRQ: 0 = disabled IRQ / 1 = enabled IRQ
| 8.80141
| 7.683044
| 1.145563
|
value = self.pia_0_B_data.value # $ff02
log.debug(
"%04x| read $%04x (PIA 0 B side Data reg.) send $%02x (%s) back.\t|%s",
op_address, address, value, byte2bit_string(value),
self.cfg.mem_info.get_shortest(op_address)
)
return value
|
def read_PIA0_B_data(self, cpu_cycles, op_address, address)
|
read from 0xff02 -> PIA 0 B side Data reg.
bit 7 | PB7 | keyboard matrix column 8
bit 6 | PB6 | keyboard matrix column 7 / ram size output
bit 5 | PB5 | keyboard matrix column 6
bit 4 | PB4 | keyboard matrix column 5
bit 3 | PB3 | keyboard matrix column 4
bit 2 | PB2 | keyboard matrix column 3
bit 1 | PB1 | keyboard matrix column 2
bit 0 | PB0 | keyboard matrix column 1
bits 0-7 also printer data lines
| 13.490814
| 9.990828
| 1.35032
|
value = self.pia_0_B_control.value
log.error(
"%04x| read $%04x (PIA 0 B side Control reg.) send $%02x (%s) back.\t|%s",
op_address, address, value, byte2bit_string(value),
self.cfg.mem_info.get_shortest(op_address)
)
return value
|
def read_PIA0_B_control(self, cpu_cycles, op_address, address)
|
read from 0xff03 -> PIA 0 B side Control reg.
| 12.332876
| 8.811956
| 1.399562
|
log.critical(
"%04x| write $%02x (%s) to $%04x -> PIA 0 B side Control reg.\t|%s",
op_address, value, byte2bit_string(value),
address, self.cfg.mem_info.get_shortest(op_address)
)
if is_bit_set(value, bit=0):
log.critical(
"%04x| write $%02x (%s) to $%04x -> VSYNC IRQ: enable\t|%s",
op_address, value, byte2bit_string(value),
address, self.cfg.mem_info.get_shortest(op_address)
)
self.cpu.irq_enabled = True
value = set_bit(value, bit=7)
else:
log.critical(
"%04x| write $%02x (%s) to $%04x -> VSYNC IRQ: disable\t|%s",
op_address, value, byte2bit_string(value),
address, self.cfg.mem_info.get_shortest(op_address)
)
self.cpu.irq_enabled = False
if not is_bit_set(value, bit=2):
self.pia_0_B_control.select_pdr()
else:
self.pia_0_B_control.deselect_pdr()
self.pia_0_B_control.set(value)
|
def write_PIA0_B_control(self, cpu_cycles, op_address, address, value)
|
write to 0xff03 -> PIA 0 B side Control reg.
TODO: Handle IRQ
bit 7 | IRQ 1 (VSYNC) flag
bit 6 | IRQ 2 flag(not used)
bit 5 | Control line 2 (CB2) is an output = 1
bit 4 | Control line 2 (CB2) set by bit 3 = 1
bit 3 | select line MSB of analog multiplexor (MUX): 0 = control line 2 LO / 1 = control line 2 HI
bit 2 | set data direction: 0 = $FF02 is DDR / 1 = $FF02 is normal data lines
bit 1 | control line 1 (CB1): IRQ polarity 0 = IRQ on HI to LO / 1 = IRQ on LO to HI
bit 0 | VSYNC IRQ: 0 = disable IRQ / 1 = enable IRQ
| 2.991286
| 2.515585
| 1.189101
|
log.critical("%04x| write $%04x to $%04x", last_op_address, word, address)
return word
|
def basic_addresses_write(self, cycles, last_op_address, address, word)
|
0113 0019 TXTTAB RMB 2 *PV BEGINNING OF BASIC PROGRAM
0114 001B VARTAB RMB 2 *PV START OF VARIABLES
0115 001D ARYTAB RMB 2 *PV START OF ARRAYS
0116 001F ARYEND RMB 2 *PV END OF ARRAYS (+1)
0117 0021 FRETOP RMB 2 *PV START OF STRING STORAGE (TOP OF FREE RAM)
0118 0023 STRTAB RMB 2 *PV START OF STRING VARIABLES
0119 0025 FRESPC RMB 2 UTILITY STRING POINTER
0120 0027 MEMSIZ RMB 2 *PV TOP OF STRING SPACE
| 8.203054
| 8.924715
| 0.919139
|
char = event.char
if not char or char not in string.ascii_letters:
# ignore all non letter inputs
return
converted_char = invert_shift(char)
log.debug("convert keycode %s - char %s to %s", event.keycode, repr(char), converted_char)
# self.text.delete(Tkinter.INSERT + "-1c") # Delete last input char
self.text.insert(tkinter.INSERT, converted_char) # Insert converted char
return "break"
|
def event_text_key(self, event)
|
So a "invert shift" for user inputs:
Convert all lowercase letters to uppercase and vice versa.
| 6.23128
| 5.465586
| 1.140094
|
def pop_bytes_from_bit_list(bit_list, count):
data_bit_count = count * 8
data_bit_list = bit_list[:data_bit_count]
data = list(iter_steps(data_bit_list, steps=8))
bit_list = bit_list[data_bit_count:]
return bit_list, data
|
>>> bit_str = (
... "00110011"
... "00001111"
... "01010101"
... "11001100")
>>> bit_list = [int(i) for i in bit_str]
>>> bit_list, bytes = pop_bytes_from_bit_list(bit_list, 1)
>>> bytes
[[0, 0, 1, 1, 0, 0, 1, 1]]
>>> bit_list, bytes = pop_bytes_from_bit_list(bit_list, 2)
>>> bytes
[[0, 0, 0, 0, 1, 1, 1, 1], [0, 1, 0, 1, 0, 1, 0, 1]]
>>> bit_list, bytes = pop_bytes_from_bit_list(bit_list, 1)
>>> bytes
[[1, 1, 0, 0, 1, 1, 0, 0]]
| null | null | null |
|
def print_bit_list_stats(bit_list):
print "%i Bits:" % len(bit_list),
positive_count = 0
negative_count = 0
for bit in bit_list:
if bit == 1:
positive_count += 1
elif bit == 0:
negative_count += 1
else:
raise TypeError("Not a bit: %s" % repr(bit))
print "%i positive bits and %i negative bits" % (positive_count, negative_count)
|
>>> print_bit_list_stats([1,1,1,1,0,0,0,0])
8 Bits: 4 positive bits and 4 negative bits
| null | null | null |
|
program_start = self.cpu.memory.read_word(
self.machine_api.PROGRAM_START_ADDR
)
tokens = self.machine_api.ascii_listing2program_dump(ascii_listing)
self.cpu.memory.load(program_start, tokens)
log.critical("BASIC program injected into Memory.")
# Update the BASIC addresses:
program_end = program_start + len(tokens)
self.cpu.memory.write_word(self.machine_api.VARIABLES_START_ADDR, program_end)
self.cpu.memory.write_word(self.machine_api.ARRAY_START_ADDR, program_end)
self.cpu.memory.write_word(self.machine_api.FREE_SPACE_START_ADDR, program_end)
log.critical("BASIC addresses updated.")
|
def inject_basic_program(self, ascii_listing)
|
save the given ASCII BASIC program listing into the emulator RAM.
| 3.533063
| 3.363934
| 1.050277
|
if color == INVERTED:
foreground = (0, 255, 0)
background = (0, 65, 0)
elif color == NORMAL:
foreground = (0, 65, 0)
background = (0, 255, 0)
else:
foreground = (0, 0, 0)
background = COLOR_INFO[color]
return (foreground, background)
|
def get_rgb_color(color)
|
>>> get_rgb_color(BLUE)
((0, 0, 0), (33, 16, 181))
>>> get_rgb_color(NORMAL)
((0, 65, 0), (0, 255, 0))
| 2.625931
| 2.264062
| 1.159832
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.