| id | repo | path | func_name | original_string | language | code | code_tokens | docstring | docstring_tokens | sha | url |
|---|---|---|---|---|---|---|---|---|---|---|---|
| int32 (0 to 252k) | string (7 to 55 chars) | string (4 to 127 chars) | string (1 to 88 chars) | string (75 to 19.8k chars) | 1 class | string (75 to 19.8k chars) | list | string (3 to 17.3k chars) | list | string (40 chars) | string (87 to 242 chars) |
241,000 | diffeo/rejester | rejester/run.py | Manager.do_work_spec |
def do_work_spec(self, args):
'''dump the contents of an existing work spec'''
work_spec_name = self._get_work_spec_name(args)
spec = self.task_master.get_work_spec(work_spec_name)
if args.json:
self.stdout.write(json.dumps(spec, indent=4, sort_keys=True) +
'\n')
else:
yaml.safe_dump(spec, self.stdout)
|
python
|
def do_work_spec(self, args):
'''dump the contents of an existing work spec'''
work_spec_name = self._get_work_spec_name(args)
spec = self.task_master.get_work_spec(work_spec_name)
if args.json:
self.stdout.write(json.dumps(spec, indent=4, sort_keys=True) +
'\n')
else:
yaml.safe_dump(spec, self.stdout)
|
[
"def",
"do_work_spec",
"(",
"self",
",",
"args",
")",
":",
"work_spec_name",
"=",
"self",
".",
"_get_work_spec_name",
"(",
"args",
")",
"spec",
"=",
"self",
".",
"task_master",
".",
"get_work_spec",
"(",
"work_spec_name",
")",
"if",
"args",
".",
"json",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"json",
".",
"dumps",
"(",
"spec",
",",
"indent",
"=",
"4",
",",
"sort_keys",
"=",
"True",
")",
"+",
"'\\n'",
")",
"else",
":",
"yaml",
".",
"safe_dump",
"(",
"spec",
",",
"self",
".",
"stdout",
")"
] |
dump the contents of an existing work spec
|
[
"dump",
"the",
"contents",
"of",
"an",
"existing",
"work",
"spec"
] |
5438a4a18be2801d7826c46e2079ba9639d2ecb4
|
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/run.py#L402-L410
|
241,001 | diffeo/rejester | rejester/run.py | Manager.do_status |
def do_status(self, args):
'''print the number of work units in an existing work spec'''
work_spec_name = self._get_work_spec_name(args)
status = self.task_master.status(work_spec_name)
self.stdout.write(json.dumps(status, indent=4, sort_keys=True) +
'\n')
|
python
|
def do_status(self, args):
'''print the number of work units in an existing work spec'''
work_spec_name = self._get_work_spec_name(args)
status = self.task_master.status(work_spec_name)
self.stdout.write(json.dumps(status, indent=4, sort_keys=True) +
'\n')
|
[
"def",
"do_status",
"(",
"self",
",",
"args",
")",
":",
"work_spec_name",
"=",
"self",
".",
"_get_work_spec_name",
"(",
"args",
")",
"status",
"=",
"self",
".",
"task_master",
".",
"status",
"(",
"work_spec_name",
")",
"self",
".",
"stdout",
".",
"write",
"(",
"json",
".",
"dumps",
"(",
"status",
",",
"indent",
"=",
"4",
",",
"sort_keys",
"=",
"True",
")",
"+",
"'\\n'",
")"
] |
print the number of work units in an existing work spec
|
[
"print",
"the",
"number",
"of",
"work",
"units",
"in",
"an",
"existing",
"work",
"spec"
] |
5438a4a18be2801d7826c46e2079ba9639d2ecb4
|
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/run.py#L415-L420
|
241,002 | diffeo/rejester | rejester/run.py | Manager.do_summary |
def do_summary(self, args):
'''print a summary of running rejester work'''
assert args.json or args.text or (args.text is None)
do_text = args.text
xd = {}
for ws in self.task_master.iter_work_specs():
name = ws['name']
status = self.task_master.status(name)
xd[name] = status
xd['_NOW'] = time.time()
if args.json:
self.stdout.write(json.dumps(xd) + '\n')
else:
if do_text is None:
do_text = True
if do_text:
self.stdout.write('Work spec Avail Pending Blocked'
' Failed Finished Total\n')
self.stdout.write('==================== ======== ======== ========'
' ======== ======== ========\n')
for name in sorted(xd.keys()):
if name == '_NOW':
continue
status = xd[name]
self.stdout.write('{0:20s} {1[num_available]:8d} '
'{1[num_pending]:8d} {1[num_blocked]:8d} '
'{1[num_failed]:8d} {1[num_finished]:8d} '
'{1[num_tasks]:8d}\n'.format(name, status))
|
python
|
def do_summary(self, args):
'''print a summary of running rejester work'''
assert args.json or args.text or (args.text is None)
do_text = args.text
xd = {}
for ws in self.task_master.iter_work_specs():
name = ws['name']
status = self.task_master.status(name)
xd[name] = status
xd['_NOW'] = time.time()
if args.json:
self.stdout.write(json.dumps(xd) + '\n')
else:
if do_text is None:
do_text = True
if do_text:
self.stdout.write('Work spec Avail Pending Blocked'
' Failed Finished Total\n')
self.stdout.write('==================== ======== ======== ========'
' ======== ======== ========\n')
for name in sorted(xd.keys()):
if name == '_NOW':
continue
status = xd[name]
self.stdout.write('{0:20s} {1[num_available]:8d} '
'{1[num_pending]:8d} {1[num_blocked]:8d} '
'{1[num_failed]:8d} {1[num_finished]:8d} '
'{1[num_tasks]:8d}\n'.format(name, status))
|
[
"def",
"do_summary",
"(",
"self",
",",
"args",
")",
":",
"assert",
"args",
".",
"json",
"or",
"args",
".",
"text",
"or",
"(",
"args",
".",
"text",
"is",
"None",
")",
"do_text",
"=",
"args",
".",
"text",
"xd",
"=",
"{",
"}",
"for",
"ws",
"in",
"self",
".",
"task_master",
".",
"iter_work_specs",
"(",
")",
":",
"name",
"=",
"ws",
"[",
"'name'",
"]",
"status",
"=",
"self",
".",
"task_master",
".",
"status",
"(",
"name",
")",
"xd",
"[",
"name",
"]",
"=",
"status",
"xd",
"[",
"'_NOW'",
"]",
"=",
"time",
".",
"time",
"(",
")",
"if",
"args",
".",
"json",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"json",
".",
"dumps",
"(",
"xd",
")",
"+",
"'\\n'",
")",
"else",
":",
"if",
"do_text",
"is",
"None",
":",
"do_text",
"=",
"True",
"if",
"do_text",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"'Work spec Avail Pending Blocked'",
"' Failed Finished Total\\n'",
")",
"self",
".",
"stdout",
".",
"write",
"(",
"'==================== ======== ======== ========'",
"' ======== ======== ========\\n'",
")",
"for",
"name",
"in",
"sorted",
"(",
"xd",
".",
"keys",
"(",
")",
")",
":",
"if",
"name",
"==",
"'_NOW'",
":",
"continue",
"status",
"=",
"xd",
"[",
"name",
"]",
"self",
".",
"stdout",
".",
"write",
"(",
"'{0:20s} {1[num_available]:8d} '",
"'{1[num_pending]:8d} {1[num_blocked]:8d} '",
"'{1[num_failed]:8d} {1[num_finished]:8d} '",
"'{1[num_tasks]:8d}\\n'",
".",
"format",
"(",
"name",
",",
"status",
")",
")"
] |
print a summary of running rejester work
|
[
"print",
"a",
"summary",
"of",
"running",
"rejester",
"work"
] |
5438a4a18be2801d7826c46e2079ba9639d2ecb4
|
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/run.py#L427-L457
|
241,003 | diffeo/rejester | rejester/run.py | Manager.do_work_units |
def do_work_units(self, args):
'''list work units that have not yet completed'''
work_spec_name = self._get_work_spec_name(args)
if args.status:
status = args.status.upper()
statusi = getattr(self.task_master, status, None)
if statusi is None:
self.stdout.write('unknown status {0!r}\n'.format(args.status))
return
else:
statusi = None
work_units = dict(self.task_master.get_work_units(
work_spec_name, state=statusi, limit=args.limit))
work_unit_names = sorted(work_units.keys())
if args.limit:
work_unit_names = work_unit_names[:args.limit]
for k in work_unit_names:
if args.details:
tback = work_units[k].get('traceback', '')
if tback:
tback += '\n'
work_units[k]['traceback'] = 'displayed below'
self.stdout.write(
'{0!r}: {1}\n{2}'
.format(k, pprint.pformat(work_units[k], indent=4),
tback))
else:
self.stdout.write('{0}\n'.format(k))
|
python
|
def do_work_units(self, args):
'''list work units that have not yet completed'''
work_spec_name = self._get_work_spec_name(args)
if args.status:
status = args.status.upper()
statusi = getattr(self.task_master, status, None)
if statusi is None:
self.stdout.write('unknown status {0!r}\n'.format(args.status))
return
else:
statusi = None
work_units = dict(self.task_master.get_work_units(
work_spec_name, state=statusi, limit=args.limit))
work_unit_names = sorted(work_units.keys())
if args.limit:
work_unit_names = work_unit_names[:args.limit]
for k in work_unit_names:
if args.details:
tback = work_units[k].get('traceback', '')
if tback:
tback += '\n'
work_units[k]['traceback'] = 'displayed below'
self.stdout.write(
'{0!r}: {1}\n{2}'
.format(k, pprint.pformat(work_units[k], indent=4),
tback))
else:
self.stdout.write('{0}\n'.format(k))
|
[
"def",
"do_work_units",
"(",
"self",
",",
"args",
")",
":",
"work_spec_name",
"=",
"self",
".",
"_get_work_spec_name",
"(",
"args",
")",
"if",
"args",
".",
"status",
":",
"status",
"=",
"args",
".",
"status",
".",
"upper",
"(",
")",
"statusi",
"=",
"getattr",
"(",
"self",
".",
"task_master",
",",
"status",
",",
"None",
")",
"if",
"statusi",
"is",
"None",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"'unknown status {0!r}\\n'",
".",
"format",
"(",
"args",
".",
"status",
")",
")",
"return",
"else",
":",
"statusi",
"=",
"None",
"work_units",
"=",
"dict",
"(",
"self",
".",
"task_master",
".",
"get_work_units",
"(",
"work_spec_name",
",",
"state",
"=",
"statusi",
",",
"limit",
"=",
"args",
".",
"limit",
")",
")",
"work_unit_names",
"=",
"sorted",
"(",
"work_units",
".",
"keys",
"(",
")",
")",
"if",
"args",
".",
"limit",
":",
"work_unit_names",
"=",
"work_unit_names",
"[",
":",
"args",
".",
"limit",
"]",
"for",
"k",
"in",
"work_unit_names",
":",
"if",
"args",
".",
"details",
":",
"tback",
"=",
"work_units",
"[",
"k",
"]",
".",
"get",
"(",
"'traceback'",
",",
"''",
")",
"if",
"tback",
":",
"tback",
"+=",
"'\\n'",
"work_units",
"[",
"k",
"]",
"[",
"'traceback'",
"]",
"=",
"'displayed below'",
"self",
".",
"stdout",
".",
"write",
"(",
"'{0!r}: {1}\\n{2}'",
".",
"format",
"(",
"k",
",",
"pprint",
".",
"pformat",
"(",
"work_units",
"[",
"k",
"]",
",",
"indent",
"=",
"4",
")",
",",
"tback",
")",
")",
"else",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"'{0}\\n'",
".",
"format",
"(",
"k",
")",
")"
] |
list work units that have not yet completed
|
[
"list",
"work",
"units",
"that",
"have",
"not",
"yet",
"completed"
] |
5438a4a18be2801d7826c46e2079ba9639d2ecb4
|
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/run.py#L470-L497
|
241,004 | diffeo/rejester | rejester/run.py | Manager.do_work_unit |
def do_work_unit(self, args):
'''print basic details about work units'''
work_spec_name = self._get_work_spec_name(args)
for work_unit_name in args.unit:
status = self.task_master.get_work_unit_status(work_spec_name,
work_unit_name)
self.stdout.write('{0} ({1!r})\n'
.format(work_unit_name, status['status']))
if 'expiration' in status:
when = time.ctime(status['expiration'])
if status == 'available':
if status['expiration'] == 0:
self.stdout.write(' Never scheduled\n')
else:
self.stdout.write(' Available since: {0}\n'
.format(when))
else:
self.stdout.write(' Expires: {0}\n'.format(when))
if 'worker_id' in status:
try:
heartbeat = self.task_master.get_heartbeat(status['worker_id'])
except:
heartbeat = None
if heartbeat:
hostname = (heartbeat.get('fqdn', None) or
heartbeat.get('hostname', None) or
'')
ipaddrs = ', '.join(heartbeat.get('ipaddrs', ()))
if hostname and ipaddrs:
summary = '{0} on {1}'.format(hostname, ipaddrs)
else:
summary = hostname + ipaddrs
else:
summary = 'No information'
self.stdout.write(' Worker: {0} ({1})\n'.format(
status['worker_id'], summary))
if 'traceback' in status:
self.stdout.write(' Traceback:\n{0}\n'.format(
status['traceback']))
if 'depends_on' in status:
self.stdout.write(' Depends on:\n')
for what in status['depends_on']:
self.stdout.write(' {0!r}\n'.format(what))
|
python
|
def do_work_unit(self, args):
'''print basic details about work units'''
work_spec_name = self._get_work_spec_name(args)
for work_unit_name in args.unit:
status = self.task_master.get_work_unit_status(work_spec_name,
work_unit_name)
self.stdout.write('{0} ({1!r})\n'
.format(work_unit_name, status['status']))
if 'expiration' in status:
when = time.ctime(status['expiration'])
if status == 'available':
if status['expiration'] == 0:
self.stdout.write(' Never scheduled\n')
else:
self.stdout.write(' Available since: {0}\n'
.format(when))
else:
self.stdout.write(' Expires: {0}\n'.format(when))
if 'worker_id' in status:
try:
heartbeat = self.task_master.get_heartbeat(status['worker_id'])
except:
heartbeat = None
if heartbeat:
hostname = (heartbeat.get('fqdn', None) or
heartbeat.get('hostname', None) or
'')
ipaddrs = ', '.join(heartbeat.get('ipaddrs', ()))
if hostname and ipaddrs:
summary = '{0} on {1}'.format(hostname, ipaddrs)
else:
summary = hostname + ipaddrs
else:
summary = 'No information'
self.stdout.write(' Worker: {0} ({1})\n'.format(
status['worker_id'], summary))
if 'traceback' in status:
self.stdout.write(' Traceback:\n{0}\n'.format(
status['traceback']))
if 'depends_on' in status:
self.stdout.write(' Depends on:\n')
for what in status['depends_on']:
self.stdout.write(' {0!r}\n'.format(what))
|
[
"def",
"do_work_unit",
"(",
"self",
",",
"args",
")",
":",
"work_spec_name",
"=",
"self",
".",
"_get_work_spec_name",
"(",
"args",
")",
"for",
"work_unit_name",
"in",
"args",
".",
"unit",
":",
"status",
"=",
"self",
".",
"task_master",
".",
"get_work_unit_status",
"(",
"work_spec_name",
",",
"work_unit_name",
")",
"self",
".",
"stdout",
".",
"write",
"(",
"'{0} ({1!r})\\n'",
".",
"format",
"(",
"work_unit_name",
",",
"status",
"[",
"'status'",
"]",
")",
")",
"if",
"'expiration'",
"in",
"status",
":",
"when",
"=",
"time",
".",
"ctime",
"(",
"status",
"[",
"'expiration'",
"]",
")",
"if",
"status",
"==",
"'available'",
":",
"if",
"status",
"[",
"'expiration'",
"]",
"==",
"0",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"' Never scheduled\\n'",
")",
"else",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"' Available since: {0}\\n'",
".",
"format",
"(",
"when",
")",
")",
"else",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"' Expires: {0}\\n'",
".",
"format",
"(",
"when",
")",
")",
"if",
"'worker_id'",
"in",
"status",
":",
"try",
":",
"heartbeat",
"=",
"self",
".",
"task_master",
".",
"get_heartbeat",
"(",
"status",
"[",
"'worker_id'",
"]",
")",
"except",
":",
"heartbeat",
"=",
"None",
"if",
"heartbeat",
":",
"hostname",
"=",
"(",
"heartbeat",
".",
"get",
"(",
"'fqdn'",
",",
"None",
")",
"or",
"heartbeat",
".",
"get",
"(",
"'hostname'",
",",
"None",
")",
"or",
"''",
")",
"ipaddrs",
"=",
"', '",
".",
"join",
"(",
"heartbeat",
".",
"get",
"(",
"'ipaddrs'",
",",
"(",
")",
")",
")",
"if",
"hostname",
"and",
"ipaddrs",
":",
"summary",
"=",
"'{0} on {1}'",
".",
"format",
"(",
"hostname",
",",
"ipaddrs",
")",
"else",
":",
"summary",
"=",
"hostname",
"+",
"ipaddrs",
"else",
":",
"summary",
"=",
"'No information'",
"self",
".",
"stdout",
".",
"write",
"(",
"' Worker: {0} ({1})\\n'",
".",
"format",
"(",
"status",
"[",
"'worker_id'",
"]",
",",
"summary",
")",
")",
"if",
"'traceback'",
"in",
"status",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"' Traceback:\\n{0}\\n'",
".",
"format",
"(",
"status",
"[",
"'traceback'",
"]",
")",
")",
"if",
"'depends_on'",
"in",
"status",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"' Depends on:\\n'",
")",
"for",
"what",
"in",
"status",
"[",
"'depends_on'",
"]",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"' {0!r}\\n'",
".",
"format",
"(",
"what",
")",
")"
] |
print basic details about work units
|
[
"print",
"basic",
"details",
"about",
"work",
"units"
] |
5438a4a18be2801d7826c46e2079ba9639d2ecb4
|
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/run.py#L516-L558
|
241,005 | diffeo/rejester | rejester/run.py | Manager.do_retry |
def do_retry(self, args):
'''retry a specific failed job'''
work_spec_name = self._get_work_spec_name(args)
retried = 0
complained = False
try:
if args.all:
while True:
units = self.task_master.get_work_units(
work_spec_name, limit=1000,
state=self.task_master.FAILED)
units = [u[0] for u in units] # just need wu key
if not units: break
try:
self.task_master.retry(work_spec_name, *units)
retried += len(units)
except NoSuchWorkUnitError, e:
# Because of this sequence, this probably means
# something else retried the work unit. If we
# try again, we shouldn't see it in the failed
# list...so whatever
pass
else:
units = args.unit
try:
self.task_master.retry(work_spec_name, *units)
retried += len(units)
except NoSuchWorkUnitError, e:
unit = e.work_unit_name
self.stdout.write('No such failed work unit {0!r}.\n'
.format(unit))
complained = True
units.remove(unit)
# and try again
except NoSuchWorkSpecError, e:
# NB: you are not guaranteed to get this, especially with --all
self.stdout.write('Invalid work spec {0!r}.\n'
.format(work_spec_name))
return
if retried == 0 and not complained:
self.stdout.write('Nothing to do.\n')
elif retried == 1:
self.stdout.write('Retried {0} work unit.\n'.format(retried))
elif retried > 1:
self.stdout.write('Retried {0} work units.\n'.format(retried))
|
python
|
def do_retry(self, args):
'''retry a specific failed job'''
work_spec_name = self._get_work_spec_name(args)
retried = 0
complained = False
try:
if args.all:
while True:
units = self.task_master.get_work_units(
work_spec_name, limit=1000,
state=self.task_master.FAILED)
units = [u[0] for u in units] # just need wu key
if not units: break
try:
self.task_master.retry(work_spec_name, *units)
retried += len(units)
except NoSuchWorkUnitError, e:
# Because of this sequence, this probably means
# something else retried the work unit. If we
# try again, we shouldn't see it in the failed
# list...so whatever
pass
else:
units = args.unit
try:
self.task_master.retry(work_spec_name, *units)
retried += len(units)
except NoSuchWorkUnitError, e:
unit = e.work_unit_name
self.stdout.write('No such failed work unit {0!r}.\n'
.format(unit))
complained = True
units.remove(unit)
# and try again
except NoSuchWorkSpecError, e:
# NB: you are not guaranteed to get this, especially with --all
self.stdout.write('Invalid work spec {0!r}.\n'
.format(work_spec_name))
return
if retried == 0 and not complained:
self.stdout.write('Nothing to do.\n')
elif retried == 1:
self.stdout.write('Retried {0} work unit.\n'.format(retried))
elif retried > 1:
self.stdout.write('Retried {0} work units.\n'.format(retried))
|
[
"def",
"do_retry",
"(",
"self",
",",
"args",
")",
":",
"work_spec_name",
"=",
"self",
".",
"_get_work_spec_name",
"(",
"args",
")",
"retried",
"=",
"0",
"complained",
"=",
"False",
"try",
":",
"if",
"args",
".",
"all",
":",
"while",
"True",
":",
"units",
"=",
"self",
".",
"task_master",
".",
"get_work_units",
"(",
"work_spec_name",
",",
"limit",
"=",
"1000",
",",
"state",
"=",
"self",
".",
"task_master",
".",
"FAILED",
")",
"units",
"=",
"[",
"u",
"[",
"0",
"]",
"for",
"u",
"in",
"units",
"]",
"# just need wu key",
"if",
"not",
"units",
":",
"break",
"try",
":",
"self",
".",
"task_master",
".",
"retry",
"(",
"work_spec_name",
",",
"*",
"units",
")",
"retried",
"+=",
"len",
"(",
"units",
")",
"except",
"NoSuchWorkUnitError",
",",
"e",
":",
"# Because of this sequence, this probably means",
"# something else retried the work unit. If we",
"# try again, we shouldn't see it in the failed",
"# list...so whatever",
"pass",
"else",
":",
"units",
"=",
"args",
".",
"unit",
"try",
":",
"self",
".",
"task_master",
".",
"retry",
"(",
"work_spec_name",
",",
"*",
"units",
")",
"retried",
"+=",
"len",
"(",
"units",
")",
"except",
"NoSuchWorkUnitError",
",",
"e",
":",
"unit",
"=",
"e",
".",
"work_unit_name",
"self",
".",
"stdout",
".",
"write",
"(",
"'No such failed work unit {0!r}.\\n'",
".",
"format",
"(",
"unit",
")",
")",
"complained",
"=",
"True",
"units",
".",
"remove",
"(",
"unit",
")",
"# and try again",
"except",
"NoSuchWorkSpecError",
",",
"e",
":",
"# NB: you are not guaranteed to get this, especially with --all",
"self",
".",
"stdout",
".",
"write",
"(",
"'Invalid work spec {0!r}.\\n'",
".",
"format",
"(",
"work_spec_name",
")",
")",
"return",
"if",
"retried",
"==",
"0",
"and",
"not",
"complained",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"'Nothing to do.\\n'",
")",
"elif",
"retried",
"==",
"1",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"'Retried {0} work unit.\\n'",
".",
"format",
"(",
"retried",
")",
")",
"elif",
"retried",
">",
"1",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"'Retried {0} work units.\\n'",
".",
"format",
"(",
"retried",
")",
")"
] |
retry a specific failed job
|
[
"retry",
"a",
"specific",
"failed",
"job"
] |
5438a4a18be2801d7826c46e2079ba9639d2ecb4
|
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/run.py#L566-L610
|
241,006 | diffeo/rejester | rejester/run.py | Manager.do_clear |
def do_clear(self, args):
'''remove work units from a work spec'''
# Which units?
work_spec_name = self._get_work_spec_name(args)
units = args.unit or None
# What to do?
count = 0
if args.status is None:
all = units is None
count += self.task_master.del_work_units(work_spec_name, work_unit_keys=units, all=all)
elif args.status == 'available':
count += self.task_master.del_work_units(
work_spec_name, work_unit_keys=units, state=self.task_master.AVAILABLE)
elif args.status == 'pending':
count += self.task_master.del_work_units(
work_spec_name, work_unit_keys=units, state=self.task_master.PENDING)
elif args.status == 'blocked':
count += self.task_master.del_work_units(
work_spec_name, work_unit_keys=units, state=self.task_master.BLOCKED)
elif args.status == 'finished':
count += self.task_master.del_work_units(
work_spec_name, work_unit_keys=units, state=self.task_master.FINISHED)
elif args.status == 'failed':
count += self.task_master.del_work_units(
work_spec_name, work_unit_keys=units, state=self.task_master.FAILED)
self.stdout.write('Removed {0} work units.\n'.format(count))
|
python
|
def do_clear(self, args):
'''remove work units from a work spec'''
# Which units?
work_spec_name = self._get_work_spec_name(args)
units = args.unit or None
# What to do?
count = 0
if args.status is None:
all = units is None
count += self.task_master.del_work_units(work_spec_name, work_unit_keys=units, all=all)
elif args.status == 'available':
count += self.task_master.del_work_units(
work_spec_name, work_unit_keys=units, state=self.task_master.AVAILABLE)
elif args.status == 'pending':
count += self.task_master.del_work_units(
work_spec_name, work_unit_keys=units, state=self.task_master.PENDING)
elif args.status == 'blocked':
count += self.task_master.del_work_units(
work_spec_name, work_unit_keys=units, state=self.task_master.BLOCKED)
elif args.status == 'finished':
count += self.task_master.del_work_units(
work_spec_name, work_unit_keys=units, state=self.task_master.FINISHED)
elif args.status == 'failed':
count += self.task_master.del_work_units(
work_spec_name, work_unit_keys=units, state=self.task_master.FAILED)
self.stdout.write('Removed {0} work units.\n'.format(count))
|
[
"def",
"do_clear",
"(",
"self",
",",
"args",
")",
":",
"# Which units?",
"work_spec_name",
"=",
"self",
".",
"_get_work_spec_name",
"(",
"args",
")",
"units",
"=",
"args",
".",
"unit",
"or",
"None",
"# What to do?",
"count",
"=",
"0",
"if",
"args",
".",
"status",
"is",
"None",
":",
"all",
"=",
"units",
"is",
"None",
"count",
"+=",
"self",
".",
"task_master",
".",
"del_work_units",
"(",
"work_spec_name",
",",
"work_unit_keys",
"=",
"units",
",",
"all",
"=",
"all",
")",
"elif",
"args",
".",
"status",
"==",
"'available'",
":",
"count",
"+=",
"self",
".",
"task_master",
".",
"del_work_units",
"(",
"work_spec_name",
",",
"work_unit_keys",
"=",
"units",
",",
"state",
"=",
"self",
".",
"task_master",
".",
"AVAILABLE",
")",
"elif",
"args",
".",
"status",
"==",
"'pending'",
":",
"count",
"+=",
"self",
".",
"task_master",
".",
"del_work_units",
"(",
"work_spec_name",
",",
"work_unit_keys",
"=",
"units",
",",
"state",
"=",
"self",
".",
"task_master",
".",
"PENDING",
")",
"elif",
"args",
".",
"status",
"==",
"'blocked'",
":",
"count",
"+=",
"self",
".",
"task_master",
".",
"del_work_units",
"(",
"work_spec_name",
",",
"work_unit_keys",
"=",
"units",
",",
"state",
"=",
"self",
".",
"task_master",
".",
"BLOCKED",
")",
"elif",
"args",
".",
"status",
"==",
"'finished'",
":",
"count",
"+=",
"self",
".",
"task_master",
".",
"del_work_units",
"(",
"work_spec_name",
",",
"work_unit_keys",
"=",
"units",
",",
"state",
"=",
"self",
".",
"task_master",
".",
"FINISHED",
")",
"elif",
"args",
".",
"status",
"==",
"'failed'",
":",
"count",
"+=",
"self",
".",
"task_master",
".",
"del_work_units",
"(",
"work_spec_name",
",",
"work_unit_keys",
"=",
"units",
",",
"state",
"=",
"self",
".",
"task_master",
".",
"FAILED",
")",
"self",
".",
"stdout",
".",
"write",
"(",
"'Removed {0} work units.\\n'",
".",
"format",
"(",
"count",
")",
")"
] |
remove work units from a work spec
|
[
"remove",
"work",
"units",
"from",
"a",
"work",
"spec"
] |
5438a4a18be2801d7826c46e2079ba9639d2ecb4
|
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/run.py#L620-L645
|
241,007 | diffeo/rejester | rejester/run.py | Manager.do_mode |
def do_mode(self, args):
'''get or set the global rejester worker mode'''
if args.mode:
mode = { 'idle': self.task_master.IDLE,
'run': self.task_master.RUN,
'terminate': self.task_master.TERMINATE }[args.mode]
self.task_master.set_mode(mode)
self.stdout.write('set mode to {0!r}\n'.format(args.mode))
else:
mode = self.task_master.get_mode()
self.stdout.write('{0!s}\n'.format(mode))
|
python
|
def do_mode(self, args):
'''get or set the global rejester worker mode'''
if args.mode:
mode = { 'idle': self.task_master.IDLE,
'run': self.task_master.RUN,
'terminate': self.task_master.TERMINATE }[args.mode]
self.task_master.set_mode(mode)
self.stdout.write('set mode to {0!r}\n'.format(args.mode))
else:
mode = self.task_master.get_mode()
self.stdout.write('{0!s}\n'.format(mode))
|
[
"def",
"do_mode",
"(",
"self",
",",
"args",
")",
":",
"if",
"args",
".",
"mode",
":",
"mode",
"=",
"{",
"'idle'",
":",
"self",
".",
"task_master",
".",
"IDLE",
",",
"'run'",
":",
"self",
".",
"task_master",
".",
"RUN",
",",
"'terminate'",
":",
"self",
".",
"task_master",
".",
"TERMINATE",
"}",
"[",
"args",
".",
"mode",
"]",
"self",
".",
"task_master",
".",
"set_mode",
"(",
"mode",
")",
"self",
".",
"stdout",
".",
"write",
"(",
"'set mode to {0!r}\\n'",
".",
"format",
"(",
"args",
".",
"mode",
")",
")",
"else",
":",
"mode",
"=",
"self",
".",
"task_master",
".",
"get_mode",
"(",
")",
"self",
".",
"stdout",
".",
"write",
"(",
"'{0!s}\\n'",
".",
"format",
"(",
"mode",
")",
")"
] |
get or set the global rejester worker mode
|
[
"get",
"or",
"set",
"the",
"global",
"rejester",
"worker",
"mode"
] |
5438a4a18be2801d7826c46e2079ba9639d2ecb4
|
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/run.py#L651-L661
|
241,008 | diffeo/rejester | rejester/run.py | Manager.do_workers |
def do_workers(self, args):
'''list all known workers'''
workers = self.task_master.workers(alive=not args.all)
for k in sorted(workers.iterkeys()):
self.stdout.write('{0} ({1})\n'.format(k, workers[k]))
if args.details:
heartbeat = self.task_master.get_heartbeat(k)
for hk, hv in heartbeat.iteritems():
self.stdout.write(' {0}: {1}\n'.format(hk, hv))
|
python
|
def do_workers(self, args):
'''list all known workers'''
workers = self.task_master.workers(alive=not args.all)
for k in sorted(workers.iterkeys()):
self.stdout.write('{0} ({1})\n'.format(k, workers[k]))
if args.details:
heartbeat = self.task_master.get_heartbeat(k)
for hk, hv in heartbeat.iteritems():
self.stdout.write(' {0}: {1}\n'.format(hk, hv))
|
[
"def",
"do_workers",
"(",
"self",
",",
"args",
")",
":",
"workers",
"=",
"self",
".",
"task_master",
".",
"workers",
"(",
"alive",
"=",
"not",
"args",
".",
"all",
")",
"for",
"k",
"in",
"sorted",
"(",
"workers",
".",
"iterkeys",
"(",
")",
")",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"'{0} ({1})\\n'",
".",
"format",
"(",
"k",
",",
"workers",
"[",
"k",
"]",
")",
")",
"if",
"args",
".",
"details",
":",
"heartbeat",
"=",
"self",
".",
"task_master",
".",
"get_heartbeat",
"(",
"k",
")",
"for",
"hk",
",",
"hv",
"in",
"heartbeat",
".",
"iteritems",
"(",
")",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"' {0}: {1}\\n'",
".",
"format",
"(",
"hk",
",",
"hv",
")",
")"
] |
list all known workers
|
[
"list",
"all",
"known",
"workers"
] |
5438a4a18be2801d7826c46e2079ba9639d2ecb4
|
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/run.py#L687-L695
|
241,009 | diffeo/rejester | rejester/run.py | Manager.do_run_one |
def do_run_one(self, args):
'''run a single job'''
work_spec_names = args.from_work_spec or None
worker = SingleWorker(self.config, task_master=self.task_master, work_spec_names=work_spec_names, max_jobs=args.max_jobs)
worker.register()
rc = False
starttime = time.time()
count = 0
try:
while True:
rc = worker.run()
if not rc:
break
count += 1
if (args.limit_seconds is None) and (args.limit_count is None):
# only do one
break
if (args.limit_seconds is not None) and ((time.time() - starttime) >= args.limit_seconds):
break
if (args.limit_count is not None) and (count >= args.limit_count):
break
finally:
worker.unregister()
if not rc:
self.exitcode = 2
|
python
|
def do_run_one(self, args):
'''run a single job'''
work_spec_names = args.from_work_spec or None
worker = SingleWorker(self.config, task_master=self.task_master, work_spec_names=work_spec_names, max_jobs=args.max_jobs)
worker.register()
rc = False
starttime = time.time()
count = 0
try:
while True:
rc = worker.run()
if not rc:
break
count += 1
if (args.limit_seconds is None) and (args.limit_count is None):
# only do one
break
if (args.limit_seconds is not None) and ((time.time() - starttime) >= args.limit_seconds):
break
if (args.limit_count is not None) and (count >= args.limit_count):
break
finally:
worker.unregister()
if not rc:
self.exitcode = 2
|
[
"def",
"do_run_one",
"(",
"self",
",",
"args",
")",
":",
"work_spec_names",
"=",
"args",
".",
"from_work_spec",
"or",
"None",
"worker",
"=",
"SingleWorker",
"(",
"self",
".",
"config",
",",
"task_master",
"=",
"self",
".",
"task_master",
",",
"work_spec_names",
"=",
"work_spec_names",
",",
"max_jobs",
"=",
"args",
".",
"max_jobs",
")",
"worker",
".",
"register",
"(",
")",
"rc",
"=",
"False",
"starttime",
"=",
"time",
".",
"time",
"(",
")",
"count",
"=",
"0",
"try",
":",
"while",
"True",
":",
"rc",
"=",
"worker",
".",
"run",
"(",
")",
"if",
"not",
"rc",
":",
"break",
"count",
"+=",
"1",
"if",
"(",
"args",
".",
"limit_seconds",
"is",
"None",
")",
"and",
"(",
"args",
".",
"limit_count",
"is",
"None",
")",
":",
"# only do one",
"break",
"if",
"(",
"args",
".",
"limit_seconds",
"is",
"not",
"None",
")",
"and",
"(",
"(",
"time",
".",
"time",
"(",
")",
"-",
"starttime",
")",
">=",
"args",
".",
"limit_seconds",
")",
":",
"break",
"if",
"(",
"args",
".",
"limit_count",
"is",
"not",
"None",
")",
"and",
"(",
"count",
">=",
"args",
".",
"limit_count",
")",
":",
"break",
"finally",
":",
"worker",
".",
"unregister",
"(",
")",
"if",
"not",
"rc",
":",
"self",
".",
"exitcode",
"=",
"2"
] |
run a single job
|
[
"run",
"a",
"single",
"job"
] |
5438a4a18be2801d7826c46e2079ba9639d2ecb4
|
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/run.py#L702-L726
|
241,010 | se-esss-litterbox/Pynac | Pynac/Core.py | get_number_of_particles |
def get_number_of_particles():
"""
Queries the ``dynac.short`` file for the number of particles used in the
simulation.
"""
with open('dynac.short') as f:
data_str = ''.join(line for line in f.readlines())
num_of_parts = int(data_str.split('Simulation with')[1].strip().split()[0])
return num_of_parts
|
python
|
def get_number_of_particles():
"""
Queries the ``dynac.short`` file for the number of particles used in the
simulation.
"""
with open('dynac.short') as f:
data_str = ''.join(line for line in f.readlines())
num_of_parts = int(data_str.split('Simulation with')[1].strip().split()[0])
return num_of_parts
|
[
"def",
"get_number_of_particles",
"(",
")",
":",
"with",
"open",
"(",
"'dynac.short'",
")",
"as",
"f",
":",
"data_str",
"=",
"''",
".",
"join",
"(",
"line",
"for",
"line",
"in",
"f",
".",
"readlines",
"(",
")",
")",
"num_of_parts",
"=",
"int",
"(",
"data_str",
".",
"split",
"(",
"'Simulation with'",
")",
"[",
"1",
"]",
".",
"strip",
"(",
")",
".",
"split",
"(",
")",
"[",
"0",
"]",
")",
"return",
"num_of_parts"
] |
Queries the ``dynac.short`` file for the number of particles used in the
simulation.
|
[
"Queries",
"the",
"dynac",
".",
"short",
"file",
"for",
"the",
"number",
"of",
"particles",
"used",
"in",
"the",
"simulation",
"."
] |
97e20aa85d20112cd114faa54a8197c5d0f61209
|
https://github.com/se-esss-litterbox/Pynac/blob/97e20aa85d20112cd114faa54a8197c5d0f61209/Pynac/Core.py#L666-L674
|
241,011 | se-esss-litterbox/Pynac | Pynac/Core.py | multi_process_pynac |
def multi_process_pynac(file_list, pynac_func, num_iters=100, max_workers=8):
"""
Use a ProcessPool from the ``concurrent.futures`` module to execute ``num_iters``
number of instances of ``pynac_func``. This function takes advantage of ``do_single_dynac_process``
and ``pynac_in_sub_directory``.
"""
with ProcessPoolExecutor(max_workers=max_workers) as executor:
tasks = [executor.submit(do_single_dynac_process, num, file_list, pynac_func) for num in range(num_iters)]
exc = [task.exception() for task in tasks if task.exception()]
if exc:
return exc
else:
return "No errors encountered"
|
python
|
def multi_process_pynac(file_list, pynac_func, num_iters=100, max_workers=8):
"""
Use a ProcessPool from the ``concurrent.futures`` module to execute ``num_iters``
number of instances of ``pynac_func``. This function takes advantage of ``do_single_dynac_process``
and ``pynac_in_sub_directory``.
"""
with ProcessPoolExecutor(max_workers=max_workers) as executor:
tasks = [executor.submit(do_single_dynac_process, num, file_list, pynac_func) for num in range(num_iters)]
exc = [task.exception() for task in tasks if task.exception()]
if exc:
return exc
else:
return "No errors encountered"
|
[
"def",
"multi_process_pynac",
"(",
"file_list",
",",
"pynac_func",
",",
"num_iters",
"=",
"100",
",",
"max_workers",
"=",
"8",
")",
":",
"with",
"ProcessPoolExecutor",
"(",
"max_workers",
"=",
"max_workers",
")",
"as",
"executor",
":",
"tasks",
"=",
"[",
"executor",
".",
"submit",
"(",
"do_single_dynac_process",
",",
"num",
",",
"file_list",
",",
"pynac_func",
")",
"for",
"num",
"in",
"range",
"(",
"num_iters",
")",
"]",
"exc",
"=",
"[",
"task",
".",
"exception",
"(",
")",
"for",
"task",
"in",
"tasks",
"if",
"task",
".",
"exception",
"(",
")",
"]",
"if",
"exc",
":",
"return",
"exc",
"else",
":",
"return",
"\"No errors encountered\""
] |
Use a ProcessPool from the ``concurrent.futures`` module to execute ``num_iters``
number of instances of ``pynac_func``. This function takes advantage of ``do_single_dynac_process``
and ``pynac_in_sub_directory``.
|
[
"Use",
"a",
"ProcessPool",
"from",
"the",
"concurrent",
".",
"futures",
"module",
"to",
"execute",
"num_iters",
"number",
"of",
"instances",
"of",
"pynac_func",
".",
"This",
"function",
"takes",
"advantage",
"of",
"do_single_dynac_process",
"and",
"pynac_in_sub_directory",
"."
] |
97e20aa85d20112cd114faa54a8197c5d0f61209
|
https://github.com/se-esss-litterbox/Pynac/blob/97e20aa85d20112cd114faa54a8197c5d0f61209/Pynac/Core.py#L694-L706
|
241,012 | se-esss-litterbox/Pynac | Pynac/Core.py | pynac_in_sub_directory |
def pynac_in_sub_directory(num, file_list):
"""
A context manager to create a new directory, move the files listed in ``file_list``
to that directory, and change to that directory before handing control back to
context. The closing action is to change back to the original directory.
The directory name is based on the ``num`` input, and if it already exists, it
will be deleted upon entering the context.
The primary purpose of this function is to enable multiprocess use of Pynac via
the ``multi_process_pynac`` function.
"""
print('Running %d' % num)
new_dir = 'dynacProc_%04d' % num
if os.path.isdir(new_dir):
shutil.rmtree(new_dir)
os.mkdir(new_dir)
for f in file_list:
shutil.copy(f, new_dir)
os.chdir(new_dir)
yield
os.chdir('..')
|
python
|
def pynac_in_sub_directory(num, file_list):
"""
A context manager to create a new directory, move the files listed in ``file_list``
to that directory, and change to that directory before handing control back to
context. The closing action is to change back to the original directory.
The directory name is based on the ``num`` input, and if it already exists, it
will be deleted upon entering the context.
The primary purpose of this function is to enable multiprocess use of Pynac via
the ``multi_process_pynac`` function.
"""
print('Running %d' % num)
new_dir = 'dynacProc_%04d' % num
if os.path.isdir(new_dir):
shutil.rmtree(new_dir)
os.mkdir(new_dir)
for f in file_list:
shutil.copy(f, new_dir)
os.chdir(new_dir)
yield
os.chdir('..')
|
[
"def",
"pynac_in_sub_directory",
"(",
"num",
",",
"file_list",
")",
":",
"print",
"(",
"'Running %d'",
"%",
"num",
")",
"new_dir",
"=",
"'dynacProc_%04d'",
"%",
"num",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"new_dir",
")",
":",
"shutil",
".",
"rmtree",
"(",
"new_dir",
")",
"os",
".",
"mkdir",
"(",
"new_dir",
")",
"for",
"f",
"in",
"file_list",
":",
"shutil",
".",
"copy",
"(",
"f",
",",
"new_dir",
")",
"os",
".",
"chdir",
"(",
"new_dir",
")",
"yield",
"os",
".",
"chdir",
"(",
"'..'",
")"
] |
A context manager to create a new directory, move the files listed in ``file_list``
to that directory, and change to that directory before handing control back to
context. The closing action is to change back to the original directory.
The directory name is based on the ``num`` input, and if it already exists, it
will be deleted upon entering the context.
The primary purpose of this function is to enable multiprocess use of Pynac via
the ``multi_process_pynac`` function.
|
[
"A",
"context",
"manager",
"to",
"create",
"a",
"new",
"directory",
"move",
"the",
"files",
"listed",
"in",
"file_list",
"to",
"that",
"directory",
"and",
"change",
"to",
"that",
"directory",
"before",
"handing",
"control",
"back",
"to",
"context",
".",
"The",
"closing",
"action",
"is",
"to",
"change",
"back",
"to",
"the",
"original",
"directory",
"."
] |
97e20aa85d20112cd114faa54a8197c5d0f61209
|
https://github.com/se-esss-litterbox/Pynac/blob/97e20aa85d20112cd114faa54a8197c5d0f61209/Pynac/Core.py#L723-L746
|
241,013 | se-esss-litterbox/Pynac | Pynac/Core.py | Pynac.run |
def run(self):
"""
Run the simulation in the current directory.
"""
self._start_dynac_proc(stdin=subp.PIPE, stdout=subp.PIPE)
str2write = self.name + '\r\n'
if self._DEBUG:
with open('pynacrun.log', 'a') as f:
f.write(str2write)
self.dynacProc.stdin.write(str2write.encode()) # The name field
for pynEle in self.lattice:
try:
ele = pynEle.dynacRepresentation()
except AttributeError:
ele = pynEle
str2write = ele[0]
if self._DEBUG:
with open('pynacrun.log', 'a') as f:
f.write(str2write + '\r\n')
try:
self.dynacProc.stdin.write((str2write + '\r\n').encode())
except IOError:
break
for datum in ele[1]:
str2write = ' '.join([str(i) for i in datum])
if self._DEBUG:
with open('pynacrun.log', 'a') as f:
f.write(str2write+'\r\n')
try:
self.dynacProc.stdin.write((str2write+'\r\n').encode())
except IOError:
break
self.dynacProc.stdin.close()
if self.dynacProc.wait() != 0:
raise RuntimeError("Errors occured during execution of Dynac")
|
python
|
def run(self):
"""
Run the simulation in the current directory.
"""
self._start_dynac_proc(stdin=subp.PIPE, stdout=subp.PIPE)
str2write = self.name + '\r\n'
if self._DEBUG:
with open('pynacrun.log', 'a') as f:
f.write(str2write)
self.dynacProc.stdin.write(str2write.encode()) # The name field
for pynEle in self.lattice:
try:
ele = pynEle.dynacRepresentation()
except AttributeError:
ele = pynEle
str2write = ele[0]
if self._DEBUG:
with open('pynacrun.log', 'a') as f:
f.write(str2write + '\r\n')
try:
self.dynacProc.stdin.write((str2write + '\r\n').encode())
except IOError:
break
for datum in ele[1]:
str2write = ' '.join([str(i) for i in datum])
if self._DEBUG:
with open('pynacrun.log', 'a') as f:
f.write(str2write+'\r\n')
try:
self.dynacProc.stdin.write((str2write+'\r\n').encode())
except IOError:
break
self.dynacProc.stdin.close()
if self.dynacProc.wait() != 0:
raise RuntimeError("Errors occured during execution of Dynac")
|
[
"def",
"run",
"(",
"self",
")",
":",
"self",
".",
"_start_dynac_proc",
"(",
"stdin",
"=",
"subp",
".",
"PIPE",
",",
"stdout",
"=",
"subp",
".",
"PIPE",
")",
"str2write",
"=",
"self",
".",
"name",
"+",
"'\\r\\n'",
"if",
"self",
".",
"_DEBUG",
":",
"with",
"open",
"(",
"'pynacrun.log'",
",",
"'a'",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"str2write",
")",
"self",
".",
"dynacProc",
".",
"stdin",
".",
"write",
"(",
"str2write",
".",
"encode",
"(",
")",
")",
"# The name field",
"for",
"pynEle",
"in",
"self",
".",
"lattice",
":",
"try",
":",
"ele",
"=",
"pynEle",
".",
"dynacRepresentation",
"(",
")",
"except",
"AttributeError",
":",
"ele",
"=",
"pynEle",
"str2write",
"=",
"ele",
"[",
"0",
"]",
"if",
"self",
".",
"_DEBUG",
":",
"with",
"open",
"(",
"'pynacrun.log'",
",",
"'a'",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"str2write",
"+",
"'\\r\\n'",
")",
"try",
":",
"self",
".",
"dynacProc",
".",
"stdin",
".",
"write",
"(",
"(",
"str2write",
"+",
"'\\r\\n'",
")",
".",
"encode",
"(",
")",
")",
"except",
"IOError",
":",
"break",
"for",
"datum",
"in",
"ele",
"[",
"1",
"]",
":",
"str2write",
"=",
"' '",
".",
"join",
"(",
"[",
"str",
"(",
"i",
")",
"for",
"i",
"in",
"datum",
"]",
")",
"if",
"self",
".",
"_DEBUG",
":",
"with",
"open",
"(",
"'pynacrun.log'",
",",
"'a'",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"str2write",
"+",
"'\\r\\n'",
")",
"try",
":",
"self",
".",
"dynacProc",
".",
"stdin",
".",
"write",
"(",
"(",
"str2write",
"+",
"'\\r\\n'",
")",
".",
"encode",
"(",
")",
")",
"except",
"IOError",
":",
"break",
"self",
".",
"dynacProc",
".",
"stdin",
".",
"close",
"(",
")",
"if",
"self",
".",
"dynacProc",
".",
"wait",
"(",
")",
"!=",
"0",
":",
"raise",
"RuntimeError",
"(",
"\"Errors occured during execution of Dynac\"",
")"
] |
Run the simulation in the current directory.
|
[
"Run",
"the",
"simulation",
"in",
"the",
"current",
"directory",
"."
] |
97e20aa85d20112cd114faa54a8197c5d0f61209
|
https://github.com/se-esss-litterbox/Pynac/blob/97e20aa85d20112cd114faa54a8197c5d0f61209/Pynac/Core.py#L73-L107
|
241,014 | AndresMWeber/Nomenclate | nomenclate/core/nomenclature.py | Nomenclate.format |
def format(self, format_target, remove_obsolete=True):
""" Changes the internal self.format_string_object format target based on input. Also checks to see if input
is an entry in the config file in case we want to switch to a preexisting config format.
:param format_target: str, input for the new format type. All strings will be the new tokens.
:param remove_obsolete: bool, dictates whether we are removing the obselete tokens or not that
previously existed
:return: None
"""
original_format, original_format_order = (self.format, self.format_order)
try:
format_target = self.CFG.get(format_target, return_type=str, throw_null_return_error=True)
except (errors.ResourceNotFoundError, KeyError):
pass
self.format_string_object.swap_format(format_target)
self._update_tokens_from_swap_format(original_format, original_format_order, remove_obsolete=remove_obsolete)
|
python
|
def format(self, format_target, remove_obsolete=True):
""" Changes the internal self.format_string_object format target based on input. Also checks to see if input
is an entry in the config file in case we want to switch to a preexisting config format.
:param format_target: str, input for the new format type. All strings will be the new tokens.
:param remove_obsolete: bool, dictates whether we are removing the obselete tokens or not that
previously existed
:return: None
"""
original_format, original_format_order = (self.format, self.format_order)
try:
format_target = self.CFG.get(format_target, return_type=str, throw_null_return_error=True)
except (errors.ResourceNotFoundError, KeyError):
pass
self.format_string_object.swap_format(format_target)
self._update_tokens_from_swap_format(original_format, original_format_order, remove_obsolete=remove_obsolete)
|
[
"def",
"format",
"(",
"self",
",",
"format_target",
",",
"remove_obsolete",
"=",
"True",
")",
":",
"original_format",
",",
"original_format_order",
"=",
"(",
"self",
".",
"format",
",",
"self",
".",
"format_order",
")",
"try",
":",
"format_target",
"=",
"self",
".",
"CFG",
".",
"get",
"(",
"format_target",
",",
"return_type",
"=",
"str",
",",
"throw_null_return_error",
"=",
"True",
")",
"except",
"(",
"errors",
".",
"ResourceNotFoundError",
",",
"KeyError",
")",
":",
"pass",
"self",
".",
"format_string_object",
".",
"swap_format",
"(",
"format_target",
")",
"self",
".",
"_update_tokens_from_swap_format",
"(",
"original_format",
",",
"original_format_order",
",",
"remove_obsolete",
"=",
"remove_obsolete",
")"
] |
Changes the internal self.format_string_object format target based on input. Also checks to see if input
is an entry in the config file in case we want to switch to a preexisting config format.
:param format_target: str, input for the new format type. All strings will be the new tokens.
:param remove_obsolete: bool, dictates whether we are removing the obselete tokens or not that
previously existed
:return: None
|
[
"Changes",
"the",
"internal",
"self",
".",
"format_string_object",
"format",
"target",
"based",
"on",
"input",
".",
"Also",
"checks",
"to",
"see",
"if",
"input",
"is",
"an",
"entry",
"in",
"the",
"config",
"file",
"in",
"case",
"we",
"want",
"to",
"switch",
"to",
"a",
"preexisting",
"config",
"format",
"."
] |
e6d6fc28beac042bad588e56fbe77531d2de6b6f
|
https://github.com/AndresMWeber/Nomenclate/blob/e6d6fc28beac042bad588e56fbe77531d2de6b6f/nomenclate/core/nomenclature.py#L77-L94
|
241,015 | AndresMWeber/Nomenclate | nomenclate/core/nomenclature.py | Nomenclate.initialize_format_options |
def initialize_format_options(self, format_target=''):
""" First attempts to use format_target as a config path or gets the default format
if it's invalid or is empty.
:param format_target: (str, list(str)), can be either a query path to a format
or in format of a naming string the sections should be spaced around
e.g. - this_is_a_naming_string
:raises: IOError
"""
try:
if format_target:
self.format = format_target
else:
raise errors.FormatError
except errors.FormatError:
self.format_string_object.swap_format(self.CFG.get(self.DEFAULT_FORMAT_PATH, return_type=str))
|
python
|
def initialize_format_options(self, format_target=''):
""" First attempts to use format_target as a config path or gets the default format
if it's invalid or is empty.
:param format_target: (str, list(str)), can be either a query path to a format
or in format of a naming string the sections should be spaced around
e.g. - this_is_a_naming_string
:raises: IOError
"""
try:
if format_target:
self.format = format_target
else:
raise errors.FormatError
except errors.FormatError:
self.format_string_object.swap_format(self.CFG.get(self.DEFAULT_FORMAT_PATH, return_type=str))
|
[
"def",
"initialize_format_options",
"(",
"self",
",",
"format_target",
"=",
"''",
")",
":",
"try",
":",
"if",
"format_target",
":",
"self",
".",
"format",
"=",
"format_target",
"else",
":",
"raise",
"errors",
".",
"FormatError",
"except",
"errors",
".",
"FormatError",
":",
"self",
".",
"format_string_object",
".",
"swap_format",
"(",
"self",
".",
"CFG",
".",
"get",
"(",
"self",
".",
"DEFAULT_FORMAT_PATH",
",",
"return_type",
"=",
"str",
")",
")"
] |
First attempts to use format_target as a config path or gets the default format
if it's invalid or is empty.
:param format_target: (str, list(str)), can be either a query path to a format
or in format of a naming string the sections should be spaced around
e.g. - this_is_a_naming_string
:raises: IOError
|
[
"First",
"attempts",
"to",
"use",
"format_target",
"as",
"a",
"config",
"path",
"or",
"gets",
"the",
"default",
"format",
"if",
"it",
"s",
"invalid",
"or",
"is",
"empty",
"."
] |
e6d6fc28beac042bad588e56fbe77531d2de6b6f
|
https://github.com/AndresMWeber/Nomenclate/blob/e6d6fc28beac042bad588e56fbe77531d2de6b6f/nomenclate/core/nomenclature.py#L107-L122
|
241,016 | AndresMWeber/Nomenclate | nomenclate/core/nomenclature.py | Nomenclate.initialize_options |
def initialize_options(cls):
""" Stores options from the config file
"""
cls.CONFIG_OPTIONS = cls.CFG.get(cls.CONFIG_PATH, return_type=dict)
|
python
|
def initialize_options(cls):
""" Stores options from the config file
"""
cls.CONFIG_OPTIONS = cls.CFG.get(cls.CONFIG_PATH, return_type=dict)
|
[
"def",
"initialize_options",
"(",
"cls",
")",
":",
"cls",
".",
"CONFIG_OPTIONS",
"=",
"cls",
".",
"CFG",
".",
"get",
"(",
"cls",
".",
"CONFIG_PATH",
",",
"return_type",
"=",
"dict",
")"
] |
Stores options from the config file
|
[
"Stores",
"options",
"from",
"the",
"config",
"file"
] |
e6d6fc28beac042bad588e56fbe77531d2de6b6f
|
https://github.com/AndresMWeber/Nomenclate/blob/e6d6fc28beac042bad588e56fbe77531d2de6b6f/nomenclate/core/nomenclature.py#L125-L129
|
241,017 | AndresMWeber/Nomenclate | nomenclate/core/nomenclature.py | Nomenclate.merge_dict |
def merge_dict(self, *args, **kwargs):
""" Takes variable inputs, compiles them into a dictionary then merges it to the current nomenclate's state
:param args: (dict, Nomenclate), any number of dictionary inputs or Nomenclates to be converted to dicts
:param kwargs: str, any number of kwargs that represent token:value pairs
"""
input_dict = self._convert_input(*args, **kwargs)
if input_dict:
self._sift_and_init_configs(input_dict)
self.token_dict.merge_serialization(input_dict)
|
python
|
def merge_dict(self, *args, **kwargs):
""" Takes variable inputs, compiles them into a dictionary then merges it to the current nomenclate's state
:param args: (dict, Nomenclate), any number of dictionary inputs or Nomenclates to be converted to dicts
:param kwargs: str, any number of kwargs that represent token:value pairs
"""
input_dict = self._convert_input(*args, **kwargs)
if input_dict:
self._sift_and_init_configs(input_dict)
self.token_dict.merge_serialization(input_dict)
|
[
"def",
"merge_dict",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"input_dict",
"=",
"self",
".",
"_convert_input",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"if",
"input_dict",
":",
"self",
".",
"_sift_and_init_configs",
"(",
"input_dict",
")",
"self",
".",
"token_dict",
".",
"merge_serialization",
"(",
"input_dict",
")"
] |
Takes variable inputs, compiles them into a dictionary then merges it to the current nomenclate's state
:param args: (dict, Nomenclate), any number of dictionary inputs or Nomenclates to be converted to dicts
:param kwargs: str, any number of kwargs that represent token:value pairs
|
[
"Takes",
"variable",
"inputs",
"compiles",
"them",
"into",
"a",
"dictionary",
"then",
"merges",
"it",
"to",
"the",
"current",
"nomenclate",
"s",
"state"
] |
e6d6fc28beac042bad588e56fbe77531d2de6b6f
|
https://github.com/AndresMWeber/Nomenclate/blob/e6d6fc28beac042bad588e56fbe77531d2de6b6f/nomenclate/core/nomenclature.py#L141-L150
|
241,018 | AndresMWeber/Nomenclate | nomenclate/core/nomenclature.py | Nomenclate.get_token_settings |
def get_token_settings(cls, token, default=None):
""" Get the value for a specific token as a dictionary or replace with default
:param token: str, token to query the nomenclate for
:param default: object, substitution if the token is not found
:return: (dict, object, None), token setting dictionary or default
"""
setting_dict = {}
for key, value in iteritems(cls.__dict__):
if '%s_' % token in key and not callable(key) and not isinstance(value, tokens.TokenAttr):
setting_dict[key] = cls.__dict__.get(key, default)
return setting_dict
|
python
|
def get_token_settings(cls, token, default=None):
""" Get the value for a specific token as a dictionary or replace with default
:param token: str, token to query the nomenclate for
:param default: object, substitution if the token is not found
:return: (dict, object, None), token setting dictionary or default
"""
setting_dict = {}
for key, value in iteritems(cls.__dict__):
if '%s_' % token in key and not callable(key) and not isinstance(value, tokens.TokenAttr):
setting_dict[key] = cls.__dict__.get(key, default)
return setting_dict
|
[
"def",
"get_token_settings",
"(",
"cls",
",",
"token",
",",
"default",
"=",
"None",
")",
":",
"setting_dict",
"=",
"{",
"}",
"for",
"key",
",",
"value",
"in",
"iteritems",
"(",
"cls",
".",
"__dict__",
")",
":",
"if",
"'%s_'",
"%",
"token",
"in",
"key",
"and",
"not",
"callable",
"(",
"key",
")",
"and",
"not",
"isinstance",
"(",
"value",
",",
"tokens",
".",
"TokenAttr",
")",
":",
"setting_dict",
"[",
"key",
"]",
"=",
"cls",
".",
"__dict__",
".",
"get",
"(",
"key",
",",
"default",
")",
"return",
"setting_dict"
] |
Get the value for a specific token as a dictionary or replace with default
:param token: str, token to query the nomenclate for
:param default: object, substitution if the token is not found
:return: (dict, object, None), token setting dictionary or default
|
[
"Get",
"the",
"value",
"for",
"a",
"specific",
"token",
"as",
"a",
"dictionary",
"or",
"replace",
"with",
"default"
] |
e6d6fc28beac042bad588e56fbe77531d2de6b6f
|
https://github.com/AndresMWeber/Nomenclate/blob/e6d6fc28beac042bad588e56fbe77531d2de6b6f/nomenclate/core/nomenclature.py#L153-L165
|
241,019 | AndresMWeber/Nomenclate | nomenclate/core/nomenclature.py | Nomenclate._update_tokens_from_swap_format |
def _update_tokens_from_swap_format(self, original_format, original_format_order, remove_obsolete=True):
""" Updates tokens based on a swap format call that will maintain synchronicity between token_dict and attrs
If there was an accidental setting already set to one of the attrs that should now be a token attr due
to the format swap, we wipe it and add a new TokenAttr to the Nomenclate attribute.
:param original_format: str, original format string to compare to
:param original_format_order: list(str), the original format order to compare to
:param remove_obsolete: bool, whether to remove obsolete tokens
if off: persistent state across format swaps of missing tokens
"""
old_format_order = [_.lower() for _ in original_format_order]
new_format_order = [_.lower() for _ in self.format_order]
if hasattr(self, 'token_dict') and self.format != original_format:
old_tokens = [token for token in list(set(old_format_order) - set(new_format_order))
if hasattr(self, token)]
new_tokens = [token for token in set(new_format_order) - set(old_format_order)
if not hasattr(self, token) or isinstance(getattr(self, token, ''), str)]
self.merge_dict(dict.fromkeys(new_tokens, ''))
if remove_obsolete:
self.token_dict.purge_tokens(old_tokens)
for new_token in new_tokens:
try:
delattr(self, new_token)
except AttributeError:
pass
|
python
|
def _update_tokens_from_swap_format(self, original_format, original_format_order, remove_obsolete=True):
""" Updates tokens based on a swap format call that will maintain synchronicity between token_dict and attrs
If there was an accidental setting already set to one of the attrs that should now be a token attr due
to the format swap, we wipe it and add a new TokenAttr to the Nomenclate attribute.
:param original_format: str, original format string to compare to
:param original_format_order: list(str), the original format order to compare to
:param remove_obsolete: bool, whether to remove obsolete tokens
if off: persistent state across format swaps of missing tokens
"""
old_format_order = [_.lower() for _ in original_format_order]
new_format_order = [_.lower() for _ in self.format_order]
if hasattr(self, 'token_dict') and self.format != original_format:
old_tokens = [token for token in list(set(old_format_order) - set(new_format_order))
if hasattr(self, token)]
new_tokens = [token for token in set(new_format_order) - set(old_format_order)
if not hasattr(self, token) or isinstance(getattr(self, token, ''), str)]
self.merge_dict(dict.fromkeys(new_tokens, ''))
if remove_obsolete:
self.token_dict.purge_tokens(old_tokens)
for new_token in new_tokens:
try:
delattr(self, new_token)
except AttributeError:
pass
|
[
"def",
"_update_tokens_from_swap_format",
"(",
"self",
",",
"original_format",
",",
"original_format_order",
",",
"remove_obsolete",
"=",
"True",
")",
":",
"old_format_order",
"=",
"[",
"_",
".",
"lower",
"(",
")",
"for",
"_",
"in",
"original_format_order",
"]",
"new_format_order",
"=",
"[",
"_",
".",
"lower",
"(",
")",
"for",
"_",
"in",
"self",
".",
"format_order",
"]",
"if",
"hasattr",
"(",
"self",
",",
"'token_dict'",
")",
"and",
"self",
".",
"format",
"!=",
"original_format",
":",
"old_tokens",
"=",
"[",
"token",
"for",
"token",
"in",
"list",
"(",
"set",
"(",
"old_format_order",
")",
"-",
"set",
"(",
"new_format_order",
")",
")",
"if",
"hasattr",
"(",
"self",
",",
"token",
")",
"]",
"new_tokens",
"=",
"[",
"token",
"for",
"token",
"in",
"set",
"(",
"new_format_order",
")",
"-",
"set",
"(",
"old_format_order",
")",
"if",
"not",
"hasattr",
"(",
"self",
",",
"token",
")",
"or",
"isinstance",
"(",
"getattr",
"(",
"self",
",",
"token",
",",
"''",
")",
",",
"str",
")",
"]",
"self",
".",
"merge_dict",
"(",
"dict",
".",
"fromkeys",
"(",
"new_tokens",
",",
"''",
")",
")",
"if",
"remove_obsolete",
":",
"self",
".",
"token_dict",
".",
"purge_tokens",
"(",
"old_tokens",
")",
"for",
"new_token",
"in",
"new_tokens",
":",
"try",
":",
"delattr",
"(",
"self",
",",
"new_token",
")",
"except",
"AttributeError",
":",
"pass"
] |
Updates tokens based on a swap format call that will maintain synchronicity between token_dict and attrs
If there was an accidental setting already set to one of the attrs that should now be a token attr due
to the format swap, we wipe it and add a new TokenAttr to the Nomenclate attribute.
:param original_format: str, original format string to compare to
:param original_format_order: list(str), the original format order to compare to
:param remove_obsolete: bool, whether to remove obsolete tokens
if off: persistent state across format swaps of missing tokens
|
[
"Updates",
"tokens",
"based",
"on",
"a",
"swap",
"format",
"call",
"that",
"will",
"maintain",
"synchronicity",
"between",
"token_dict",
"and",
"attrs",
"If",
"there",
"was",
"an",
"accidental",
"setting",
"already",
"set",
"to",
"one",
"of",
"the",
"attrs",
"that",
"should",
"now",
"be",
"a",
"token",
"attr",
"due",
"to",
"the",
"format",
"swap",
"we",
"wipe",
"it",
"and",
"add",
"a",
"new",
"TokenAttr",
"to",
"the",
"Nomenclate",
"attribute",
"."
] |
e6d6fc28beac042bad588e56fbe77531d2de6b6f
|
https://github.com/AndresMWeber/Nomenclate/blob/e6d6fc28beac042bad588e56fbe77531d2de6b6f/nomenclate/core/nomenclature.py#L167-L195
|
241,020
|
AndresMWeber/Nomenclate
|
nomenclate/core/nomenclature.py
|
Nomenclate._convert_input
|
def _convert_input(self, *args, **kwargs):
""" Takes variable inputs
:param args: (dict, Nomenclate), any number of dictionary inputs or Nomenclates to be converted to dicts
:param kwargs: str, any number of kwargs that represent token:value pairs
:return: dict, combined dictionary of all inputs
"""
args = [arg.state if isinstance(arg, Nomenclate) else arg for arg in args]
input_dict = combine_dicts(*args, **kwargs)
return input_dict
|
python
|
def _convert_input(self, *args, **kwargs):
""" Takes variable inputs
:param args: (dict, Nomenclate), any number of dictionary inputs or Nomenclates to be converted to dicts
:param kwargs: str, any number of kwargs that represent token:value pairs
:return: dict, combined dictionary of all inputs
"""
args = [arg.state if isinstance(arg, Nomenclate) else arg for arg in args]
input_dict = combine_dicts(*args, **kwargs)
return input_dict
|
[
"def",
"_convert_input",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"args",
"=",
"[",
"arg",
".",
"state",
"if",
"isinstance",
"(",
"arg",
",",
"Nomenclate",
")",
"else",
"arg",
"for",
"arg",
"in",
"args",
"]",
"input_dict",
"=",
"combine_dicts",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"input_dict"
] |
Takes variable inputs
:param args: (dict, Nomenclate), any number of dictionary inputs or Nomenclates to be converted to dicts
:param kwargs: str, any number of kwargs that represent token:value pairs
:return: dict, combined dictionary of all inputs
|
[
"Takes",
"variable",
"inputs"
] |
e6d6fc28beac042bad588e56fbe77531d2de6b6f
|
https://github.com/AndresMWeber/Nomenclate/blob/e6d6fc28beac042bad588e56fbe77531d2de6b6f/nomenclate/core/nomenclature.py#L197-L206
|
241,021
|
smbapps/isolcss
|
isolcss/parser.py
|
matchiter
|
def matchiter(r, s, flags=0):
"""
Yields contiguous MatchObjects of r in s.
Raises ValueError if r eventually doesn't match contiguously.
"""
if isinstance(r, basestring):
r = re.compile(r, flags)
i = 0
while s:
m = r.match(s)
g = m and m.group(0)
if not m or not g:
raise ValueError("{}: {!r}".format(i, s[:50]))
i += len(g)
s = s[len(g):]
yield m
|
python
|
def matchiter(r, s, flags=0):
"""
Yields contiguous MatchObjects of r in s.
Raises ValueError if r eventually doesn't match contiguously.
"""
if isinstance(r, basestring):
r = re.compile(r, flags)
i = 0
while s:
m = r.match(s)
g = m and m.group(0)
if not m or not g:
raise ValueError("{}: {!r}".format(i, s[:50]))
i += len(g)
s = s[len(g):]
yield m
|
[
"def",
"matchiter",
"(",
"r",
",",
"s",
",",
"flags",
"=",
"0",
")",
":",
"if",
"isinstance",
"(",
"r",
",",
"basestring",
")",
":",
"r",
"=",
"re",
".",
"compile",
"(",
"r",
",",
"flags",
")",
"i",
"=",
"0",
"while",
"s",
":",
"m",
"=",
"r",
".",
"match",
"(",
"s",
")",
"g",
"=",
"m",
"and",
"m",
".",
"group",
"(",
"0",
")",
"if",
"not",
"m",
"or",
"not",
"g",
":",
"raise",
"ValueError",
"(",
"\"{}: {!r}\"",
".",
"format",
"(",
"i",
",",
"s",
"[",
":",
"50",
"]",
")",
")",
"i",
"+=",
"len",
"(",
"g",
")",
"s",
"=",
"s",
"[",
"len",
"(",
"g",
")",
":",
"]",
"yield",
"m"
] |
Yields contiguous MatchObjects of r in s.
Raises ValueError if r eventually doesn't match contiguously.
|
[
"Yields",
"contiguous",
"MatchObjects",
"of",
"r",
"in",
"s",
".",
"Raises",
"ValueError",
"if",
"r",
"eventually",
"doesn",
"t",
"match",
"contiguously",
"."
] |
1613dfd297f64292af1216855b6d096f2bed82fe
|
https://github.com/smbapps/isolcss/blob/1613dfd297f64292af1216855b6d096f2bed82fe/isolcss/parser.py#L131-L146
|
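A hedged aside on the matchiter entry above: its isinstance check against basestring is Python 2 only. The sketch below is a self-contained Python 3 adaptation (str instead of basestring, hypothetical name matchiter3) with a small usage example; it illustrates the same contiguous-matching idea and is not the library's own code.

import re

def matchiter3(r, s, flags=0):
    # Python 3 adaptation of the matchiter shown above.
    if isinstance(r, str):
        r = re.compile(r, flags)
    i = 0
    while s:
        m = r.match(s)
        g = m and m.group(0)
        if not m or not g:
            raise ValueError("{}: {!r}".format(i, s[:50]))
        i += len(g)
        s = s[len(g):]
        yield m

# Split a string into contiguous runs of letters and whitespace.
print([m.group(0) for m in matchiter3(r"[a-z]+|\s+", "foo bar baz")])
# -> ['foo', ' ', 'bar', ' ', 'baz']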
241,022
|
smbapps/isolcss
|
isolcss/parser.py
|
matchall
|
def matchall(r, s, flags=0):
"""
Returns the list of contiguous string matches of r in s,
or None if r does not successively match the entire s.
"""
try:
return [m.group(0) for m in matchiter(r, s, flags)]
except ValueError:
return None
|
python
|
def matchall(r, s, flags=0):
"""
Returns the list of contiguous string matches of r in s,
or None if r does not successively match the entire s.
"""
try:
return [m.group(0) for m in matchiter(r, s, flags)]
except ValueError:
return None
|
[
"def",
"matchall",
"(",
"r",
",",
"s",
",",
"flags",
"=",
"0",
")",
":",
"try",
":",
"return",
"[",
"m",
".",
"group",
"(",
"0",
")",
"for",
"m",
"in",
"matchiter",
"(",
"r",
",",
"s",
",",
"flags",
")",
"]",
"except",
"ValueError",
":",
"return",
"None"
] |
Returns the list of contiguous string matches of r in s,
or None if r does not successively match the entire s.
|
[
"Returns",
"the",
"list",
"of",
"contiguous",
"string",
"matches",
"of",
"r",
"in",
"s",
"or",
"None",
"if",
"r",
"does",
"not",
"successively",
"match",
"the",
"entire",
"s",
"."
] |
1613dfd297f64292af1216855b6d096f2bed82fe
|
https://github.com/smbapps/isolcss/blob/1613dfd297f64292af1216855b6d096f2bed82fe/isolcss/parser.py#L149-L157
|
241,023
|
diffeo/yakonfig
|
yakonfig/configurable.py
|
check_subconfig
|
def check_subconfig(config, name, sub):
"""Validate the configuration of an object within this.
This calls :meth:`Configurable.check_config` or equivalent on `sub`.
A dictionary configuration for `sub` is required in `config`.
>>> def check_config(config, name):
... for sub in sub_modules:
... check_subconfig(config, name, sub)
:param dict config: parent configuration
:param str name: name of the parent configuration block
:param sub: Configurable-like subobject to check
:raise yakonfig.ConfigurationError: if there is no
configuration for `sub`, or it is not a dictionary
"""
subname = sub.config_name
subconfig = config.setdefault(subname, {})
if not isinstance(subconfig, collections.Mapping):
raise ProgrammerError('configuration for {0} in {1} must be a mapping'
.format(subname, name))
checker = getattr(sub, 'check_config', None)
if checker is not None:
checker(subconfig, '{0}.{1}'.format(name, subname))
|
python
|
def check_subconfig(config, name, sub):
"""Validate the configuration of an object within this.
This calls :meth:`Configurable.check_config` or equivalent on `sub`.
A dictionary configuration for `sub` is required in `config`.
>>> def check_config(config, name):
... for sub in sub_modules:
... check_subconfig(config, name, sub)
:param dict config: parent configuration
:param str name: name of the parent configuration block
:param sub: Configurable-like subobject to check
:raise yakonfig.ConfigurationError: if there is no
configuration for `sub`, or it is not a dictionary
"""
subname = sub.config_name
subconfig = config.setdefault(subname, {})
if not isinstance(subconfig, collections.Mapping):
raise ProgrammerError('configuration for {0} in {1} must be a mapping'
.format(subname, name))
checker = getattr(sub, 'check_config', None)
if checker is not None:
checker(subconfig, '{0}.{1}'.format(name, subname))
|
[
"def",
"check_subconfig",
"(",
"config",
",",
"name",
",",
"sub",
")",
":",
"subname",
"=",
"sub",
".",
"config_name",
"subconfig",
"=",
"config",
".",
"setdefault",
"(",
"subname",
",",
"{",
"}",
")",
"if",
"not",
"isinstance",
"(",
"subconfig",
",",
"collections",
".",
"Mapping",
")",
":",
"raise",
"ProgrammerError",
"(",
"'configuration for {0} in {1} must be a mapping'",
".",
"format",
"(",
"subname",
",",
"name",
")",
")",
"checker",
"=",
"getattr",
"(",
"sub",
",",
"'check_config'",
",",
"None",
")",
"if",
"checker",
"is",
"not",
"None",
":",
"checker",
"(",
"subconfig",
",",
"'{0}.{1}'",
".",
"format",
"(",
"name",
",",
"subname",
")",
")"
] |
Validate the configuration of an object within this.
This calls :meth:`Configurable.check_config` or equivalent on `sub`.
A dictionary configuration for `sub` is required in `config`.
>>> def check_config(config, name):
... for sub in sub_modules:
... check_subconfig(config, name, sub)
:param dict config: parent configuration
:param str name: name of the parent configuration block
:param sub: Configurable-like subobject to check
:raise yakonfig.ConfigurationError: if there is no
configuration for `sub`, or it is not a dictionary
|
[
"Validate",
"the",
"configuration",
"of",
"an",
"object",
"within",
"this",
"."
] |
412e195da29b4f4fc7b72967c192714a6f5eaeb5
|
https://github.com/diffeo/yakonfig/blob/412e195da29b4f4fc7b72967c192714a6f5eaeb5/yakonfig/configurable.py#L299-L323
|
241,024
|
pacificclimate/cfmeta
|
cfmeta/cmip5file.py
|
get_cmor_fp_meta
|
def get_cmor_fp_meta(fp):
"""Processes a CMOR style file path.
Section 3.1 of the `Data Reference Syntax`_ details:
The standard CMIP5 output tool CMOR optionally writes output files
to a directory structure mapping DRS components to directory names as:
<activity>/<product>/<institute>/<model>/<experiment>/<frequency>/
<modeling_realm>/<variable_name>/<ensemble_member>/<CMOR filename>.nc
Arguments:
fp (str): A file path conforming to DRS spec.
Returns:
dict: Metadata as extracted from the file path.
.. _Data Reference Syntax:
http://cmip-pcmdi.llnl.gov/cmip5/docs/cmip5_data_reference_syntax.pdf
"""
# Copy metadata list then reverse to start at end of path
directory_meta = list(CMIP5_FP_ATTS)
# Prefer meta extracted from filename
meta = get_dir_meta(fp, directory_meta)
meta.update(get_cmor_fname_meta(fp))
return meta
|
python
|
def get_cmor_fp_meta(fp):
"""Processes a CMOR style file path.
Section 3.1 of the `Data Reference Syntax`_ details:
The standard CMIP5 output tool CMOR optionally writes output files
to a directory structure mapping DRS components to directory names as:
<activity>/<product>/<institute>/<model>/<experiment>/<frequency>/
<modeling_realm>/<variable_name>/<ensemble_member>/<CMOR filename>.nc
Arguments:
fp (str): A file path conforming to DRS spec.
Returns:
dict: Metadata as extracted from the file path.
.. _Data Reference Syntax:
http://cmip-pcmdi.llnl.gov/cmip5/docs/cmip5_data_reference_syntax.pdf
"""
# Copy metadata list then reverse to start at end of path
directory_meta = list(CMIP5_FP_ATTS)
# Prefer meta extracted from filename
meta = get_dir_meta(fp, directory_meta)
meta.update(get_cmor_fname_meta(fp))
return meta
|
[
"def",
"get_cmor_fp_meta",
"(",
"fp",
")",
":",
"# Copy metadata list then reverse to start at end of path",
"directory_meta",
"=",
"list",
"(",
"CMIP5_FP_ATTS",
")",
"# Prefer meta extracted from filename",
"meta",
"=",
"get_dir_meta",
"(",
"fp",
",",
"directory_meta",
")",
"meta",
".",
"update",
"(",
"get_cmor_fname_meta",
"(",
"fp",
")",
")",
"return",
"meta"
] |
Processes a CMOR style file path.
Section 3.1 of the `Data Reference Syntax`_ details:
The standard CMIP5 output tool CMOR optionally writes output files
to a directory structure mapping DRS components to directory names as:
<activity>/<product>/<institute>/<model>/<experiment>/<frequency>/
<modeling_realm>/<variable_name>/<ensemble_member>/<CMOR filename>.nc
Arguments:
fp (str): A file path conforming to DRS spec.
Returns:
dict: Metadata as extracted from the file path.
.. _Data Reference Syntax:
http://cmip-pcmdi.llnl.gov/cmip5/docs/cmip5_data_reference_syntax.pdf
|
[
"Processes",
"a",
"CMOR",
"style",
"file",
"path",
"."
] |
a6eef78d0bce523bb44920ba96233f034b60316a
|
https://github.com/pacificclimate/cfmeta/blob/a6eef78d0bce523bb44920ba96233f034b60316a/cfmeta/cmip5file.py#L124-L152
|
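A minimal usage sketch for the get_cmor_fp_meta entry above, assuming the cfmeta package is installed and importable. The file path is invented to follow the CMOR directory layout quoted in the docstring, and the exact keys of the returned dict depend on CMIP5_FP_ATTS and CMIP5_FNAME_REQUIRED_ATTS, which are not shown in this excerpt.

from cfmeta.cmip5file import get_cmor_fp_meta  # assumed import path, from the repo layout above

fp = ("cmip5/output1/MOHC/HadCM3/historical/mon/atmos/"
      "tas/r1i1p1/tas_Amon_HadCM3_historical_r1i1p1_185912-200511.nc")
print(get_cmor_fp_meta(fp))  # DRS components gathered from both the directories and the filename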
241,025
|
pacificclimate/cfmeta
|
cfmeta/cmip5file.py
|
get_datanode_fp_meta
|
def get_datanode_fp_meta(fp):
"""Processes a datanode style file path.
Section 3.2 of the `Data Reference Syntax`_ details:
It is recommended that ESGF data nodes should layout datasets
on disk mapping DRS components to directories as:
<activity>/<product>/<institute>/<model>/<experiment>/
<frequency>/<modeling_realm>/<mip_table>/<ensemble_member>/
<version_number>/<variable_name>/<CMOR filename>.nc
Arguments:
fp (str): A file path conforming to DRS spec.
Returns:
dict: Metadata as extracted from the file path.
.. _Data Reference Syntax:
http://cmip-pcmdi.llnl.gov/cmip5/docs/cmip5_data_reference_syntax.pdf
"""
# Copy metadata list then reverse to start at end of path
directory_meta = list(CMIP5_DATANODE_FP_ATTS)
# Prefer meta extracted from filename
meta = get_dir_meta(fp, directory_meta)
meta.update(get_cmor_fname_meta(fp))
return meta
|
python
|
def get_datanode_fp_meta(fp):
"""Processes a datanode style file path.
Section 3.2 of the `Data Reference Syntax`_ details:
It is recommended that ESGF data nodes should layout datasets
on disk mapping DRS components to directories as:
<activity>/<product>/<institute>/<model>/<experiment>/
<frequency>/<modeling_realm>/<mip_table>/<ensemble_member>/
<version_number>/<variable_name>/<CMOR filename>.nc
Arguments:
fp (str): A file path conforming to DRS spec.
Returns:
dict: Metadata as extracted from the file path.
.. _Data Reference Syntax:
http://cmip-pcmdi.llnl.gov/cmip5/docs/cmip5_data_reference_syntax.pdf
"""
# Copy metadata list then reverse to start at end of path
directory_meta = list(CMIP5_DATANODE_FP_ATTS)
# Prefer meta extracted from filename
meta = get_dir_meta(fp, directory_meta)
meta.update(get_cmor_fname_meta(fp))
return meta
|
[
"def",
"get_datanode_fp_meta",
"(",
"fp",
")",
":",
"# Copy metadata list then reverse to start at end of path",
"directory_meta",
"=",
"list",
"(",
"CMIP5_DATANODE_FP_ATTS",
")",
"# Prefer meta extracted from filename",
"meta",
"=",
"get_dir_meta",
"(",
"fp",
",",
"directory_meta",
")",
"meta",
".",
"update",
"(",
"get_cmor_fname_meta",
"(",
"fp",
")",
")",
"return",
"meta"
] |
Processes a datanode style file path.
Section 3.2 of the `Data Reference Syntax`_ details:
It is recommended that ESGF data nodes should layout datasets
on disk mapping DRS components to directories as:
<activity>/<product>/<institute>/<model>/<experiment>/
<frequency>/<modeling_realm>/<mip_table>/<ensemble_member>/
<version_number>/<variable_name>/<CMOR filename>.nc
Arguments:
fp (str): A file path conforming to DRS spec.
Returns:
dict: Metadata as extracted from the file path.
.. _Data Reference Syntax:
http://cmip-pcmdi.llnl.gov/cmip5/docs/cmip5_data_reference_syntax.pdf
|
[
"Processes",
"a",
"datanode",
"style",
"file",
"path",
"."
] |
a6eef78d0bce523bb44920ba96233f034b60316a
|
https://github.com/pacificclimate/cfmeta/blob/a6eef78d0bce523bb44920ba96233f034b60316a/cfmeta/cmip5file.py#L154-L183
|
241,026
|
pacificclimate/cfmeta
|
cfmeta/cmip5file.py
|
get_cmor_fname_meta
|
def get_cmor_fname_meta(fname):
"""Processes a CMOR style file name.
Section 3.3 of the `Data Reference Syntax`_ details:
filename = <variable name>_<mip_table>_<model>_<experiment>_
<ensemble_member>[_<temporal_subset>][_<geographical_info>].nc
Temporal subsets are detailed in section 2.4:
Time instants or periods will be represented by a construction
of the form “N1-N2”, where N1 and N2 are of the form
‘yyyy[MM[dd[hh[mm[ss]]]]][-suffix]’, where ‘yyyy’, ‘MM’, ‘dd’,
‘hh’ ‘mm’ and ‘ss’ are integer year, month, day, hour, minute,
and second, respectively, and the precision with which time is
expressed must unambiguously resolve the interval between
timesamples contained in the file or virtual file
Geographic subsets are also detailed in section 2.4:
The DRS specification for this indicator is a string of the
form g-XXXX[-YYYY]. The “g-” indicates that some spatial selection
or processing has been done (i.e., selection of a sub-global region
and possibly spatial averaging).
Arguments:
fname (str): A file name conforming to DRS spec.
Returns:
dict: Metadata as extracted from the filename.
.. _Data Reference Syntax:
http://cmip-pcmdi.llnl.gov/cmip5/docs/cmip5_data_reference_syntax.pdf
"""
if '/' in fname:
fname = os.path.split(fname)[1]
fname = os.path.splitext(fname)[0]
meta = fname.split('_')
res = {}
try:
for key in CMIP5_FNAME_REQUIRED_ATTS:
res[key] = meta.pop(0)
except IndexError:
raise PathError(fname)
# Determine presence and order of optional metadata
if len(meta) > 2:
raise PathError(fname)
is_geo = lambda x: x[0] == 'g'
for key in meta:
if is_geo(key):
res['geographical_info'] = key
else:
res['temporal_subset'] = key
return res
|
python
|
def get_cmor_fname_meta(fname):
"""Processes a CMOR style file name.
Section 3.3 of the `Data Reference Syntax`_ details:
filename = <variable name>_<mip_table>_<model>_<experiment>_
<ensemble_member>[_<temporal_subset>][_<geographical_info>].nc
Temporal subsets are detailed in section 2.4:
Time instants or periods will be represented by a construction
of the form “N1-N2”, where N1 and N2 are of the form
‘yyyy[MM[dd[hh[mm[ss]]]]][-suffix]’, where ‘yyyy’, ‘MM’, ‘dd’,
‘hh’ ‘mm’ and ‘ss’ are integer year, month, day, hour, minute,
and second, respectively, and the precision with which time is
expressed must unambiguously resolve the interval between
timesamples contained in the file or virtual file
Geographic subsets are also detailed in section 2.4:
The DRS specification for this indicator is a string of the
form g-XXXX[-YYYY]. The “g-” indicates that some spatial selection
or processing has been done (i.e., selection of a sub-global region
and possibly spatial averaging).
Arguments:
fname (str): A file name conforming to DRS spec.
Returns:
dict: Metadata as extracted from the filename.
.. _Data Reference Syntax:
http://cmip-pcmdi.llnl.gov/cmip5/docs/cmip5_data_reference_syntax.pdf
"""
if '/' in fname:
fname = os.path.split(fname)[1]
fname = os.path.splitext(fname)[0]
meta = fname.split('_')
res = {}
try:
for key in CMIP5_FNAME_REQUIRED_ATTS:
res[key] = meta.pop(0)
except IndexError:
raise PathError(fname)
# Determine presence and order of optional metadata
if len(meta) > 2:
raise PathError(fname)
is_geo = lambda x: x[0] == 'g'
for key in meta:
if is_geo(key):
res['geographical_info'] = key
else:
res['temporal_subset'] = key
return res
|
[
"def",
"get_cmor_fname_meta",
"(",
"fname",
")",
":",
"if",
"'/'",
"in",
"fname",
":",
"fname",
"=",
"os",
".",
"path",
".",
"split",
"(",
"fname",
")",
"[",
"1",
"]",
"fname",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"fname",
")",
"[",
"0",
"]",
"meta",
"=",
"fname",
".",
"split",
"(",
"'_'",
")",
"res",
"=",
"{",
"}",
"try",
":",
"for",
"key",
"in",
"CMIP5_FNAME_REQUIRED_ATTS",
":",
"res",
"[",
"key",
"]",
"=",
"meta",
".",
"pop",
"(",
"0",
")",
"except",
"IndexError",
":",
"raise",
"PathError",
"(",
"fname",
")",
"# Determine presence and order of optional metadata",
"if",
"len",
"(",
"meta",
")",
">",
"2",
":",
"raise",
"PathError",
"(",
"fname",
")",
"is_geo",
"=",
"lambda",
"x",
":",
"x",
"[",
"0",
"]",
"==",
"'g'",
"for",
"key",
"in",
"meta",
":",
"if",
"is_geo",
"(",
"key",
")",
":",
"res",
"[",
"'geographical_info'",
"]",
"=",
"key",
"else",
":",
"res",
"[",
"'temporal_subset'",
"]",
"=",
"key",
"return",
"res"
] |
Processes a CMOR style file name.
Section 3.3 of the `Data Reference Syntax`_ details:
filename = <variable name>_<mip_table>_<model>_<experiment>_
<ensemble_member>[_<temporal_subset>][_<geographical_info>].nc
Temporal subsets are detailed in section 2.4:
Time instants or periods will be represented by a construction
of the form “N1-N2”, where N1 and N2 are of the form
‘yyyy[MM[dd[hh[mm[ss]]]]][-suffix]’, where ‘yyyy’, ‘MM’, ‘dd’,
‘hh’ ‘mm’ and ‘ss’ are integer year, month, day, hour, minute,
and second, respectively, and the precision with which time is
expressed must unambiguously resolve the interval between
timesamples contained in the file or virtual file
Geographic subsets are also detailed in section 2.4:
The DRS specification for this indicator is a string of the
form g-XXXX[-YYYY]. The “g-” indicates that some spatial selection
or processing has been done (i.e., selection of a sub-global region
and possibly spatial averaging).
Arguments:
fname (str): A file name conforming to DRS spec.
Returns:
dict: Metadata as extracted from the filename.
.. _Data Reference Syntax:
http://cmip-pcmdi.llnl.gov/cmip5/docs/cmip5_data_reference_syntax.pdf
|
[
"Processes",
"a",
"CMOR",
"style",
"file",
"name",
"."
] |
a6eef78d0bce523bb44920ba96233f034b60316a
|
https://github.com/pacificclimate/cfmeta/blob/a6eef78d0bce523bb44920ba96233f034b60316a/cfmeta/cmip5file.py#L185-L244
|
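For the filename-only parser above, a similarly hedged sketch (same installation assumption, invented filename following the quoted DRS pattern):

from cfmeta.cmip5file import get_cmor_fname_meta  # assumed import path

meta = get_cmor_fname_meta("tas_Amon_HadCM3_historical_r1i1p1_185912-200511.nc")
print(meta)  # the required attributes plus a 'temporal_subset' entry for the trailing date range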
241,027
|
za-creature/gulpless
|
gulpless/reactor.py
|
Reactor.run
|
def run(self, once=False):
"""Runs the reactor in the main thread."""
self._once = once
self.start()
while self.running:
try:
time.sleep(1.0)
except KeyboardInterrupt:
self.stop()
self.join()
|
python
|
def run(self, once=False):
"""Runs the reactor in the main thread."""
self._once = once
self.start()
while self.running:
try:
time.sleep(1.0)
except KeyboardInterrupt:
self.stop()
self.join()
|
[
"def",
"run",
"(",
"self",
",",
"once",
"=",
"False",
")",
":",
"self",
".",
"_once",
"=",
"once",
"self",
".",
"start",
"(",
")",
"while",
"self",
".",
"running",
":",
"try",
":",
"time",
".",
"sleep",
"(",
"1.0",
")",
"except",
"KeyboardInterrupt",
":",
"self",
".",
"stop",
"(",
")",
"self",
".",
"join",
"(",
")"
] |
Runs the reactor in the main thread.
|
[
"Runs",
"the",
"reactor",
"in",
"the",
"main",
"thread",
"."
] |
fd73907dbe86880086719816bb042233f85121f6
|
https://github.com/za-creature/gulpless/blob/fd73907dbe86880086719816bb042233f85121f6/gulpless/reactor.py#L47-L56
|
241,028
|
lewisjared/credkeep
|
credkeep/util.py
|
clear_to_enc_filename
|
def clear_to_enc_filename(fname):
"""
Converts the filename of a cleartext file and convert it to an encrypted filename
:param fname:
:return: filename of encrypted secret file if found, else None
"""
if not fname.lower().endswith('.json'):
raise CredkeepException('Invalid filetype')
if fname.lower().endswith('.enc.json'):
raise CredkeepException('File already encrypted')
enc_fname = fname[:-4] + 'enc.json'
return enc_fname if exists(enc_fname) else None
|
python
|
def clear_to_enc_filename(fname):
"""
Converts the filename of a cleartext file and convert it to an encrypted filename
:param fname:
:return: filename of encrypted secret file if found, else None
"""
if not fname.lower().endswith('.json'):
raise CredkeepException('Invalid filetype')
if fname.lower().endswith('.enc.json'):
raise CredkeepException('File already encrypted')
enc_fname = fname[:-4] + 'enc.json'
return enc_fname if exists(enc_fname) else None
|
[
"def",
"clear_to_enc_filename",
"(",
"fname",
")",
":",
"if",
"not",
"fname",
".",
"lower",
"(",
")",
".",
"endswith",
"(",
"'.json'",
")",
":",
"raise",
"CredkeepException",
"(",
"'Invalid filetype'",
")",
"if",
"fname",
".",
"lower",
"(",
")",
".",
"endswith",
"(",
"'.enc.json'",
")",
":",
"raise",
"CredkeepException",
"(",
"'File already encrypted'",
")",
"enc_fname",
"=",
"fname",
"[",
":",
"-",
"4",
"]",
"+",
"'enc.json'",
"return",
"enc_fname",
"if",
"exists",
"(",
"enc_fname",
")",
"else",
"None"
] |
Converts the filename of a cleartext file and convert it to an encrypted filename
:param fname:
:return: filename of encrypted secret file if found, else None
|
[
"Converts",
"the",
"filename",
"of",
"a",
"cleartext",
"file",
"and",
"convert",
"it",
"to",
"an",
"encrypted",
"filename"
] |
63638ced094992552a28109b91839bcbbbe9230a
|
https://github.com/lewisjared/credkeep/blob/63638ced094992552a28109b91839bcbbbe9230a/credkeep/util.py#L12-L27
|
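The name mapping in clear_to_enc_filename above is easy to misread: fname[:-4] drops the trailing 'json' but keeps the dot, so 'secrets.json' becomes 'secrets.enc.json'. A tiny standalone sketch of just that mapping, with a hypothetical helper name and without the on-disk existence check:

def clear_to_enc_name(fname):
    # Name mapping only; the real helper also requires the result to exist on disk.
    assert fname.lower().endswith('.json') and not fname.lower().endswith('.enc.json')
    return fname[:-4] + 'enc.json'

print(clear_to_enc_name('secrets.json'))   # secrets.enc.json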
241,029
|
lewisjared/credkeep
|
credkeep/util.py
|
enc_to_clear_filename
|
def enc_to_clear_filename(fname):
"""
Converts the filename of an encrypted file to cleartext
:param fname:
:return: filename of clear secret file if found, else None
"""
if not fname.lower().endswith('.json'):
raise CredkeepException('Invalid filetype')
if not fname.lower().endswith('.enc.json'):
raise CredkeepException('Not filename of encrypted file')
clear_fname = fname.replace('.enc.json', '.json')
print clear_fname
return clear_fname if exists(clear_fname) else None
|
python
|
def enc_to_clear_filename(fname):
"""
Converts the filename of an encrypted file to cleartext
:param fname:
:return: filename of clear secret file if found, else None
"""
if not fname.lower().endswith('.json'):
raise CredkeepException('Invalid filetype')
if not fname.lower().endswith('.enc.json'):
raise CredkeepException('Not filename of encrypted file')
clear_fname = fname.replace('.enc.json', '.json')
print clear_fname
return clear_fname if exists(clear_fname) else None
|
[
"def",
"enc_to_clear_filename",
"(",
"fname",
")",
":",
"if",
"not",
"fname",
".",
"lower",
"(",
")",
".",
"endswith",
"(",
"'.json'",
")",
":",
"raise",
"CredkeepException",
"(",
"'Invalid filetype'",
")",
"if",
"not",
"fname",
".",
"lower",
"(",
")",
".",
"endswith",
"(",
"'.enc.json'",
")",
":",
"raise",
"CredkeepException",
"(",
"'Not filename of encrypted file'",
")",
"clear_fname",
"=",
"fname",
".",
"replace",
"(",
"'.enc.json'",
",",
"'.json'",
")",
"print",
"clear_fname",
"return",
"clear_fname",
"if",
"exists",
"(",
"clear_fname",
")",
"else",
"None"
] |
Converts the filename of an encrypted file to cleartext
:param fname:
:return: filename of clear secret file if found, else None
|
[
"Converts",
"the",
"filename",
"of",
"an",
"encrypted",
"file",
"to",
"cleartext"
] |
63638ced094992552a28109b91839bcbbbe9230a
|
https://github.com/lewisjared/credkeep/blob/63638ced094992552a28109b91839bcbbbe9230a/credkeep/util.py#L30-L46
|
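The inverse helper above contains a bare Python 2 print statement (print clear_fname), so it will not run under Python 3 as written. A standalone Python 3 sketch of the reverse mapping, again with a hypothetical name and without the existence check:

def enc_to_clear_name(fname):
    # Reverse of the mapping above, written for Python 3.
    assert fname.lower().endswith('.enc.json')
    return fname.replace('.enc.json', '.json')

print(enc_to_clear_name('secrets.enc.json'))   # secrets.json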
241,030
|
kevinsprong23/aperture
|
aperture/heatmaps.py
|
init_heatmap
|
def init_heatmap(x_vec, y_vec, hist_matrix, fig, colormap='Blues',
alpha=1, grid=False, colorbar=True,
vmax='auto', vmin='auto', crop=True):
"""
convenience function to initialize a standard colormap in a figure
"""
plt.figure(fig.number)
ax = fig.gca()
# set vmax and vmin
vma = np.amax(hist_matrix) if vmax == 'auto' else vmax
vmi = np.amin(hist_matrix) if vmin == 'auto' else vmin
# an error check
if vma <= vmi:
vma = vmi + 1
# grid the space for pcolormesh
x_grid, y_grid = np.meshgrid(x_vec, y_vec)
hmap = ax.pcolormesh(x_grid, y_grid, np.array(hist_matrix),
cmap=colormap, alpha=alpha, shading='gouraud',
vmax=vma, vmin=vmi)
if colorbar:
plt.colorbar(hmap)
if not grid:
ax.grid(False)
if crop:
ax.set_xlim([x_vec[0], x_vec[-1]])
ax.set_ylim([y_vec[0], y_vec[-1]])
|
python
|
def init_heatmap(x_vec, y_vec, hist_matrix, fig, colormap='Blues',
alpha=1, grid=False, colorbar=True,
vmax='auto', vmin='auto', crop=True):
"""
convenience function to initialize a standard colormap in a figure
"""
plt.figure(fig.number)
ax = fig.gca()
# set vmax and vmin
vma = np.amax(hist_matrix) if vmax == 'auto' else vmax
vmi = np.amin(hist_matrix) if vmin == 'auto' else vmin
# an error check
if vma <= vmi:
vma = vmi + 1
# grid the space for pcolormesh
x_grid, y_grid = np.meshgrid(x_vec, y_vec)
hmap = ax.pcolormesh(x_grid, y_grid, np.array(hist_matrix),
cmap=colormap, alpha=alpha, shading='gouraud',
vmax=vma, vmin=vmi)
if colorbar:
plt.colorbar(hmap)
if not grid:
ax.grid(False)
if crop:
ax.set_xlim([x_vec[0], x_vec[-1]])
ax.set_ylim([y_vec[0], y_vec[-1]])
|
[
"def",
"init_heatmap",
"(",
"x_vec",
",",
"y_vec",
",",
"hist_matrix",
",",
"fig",
",",
"colormap",
"=",
"'Blues'",
",",
"alpha",
"=",
"1",
",",
"grid",
"=",
"False",
",",
"colorbar",
"=",
"True",
",",
"vmax",
"=",
"'auto'",
",",
"vmin",
"=",
"'auto'",
",",
"crop",
"=",
"True",
")",
":",
"plt",
".",
"figure",
"(",
"fig",
".",
"number",
")",
"ax",
"=",
"fig",
".",
"gca",
"(",
")",
"# set vmax and vmin",
"vma",
"=",
"np",
".",
"amax",
"(",
"hist_matrix",
")",
"if",
"vmax",
"==",
"'auto'",
"else",
"vmax",
"vmi",
"=",
"np",
".",
"amin",
"(",
"hist_matrix",
")",
"if",
"vmin",
"==",
"'auto'",
"else",
"vmin",
"# an error check",
"if",
"vma",
"<=",
"vmi",
":",
"vma",
"=",
"vmi",
"+",
"1",
"# grid the space for pcolormesh",
"x_grid",
",",
"y_grid",
"=",
"np",
".",
"meshgrid",
"(",
"x_vec",
",",
"y_vec",
")",
"hmap",
"=",
"ax",
".",
"pcolormesh",
"(",
"x_grid",
",",
"y_grid",
",",
"np",
".",
"array",
"(",
"hist_matrix",
")",
",",
"cmap",
"=",
"colormap",
",",
"alpha",
"=",
"alpha",
",",
"shading",
"=",
"'gouraud'",
",",
"vmax",
"=",
"vma",
",",
"vmin",
"=",
"vmi",
")",
"if",
"colorbar",
":",
"plt",
".",
"colorbar",
"(",
"hmap",
")",
"if",
"not",
"grid",
":",
"ax",
".",
"grid",
"(",
"False",
")",
"if",
"crop",
":",
"ax",
".",
"set_xlim",
"(",
"[",
"x_vec",
"[",
"0",
"]",
",",
"x_vec",
"[",
"-",
"1",
"]",
"]",
")",
"ax",
".",
"set_ylim",
"(",
"[",
"y_vec",
"[",
"0",
"]",
",",
"y_vec",
"[",
"-",
"1",
"]",
"]",
")"
] |
convenience function to initialize a standard colormap in a figure
|
[
"convenience",
"function",
"to",
"initialize",
"a",
"standard",
"colormap",
"in",
"a",
"figure"
] |
d0420fef3b25d8afc0e5ddcfb6fe5f0ff42b9799
|
https://github.com/kevinsprong23/aperture/blob/d0420fef3b25d8afc0e5ddcfb6fe5f0ff42b9799/aperture/heatmaps.py#L88-L120
|
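A usage sketch for init_heatmap above, assuming the aperture package plus numpy and matplotlib are available. Note that hist_matrix needs shape (len(y_vec), len(x_vec)) to satisfy pcolormesh with gouraud shading; the data here is random and purely illustrative.

import numpy as np
import matplotlib.pyplot as plt
from aperture.heatmaps import init_heatmap  # assumed import path

x = np.linspace(0.0, 1.0, 50)
y = np.linspace(0.0, 1.0, 40)
z = np.random.rand(40, 50)            # rows follow y, columns follow x
fig = plt.figure()
init_heatmap(x, y, z, fig, colormap='viridis', vmin=0.0, vmax=1.0)
plt.show()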
241,031
|
kevinsprong23/aperture
|
aperture/heatmaps.py
|
heatmap
|
def heatmap(x, y, step=None, min_pt=None, max_pt=None,
colormap='Blues', alpha=1, grid=False,
colorbar=True, scale='lin',
vmax='auto', vmin='auto', crop=True):
"""
function to take vectors x and y and hist them
"""
(x_vec, y_vec, hist_matrix) = calc_2d_hist(x, y, step, min_pt, max_pt)
# simple in this case because it is positive counts
if scale == 'log':
for row in hist_matrix:
for i, el in enumerate(row):
row[i] = 0 if row[i] == 0 else log10(row[i])
# plot
fig = plt.figure()
init_heatmap(x_vec, y_vec, hist_matrix, fig, colormap=colormap,
alpha=alpha, grid=grid, colorbar=colorbar,
vmax=vmax, vmin=vmin, crop=crop)
return fig
|
python
|
def heatmap(x, y, step=None, min_pt=None, max_pt=None,
colormap='Blues', alpha=1, grid=False,
colorbar=True, scale='lin',
vmax='auto', vmin='auto', crop=True):
"""
function to take vectors x and y and hist them
"""
(x_vec, y_vec, hist_matrix) = calc_2d_hist(x, y, step, min_pt, max_pt)
# simple in this case because it is positive counts
if scale == 'log':
for row in hist_matrix:
for i, el in enumerate(row):
row[i] = 0 if row[i] == 0 else log10(row[i])
# plot
fig = plt.figure()
init_heatmap(x_vec, y_vec, hist_matrix, fig, colormap=colormap,
alpha=alpha, grid=grid, colorbar=colorbar,
vmax=vmax, vmin=vmin, crop=crop)
return fig
|
[
"def",
"heatmap",
"(",
"x",
",",
"y",
",",
"step",
"=",
"None",
",",
"min_pt",
"=",
"None",
",",
"max_pt",
"=",
"None",
",",
"colormap",
"=",
"'Blues'",
",",
"alpha",
"=",
"1",
",",
"grid",
"=",
"False",
",",
"colorbar",
"=",
"True",
",",
"scale",
"=",
"'lin'",
",",
"vmax",
"=",
"'auto'",
",",
"vmin",
"=",
"'auto'",
",",
"crop",
"=",
"True",
")",
":",
"(",
"x_vec",
",",
"y_vec",
",",
"hist_matrix",
")",
"=",
"calc_2d_hist",
"(",
"x",
",",
"y",
",",
"step",
",",
"min_pt",
",",
"max_pt",
")",
"# simple in this case because it is positive counts",
"if",
"scale",
"==",
"'log'",
":",
"for",
"row",
"in",
"hist_matrix",
":",
"for",
"i",
",",
"el",
"in",
"enumerate",
"(",
"row",
")",
":",
"row",
"[",
"i",
"]",
"=",
"0",
"if",
"row",
"[",
"i",
"]",
"==",
"0",
"else",
"log10",
"(",
"row",
"[",
"i",
"]",
")",
"# plot",
"fig",
"=",
"plt",
".",
"figure",
"(",
")",
"init_heatmap",
"(",
"x_vec",
",",
"y_vec",
",",
"hist_matrix",
",",
"fig",
",",
"colormap",
"=",
"colormap",
",",
"alpha",
"=",
"alpha",
",",
"grid",
"=",
"grid",
",",
"colorbar",
"=",
"colorbar",
",",
"vmax",
"=",
"vmax",
",",
"vmin",
"=",
"vmin",
",",
"crop",
"=",
"crop",
")",
"return",
"fig"
] |
function to take vectors x and y and hist them
|
[
"function",
"to",
"take",
"vectors",
"x",
"and",
"y",
"and",
"hist",
"them"
] |
d0420fef3b25d8afc0e5ddcfb6fe5f0ff42b9799
|
https://github.com/kevinsprong23/aperture/blob/d0420fef3b25d8afc0e5ddcfb6fe5f0ff42b9799/aperture/heatmaps.py#L123-L144
|
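And the higher-level heatmap entry above, which bins two sample vectors itself. This sketch leaves step, min_pt and max_pt at their None defaults and assumes calc_2d_hist (not shown in this excerpt) handles that case.

import numpy as np
from aperture.heatmaps import heatmap  # assumed import path

x = np.random.randn(10000)
y = 0.5 * x + np.random.randn(10000)
fig = heatmap(x, y, scale='log')       # per-bin counts shown on a log10 scale
fig.savefig('heatmap.png')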
241,032
|
jalanb/pysyte
|
pysyte/paths.py
|
ext_language
|
def ext_language(ext, exts=None):
"""Language of the extension in those extensions
If exts is supplied, then restrict recognition to those exts only
If exts is not supplied, then use all known extensions
>>> ext_language('.py') == 'python'
True
"""
languages = {
'.py': 'python',
'.py2': 'python2',
'.py3': 'python3',
'.sh': 'bash',
'.bash': 'bash',
'.pl': 'perl',
'.js': 'javascript',
'.txt': 'english',
}
ext_languages = {_: languages[_] for _ in exts} if exts else languages
return ext_languages.get(ext)
|
python
|
def ext_language(ext, exts=None):
"""Language of the extension in those extensions
If exts is supplied, then restrict recognition to those exts only
If exts is not supplied, then use all known extensions
>>> ext_language('.py') == 'python'
True
"""
languages = {
'.py': 'python',
'.py2': 'python2',
'.py3': 'python3',
'.sh': 'bash',
'.bash': 'bash',
'.pl': 'perl',
'.js': 'javascript',
'.txt': 'english',
}
ext_languages = {_: languages[_] for _ in exts} if exts else languages
return ext_languages.get(ext)
|
[
"def",
"ext_language",
"(",
"ext",
",",
"exts",
"=",
"None",
")",
":",
"languages",
"=",
"{",
"'.py'",
":",
"'python'",
",",
"'.py2'",
":",
"'python2'",
",",
"'.py3'",
":",
"'python3'",
",",
"'.sh'",
":",
"'bash'",
",",
"'.bash'",
":",
"'bash'",
",",
"'.pl'",
":",
"'perl'",
",",
"'.js'",
":",
"'javascript'",
",",
"'.txt'",
":",
"'english'",
",",
"}",
"ext_languages",
"=",
"{",
"_",
":",
"languages",
"[",
"_",
"]",
"for",
"_",
"in",
"exts",
"}",
"if",
"exts",
"else",
"languages",
"return",
"ext_languages",
".",
"get",
"(",
"ext",
")"
] |
Language of the extension in those extensions
If exts is supplied, then restrict recognition to those exts only
If exts is not supplied, then use all known extensions
>>> ext_language('.py') == 'python'
True
|
[
"Language",
"of",
"the",
"extension",
"in",
"those",
"extensions"
] |
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
|
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/paths.py#L273-L293
|
241,033
|
jalanb/pysyte
|
pysyte/paths.py
|
find_language
|
def find_language(script, exts=None):
"""Determine the script's language extension
>>> this_script = __file__.rstrip('c')
>>> find_language(makepath(this_script)) == 'python'
True
If exts are given they restrict which extensions are allowed
>>> find_language(makepath(this_script), ('.sh', '.txt')) is None
True
If there is no extension, but shebang is present, then use that
(Expecting to find "#!" in ~/.bashrc for this test,
but ~/.bashrc might not exist - then there's no language)
>>> bashrc = home() / '.bashrc'
>>> find_language(bashrc) in ('bash', None)
True
"""
if not script.isfile():
return None
if script.ext:
return ext_language(script.ext, exts)
shebang = script.shebang()
return shebang and str(shebang.name) or None
|
python
|
def find_language(script, exts=None):
"""Determine the script's language extension
>>> this_script = __file__.rstrip('c')
>>> find_language(makepath(this_script)) == 'python'
True
If exts are given they restrict which extensions are allowed
>>> find_language(makepath(this_script), ('.sh', '.txt')) is None
True
If there is no extension, but shebang is present, then use that
(Expecting to find "#!" in ~/.bashrc for this test,
but ~/.bashrc might not exist - then there's no language)
>>> bashrc = home() / '.bashrc'
>>> find_language(bashrc) in ('bash', None)
True
"""
if not script.isfile():
return None
if script.ext:
return ext_language(script.ext, exts)
shebang = script.shebang()
return shebang and str(shebang.name) or None
|
[
"def",
"find_language",
"(",
"script",
",",
"exts",
"=",
"None",
")",
":",
"if",
"not",
"script",
".",
"isfile",
"(",
")",
":",
"return",
"None",
"if",
"script",
".",
"ext",
":",
"return",
"ext_language",
"(",
"script",
".",
"ext",
",",
"exts",
")",
"shebang",
"=",
"script",
".",
"shebang",
"(",
")",
"return",
"shebang",
"and",
"str",
"(",
"shebang",
".",
"name",
")",
"or",
"None"
] |
Determine the script's language extension
>>> this_script = __file__.rstrip('c')
>>> find_language(makepath(this_script)) == 'python'
True
If exts are given they restrict which extensions are allowed
>>> find_language(makepath(this_script), ('.sh', '.txt')) is None
True
If there is no extension, but shebang is present, then use that
(Expecting to find "#!" in ~/.bashrc for this test,
but ~/.bashrc might not exist - then there's no language)
>>> bashrc = home() / '.bashrc'
>>> find_language(bashrc) in ('bash', None)
True
|
[
"Determine",
"the",
"script",
"s",
"language",
"extension"
] |
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
|
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/paths.py#L296-L320
|
241,034
|
jalanb/pysyte
|
pysyte/paths.py
|
makepath
|
def makepath(s, as_file=False):
"""Make a path from a string
Expand out any variables, home squiggles, and normalise it
See also http://stackoverflow.com/questions/26403972
"""
if s is None:
return None
result = FilePath(s) if (os.path.isfile(s) or as_file) else DirectPath(s)
return result.expandall()
|
python
|
def makepath(s, as_file=False):
"""Make a path from a string
Expand out any variables, home squiggles, and normalise it
See also http://stackoverflow.com/questions/26403972
"""
if s is None:
return None
result = FilePath(s) if (os.path.isfile(s) or as_file) else DirectPath(s)
return result.expandall()
|
[
"def",
"makepath",
"(",
"s",
",",
"as_file",
"=",
"False",
")",
":",
"if",
"s",
"is",
"None",
":",
"return",
"None",
"result",
"=",
"FilePath",
"(",
"s",
")",
"if",
"(",
"os",
".",
"path",
".",
"isfile",
"(",
"s",
")",
"or",
"as_file",
")",
"else",
"DirectPath",
"(",
"s",
")",
"return",
"result",
".",
"expandall",
"(",
")"
] |
Make a path from a string
Expand out any variables, home squiggles, and normalise it
See also http://stackoverflow.com/questions/26403972
|
[
"Make",
"a",
"path",
"from",
"a",
"string"
] |
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
|
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/paths.py#L608-L617
|
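A small usage sketch for makepath above, assuming pysyte is installed; which class comes back depends only on whether the string names an existing file.

from pysyte.paths import makepath  # assumed import path, from the repo layout above

p = makepath('~/notes.txt')        # '~' is expanded by expandall()
print(type(p).__name__)            # FilePath if ~/notes.txt exists, otherwise DirectPath
print(makepath(None))              # None is passed straight through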
241,035
|
jalanb/pysyte
|
pysyte/paths.py
|
cd
|
def cd(path_to): # pylint: disable=invalid-name
"""cd to the given path
If the path is a file, then cd to its parent directory
Remember current directory before the cd
so that we can cd back there with cd('-')
"""
if path_to == '-':
if not cd.previous:
raise PathError('No previous directory to return to')
return cd(cd.previous)
if not hasattr(path_to, 'cd'):
path_to = makepath(path_to)
try:
previous = os.getcwd()
except OSError as e:
if 'No such file or directory' in str(e):
return False
raise
if path_to.isdir():
os.chdir(path_to)
elif path_to.isfile():
os.chdir(path_to.parent)
elif not os.path.exists(path_to):
return False
else:
raise PathError('Cannot cd to %s' % path_to)
cd.previous = previous
return True
|
python
|
def cd(path_to): # pylint: disable=invalid-name
"""cd to the given path
If the path is a file, then cd to its parent directory
Remember current directory before the cd
so that we can cd back there with cd('-')
"""
if path_to == '-':
if not cd.previous:
raise PathError('No previous directory to return to')
return cd(cd.previous)
if not hasattr(path_to, 'cd'):
path_to = makepath(path_to)
try:
previous = os.getcwd()
except OSError as e:
if 'No such file or directory' in str(e):
return False
raise
if path_to.isdir():
os.chdir(path_to)
elif path_to.isfile():
os.chdir(path_to.parent)
elif not os.path.exists(path_to):
return False
else:
raise PathError('Cannot cd to %s' % path_to)
cd.previous = previous
return True
|
[
"def",
"cd",
"(",
"path_to",
")",
":",
"# pylint: disable=invalid-name",
"if",
"path_to",
"==",
"'-'",
":",
"if",
"not",
"cd",
".",
"previous",
":",
"raise",
"PathError",
"(",
"'No previous directory to return to'",
")",
"return",
"cd",
"(",
"cd",
".",
"previous",
")",
"if",
"not",
"hasattr",
"(",
"path_to",
",",
"'cd'",
")",
":",
"path_to",
"=",
"makepath",
"(",
"path_to",
")",
"try",
":",
"previous",
"=",
"os",
".",
"getcwd",
"(",
")",
"except",
"OSError",
"as",
"e",
":",
"if",
"'No such file or directory'",
"in",
"str",
"(",
"e",
")",
":",
"return",
"False",
"raise",
"if",
"path_to",
".",
"isdir",
"(",
")",
":",
"os",
".",
"chdir",
"(",
"path_to",
")",
"elif",
"path_to",
".",
"isfile",
"(",
")",
":",
"os",
".",
"chdir",
"(",
"path_to",
".",
"parent",
")",
"elif",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"path_to",
")",
":",
"return",
"False",
"else",
":",
"raise",
"PathError",
"(",
"'Cannot cd to %s'",
"%",
"path_to",
")",
"cd",
".",
"previous",
"=",
"previous",
"return",
"True"
] |
cd to the given path
If the path is a file, then cd to its parent directory
Remember current directory before the cd
so that we can cd back there with cd('-')
|
[
"cd",
"to",
"the",
"given",
"path"
] |
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
|
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/paths.py#L622-L651
|
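A sketch of the cd helper above, assuming pysyte is installed and that /tmp and /etc/hosts exist (true on most Unix systems); the '-' argument mirrors the shell's cd -.

import os
from pysyte.paths import cd  # assumed import path

cd('/tmp')                   # remembers the directory we came from
cd('/etc/hosts')             # a file: cd lands in its parent directory, /etc
print(os.getcwd())           # /etc
cd('-')                      # back to /tmp
print(os.getcwd())           # /tmp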
241,036
|
jalanb/pysyte
|
pysyte/paths.py
|
make_needed
|
def make_needed(pattern, path_to_directory, wanted):
"""Make a method to check if an item matches the pattern, and is wanted
If wanted is None just check the pattern
"""
if wanted:
def needed(name):
return fnmatch(name, pattern) and wanted(
os.path.join(path_to_directory, name))
return needed
else:
return lambda name: fnmatch(name, pattern)
|
python
|
def make_needed(pattern, path_to_directory, wanted):
"""Make a method to check if an item matches the pattern, and is wanted
If wanted is None just check the pattern
"""
if wanted:
def needed(name):
return fnmatch(name, pattern) and wanted(
os.path.join(path_to_directory, name))
return needed
else:
return lambda name: fnmatch(name, pattern)
|
[
"def",
"make_needed",
"(",
"pattern",
",",
"path_to_directory",
",",
"wanted",
")",
":",
"if",
"wanted",
":",
"def",
"needed",
"(",
"name",
")",
":",
"return",
"fnmatch",
"(",
"name",
",",
"pattern",
")",
"and",
"wanted",
"(",
"os",
".",
"path",
".",
"join",
"(",
"path_to_directory",
",",
"name",
")",
")",
"return",
"needed",
"else",
":",
"return",
"lambda",
"name",
":",
"fnmatch",
"(",
"name",
",",
"pattern",
")"
] |
Make a method to check if an item matches the pattern, and is wanted
If wanted is None just check the pattern
|
[
"Make",
"a",
"method",
"to",
"check",
"if",
"an",
"item",
"matches",
"the",
"pattern",
"and",
"is",
"wanted"
] |
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
|
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/paths.py#L763-L774
|
241,037
|
jalanb/pysyte
|
pysyte/paths.py
|
list_items
|
def list_items(path_to_directory, pattern, wanted):
"""All items in the given path which match the given glob and are wanted"""
if not path_to_directory:
return set()
needed = make_needed(pattern, path_to_directory, wanted)
return [os.path.join(path_to_directory, name)
for name in _names_in_directory(path_to_directory)
if needed(name)]
|
python
|
def list_items(path_to_directory, pattern, wanted):
"""All items in the given path which match the given glob and are wanted"""
if not path_to_directory:
return set()
needed = make_needed(pattern, path_to_directory, wanted)
return [os.path.join(path_to_directory, name)
for name in _names_in_directory(path_to_directory)
if needed(name)]
|
[
"def",
"list_items",
"(",
"path_to_directory",
",",
"pattern",
",",
"wanted",
")",
":",
"if",
"not",
"path_to_directory",
":",
"return",
"set",
"(",
")",
"needed",
"=",
"make_needed",
"(",
"pattern",
",",
"path_to_directory",
",",
"wanted",
")",
"return",
"[",
"os",
".",
"path",
".",
"join",
"(",
"path_to_directory",
",",
"name",
")",
"for",
"name",
"in",
"_names_in_directory",
"(",
"path_to_directory",
")",
"if",
"needed",
"(",
"name",
")",
"]"
] |
All items in the given path which match the given glob and are wanted
|
[
"All",
"items",
"in",
"the",
"given",
"path",
"which",
"match",
"the",
"given",
"glob",
"and",
"are",
"wanted"
] |
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
|
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/paths.py#L777-L784
|
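A usage sketch of list_items above (pysyte assumed installed); wanted is any predicate that takes the joined path.

import os
from pysyte.paths import list_items  # assumed import path

# Every *.conf entry directly under /etc that is a regular file.
for item in list_items('/etc', '*.conf', os.path.isfile):
    print(item)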
241,038
|
jalanb/pysyte
|
pysyte/paths.py
|
contains_glob
|
def contains_glob(path_to_directory, pattern, wanted=None):
"""Whether the given path contains an item matching the given glob"""
if not path_to_directory:
return False
needed = make_needed(pattern, path_to_directory, wanted)
for name in _names_in_directory(path_to_directory):
if needed(name):
return True
return False
|
python
|
def contains_glob(path_to_directory, pattern, wanted=None):
"""Whether the given path contains an item matching the given glob"""
if not path_to_directory:
return False
needed = make_needed(pattern, path_to_directory, wanted)
for name in _names_in_directory(path_to_directory):
if needed(name):
return True
return False
|
[
"def",
"contains_glob",
"(",
"path_to_directory",
",",
"pattern",
",",
"wanted",
"=",
"None",
")",
":",
"if",
"not",
"path_to_directory",
":",
"return",
"False",
"needed",
"=",
"make_needed",
"(",
"pattern",
",",
"path_to_directory",
",",
"wanted",
")",
"for",
"name",
"in",
"_names_in_directory",
"(",
"path_to_directory",
")",
":",
"if",
"needed",
"(",
"name",
")",
":",
"return",
"True",
"return",
"False"
] |
Whether the given path contains an item matching the given glob
|
[
"Whether",
"the",
"given",
"path",
"contains",
"an",
"item",
"matching",
"the",
"given",
"glob"
] |
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
|
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/paths.py#L801-L809
|
241,039
|
jalanb/pysyte
|
pysyte/paths.py
|
tab_complete
|
def tab_complete(string):
"""Finish file names "left short" by tab-completion
For example, if an argument is "fred."
and no file called "fred." exists
but "fred.py" does exist
then return fred.py
"""
if is_option(string):
return string
if not missing_extension(string):
return string
if os.path.isfile(string):
return string
extended_files = [f for f in extend(string) if os.path.isfile(f)]
try:
return extended_files[0]
except IndexError:
return string
|
python
|
def tab_complete(string):
"""Finish file names "left short" by tab-completion
For example, if an argument is "fred."
and no file called "fred." exists
but "fred.py" does exist
then return fred.py
"""
if is_option(string):
return string
if not missing_extension(string):
return string
if os.path.isfile(string):
return string
extended_files = [f for f in extend(string) if os.path.isfile(f)]
try:
return extended_files[0]
except IndexError:
return string
|
[
"def",
"tab_complete",
"(",
"string",
")",
":",
"if",
"is_option",
"(",
"string",
")",
":",
"return",
"string",
"if",
"not",
"missing_extension",
"(",
"string",
")",
":",
"return",
"string",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"string",
")",
":",
"return",
"string",
"extended_files",
"=",
"[",
"f",
"for",
"f",
"in",
"extend",
"(",
"string",
")",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"f",
")",
"]",
"try",
":",
"return",
"extended_files",
"[",
"0",
"]",
"except",
"IndexError",
":",
"return",
"string"
] |
Finish file names "left short" by tab-completion
For example, if an argument is "fred."
and no file called "fred." exists
but "fred.py" does exist
then return fred.py
|
[
"Finish",
"file",
"names",
"left",
"short",
"by",
"tab",
"-",
"completion"
] |
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
|
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/paths.py#L834-L852
|
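A hedged sketch of tab_complete above, assuming pysyte is installed; it relies on the helpers is_option, missing_extension and extend, which are not shown in this excerpt.

from pysyte.paths import tab_complete  # assumed import path

# If 'script.' does not exist but 'script.py' does, the completed name comes back;
# option-like strings and names that already resolve are returned unchanged.
print(tab_complete('script.'))
print(tab_complete('-v'))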
241,040
|
jalanb/pysyte
|
pysyte/paths.py
|
pyc_to_py
|
def pyc_to_py(path_to_file):
"""Change some file extensions to those which are more likely to be text
>>> pyc_to_py('vim.pyc') == 'vim.py'
True
"""
stem, ext = os.path.splitext(path_to_file)
if ext == '.pyc':
return '%s.py' % stem
return path_to_file
|
python
|
def pyc_to_py(path_to_file):
"""Change some file extensions to those which are more likely to be text
>>> pyc_to_py('vim.pyc') == 'vim.py'
True
"""
stem, ext = os.path.splitext(path_to_file)
if ext == '.pyc':
return '%s.py' % stem
return path_to_file
|
[
"def",
"pyc_to_py",
"(",
"path_to_file",
")",
":",
"stem",
",",
"ext",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"path_to_file",
")",
"if",
"ext",
"==",
"'.pyc'",
":",
"return",
"'%s.py'",
"%",
"stem",
"return",
"path_to_file"
] |
Change some file extensions to those which are more likely to be text
>>> pyc_to_py('vim.pyc') == 'vim.py'
True
|
[
"Change",
"some",
"file",
"extensions",
"to",
"those",
"which",
"are",
"more",
"likely",
"to",
"be",
"text"
] |
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
|
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/paths.py#L855-L864
|
241,041
|
jalanb/pysyte
|
pysyte/paths.py
|
DotPath.as_existing_file
|
def as_existing_file(self, filepath):
"""Return the file class for existing files only"""
if os.path.isfile(filepath) and hasattr(self, '__file_class__'):
return self.__file_class__(filepath) # pylint: disable=no-member
return self.__class__(filepath)
|
python
|
def as_existing_file(self, filepath):
"""Return the file class for existing files only"""
if os.path.isfile(filepath) and hasattr(self, '__file_class__'):
return self.__file_class__(filepath) # pylint: disable=no-member
return self.__class__(filepath)
|
[
"def",
"as_existing_file",
"(",
"self",
",",
"filepath",
")",
":",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"filepath",
")",
"and",
"hasattr",
"(",
"self",
",",
"'__file_class__'",
")",
":",
"return",
"self",
".",
"__file_class__",
"(",
"filepath",
")",
"# pylint: disable=no-member",
"return",
"self",
".",
"__class__",
"(",
"filepath",
")"
] |
Return the file class for existing files only
|
[
"Return",
"the",
"file",
"class",
"for",
"existing",
"files",
"only"
] |
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
|
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/paths.py#L76-L80
|
241,042
|
jalanb/pysyte
|
pysyte/paths.py
|
DotPath.dirpaths
|
def dirpaths(self):
"""Split the dirname into individual directory names
An absolute path starts with an empty string, a relative path does not
>>> p = DotPath(u'/path/to/x.py')
>>> p.paths == p.dirpaths()
True
"""
parts = self.parts()
result = [DotPath(parts[0] or '/')]
for name in parts[1:]:
result.append(result[-1] / name)
return result
|
python
|
def dirpaths(self):
"""Split the dirname into individual directory names
An absolute path starts with an empty string, a relative path does not
>>> p = DotPath(u'/path/to/x.py')
>>> p.paths == p.dirpaths()
True
"""
parts = self.parts()
result = [DotPath(parts[0] or '/')]
for name in parts[1:]:
result.append(result[-1] / name)
return result
|
[
"def",
"dirpaths",
"(",
"self",
")",
":",
"parts",
"=",
"self",
".",
"parts",
"(",
")",
"result",
"=",
"[",
"DotPath",
"(",
"parts",
"[",
"0",
"]",
"or",
"'/'",
")",
"]",
"for",
"name",
"in",
"parts",
"[",
"1",
":",
"]",
":",
"result",
".",
"append",
"(",
"result",
"[",
"-",
"1",
"]",
"/",
"name",
")",
"return",
"result"
] |
Split the dirname into individual directory names
An absolute path starts with an empty string, a relative path does not
>>> p = DotPath(u'/path/to/x.py')
>>> p.paths == p.dirpaths()
True
|
[
"Split",
"the",
"dirname",
"into",
"individual",
"directory",
"names"
] |
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
|
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/paths.py#L103-L116
|
241,043
|
jalanb/pysyte
|
pysyte/paths.py
|
DotPath.parts
|
def parts(self):
"""Split the path into parts like Pathlib
>>> expected = ['/', 'path', 'to', 'there']
>>> assert DotPath('/path/to/there').parts() == expected
"""
parts = self.split(os.path.sep)
parts[0] = parts[0] and parts[0] or '/'
return parts
|
python
|
def parts(self):
"""Split the path into parts like Pathlib
>>> expected = ['/', 'path', 'to', 'there']
>>> assert DotPath('/path/to/there').parts() == expected
"""
parts = self.split(os.path.sep)
parts[0] = parts[0] and parts[0] or '/'
return parts
|
[
"def",
"parts",
"(",
"self",
")",
":",
"parts",
"=",
"self",
".",
"split",
"(",
"os",
".",
"path",
".",
"sep",
")",
"parts",
"[",
"0",
"]",
"=",
"parts",
"[",
"0",
"]",
"and",
"parts",
"[",
"0",
"]",
"or",
"'/'",
"return",
"parts"
] |
Split the path into parts like Pathlib
>>> expected = ['/', 'path', 'to', 'there']
>>> assert DotPath('/path/to/there').parts() == expected
|
[
"Split",
"the",
"path",
"into",
"parts",
"like",
"Pathlib"
] |
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
|
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/paths.py#L118-L126
|
241,044
|
jalanb/pysyte
|
pysyte/paths.py
|
DotPath.short_relative_path_to
|
def short_relative_path_to(self, destination):
"""The shorter of either the absolute path of the destination,
or the relative path to it
>>> print(DotPath('/home/guido/bin').short_relative_path_to(
... '/home/guido/build/python.tar'))
../build/python.tar
>>> print(DotPath('/home/guido/bin').short_relative_path_to(
... '/mnt/guido/build/python.tar'))
/mnt/guido/build/python.tar
"""
relative = self.relpathto(destination)
absolute = self.__class__(destination).abspath()
if len(relative) < len(absolute):
return relative
return absolute
|
python
|
def short_relative_path_to(self, destination):
"""The shorter of either the absolute path of the destination,
or the relative path to it
>>> print(DotPath('/home/guido/bin').short_relative_path_to(
... '/home/guido/build/python.tar'))
../build/python.tar
>>> print(DotPath('/home/guido/bin').short_relative_path_to(
... '/mnt/guido/build/python.tar'))
/mnt/guido/build/python.tar
"""
relative = self.relpathto(destination)
absolute = self.__class__(destination).abspath()
if len(relative) < len(absolute):
return relative
return absolute
|
[
"def",
"short_relative_path_to",
"(",
"self",
",",
"destination",
")",
":",
"relative",
"=",
"self",
".",
"relpathto",
"(",
"destination",
")",
"absolute",
"=",
"self",
".",
"__class__",
"(",
"destination",
")",
".",
"abspath",
"(",
")",
"if",
"len",
"(",
"relative",
")",
"<",
"len",
"(",
"absolute",
")",
":",
"return",
"relative",
"return",
"absolute"
] |
The shorter of either the absolute path of the destination,
or the relative path to it
>>> print(DotPath('/home/guido/bin').short_relative_path_to(
... '/home/guido/build/python.tar'))
../build/python.tar
>>> print(DotPath('/home/guido/bin').short_relative_path_to(
... '/mnt/guido/build/python.tar'))
/mnt/guido/build/python.tar
|
[
"The",
"shorter",
"of",
"either",
"the",
"absolute",
"path",
"of",
"the",
"destination",
"or",
"the",
"relative",
"path",
"to",
"it"
] |
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
|
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/paths.py#L165-L180
|
241,045
|
jalanb/pysyte
|
pysyte/paths.py
|
FilePath.extend_by
|
def extend_by(self, extension):
"""The path to the file changed to use the given extension
>>> FilePath('/path/to/fred').extend_by('.txt')
<FilePath '/path/to/fred.txt'>
>>> FilePath('/path/to/fred.txt').extend_by('..tmp')
<FilePath '/path/to/fred.tmp'>
>>> FilePath('/path/to/fred.txt').extend_by('fred')
<FilePath '/path/to/fred.fred'>
"""
copy = self[:]
filename, _ = os.path.splitext(copy)
return self.__class__('%s.%s' % (filename, extension.lstrip('.')))
|
python
|
def extend_by(self, extension):
"""The path to the file changed to use the given extension
>>> FilePath('/path/to/fred').extend_by('.txt')
<FilePath '/path/to/fred.txt'>
>>> FilePath('/path/to/fred.txt').extend_by('..tmp')
<FilePath '/path/to/fred.tmp'>
>>> FilePath('/path/to/fred.txt').extend_by('fred')
<FilePath '/path/to/fred.fred'>
"""
copy = self[:]
filename, _ = os.path.splitext(copy)
return self.__class__('%s.%s' % (filename, extension.lstrip('.')))
|
[
"def",
"extend_by",
"(",
"self",
",",
"extension",
")",
":",
"copy",
"=",
"self",
"[",
":",
"]",
"filename",
",",
"_",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"copy",
")",
"return",
"self",
".",
"__class__",
"(",
"'%s.%s'",
"%",
"(",
"filename",
",",
"extension",
".",
"lstrip",
"(",
"'.'",
")",
")",
")"
] |
The path to the file changed to use the given extension
>>> FilePath('/path/to/fred').extend_by('.txt')
<FilePath '/path/to/fred.txt'>
>>> FilePath('/path/to/fred.txt').extend_by('..tmp')
<FilePath '/path/to/fred.tmp'>
>>> FilePath('/path/to/fred.txt').extend_by('fred')
<FilePath '/path/to/fred.fred'>
|
[
"The",
"path",
"to",
"the",
"file",
"changed",
"to",
"use",
"the",
"given",
"extension"
] |
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
|
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/paths.py#L390-L402
|
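Editor's illustration (not part of the source record): the extend_by entry swaps a file's extension, and the lstrip('.') call is what makes '.txt', 'txt', and even '..tmp' all behave the same way. A small standalone sketch of that behaviour; the free function below is invented for illustration and is not the FilePath API.

import os.path

def extend_by(path, extension):
    # Replace the current extension; tolerate any leading dots on the new one.
    filename, _ = os.path.splitext(path)
    return '%s.%s' % (filename, extension.lstrip('.'))

assert extend_by('/path/to/fred', '.txt') == '/path/to/fred.txt'
assert extend_by('/path/to/fred.txt', '..tmp') == '/path/to/fred.tmp'
assert extend_by('/path/to/fred.txt', 'fred') == '/path/to/fred.fred'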
241,046
|
jalanb/pysyte
|
pysyte/paths.py
|
FilePath.make_file_exist
|
def make_file_exist(self):
"""Make sure the parent directory exists, then touch the file"""
self.parent.make_directory_exist()
self.parent.touch_file(self.name)
return self
|
python
|
def make_file_exist(self):
"""Make sure the parent directory exists, then touch the file"""
self.parent.make_directory_exist()
self.parent.touch_file(self.name)
return self
|
[
"def",
"make_file_exist",
"(",
"self",
")",
":",
"self",
".",
"parent",
".",
"make_directory_exist",
"(",
")",
"self",
".",
"parent",
".",
"touch_file",
"(",
"self",
".",
"name",
")",
"return",
"self"
] |
Make sure the parent directory exists, then touch the file
|
[
"Make",
"sure",
"the",
"parent",
"directory",
"exists",
"then",
"touch",
"the",
"file"
] |
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
|
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/paths.py#L438-L442
|
241,047
|
jalanb/pysyte
|
pysyte/paths.py
|
FilePath.language
|
def language(self):
"""The language of this file"""
try:
return self._language
except AttributeError:
self._language = find_language(self, getattr(self, 'exts', None))
return self._language
|
python
|
def language(self):
"""The language of this file"""
try:
return self._language
except AttributeError:
self._language = find_language(self, getattr(self, 'exts', None))
return self._language
|
[
"def",
"language",
"(",
"self",
")",
":",
"try",
":",
"return",
"self",
".",
"_language",
"except",
"AttributeError",
":",
"self",
".",
"_language",
"=",
"find_language",
"(",
"self",
",",
"getattr",
"(",
"self",
",",
"'exts'",
",",
"None",
")",
")",
"return",
"self",
".",
"_language"
] |
The language of this file
|
[
"The",
"language",
"of",
"this",
"file"
] |
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
|
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/paths.py#L445-L451
|
241,048
|
jalanb/pysyte
|
pysyte/paths.py
|
DirectPath.try_remove
|
def try_remove(self):
"""Try to remove the path
If it is a directory, try recursive removal of contents too
"""
if self.islink():
self.unlink()
elif self.isfile():
self.remove()
elif self.isdir():
self.empty_directory()
if self.isdir():
self.rmdir()
else:
return False
return True
|
python
|
def try_remove(self):
"""Try to remove the path
If it is a directory, try recursive removal of contents too
"""
if self.islink():
self.unlink()
elif self.isfile():
self.remove()
elif self.isdir():
self.empty_directory()
if self.isdir():
self.rmdir()
else:
return False
return True
|
[
"def",
"try_remove",
"(",
"self",
")",
":",
"if",
"self",
".",
"islink",
"(",
")",
":",
"self",
".",
"unlink",
"(",
")",
"elif",
"self",
".",
"isfile",
"(",
")",
":",
"self",
".",
"remove",
"(",
")",
"elif",
"self",
".",
"isdir",
"(",
")",
":",
"self",
".",
"empty_directory",
"(",
")",
"if",
"self",
".",
"isdir",
"(",
")",
":",
"self",
".",
"rmdir",
"(",
")",
"else",
":",
"return",
"False",
"return",
"True"
] |
Try to remove the path
If it is a directory, try recursive removal of contents too
|
[
"Try",
"to",
"remove",
"the",
"path"
] |
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
|
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/paths.py#L483-L498
|
241,049
|
jalanb/pysyte
|
pysyte/paths.py
|
DirectPath.empty_directory
|
def empty_directory(self):
"""Remove all contents of a directory
Including any sub-directories and their contents"""
for child in self.walkfiles():
child.remove()
for child in reversed([d for d in self.walkdirs()]):
if child == self or not child.isdir():
continue
child.rmdir()
|
python
|
def empty_directory(self):
"""Remove all contents of a directory
Including any sub-directories and their contents"""
for child in self.walkfiles():
child.remove()
for child in reversed([d for d in self.walkdirs()]):
if child == self or not child.isdir():
continue
child.rmdir()
|
[
"def",
"empty_directory",
"(",
"self",
")",
":",
"for",
"child",
"in",
"self",
".",
"walkfiles",
"(",
")",
":",
"child",
".",
"remove",
"(",
")",
"for",
"child",
"in",
"reversed",
"(",
"[",
"d",
"for",
"d",
"in",
"self",
".",
"walkdirs",
"(",
")",
"]",
")",
":",
"if",
"child",
"==",
"self",
"or",
"not",
"child",
".",
"isdir",
"(",
")",
":",
"continue",
"child",
".",
"rmdir",
"(",
")"
] |
Remove all contents of a directory
Including any sub-directories and their contents
|
[
"Remove",
"all",
"contents",
"of",
"a",
"directory"
] |
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
|
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/paths.py#L500-L509
|
241,050
|
jalanb/pysyte
|
pysyte/paths.py
|
DirectPath.make_file_exist
|
def make_file_exist(self, filename=None):
"""Make the directory exist, then touch the file
If the filename is None, then use self.name as filename
"""
if filename is None:
path_to_file = FilePath(self)
path_to_file.make_file_exist()
return path_to_file
else:
self.make_directory_exist()
path_to_file = self.touch_file(filename)
return FilePath(path_to_file)
|
python
|
def make_file_exist(self, filename=None):
"""Make the directory exist, then touch the file
If the filename is None, then use self.name as filename
"""
if filename is None:
path_to_file = FilePath(self)
path_to_file.make_file_exist()
return path_to_file
else:
self.make_directory_exist()
path_to_file = self.touch_file(filename)
return FilePath(path_to_file)
|
[
"def",
"make_file_exist",
"(",
"self",
",",
"filename",
"=",
"None",
")",
":",
"if",
"filename",
"is",
"None",
":",
"path_to_file",
"=",
"FilePath",
"(",
"self",
")",
"path_to_file",
".",
"make_file_exist",
"(",
")",
"return",
"path_to_file",
"else",
":",
"self",
".",
"make_directory_exist",
"(",
")",
"path_to_file",
"=",
"self",
".",
"touch_file",
"(",
"filename",
")",
"return",
"FilePath",
"(",
"path_to_file",
")"
] |
Make the directory exist, then touch the file
If the filename is None, then use self.name as filename
|
[
"Make",
"the",
"directory",
"exist",
"then",
"touch",
"the",
"file"
] |
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
|
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/paths.py#L542-L554
|
241,051
|
jalanb/pysyte
|
pysyte/paths.py
|
DirectPath.touch_file
|
def touch_file(self, filename):
"""Touch a file in the directory"""
path_to_file = self.__file_class__(os.path.join(self, filename))
path_to_file.touch()
return path_to_file
|
python
|
def touch_file(self, filename):
"""Touch a file in the directory"""
path_to_file = self.__file_class__(os.path.join(self, filename))
path_to_file.touch()
return path_to_file
|
[
"def",
"touch_file",
"(",
"self",
",",
"filename",
")",
":",
"path_to_file",
"=",
"self",
".",
"__file_class__",
"(",
"os",
".",
"path",
".",
"join",
"(",
"self",
",",
"filename",
")",
")",
"path_to_file",
".",
"touch",
"(",
")",
"return",
"path_to_file"
] |
Touch a file in the directory
|
[
"Touch",
"a",
"file",
"in",
"the",
"directory"
] |
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
|
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/paths.py#L560-L564
|
241,052
|
jalanb/pysyte
|
pysyte/paths.py
|
DirectPath.existing_sub_paths
|
def existing_sub_paths(self, sub_paths):
"""Those in the given list of sub_paths which do exist"""
paths_to_subs = [self / _ for _ in sub_paths]
return [_ for _ in paths_to_subs if _.exists()]
|
python
|
def existing_sub_paths(self, sub_paths):
"""Those in the given list of sub_paths which do exist"""
paths_to_subs = [self / _ for _ in sub_paths]
return [_ for _ in paths_to_subs if _.exists()]
|
[
"def",
"existing_sub_paths",
"(",
"self",
",",
"sub_paths",
")",
":",
"paths_to_subs",
"=",
"[",
"self",
"/",
"_",
"for",
"_",
"in",
"sub_paths",
"]",
"return",
"[",
"_",
"for",
"_",
"in",
"paths_to_subs",
"if",
"_",
".",
"exists",
"(",
")",
"]"
] |
Those in the given list of sub_paths which do exist
|
[
"Those",
"in",
"the",
"given",
"list",
"of",
"sub_paths",
"which",
"do",
"exist"
] |
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
|
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/paths.py#L566-L569
|
241,053
|
frigg/frigg-worker
|
frigg_worker/deployments.py
|
Deployment.load_preset
|
def load_preset(self):
"""
Loads preset if it is specified in the .frigg.yml
"""
if 'preset' in self.settings.preview:
with open(os.path.join(os.path.dirname(__file__), 'presets.yaml')) as f:
presets = yaml.load(f.read())
if self.settings.preview['preset'] in presets:
self.preset = presets[self.settings.preview['preset']]
return self.preset
|
python
|
def load_preset(self):
"""
Loads preset if it is specified in the .frigg.yml
"""
if 'preset' in self.settings.preview:
with open(os.path.join(os.path.dirname(__file__), 'presets.yaml')) as f:
presets = yaml.load(f.read())
if self.settings.preview['preset'] in presets:
self.preset = presets[self.settings.preview['preset']]
return self.preset
|
[
"def",
"load_preset",
"(",
"self",
")",
":",
"if",
"'preset'",
"in",
"self",
".",
"settings",
".",
"preview",
":",
"with",
"open",
"(",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"__file__",
")",
",",
"'presets.yaml'",
")",
")",
"as",
"f",
":",
"presets",
"=",
"yaml",
".",
"load",
"(",
"f",
".",
"read",
"(",
")",
")",
"if",
"self",
".",
"settings",
".",
"preview",
"[",
"'preset'",
"]",
"in",
"presets",
":",
"self",
".",
"preset",
"=",
"presets",
"[",
"self",
".",
"settings",
".",
"preview",
"[",
"'preset'",
"]",
"]",
"return",
"self",
".",
"preset"
] |
Loads preset if it is specified in the .frigg.yml
|
[
"Loads",
"preset",
"if",
"it",
"is",
"specified",
"in",
"the",
".",
"frigg",
".",
"yml"
] |
8c215cd8f5a27ff9f5a4fedafe93d2ef0fbca86c
|
https://github.com/frigg/frigg-worker/blob/8c215cd8f5a27ff9f5a4fedafe93d2ef0fbca86c/frigg_worker/deployments.py#L86-L96
|
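Editor's illustration (not part of the source record): the load_preset entry reads a bundled presets.yaml and indexes it by the preset named in the project settings. A hedged standalone sketch of the same lookup, assuming PyYAML is installed and a presets.yaml file exists next to the script; note that recent PyYAML releases require a Loader argument for yaml.load, so the sketch uses yaml.safe_load instead. The function signature and return-on-miss behaviour here are illustrative, not the frigg-worker API.

import os
import yaml  # assumes PyYAML is installed

def load_preset(preset_name, presets_path=None):
    # Same lookup as the record above, but with the non-deprecated safe_load.
    if presets_path is None:
        presets_path = os.path.join(os.path.dirname(__file__), 'presets.yaml')
    with open(presets_path) as f:
        presets = yaml.safe_load(f.read())
    return presets.get(preset_name)  # None when the preset is unknown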
241,054
|
lukaszb/monolith
|
monolith/cli/base.py
|
ExecutionManager.call_command
|
def call_command(self, cmd, *argv):
"""
Runs a command.
:param cmd: command to run (key at the registry)
:param argv: arguments that would be passed to the command
"""
parser = self.get_parser()
args = [cmd] + list(argv)
namespace = parser.parse_args(args)
self.run_command(namespace)
|
python
|
def call_command(self, cmd, *argv):
"""
Runs a command.
:param cmd: command to run (key at the registry)
:param argv: arguments that would be passed to the command
"""
parser = self.get_parser()
args = [cmd] + list(argv)
namespace = parser.parse_args(args)
self.run_command(namespace)
|
[
"def",
"call_command",
"(",
"self",
",",
"cmd",
",",
"*",
"argv",
")",
":",
"parser",
"=",
"self",
".",
"get_parser",
"(",
")",
"args",
"=",
"[",
"cmd",
"]",
"+",
"list",
"(",
"argv",
")",
"namespace",
"=",
"parser",
".",
"parse_args",
"(",
"args",
")",
"self",
".",
"run_command",
"(",
"namespace",
")"
] |
Runs a command.
:param cmd: command to run (key at the registry)
:param argv: arguments that would be passed to the command
|
[
"Runs",
"a",
"command",
"."
] |
cd8ab0483829ed4be6439c4f787ea59d48ad35b8
|
https://github.com/lukaszb/monolith/blob/cd8ab0483829ed4be6439c4f787ea59d48ad35b8/monolith/cli/base.py#L125-L135
|
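Editor's illustration (not part of the source record): the ExecutionManager.call_command entry prepends the command name to the argument list, parses it, and dispatches on the resulting namespace. A self-contained argparse sketch of that dispatch pattern; the parser layout and handler are invented and are not the monolith API.

import argparse

def build_parser():
    parser = argparse.ArgumentParser(prog='mono')
    subparsers = parser.add_subparsers(dest='command')
    greet = subparsers.add_parser('greet')
    greet.add_argument('name')
    greet.set_defaults(func=lambda ns: print('hello %s' % ns.name))
    return parser

def call_command(cmd, *argv):
    # Same flow as ExecutionManager.call_command: prepend the command name,
    # parse, then run whatever handler the sub-parser attached.
    namespace = build_parser().parse_args([cmd] + list(argv))
    if hasattr(namespace, 'func'):
        namespace.func(namespace)

call_command('greet', 'world')   # prints "hello world"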
241,055
|
lukaszb/monolith
|
monolith/cli/base.py
|
ExecutionManager.execute
|
def execute(self, argv=None):
"""
Executes command based on given arguments.
"""
if self.completion:
self.autocomplete()
parser = self.get_parser()
namespace = parser.parse_args(argv)
if hasattr(namespace, 'func'):
self.run_command(namespace)
|
python
|
def execute(self, argv=None):
"""
Executes command based on given arguments.
"""
if self.completion:
self.autocomplete()
parser = self.get_parser()
namespace = parser.parse_args(argv)
if hasattr(namespace, 'func'):
self.run_command(namespace)
|
[
"def",
"execute",
"(",
"self",
",",
"argv",
"=",
"None",
")",
":",
"if",
"self",
".",
"completion",
":",
"self",
".",
"autocomplete",
"(",
")",
"parser",
"=",
"self",
".",
"get_parser",
"(",
")",
"namespace",
"=",
"parser",
".",
"parse_args",
"(",
"argv",
")",
"if",
"hasattr",
"(",
"namespace",
",",
"'func'",
")",
":",
"self",
".",
"run_command",
"(",
"namespace",
")"
] |
Executes command based on given arguments.
|
[
"Executes",
"command",
"based",
"on",
"given",
"arguments",
"."
] |
cd8ab0483829ed4be6439c4f787ea59d48ad35b8
|
https://github.com/lukaszb/monolith/blob/cd8ab0483829ed4be6439c4f787ea59d48ad35b8/monolith/cli/base.py#L137-L146
|
241,056
|
lukaszb/monolith
|
monolith/cli/base.py
|
SimpleExecutionManager.get_commands_to_register
|
def get_commands_to_register(self):
"""
Returns dictionary with commands given during construction. If value is
a string, it would be converted into proper class pointer.
"""
return dict((key, get_class(value)) for key, value in
self.simple_commands.items())
|
python
|
def get_commands_to_register(self):
"""
Returns dictionary with commands given during construction. If value is
a string, it would be converted into proper class pointer.
"""
return dict((key, get_class(value)) for key, value in
self.simple_commands.items())
|
[
"def",
"get_commands_to_register",
"(",
"self",
")",
":",
"return",
"dict",
"(",
"(",
"key",
",",
"get_class",
"(",
"value",
")",
")",
"for",
"key",
",",
"value",
"in",
"self",
".",
"simple_commands",
".",
"items",
"(",
")",
")"
] |
Returns dictionary with commands given during construction. If value is
a string, it would be converted into proper class pointer.
|
[
"Returns",
"dictionary",
"with",
"commands",
"given",
"during",
"construction",
".",
"If",
"value",
"is",
"a",
"string",
"it",
"would",
"be",
"converted",
"into",
"proper",
"class",
"pointer",
"."
] |
cd8ab0483829ed4be6439c4f787ea59d48ad35b8
|
https://github.com/lukaszb/monolith/blob/cd8ab0483829ed4be6439c4f787ea59d48ad35b8/monolith/cli/base.py#L195-L201
|
241,057
|
datakortet/dkfileutils
|
dkfileutils/which.py
|
get_path_directories
|
def get_path_directories():
"""Return a list of all the directories on the path.
"""
pth = os.environ['PATH']
if sys.platform == 'win32' and os.environ.get("BASH"):
# winbash has a bug..
if pth[1] == ';': # pragma: nocover
pth = pth.replace(';', ':', 1)
return [p.strip() for p in pth.split(os.pathsep) if p.strip()]
|
python
|
def get_path_directories():
"""Return a list of all the directories on the path.
"""
pth = os.environ['PATH']
if sys.platform == 'win32' and os.environ.get("BASH"):
# winbash has a bug..
if pth[1] == ';': # pragma: nocover
pth = pth.replace(';', ':', 1)
return [p.strip() for p in pth.split(os.pathsep) if p.strip()]
|
[
"def",
"get_path_directories",
"(",
")",
":",
"pth",
"=",
"os",
".",
"environ",
"[",
"'PATH'",
"]",
"if",
"sys",
".",
"platform",
"==",
"'win32'",
"and",
"os",
".",
"environ",
".",
"get",
"(",
"\"BASH\"",
")",
":",
"# winbash has a bug..",
"if",
"pth",
"[",
"1",
"]",
"==",
"';'",
":",
"# pragma: nocover",
"pth",
"=",
"pth",
".",
"replace",
"(",
"';'",
",",
"':'",
",",
"1",
")",
"return",
"[",
"p",
".",
"strip",
"(",
")",
"for",
"p",
"in",
"pth",
".",
"split",
"(",
"os",
".",
"pathsep",
")",
"if",
"p",
".",
"strip",
"(",
")",
"]"
] |
Return a list of all the directories on the path.
|
[
"Return",
"a",
"list",
"of",
"all",
"the",
"directories",
"on",
"the",
"path",
"."
] |
924098d6e2edf88ad9b3ffdec9c74530f80a7d77
|
https://github.com/datakortet/dkfileutils/blob/924098d6e2edf88ad9b3ffdec9c74530f80a7d77/dkfileutils/which.py#L18-L26
|
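Editor's illustration (not part of the source record): get_path_directories is essentially "split $PATH on os.pathsep and drop empty entries"; the win32/BASH branch only patches a winbash quirk. The common case can be reproduced in one line, safe to run on any platform.

import os

path_dirs = [p.strip() for p in os.environ.get('PATH', '').split(os.pathsep) if p.strip()]
print(path_dirs[:3])  # the first few directories that would be searched for executables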
241,058
|
datakortet/dkfileutils
|
dkfileutils/which.py
|
_listdir
|
def _listdir(pth, extensions):
"""Non-raising listdir."""
try:
return [fname for fname in os.listdir(pth)
if os.path.splitext(fname)[1] in extensions]
except OSError: # pragma: nocover
pass
|
python
|
def _listdir(pth, extensions):
"""Non-raising listdir."""
try:
return [fname for fname in os.listdir(pth)
if os.path.splitext(fname)[1] in extensions]
except OSError: # pragma: nocover
pass
|
[
"def",
"_listdir",
"(",
"pth",
",",
"extensions",
")",
":",
"try",
":",
"return",
"[",
"fname",
"for",
"fname",
"in",
"os",
".",
"listdir",
"(",
"pth",
")",
"if",
"os",
".",
"path",
".",
"splitext",
"(",
"fname",
")",
"[",
"1",
"]",
"in",
"extensions",
"]",
"except",
"OSError",
":",
"# pragma: nocover",
"pass"
] |
Non-raising listdir.
|
[
"Non",
"-",
"raising",
"listdir",
"."
] |
924098d6e2edf88ad9b3ffdec9c74530f80a7d77
|
https://github.com/datakortet/dkfileutils/blob/924098d6e2edf88ad9b3ffdec9c74530f80a7d77/dkfileutils/which.py#L35-L41
|
241,059
|
datakortet/dkfileutils
|
dkfileutils/which.py
|
which
|
def which(filename, interactive=False, verbose=False):
"""Yield all executable files on path that matches `filename`.
"""
exe = [e.lower() for e in os.environ.get('PATHEXT', '').split(';')]
if sys.platform != 'win32': # pragma: nocover
exe.append('')
name, ext = os.path.splitext(filename)
has_extension = bool(ext)
if has_extension and ext.lower() not in exe:
raise ValueError("which can only search for executable files")
def match(filenames):
"""Returns the sorted subset of ``filenames`` that matches ``filename``.
"""
res = set()
for fname in filenames:
if fname == filename: # pragma: nocover
res.add(fname) # exact match
continue
fname_name, fname_ext = os.path.splitext(fname)
if fname_name == name and fname_ext.lower() in exe: # pragma: nocover
res.add(fname)
return sorted(res)
returnset = set()
found = False
for pth in get_path_directories():
if verbose: # pragma: nocover
print('checking pth..')
fnames = _listdir(pth, exe)
if not fnames:
continue
for m in match(fnames):
found_file = _normalize(os.path.join(pth, m))
if found_file not in returnset: # pragma: nocover
if is_executable(found_file):
yield found_file
returnset.add(found_file)
found = True
if not found and interactive: # pragma: nocover
print("Couldn't find %r anywhere on the path.." % filename)
sys.exit(1)
|
python
|
def which(filename, interactive=False, verbose=False):
"""Yield all executable files on path that matches `filename`.
"""
exe = [e.lower() for e in os.environ.get('PATHEXT', '').split(';')]
if sys.platform != 'win32': # pragma: nocover
exe.append('')
name, ext = os.path.splitext(filename)
has_extension = bool(ext)
if has_extension and ext.lower() not in exe:
raise ValueError("which can only search for executable files")
def match(filenames):
"""Returns the sorted subset of ``filenames`` that matches ``filename``.
"""
res = set()
for fname in filenames:
if fname == filename: # pragma: nocover
res.add(fname) # exact match
continue
fname_name, fname_ext = os.path.splitext(fname)
if fname_name == name and fname_ext.lower() in exe: # pragma: nocover
res.add(fname)
return sorted(res)
returnset = set()
found = False
for pth in get_path_directories():
if verbose: # pragma: nocover
print('checking pth..')
fnames = _listdir(pth, exe)
if not fnames:
continue
for m in match(fnames):
found_file = _normalize(os.path.join(pth, m))
if found_file not in returnset: # pragma: nocover
if is_executable(found_file):
yield found_file
returnset.add(found_file)
found = True
if not found and interactive: # pragma: nocover
print("Couldn't find %r anywhere on the path.." % filename)
sys.exit(1)
|
[
"def",
"which",
"(",
"filename",
",",
"interactive",
"=",
"False",
",",
"verbose",
"=",
"False",
")",
":",
"exe",
"=",
"[",
"e",
".",
"lower",
"(",
")",
"for",
"e",
"in",
"os",
".",
"environ",
".",
"get",
"(",
"'PATHEXT'",
",",
"''",
")",
".",
"split",
"(",
"';'",
")",
"]",
"if",
"sys",
".",
"platform",
"!=",
"'win32'",
":",
"# pragma: nocover",
"exe",
".",
"append",
"(",
"''",
")",
"name",
",",
"ext",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"filename",
")",
"has_extension",
"=",
"bool",
"(",
"ext",
")",
"if",
"has_extension",
"and",
"ext",
".",
"lower",
"(",
")",
"not",
"in",
"exe",
":",
"raise",
"ValueError",
"(",
"\"which can only search for executable files\"",
")",
"def",
"match",
"(",
"filenames",
")",
":",
"\"\"\"Returns the sorted subset of ``filenames`` that matches ``filename``.\n \"\"\"",
"res",
"=",
"set",
"(",
")",
"for",
"fname",
"in",
"filenames",
":",
"if",
"fname",
"==",
"filename",
":",
"# pragma: nocover",
"res",
".",
"add",
"(",
"fname",
")",
"# exact match",
"continue",
"fname_name",
",",
"fname_ext",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"fname",
")",
"if",
"fname_name",
"==",
"name",
"and",
"fname_ext",
".",
"lower",
"(",
")",
"in",
"exe",
":",
"# pragma: nocover",
"res",
".",
"add",
"(",
"fname",
")",
"return",
"sorted",
"(",
"res",
")",
"returnset",
"=",
"set",
"(",
")",
"found",
"=",
"False",
"for",
"pth",
"in",
"get_path_directories",
"(",
")",
":",
"if",
"verbose",
":",
"# pragma: nocover",
"print",
"(",
"'checking pth..'",
")",
"fnames",
"=",
"_listdir",
"(",
"pth",
",",
"exe",
")",
"if",
"not",
"fnames",
":",
"continue",
"for",
"m",
"in",
"match",
"(",
"fnames",
")",
":",
"found_file",
"=",
"_normalize",
"(",
"os",
".",
"path",
".",
"join",
"(",
"pth",
",",
"m",
")",
")",
"if",
"found_file",
"not",
"in",
"returnset",
":",
"# pragma: nocover",
"if",
"is_executable",
"(",
"found_file",
")",
":",
"yield",
"found_file",
"returnset",
".",
"add",
"(",
"found_file",
")",
"found",
"=",
"True",
"if",
"not",
"found",
"and",
"interactive",
":",
"# pragma: nocover",
"print",
"(",
"\"Couldn't find %r anywhere on the path..\"",
"%",
"filename",
")",
"sys",
".",
"exit",
"(",
"1",
")"
] |
Yield all executable files on path that matches `filename`.
|
[
"Yield",
"all",
"executable",
"files",
"on",
"path",
"that",
"matches",
"filename",
"."
] |
924098d6e2edf88ad9b3ffdec9c74530f80a7d77
|
https://github.com/datakortet/dkfileutils/blob/924098d6e2edf88ad9b3ffdec9c74530f80a7d77/dkfileutils/which.py#L48-L93
|
241,060
|
a-tal/kezmenu3
|
kezmenu3/kezmenu_effects.py
|
KezMenuEffectAble.enableEffect
|
def enableEffect(self, name, **kwargs):
"""Enable an effect in the KezMenu."""
if name not in VALID_EFFECTS:
raise KeyError("KezMenu doesn't know an effect of type %s" % name)
self.__getattribute__(
'_effectinit_{}'.format(name.replace("-", "_"))
)(name, **kwargs)
|
python
|
def enableEffect(self, name, **kwargs):
"""Enable an effect in the KezMenu."""
if name not in VALID_EFFECTS:
raise KeyError("KezMenu doesn't know an effect of type %s" % name)
self.__getattribute__(
'_effectinit_{}'.format(name.replace("-", "_"))
)(name, **kwargs)
|
[
"def",
"enableEffect",
"(",
"self",
",",
"name",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"name",
"not",
"in",
"VALID_EFFECTS",
":",
"raise",
"KeyError",
"(",
"\"KezMenu doesn't know an effect of type %s\"",
"%",
"name",
")",
"self",
".",
"__getattribute__",
"(",
"'_effectinit_{}'",
".",
"format",
"(",
"name",
".",
"replace",
"(",
"\"-\"",
",",
"\"_\"",
")",
")",
")",
"(",
"name",
",",
"*",
"*",
"kwargs",
")"
] |
Enable an effect in the KezMenu.
|
[
"Enable",
"an",
"effect",
"in",
"the",
"KezMenu",
"."
] |
3b06f9cb67fdc98a73928f877eea86692f832fa4
|
https://github.com/a-tal/kezmenu3/blob/3b06f9cb67fdc98a73928f877eea86692f832fa4/kezmenu3/kezmenu_effects.py#L20-L26
|
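Editor's illustration (not part of the source record): all of the KezMenu effect entries route through the same trick, turning an effect name such as 'raise-line-padding-on-focus' into a method name and looking it up dynamically. A pygame-free sketch of that name-based dispatch; the class, effect name, and keyword are invented for illustration.

class EffectHost(object):
    VALID_EFFECTS = ('fade-in',)

    def __init__(self):
        self._effects = {}

    def enable_effect(self, name, **kwargs):
        # Validate, then dispatch to _effectinit_<name> with dashes rewritten
        # to underscores, the same shape as KezMenuEffectAble.enableEffect.
        if name not in self.VALID_EFFECTS:
            raise KeyError("unknown effect %s" % name)
        getattr(self, '_effectinit_' + name.replace('-', '_'))(name, **kwargs)

    def _effectinit_fade_in(self, name, duration=0.5):
        self._effects[name] = {'duration': duration}

host = EffectHost()
host.enable_effect('fade-in', duration=1.0)
print(host._effects)   # {'fade-in': {'duration': 1.0}}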
241,061
|
a-tal/kezmenu3
|
kezmenu3/kezmenu_effects.py
|
KezMenuEffectAble.disableEffect
|
def disableEffect(self, name):
"""Disable an effect."""
try:
del self._effects[name]
self.__getattribute__(
'_effectdisable_%s' % name.replace("-", "_")
)()
except KeyError:
pass
except AttributeError:
pass
|
python
|
def disableEffect(self, name):
"""Disable an effect."""
try:
del self._effects[name]
self.__getattribute__(
'_effectdisable_%s' % name.replace("-", "_")
)()
except KeyError:
pass
except AttributeError:
pass
|
[
"def",
"disableEffect",
"(",
"self",
",",
"name",
")",
":",
"try",
":",
"del",
"self",
".",
"_effects",
"[",
"name",
"]",
"self",
".",
"__getattribute__",
"(",
"'_effectdisable_%s'",
"%",
"name",
".",
"replace",
"(",
"\"-\"",
",",
"\"_\"",
")",
")",
"(",
")",
"except",
"KeyError",
":",
"pass",
"except",
"AttributeError",
":",
"pass"
] |
Disable an effect.
|
[
"Disable",
"an",
"effect",
"."
] |
3b06f9cb67fdc98a73928f877eea86692f832fa4
|
https://github.com/a-tal/kezmenu3/blob/3b06f9cb67fdc98a73928f877eea86692f832fa4/kezmenu3/kezmenu_effects.py#L28-L38
|
241,062
|
a-tal/kezmenu3
|
kezmenu3/kezmenu_effects.py
|
KezMenuEffectAble._updateEffects
|
def _updateEffects(self, time_passed):
"""Update method for the effects handle"""
for name in self._effects:
update_func = getattr(
self,
'_effectupdate_{}'.format(name.replace("-", "_")),
)
update_func(time_passed)
|
python
|
def _updateEffects(self, time_passed):
"""Update method for the effects handle"""
for name in self._effects:
update_func = getattr(
self,
'_effectupdate_{}'.format(name.replace("-", "_")),
)
update_func(time_passed)
|
[
"def",
"_updateEffects",
"(",
"self",
",",
"time_passed",
")",
":",
"for",
"name",
"in",
"self",
".",
"_effects",
":",
"update_func",
"=",
"getattr",
"(",
"self",
",",
"'_effectupdate_{}'",
".",
"format",
"(",
"name",
".",
"replace",
"(",
"\"-\"",
",",
"\"_\"",
")",
")",
",",
")",
"update_func",
"(",
"time_passed",
")"
] |
Update method for the effects handle
|
[
"Update",
"method",
"for",
"the",
"effects",
"handle"
] |
3b06f9cb67fdc98a73928f877eea86692f832fa4
|
https://github.com/a-tal/kezmenu3/blob/3b06f9cb67fdc98a73928f877eea86692f832fa4/kezmenu3/kezmenu_effects.py#L40-L48
|
241,063
|
a-tal/kezmenu3
|
kezmenu3/kezmenu_effects.py
|
KezMenuEffectAble._effectinit_enlarge_font_on_focus
|
def _effectinit_enlarge_font_on_focus(self, name, **kwargs):
"""Init the effect that enlarge the focused menu entry.
Keyword arguments can contain enlarge_time and enlarge_factor.
"""
self._effects[name] = kwargs
if "font" not in kwargs:
raise TypeError(
"enlarge_font_on_focus: font parameter is required"
)
if "size" not in kwargs:
raise TypeError(
"enlarge_font_on_focus: size parameter is required"
)
if "enlarge_time" not in kwargs:
kwargs['enlarge_time'] = 0.5
if "enlarge_factor" not in kwargs:
kwargs['enlarge_factor'] = 2.0
kwargs['raise_font_ps'] = (
kwargs['enlarge_factor'] / kwargs['enlarge_time'] # pixel-per-sec
)
for option in self.options:
option['font'] = pygame.font.Font(kwargs['font'], kwargs['size'])
option['font_current_size'] = kwargs['size']
option['raise_font_factor'] = 1.0
|
python
|
def _effectinit_enlarge_font_on_focus(self, name, **kwargs):
"""Init the effect that enlarge the focused menu entry.
Keyword arguments can contain enlarge_time and enlarge_factor.
"""
self._effects[name] = kwargs
if "font" not in kwargs:
raise TypeError(
"enlarge_font_on_focus: font parameter is required"
)
if "size" not in kwargs:
raise TypeError(
"enlarge_font_on_focus: size parameter is required"
)
if "enlarge_time" not in kwargs:
kwargs['enlarge_time'] = 0.5
if "enlarge_factor" not in kwargs:
kwargs['enlarge_factor'] = 2.0
kwargs['raise_font_ps'] = (
kwargs['enlarge_factor'] / kwargs['enlarge_time'] # pixel-per-sec
)
for option in self.options:
option['font'] = pygame.font.Font(kwargs['font'], kwargs['size'])
option['font_current_size'] = kwargs['size']
option['raise_font_factor'] = 1.0
|
[
"def",
"_effectinit_enlarge_font_on_focus",
"(",
"self",
",",
"name",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"_effects",
"[",
"name",
"]",
"=",
"kwargs",
"if",
"\"font\"",
"not",
"in",
"kwargs",
":",
"raise",
"TypeError",
"(",
"\"enlarge_font_on_focus: font parameter is required\"",
")",
"if",
"\"size\"",
"not",
"in",
"kwargs",
":",
"raise",
"TypeError",
"(",
"\"enlarge_font_on_focus: size parameter is required\"",
")",
"if",
"\"enlarge_time\"",
"not",
"in",
"kwargs",
":",
"kwargs",
"[",
"'enlarge_time'",
"]",
"=",
"0.5",
"if",
"\"enlarge_factor\"",
"not",
"in",
"kwargs",
":",
"kwargs",
"[",
"'enlarge_factor'",
"]",
"=",
"2.0",
"kwargs",
"[",
"'raise_font_ps'",
"]",
"=",
"(",
"kwargs",
"[",
"'enlarge_factor'",
"]",
"/",
"kwargs",
"[",
"'enlarge_time'",
"]",
"# pixel-per-sec",
")",
"for",
"option",
"in",
"self",
".",
"options",
":",
"option",
"[",
"'font'",
"]",
"=",
"pygame",
".",
"font",
".",
"Font",
"(",
"kwargs",
"[",
"'font'",
"]",
",",
"kwargs",
"[",
"'size'",
"]",
")",
"option",
"[",
"'font_current_size'",
"]",
"=",
"kwargs",
"[",
"'size'",
"]",
"option",
"[",
"'raise_font_factor'",
"]",
"=",
"1.0"
] |
Init the effect that enlarge the focused menu entry.
Keyword arguments can contain enlarge_time and enlarge_factor.
|
[
"Init",
"the",
"effect",
"that",
"enlarge",
"the",
"focused",
"menu",
"entry",
".",
"Keyword",
"arguments",
"can",
"contain",
"enlarge_time",
"and",
"enlarge_factor",
"."
] |
3b06f9cb67fdc98a73928f877eea86692f832fa4
|
https://github.com/a-tal/kezmenu3/blob/3b06f9cb67fdc98a73928f877eea86692f832fa4/kezmenu3/kezmenu_effects.py#L50-L75
|
241,064
|
a-tal/kezmenu3
|
kezmenu3/kezmenu_effects.py
|
KezMenuEffectAble._effectupdate_enlarge_font_on_focus
|
def _effectupdate_enlarge_font_on_focus(self, time_passed):
"""Gradually enlarge the font size of the focused line."""
data = self._effects['enlarge-font-on-focus']
fps = data['raise_font_ps']
final_size = data['size'] * data['enlarge_factor']
for i, option in enumerate(self.options):
if i == self.option:
# Raise me
if option['font_current_size'] < final_size:
option['raise_font_factor'] += fps * time_passed
elif option['font_current_size'] > final_size:
option['raise_font_factor'] = data['enlarge_factor']
elif option['raise_font_factor'] != 1.0:
# decrease
if option['raise_font_factor'] > 1.0:
option['raise_font_factor'] -= fps * time_passed
elif option['raise_font_factor'] < 1.0:
option['raise_font_factor'] = 1.0
new_size = int(data['size'] * option['raise_font_factor'])
if new_size != option['font_current_size']:
option['font'] = pygame.font.Font(data['font'], new_size)
option['font_current_size'] = new_size
|
python
|
def _effectupdate_enlarge_font_on_focus(self, time_passed):
"""Gradually enlarge the font size of the focused line."""
data = self._effects['enlarge-font-on-focus']
fps = data['raise_font_ps']
final_size = data['size'] * data['enlarge_factor']
for i, option in enumerate(self.options):
if i == self.option:
# Raise me
if option['font_current_size'] < final_size:
option['raise_font_factor'] += fps * time_passed
elif option['font_current_size'] > final_size:
option['raise_font_factor'] = data['enlarge_factor']
elif option['raise_font_factor'] != 1.0:
# decrease
if option['raise_font_factor'] > 1.0:
option['raise_font_factor'] -= fps * time_passed
elif option['raise_font_factor'] < 1.0:
option['raise_font_factor'] = 1.0
new_size = int(data['size'] * option['raise_font_factor'])
if new_size != option['font_current_size']:
option['font'] = pygame.font.Font(data['font'], new_size)
option['font_current_size'] = new_size
|
[
"def",
"_effectupdate_enlarge_font_on_focus",
"(",
"self",
",",
"time_passed",
")",
":",
"data",
"=",
"self",
".",
"_effects",
"[",
"'enlarge-font-on-focus'",
"]",
"fps",
"=",
"data",
"[",
"'raise_font_ps'",
"]",
"final_size",
"=",
"data",
"[",
"'size'",
"]",
"*",
"data",
"[",
"'enlarge_factor'",
"]",
"for",
"i",
",",
"option",
"in",
"enumerate",
"(",
"self",
".",
"options",
")",
":",
"if",
"i",
"==",
"self",
".",
"option",
":",
"# Raise me",
"if",
"option",
"[",
"'font_current_size'",
"]",
"<",
"final_size",
":",
"option",
"[",
"'raise_font_factor'",
"]",
"+=",
"fps",
"*",
"time_passed",
"elif",
"option",
"[",
"'font_current_size'",
"]",
">",
"final_size",
":",
"option",
"[",
"'raise_font_factor'",
"]",
"=",
"data",
"[",
"'enlarge_factor'",
"]",
"elif",
"option",
"[",
"'raise_font_factor'",
"]",
"!=",
"1.0",
":",
"# decrease",
"if",
"option",
"[",
"'raise_font_factor'",
"]",
">",
"1.0",
":",
"option",
"[",
"'raise_font_factor'",
"]",
"-=",
"fps",
"*",
"time_passed",
"elif",
"option",
"[",
"'raise_font_factor'",
"]",
"<",
"1.0",
":",
"option",
"[",
"'raise_font_factor'",
"]",
"=",
"1.0",
"new_size",
"=",
"int",
"(",
"data",
"[",
"'size'",
"]",
"*",
"option",
"[",
"'raise_font_factor'",
"]",
")",
"if",
"new_size",
"!=",
"option",
"[",
"'font_current_size'",
"]",
":",
"option",
"[",
"'font'",
"]",
"=",
"pygame",
".",
"font",
".",
"Font",
"(",
"data",
"[",
"'font'",
"]",
",",
"new_size",
")",
"option",
"[",
"'font_current_size'",
"]",
"=",
"new_size"
] |
Gradually enlarge the font size of the focused line.
|
[
"Gradually",
"enlarge",
"the",
"font",
"size",
"of",
"the",
"focused",
"line",
"."
] |
3b06f9cb67fdc98a73928f877eea86692f832fa4
|
https://github.com/a-tal/kezmenu3/blob/3b06f9cb67fdc98a73928f877eea86692f832fa4/kezmenu3/kezmenu_effects.py#L77-L100
|
241,065
|
a-tal/kezmenu3
|
kezmenu3/kezmenu_effects.py
|
KezMenuEffectAble._effectinit_raise_line_padding_on_focus
|
def _effectinit_raise_line_padding_on_focus(self, name, **kwargs):
"""Init the effect for the empty space around the focused entry.
Keyword arguments can contain enlarge_time and padding.
"""
self._effects[name] = kwargs
if "enlarge_time" not in kwargs:
kwargs['enlarge_time'] = 0.5
if "padding" not in kwargs:
kwargs['padding'] = 10
kwargs['padding_pps'] = kwargs['padding'] / kwargs['enlarge_time']
# Now, every menu voices need additional infos
for o in self.options:
o['padding_line'] = 0.0
|
python
|
def _effectinit_raise_line_padding_on_focus(self, name, **kwargs):
"""Init the effect for the empty space around the focused entry.
Keyword arguments can contain enlarge_time and padding.
"""
self._effects[name] = kwargs
if "enlarge_time" not in kwargs:
kwargs['enlarge_time'] = 0.5
if "padding" not in kwargs:
kwargs['padding'] = 10
kwargs['padding_pps'] = kwargs['padding'] / kwargs['enlarge_time']
# Now, every menu voices need additional infos
for o in self.options:
o['padding_line'] = 0.0
|
[
"def",
"_effectinit_raise_line_padding_on_focus",
"(",
"self",
",",
"name",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"_effects",
"[",
"name",
"]",
"=",
"kwargs",
"if",
"\"enlarge_time\"",
"not",
"in",
"kwargs",
":",
"kwargs",
"[",
"'enlarge_time'",
"]",
"=",
"0.5",
"if",
"\"padding\"",
"not",
"in",
"kwargs",
":",
"kwargs",
"[",
"'padding'",
"]",
"=",
"10",
"kwargs",
"[",
"'padding_pps'",
"]",
"=",
"kwargs",
"[",
"'padding'",
"]",
"/",
"kwargs",
"[",
"'enlarge_time'",
"]",
"# Now, every menu voices need additional infos",
"for",
"o",
"in",
"self",
".",
"options",
":",
"o",
"[",
"'padding_line'",
"]",
"=",
"0.0"
] |
Init the effect for the empty space around the focused entry.
Keyword arguments can contain enlarge_time and padding.
|
[
"Init",
"the",
"effect",
"for",
"the",
"empty",
"space",
"around",
"the",
"focused",
"entry",
".",
"Keyword",
"arguments",
"can",
"contain",
"enlarge_time",
"and",
"padding",
"."
] |
3b06f9cb67fdc98a73928f877eea86692f832fa4
|
https://github.com/a-tal/kezmenu3/blob/3b06f9cb67fdc98a73928f877eea86692f832fa4/kezmenu3/kezmenu_effects.py#L107-L120
|
241,066
|
a-tal/kezmenu3
|
kezmenu3/kezmenu_effects.py
|
KezMenuEffectAble._effectupdate_raise_line_padding_on_focus
|
def _effectupdate_raise_line_padding_on_focus(self, time_passed):
"""Gradually enlarge the padding of the focused line."""
data = self._effects['raise-line-padding-on-focus']
pps = data['padding_pps']
for i, option in enumerate(self.options):
if i == self.option:
# Raise me
if option['padding_line'] < data['padding']:
option['padding_line'] += pps * time_passed
elif option['padding_line'] > data['padding']:
option['padding_line'] = data['padding']
elif option['padding_line']:
if option['padding_line'] > 0:
option['padding_line'] -= pps * time_passed
elif option['padding_line'] < 0:
option['padding_line'] = 0
|
python
|
def _effectupdate_raise_line_padding_on_focus(self, time_passed):
"""Gradually enlarge the padding of the focused line."""
data = self._effects['raise-line-padding-on-focus']
pps = data['padding_pps']
for i, option in enumerate(self.options):
if i == self.option:
# Raise me
if option['padding_line'] < data['padding']:
option['padding_line'] += pps * time_passed
elif option['padding_line'] > data['padding']:
option['padding_line'] = data['padding']
elif option['padding_line']:
if option['padding_line'] > 0:
option['padding_line'] -= pps * time_passed
elif option['padding_line'] < 0:
option['padding_line'] = 0
|
[
"def",
"_effectupdate_raise_line_padding_on_focus",
"(",
"self",
",",
"time_passed",
")",
":",
"data",
"=",
"self",
".",
"_effects",
"[",
"'raise-line-padding-on-focus'",
"]",
"pps",
"=",
"data",
"[",
"'padding_pps'",
"]",
"for",
"i",
",",
"option",
"in",
"enumerate",
"(",
"self",
".",
"options",
")",
":",
"if",
"i",
"==",
"self",
".",
"option",
":",
"# Raise me",
"if",
"option",
"[",
"'padding_line'",
"]",
"<",
"data",
"[",
"'padding'",
"]",
":",
"option",
"[",
"'padding_line'",
"]",
"+=",
"pps",
"*",
"time_passed",
"elif",
"option",
"[",
"'padding_line'",
"]",
">",
"data",
"[",
"'padding'",
"]",
":",
"option",
"[",
"'padding_line'",
"]",
"=",
"data",
"[",
"'padding'",
"]",
"elif",
"option",
"[",
"'padding_line'",
"]",
":",
"if",
"option",
"[",
"'padding_line'",
"]",
">",
"0",
":",
"option",
"[",
"'padding_line'",
"]",
"-=",
"pps",
"*",
"time_passed",
"elif",
"option",
"[",
"'padding_line'",
"]",
"<",
"0",
":",
"option",
"[",
"'padding_line'",
"]",
"=",
"0"
] |
Gradually enlarge the padding of the focused line.
|
[
"Gradually",
"enlarge",
"the",
"padding",
"of",
"the",
"focused",
"line",
"."
] |
3b06f9cb67fdc98a73928f877eea86692f832fa4
|
https://github.com/a-tal/kezmenu3/blob/3b06f9cb67fdc98a73928f877eea86692f832fa4/kezmenu3/kezmenu_effects.py#L122-L138
|
241,067
|
a-tal/kezmenu3
|
kezmenu3/kezmenu_effects.py
|
KezMenuEffectAble._effectinit_raise_col_padding_on_focus
|
def _effectinit_raise_col_padding_on_focus(self, name, **kwargs):
"""Init the column padding on focus effect.
Keyword arguments can contain enlarge_time and padding.
"""
self._effects[name] = kwargs
if "enlarge_time" not in kwargs:
kwargs['enlarge_time'] = 0.5
if "padding" not in kwargs:
kwargs['padding'] = 10
kwargs['padding_pps'] = kwargs['padding'] / kwargs['enlarge_time']
for option in self.options:
option['padding_col'] = 0.0
|
python
|
def _effectinit_raise_col_padding_on_focus(self, name, **kwargs):
"""Init the column padding on focus effect.
Keyword arguments can contain enlarge_time and padding.
"""
self._effects[name] = kwargs
if "enlarge_time" not in kwargs:
kwargs['enlarge_time'] = 0.5
if "padding" not in kwargs:
kwargs['padding'] = 10
kwargs['padding_pps'] = kwargs['padding'] / kwargs['enlarge_time']
for option in self.options:
option['padding_col'] = 0.0
|
[
"def",
"_effectinit_raise_col_padding_on_focus",
"(",
"self",
",",
"name",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"_effects",
"[",
"name",
"]",
"=",
"kwargs",
"if",
"\"enlarge_time\"",
"not",
"in",
"kwargs",
":",
"kwargs",
"[",
"'enlarge_time'",
"]",
"=",
"0.5",
"if",
"\"padding\"",
"not",
"in",
"kwargs",
":",
"kwargs",
"[",
"'padding'",
"]",
"=",
"10",
"kwargs",
"[",
"'padding_pps'",
"]",
"=",
"kwargs",
"[",
"'padding'",
"]",
"/",
"kwargs",
"[",
"'enlarge_time'",
"]",
"for",
"option",
"in",
"self",
".",
"options",
":",
"option",
"[",
"'padding_col'",
"]",
"=",
"0.0"
] |
Init the column padding on focus effect.
Keyword arguments can contain enlarge_time and padding.
|
[
"Init",
"the",
"column",
"padding",
"on",
"focus",
"effect",
".",
"Keyword",
"arguments",
"can",
"contain",
"enlarge_time",
"and",
"padding",
"."
] |
3b06f9cb67fdc98a73928f877eea86692f832fa4
|
https://github.com/a-tal/kezmenu3/blob/3b06f9cb67fdc98a73928f877eea86692f832fa4/kezmenu3/kezmenu_effects.py#L146-L159
|
241,068
|
crcresearch/py-utils
|
crc_nd/utils/django.py
|
make_choices_tuple
|
def make_choices_tuple(choices, get_display_name):
"""
Make a tuple for the choices parameter for a data model field.
:param choices: sequence of valid values for the model field
:param get_display_name: callable that returns the human-readable name for a choice
:return: A tuple of 2-tuples (choice, display_name) suitable for the choices parameter
"""
assert callable(get_display_name)
return tuple((x, get_display_name(x)) for x in choices)
|
python
|
def make_choices_tuple(choices, get_display_name):
"""
Make a tuple for the choices parameter for a data model field.
:param choices: sequence of valid values for the model field
:param get_display_name: callable that returns the human-readable name for a choice
:return: A tuple of 2-tuples (choice, display_name) suitable for the choices parameter
"""
assert callable(get_display_name)
return tuple((x, get_display_name(x)) for x in choices)
|
[
"def",
"make_choices_tuple",
"(",
"choices",
",",
"get_display_name",
")",
":",
"assert",
"callable",
"(",
"get_display_name",
")",
"return",
"tuple",
"(",
"(",
"x",
",",
"get_display_name",
"(",
"x",
")",
")",
"for",
"x",
"in",
"choices",
")"
] |
Make a tuple for the choices parameter for a data model field.
:param choices: sequence of valid values for the model field
:param get_display_name: callable that returns the human-readable name for a choice
:return: A tuple of 2-tuples (choice, display_name) suitable for the choices parameter
|
[
"Make",
"a",
"tuple",
"for",
"the",
"choices",
"parameter",
"for",
"a",
"data",
"model",
"field",
"."
] |
04caf0425a047baf900da726cf47c42413b0dd81
|
https://github.com/crcresearch/py-utils/blob/04caf0425a047baf900da726cf47c42413b0dd81/crc_nd/utils/django.py#L16-L26
|
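Editor's illustration (not part of the source record): make_choices_tuple pairs each raw value with a display name, the shape Django expects for a model field's choices. A tiny usage sketch; the function body is copied from the record, while the status values and the display-name lambda are invented.

def make_choices_tuple(choices, get_display_name):
    assert callable(get_display_name)
    return tuple((x, get_display_name(x)) for x in choices)

STATUSES = ('new', 'in_progress', 'done')
choices = make_choices_tuple(STATUSES, lambda s: s.replace('_', ' ').title())
print(choices)
# (('new', 'New'), ('in_progress', 'In Progress'), ('done', 'Done'))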
241,069
|
jaraco/jaraco.ui
|
jaraco/ui/editor.py
|
EditableFile.edit
|
def edit(self):
"""
Edit the file
"""
self.changed = False
with self:
editor = self.get_editor()
cmd = [editor, self.name]
try:
res = subprocess.call(cmd)
except Exception as e:
print("Error launching editor %(editor)s" % locals())
print(e)
return
if res != 0:
msg = '%(editor)s returned error status %(res)d' % locals()
raise EditProcessException(msg)
new_data = self.read()
if new_data != self.data:
self.changed = self._save_diff(self.data, new_data)
self.data = new_data
|
python
|
def edit(self):
"""
Edit the file
"""
self.changed = False
with self:
editor = self.get_editor()
cmd = [editor, self.name]
try:
res = subprocess.call(cmd)
except Exception as e:
print("Error launching editor %(editor)s" % locals())
print(e)
return
if res != 0:
msg = '%(editor)s returned error status %(res)d' % locals()
raise EditProcessException(msg)
new_data = self.read()
if new_data != self.data:
self.changed = self._save_diff(self.data, new_data)
self.data = new_data
|
[
"def",
"edit",
"(",
"self",
")",
":",
"self",
".",
"changed",
"=",
"False",
"with",
"self",
":",
"editor",
"=",
"self",
".",
"get_editor",
"(",
")",
"cmd",
"=",
"[",
"editor",
",",
"self",
".",
"name",
"]",
"try",
":",
"res",
"=",
"subprocess",
".",
"call",
"(",
"cmd",
")",
"except",
"Exception",
"as",
"e",
":",
"print",
"(",
"\"Error launching editor %(editor)s\"",
"%",
"locals",
"(",
")",
")",
"print",
"(",
"e",
")",
"return",
"if",
"res",
"!=",
"0",
":",
"msg",
"=",
"'%(editor)s returned error status %(res)d'",
"%",
"locals",
"(",
")",
"raise",
"EditProcessException",
"(",
"msg",
")",
"new_data",
"=",
"self",
".",
"read",
"(",
")",
"if",
"new_data",
"!=",
"self",
".",
"data",
":",
"self",
".",
"changed",
"=",
"self",
".",
"_save_diff",
"(",
"self",
".",
"data",
",",
"new_data",
")",
"self",
".",
"data",
"=",
"new_data"
] |
Edit the file
|
[
"Edit",
"the",
"file"
] |
10e844c03f3afb3d37bd5d727ba9334af2547fcf
|
https://github.com/jaraco/jaraco.ui/blob/10e844c03f3afb3d37bd5d727ba9334af2547fcf/jaraco/ui/editor.py#L63-L83
|
241,070
|
jaraco/jaraco.ui
|
jaraco/ui/editor.py
|
EditableFile._search_env
|
def _search_env(keys):
"""
Search the environment for the supplied keys, returning the first
one found or None if none was found.
"""
matches = (os.environ[key] for key in keys if key in os.environ)
return next(matches, None)
|
python
|
def _search_env(keys):
"""
Search the environment for the supplied keys, returning the first
one found or None if none was found.
"""
matches = (os.environ[key] for key in keys if key in os.environ)
return next(matches, None)
|
[
"def",
"_search_env",
"(",
"keys",
")",
":",
"matches",
"=",
"(",
"os",
".",
"environ",
"[",
"key",
"]",
"for",
"key",
"in",
"keys",
"if",
"key",
"in",
"os",
".",
"environ",
")",
"return",
"next",
"(",
"matches",
",",
"None",
")"
] |
Search the environment for the supplied keys, returning the first
one found or None if none was found.
|
[
"Search",
"the",
"environment",
"for",
"the",
"supplied",
"keys",
"returning",
"the",
"first",
"one",
"found",
"or",
"None",
"if",
"none",
"was",
"found",
"."
] |
10e844c03f3afb3d37bd5d727ba9334af2547fcf
|
https://github.com/jaraco/jaraco.ui/blob/10e844c03f3afb3d37bd5d727ba9334af2547fcf/jaraco/ui/editor.py#L86-L92
|
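Editor's illustration (not part of the source record): _search_env is a compact "first environment variable that is set" helper built from a generator plus next(). A standalone sketch of the same pattern, with a fallback default added for the usage line.

import os

def search_env(keys):
    # Return the value of the first key present in os.environ, else None.
    matches = (os.environ[key] for key in keys if key in os.environ)
    return next(matches, None)

editor = search_env(['XML_EDITOR', 'EDITOR']) or 'vi'  # 'vi' is an illustrative fallback
print(editor)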
241,071
|
jaraco/jaraco.ui
|
jaraco/ui/editor.py
|
EditableFile.get_editor
|
def get_editor(self):
"""
Give preference to an XML_EDITOR or EDITOR defined in the
environment. Otherwise use a default editor based on platform.
"""
env_search = ['EDITOR']
if 'xml' in self.content_type:
env_search.insert(0, 'XML_EDITOR')
default_editor = self.platform_default_editors[sys.platform]
return self._search_env(env_search) or default_editor
|
python
|
def get_editor(self):
"""
Give preference to an XML_EDITOR or EDITOR defined in the
environment. Otherwise use a default editor based on platform.
"""
env_search = ['EDITOR']
if 'xml' in self.content_type:
env_search.insert(0, 'XML_EDITOR')
default_editor = self.platform_default_editors[sys.platform]
return self._search_env(env_search) or default_editor
|
[
"def",
"get_editor",
"(",
"self",
")",
":",
"env_search",
"=",
"[",
"'EDITOR'",
"]",
"if",
"'xml'",
"in",
"self",
".",
"content_type",
":",
"env_search",
".",
"insert",
"(",
"0",
",",
"'XML_EDITOR'",
")",
"default_editor",
"=",
"self",
".",
"platform_default_editors",
"[",
"sys",
".",
"platform",
"]",
"return",
"self",
".",
"_search_env",
"(",
"env_search",
")",
"or",
"default_editor"
] |
Give preference to an XML_EDITOR or EDITOR defined in the
environment. Otherwise use a default editor based on platform.
|
[
"Give",
"preference",
"to",
"an",
"XML_EDITOR",
"or",
"EDITOR",
"defined",
"in",
"the",
"environment",
".",
"Otherwise",
"use",
"a",
"default",
"editor",
"based",
"on",
"platform",
"."
] |
10e844c03f3afb3d37bd5d727ba9334af2547fcf
|
https://github.com/jaraco/jaraco.ui/blob/10e844c03f3afb3d37bd5d727ba9334af2547fcf/jaraco/ui/editor.py#L94-L103
|
241,072
|
sbuss/pypercube
|
pypercube/event.py
|
Event.from_json
|
def from_json(cls, json_obj):
"""Build an Event from JSON.
:param json_obj: JSON data representing a Cube Event
:type json_obj: `String` or `json`
:throws: `InvalidEventError` when any of time field is not present
in json_obj.
"""
if isinstance(json_obj, str):
json_obj = json.loads(json_obj)
type = None
time = None
data = None
if cls.TYPE_FIELD_NAME in json_obj:
type = json_obj[cls.TYPE_FIELD_NAME]
if cls.TIME_FIELD_NAME in json_obj:
time = json_obj[cls.TIME_FIELD_NAME]
else:
raise InvalidEventError("{field} must be present!".format(
field=cls.TIME_FIELD_NAME))
if cls.DATA_FIELD_NAME in json_obj:
data = json_obj[cls.DATA_FIELD_NAME]
return cls(type, time, data)
|
python
|
def from_json(cls, json_obj):
"""Build an Event from JSON.
:param json_obj: JSON data representing a Cube Event
:type json_obj: `String` or `json`
:throws: `InvalidEventError` when any of time field is not present
in json_obj.
"""
if isinstance(json_obj, str):
json_obj = json.loads(json_obj)
type = None
time = None
data = None
if cls.TYPE_FIELD_NAME in json_obj:
type = json_obj[cls.TYPE_FIELD_NAME]
if cls.TIME_FIELD_NAME in json_obj:
time = json_obj[cls.TIME_FIELD_NAME]
else:
raise InvalidEventError("{field} must be present!".format(
field=cls.TIME_FIELD_NAME))
if cls.DATA_FIELD_NAME in json_obj:
data = json_obj[cls.DATA_FIELD_NAME]
return cls(type, time, data)
|
[
"def",
"from_json",
"(",
"cls",
",",
"json_obj",
")",
":",
"if",
"isinstance",
"(",
"json_obj",
",",
"str",
")",
":",
"json_obj",
"=",
"json",
".",
"loads",
"(",
"json_obj",
")",
"type",
"=",
"None",
"time",
"=",
"None",
"data",
"=",
"None",
"if",
"cls",
".",
"TYPE_FIELD_NAME",
"in",
"json_obj",
":",
"type",
"=",
"json_obj",
"[",
"cls",
".",
"TYPE_FIELD_NAME",
"]",
"if",
"cls",
".",
"TIME_FIELD_NAME",
"in",
"json_obj",
":",
"time",
"=",
"json_obj",
"[",
"cls",
".",
"TIME_FIELD_NAME",
"]",
"else",
":",
"raise",
"InvalidEventError",
"(",
"\"{field} must be present!\"",
".",
"format",
"(",
"field",
"=",
"cls",
".",
"TIME_FIELD_NAME",
")",
")",
"if",
"cls",
".",
"DATA_FIELD_NAME",
"in",
"json_obj",
":",
"data",
"=",
"json_obj",
"[",
"cls",
".",
"DATA_FIELD_NAME",
"]",
"return",
"cls",
"(",
"type",
",",
"time",
",",
"data",
")"
] |
Build an Event from JSON.
:param json_obj: JSON data representing a Cube Event
:type json_obj: `String` or `json`
:throws: `InvalidEventError` when any of time field is not present
in json_obj.
|
[
"Build",
"an",
"Event",
"from",
"JSON",
"."
] |
e9d2cca9c004b8bad6d1e0b68b080f887a186a22
|
https://github.com/sbuss/pypercube/blob/e9d2cca9c004b8bad6d1e0b68b080f887a186a22/pypercube/event.py#L31-L57
|
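Editor's illustration (not part of the source record): Event.from_json accepts either a JSON string or an already-decoded mapping, insists on the time field, and treats type and data as optional. A minimal standalone class showing that parse-and-validate shape; the class name, shortened field names, and sample payload are invented for illustration.

import json

class MiniEvent(object):
    def __init__(self, type, time, data):
        self.type, self.time, self.data = type, time, data

    @classmethod
    def from_json(cls, json_obj):
        # Accept a string or a dict, require 'time', default the rest to None.
        if isinstance(json_obj, str):
            json_obj = json.loads(json_obj)
        if 'time' not in json_obj:
            raise ValueError('time must be present!')
        return cls(json_obj.get('type'), json_obj['time'], json_obj.get('data'))

e = MiniEvent.from_json('{"type": "request", "time": "2012-01-01T00:00:00"}')
print(e.type, e.time, e.data)   # request 2012-01-01T00:00:00 None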
241,073
|
Perfectial/django-view-acl
|
view_acl/__init__.py
|
autodiscover
|
def autodiscover():
"""Autodiscover for urls.py"""
# Get permissions based on urlpatterns from urls.py
url_conf = getattr(settings, 'ROOT_URLCONF', ())
resolver = urlresolvers.get_resolver(url_conf)
urlpatterns = resolver.url_patterns
permissions = generate_permissions(urlpatterns)
# Refresh permissions
refresh_permissions(permissions)
|
python
|
def autodiscover():
"""Autodiscover for urls.py"""
# Get permissions based on urlpatterns from urls.py
url_conf = getattr(settings, 'ROOT_URLCONF', ())
resolver = urlresolvers.get_resolver(url_conf)
urlpatterns = resolver.url_patterns
permissions = generate_permissions(urlpatterns)
# Refresh permissions
refresh_permissions(permissions)
|
[
"def",
"autodiscover",
"(",
")",
":",
"# Get permissions based on urlpatterns from urls.py",
"url_conf",
"=",
"getattr",
"(",
"settings",
",",
"'ROOT_URLCONF'",
",",
"(",
")",
")",
"resolver",
"=",
"urlresolvers",
".",
"get_resolver",
"(",
"url_conf",
")",
"urlpatterns",
"=",
"resolver",
".",
"url_patterns",
"permissions",
"=",
"generate_permissions",
"(",
"urlpatterns",
")",
"# Refresh permissions",
"refresh_permissions",
"(",
"permissions",
")"
] |
Autodiscover for urls.py
|
[
"Autodiscover",
"for",
"urls",
".",
"py"
] |
71f514f65761895bc64d5ca735997c5455c254fa
|
https://github.com/Perfectial/django-view-acl/blob/71f514f65761895bc64d5ca735997c5455c254fa/view_acl/__init__.py#L7-L15
|
241,074
|
Apitax/Apitax
|
apitax/api/controllers/drivers_controller.py
|
get_driver_blacklist
|
def get_driver_blacklist(driver): # noqa: E501
"""Retrieve the blacklist in the driver
Retrieve the blacklist in the driver # noqa: E501
:param driver: The driver to use for the request. ie. github
:type driver: str
:rtype: Response
"""
response = errorIfUnauthorized(role='admin')
if response:
return response
else:
response = ApitaxResponse()
driver: Driver = LoadedDrivers.getDriver(driver)
response.body.add({'blacklist': driver.getDriverBlacklist()})
return Response(status=200, body=response.getResponseBody())
|
python
|
def get_driver_blacklist(driver): # noqa: E501
"""Retrieve the blacklist in the driver
Retrieve the blacklist in the driver # noqa: E501
:param driver: The driver to use for the request. ie. github
:type driver: str
:rtype: Response
"""
response = errorIfUnauthorized(role='admin')
if response:
return response
else:
response = ApitaxResponse()
driver: Driver = LoadedDrivers.getDriver(driver)
response.body.add({'blacklist': driver.getDriverBlacklist()})
return Response(status=200, body=response.getResponseBody())
|
[
"def",
"get_driver_blacklist",
"(",
"driver",
")",
":",
"# noqa: E501",
"response",
"=",
"errorIfUnauthorized",
"(",
"role",
"=",
"'admin'",
")",
"if",
"response",
":",
"return",
"response",
"else",
":",
"response",
"=",
"ApitaxResponse",
"(",
")",
"driver",
":",
"Driver",
"=",
"LoadedDrivers",
".",
"getDriver",
"(",
"driver",
")",
"response",
".",
"body",
".",
"add",
"(",
"{",
"'blacklist'",
":",
"driver",
".",
"getDriverBlacklist",
"(",
")",
"}",
")",
"return",
"Response",
"(",
"status",
"=",
"200",
",",
"body",
"=",
"response",
".",
"getResponseBody",
"(",
")",
")"
] |
Retrieve the blacklist in the driver
Retrieve the blacklist in the driver # noqa: E501
:param driver: The driver to use for the request. ie. github
:type driver: str
:rtype: Response
|
[
"Retrieve",
"the",
"blacklist",
"in",
"the",
"driver"
] |
3883e45f17e01eba4edac9d1bba42f0e7a748682
|
https://github.com/Apitax/Apitax/blob/3883e45f17e01eba4edac9d1bba42f0e7a748682/apitax/api/controllers/drivers_controller.py#L16-L36
|
241,075
|
Apitax/Apitax
|
apitax/api/controllers/drivers_controller.py
|
get_driver_config
|
def get_driver_config(driver): # noqa: E501
"""Retrieve the config of a loaded driver
Retrieve the config of a loaded driver # noqa: E501
:param driver: The driver to use for the request. ie. github
:type driver: str
:rtype: Response
"""
response = errorIfUnauthorized(role='admin')
if response:
return response
else:
response = ApitaxResponse()
# TODO: This needs an implementation, but likely requires a change to configs in apitaxcore
return Response(status=200, body=response.getResponseBody())
|
python
|
def get_driver_config(driver): # noqa: E501
"""Retrieve the config of a loaded driver
Retrieve the config of a loaded driver # noqa: E501
:param driver: The driver to use for the request. ie. github
:type driver: str
:rtype: Response
"""
response = errorIfUnauthorized(role='admin')
if response:
return response
else:
response = ApitaxResponse()
# TODO: This needs an implementation, but likely requires a change to configs in apitaxcore
return Response(status=200, body=response.getResponseBody())
|
[
"def",
"get_driver_config",
"(",
"driver",
")",
":",
"# noqa: E501",
"response",
"=",
"errorIfUnauthorized",
"(",
"role",
"=",
"'admin'",
")",
"if",
"response",
":",
"return",
"response",
"else",
":",
"response",
"=",
"ApitaxResponse",
"(",
")",
"# TODO: This needs an implementation, but likely requires a change to configs in apitaxcore",
"return",
"Response",
"(",
"status",
"=",
"200",
",",
"body",
"=",
"response",
".",
"getResponseBody",
"(",
")",
")"
] |
Retrieve the config of a loaded driver
Retrieve the config of a loaded driver # noqa: E501
:param driver: The driver to use for the request. ie. github
:type driver: str
:rtype: Response
|
[
"Retrieve",
"the",
"config",
"of",
"a",
"loaded",
"driver"
] |
3883e45f17e01eba4edac9d1bba42f0e7a748682
|
https://github.com/Apitax/Apitax/blob/3883e45f17e01eba4edac9d1bba42f0e7a748682/apitax/api/controllers/drivers_controller.py#L39-L58
|
241,076
|
Apitax/Apitax
|
apitax/api/controllers/drivers_controller.py
|
get_driver_list
|
def get_driver_list(): # noqa: E501
"""Retrieve the catalog of drivers
Retrieve the catalog of drivers # noqa: E501
:rtype: Response
"""
response = errorIfUnauthorized(role='admin')
if response:
return response
else:
response = ApitaxResponse()
response.body.add({'drivers': LoadedDrivers.drivers})
return Response(status=200, body=response.getResponseBody())
|
python
|
def get_driver_list(): # noqa: E501
"""Retrieve the catalog of drivers
Retrieve the catalog of drivers # noqa: E501
:rtype: Response
"""
response = errorIfUnauthorized(role='admin')
if response:
return response
else:
response = ApitaxResponse()
response.body.add({'drivers': LoadedDrivers.drivers})
return Response(status=200, body=response.getResponseBody())
|
[
"def",
"get_driver_list",
"(",
")",
":",
"# noqa: E501",
"response",
"=",
"errorIfUnauthorized",
"(",
"role",
"=",
"'admin'",
")",
"if",
"response",
":",
"return",
"response",
"else",
":",
"response",
"=",
"ApitaxResponse",
"(",
")",
"response",
".",
"body",
".",
"add",
"(",
"{",
"'drivers'",
":",
"LoadedDrivers",
".",
"drivers",
"}",
")",
"return",
"Response",
"(",
"status",
"=",
"200",
",",
"body",
"=",
"response",
".",
"getResponseBody",
"(",
")",
")"
] |
Retrieve the catalog of drivers
Retrieve the catalog of drivers # noqa: E501
:rtype: Response
|
[
"Retrieve",
"the",
"catalog",
"of",
"drivers"
] |
3883e45f17e01eba4edac9d1bba42f0e7a748682
|
https://github.com/Apitax/Apitax/blob/3883e45f17e01eba4edac9d1bba42f0e7a748682/apitax/api/controllers/drivers_controller.py#L61-L78
|
241,077
|
Apitax/Apitax
|
apitax/api/controllers/drivers_controller.py
|
get_driver_whitelist
|
def get_driver_whitelist(driver): # noqa: E501
"""Retrieve the whitelist in the driver
Retrieve the whitelist in the driver # noqa: E501
:param driver: The driver to use for the request. ie. github
:type driver: str
:rtype: Response
"""
response = errorIfUnauthorized(role='admin')
if response:
return response
else:
response = ApitaxResponse()
driver: Driver = LoadedDrivers.getDriver(driver)
response.body.add({'whitelist': driver.getDriverWhitelist()})
return Response(status=200, body=response.getResponseBody())
|
python
|
def get_driver_whitelist(driver): # noqa: E501
"""Retrieve the whitelist in the driver
Retrieve the whitelist in the driver # noqa: E501
:param driver: The driver to use for the request. ie. github
:type driver: str
:rtype: Response
"""
response = errorIfUnauthorized(role='admin')
if response:
return response
else:
response = ApitaxResponse()
driver: Driver = LoadedDrivers.getDriver(driver)
response.body.add({'whitelist': driver.getDriverWhitelist()})
return Response(status=200, body=response.getResponseBody())
|
[
"def",
"get_driver_whitelist",
"(",
"driver",
")",
":",
"# noqa: E501",
"response",
"=",
"errorIfUnauthorized",
"(",
"role",
"=",
"'admin'",
")",
"if",
"response",
":",
"return",
"response",
"else",
":",
"response",
"=",
"ApitaxResponse",
"(",
")",
"driver",
":",
"Driver",
"=",
"LoadedDrivers",
".",
"getDriver",
"(",
"driver",
")",
"response",
".",
"body",
".",
"add",
"(",
"{",
"'whitelist'",
":",
"driver",
".",
"getDriverWhitelist",
"(",
")",
"}",
")",
"return",
"Response",
"(",
"status",
"=",
"200",
",",
"body",
"=",
"response",
".",
"getResponseBody",
"(",
")",
")"
] |
Retrieve the whitelist in the driver
Retrieve the whitelist in the driver # noqa: E501
:param driver: The driver to use for the request. ie. github
:type driver: str
:rtype: Response
|
[
"Retrieve",
"the",
"whitelist",
"in",
"the",
"driver"
] |
3883e45f17e01eba4edac9d1bba42f0e7a748682
|
https://github.com/Apitax/Apitax/blob/3883e45f17e01eba4edac9d1bba42f0e7a748682/apitax/api/controllers/drivers_controller.py#L116-L136
|
241,078
|
bitlabstudio/django-tagging-translated
|
tagging_translated/models.py
|
tag_post_save_handler
|
def tag_post_save_handler(sender, **kwargs):
"""
Makes sure that a translation is created when a tag is saved.
Also ensures that the original tag name gets updated when the english
translation is updated.
TODO: This will create two tags when a tag is saved through the admin
"""
instance = kwargs.get('instance')
try:
translation = instance.tagtitle_set.get(language='en')
except TagTitle.DoesNotExist:
translation = TagTitle.objects.create(
trans_name=instance.name, tag=instance, language='en')
if translation.trans_name != instance.name:
instance.name = translation.trans_name
instance.save_base(raw=True)
|
python
|
def tag_post_save_handler(sender, **kwargs):
"""
Makes sure that a translation is created when a tag is saved.
Also ensures that the original tag name gets updated when the english
translation is updated.
TODO: This will create two tags when a tag is saved through the admin
"""
instance = kwargs.get('instance')
try:
translation = instance.tagtitle_set.get(language='en')
except TagTitle.DoesNotExist:
translation = TagTitle.objects.create(
trans_name=instance.name, tag=instance, language='en')
if translation.trans_name != instance.name:
instance.name = translation.trans_name
instance.save_base(raw=True)
|
[
"def",
"tag_post_save_handler",
"(",
"sender",
",",
"*",
"*",
"kwargs",
")",
":",
"instance",
"=",
"kwargs",
".",
"get",
"(",
"'instance'",
")",
"try",
":",
"translation",
"=",
"instance",
".",
"tagtitle_set",
".",
"get",
"(",
"language",
"=",
"'en'",
")",
"except",
"TagTitle",
".",
"DoesNotExist",
":",
"translation",
"=",
"TagTitle",
".",
"objects",
".",
"create",
"(",
"trans_name",
"=",
"instance",
".",
"name",
",",
"tag",
"=",
"instance",
",",
"language",
"=",
"'en'",
")",
"if",
"translation",
".",
"trans_name",
"!=",
"instance",
".",
"name",
":",
"instance",
".",
"name",
"=",
"translation",
".",
"trans_name",
"instance",
".",
"save_base",
"(",
"raw",
"=",
"True",
")"
] |
Makes sure that a translation is created when a tag is saved.
Also ensures that the original tag name gets updated when the english
translation is updated.
TODO: This will create two tags when a tag is saved through the admin
|
[
"Makes",
"sure",
"that",
"a",
"translation",
"is",
"created",
"when",
"a",
"tag",
"is",
"saved",
"."
] |
8766d88077959efb07a97a116538e53b800b6019
|
https://github.com/bitlabstudio/django-tagging-translated/blob/8766d88077959efb07a97a116538e53b800b6019/tagging_translated/models.py#L23-L41
|
241,079
|
tmr232/rage
|
rage/rage.py
|
require_editable
|
def require_editable(f):
"""
Makes sure the registry key is editable before trying to edit it.
"""
def wrapper(self, *args, **kwargs):
if not self._edit:
raise RegistryKeyNotEditable("The key is not set as editable.")
return f(self, *args, **kwargs)
return wrapper
|
python
|
def require_editable(f):
"""
Makes sure the registry key is editable before trying to edit it.
"""
def wrapper(self, *args, **kwargs):
if not self._edit:
raise RegistryKeyNotEditable("The key is not set as editable.")
return f(self, *args, **kwargs)
return wrapper
|
[
"def",
"require_editable",
"(",
"f",
")",
":",
"def",
"wrapper",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"self",
".",
"_edit",
":",
"raise",
"RegistryKeyNotEditable",
"(",
"\"The key is not set as editable.\"",
")",
"return",
"f",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"wrapper"
] |
Makes sure the registry key is editable before trying to edit it.
|
[
"Makes",
"sure",
"the",
"registry",
"key",
"is",
"editable",
"before",
"trying",
"to",
"edit",
"it",
"."
] |
0973a43a6956e8bacd14245509b26608f6649edb
|
https://github.com/tmr232/rage/blob/0973a43a6956e8bacd14245509b26608f6649edb/rage/rage.py#L31-L41
|
241,080
|
Synerty/pytmpdir
|
pytmpdir/Directory.py
|
Directory.createHiddenFolder
|
def createHiddenFolder(self) -> 'File':
""" Create Hidden Folder
Create a hidden folder. Raise exception if auto delete isn't True.
@return: Created folder.
"""
if not self._autoDelete:
raise Exception("Hidden folders can only be created within"
" an autoDelete directory")
return tempfile.mkdtemp(dir=self._path, prefix=".")
|
python
|
def createHiddenFolder(self) -> 'File':
""" Create Hidden Folder
Create a hidden folder. Raise exception if auto delete isn't True.
@return: Created folder.
"""
if not self._autoDelete:
raise Exception("Hidden folders can only be created within"
" an autoDelete directory")
return tempfile.mkdtemp(dir=self._path, prefix=".")
|
[
"def",
"createHiddenFolder",
"(",
"self",
")",
"->",
"'File'",
":",
"if",
"not",
"self",
".",
"_autoDelete",
":",
"raise",
"Exception",
"(",
"\"Hidden folders can only be created within\"",
"\" an autoDelete directory\"",
")",
"return",
"tempfile",
".",
"mkdtemp",
"(",
"dir",
"=",
"self",
".",
"_path",
",",
"prefix",
"=",
"\".\"",
")"
] |
Create Hidden Folder
Create a hidden folder. Raise exception if auto delete isn't True.
@return: Created folder.
|
[
"Create",
"Hidden",
"Folder"
] |
8f21d7a0b28d4f5c3a0ed91f9660ac5310773605
|
https://github.com/Synerty/pytmpdir/blob/8f21d7a0b28d4f5c3a0ed91f9660ac5310773605/pytmpdir/Directory.py#L220-L231
|
241,081
|
Synerty/pytmpdir
|
pytmpdir/Directory.py
|
Directory._listFilesWin
|
def _listFilesWin(self) -> ['File']:
""" List Files for Windows OS
Search and list the files and folder in the current directory for the
Windows file system.
@return: List of directory files and folders.
"""
output = []
for dirname, dirnames, filenames in os.walk(self._path):
for subdirname in dirnames:
output.append(os.path.join(dirname, subdirname))
for filename in filenames:
output.append(os.path.join(dirname, filename))
return output
|
python
|
def _listFilesWin(self) -> ['File']:
""" List Files for Windows OS
Search and list the files and folder in the current directory for the
Windows file system.
@return: List of directory files and folders.
"""
output = []
for dirname, dirnames, filenames in os.walk(self._path):
for subdirname in dirnames:
output.append(os.path.join(dirname, subdirname))
for filename in filenames:
output.append(os.path.join(dirname, filename))
return output
|
[
"def",
"_listFilesWin",
"(",
"self",
")",
"->",
"[",
"'File'",
"]",
":",
"output",
"=",
"[",
"]",
"for",
"dirname",
",",
"dirnames",
",",
"filenames",
"in",
"os",
".",
"walk",
"(",
"self",
".",
"_path",
")",
":",
"for",
"subdirname",
"in",
"dirnames",
":",
"output",
".",
"append",
"(",
"os",
".",
"path",
".",
"join",
"(",
"dirname",
",",
"subdirname",
")",
")",
"for",
"filename",
"in",
"filenames",
":",
"output",
".",
"append",
"(",
"os",
".",
"path",
".",
"join",
"(",
"dirname",
",",
"filename",
")",
")",
"return",
"output"
] |
List Files for Windows OS
Search and list the files and folder in the current directory for the
Windows file system.
@return: List of directory files and folders.
|
[
"List",
"Files",
"for",
"Windows",
"OS"
] |
8f21d7a0b28d4f5c3a0ed91f9660ac5310773605
|
https://github.com/Synerty/pytmpdir/blob/8f21d7a0b28d4f5c3a0ed91f9660ac5310773605/pytmpdir/Directory.py#L233-L248
|
241,082
|
Synerty/pytmpdir
|
pytmpdir/Directory.py
|
Directory._listFilesPosix
|
def _listFilesPosix(self) -> ['File']:
""" List Files for POSIX
Search and list the files and folder in the current directory for the
POSIX file system.
@return: List of directory files and folders.
"""
find = "find %s -type f" % self._path
output = check_output(args=find.split()).strip().decode().split(
'\n')
return output
|
python
|
def _listFilesPosix(self) -> ['File']:
""" List Files for POSIX
Search and list the files and folder in the current directory for the
POSIX file system.
@return: List of directory files and folders.
"""
find = "find %s -type f" % self._path
output = check_output(args=find.split()).strip().decode().split(
'\n')
return output
|
[
"def",
"_listFilesPosix",
"(",
"self",
")",
"->",
"[",
"'File'",
"]",
":",
"find",
"=",
"\"find %s -type f\"",
"%",
"self",
".",
"_path",
"output",
"=",
"check_output",
"(",
"args",
"=",
"find",
".",
"split",
"(",
")",
")",
".",
"strip",
"(",
")",
".",
"decode",
"(",
")",
".",
"split",
"(",
"'\\n'",
")",
"return",
"output"
] |
List Files for POSIX
Search and list the files and folder in the current directory for the
POSIX file system.
@return: List of directory files and folders.
|
[
"List",
"Files",
"for",
"POSIX"
] |
8f21d7a0b28d4f5c3a0ed91f9660ac5310773605
|
https://github.com/Synerty/pytmpdir/blob/8f21d7a0b28d4f5c3a0ed91f9660ac5310773605/pytmpdir/Directory.py#L250-L262
|
241,083
|
Synerty/pytmpdir
|
pytmpdir/Directory.py
|
File.pathName
|
def pathName(self, pathName: str):
""" Path Name Setter
Set path name with passed in variable, create new directory and move
previous directory contents to new path name.
@param pathName: New path name string.
@type pathName: String
"""
if self.pathName == pathName:
return
pathName = self.sanitise(pathName)
before = self.realPath
after = self._realPath(pathName)
assert (not os.path.exists(after))
newRealDir = os.path.dirname(after)
if not os.path.exists(newRealDir):
os.makedirs(newRealDir, DirSettings.defaultDirChmod)
shutil.move(before, after)
oldPathName = self._pathName
self._pathName = pathName
self._directory()._fileMoved(oldPathName, self)
|
python
|
def pathName(self, pathName: str):
""" Path Name Setter
Set path name with passed in variable, create new directory and move
previous directory contents to new path name.
@param pathName: New path name string.
@type pathName: String
"""
if self.pathName == pathName:
return
pathName = self.sanitise(pathName)
before = self.realPath
after = self._realPath(pathName)
assert (not os.path.exists(after))
newRealDir = os.path.dirname(after)
if not os.path.exists(newRealDir):
os.makedirs(newRealDir, DirSettings.defaultDirChmod)
shutil.move(before, after)
oldPathName = self._pathName
self._pathName = pathName
self._directory()._fileMoved(oldPathName, self)
|
[
"def",
"pathName",
"(",
"self",
",",
"pathName",
":",
"str",
")",
":",
"if",
"self",
".",
"pathName",
"==",
"pathName",
":",
"return",
"pathName",
"=",
"self",
".",
"sanitise",
"(",
"pathName",
")",
"before",
"=",
"self",
".",
"realPath",
"after",
"=",
"self",
".",
"_realPath",
"(",
"pathName",
")",
"assert",
"(",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"after",
")",
")",
"newRealDir",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"after",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"newRealDir",
")",
":",
"os",
".",
"makedirs",
"(",
"newRealDir",
",",
"DirSettings",
".",
"defaultDirChmod",
")",
"shutil",
".",
"move",
"(",
"before",
",",
"after",
")",
"oldPathName",
"=",
"self",
".",
"_pathName",
"self",
".",
"_pathName",
"=",
"pathName",
"self",
".",
"_directory",
"(",
")",
".",
"_fileMoved",
"(",
"oldPathName",
",",
"self",
")"
] |
Path Name Setter
Set path name with passed in variable, create new directory and move
previous directory contents to new path name.
@param pathName: New path name string.
@type pathName: String
|
[
"Path",
"Name",
"Setter"
] |
8f21d7a0b28d4f5c3a0ed91f9660ac5310773605
|
https://github.com/Synerty/pytmpdir/blob/8f21d7a0b28d4f5c3a0ed91f9660ac5310773605/pytmpdir/Directory.py#L519-L547
|
241,084
|
Synerty/pytmpdir
|
pytmpdir/Directory.py
|
File.namedTempFileReader
|
def namedTempFileReader(self) -> NamedTempFileReader:
""" Named Temporary File Reader
This provides an object compatible with NamedTemporaryFile, used for reading this
files contents. This will still delete after the object falls out of scope.
This solves the problem on windows where a NamedTemporaryFile can not be read
while it's being written to
"""
# Get the weak ref
directory = self._directory()
assert isinstance(directory, Directory), (
"Expected Directory, receieved %s" % directory)
# Return the object
return NamedTempFileReader(directory, self)
|
python
|
def namedTempFileReader(self) -> NamedTempFileReader:
""" Named Temporary File Reader
This provides an object compatible with NamedTemporaryFile, used for reading this
files contents. This will still delete after the object falls out of scope.
This solves the problem on windows where a NamedTemporaryFile can not be read
while it's being written to
"""
# Get the weak ref
directory = self._directory()
assert isinstance(directory, Directory), (
"Expected Directory, receieved %s" % directory)
# Return the object
return NamedTempFileReader(directory, self)
|
[
"def",
"namedTempFileReader",
"(",
"self",
")",
"->",
"NamedTempFileReader",
":",
"# Get the weak ref",
"directory",
"=",
"self",
".",
"_directory",
"(",
")",
"assert",
"isinstance",
"(",
"directory",
",",
"Directory",
")",
",",
"(",
"\"Expected Directory, receieved %s\"",
"%",
"directory",
")",
"# Return the object",
"return",
"NamedTempFileReader",
"(",
"directory",
",",
"self",
")"
] |
Named Temporary File Reader
This provides an object compatible with NamedTemporaryFile, used for reading this
files contents. This will still delete after the object falls out of scope.
This solves the problem on windows where a NamedTemporaryFile can not be read
while it's being written to
|
[
"Named",
"Temporary",
"File",
"Reader"
] |
8f21d7a0b28d4f5c3a0ed91f9660ac5310773605
|
https://github.com/Synerty/pytmpdir/blob/8f21d7a0b28d4f5c3a0ed91f9660ac5310773605/pytmpdir/Directory.py#L574-L590
|
241,085
|
Synerty/pytmpdir
|
pytmpdir/Directory.py
|
File._realPath
|
def _realPath(self, newPathName: str = None) -> str:
""" Private Real Path
Get path name.
@param newPathName: variable for new path name if passed argument.
@type newPathName: String
@return: Path Name as string.
"""
directory = self._directory()
assert directory
return os.path.join(directory.path,
newPathName if newPathName else self._pathName)
|
python
|
def _realPath(self, newPathName: str = None) -> str:
""" Private Real Path
Get path name.
@param newPathName: variable for new path name if passed argument.
@type newPathName: String
@return: Path Name as string.
"""
directory = self._directory()
assert directory
return os.path.join(directory.path,
newPathName if newPathName else self._pathName)
|
[
"def",
"_realPath",
"(",
"self",
",",
"newPathName",
":",
"str",
"=",
"None",
")",
"->",
"str",
":",
"directory",
"=",
"self",
".",
"_directory",
"(",
")",
"assert",
"directory",
"return",
"os",
".",
"path",
".",
"join",
"(",
"directory",
".",
"path",
",",
"newPathName",
"if",
"newPathName",
"else",
"self",
".",
"_pathName",
")"
] |
Private Real Path
Get path name.
@param newPathName: variable for new path name if passed argument.
@type newPathName: String
@return: Path Name as string.
|
[
"Private",
"Real",
"Path"
] |
8f21d7a0b28d4f5c3a0ed91f9660ac5310773605
|
https://github.com/Synerty/pytmpdir/blob/8f21d7a0b28d4f5c3a0ed91f9660ac5310773605/pytmpdir/Directory.py#L664-L677
|
241,086
|
JNRowe/jnrbase
|
setup.py
|
import_file
|
def import_file(package: str, fname: str) -> ModuleType:
"""Import file directly.
This is a hack to import files from packages without importing
<package>/__init__.py, its purpose is to allow import without requiring
all the dependencies at this point.
Args:
package: Package to import from
fname: File to import
Returns:
Imported module
"""
mod_name = fname.rstrip('.py')
spec = spec_from_file_location(mod_name, '{}/{}'.format(package, fname))
module = module_from_spec(spec)
spec.loader.exec_module(module)
return module
|
python
|
def import_file(package: str, fname: str) -> ModuleType:
"""Import file directly.
This is a hack to import files from packages without importing
<package>/__init__.py, its purpose is to allow import without requiring
all the dependencies at this point.
Args:
package: Package to import from
fname: File to import
Returns:
Imported module
"""
mod_name = fname.rstrip('.py')
spec = spec_from_file_location(mod_name, '{}/{}'.format(package, fname))
module = module_from_spec(spec)
spec.loader.exec_module(module)
return module
|
[
"def",
"import_file",
"(",
"package",
":",
"str",
",",
"fname",
":",
"str",
")",
"->",
"ModuleType",
":",
"mod_name",
"=",
"fname",
".",
"rstrip",
"(",
"'.py'",
")",
"spec",
"=",
"spec_from_file_location",
"(",
"mod_name",
",",
"'{}/{}'",
".",
"format",
"(",
"package",
",",
"fname",
")",
")",
"module",
"=",
"module_from_spec",
"(",
"spec",
")",
"spec",
".",
"loader",
".",
"exec_module",
"(",
"module",
")",
"return",
"module"
] |
Import file directly.
This is a hack to import files from packages without importing
<package>/__init__.py, its purpose is to allow import without requiring
all the dependencies at this point.
Args:
package: Package to import from
fname: File to import
Returns:
Imported module
|
[
"Import",
"file",
"directly",
"."
] |
ae505ef69a9feb739b5f4e62c5a8e6533104d3ea
|
https://github.com/JNRowe/jnrbase/blob/ae505ef69a9feb739b5f4e62c5a8e6533104d3ea/setup.py#L42-L59
|
241,087
|
honzamach/pynspect
|
pynspect/lexer.py
|
PynspectFilterLexer.t_FLOAT
|
def t_FLOAT(tok): # pylint: disable=locally-disabled,invalid-name
r'\d+\.\d+'
tok.value = (tok.type, float(tok.value))
return tok
|
python
|
def t_FLOAT(tok): # pylint: disable=locally-disabled,invalid-name
r'\d+\.\d+'
tok.value = (tok.type, float(tok.value))
return tok
|
[
"def",
"t_FLOAT",
"(",
"tok",
")",
":",
"# pylint: disable=locally-disabled,invalid-name",
"tok",
".",
"value",
"=",
"(",
"tok",
".",
"type",
",",
"float",
"(",
"tok",
".",
"value",
")",
")",
"return",
"tok"
] |
r'\d+\.\d+
|
[
"r",
"\\",
"d",
"+",
"\\",
".",
"\\",
"d",
"+"
] |
0582dcc1f7aafe50e25a21c792ea1b3367ea5881
|
https://github.com/honzamach/pynspect/blob/0582dcc1f7aafe50e25a21c792ea1b3367ea5881/pynspect/lexer.py#L292-L295
|
241,088
|
armenzg/buildapi_client
|
buildapi_client/buildapi_client.py
|
trigger_arbitrary_job
|
def trigger_arbitrary_job(repo_name, builder, revision, auth, files=None, dry_run=False,
extra_properties=None):
"""
Request buildapi to trigger a job for us.
We return the request or None if dry_run is True.
Raises BuildapiAuthError if credentials are invalid.
"""
assert len(revision) == 40, \
'We do not accept revisions shorter than 40 chars'
url = _builders_api_url(repo_name, builder, revision)
payload = _payload(repo_name, revision, files, extra_properties)
if dry_run:
LOG.info("Dry-run: We were going to request a job for '{}'".format(builder))
LOG.info(" with this payload: {}".format(str(payload)))
LOG.info(" with these files: {}".format(files))
return None
# NOTE: A good response returns json with request_id as one of the keys
req = requests.post(
url,
headers={'Accept': 'application/json'},
data=payload,
auth=auth,
timeout=TCP_TIMEOUT,
)
if req.status_code == 401:
raise BuildapiAuthError("Your credentials were invalid. Please try again.")
elif req.status_code == 503:
raise BuildapiDown("Please file a bug {}".format(url))
try:
req.json()
return req
except ValueError:
LOG.info('repo: {}, builder: {}, revision: {}'.format(repo_name, builder, revision))
LOG.error("We did not get info from %s (status code: %s)" % (url, req.status_code))
return None
|
python
|
def trigger_arbitrary_job(repo_name, builder, revision, auth, files=None, dry_run=False,
extra_properties=None):
"""
Request buildapi to trigger a job for us.
We return the request or None if dry_run is True.
Raises BuildapiAuthError if credentials are invalid.
"""
assert len(revision) == 40, \
'We do not accept revisions shorter than 40 chars'
url = _builders_api_url(repo_name, builder, revision)
payload = _payload(repo_name, revision, files, extra_properties)
if dry_run:
LOG.info("Dry-run: We were going to request a job for '{}'".format(builder))
LOG.info(" with this payload: {}".format(str(payload)))
LOG.info(" with these files: {}".format(files))
return None
# NOTE: A good response returns json with request_id as one of the keys
req = requests.post(
url,
headers={'Accept': 'application/json'},
data=payload,
auth=auth,
timeout=TCP_TIMEOUT,
)
if req.status_code == 401:
raise BuildapiAuthError("Your credentials were invalid. Please try again.")
elif req.status_code == 503:
raise BuildapiDown("Please file a bug {}".format(url))
try:
req.json()
return req
except ValueError:
LOG.info('repo: {}, builder: {}, revision: {}'.format(repo_name, builder, revision))
LOG.error("We did not get info from %s (status code: %s)" % (url, req.status_code))
return None
|
[
"def",
"trigger_arbitrary_job",
"(",
"repo_name",
",",
"builder",
",",
"revision",
",",
"auth",
",",
"files",
"=",
"None",
",",
"dry_run",
"=",
"False",
",",
"extra_properties",
"=",
"None",
")",
":",
"assert",
"len",
"(",
"revision",
")",
"==",
"40",
",",
"'We do not accept revisions shorter than 40 chars'",
"url",
"=",
"_builders_api_url",
"(",
"repo_name",
",",
"builder",
",",
"revision",
")",
"payload",
"=",
"_payload",
"(",
"repo_name",
",",
"revision",
",",
"files",
",",
"extra_properties",
")",
"if",
"dry_run",
":",
"LOG",
".",
"info",
"(",
"\"Dry-run: We were going to request a job for '{}'\"",
".",
"format",
"(",
"builder",
")",
")",
"LOG",
".",
"info",
"(",
"\" with this payload: {}\"",
".",
"format",
"(",
"str",
"(",
"payload",
")",
")",
")",
"LOG",
".",
"info",
"(",
"\" with these files: {}\"",
".",
"format",
"(",
"files",
")",
")",
"return",
"None",
"# NOTE: A good response returns json with request_id as one of the keys",
"req",
"=",
"requests",
".",
"post",
"(",
"url",
",",
"headers",
"=",
"{",
"'Accept'",
":",
"'application/json'",
"}",
",",
"data",
"=",
"payload",
",",
"auth",
"=",
"auth",
",",
"timeout",
"=",
"TCP_TIMEOUT",
",",
")",
"if",
"req",
".",
"status_code",
"==",
"401",
":",
"raise",
"BuildapiAuthError",
"(",
"\"Your credentials were invalid. Please try again.\"",
")",
"elif",
"req",
".",
"status_code",
"==",
"503",
":",
"raise",
"BuildapiDown",
"(",
"\"Please file a bug {}\"",
".",
"format",
"(",
"url",
")",
")",
"try",
":",
"req",
".",
"json",
"(",
")",
"return",
"req",
"except",
"ValueError",
":",
"LOG",
".",
"info",
"(",
"'repo: {}, builder: {}, revision: {}'",
".",
"format",
"(",
"repo_name",
",",
"builder",
",",
"revision",
")",
")",
"LOG",
".",
"error",
"(",
"\"We did not get info from %s (status code: %s)\"",
"%",
"(",
"url",
",",
"req",
".",
"status_code",
")",
")",
"return",
"None"
] |
Request buildapi to trigger a job for us.
We return the request or None if dry_run is True.
Raises BuildapiAuthError if credentials are invalid.
|
[
"Request",
"buildapi",
"to",
"trigger",
"a",
"job",
"for",
"us",
"."
] |
59af70ad54bf65e09e5eb02241c61b326153b761
|
https://github.com/armenzg/buildapi_client/blob/59af70ad54bf65e09e5eb02241c61b326153b761/buildapi_client/buildapi_client.py#L35-L75
|
241,089
|
armenzg/buildapi_client
|
buildapi_client/buildapi_client.py
|
make_retrigger_request
|
def make_retrigger_request(repo_name, request_id, auth, count=DEFAULT_COUNT_NUM,
priority=DEFAULT_PRIORITY, dry_run=True):
"""
Retrigger a request using buildapi self-serve. Returns a request.
Buildapi documentation:
POST /self-serve/{branch}/request
Rebuild `request_id`, which must be passed in as a POST parameter.
`priority` and `count` are also accepted as optional
parameters. `count` defaults to 1, and represents the number
of times this build will be rebuilt.
"""
url = '{}/{}/request'.format(SELF_SERVE, repo_name)
payload = {'request_id': request_id}
if count != DEFAULT_COUNT_NUM or priority != DEFAULT_PRIORITY:
payload.update({'count': count,
'priority': priority})
if dry_run:
LOG.info('We would make a POST request to %s with the payload: %s' % (url, str(payload)))
return None
LOG.info("We're going to re-trigger an existing completed job with request_id: %s %i time(s)."
% (request_id, count))
req = requests.post(
url,
headers={'Accept': 'application/json'},
data=payload,
auth=auth,
timeout=TCP_TIMEOUT,
)
# TODO: add debug message with job_id URL.
return req
|
python
|
def make_retrigger_request(repo_name, request_id, auth, count=DEFAULT_COUNT_NUM,
priority=DEFAULT_PRIORITY, dry_run=True):
"""
Retrigger a request using buildapi self-serve. Returns a request.
Buildapi documentation:
POST /self-serve/{branch}/request
Rebuild `request_id`, which must be passed in as a POST parameter.
`priority` and `count` are also accepted as optional
parameters. `count` defaults to 1, and represents the number
of times this build will be rebuilt.
"""
url = '{}/{}/request'.format(SELF_SERVE, repo_name)
payload = {'request_id': request_id}
if count != DEFAULT_COUNT_NUM or priority != DEFAULT_PRIORITY:
payload.update({'count': count,
'priority': priority})
if dry_run:
LOG.info('We would make a POST request to %s with the payload: %s' % (url, str(payload)))
return None
LOG.info("We're going to re-trigger an existing completed job with request_id: %s %i time(s)."
% (request_id, count))
req = requests.post(
url,
headers={'Accept': 'application/json'},
data=payload,
auth=auth,
timeout=TCP_TIMEOUT,
)
# TODO: add debug message with job_id URL.
return req
|
[
"def",
"make_retrigger_request",
"(",
"repo_name",
",",
"request_id",
",",
"auth",
",",
"count",
"=",
"DEFAULT_COUNT_NUM",
",",
"priority",
"=",
"DEFAULT_PRIORITY",
",",
"dry_run",
"=",
"True",
")",
":",
"url",
"=",
"'{}/{}/request'",
".",
"format",
"(",
"SELF_SERVE",
",",
"repo_name",
")",
"payload",
"=",
"{",
"'request_id'",
":",
"request_id",
"}",
"if",
"count",
"!=",
"DEFAULT_COUNT_NUM",
"or",
"priority",
"!=",
"DEFAULT_PRIORITY",
":",
"payload",
".",
"update",
"(",
"{",
"'count'",
":",
"count",
",",
"'priority'",
":",
"priority",
"}",
")",
"if",
"dry_run",
":",
"LOG",
".",
"info",
"(",
"'We would make a POST request to %s with the payload: %s'",
"%",
"(",
"url",
",",
"str",
"(",
"payload",
")",
")",
")",
"return",
"None",
"LOG",
".",
"info",
"(",
"\"We're going to re-trigger an existing completed job with request_id: %s %i time(s).\"",
"%",
"(",
"request_id",
",",
"count",
")",
")",
"req",
"=",
"requests",
".",
"post",
"(",
"url",
",",
"headers",
"=",
"{",
"'Accept'",
":",
"'application/json'",
"}",
",",
"data",
"=",
"payload",
",",
"auth",
"=",
"auth",
",",
"timeout",
"=",
"TCP_TIMEOUT",
",",
")",
"# TODO: add debug message with job_id URL.",
"return",
"req"
] |
Retrigger a request using buildapi self-serve. Returns a request.
Buildapi documentation:
POST /self-serve/{branch}/request
Rebuild `request_id`, which must be passed in as a POST parameter.
`priority` and `count` are also accepted as optional
parameters. `count` defaults to 1, and represents the number
of times this build will be rebuilt.
|
[
"Retrigger",
"a",
"request",
"using",
"buildapi",
"self",
"-",
"serve",
".",
"Returns",
"a",
"request",
"."
] |
59af70ad54bf65e09e5eb02241c61b326153b761
|
https://github.com/armenzg/buildapi_client/blob/59af70ad54bf65e09e5eb02241c61b326153b761/buildapi_client/buildapi_client.py#L78-L111
|
241,090
|
armenzg/buildapi_client
|
buildapi_client/buildapi_client.py
|
make_cancel_request
|
def make_cancel_request(repo_name, request_id, auth, dry_run=True):
"""
Cancel a request using buildapi self-serve. Returns a request.
Buildapi documentation:
DELETE /self-serve/{branch}/request/{request_id} Cancel the given request
"""
url = '{}/{}/request/{}'.format(SELF_SERVE, repo_name, request_id)
if dry_run:
LOG.info('We would make a DELETE request to %s.' % url)
return None
LOG.info("We're going to cancel the job at %s" % url)
req = requests.delete(url, auth=auth, timeout=TCP_TIMEOUT)
# TODO: add debug message with the canceled job_id URL. Find a way
# to do that without doing an additional request.
return req
|
python
|
def make_cancel_request(repo_name, request_id, auth, dry_run=True):
"""
Cancel a request using buildapi self-serve. Returns a request.
Buildapi documentation:
DELETE /self-serve/{branch}/request/{request_id} Cancel the given request
"""
url = '{}/{}/request/{}'.format(SELF_SERVE, repo_name, request_id)
if dry_run:
LOG.info('We would make a DELETE request to %s.' % url)
return None
LOG.info("We're going to cancel the job at %s" % url)
req = requests.delete(url, auth=auth, timeout=TCP_TIMEOUT)
# TODO: add debug message with the canceled job_id URL. Find a way
# to do that without doing an additional request.
return req
|
[
"def",
"make_cancel_request",
"(",
"repo_name",
",",
"request_id",
",",
"auth",
",",
"dry_run",
"=",
"True",
")",
":",
"url",
"=",
"'{}/{}/request/{}'",
".",
"format",
"(",
"SELF_SERVE",
",",
"repo_name",
",",
"request_id",
")",
"if",
"dry_run",
":",
"LOG",
".",
"info",
"(",
"'We would make a DELETE request to %s.'",
"%",
"url",
")",
"return",
"None",
"LOG",
".",
"info",
"(",
"\"We're going to cancel the job at %s\"",
"%",
"url",
")",
"req",
"=",
"requests",
".",
"delete",
"(",
"url",
",",
"auth",
"=",
"auth",
",",
"timeout",
"=",
"TCP_TIMEOUT",
")",
"# TODO: add debug message with the canceled job_id URL. Find a way",
"# to do that without doing an additional request.",
"return",
"req"
] |
Cancel a request using buildapi self-serve. Returns a request.
Buildapi documentation:
DELETE /self-serve/{branch}/request/{request_id} Cancel the given request
|
[
"Cancel",
"a",
"request",
"using",
"buildapi",
"self",
"-",
"serve",
".",
"Returns",
"a",
"request",
"."
] |
59af70ad54bf65e09e5eb02241c61b326153b761
|
https://github.com/armenzg/buildapi_client/blob/59af70ad54bf65e09e5eb02241c61b326153b761/buildapi_client/buildapi_client.py#L114-L131
|
241,091
|
armenzg/buildapi_client
|
buildapi_client/buildapi_client.py
|
query_jobs_schedule
|
def query_jobs_schedule(repo_name, revision, auth):
"""
Query Buildapi for jobs.
"""
url = "%s/%s/rev/%s?format=json" % (SELF_SERVE, repo_name, revision)
LOG.debug("About to fetch %s" % url)
req = requests.get(url, auth=auth, timeout=TCP_TIMEOUT)
# If the revision doesn't exist on buildapi, that means there are
# no buildapi jobs for this revision
if req.status_code not in [200]:
return []
return req.json()
|
python
|
def query_jobs_schedule(repo_name, revision, auth):
"""
Query Buildapi for jobs.
"""
url = "%s/%s/rev/%s?format=json" % (SELF_SERVE, repo_name, revision)
LOG.debug("About to fetch %s" % url)
req = requests.get(url, auth=auth, timeout=TCP_TIMEOUT)
# If the revision doesn't exist on buildapi, that means there are
# no buildapi jobs for this revision
if req.status_code not in [200]:
return []
return req.json()
|
[
"def",
"query_jobs_schedule",
"(",
"repo_name",
",",
"revision",
",",
"auth",
")",
":",
"url",
"=",
"\"%s/%s/rev/%s?format=json\"",
"%",
"(",
"SELF_SERVE",
",",
"repo_name",
",",
"revision",
")",
"LOG",
".",
"debug",
"(",
"\"About to fetch %s\"",
"%",
"url",
")",
"req",
"=",
"requests",
".",
"get",
"(",
"url",
",",
"auth",
"=",
"auth",
",",
"timeout",
"=",
"TCP_TIMEOUT",
")",
"# If the revision doesn't exist on buildapi, that means there are",
"# no buildapi jobs for this revision",
"if",
"req",
".",
"status_code",
"not",
"in",
"[",
"200",
"]",
":",
"return",
"[",
"]",
"return",
"req",
".",
"json",
"(",
")"
] |
Query Buildapi for jobs.
|
[
"Query",
"Buildapi",
"for",
"jobs",
"."
] |
59af70ad54bf65e09e5eb02241c61b326153b761
|
https://github.com/armenzg/buildapi_client/blob/59af70ad54bf65e09e5eb02241c61b326153b761/buildapi_client/buildapi_client.py#L220-L233
|
241,092
|
armenzg/buildapi_client
|
buildapi_client/buildapi_client.py
|
query_pending_jobs
|
def query_pending_jobs(auth, repo_name=None, return_raw=False):
"""Return pending jobs"""
url = '%s/pending?format=json' % HOST_ROOT
LOG.debug('About to fetch %s' % url)
req = requests.get(url, auth=auth, timeout=TCP_TIMEOUT)
# If the revision doesn't exist on buildapi, that means there are
# no builapi jobs for this revision
if req.status_code not in [200]:
return []
raw = req.json()
# If we don't want the data structure to be reduced
if return_raw:
return raw
# If we only want pending jobs of a specific repo
if repo_name and repo_name in list(raw['pending'].keys()):
repo_list = [repo_name]
else:
repo_list = list(raw['pending'].keys())
# Data structure to return
data = {}
for repo in repo_list:
data[repo] = {}
repo_jobs = raw['pending'][repo]
for revision in repo_jobs.items():
data[repo][revision[0]] = revision[1]
return data
|
python
|
def query_pending_jobs(auth, repo_name=None, return_raw=False):
"""Return pending jobs"""
url = '%s/pending?format=json' % HOST_ROOT
LOG.debug('About to fetch %s' % url)
req = requests.get(url, auth=auth, timeout=TCP_TIMEOUT)
# If the revision doesn't exist on buildapi, that means there are
# no builapi jobs for this revision
if req.status_code not in [200]:
return []
raw = req.json()
# If we don't want the data structure to be reduced
if return_raw:
return raw
# If we only want pending jobs of a specific repo
if repo_name and repo_name in list(raw['pending'].keys()):
repo_list = [repo_name]
else:
repo_list = list(raw['pending'].keys())
# Data structure to return
data = {}
for repo in repo_list:
data[repo] = {}
repo_jobs = raw['pending'][repo]
for revision in repo_jobs.items():
data[repo][revision[0]] = revision[1]
return data
|
[
"def",
"query_pending_jobs",
"(",
"auth",
",",
"repo_name",
"=",
"None",
",",
"return_raw",
"=",
"False",
")",
":",
"url",
"=",
"'%s/pending?format=json'",
"%",
"HOST_ROOT",
"LOG",
".",
"debug",
"(",
"'About to fetch %s'",
"%",
"url",
")",
"req",
"=",
"requests",
".",
"get",
"(",
"url",
",",
"auth",
"=",
"auth",
",",
"timeout",
"=",
"TCP_TIMEOUT",
")",
"# If the revision doesn't exist on buildapi, that means there are",
"# no builapi jobs for this revision",
"if",
"req",
".",
"status_code",
"not",
"in",
"[",
"200",
"]",
":",
"return",
"[",
"]",
"raw",
"=",
"req",
".",
"json",
"(",
")",
"# If we don't want the data structure to be reduced",
"if",
"return_raw",
":",
"return",
"raw",
"# If we only want pending jobs of a specific repo",
"if",
"repo_name",
"and",
"repo_name",
"in",
"list",
"(",
"raw",
"[",
"'pending'",
"]",
".",
"keys",
"(",
")",
")",
":",
"repo_list",
"=",
"[",
"repo_name",
"]",
"else",
":",
"repo_list",
"=",
"list",
"(",
"raw",
"[",
"'pending'",
"]",
".",
"keys",
"(",
")",
")",
"# Data structure to return",
"data",
"=",
"{",
"}",
"for",
"repo",
"in",
"repo_list",
":",
"data",
"[",
"repo",
"]",
"=",
"{",
"}",
"repo_jobs",
"=",
"raw",
"[",
"'pending'",
"]",
"[",
"repo",
"]",
"for",
"revision",
"in",
"repo_jobs",
".",
"items",
"(",
")",
":",
"data",
"[",
"repo",
"]",
"[",
"revision",
"[",
"0",
"]",
"]",
"=",
"revision",
"[",
"1",
"]",
"return",
"data"
] |
Return pending jobs
|
[
"Return",
"pending",
"jobs"
] |
59af70ad54bf65e09e5eb02241c61b326153b761
|
https://github.com/armenzg/buildapi_client/blob/59af70ad54bf65e09e5eb02241c61b326153b761/buildapi_client/buildapi_client.py#L241-L272
|
241,093
|
krukas/Trionyx
|
trionyx/layout.py
|
Component.set_object
|
def set_object(self, object):
"""
Set object for rendering component and set object to all components
:param object:
:return:
"""
if self.object is False:
self.object = object
# Pass object along to child components for rendering
for component in self.components:
component.set_object(object)
|
python
|
def set_object(self, object):
"""
Set object for rendering component and set object to all components
:param object:
:return:
"""
if self.object is False:
self.object = object
# Pass object along to child components for rendering
for component in self.components:
component.set_object(object)
|
[
"def",
"set_object",
"(",
"self",
",",
"object",
")",
":",
"if",
"self",
".",
"object",
"is",
"False",
":",
"self",
".",
"object",
"=",
"object",
"# Pass object along to child components for rendering",
"for",
"component",
"in",
"self",
".",
"components",
":",
"component",
".",
"set_object",
"(",
"object",
")"
] |
Set object for rendering component and set object to all components
:param object:
:return:
|
[
"Set",
"object",
"for",
"rendering",
"component",
"and",
"set",
"object",
"to",
"all",
"components"
] |
edac132cc0797190153f2e60bc7e88cb50e80da6
|
https://github.com/krukas/Trionyx/blob/edac132cc0797190153f2e60bc7e88cb50e80da6/trionyx/layout.py#L118-L130
|
241,094
|
krukas/Trionyx
|
trionyx/layout.py
|
ComponentFieldsMixin.get_fields
|
def get_fields(self):
"""Get all fields"""
if not hasattr(self, '__fields'):
self.__fields = [
self.parse_field(field, index)
for index, field in enumerate(getattr(self, 'fields', []))
]
return self.__fields
|
python
|
def get_fields(self):
"""Get all fields"""
if not hasattr(self, '__fields'):
self.__fields = [
self.parse_field(field, index)
for index, field in enumerate(getattr(self, 'fields', []))
]
return self.__fields
|
[
"def",
"get_fields",
"(",
"self",
")",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"'__fields'",
")",
":",
"self",
".",
"__fields",
"=",
"[",
"self",
".",
"parse_field",
"(",
"field",
",",
"index",
")",
"for",
"index",
",",
"field",
"in",
"enumerate",
"(",
"getattr",
"(",
"self",
",",
"'fields'",
",",
"[",
"]",
")",
")",
"]",
"return",
"self",
".",
"__fields"
] |
Get all fields
|
[
"Get",
"all",
"fields"
] |
edac132cc0797190153f2e60bc7e88cb50e80da6
|
https://github.com/krukas/Trionyx/blob/edac132cc0797190153f2e60bc7e88cb50e80da6/trionyx/layout.py#L194-L201
|
241,095
|
krukas/Trionyx
|
trionyx/layout.py
|
ComponentFieldsMixin.parse_field
|
def parse_field(self, field_data, index=0):
"""Parse field and add missing options"""
field = {
'__index__': index,
}
if isinstance(field_data, str):
field.update(self.parse_string_field(field_data))
elif isinstance(field_data, dict):
field.update(field_data)
else:
raise TypeError('Expected a str or dict get {}'.format(type(field_data)))
if 'field' not in field:
field['field'] = None
if 'label' not in field and field['field']:
try:
field['label'] = self.object._meta.get_field(field['field']).verbose_name.capitalize()
except Exception:
field['label'] = field['field'].replace('_', '').capitalize()
elif 'label' not in field:
field['label'] = ''
if 'format' not in field:
field['format'] = '{0}'
# Set default options
for name, options in self.fields_options.items():
if 'default' in options and name not in field:
field[name] = options['default']
return field
|
python
|
def parse_field(self, field_data, index=0):
"""Parse field and add missing options"""
field = {
'__index__': index,
}
if isinstance(field_data, str):
field.update(self.parse_string_field(field_data))
elif isinstance(field_data, dict):
field.update(field_data)
else:
raise TypeError('Expected a str or dict get {}'.format(type(field_data)))
if 'field' not in field:
field['field'] = None
if 'label' not in field and field['field']:
try:
field['label'] = self.object._meta.get_field(field['field']).verbose_name.capitalize()
except Exception:
field['label'] = field['field'].replace('_', '').capitalize()
elif 'label' not in field:
field['label'] = ''
if 'format' not in field:
field['format'] = '{0}'
# Set default options
for name, options in self.fields_options.items():
if 'default' in options and name not in field:
field[name] = options['default']
return field
|
[
"def",
"parse_field",
"(",
"self",
",",
"field_data",
",",
"index",
"=",
"0",
")",
":",
"field",
"=",
"{",
"'__index__'",
":",
"index",
",",
"}",
"if",
"isinstance",
"(",
"field_data",
",",
"str",
")",
":",
"field",
".",
"update",
"(",
"self",
".",
"parse_string_field",
"(",
"field_data",
")",
")",
"elif",
"isinstance",
"(",
"field_data",
",",
"dict",
")",
":",
"field",
".",
"update",
"(",
"field_data",
")",
"else",
":",
"raise",
"TypeError",
"(",
"'Expected a str or dict get {}'",
".",
"format",
"(",
"type",
"(",
"field_data",
")",
")",
")",
"if",
"'field'",
"not",
"in",
"field",
":",
"field",
"[",
"'field'",
"]",
"=",
"None",
"if",
"'label'",
"not",
"in",
"field",
"and",
"field",
"[",
"'field'",
"]",
":",
"try",
":",
"field",
"[",
"'label'",
"]",
"=",
"self",
".",
"object",
".",
"_meta",
".",
"get_field",
"(",
"field",
"[",
"'field'",
"]",
")",
".",
"verbose_name",
".",
"capitalize",
"(",
")",
"except",
"Exception",
":",
"field",
"[",
"'label'",
"]",
"=",
"field",
"[",
"'field'",
"]",
".",
"replace",
"(",
"'_'",
",",
"''",
")",
".",
"capitalize",
"(",
")",
"elif",
"'label'",
"not",
"in",
"field",
":",
"field",
"[",
"'label'",
"]",
"=",
"''",
"if",
"'format'",
"not",
"in",
"field",
":",
"field",
"[",
"'format'",
"]",
"=",
"'{0}'",
"# Set default options",
"for",
"name",
",",
"options",
"in",
"self",
".",
"fields_options",
".",
"items",
"(",
")",
":",
"if",
"'default'",
"in",
"options",
"and",
"name",
"not",
"in",
"field",
":",
"field",
"[",
"name",
"]",
"=",
"options",
"[",
"'default'",
"]",
"return",
"field"
] |
Parse field and add missing options
|
[
"Parse",
"field",
"and",
"add",
"missing",
"options"
] |
edac132cc0797190153f2e60bc7e88cb50e80da6
|
https://github.com/krukas/Trionyx/blob/edac132cc0797190153f2e60bc7e88cb50e80da6/trionyx/layout.py#L203-L235
|
241,096
|
krukas/Trionyx
|
trionyx/layout.py
|
ComponentFieldsMixin.render_field
|
def render_field(self, field, data):
"""Render field for given data"""
from trionyx.renderer import renderer
if 'value' in field:
value = field['value']
elif isinstance(data, object) and hasattr(data, field['field']):
value = getattr(data, field['field'])
if 'renderer' not in field:
value = renderer.render_field(data, field['field'], **field)
elif isinstance(data, dict) and field['field'] in data:
value = data.get(field['field'])
elif isinstance(data, list) and field['__index__'] < len(data):
value = data[field['__index__']]
else:
return ''
options = {key: value for key, value in field.items() if key not in ['value', 'data_object']}
if 'renderer' in field:
value = field['renderer'](value, data_object=data, **options)
else:
value = renderer.render_value(value, data_object=data, **options)
return field['format'].format(value)
|
python
|
def render_field(self, field, data):
"""Render field for given data"""
from trionyx.renderer import renderer
if 'value' in field:
value = field['value']
elif isinstance(data, object) and hasattr(data, field['field']):
value = getattr(data, field['field'])
if 'renderer' not in field:
value = renderer.render_field(data, field['field'], **field)
elif isinstance(data, dict) and field['field'] in data:
value = data.get(field['field'])
elif isinstance(data, list) and field['__index__'] < len(data):
value = data[field['__index__']]
else:
return ''
options = {key: value for key, value in field.items() if key not in ['value', 'data_object']}
if 'renderer' in field:
value = field['renderer'](value, data_object=data, **options)
else:
value = renderer.render_value(value, data_object=data, **options)
return field['format'].format(value)
|
[
"def",
"render_field",
"(",
"self",
",",
"field",
",",
"data",
")",
":",
"from",
"trionyx",
".",
"renderer",
"import",
"renderer",
"if",
"'value'",
"in",
"field",
":",
"value",
"=",
"field",
"[",
"'value'",
"]",
"elif",
"isinstance",
"(",
"data",
",",
"object",
")",
"and",
"hasattr",
"(",
"data",
",",
"field",
"[",
"'field'",
"]",
")",
":",
"value",
"=",
"getattr",
"(",
"data",
",",
"field",
"[",
"'field'",
"]",
")",
"if",
"'renderer'",
"not",
"in",
"field",
":",
"value",
"=",
"renderer",
".",
"render_field",
"(",
"data",
",",
"field",
"[",
"'field'",
"]",
",",
"*",
"*",
"field",
")",
"elif",
"isinstance",
"(",
"data",
",",
"dict",
")",
"and",
"field",
"[",
"'field'",
"]",
"in",
"data",
":",
"value",
"=",
"data",
".",
"get",
"(",
"field",
"[",
"'field'",
"]",
")",
"elif",
"isinstance",
"(",
"data",
",",
"list",
")",
"and",
"field",
"[",
"'__index__'",
"]",
"<",
"len",
"(",
"data",
")",
":",
"value",
"=",
"data",
"[",
"field",
"[",
"'__index__'",
"]",
"]",
"else",
":",
"return",
"''",
"options",
"=",
"{",
"key",
":",
"value",
"for",
"key",
",",
"value",
"in",
"field",
".",
"items",
"(",
")",
"if",
"key",
"not",
"in",
"[",
"'value'",
",",
"'data_object'",
"]",
"}",
"if",
"'renderer'",
"in",
"field",
":",
"value",
"=",
"field",
"[",
"'renderer'",
"]",
"(",
"value",
",",
"data_object",
"=",
"data",
",",
"*",
"*",
"options",
")",
"else",
":",
"value",
"=",
"renderer",
".",
"render_value",
"(",
"value",
",",
"data_object",
"=",
"data",
",",
"*",
"*",
"options",
")",
"return",
"field",
"[",
"'format'",
"]",
".",
"format",
"(",
"value",
")"
] |
Render field for given data
|
[
"Render",
"field",
"for",
"given",
"data"
] |
edac132cc0797190153f2e60bc7e88cb50e80da6
|
https://github.com/krukas/Trionyx/blob/edac132cc0797190153f2e60bc7e88cb50e80da6/trionyx/layout.py#L290-L313
|
241,097
|
krukas/Trionyx
|
trionyx/layout.py
|
HtmlTagWrapper.get_attr_text
|
def get_attr_text(self):
"""Get html attr text to render in template"""
return ' '.join([
'{}="{}"'.format(key, value)
for key, value in self.attr.items()
])
|
python
|
def get_attr_text(self):
"""Get html attr text to render in template"""
return ' '.join([
'{}="{}"'.format(key, value)
for key, value in self.attr.items()
])
|
[
"def",
"get_attr_text",
"(",
"self",
")",
":",
"return",
"' '",
".",
"join",
"(",
"[",
"'{}=\"{}\"'",
".",
"format",
"(",
"key",
",",
"value",
")",
"for",
"key",
",",
"value",
"in",
"self",
".",
"attr",
".",
"items",
"(",
")",
"]",
")"
] |
Get html attr text to render in template
|
[
"Get",
"html",
"attr",
"text",
"to",
"render",
"in",
"template"
] |
edac132cc0797190153f2e60bc7e88cb50e80da6
|
https://github.com/krukas/Trionyx/blob/edac132cc0797190153f2e60bc7e88cb50e80da6/trionyx/layout.py#L358-L363
|
241,098
|
maceoutliner/django-fiction-outlines
|
fiction_outlines/forms.py
|
OutlineMoveNodeForm.mk_dropdown_tree
|
def mk_dropdown_tree(cls, model, root_node, for_node=None):
'''
Override of ``treebeard`` method to enforce the same root.
'''
options = []
# The difference is that we only generate the subtree for the current root.
logger.debug("Using root node pk of %s" % root_node.pk)
cls.add_subtree(for_node, root_node, options)
return options[1:]
|
python
|
def mk_dropdown_tree(cls, model, root_node, for_node=None):
'''
Override of ``treebeard`` method to enforce the same root.
'''
options = []
# The difference is that we only generate the subtree for the current root.
logger.debug("Using root node pk of %s" % root_node.pk)
cls.add_subtree(for_node, root_node, options)
return options[1:]
|
[
"def",
"mk_dropdown_tree",
"(",
"cls",
",",
"model",
",",
"root_node",
",",
"for_node",
"=",
"None",
")",
":",
"options",
"=",
"[",
"]",
"# The difference is that we only generate the subtree for the current root.",
"logger",
".",
"debug",
"(",
"\"Using root node pk of %s\"",
"%",
"root_node",
".",
"pk",
")",
"cls",
".",
"add_subtree",
"(",
"for_node",
",",
"root_node",
",",
"options",
")",
"return",
"options",
"[",
"1",
":",
"]"
] |
Override of ``treebeard`` method to enforce the same root.
|
[
"Override",
"of",
"treebeard",
"method",
"to",
"enforce",
"the",
"same",
"root",
"."
] |
6c58e356af3fbe7b23557643ba27e46eaef9d4e3
|
https://github.com/maceoutliner/django-fiction-outlines/blob/6c58e356af3fbe7b23557643ba27e46eaef9d4e3/fiction_outlines/forms.py#L88-L96
|
241,099
|
olsoneric/pedemath
|
pedemath/vec2.py
|
scale_v2
|
def scale_v2(vec, amount):
"""Return a new Vec2 with x and y from vec and multiplied by amount."""
return Vec2(vec.x * amount, vec.y * amount)
|
python
|
def scale_v2(vec, amount):
"""Return a new Vec2 with x and y from vec and multiplied by amount."""
return Vec2(vec.x * amount, vec.y * amount)
|
[
"def",
"scale_v2",
"(",
"vec",
",",
"amount",
")",
":",
"return",
"Vec2",
"(",
"vec",
".",
"x",
"*",
"amount",
",",
"vec",
".",
"y",
"*",
"amount",
")"
] |
Return a new Vec2 with x and y from vec and multiplied by amount.
|
[
"Return",
"a",
"new",
"Vec2",
"with",
"x",
"and",
"y",
"from",
"vec",
"and",
"multiplied",
"by",
"amount",
"."
] |
4bffcfe7089e421d603eb0a9708b84789c2d16be
|
https://github.com/olsoneric/pedemath/blob/4bffcfe7089e421d603eb0a9708b84789c2d16be/pedemath/vec2.py#L58-L61
|