_id stringlengths 2 7 | title stringlengths 1 88 | partition stringclasses 3 values | text stringlengths 75 19.8k | language stringclasses 1 value | meta_information dict |
|---|---|---|---|---|---|
def everyonesAverage(year, badFormat, length):
    '''Collect the weighted average result for every candidate in a year.

    Arguments:
        year {int}
        badFormat {dict} -- candNumber : [results for candidate]
        length {int} -- length of each row in badFormat divided by 2

    returns:
        list -- weighted average results of everyone in year
    '''
    # skip the first key (presumably a header entry, not a candidate -- confirm)
    candidates = list(badFormat.keys())[1:]
    averages = []
    for candidate in candidates:
        averages.append(myGrades(year, candidate, badFormat, length))
    return averages
def askInitial():
    '''Prompt the user for what the script should do.

    Returns:
        dict -- answers to the questions, keyed by question name
    '''
    questions = [
        inquirer.Text(
            'inputPath', message="What's the path of your input file (eg input.csv)"),
        inquirer.List(
            'year',
            message="What year are you in",
            choices=[1, 2, 3, 4]
        ),
        inquirer.Checkbox(
            'whatToDo',
            message="What can I do for you (select with your spacebar)",
            choices=[
                "Get your weighted average",
                "Get your rank in the year",
                "Reformat results by module and output to csv",
                "Plot the results by module"
            ]),
    ]
    return inquirer.prompt(questions)
def howPlotAsk(goodFormat):
    '''Drive plotting according to the user's inquirer answers.

    Arguments:
        goodFormat {dict} -- module : [results for module]
    '''
    choices = askPlot()['plotQ']
    wants_save = "Save" in choices
    wants_show = "Show" in choices
    if wants_save:
        target = pathlib.Path(askSave())
        # show flag is passed through; saving happens either way
        plotter(target, wants_show, goodFormat)
    elif wants_show:
        plotter(None, True, goodFormat)
def read_config(config_path_or_dict=None):
    """
    Read config from given path string or dict object.

    :param config_path_or_dict: a config mapping, an absolute or
        cwd-relative path to a JSON config file, or None to use
        DEFAULT_CONFIG_PATH relative to the current directory.
    :type config_path_or_dict: str or dict
    :return: Returns validated config object.
    :rtype: :class:`revision.config.Config`
    :raises ConfigNotFound: when no dict is given and no config file exists.
    """
    config = None
    if isinstance(config_path_or_dict, dict):
        # BUGFIX: a dict argument previously could be silently overridden by
        # an existing config file on disk; use the dict and skip file lookup.
        config = Config(config_path_or_dict)
    else:
        if isinstance(config_path_or_dict, string_types):
            if os.path.isabs(config_path_or_dict):
                config_path = config_path_or_dict
            else:
                config_path = os.path.join(
                    os.getcwd(),
                    os.path.normpath(config_path_or_dict)
                )
        else:
            config_path = os.path.join(
                os.getcwd(),
                DEFAULT_CONFIG_PATH
            )
        if os.path.exists(config_path):
            with open(config_path, 'r') as f:
                data = json.load(f)
            config = Config(data)
    if config is None:
        raise ConfigNotFound()
    config.validate()
    return config
def validate(self):
    """
    Check that every client entry carries the required keys, and default
    the revision file path when absent.

    Raises MissingConfigValue for the first required key missing from a client.
    """
    for client in self.clients:
        missing = [key for key in REQUIRED_KEYS if key not in client]
        if missing:
            raise MissingConfigValue(missing[0])
        if 'revision_file' not in client:
            client.revision_file = DEFAULT_REVISION_FILEPATH.format(
                client.key
            )
def wrap(func, with_func):
    r"""Copy the identity (name, docstring, attribute dict) of *with_func*
    onto *func* and return *func*, so decorators keep the wrapped signature.
    """
    for attr in ('__name__', '__doc__'):
        setattr(func, attr, getattr(with_func, attr))
    func.__dict__.update(with_func.__dict__)
    return func
def decorator(func):
    r"""Make the passed decorator support optional arguments.

    The produced wrapper detects whether it was handed the decorated target
    directly or only configuration arguments, deferring in the latter case.
    """
    def wrapper(__decorated__=None, *Args, **KwArgs):
        if __decorated__ is not None:
            return func(__decorated__, *Args, **KwArgs)
        # called as @deco(...): defer until the real target arrives
        return lambda _func: func(_func, *Args, **KwArgs)
    return wrap(wrapper, func)
def task(__decorated__=None, **Config):
    r"""A decorator that registers a function as a task.

    Config:
        * name (str): The name of the task. Defaults to __decorated__.__name__.
        * desc (str): The description of the task (optional).
        * alias (str): The alias for the task (optional).
    """
    if isinstance(__decorated__, tuple):
        # @arg decorators below already bundled (function, [arg configs])
        func, arg_configs = __decorated__
        _Task = Task(func, arg_configs, Config=Config)
    else:
        _Task = Task(__decorated__, [], Config)
    state.ActiveModuleMemberQ.insert(0, _Task)
    return _Task.Underlying
def arg(name=None, **Config):
    r"""A decorator to configure an argument of a task.

    Config:
        * name (str): The name of the arg; when omitted the argument is
          identified by the order of configuration.
        * desc (str): The description of the arg (optional).
        * type (type, CustomType, callable): converter for the arg (optional).

    Notes:
        * It always follows a @task or an @arg.
    """
    if name is not None:
        Config['name'] = name  # allow name to be given positionally
    def _decorate(decorated):
        return _arg(decorated, **Config)
    return _decorate
q44309 | _arg | train | def _arg(__decorated__, **Config):
r"""The worker for the arg decorator.
"""
if isinstance(__decorated__, tuple): # this decorator is followed by another arg decorator
__decorated__[1].insert(0, Config)
return __decorated__
else:
return __decorated__, [Config] | python | {
"resource": ""
} |
def group(__decorated__, **Config):
    r"""A decorator that registers a class as a task group.

    Config:
        * name (str): The name of the group. Defaults to __decorated__.__name__.
        * desc (str): The description of the group (optional).
        * alias (str): The alias for the group (optional).
    """
    wrapped = Group(__decorated__, Config)
    if isclass(__decorated__):
        # make the class's methods static so they can be called like
        # object methods; ie: g1/t1(...).
        static(__decorated__)
    state.ActiveModuleMemberQ.insert(0, wrapped)
    return wrapped.Underlying
def exit_hook(callable, once=True):
    r"""Register *callable* to run while ec exits.

    Args:
        callable (callable): The target callable.
        once (bool): When True (default), skip registration if the hook
            was already added.

    Note:
        Hooks are processed in a LIFO order.
    """
    already_registered = callable in ExitHooks
    if once and already_registered:
        return
    ExitHooks.append(callable)
def member(Imported, **Config):
    r"""Register an imported member (task or group) with the active script.

    Note:
        Config depends upon the Imported; it could be that of a **task**
        or a **group**.
    """
    ec_member = Imported.__ec_member__
    ec_member.Config.update(**Config)
    state.ActiveModuleMemberQ.insert(0, ec_member)
def configure_basic_logging(self, main_module_name, **kwargs):
    '''Use common logging options to configure all logging.
    Basic logging configuration is used to set levels for all logs from the main module and to
    filter out logs from other modules unless they are of one level in priority higher.
    :param main_module_name: name of the primary module for normal logging
    :param kwargs: extra keyword arguments forwarded to logging.basicConfig,
        overriding the derived defaults
    :raises ValueError: if no log_options_parent was configured on this object
    '''
    if not self._log_options_parent:
        raise ValueError('Missing log_options_parent')
    options = self[self._log_options_parent]
    log_level_index = LOG_LEVELS.index(options.log_level)
    log_kwargs = {
        'level': getattr(logging, options.log_level.upper()),
        'format': '[%(asctime)s #%(process)d] %(levelname)-8s %(name)-12s %(message)s',
        'datefmt': '%Y-%m-%dT%H:%M:%S%z',
    }
    # the two special names select a stream; anything else is a file path
    if options.log_file == 'STDERR':
        log_kwargs['stream'] = sys.stderr
    elif options.log_file == 'STDOUT':
        log_kwargs['stream'] = sys.stdout
    else:
        log_kwargs['filename'] = options.log_file
    log_kwargs.update(kwargs)  # allow overrides from caller
    logging.basicConfig(**log_kwargs)
    # now filter out any other module's logging unless it's one level above the main
    # NOTE(review): if options.log_level is the final entry of LOG_LEVELS,
    # log_level_index + 1 raises IndexError -- confirm LOG_LEVELS ordering.
    other_log_level = getattr(logging, LOG_LEVELS[log_level_index + 1].upper())
    other_filter = OtherLoggingFilter(main_module_name, other_log_level)
    for handler in logging.root.handlers:
        handler.addFilter(other_filter)
def save(self, conflict_resolver=choose_mine):
    '''Save all options in memory to the `config_file`.
    Options are read once more from the file (to allow other writers to save configuration),
    keys in conflict are resolved, and the final results are written back to the file.
    :param conflict_resolver: a simple lambda or function to choose when an option key is
        provided from an outside source (THEIRS, usually a file on disk) but is also already
        set on this ConfigStruct (MINE)
    '''
    config = self._load(conflict_resolver)  # in case some other process has added items
    # NOTE(review): file opened in binary mode for config.write() -- looks
    # like Python 2 ConfigParser conventions; confirm under Python 3.
    with open(self._config_file, 'wb') as cf:
        config.write(cf)
def kw_str_parse(a_string):
    """convert a string in the form 'a=b, c=d, e=f' to a dict

    Pairs are extracted with the module-level kw_list_re; values are
    evaluated as Python expressions. A mapping argument is returned
    unchanged; anything unparseable yields an empty dict.
    """
    try:
        # SECURITY: eval() executes arbitrary expressions -- never feed this
        # untrusted input. ast.literal_eval would be safer if only literals
        # are expected; confirm callers before changing.
        return dict((k, eval(v.rstrip(',')))
                    for k, v in kw_list_re.findall(a_string))
    except (AttributeError, TypeError):
        # a_string was not regex-searchable text; pass mappings through
        if isinstance(a_string, collections.Mapping):
            return a_string
        return {}
def is_not_null_predicate(
    raw_crash, dumps, processed_crash, processor, key=''
):
    """Truthiness test for ``raw_crash[key]``; missing keys count as null.

    parameters:
        raw_crash - dict
        dumps - placeholder in a fat interface - unused
        processed_crash - placeholder in a fat interface - unused
        processor - placeholder in a fat interface - unused
    """
    try:
        value = raw_crash[key]
    except KeyError:
        return False
    return bool(value)
def predicate(self, *args, **kwargs):
    """the default predicate for Support Classifiers invokes any derivied
    _predicate function, trapping any exceptions raised in the process. We
    are obligated to catch these exceptions to give subsequent rules the
    opportunity to act. An error during the predicate application is a
    failure of the rule, not a failure of the classification system itself

    Returns False when the derived _predicate raises; the failure is
    logged at debug level with the traceback attached.
    """
    try:
        return self._predicate(*args, **kwargs)
    except Exception, x:  # Python 2 syntax; broad catch is deliberate (see docstring)
        self.config.logger.debug(
            'Rule %s predicicate failed because of "%s"',
            to_str(self.__class__),
            x,
            exc_info=True
        )
        return False
def action(self, *args, **kwargs):
    """the default action for Support Classifiers invokes any derivied
    _action function, trapping any exceptions raised in the process. We
    are obligated to catch these exceptions to give subsequent rules the
    opportunity to act and perhaps mitigate the error. An error during the
    action application is a failure of the rule, not a failure of the
    classification system itself.

    Returns False when the derived _action raises; both except branches
    fall through to the final return.
    """
    try:
        return self._action(*args, **kwargs)
    except KeyError, x:  # Python 2 syntax; missing keys are logged tersely
        self.config.logger.debug(
            'Rule %s action failed because of missing key "%s"',
            to_str(self.__class__),
            x,
        )
    except Exception, x:
        self.config.logger.debug(
            'Rule %s action failed because of "%s"',
            to_str(self.__class__),
            x,
            exc_info=True
        )
    return False
def function_invocation_proxy(fn, proxy_args, proxy_kwargs):
    """Invoke *fn* when it is a function, otherwise coerce it to bool.

    This lets rule tables use the literal True as shorthand for a
    predicate that always succeeds.

    NOTE(review): a callable fn that itself raises TypeError is also
    degraded to bool(fn) here -- confirm that is intended.
    """
    if not callable(fn):
        return bool(fn)
    try:
        return fn(*proxy_args, **proxy_kwargs)
    except TypeError:
        return bool(fn)
def load_rules(self, an_iterable):
    """Replace the rule list with TransformRules built from a collection
    of transform-rule tuples."""
    fresh_rules = []
    for rule_spec in an_iterable:
        fresh_rules.append(TransformRule(*rule_spec, config=self.config))
    self.rules = fresh_rules
def append_rules(self, an_iterable):
    """Append TransformRules built from the given rule tuples to the system."""
    for rule_spec in an_iterable:
        self.rules.append(TransformRule(*rule_spec, config=self.config))
def apply_all_rules(self, *args, **kwargs):
    """cycle through all rules and apply them all without regard to
    success or failure

    returns:
        True - since success or failure is ignored
    """
    for x in self.rules:
        # allow a pending shutdown to interrupt the rule loop
        self._quit_check()
        if self.config.chatty_rules:
            self.config.logger.debug(
                'apply_all_rules: %s',
                to_str(x.__class__)
            )
        predicate_result, action_result = x.act(*args, **kwargs)
        if self.config.chatty_rules:
            self.config.logger.debug(
                '    : pred - %s; act - %s',
                predicate_result,
                action_result
            )
    return True
def get_wordset(poems):
    """Return the sorted list of distinct words across all poems.

    :param poems: iterable of word sequences
    :return: sorted list of unique words; [] for empty input
    """
    from itertools import chain  # local import keeps module-level deps unchanged
    # chain.from_iterable avoids the O(n^2) list concatenation of
    # reduce(lambda x, y: x + y, ...) and, unlike reduce without an
    # initializer, tolerates an empty poems collection.
    return sorted(set(chain.from_iterable(poems)))
def printStack(self,wrappedStack,include=None,filters=["*"]):
    """Pretty-print a wrapped stack: a status line, then an indexed table of
    resources grouped by resource type.

    Arguments:
        wrappedStack -- dict with 'rawStack' and 'resourcesByTypeIndex'
        include -- optional list of resource-type names to show (None = all)
        filters -- glob patterns matched against lowercased logical ids

    NOTE(review): mutable default filters=["*"] is shared across calls;
    harmless while never mutated, but confirm.
    """
    rawStack = wrappedStack['rawStack']
    print "==== Stack {} ====".format(rawStack.name)
    print "Status: {} {}".format(rawStack.stack_status,defaultify(rawStack.stack_status_reason,''))
    for resourceType, resources in wrappedStack['resourcesByTypeIndex'].items():
        # collapse known type names to their shorter aliases
        if resourceType in AwsProcessor.resourceTypeAliases:
            resourceType = AwsProcessor.resourceTypeAliases[resourceType];
        if (None == include or resourceType in include) and len(resources):
            print "== {}:".format(resourceType)
            # first pass: compute column widths (status columns capped at 50)
            logicalIdWidth = 1
            resourceStatusWidth = 1
            resourceStatusReasonWidth = 1
            for index, resource in resources.items():
                logicalIdWidth = max(logicalIdWidth,len(resource.logical_id))
                resourceStatusWidth = min(50,max(resourceStatusWidth,len(resource.resource_status)))
                resourceStatusReasonWidth = min(50,max(resourceStatusReasonWidth,len(defaultify(resource.resource_status_reason,''))))
            frm = "    {{0:3d}}: {{1:{0}}} {{2:{1}}} {{3}}".format(logicalIdWidth,resourceStatusWidth)
            # second pass: print the rows whose logical id matches the filters
            for index, resource in resources.items():
                if fnmatches(resource.logical_id.lower(),filters):
                    print frm.format(index,resource.logical_id,
                                     elipsifyMiddle(repr(resource.resource_status),50),
                                     elipsifyMiddle(repr(defaultify(resource.resource_status_reason,'')),150))
def do_browse(self,args):
    """Open the current stack in a browser."""
    stack_id = self.wrappedStack['rawStack'].stack_id
    url = "https://us-west-2.console.aws.amazon.com/cloudformation/home?region=us-west-2#/stack/detail?stackId={}".format(stack_id)
    # hard-coded to the macOS "open" command, Google Chrome and us-west-2
    os.system("open -a \"Google Chrome\" {}".format(url))
def do_refresh(self,args):
    """Refresh view of the current stack. refresh -h for detailed help"""
    stack_name = self.wrappedStack['rawStack'].name
    raw = AwsConnectionFactory.instance.getCfResource().Stack(stack_name)
    self.wrappedStack = self.wrapStack(raw)
def do_print(self,args):
    """Print the current stack. print -h for detailed help"""
    parser = CommandArgumentParser("print")
    parser.add_argument('-r','--refresh',dest='refresh',action='store_true',help='refresh view of the current stack')
    parser.add_argument('-i','--include',dest='include',default=None,nargs='+',help='resource types to include')
    parser.add_argument(dest='filters',nargs='*',default=["*"],help='Filter stacks')
    parsed = vars(parser.parse_args(args))
    if parsed['refresh']:
        self.do_refresh('')
    self.printStack(self.wrappedStack, parsed['include'], parsed['filters'])
def do_resource(self,args):
    """Go to the specified resource. resource -h for detailed help"""
    parser = CommandArgumentParser("resource")
    parser.add_argument('-i','--logical-id',dest='logical-id',help='logical id of the child resource')
    parsed = vars(parser.parse_args(args))
    current_stack = self.wrappedStack['rawStack'].name
    self.stackResource(current_stack, parsed['logical-id'])
def do_asg(self,args):
    """Go to the specified auto scaling group. asg -h for detailed help

    Accepts either a numeric index into the stack's ASG table or the
    logical resource name.
    """
    parser = CommandArgumentParser("asg")
    parser.add_argument(dest='asg',help='asg index or name');
    args = vars(parser.parse_args(args))
    print "loading auto scaling group {}".format(args['asg'])
    try:
        # numeric argument: treat as an index into the ASG table
        index = int(args['asg'])
        asgSummary = self.wrappedStack['resourcesByTypeIndex']['AWS::AutoScaling::AutoScalingGroup'][index]
    except:
        # NOTE(review): bare except also hides unrelated errors;
        # (ValueError, KeyError) would be safer -- confirm intent.
        # Fallback: look the ASG up by logical name.
        asgSummary = self.wrappedStack['resourcesByTypeName']['AWS::AutoScaling::AutoScalingGroup'][args['asg']]
    self.stackResource(asgSummary.stack_name,asgSummary.logical_id)
def do_eni(self,args):
    """Go to the specified eni. eni -h for detailed help.

    Accepts either a numeric index into the stack's ENI table or the
    logical resource name (used when int() conversion fails).
    """
    parser = CommandArgumentParser("eni")
    parser.add_argument(dest='eni',help='eni index or name');
    args = vars(parser.parse_args(args))
    print "loading eni {}".format(args['eni'])
    try:
        index = int(args['eni'])
        eniSummary = self.wrappedStack['resourcesByTypeIndex']['AWS::EC2::NetworkInterface'][index]
    except ValueError:
        # not an integer: look the ENI up by logical name instead
        eniSummary = self.wrappedStack['resourcesByTypeName']['AWS::EC2::NetworkInterface'][args['eni']]
    pprint(eniSummary)
    self.stackResource(eniSummary.stack_name,eniSummary.logical_id)
def do_logGroup(self,args):
    """Go to the specified log group. logGroup -h for detailed help

    Accepts either a numeric index into the stack's log-group table or
    the logical resource name.
    """
    parser = CommandArgumentParser("logGroup")
    parser.add_argument(dest='logGroup',help='logGroup index or name');
    args = vars(parser.parse_args(args))
    print "loading log group {}".format(args['logGroup'])
    try:
        # numeric argument: index into the log-group table
        index = int(args['logGroup'])
        logGroup = self.wrappedStack['resourcesByTypeIndex']['AWS::Logs::LogGroup'][index]
    except:
        # NOTE(review): bare except also swallows unrelated errors;
        # (ValueError, KeyError) would be safer. Falls back to name lookup.
        logGroup = self.wrappedStack['resourcesByTypeName']['AWS::Logs::LogGroup'][args['logGroup']]
    print "logGroup:{}".format(logGroup)
    self.stackResource(logGroup.stack_name,logGroup.logical_id)
def do_stack(self,args):
    """Go to the specified stack. stack -h for detailed help.

    Accepts either a numeric index into the stack's nested-stack table or
    the logical resource name (used when int() conversion fails).
    """
    parser = CommandArgumentParser("stack")
    parser.add_argument(dest='stack',help='stack index or name');
    args = vars(parser.parse_args(args))
    print "loading stack {}".format(args['stack'])
    try:
        index = int(args['stack'])
        stackSummary = self.wrappedStack['resourcesByTypeIndex']['AWS::CloudFormation::Stack'][index]
    except ValueError:
        # not an integer: look the nested stack up by logical name
        stackSummary = self.wrappedStack['resourcesByTypeName']['AWS::CloudFormation::Stack'][args['stack']]
    self.stackResource(stackSummary.stack_name,stackSummary.logical_id)
def do_template(self,args):
    """Print the template for the current stack. template -h for detailed help"""
    parser = CommandArgumentParser("template")
    args = vars(parser.parse_args(args))
    print "reading template for stack."
    rawStack = self.wrappedStack['rawStack']
    # fetch the template body from CloudFormation and dump it to stdout
    template = AwsConnectionFactory.getCfClient().get_template(StackName=rawStack.name)
    print template['TemplateBody']
def do_copy(self,args):
    """Copy specified id to stack. copy -h for detailed help.

    Gathers the requested stack outputs and/or ASG physical ids and
    places them, newline-joined, on the system clipboard.
    """
    parser = CommandArgumentParser("copy")
    parser.add_argument('-a','--asg',dest='asg',nargs='+',required=False,default=[],help='Copy specified ASG info.')
    parser.add_argument('-o','--output',dest='output',nargs='+',required=False,default=[],help='Copy specified output info.')
    args = vars(parser.parse_args(args))
    values = []
    if args['output']:
        values.extend(self.getOutputs(args['output']))
    for asg in args['asg']:
        try:
            # numeric argument: index into the ASG table
            index = int(asg)
            asgSummary = self.wrappedStack['resourcesByTypeIndex']['AWS::AutoScaling::AutoScalingGroup'][index]
        except (ValueError, KeyError):
            # BUGFIX: was a bare except, which also swallowed unrelated
            # errors (including KeyboardInterrupt); only a failed int()
            # conversion or a missing index should trigger name lookup.
            asgSummary = self.wrappedStack['resourcesByTypeName']['AWS::AutoScaling::AutoScalingGroup'][asg]
        values.append(asgSummary.physical_resource_id)
    print("values:{}".format(values))
    pyperclip.copy("\n".join(values))
def do_parameter(self,args):
    """Print a parameter

    Accepts either a numeric index into the stack's parameter table or
    the parameter's name (used when int() conversion fails).
    """
    parser = CommandArgumentParser("parameter")
    parser.add_argument(dest="id",help="Parameter to print")
    args = vars(parser.parse_args(args))
    print "printing parameter {}".format(args['id'])
    try:
        index = int(args['id'])
        # NOTE(review): the index lookup goes through resourcesByTypeName,
        # unlike do_asg/do_eni which use resourcesByTypeIndex -- confirm
        # this is intended for parameters.
        parameter = self.wrappedStack['resourcesByTypeName']['parameters'][index]
    except ValueError:
        parameter = self.wrappedStack['resourcesByTypeName']['parameters'][args['id']]
    print(parameter.resource_status)
def commit(self, msg):
    """
    Commit outstanding data changes with the given message.
    """
    self.logger.info('Commit config: {}'.format(msg))
    # NOTE(review): msg is interpolated into a shell command unescaped;
    # quotes in msg would break (or inject into) the git invocation.
    with Dir(self.data_path):
        for command in ('git add .', 'git commit --allow-empty -m "{}"'.format(msg)):
            self.cmd.check_assert(command)
def push(self):
    """
    Push committed data changes back to the remote data repo.

    Will of course fail if the user does not have write access.
    """
    self.logger.info('Pushing config...')
    push_command = 'git push'
    with Dir(self.data_path):
        self.cmd.check_assert(push_command)
def prompt_for_new_password():
    """ Prompt the user to enter a new password, with confirmation """
    # loop until the two blind entries match; only then return the password
    while True:
        passw = getpass.getpass()
        passw2 = getpass.getpass()
        if passw == passw2: return passw
        print 'Passwords do not match'
def unicode2Date(value, format=None):
    """
    CONVERT UNICODE STRING TO UNIX TIMESTAMP VALUE

    :param value: date/time text, or None (returned as None)
    :param format: optional strptime format; when given, parsing is strict
        and failure goes through Log.error
    :return: Date object (via _unix2Date)
    """
    # http://docs.python.org/2/library/datetime.html#strftime-and-strptime-behavior
    if value == None:
        return None
    if format != None:
        try:
            # pad missing fractional seconds so %S.%f formats still match
            if format.endswith("%S.%f") and "." not in value:
                value += ".000"
            return _unix2Date(datetime2unix(datetime.strptime(value, format)))
        except Exception as e:
            from mo_logs import Log
            Log.error("Can not format {{value}} with {{format}}", value=value, format=format, cause=e)
    value = value.strip()
    # relative keywords resolve against the current UTC time
    if value.lower() == "now":
        return _unix2Date(datetime2unix(_utcnow()))
    elif value.lower() == "today":
        # midnight (00:00 UTC) of the current day
        return _unix2Date(math.floor(datetime2unix(_utcnow()) / 86400) * 86400)
    elif value.lower() in ["eod", "tomorrow"]:
        # midnight of the following day
        return _unix2Date(math.floor(datetime2unix(_utcnow()) / 86400) * 86400 + 86400)
    # arithmetic expressions (eg "now-2day") go to the expression parser
    if any(value.lower().find(n) >= 0 for n in ["now", "today", "eod", "tomorrow"] + list(MILLI_VALUES.keys())):
        return parse_time_expression(value)
    try:  # 2.7 DOES NOT SUPPORT %z
        local_value = parse_date(value)  # eg 2014-07-16 10:57 +0200
        return _unix2Date(datetime2unix((local_value - local_value.utcoffset()).replace(tzinfo=None)))
    except Exception as e:
        e = Except.wrap(e)  # FOR DEBUGGING
        pass
    # common ISO-ish formats, tried strictly
    formats = [
        "%Y-%m-%dT%H:%M:%S",
        "%Y-%m-%dT%H:%M:%S.%f"
    ]
    for f in formats:
        try:
            return _unix2Date(datetime2unix(datetime.strptime(value, f)))
        except Exception:
            pass
    # last resort: strip punctuation (deformat) and try compact patterns
    deformats = [
        "%Y-%m",  # eg 2014-07-16 10:57 +0200
        "%Y%m%d",
        "%d%m%Y",
        "%d%m%y",
        "%d%b%Y",
        "%d%b%y",
        "%d%B%Y",
        "%d%B%y",
        "%Y%m%d%H%M%S",
        "%Y%m%dT%H%M%S",
        "%d%m%Y%H%M%S",
        "%d%m%y%H%M%S",
        "%d%b%Y%H%M%S",
        "%d%b%y%H%M%S",
        "%d%B%Y%H%M%S",
        "%d%B%y%H%M%S"
    ]
    value = deformat(value)
    for f in deformats:
        try:
            # recursive call with a strict format; failure raises via Log.error
            return unicode2Date(value, format=f)
        except Exception:
            pass
    else:
        # for/else: runs when no deformat matched (loop never breaks)
        from mo_logs import Log
        Log.error("Can not interpret {{value}} as a datetime", value=value)
def listIDs(basedir):
    """List digital object identifiers of a Pairtree directory structure.

    Reads the optional pairtree_prefix file, walks pairtree_root for
    objects, and prints each prefixed, de-sanitized identifier to
    standard output.
    """
    prefix = ''
    prefixfile = os.path.join(basedir, 'pairtree_prefix')
    if os.path.isfile(prefixfile):
        with open(prefixfile, 'r') as handle:
            prefix = handle.readline().strip()
    root = os.path.join(basedir, 'pairtree_root')
    if not os.path.isdir(root):
        print('pairtree_root directory not found')
        return
    for obj in pairtree.findObjects(root):
        doi = os.path.split(obj)[1]
        # print with prefix and original chars restored
        print(prefix + pairtree.deSanitizeString(doi))
def _set_conn(self):
    """Establish the LDAP(S) connection to the server and store it on self.

    Binds with the configured DN/password; on failure, logs the LDAP
    error description at critical level and re-raises.
    """
    if self._tls:
        # skip certificate verification for TLS connections
        ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_NEVER)
    try:
        conn = ldap.initialize(self._url)
        conn.set_option(ldap.OPT_NETWORK_TIMEOUT, self._timeout)
        conn.simple_bind_s(self._binddn, self._bindpw)
    except Exception as e:
        # python-ldap 2.x exposes e.message['desc']; newer versions use
        # e.args[0]['desc'] -- handle both (presumed; confirm versions)
        if hasattr(e, 'message') and 'desc' in e.message:
            msg = e.message['desc']
        else:
            msg = e.args[0]['desc']
        log.critical(msg)
        raise
    log.debug('%s connection established' % ('LDAPS' if self._tls else 'LDAP'))
    self._conn = conn
def _get_ldap_msg(e):
    """Extract a human-readable message from an LDAP exception.

    Prefers e.message['desc'] (python-ldap 2.x style), falling back to
    e.args[0]['desc'], else returns the exception object itself.
    """
    msg = e
    if hasattr(e, 'message'):
        msg = e.message
        if 'desc' in e.message:
            msg = e.message['desc']
    elif hasattr(e, 'args'):
        # NOTE(review): nesting reconstructed from a flattened dump -- the
        # elif is paired with the outer hasattr('message') check; confirm
        # against upstream if behavior looks off.
        msg = e.args[0]['desc']
    return msg
def _search(self, base, fltr, attrs=None, scope=ldap.SCOPE_SUBTREE):
    """Run an LDAP search; return the result list, or False on error
    (the exception is logged with traceback)."""
    try:
        return self._conn.search_s(base, scope, fltr, attrs)
    except Exception as e:
        log.exception(self._get_ldap_msg(e))
        return False
def _set_fqdn(self):
    """Read the server FQDN (nsslapd-localhost) from cn=config and cache it.

    Sets self._fqdn to None when the search failed or returned nothing.
    """
    results = self._search(
        'cn=config',
        '(objectClass=*)',
        ['nsslapd-localhost'],
        scope=ldap.SCOPE_BASE
    )
    # BUGFIX: mirror the guard used in _set_base_dn. The previous check
    # ("if not results and type(results) is not list") let an empty list
    # fall through to results[0] and raise IndexError.
    if results and type(results) is list:
        dn, attrs = results[0]
        r = attrs['nsslapd-localhost'][0].decode('utf-8')
    else:
        r = None
    self._fqdn = r
    log.debug('FQDN: %s' % self._fqdn)
def _set_hostname_domain(self):
    """Split the cached FQDN into hostname and domain parts."""
    fqdn_text = str(self._fqdn)
    self._hostname, _, self._domain = fqdn_text.partition('.')
    log.debug('Hostname: %s, Domain: %s' % (self._hostname, self._domain))
def _set_ip(self):
    """Resolve the cached FQDN to an IPv4 address and store it."""
    resolved = socket.gethostbyname(self._fqdn)
    self._ip = resolved
    log.debug('IP: %s' % resolved)
def _set_base_dn(self):
    """Fetch the default naming context and derive the user/group base DNs.

    Raises a bare Exception when the cn=config lookup fails.
    """
    results = self._search(
        'cn=config',
        '(objectClass=*)',
        ['nsslapd-defaultnamingcontext'],
        scope=ldap.SCOPE_BASE
    )
    if not (results and type(results) is list):
        raise Exception
    dn, attrs = results[0]
    base = attrs['nsslapd-defaultnamingcontext'][0].decode('utf-8')
    self._base_dn = base
    self._active_user_base = 'cn=users,cn=accounts,' + base
    self._stage_user_base = 'cn=staged users,cn=accounts,cn=provisioning,' + base
    self._preserved_user_base = 'cn=deleted users,cn=accounts,cn=provisioning,' + base
    self._groups_base = 'cn=groups,cn=accounts,' + base
    log.debug('Base DN: %s' % self._base_dn)
def users(self, user_base='active'):
    """Return the cached dict of users for *user_base* ('active', 'stage'
    or 'preserved'), loading it from LDAP on first use."""
    cache_attr = '_%s_users' % user_base
    if not getattr(self, cache_attr):
        self._get_users(user_base)
    return getattr(self, cache_attr)
def _get_users(self, user_base):
    """Load all users for *user_base* from LDAP into the matching cache dict."""
    search_base = getattr(self, '_%s_user_base' % user_base)
    cache = getattr(self, '_%s_users' % user_base)
    results = self._search(
        search_base,
        '(objectClass=*)',
        ['*'],
        scope=ldap.SCOPE_ONELEVEL
    )
    for dn, attrs in results:
        uid = attrs.get('uid')[0].decode('utf-8', 'ignore')
        cache[uid] = FreeIPAUser(dn, attrs)
    log.debug('%s users: %s' % (user_base.capitalize(), len(cache)))
def find_users_by_email(self, email, user_base='active'):
    """Return the list of users in *user_base* whose mail attribute
    contains *email*."""
    matches = [
        user for user in self.users(user_base).values()
        if user.mail and email in user.mail
    ]
    log.debug('%s users with email address %s: %s' % (user_base.capitalize(), email, len(matches)))
    return matches
def expand(data):
    '''Generate configuration sets from YAML input.

    The YAML document maps variable names to values. Every key bound to a
    list (unless its name starts with ``_``) is treated as an axis to
    iterate over; keys bound to scalars, mappings, or underscore-prefixed
    names are carried into every generated set unchanged. One dictionary
    is yielded per element of the cartesian product of all axes.

    For example:

    .. code-block:: yaml

       name: [john, lisa]
       version: [v1, v2]

    yields the four ``name`` x ``version`` combinations, each also
    carrying any non-list keys verbatim.

    Parameters:
      data (str): YAML data to be parsed

    Yields:
      dict: A dictionary of key-value pairs for building the templates
    '''
    data = _ordered_load(data, yaml.SafeLoader)

    def _is_axis(key, value):
        # lists iterate, unless the key is marked "unique" with a leading _
        return isinstance(value, list) and not key.startswith('_')

    iterables = dict((k, v) for k, v in data.items() if _is_axis(k, v))
    unique = dict((k, v) for k, v in data.items() if not _is_axis(k, v))

    axis_names = list(iterables.keys())
    for combination in itertools.product(*iterables.values()):
        configset = dict(unique)
        configset.update(zip(axis_names, combination))
        yield configset
def generate(variables, template):
    '''Yield one rendered template per configuration set.

    Parameters:
      variables (str): YAML stream of variables, in the format understood
        by :py:func:`expand`.
      template (str): Jinja2 template stream to extrapolate.

    Yields:
      str: A generated template you can save

    Raises:
      jinja2.UndefinedError: if a variable used in the template is undefined
    '''
    environment = jinja2.Environment(undefined=jinja2.StrictUndefined)
    compiled = environment.from_string(template)  # template text is loop-invariant
    for configset in expand(variables):
        configset['rc'] = rc
        yield compiled.render(configset)
q44353 | I2B2CoreWithUploadId._nested_fcn | train | def _nested_fcn(f: Callable, filters: List):
""" Distribute binary function f across list L
:param f: Binary function
:param filters: function arguments
:return: chain of binary filters
"""
return None if len(filters) == 0 \
else filters[0] if len(filters) == 1 \
else f(filters[0], I2B2CoreWithUploadId._nested_fcn(f, filters[1:])) | python | {
"resource": ""
} |
def _add_or_update_records(cls, conn: Connection, table: Table,
                           records: List["I2B2CoreWithUploadId"]) -> Tuple[int, int]:
    """Add or update the supplied table as needed to reflect the contents of records

    :param conn: i2b2 sql connection
    :param table: table to apply the records to
    :param records: records to apply
    :return: (number of records inserted, number of records updated)
    """
    num_updates = 0
    num_inserts = 0
    inserts = []
    # Iterate over the records doing updates
    # Note: This is slow as molasses - definitely not optimal for batch work, but hopefully we'll be dealing with
    # thousands to tens of thousands of records. May want to move to ORM model if this gets to be an issue
    for record in records:
        # "key1 == v1 AND key2 == v2 ..." filter locating an existing row
        keys = [(table.c[k] == getattr(record, k)) for k in cls.key_fields]
        key_filter = I2B2CoreWithUploadId._nested_fcn(and_, keys)
        rec_exists = conn.execute(select([table.c.upload_id]).where(key_filter)).rowcount
        if rec_exists:
            # Non-key, non-protected columns that carry a value
            known_values = {k: v for k, v in as_dict(record).items()
                            if v is not None and k not in cls._no_update_fields and
                            k not in cls.key_fields}
            # Only rewrite rows where at least one of those columns differs
            vals = [table.c[k] != v for k, v in known_values.items()]
            val_filter = I2B2CoreWithUploadId._nested_fcn(or_, vals)
            known_values['update_date'] = record.update_date
            upd = update(table).where(and_(key_filter, val_filter)).values(known_values)
            num_updates += conn.execute(upd).rowcount
        else:
            inserts.append(as_dict(record))
    if inserts:
        if cls._check_dups:
            dups = cls._check_for_dups(inserts)
            nprints = 0
            if dups:
                print("{} duplicate records encountered".format(len(dups)))
                for k, vals in dups.items():
                    if len(vals) == 2 and vals[0] == vals[1]:
                        # Identical duplicate pair -- drop one copy
                        inserts.remove(vals[1])
                    else:
                        # Non-identical dups: report (capped at 20 messages)
                        # and keep only the first occurrence
                        if nprints < 20:
                            print("Key: {} has a non-identical dup".format(k))
                        elif nprints == 20:
                            print(".... more ...")
                        nprints += 1
                        for v in vals[1:]:
                            inserts.remove(v)
        # TODO: refactor this to load on a per-resource basis. Temporary fix
        for insert in ListChunker(inserts, 500):
            num_inserts += conn.execute(table.insert(), insert).rowcount
    return num_inserts, num_updates
"resource": ""
} |
def equality(self, other):
    """Two groups are equal when their lengths match and all items match."""
    return len(self) == len(other) and super().equality(other)
"resource": ""
} |
def similarity(self, other):
    """Calculate similarity based on best matching permutation of items.

    Compares every permutation of the longer group's items against the
    shorter group and keeps the highest score.  NOTE: cost grows
    factorially with the number of items.
    """
    # Select the longer list as the basis for comparison
    if len(self.items) > len(other.items):
        first, second = self, other
    else:
        first, second = other, self
    items = list(first.items)  # backup items list (restored below)
    length = len(items)
    # Empty groups are trivially identical (similarity 1.0)
    sim = self.Similarity(0.0 if length else 1.0)
    # Calculate the similarity for each permutation of items
    cname = self.__class__.__name__
    for num, perm in enumerate(permutations(items, length), start=1):
        # temporarily substitute the permuted ordering on `first`
        first.items = perm
        aname = 'items-p{}'.format(num)
        self.log(first, second, '%', cname=cname, aname=aname)
        permutation_sim = super(Group, first).similarity(second)
        self.log(first, second, '%', cname=cname, aname=aname,
                 result=permutation_sim)
        sim = max(sim, permutation_sim)
        logging.debug("highest similarity: %s", sim)
    first.items = items  # restore original items list
    return sim
"resource": ""
} |
async def peers(client: Client, leaves: bool = False, leaf: str = "") -> dict:
    """
    GET peering entries of every node inside the currency network

    :param client: Client to connect to the api
    :param leaves: Request the merkle tree leaves when True
    :param leaf: Hash of a single leaf to request
    :return:
    """
    if leaves is True:
        params = {"leaves": "true"}
    else:
        params = {"leaf": leaf}
    return await client.get(MODULE + '/peering/peers', params, schema=PEERS_SCHEMA)
"resource": ""
} |
async def peer(client: Client, peer_signed_raw: str) -> ClientResponse:
    """
    POST a Peer signed raw document

    :param client: Client to connect to the api
    :param peer_signed_raw: Peer signed raw document
    :return:
    """
    payload = {'peer': peer_signed_raw}
    return await client.post(MODULE + '/peering/peers', payload, rtype=RESPONSE_AIOHTTP)
"resource": ""
} |
def check_optical(disk):
    ''' Try to determine if a device is optical technology.
        Needs improvement.
    '''
    name = disk.dev
    if name.startswith('sr') or 'cd' in name:
        return True
    if disk.fmt in optical_fs:
        return True
    return None
"resource": ""
} |
def get_meminfo(opts):
    ''' Returns a dictionary holding the current memory info,
        divided by the output unit.  If mem info can't be read, returns None.
    '''
    meminfo = MemInfo()
    outunit = opts.outunit
    try:
        with open(memfname) as infile:
            lines = infile.readlines()
    except IOError:
        return None

    # bytes-per-unit for the unit tokens seen in /proc/meminfo (usually 'kB')
    multipliers = {'b': 1, 'kb': 1024, 'mb': 1024 ** 2, 'gb': 1024 ** 3}
    for line in lines:                  # format: 'MemTotal:  511456 kB\n'
        tokens = line.split()
        # Skip blank lines and unit-less entries; this also fixes the old
        # IndexError when a line had only a single token.
        if len(tokens) < 3:
            continue
        name = tokens[0][:-1].lower()   # strip trailing ':'
        unit = tokens[2].lower()
        # Unknown units fall back to the raw value, matching old behavior.
        value = int(tokens[1]) * multipliers.get(unit, 1)
        setattr(meminfo, name, value / outunit)

    cache = meminfo.cached + meminfo.buffers
    meminfo.used = meminfo.memtotal - meminfo.memfree - cache
    meminfo.swapused = (meminfo.swaptotal - meminfo.swapcached -
                        meminfo.swapfree)
    return meminfo
"resource": ""
} |
def timeit(func):
    """
    Simple decorator to time functions

    :param func: function to decorate
    :type func: callable
    :return: wrapped function
    :rtype: callable
    """
    @wraps(func)
    def timed(*args, **kwargs):
        started = time.time()
        result = func(*args, **kwargs)
        elapsed = time.time() - started
        LOGGER.info('%s took %s seconds to complete', func.__name__, round(elapsed, 2))
        return result
    return timed
"resource": ""
} |
def setConnStringForWindows():
    """ Set Conn String for Windows

    Windows has a different way of forking processes, which causes the
    @worker_process_init.connect signal not to work in "CeleryDbConnInit"

    Reads the SQLAlchemy connection string from the worker's file config and
    caches it in the module-level ``_dbConnectString``.
    """
    global _dbConnectString

    # Deferred imports: peek_platform must be configured before these are
    # importable at module load time.
    from peek_platform.file_config.PeekFileConfigABC import PeekFileConfigABC
    from peek_platform.file_config.PeekFileConfigSqlAlchemyMixin import \
        PeekFileConfigSqlAlchemyMixin

    from peek_platform import PeekPlatformConfig

    # Minimal config class combining the base file config with the
    # SQLAlchemy settings mixin.
    class _WorkerTaskConfigMixin(PeekFileConfigABC,
                                 PeekFileConfigSqlAlchemyMixin):
        pass

    # The component name selects which service's config file gets loaded.
    PeekPlatformConfig.componentName = peekWorkerName

    _dbConnectString = _WorkerTaskConfigMixin().dbConnectString
"resource": ""
} |
def install_gem(gemname, version=None, conservative=True, ri=False, rdoc=False,
                development=False, format_executable=False, force=False,
                gem_source=None):
    """Install a ruby gem.

    :param gemname: name of the gem to install
    :param version: optional exact version to install
    :raises error.ButcherError: when the gem command cannot be run or fails
    """
    cmd = ['gem', 'install']
    if conservative:
        cmd.append('--conservative')
    cmd.append('--ri' if ri else '--no-ri')
    cmd.append('--rdoc' if rdoc else '--no-rdoc')
    # Simple on/off flags, appended in the documented order.
    for enabled, flag in ((development, '--development'),
                          (format_executable, '--format-executable'),
                          (force, '--force')):
        if enabled:
            cmd.append(flag)
    if version:
        cmd.extend(['--version', version])
    cmd.extend(['--clear-sources',
                '--source', gem_source or RubyGems().gem_source])
    cmd.append(gemname)

    msg = 'Installing ruby gem: %s' % gemname
    if version:
        msg += ' Version requested: %s' % version
    log.debug(msg)

    try:
        subprocess.check_output(cmd, shell=False)
    except (OSError, subprocess.CalledProcessError) as err:
        raise error.ButcherError(
            'Gem install failed. Error was: %s. Output: %s' % (
                err, err.output))
"resource": ""
} |
def is_installed(gemname, version=None):
    """Check if a gem is installed.

    :param gemname: name of the gem
    :param version: optional exact version to check for
    :returns: True when installed, False when not
    :raises error.ButcherError: when the gem command is missing or fails for
        any reason other than "not installed" (exit status 1)
    """
    cmdline = ['gem', 'list', '-i', gemname]
    if version:
        cmdline.extend(['-v', version])
    try:
        subprocess.check_output(cmdline, shell=False)
        return True
    except subprocess.CalledProcessError as err:
        # `gem list -i` exits 1 when the gem is simply not installed.
        if err.returncode == 1:
            return False
        raise error.ButcherError(
            'Failure running gem. Error was: %s. Output: %s' % (err, err.output))
    except OSError as err:
        # The gem binary is missing or not executable.  OSError has neither
        # `returncode` nor `output`, so the old combined handler raised
        # AttributeError here instead of a useful ButcherError.
        raise error.ButcherError('Failure running gem. Error was: %s' % err)
"resource": ""
} |
def merge_links_from(self, other_node, merge_same_value_targets=False):
    """
    Merge links from another node with ``self.link_list``.

    Every link of ``other_node`` is copied over; when a copied link points
    at a target this node already links to, the weights are summed instead
    of adding a second link.

    Args:
        other_node (Node): The node to merge links from
        merge_same_value_targets (bool): If True, links are considered the
            same when their targets have equal ``value``; if False, only
            when they point at the identical target node.

    Returns: None
    """
    for incoming in other_node.link_list:
        merged = False
        for existing in self.link_list:
            if merge_same_value_targets:
                match = incoming.target.value == existing.target.value
            else:
                match = incoming.target == existing.target
            if match:
                existing.weight += incoming.weight
                merged = True
                break
        if not merged:
            self.add_link(incoming.target, incoming.weight)
"resource": ""
} |
def find_link(self, target_node):
    """
    Find the link that points to ``target_node`` if it exists.

    If no link in ``self`` points to ``target_node``, return None

    Args:
        target_node (Node): The node to look for in ``self.link_list``

    Returns:
        Link: An existing link pointing to ``target_node`` if found
        None: If no such link exists
    """
    # `next` with a default replaces the try/except StopIteration dance.
    return next((link for link in self.link_list if link.target == target_node),
                None)
"resource": ""
} |
def add_link_to_self(self, source, weight):
    """
    Create and add a ``Link`` from a source node (or nodes) to ``self``.

    Args:
        source (Node or list[Node]): The node(s) that will own the new
            ``Link`` pointing to ``self``
        weight (int or float): The weight of the newly created ``Link``

    Returns: None
    """
    # Generalize source to a list to simplify the loop below
    sources = source if isinstance(source, list) else [source]
    for source_node in sources:
        source_node.add_link(self, weight=weight)
"resource": ""
} |
def add_reciprocal_link(self, target, weight):
    """
    Add links of equal weight in both directions between ``self`` and
    ``target`` (or each node in ``target`` when it is a list).

    Args:
        target (Node or list[Node]): node(s) to link with
        weight (int or float): weight for both new links

    Returns: None
    """
    targets = target if isinstance(target, list) else [target]
    for node in targets:
        self.add_link(node, weight)
        node.add_link(self, weight)
"resource": ""
} |
def remove_links_to_self(self):
    """
    Remove every link in ``self.link_list`` whose ``target`` is ``self``.

    Returns: None
    """
    self.link_list = list(
        filter(lambda link: link.target != self, self.link_list))
"resource": ""
} |
def compile(self, source_code, post_treatment=''.join):
    """Compile given source code.

    Return object code, modified by given post treatment
    (returned raw when post_treatment is None).
    """
    # read structure, then the values the structure references
    structure = self._structure(source_code)
    values = self._struct_to_values(structure, source_code)
    # translate into the targeted language
    obj_code = langspec.translated(structure, values, self.target_lang_spec)
    if post_treatment is None:
        return obj_code
    return post_treatment(obj_code)
"resource": ""
} |
def _initialize_tables(self):
    """Create tables for structure and values, word->vocabulary"""
    # structure table: identificator string -> structure vocabulary entry
    self.table_struct, self.idnt_struct_size = self._create_struct_table()
    # values table: per lexem type, identificator string -> value entry
    self.table_values, self.idnt_values_size = self._create_values_table()
"resource": ""
} |
q44372 | Compiler._structure | train | def _structure(self, source_code):
"""return structure in ACDP format."""
# define cutter as a per block reader
def cutter(seq, block_size):
for index in range(0, len(seq), block_size):
lexem = seq[index:index+block_size]
if len(lexem) == block_size:
yield self.table_struct[seq[index:index+block_size]]
return tuple(cutter(source_code, self.idnt_struct_size)) | python | {
"resource": ""
} |
def _next_lexem(self, lexem_type, source_code, source_code_size):
    """Return next readable lexem of given type in source_code.
    If no value can be found, the neutral_value will be used"""
    # define reader as a lexem extractor
    def reader(seq, block_size):
        # NOTE(review): the parameters are shadowed -- this generator
        # actually iterates the enclosing `source_code` and reads the
        # block size from `self.idnt_values_size[lexem_type]`.
        identificator = ''
        for char in source_code:
            # yield happens lazily: only once a full identificator has
            # been accumulated AND the next char arrives
            if len(identificator) == self.idnt_values_size[lexem_type]:
                yield self.table_values[lexem_type][identificator]
                identificator = ''
            identificator += char
    lexem_reader = reader(source_code, self.idnt_values_size)
    lexem = None
    time_out = 0
    # keep pulling until a non-None lexem appears, giving up after
    # scanning at most twice the source length
    # NOTE(review): `== None` should arguably be `is None`
    while lexem == None and time_out < 2*source_code_size:
        lexem = next(lexem_reader)
        time_out += 1
    # here we have found a lexem
    return lexem
"resource": ""
} |
def _next_condition_lexems(self, source_code, source_code_size):
    """Return condition lexem readed in source_code,
    or None when any of the three parts is missing."""
    # a condition is "<comparison> <operator> <comparison>"
    parts = (
        self._next_lexem(LEXEM_TYPE_COMPARISON, source_code, source_code_size),
        self._next_lexem(LEXEM_TYPE_OPERATOR, source_code, source_code_size),
        self._next_lexem(LEXEM_TYPE_COMPARISON, source_code, source_code_size),
    )
    if None in parts:
        # one of the condition lexems was not found in source code
        return None
    return ' '.join(parts)
"resource": ""
} |
q44375 | Compiler._string_to_int | train | def _string_to_int(self, s):
"""Read an integer in s, in Little Indian. """
base = len(self.alphabet)
return sum((self._letter_to_int(l) * base**lsb
for lsb, l in enumerate(s)
)) | python | {
"resource": ""
} |
def _struct_to_values(self, structure, source_code):
    """Return list of values readed in source_code,
    according to given structure.
    """
    # iterate on source code until all values are finded
    # if a value is not foundable,
    # (ie its identificator is not in source code)
    # it will be replaced by associated neutral value
    iter_source_code = itertools.cycle(source_code)
    values = []
    # 'D' entries are structure-only and carry no value.
    # (BUG FIX: the old identity test `l is not 'D'` relied on string
    # interning, which is an implementation accident; use `!=`.)
    for lexem_type in (l for l in structure if l != 'D'):
        if lexem_type is LEXEM_TYPE_CONDITION:
            new_value = self._next_condition_lexems(
                iter_source_code, len(source_code)
            )
        else:
            new_value = self._next_lexem(
                lexem_type, iter_source_code, len(source_code)
            )
        # if values is unvalid:
        # association with the right neutral value
        if new_value is None:
            if lexem_type in (LEXEM_TYPE_PREDICAT, LEXEM_TYPE_CONDITION):
                new_value = self.neutral_value_condition
            else:
                new_value = self.neutral_value_action
        values.append(new_value)
    return values
"resource": ""
} |
q44377 | Compiler._create_struct_table | train | def _create_struct_table(self):
"""Create table identificator->vocabulary,
and return it with size of an identificator"""
len_alph = len(self.alphabet)
len_vocb = len(self.voc_structure)
identificator_size = ceil(log(len_vocb, len_alph))
# create list of lexems
num2alph = lambda x, n: self.alphabet[(x // len_alph**n) % len_alph]
identificators = [[str(num2alph(x, n))
for n in range(identificator_size)
]
for x in range(len_vocb)
]
# initialize table and iterable
identificators_table = {}
zip_id_voc = zip_longest(
identificators, self.voc_structure,
fillvalue=None
)
# create dict identificator:word
for idt, word in zip_id_voc:
identificators_table[''.join(idt)] = word
return identificators_table, identificator_size | python | {
"resource": ""
} |
def check_arg_types(funcname, *args):
    """Raise TypeError if not all items of `args` are same string type."""
    kinds = set()
    for arg in args:
        if isinstance(arg, str):
            kinds.add(str)
        elif isinstance(arg, bytes):
            kinds.add(bytes)
        else:
            raise TypeError('{0}() argument must be str or bytes, not {1}'
                            .format(funcname, arg.__class__.__name__))
    if len(kinds) > 1:
        raise TypeError("Can't mix strings and bytes in path components")
"resource": ""
} |
def posix_commonpath(paths):
    """Given a sequence of POSIX path names,
    return the longest common sub-path."""
    if not paths:
        raise ValueError('commonpath() arg is an empty sequence')
    check_arg_types('commonpath', *paths)
    if isinstance(paths[0], bytes):
        sep, curdir = b'/', b'.'
    else:
        sep, curdir = '/', '.'
    split_paths = [path.split(sep) for path in paths]
    try:
        # the set collapses to one element only when all paths agree on
        # absolute/relative; unpacking raises ValueError otherwise
        isabs, = {p[:1] == sep for p in paths}
    except ValueError:
        raise ValueError("Can't mix absolute and relative paths")
    # drop empty components and '.' before comparing
    split_paths = [[c for c in s if c and c != curdir] for s in split_paths]
    # lexicographic min/max bracket all paths: a prefix common to both
    # extremes is common to every path
    s_min, s_max = min(split_paths), max(split_paths)
    common = s_min
    for i, component in enumerate(s_min):
        if component != s_max[i]:
            common = s_min[:i]
            break
    prefix = sep if isabs else sep[:0]
    return prefix + sep.join(common)
"resource": ""
} |
def nt_commonpath(paths):  # pylint: disable=too-many-locals
    """Given a sequence of NT path names,
    return the longest common sub-path."""
    from ntpath import splitdrive
    if not paths:
        raise ValueError('commonpath() arg is an empty sequence')
    check_arg_types('commonpath', *paths)
    if isinstance(paths[0], bytes):
        sep = b'\\'
        altsep = b'/'
        curdir = b'.'
    else:
        sep = '\\'
        altsep = '/'
        curdir = '.'
    # lower-cased, separator-normalized (drive, path) pairs for comparison
    drivesplits = [splitdrive(p.replace(altsep, sep).lower()) for p in paths]
    split_paths = [p.split(sep) for d, p in drivesplits]
    try:
        # collapses to one element only when all paths agree on abs/rel;
        # unpacking raises ValueError otherwise
        isabs, = set(p[:1] == sep for d, p in drivesplits)
    except ValueError:
        raise ValueError("Can't mix absolute and relative paths")
    # Check that all drive letters or UNC paths match. The check is made
    # only now otherwise type errors for mixing strings and bytes would not
    # be caught.
    if len(set(d for d, p in drivesplits)) != 1:
        raise ValueError("Paths don't have the same drive")
    # keep the original (non-lowercased) spelling of the first path for output
    drive, path = splitdrive(paths[0].replace(altsep, sep))
    common = path.split(sep)
    common = [c for c in common if c and c != curdir]
    split_paths = [[c for c in s if c and c != curdir] for s in split_paths]
    # lexicographic min/max bracket all paths: a prefix common to both
    # extremes is common to every path
    s_min = min(split_paths)
    s_max = max(split_paths)
    for i, run_c in enumerate(s_min):
        if run_c != s_max[i]:
            common = common[:i]
            break
    else:
        common = common[:len(s_min)]
    prefix = drive + sep if isabs else drive
    return prefix + sep.join(common)
"resource": ""
} |
def catches(exc, handler=re_raise):
    '''
    Function decorator. Used to decorate function that handles exception class exc.
    An optional exception handler can be passed as a second argument. This exception
    handler shall have the signature

        handler(exc, message, traceback).

    NOTE(review): this is Python 2-only code (comma form of `except`,
    three-argument `raise`); it will not compile under Python 3.
    '''
    # When checking is disabled the decorator is a no-op.
    if not __CHECKING__:
        return lambda f: f
    def wrap(f):
        def call(*args, **kwd):
            try:
                # register interest in exc for the duration of the call
                ID = exc_checker.set_attention(exc)
                res = f(*args, **kwd)
                exc_checker.remove_attention(exc, ID)
                return res
            # handle checked exception
            except exc, e:
                exc_checker.remove_attention(exc, ID)
                traceback = sys.exc_info()[2]
                # tb_next.tb_next strips the decorator frames from the report
                return handler(exc, str(e), traceback.tb_next.tb_next)
            # re-raise unchecked exception but remove checked exeption info first
            except Exception, e:
                exc_checker.remove_attention(exc, ID)
                traceback = sys.exc_info()[2]
                raise e.__class__, e.args, traceback.tb_next.tb_next
        call.__name__ = f.__name__
        return call
    return wrap
"resource": ""
} |
def locked(self):
    """Context generator for `with` statement, yields thread-safe connection.

    :return: thread-safe connection
    :rtype: pydbal.connection.Connection
    """
    conn = self._get_connection()
    try:
        self._lock(conn)
        yield conn
    finally:
        # always release, even when the body of the `with` raised
        self._unlock(conn)
"resource": ""
} |
def query(self, sql, *args, **kwargs):
    """Executes an SQL SELECT query and returns rows generator.

    The connection lock is held while rows are being consumed.

    :param sql: query to execute
    :param args: parameters iterable
    :param kwargs: parameters iterable
    :return: rows generator
    :rtype: generator
    """
    with self.locked() as conn:
        for record in conn.query(sql, *args, **kwargs):
            yield record
"resource": ""
} |
def fetch(self, sql, *args, **kwargs):
    """Executes an SQL SELECT query and returns the first row or `None`.

    :param sql: statement to execute
    :param args: parameters iterable
    :param kwargs: parameters iterable
    :return: the first row or `None`
    """
    with self.locked() as conn:
        result = conn.query(sql, *args, **kwargs)
        return result.fetch()
"resource": ""
} |
def fetch_all(self, sql, *args, **kwargs):
    """Executes an SQL SELECT query and returns all selected rows.

    :param sql: statement to execute
    :param args: parameters iterable
    :param kwargs: parameters iterable
    :return: all selected rows
    :rtype: list
    """
    with self.locked() as conn:
        result = conn.query(sql, *args, **kwargs)
        return result.fetch_all()
"resource": ""
} |
def fetch_column(self, sql, *args, **kwargs):
    """Executes an SQL SELECT query and returns the first column of the first row or `None`.

    :param sql: statement to execute
    :param args: parameters iterable
    :param kwargs: parameters iterable
    :return: the first column of the first row or `None`
    """
    with self.locked() as conn:
        result = conn.query(sql, *args, **kwargs)
        return result.fetch_column()
"resource": ""
} |
def ensure_bytes(data: Union[str, bytes]) -> bytes:
    """
    Convert data to bytes (UTF-8) if data is a string

    :param data: Data
    :rtype bytes:
    """
    if isinstance(data, str):
        return data.encode('utf-8')
    return data
"resource": ""
} |
def ensure_str(data: Union[str, bytes]) -> str:
    """
    Convert data to str (UTF-8) if data is bytes

    :param data: Data
    :rtype str:
    """
    if isinstance(data, bytes):
        return data.decode('utf-8')
    return data
"resource": ""
} |
def xor_bytes(b1: bytes, b2: bytes) -> bytearray:
    """
    Apply XOR operation on two bytes arguments,
    truncated to the shorter of the two

    :param b1: First bytes argument
    :param b2: Second bytes argument
    :rtype bytearray:
    """
    return bytearray(x ^ y for x, y in zip(b1, b2))
"resource": ""
} |
def kompile(src, raw=False, filename='<compiler>', loader=None, **kwargs):
    '''
    Creates a new class based on the supplied template, and returns it.

    class Template(object):
        def __call__(self, context):
            return ''.join(self._iterator(context))

        def _iterator(self, context):
            return map(str, self._root(context)

        def _root(self, context):
            yield ''
            yield ...
            yield from self.head(context)

    Blocks create new methods, and add a 'yield from self.{block}(context)' to
    the current function

    :param raw: when True return the generated class itself instead of an
        instance of it
    '''
    parser = Parser(src, loader=loader)
    parser.load_library('knights.tags')
    parser.load_library('knights.helpers')

    parser.build_method('_root')
    if parser.parent:
        # A child template renders via its parent:
        # remove _root from the method list
        parser.methods = [
            method for method in parser.methods if method.name != '_root'
        ]
    klass = parser.build_class()

    # Wrap it in a module
    inst = ast.Module(body=[klass])
    ast.fix_missing_locations(inst)

    if kwargs.get('astor', False):
        # optional debugging aid: dump the generated source
        import astor
        print(astor.to_source(inst))

    # Compile code to create class
    code = compile(inst, filename=filename, mode='exec', optimize=2)

    # Execute it and return the instance
    g = {
        '_': Helpers(parser.helpers),
        'parent': parser.parent,
        'ContextScope': ContextScope,
    }

    eval(code, g)

    klass = g['Template']
    if raw:
        return klass
    return klass()
"resource": ""
} |
def signal_handler(self, signum, frame):
    """
    Handle print_exit via signals.

    Prints the final statistics, reports the signal number and terminates
    the process with status 0.  `frame` is unused (required by the
    signal-handler calling convention -- presumably registered with
    signal.signal(); confirm at the call site).
    """
    self.print_exit()
    print("\n(Terminated with signal %d)\n" % (signum))
    sys.exit(0)
"resource": ""
} |
def header2dict(self, names, struct_format, data):
    """
    Unpack the raw received IP and ICMP header information to a dict.
    """
    fields = struct.unpack(struct_format, data)
    return dict(zip(names, fields))
"resource": ""
} |
def do(self):
    """
    Send one ICMP ECHO_REQUEST and receive the response until self.timeout.

    Returns a PingSuccess on reply, a PingTimeout on timeout, or None when
    the send itself failed.

    NOTE(review): Python 2-only syntax (tuple target in except clause and
    three-argument raise); will not compile under Python 3.
    """
    try: # One could use UDP here, but it's obscure
        current_socket = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.getprotobyname("icmp"))
    except socket.error as (errno, msg):
        if errno == 1:
            # Operation not permitted - Add more information to traceback
            etype, evalue, etb = sys.exc_info()
            evalue = etype(
                "%s - Note that ICMP messages can only be send from processes running as root." % evalue
            )
            raise etype, evalue, etb
        raise # raise the original error
    send_time = self.send_one_ping(current_socket)
    if send_time == None:
        return
    self.send_count += 1
    receive_time, packet_size, ip, ip_header, icmp_header = self.receive_one_ping(current_socket)
    current_socket.close()
    if receive_time:
        self.receive_count += 1
        delay = (receive_time - send_time) * 1000.0
        self.total_time += delay
        # track min/max round-trip times across the session
        if self.min_time > delay:
            self.min_time = delay
        if self.max_time < delay:
            self.max_time = delay
        return PingSuccess(delay, ip, packet_size, ip_header, icmp_header)
    else:
        return PingTimeout(self.destination)
"resource": ""
} |
def send_one_ping(self, current_socket):
    """
    Send one ICMP ECHO_REQUEST.

    Returns the send timestamp, or None when the send failed (in which
    case the socket is closed).
    """
    # Header is type (8), code (8), checksum (16), id (16), sequence (16)
    checksum = 0

    # Make a dummy header with a 0 checksum.
    header = struct.pack(
        "!BBHHH", ICMP_ECHO, 0, checksum, self.own_id, self.seq_number
    )

    padBytes = []
    startVal = 0x42
    for i in range(startVal, startVal + (self.packet_size)):
        padBytes += [(i & 0xff)]  # Keep chars in the 0-255 range
    # NOTE(review): bytes(list-of-ints) behaves differently on Python 2
    # (str of the list repr) vs Python 3 (raw bytes) -- confirm which
    # interpreter this project targets.
    data = bytes(padBytes)

    # Calculate the checksum on the data and the dummy header.
    checksum = calculate_checksum(header + data)  # Checksum is in network order

    # Now that we have the right checksum, we put that in. It's just easier
    # to make up a new header than to stuff it into the dummy.
    header = struct.pack(
        "!BBHHH", ICMP_ECHO, 0, checksum, self.own_id, self.seq_number
    )

    packet = header + data

    send_time = default_timer()

    try:
        current_socket.sendto(packet, (self.destination, 1))  # Port number is irrelevant for ICMP
    except socket.error as e:
        print("General failure (%s)" % (e.args[1]))
        current_socket.close()
        return

    return send_time
"resource": ""
} |
def receive_one_ping(self, current_socket):
    """
    Receive the ping from the socket. timeout = in ms.

    Returns (receive_time, packet_size, ip, ip_header, icmp_header), or
    (None, 0, 0, 0, 0) when the timeout elapsed without our reply.
    """
    timeout = self.timeout / 1000.0

    while True: # Loop while waiting for packet or timeout
        select_start = default_timer()
        inputready, outputready, exceptready = select.select([current_socket], [], [], timeout)
        select_duration = (default_timer() - select_start)
        if inputready == []: # timeout
            return None, 0, 0, 0, 0

        receive_time = default_timer()

        packet_data, address = current_socket.recvfrom(ICMP_MAX_RECV)

        # ICMP header sits right after the 20-byte IP header
        icmp_header = self.header2dict(
            names=[
                "type", "code", "checksum",
                "packet_id", "seq_number"
            ],
            struct_format="!BBHHH",
            data=packet_data[20:28]
        )

        # Match on our process id to filter out other pingers' replies
        if icmp_header["packet_id"] == self.own_id: # Our packet
            ip_header = self.header2dict(
                names=[
                    "version", "type", "length",
                    "id", "flags", "ttl", "protocol",
                    "checksum", "src_ip", "dest_ip"
                ],
                struct_format="!BBHHHBBHII",
                data=packet_data[:20]
            )
            packet_size = len(packet_data) - 28
            ip = socket.inet_ntoa(struct.pack("!I", ip_header["src_ip"]))
            # XXX: Why not ip = address[0] ???
            return receive_time, packet_size, ip, ip_header, icmp_header

        # someone else's packet: shrink the remaining timeout and retry
        timeout = timeout - select_duration
        if timeout <= 0:
            return None, 0, 0, 0, 0
"resource": ""
} |
def get_link(self, peer):
    """
    Retrieves the link to the given peer, creating and caching it when it
    does not exist yet.  Returns None when the peer has no MQTT access.
    """
    access = next((a for a in peer.accesses if a.type == 'mqtt'), None)
    if access is None:
        # No MQTT access found
        return None

    # Server access tuple used as the cache key
    server = (access.server.host, access.server.port)
    with self.__lock:
        if server not in self._links:
            # Create a new link
            self._links[server] = MQTTLink(access)
        return self._links[server]
"resource": ""
} |
def exit(exit_code=0):
    r"""A function to support exiting from exit hooks.

    Could also be used to exit from the calling scripts in a thread safe manner.

    :param exit_code: process exit status (defaults to 0)
    """
    core.processExitHooks()

    # When called re-entrantly from an exit hook, skip normal interpreter
    # shutdown: flush the std streams and hard-exit with os._exit.
    if state.isExitHooked and not hasattr(sys, 'exitfunc'):  # The function is called from the exit hook
        sys.stderr.flush()
        sys.stdout.flush()
        os._exit(exit_code)  # pylint: disable=W0212

    sys.exit(exit_code)
"resource": ""
} |
def listMemberHelps(TargetGroup):
    r"""Gets help on a group's children.

    Returns a list of (name-with-optional-alias, description) tuples, one
    per unique child (aliases map to the same member and are collapsed).
    """
    unique_members = []
    for member in TargetGroup.Members.values():  # discard alias duplicates
        if member not in unique_members:
            unique_members.append(member)

    out = []
    for member in unique_members:
        cfg = member.Config
        label = '%s%s' % (cfg['name'],
                          ', %s' % cfg['alias'] if 'alias' in cfg else '')
        out.append((label, cfg.get('desc', '')))
    return out
"resource": ""
} |
def getTypeStr(_type):
    r"""Gets the string representation of the given type.
    """
    if isinstance(_type, CustomType):
        return str(_type)
    # plain types are rendered by name; anything else yields ''
    return getattr(_type, '__name__', '')
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.