_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q43500
Butcher.missing_nodes
train
def missing_nodes(self):
    """Return the set of target addresses referenced as dependencies but
    not yet backed by a concrete target object in the graph."""
    # A node without a 'target_obj' attribute is known only by reference.
    return {addr
            for addr, attrs in self.graph.node.items()
            if 'target_obj' not in attrs}
python
{ "resource": "" }
q43501
Butcher.load_buildfile
train
def load_buildfile(self, target):
    """Pull a build file from git.

    :param target: target object exposing ``path`` and ``repo`` attributes.
    :returns: raw build-file contents as returned by ``repo.get_file``.
    :raises error.BrokenGraph: if the git lookup or file fetch fails.
    """
    log.info('Loading: %s', target)
    # Build file lives at <target path>/<configured buildfile name>.
    filepath = os.path.join(target.path, app.get_options().buildfile_name)
    try:
        repo = self.repo_state.GetRepo(target.repo)
        return repo.get_file(filepath)
    except gitrepo.GitError as err:
        log.error('Failed loading %s: %s', target, err)
        # Surface repo-level failures as a graph-level error for callers.
        raise error.BrokenGraph('Sadface.')
python
{ "resource": "" }
q43502
constructSpec
train
def constructSpec(indentation, begin_block, end_block, begin_line, end_line, begin_action, end_action, begin_condition, end_condition, logical_and, logical_or):
    """Return a language specification based on parameters.

    Pairs each spec key with the corresponding token supplied by the caller.
    """
    keys = (INDENTATION, BEG_BLOCK, END_BLOCK, BEG_LINE, END_LINE,
            BEG_ACTION, END_ACTION, BEG_CONDITION, END_CONDITION,
            LOGICAL_AND, LOGICAL_OR)
    tokens = (indentation, begin_block, end_block, begin_line, end_line,
              begin_action, end_action, begin_condition, end_condition,
              logical_and, logical_or)
    return dict(zip(keys, tokens))
python
{ "resource": "" }
q43503
translated
train
def translated(structure, values, lang_spec): """Return code associated to given structure and values, translate with given language specification.""" # LANGUAGE SPECS indentation = '\t' endline = '\n' object_code = "" stack = [] # define shortcuts to behavior push = lambda x: stack.append(x) pop = lambda : stack.pop() last = lambda : stack[-1] if len(stack) > 0 else ' ' def indented_code(s, level, end): return lang_spec[INDENTATION]*level + s + end # recreate python structure, and replace type by value level = 0 CONDITIONS = [LEXEM_TYPE_PREDICAT, LEXEM_TYPE_CONDITION] ACTION = LEXEM_TYPE_ACTION DOWNLEVEL = LEXEM_TYPE_DOWNLEVEL for lexem_type in structure: if lexem_type is ACTION: # place previous conditions if necessary if last() in CONDITIONS: # construct conditions lines value, values = values[0:len(stack)], values[len(stack):] object_code += (indented_code(lang_spec[BEG_CONDITION] + lang_spec[LOGICAL_AND].join(value) + lang_spec[END_CONDITION], level, lang_spec[END_LINE] )) # if provided, print the begin block token on a new line if len(lang_spec[BEG_BLOCK]) > 0: object_code += indented_code( lang_spec[BEG_BLOCK], level, lang_spec[END_LINE] ) stack = [] level += 1 # and place the action object_code += indented_code( lang_spec[BEG_ACTION] + values[0], level, lang_spec[END_ACTION]+lang_spec[END_LINE] ) values = values[1:] elif lexem_type in CONDITIONS: push(lexem_type) elif lexem_type is DOWNLEVEL: if last() not in CONDITIONS: # down level, and add a END_BLOCK only if needed level -= 1 if level >= 0: object_code += indented_code( lang_spec[END_BLOCK], level, lang_spec[END_LINE] ) else: level = 0 # add END_BLOCK while needed for reach level 0 while level > 0: level -= 1 if level >= 0: object_code += indented_code( lang_spec[END_BLOCK], level, lang_spec[END_LINE] ) else: level = 0 # Finished ! return object_code
python
{ "resource": "" }
q43504
cpp_spec
train
def cpp_spec():
    """C++ specification, provided for example, and java compatible."""
    spec = {}
    spec[INDENTATION] = '\t'
    spec[BEG_BLOCK] = '{'
    spec[END_BLOCK] = '}'
    spec[BEG_LINE] = ''
    spec[END_LINE] = '\n'
    spec[BEG_ACTION] = ''
    spec[END_ACTION] = ';'
    spec[BEG_CONDITION] = 'if('
    spec[END_CONDITION] = ')'
    spec[LOGICAL_AND] = ' && '
    spec[LOGICAL_OR] = ' || '
    return spec
python
{ "resource": "" }
q43505
set_thresh
train
def set_thresh(thresh, p=False, hostname=None):
    '''Set the level of the threshold slider.

    If ``p == True`` the value will be interpreted as a *p*-value.
    '''
    flag = "p" if p else ""
    driver_send("SET_THRESHNEW %s *%s" % (str(thresh), flag),
                hostname=hostname)
python
{ "resource": "" }
q43506
get_meminfo
train
def get_meminfo(opts):
    '''
    Return a MemInfo object holding the current memory info, with byte
    counts divided by the output unit (``opts.outunit``).

    Windows-only: relies on winstats and PDH performance counters.
    '''
    meminfo = MemInfo()
    outunit = opts.outunit
    mstat = get_mem_info()  # from winstats
    pinf = get_perf_info()
    try:
        # Fraction (0..1) of the paging file currently in use.
        pgpcnt = get_perf_data(r'\Paging File(_Total)\% Usage', 'double')[0] / 100
    except WindowsError:
        pgpcnt = 0
    totl = mstat.TotalPhys
    meminfo.memtotal = totl / float(outunit)
    used = totl * mstat.MemoryLoad / 100.0  # percent, more reliable
    meminfo.used = used / float(outunit)
    left = totl - used
    # Cached
    cache = pinf.SystemCacheBytes
    if cache > left and version >= win7ver:
        # Win7 RTM bug :/ this cache number is bogus
        free = get_perf_data(r'\Memory\Free & Zero Page List Bytes', 'long')[0]
        cache = left - free
        meminfo.memfree = free / float(outunit)
    else:
        meminfo.memfree = (totl - used - cache) / float(outunit)
    meminfo.buffers = 0
    meminfo.cached = cache / float(outunit)
    # SWAP these numbers are actually commit charge, not swap; fix
    swpt = abs(mstat.TotalPageFile - totl)  # should not contain RAM :/
    # these nums aren't quite right either, use perfmon instead :/
    swpu = swpt * pgpcnt
    swpf = swpt - swpu
    meminfo.swaptotal = swpt / float(outunit)
    meminfo.swapfree = swpf / float(outunit)
    meminfo.swapused = swpu / float(outunit)
    meminfo.swapcached = 0  # A linux stat for compat
    if opts.debug:
        import locale
        fmt = lambda val: locale.format('%d', val, True)
        print()
        print('TotalPhys:', fmt(totl))
        print('AvailPhys:', fmt(mstat.AvailPhys))
        print('MemoryLoad:', fmt(mstat.MemoryLoad))
        print()
        print('used:', fmt(used))
        print('left:', fmt(left))
        # 'free' only exists when the Win7 cache workaround ran above.
        if 'free' in locals():
            print('PDH Free:', fmt(free))
        print('SystemCacheBytes:', fmt(pinf.SystemCacheBytes))
        print()
        print('TotalPageFile:', fmt(mstat.TotalPageFile))
        print('AvailPageFile:', fmt(mstat.AvailPageFile))
        print('TotalPageFile fixed:', fmt(swpt))
        print('AvailPageFile fixed:', fmt(swpf))
    return meminfo
python
{ "resource": "" }
q43507
endpoint
train
def endpoint(value: Any) -> Any:
    """
    Convert an endpoint string to the corresponding Endpoint instance type.

    :param value: Endpoint string or Endpoint subclass instance
    :return: an Endpoint instance
    :raises TypeError: if value is neither an Endpoint nor a string
    """
    # Already an endpoint instance: pass through untouched.
    if issubclass(type(value), Endpoint):
        return value
    if isinstance(value, str):
        # Dispatch on the API prefix (e.g. "BASIC_MERKLED_API ...").
        for api, cls in MANAGED_API.items():
            if value.startswith(api + " "):
                return cls.from_inline(value)
        return UnknownEndpoint.from_inline(value)
    raise TypeError("Cannot convert {0} to endpoint".format(value))
python
{ "resource": "" }
q43508
UnknownEndpoint.from_inline
train
def from_inline(cls: Type[UnknownEndpointType], inline: str) -> UnknownEndpointType:
    """
    Return UnknownEndpoint instance from endpoint string.

    :param inline: Endpoint string ("API prop1 prop2 ...")
    :raises MalformedDocumentError: if the string is empty
    :return: UnknownEndpoint instance
    """
    parts = inline.split()
    if not parts:
        # No API token at all -> malformed endpoint string.
        raise MalformedDocumentError(inline)
    return cls(parts[0], parts[1:])
python
{ "resource": "" }
q43509
BMAEndpoint.from_inline
train
def from_inline(cls: Type[BMAEndpointType], inline: str) -> BMAEndpointType:
    """
    Return BMAEndpoint instance from endpoint string.

    :param inline: Endpoint string
    :raises MalformedDocumentError: if the string does not match the format
    :return: BMAEndpoint instance
    """
    match = BMAEndpoint.re_inline.match(inline)
    if match is None:
        raise MalformedDocumentError(BMAEndpoint.API)
    # Groups in order: server, ipv4, ipv6, port.
    return cls(match.group(1),
               match.group(2),
               match.group(3),
               int(match.group(4)))
python
{ "resource": "" }
q43510
SecuredBMAEndpoint.from_inline
train
def from_inline(cls: Type[SecuredBMAEndpointType], inline: str) -> SecuredBMAEndpointType:
    """
    Return SecuredBMAEndpoint instance from endpoint string.

    :param inline: Endpoint string
    :raises MalformedDocumentError: if the string does not match the format
    :return: SecuredBMAEndpoint instance
    """
    match = SecuredBMAEndpoint.re_inline.match(inline)
    if match is None:
        raise MalformedDocumentError(SecuredBMAEndpoint.API)
    # Path group may be missing; normalize to the empty string.
    path = match.group(5) or ""
    return cls(match.group(1),
               match.group(2),
               match.group(3),
               int(match.group(4)),
               path)
python
{ "resource": "" }
q43511
WS2PEndpoint.from_inline
train
def from_inline(cls: Type[WS2PEndpointType], inline: str) -> WS2PEndpointType:
    """
    Return WS2PEndpoint instance from endpoint string.

    :param inline: Endpoint string
    :raises MalformedDocumentError: if the string does not match the format
    :return: WS2PEndpoint instance
    """
    match = WS2PEndpoint.re_inline.match(inline)
    if match is None:
        raise MalformedDocumentError(WS2PEndpoint.API)
    # Path group may be missing; normalize to the empty string.
    path = match.group(4) or ""
    return cls(match.group(1), match.group(2), int(match.group(3)), path)
python
{ "resource": "" }
q43512
ESCoreEndpoint.from_inline
train
def from_inline(cls: Type[ESCoreEndpointType], inline: str) -> ESCoreEndpointType:
    """
    Return ESCoreEndpoint instance from endpoint string.

    :param inline: Endpoint string
    :raises MalformedDocumentError: if the string does not match the format
    :return: ESCoreEndpoint instance
    """
    match = ESCoreEndpoint.re_inline.match(inline)
    if match is None:
        raise MalformedDocumentError(ESCoreEndpoint.API)
    return cls(match.group(1), int(match.group(2)))
python
{ "resource": "" }
q43513
ESUserEndpoint.from_inline
train
def from_inline(cls: Type[ESUserEndpointType], inline: str) -> ESUserEndpointType:
    """
    Return ESUserEndpoint instance from endpoint string.

    :param inline: Endpoint string
    :raises MalformedDocumentError: if the string does not match the format
    :return: ESUserEndpoint instance
    """
    match = ESUserEndpoint.re_inline.match(inline)
    if match is None:
        raise MalformedDocumentError(ESUserEndpoint.API)
    return cls(match.group(1), int(match.group(2)))
python
{ "resource": "" }
q43514
ESSubscribtionEndpoint.from_inline
train
def from_inline(cls: Type[ESSubscribtionEndpointType], inline: str) -> ESSubscribtionEndpointType:
    """
    Return ESSubscribtionEndpoint instance from endpoint string.

    :param inline: Endpoint string
    :raises MalformedDocumentError: if the string does not match the format
    :return: ESSubscribtionEndpoint instance
    """
    match = ESSubscribtionEndpoint.re_inline.match(inline)
    if match is None:
        raise MalformedDocumentError(ESSubscribtionEndpoint.API)
    return cls(match.group(1), int(match.group(2)))
python
{ "resource": "" }
q43515
Visitor.continues
train
def continues(method):
    '''Method decorator signifying that the visitor should not visit the
    current node's children once this method has been invoked.

    :param method: visitor method to wrap
    :returns: generator function yielding the method's result, then raising
        the visitor's ``Continue`` exception to skip the node's children.
    '''
    @functools.wraps(method)
    def wrapped(self, *args, **kwargs):
        # Yield the result first so the visitor loop can consume it,
        # then signal "skip children" via the visitor-defined exception.
        yield method(self, *args, **kwargs)
        raise self.Continue()
    return wrapped
python
{ "resource": "" }
q43516
Visitor.get_methodnames
train
def get_methodnames(self, node):
    '''Given a node, generate all names for matching visitor methods.

    The node key may be a single string or a generator of strings; one
    candidate method name is yielded per key, each prefixed with the
    visitor's method prefix (e.g. ``visit_``).

    Fixes: the original computed ``prefix`` but never used it, and shadowed
    ``nodekey`` as its own loop variable.
    '''
    nodekey = self.get_nodekey(node)
    prefix = self._method_prefix
    if isinstance(nodekey, self.GeneratorType):
        # Multiple keys: one candidate method name per key.
        for key in nodekey:
            yield prefix + key
    else:
        yield prefix + nodekey
python
{ "resource": "" }
q43517
readGDF
train
def readGDF(filename="../data/RenatoFabbri06022014.gdf"):
    """Parse a GDF file into node (individual) and edge (friendship) tables.

    Made to work with gdf files from my own network and friends and groups.

    :param filename: path to the GDF file
    :returns: dict with "individuals" (node columns) and "relations"
        (edge columns), each mapping column name -> list of values.
    """
    with open(filename,"r") as f:
        data=f.read()
    lines=data.split("\n")
    # First line is the node header: "nodedef>name VARCHAR,..." etc.
    columns=lines[0].split(">")[1].split(",")
    column_names=[i.split(" ")[0] for i in columns]
    data_friends={cn:[] for cn in column_names}
    for line in lines[1:]:
        if not line:
            break
        if ">" in line:
            # Second header line switches parsing to the edge section.
            columns=line.split(">")[1].split(",")
            column_names2=[i.split(" ")[0] for i in columns]
            data_friendships={cn:[] for cn in column_names2}
            continue
        fields=line.split(",")
        # HACK: presence of column_names2 in locals() flags that the edge
        # header has been seen; before that, rows belong to the node table.
        if "column_names2" not in locals():
            for i, field in enumerate(fields):
                # name/groupid stay strings; other digit fields become ints.
                if column_names[i] in ("name","groupid"):
                    pass
                elif field.isdigit():
                    field=int(field)
                data_friends[column_names[i]].append(field)
        else:
            for i, field in enumerate(fields):
                if column_names2[i]=="name":
                    pass
                elif field.isdigit():
                    field=int(field)
                data_friendships[column_names2[i]].append(field)
    return {"relations":data_friendships, "individuals":data_friends}
python
{ "resource": "" }
q43518
json_response
train
def json_response(data, status=200, serializer=None):
    """
    Return an HttpResponse containing *data* serialized as JSON.

    The mime-type is application/json and the charset UTF-8.
    """
    body = json.dumps(data, default=serializer)
    return HttpResponse(body,
                        status=status,
                        content_type='application/json; charset=UTF-8')
python
{ "resource": "" }
q43519
jsonp_response
train
def jsonp_response(data, callback="f", status=200, serializer=None):
    """
    Return an HttpResponse containing JSON serialized data, wrapped in a
    JSONP callback invocation. The mime-type is application/x-javascript,
    charset UTF-8.

    Fix: the payload used to be wrapped in single quotes (``cb('{...}')``),
    which passed a string instead of a value and produced invalid JavaScript
    as soon as the serialized data contained a quote. JSONP passes the JSON
    value directly: ``cb({...});``.
    """
    val = json.dumps(data, default=serializer)
    ret = "{callback}({val});".format(callback=callback, val=val)
    return HttpResponse(ret, status=status,
                        content_type='application/x-javascript; charset=UTF-8')
python
{ "resource": "" }
q43520
CollectionAttributesMixin.set_per_page
train
def set_per_page(self, entries=100):
    """Set the number of entries fetched per page (maximum 200).

    :param entries: integer, at most 200
    :returns: self, for chaining
    :raises SalesKingException: if entries is not an int or exceeds 200

    Fixes: the error message claimed "<200" while the check accepts 200
    (``<= 200``); removed a stray semicolon and a redundant int() cast.
    """
    if isinstance(entries, int) and entries <= 200:
        self.per_page = entries
        return self
    raise SalesKingException(
        "PERPAGE_ONLYINT",
        "Please set an integer <= 200 for the per-page limit")
python
{ "resource": "" }
q43521
CollectionAttributesMixin.set_resource_type
train
def set_resource_type(self, klass):
    """Set the resource type to load, and load its raw schema.

    :param klass: resource type identifier passed to the schema loader
    """
    self.resource_type = klass
    # The schema drives validation/deserialization for this collection.
    self.schema = loaders.load_schema_raw(self.resource_type)
python
{ "resource": "" }
q43522
CollectionAttributesMixin.set_filters
train
def set_filters(self, filters):
    """Replace the current filters with a validated copy of *filters*.

    Each key/value pair is validated individually via ``add_filter``,
    which raises on invalid values.

    :param filters: dict mapping filter name -> filter value
    :raises Exception: if *filters* is not a dict

    Fix: iterate ``items()`` instead of looking each key up again.
    """
    if not isinstance(filters, dict):
        raise Exception("filters must be a dict")
    self.filters = {}
    for key, value in filters.items():
        self.add_filter(key, value)
python
{ "resource": "" }
q43523
CollectionAttributesMixin.add_filter
train
def add_filter(self, key, filter_value):
    """Add a single filter after validating it.

    :param key: filter name (serialized as ``filter[<key>]``)
    :param filter_value: value to filter on
    :returns: True on success
    :raises SalesKingException: if validation fails
    """
    # NOTE(review): 'seek' is built but never used -- presumably the intended
    # serialized key for the query string; confirm against _build_query_url.
    seek = u"filter[%s]" % key
    if self.validate_filter(key, filter_value):
        self.filters[key] = filter_value
        return True
    else:
        msg = u'Invalid filter value: filter:%s value:%s' % (key, filter_value)
        # Python 2 print statement (this module targets Python 2).
        print msg
        raise SalesKingException("FILTER_INVALID", msg )
python
{ "resource": "" }
q43524
CollectionAttributesMixin._build_query_url
train
def _build_query_url(self, page = None, verbose = False):
    """Build the absolute collection query URL from filters, sorting,
    paging and the API base path.

    :param page: optional page number to append to the query string
    :param verbose: if True, also print the resulting URL
    :returns: absolute URL string
    """
    query = []
    # # build the filters
    # for afilter in self.filters.keys():
    #     value = self.filters[afilter]
    #     print"filter:%s value:%s" % (afilter,value)
    #     value = urlencode(value)
    #     query_str = u"%s=%s" % (afilter, value)
    if len(self.filters) > 0:
        # urlencode serializes the whole filters dict in one shot.
        query.append(urlencode(self.filters))
    if self.sort:
        query_str = u"%s=%s" % (u"sort", self.sort)
        query.append(query_str)
    if self.sort_by:
        query_str = u"%s=%s" % (u"sort_by", self.sort_by)
        query.append(query_str)
    if self.per_page:
        query_str = u"%s=%s" % (u"per_page", self.per_page)
        query.append(query_str)
    if page:
        query_str = u"%s=%s" % (u"page", page)
        query.append(query_str)
    query = u"?%s" % (u"&".join(query))
    # Endpoint href + query string, prefixed with base URL and API path.
    url = u"%s%s" % (self.get_list_endpoint()['href'],query)
    url = u"%s%s%s" % (self.__api__.base_url, API_BASE_PATH, url)
    msg = "_build_query_url: url:%s" % url
    log.debug(msg)
    if verbose:
        # Python 2 print statement (this module targets Python 2).
        print msg
    return url
python
{ "resource": "" }
q43525
CollectionAttributesMixin._post_load
train
def _post_load(self, response, verbose):
    """Post-process a collection response and fill ``self._items``.

    :param response: HTTP response from the list endpoint
    :param verbose: if True, print the raw response body
    :returns: self, with pagination counters and ``_items`` populated
    :raises SalesKingException: when the response is missing or not HTTP 200
    """
    try:
        if verbose:
            # Python 2 print statement (this module targets Python 2).
            print response.content
        log.debug(response.content)
    except Exception, e:
        raise e
    if response is not None and response.status_code == 200:
        # Collection payloads are keyed by the pluralized resource name.
        types = helpers.pluralize(self.resource_type)
        #print "types %s" % types
        body = json.loads(response.content, encoding='utf-8')
        self.total_entries = body['collection']['total_entries']
        self.total_pages = body['collection']['total_pages']
        self.current_page = body['collection']['current_page']
        ## now get the items from the class factory
        if self.total_entries != 0:
            for response_item in body[types]:
                obj = self._response_item_to_object(response_item)
                ## add the items
                self._items.append(obj)
    else:
        msg = u"Fetching failed, an error happend"
        raise SalesKingException("LOAD_ERROR", msg, response)
    return self
python
{ "resource": "" }
q43526
CollectionAttributesMixin._response_item_to_object
train
def _response_item_to_object(self, resp_item):
    """Turn a raw JSON collection item into a resource instance.

    :param resp_item: dict keyed by the resource type name
    :returns: instance of the model class for ``self.resource_type``
    """
    model_cls = resources.get_model_class(self.resource_type)
    # Strip null properties before handing the dict to the constructor;
    # the constructor raises if something goes wrong.
    attrs = helpers.remove_properties_containing_None(
        resp_item[self.resource_type])
    return model_cls(attrs)
python
{ "resource": "" }
q43527
BasicCommandsBot.cmd_part
train
def cmd_part(self, connection, sender, target, payload):
    """
    Asks the bot to leave a channel.
    """
    if not payload:
        raise ValueError("No channel given")
    connection.part(payload)
python
{ "resource": "" }
q43528
BasicCommandsBot.cmd_join
train
def cmd_join(self, connection, sender, target, payload):
    """
    Asks the bot to join a channel.
    """
    if not payload:
        raise ValueError("No channel given")
    connection.join(payload)
python
{ "resource": "" }
q43529
BasicCommandsBot.cmd_echo
train
def cmd_echo(self, connection, sender, target, payload):
    """
    Echoes the given payload back to the target, or greets the sender
    when the payload is empty.
    """
    if payload:
        message = payload
    else:
        message = "Hello, {0}".format(sender)
    connection.privmsg(target, message)
python
{ "resource": "" }
q43530
BasicCommandsBot.cmd_work
train
def cmd_work(self, connection, sender, target, payload):
    """
    Pretends to do some job for the given number of seconds (default 5),
    then announces the answer.
    """
    connection.action(target, "is doing something...")
    duration = int(payload) if payload else 5
    time.sleep(duration)
    connection.action(target, "has finished !")
    connection.privmsg(target, "My answer is: 42.")
python
{ "resource": "" }
q43531
AwsLogGroup.do_logStream
train
def do_logStream(self,args):
    """Go to the specified log stream. logStream -h for detailed help"""
    parser = CommandArgumentParser("logStream")
    parser.add_argument(dest='logStream',help='logStream index.');
    args = vars(parser.parse_args(args))
    # Python 2 print statements (this module targets Python 2).
    print "loading log stream {}".format(args['logStream'])
    # The argument is an index into the previously listed streams.
    index = int(args['logStream'])
    logStream = self.logStreams[index]
    print "logStream:{}".format(logStream)
    # Enter an interactive child loop scoped to the selected stream.
    self.childLoop(AwsLogStream.AwsLogStream(logStream,self))
python
{ "resource": "" }
q43532
print_version
train
def print_version(ctx: click.Context, _, value):
    """Click option callback: print the current version and exit.

    Does nothing when the flag is absent or click is resilient-parsing
    (e.g. during shell completion).
    """
    if not value or ctx.resilient_parsing:
        return
    print(__version__)
    sys.exit(0)
python
{ "resource": "" }
q43533
base64url_decode
train
def base64url_decode(msg):
    """
    Decode a base64url message per the JWT spec, Appendix B ("Notes on
    implementing base64url encoding without padding"): restore the
    stripped '=' padding before decoding.
    """
    padding = -len(msg) % 4
    if padding:
        msg += b'=' * padding
    return base64.urlsafe_b64decode(msg)
python
{ "resource": "" }
q43534
_jws_header
train
def _jws_header(keyid, algorithm):
    """Produce a base64-encoded JWS header."""
    # 'kid' is used to indicate the public part of the key used during signing.
    header = {'typ': 'JWT', 'alg': algorithm.name, 'kid': keyid}
    raw = json.dumps(header, sort_keys=True).encode('utf8')
    return base64url_encode(raw)
python
{ "resource": "" }
q43535
_jws_payload
train
def _jws_payload(expire_at, requrl=None, **kwargs):
    """
    Produce a base64-encoded JWS payload.

    expire_at, if specified, must be a timestamp after which the message
    must be rejected. requrl, if specified, is used as the "audience"
    according to the JWT spec. Any other parameters are copied into the
    payload as-is (and may override 'exp'/'aud').
    """
    payload = {'exp': expire_at, 'aud': requrl, **kwargs}
    raw = json.dumps(payload, sort_keys=True).encode('utf8')
    return base64url_encode(raw)
python
{ "resource": "" }
q43536
_jws_signature
train
def _jws_signature(signdata, privkey, algorithm):
    """
    Produce a base64-encoded JWS signature of *signdata* using the given
    private key instance and algorithm.
    """
    return base64url_encode(algorithm.sign(privkey, signdata))
python
{ "resource": "" }
q43537
sign_serialize
train
def sign_serialize(privkey, expire_after=3600, requrl=None, algorithm_name=DEFAULT_ALGO, **kwargs):
    """
    Produce a JWT compact serialization by generating a header, payload, and
    signature using the privkey and algorithm specified.

    The privkey object must contain at least a member named pubkey.

    The parameter expire_after is used by the server to reject the payload
    if received after current_time + expire_after. Set it to None to disable
    its use.

    The parameter requrl is optionally used by the server to reject the
    payload if it is not delivered to the proper place, e.g. if requrl is
    set to https://example.com/api/login but sent to a different server or
    path then the receiving server should reject it.

    Any other parameters are passed as is to the payload.
    """
    assert algorithm_name in ALGORITHM_AVAILABLE
    algo = ALGORITHM_AVAILABLE[algorithm_name]
    # The key id ('kid') carries the serialized public key.
    addy = algo.pubkey_serialize(privkey.pubkey)
    header = _jws_header(addy, algo).decode('utf8')
    payload = _build_payload(expire_after, requrl, **kwargs)
    # Compact JWS form: b64(header).b64(payload).b64(signature)
    signdata = "{}.{}".format(header, payload)
    signature = _jws_signature(signdata, privkey, algo).decode('utf8')
    return "{}.{}".format(signdata, signature)
python
{ "resource": "" }
q43538
multisig_sign_serialize
train
def multisig_sign_serialize(privkeys, expire_after=3600, requrl=None, algorithm_name=DEFAULT_ALGO, **kwargs):
    """
    Produce a general JSON serialization by generating a header, payload,
    and multiple signatures using the list of private keys specified. All
    the signatures will be performed using the same algorithm.

    The parameter expire_after is used by the server to reject the payload
    if received after current_time + expire_after. Set it to None to disable
    its use.

    The parameter requrl is optionally used by the server to reject the
    payload if it is not delivered to the proper place, e.g. if requrl is
    set to https://example.com/api/login but sent to a different server or
    path then the receiving server should reject it.

    Any other parameters are passed as is to the payload.
    """
    assert algorithm_name in ALGORITHM_AVAILABLE
    payload = _build_payload(expire_after, requrl, **kwargs)
    result = {"payload": payload, "signatures": []}
    algo = ALGORITHM_AVAILABLE[algorithm_name]
    # One protected header + signature entry per private key; the payload
    # is shared across all signatures (JWS general JSON serialization).
    for pk in privkeys:
        addy = algo.pubkey_serialize(pk.pubkey)
        header = _jws_header(addy, algo).decode('utf8')
        signdata = "{}.{}".format(header, payload)
        signature = _jws_signature(signdata, pk, algo).decode('utf8')
        result["signatures"].append({
            "protected": header,
            "signature": signature})
    return json.dumps(result)
python
{ "resource": "" }
q43539
multisig_validate_deserialize
train
def multisig_validate_deserialize(rawmsg, requrl=None, check_expiration=True, decode_payload=True, algorithm_name=DEFAULT_ALGO):
    """
    Validate a general JSON serialization and return the headers and
    payload if all the signatures are good.

    If check_expiration is False, the payload will be accepted even if
    expired.

    If decode_payload is True then this function will attempt to decode it
    as JSON, otherwise the raw payload will be returned. Note that it is
    always decoded from base64url.

    :returns: (headers, payload), or (None, None) if any signature fails.
    :raises InvalidMessage: on malformed input or verification errors.
    """
    assert algorithm_name in ALGORITHM_AVAILABLE
    algo = ALGORITHM_AVAILABLE[algorithm_name]
    data = json.loads(rawmsg)
    payload64 = data.get('payload', None)
    signatures = data.get('signatures', None)
    if payload64 is None or not isinstance(signatures, list):
        raise InvalidMessage('must contain "payload" and "signatures"')
    if not len(signatures):
        raise InvalidMessage('no signatures')
    try:
        # Decode payload and per-signature entries in one pass.
        payload, sigs = _multisig_decode(payload64, signatures, decode_payload)
    except Exception as err:
        raise InvalidMessage(str(err))
    all_valid = True
    try:
        # Every signature must verify; a single failure rejects the message.
        for entry in sigs:
            valid = _verify_signature(algorithm=algo, **entry)
            all_valid = all_valid and valid
    except Exception as err:
        raise InvalidMessage('failed to verify signature: {}'.format(err))
    if not all_valid:
        return None, None
    if decode_payload:
        # Expiration/audience checks only make sense on a decoded payload.
        _verify_payload(payload, check_expiration, requrl)
    return [entry['header'] for entry in sigs], payload
python
{ "resource": "" }
q43540
validate_deserialize
train
def validate_deserialize(rawmsg, requrl=None, check_expiration=True, decode_payload=True, algorithm_name=DEFAULT_ALGO):
    """
    Validate a JWT compact serialization and return the header and payload
    if the signature is good.

    If check_expiration is False, the payload will be accepted even if
    expired.

    If decode_payload is True then this function will attempt to decode it
    as JSON, otherwise the raw payload will be returned. Note that it is
    always decoded from base64url.

    :returns: (header, payload), or (None, None) if the signature fails.
    :raises InvalidMessage: on malformed input or verification errors.
    """
    assert algorithm_name in ALGORITHM_AVAILABLE
    algo = ALGORITHM_AVAILABLE[algorithm_name]
    # Compact form: b64(header).b64(payload).b64(signature)
    segments = rawmsg.split('.')
    if len(segments) != 3 or not all(segments):
        raise InvalidMessage('must contain 3 non-empty segments')
    header64, payload64, cryptoseg64 = segments
    try:
        signature = base64url_decode(cryptoseg64.encode('utf8'))
        payload_data = base64url_decode(payload64.encode('utf8'))
        header_data = base64url_decode(header64.encode('utf8'))
        header = json.loads(header_data.decode('utf8'))
        if decode_payload:
            payload = json.loads(payload_data.decode('utf8'))
        else:
            payload = payload_data
    except Exception as err:
        raise InvalidMessage(str(err))
    try:
        # The signing input is the first two (still-encoded) segments.
        valid = _verify_signature(
            '{}.{}'.format(header64, payload64), header, signature, algo)
    except Exception as err:
        raise InvalidMessage('failed to verify signature: {}'.format(err))
    if not valid:
        return None, None
    if decode_payload:
        # Expiration/audience checks only make sense on a decoded payload.
        _verify_payload(payload, check_expiration, requrl)
    return header, payload
python
{ "resource": "" }
q43541
SalesKingApiBase.request
train
def request(self, url, method = u"get", data = None, headers = None, **kwargs):
    """
    Public entry point for a live request: pre-process the arguments,
    send the request, post-process, and translate error responses into
    exceptions via ``_handle_response``.
    """
    url, method, data, headers, kwargs = self._pre_request(
        url, method=method, data=data, headers=headers, **kwargs)
    raw = self._request(
        url, method=method, data=data, headers=headers, **kwargs)
    processed = self._post_request(raw)
    # Raises the appropriate exception on HTTP error statuses.
    return self._handle_response(processed)
python
{ "resource": "" }
q43542
APIClient._request
train
def _request(self, url, method = u"get", data = None, headers=None, **kwargs): """ does the request via requests - oauth not implemented yet - use basic auth please """ # if self.access_token: # auth_header = { # u"Authorization": "Bearer %s" % (self.access_token) # } # headers.update(auth_header) #basic auth msg = "method: %s url:%s\nheaders:%s\ndata:%s" % ( method, url, headers, data) #print msg if not self.use_oauth: auth = (self.sk_user, self.sk_pw) if not self.client: self.client = requests.session() r = self.client.request(method, url, headers=headers, data=data, auth=auth,**kwargs) else: if not self.client: self.client = requests.session(hooks={'pre_request': oauth_hook}) r = self.client.request(method, url, headers=headers, data=data,**kwargs) return r
python
{ "resource": "" }
q43543
APIClient._handle_response
train
def _handle_response(self, response): """ internal method to throw the correct exception if something went wrong """ status = response.status_code if status == 400: msg = u"bad request" raise exceptions.BadRequest(status, msg) elif status == 401: msg = u"authorization failed user:%s" % (self.sk_user) raise exceptions.Unauthorized(status, msg) elif status == 404: raise exceptions.NotFound() elif status == 422: msg = u"bad request" raise exceptions.BadRequest(status, msg) elif status in range(400, 500): msg = u"unexpected bad request" raise exceptions.BadRequest(status, msg) elif status in range(500, 600): raise exceptions.ServerError() return response
python
{ "resource": "" }
q43544
see_doc
train
def see_doc(obj_with_doc):
    """Copy docstring from existing object to the decorated callable."""
    def apply_doc(fn):
        # Overwrite the callable's docstring with the source object's.
        fn.__doc__ = obj_with_doc.__doc__
        return fn
    return apply_doc
python
{ "resource": "" }
q43545
class_in_progress
train
def class_in_progress(stack=None):
    """True if currently inside a class definition, else False."""
    frames = inspect.stack() if stack is None else stack
    for frame in frames:
        # frame[4] is the source context; it may be None (e.g. eval frames).
        context = frame[4]
        if context is None:
            continue
        if context[0].strip().startswith('class '):
            return True
    return False
python
{ "resource": "" }
q43546
GeneratorProvider.close
train
def close(self):
    """Close the generator, verifying that it is exhausted."""
    if self.support_name:
        # Named generators are still suspended at a yield; close them first.
        self.generator.close()
    try:
        next(self.generator)
    except StopIteration:
        return
    # A well-behaved provider generator must be exhausted by now.
    msg = "generator didn't stop: function {!r}"
    raise RuntimeError(msg.format(self.function))
python
{ "resource": "" }
q43547
Annotator.get_annotations
train
def get_annotations(cls, __fn):
    """Get the annotations of a given callable."""
    # Unwrap bound/unbound methods to the underlying function object.
    fn = getattr(__fn, '__func__', __fn)
    if hasattr(fn, '__notes__'):
        return fn.__notes__
    raise AttributeError('{!r} does not have annotations'.format(fn))
python
{ "resource": "" }
q43548
Annotator.set_annotations
train
def set_annotations(cls, __fn, *notes, **keyword_notes):
    """Set the annotations on the given callable."""
    # Unwrap bound/unbound methods to the underlying function object.
    fn = getattr(__fn, '__func__', __fn)
    if hasattr(fn, '__notes__'):
        raise AttributeError('callable already has notes: {!r}'.format(fn))
    fn.__notes__ = (notes, keyword_notes)
python
{ "resource": "" }
q43549
Annotator.wraps
train
def wraps(__fn, **kw):
    """Like ``functools.wraps``, with support for annotations."""
    # Default to the annotation-aware assignment tuple unless overridden.
    kw.setdefault('assigned', WRAPPER_ASSIGNMENTS)
    return functools.wraps(__fn, **kw)
python
{ "resource": "" }
q43550
Annotator.partial
train
def partial(__fn, *a, **kw):
    """Wrap a note for injection of a partially applied function.

    This allows for annotated functions to be injected for composition::

        from jeni import annotate

        @annotate('foo', bar=annotate.maybe('bar'))
        def foobar(foo, bar=None):
            return

        @annotate('foo', annotate.partial(foobar))
        def bazquux(foo, fn):
            # fn: injector.partial(foobar)
            return

    Keyword arguments are treated as `maybe` when using partial, in order
    to allow partial application of only the notes which can be provided,
    where the caller could then apply arguments known to be unavailable in
    the injector. Note that with Python 3 function annotations, all
    annotations are injected as keyword arguments.

    Injections on the partial function are lazy and not applied until the
    injected partial function is called. See `eager_partial` to inject
    eagerly.
    """
    note = (__fn, a, tuple(kw.items()))
    return (PARTIAL, note)
python
{ "resource": "" }
q43551
Annotator.partial_regardless
train
def partial_regardless(__fn, *a, **kw):
    """Wrap a note for injection of a partially applied function, or don't.

    Use this instead of `partial` when binding a callable that may or may
    not have annotations.
    """
    note = (__fn, a, tuple(kw.items()))
    return (PARTIAL_REGARDLESS, note)
python
{ "resource": "" }
q43552
Annotator.eager_partial
train
def eager_partial(__fn, *a, **kw):
    """Wrap a note for injection of an eagerly partially applied function.

    Use this instead of `partial` when eager injection is needed in place
    of lazy injection.
    """
    note = (__fn, a, tuple(kw.items()))
    return (EAGER_PARTIAL, note)
python
{ "resource": "" }
q43553
Annotator.eager_partial_regardless
train
def eager_partial_regardless(__fn, *a, **kw):
    """Wrap a note for injection of an eagerly partially applied function,
    or don't.

    Use this instead of `eager_partial` when binding a callable that may
    or may not have annotations.
    """
    note = (__fn, a, tuple(kw.items()))
    return (EAGER_PARTIAL_REGARDLESS, note)
python
{ "resource": "" }
q43554
Injector.provider
train
def provider(cls, note, provider=None, name=False):
    """Register a provider, either a Provider class or a generator.

    Provider class::

        from jeni import Injector as BaseInjector
        from jeni import Provider

        class Injector(BaseInjector):
            pass

        @Injector.provider('hello')
        class HelloProvider(Provider):
            def get(self, name=None):
                if name is None:
                    name = 'world'
                return 'Hello, {}!'.format(name)

    Simple generator::

        @Injector.provider('answer')
        def answer():
            yield 42

    If a generator supports get with a name argument::

        @Injector.provider('spam', name=True)
        def spam():
            count_str = yield 'spam'
            while True:
                count_str = yield 'spam' * int(count_str)

    Registration can be a decorator or a direct method call::

        Injector.provider('hello', HelloProvider)

    Note: the direct-call form returns None; only the decorator form
    returns the provider.
    """
    def decorator(provider):
        if inspect.isgeneratorfunction(provider):
            # Automatically adapt generator functions, then recurse so the
            # adapted provider goes through normal registration.
            provider = cls.generator_provider.bind(
                provider, support_name=name)
            return decorator(provider)
        cls.register(note, provider)
        return provider
    if provider is not None:
        decorator(provider)
    else:
        return decorator
python
{ "resource": "" }
q43555
Injector.factory
train
def factory(cls, note, fn=None):
    """Register a function as a provider.

    Function (name support is optional)::

        from jeni import Injector as BaseInjector
        from jeni import Provider

        class Injector(BaseInjector):
            pass

        @Injector.factory('echo')
        def echo(name=None):
            return name

    Registration can be a decorator or a direct method call::

        Injector.factory('echo', echo)
    """
    def decorator(f):
        # Wrap the plain function in a factory provider before registering.
        provider = cls.factory_provider.bind(f)
        cls.register(note, provider)
        return f
    if fn is not None:
        # Bug fix: the direct-call form previously returned None; return
        # the function so both forms behave alike.
        return decorator(fn)
    else:
        return decorator
python
{ "resource": "" }
q43556
Injector.apply
train
def apply(self, fn, *a, **kw):
    """Fully apply annotated callable, returning callable's result.

    Injected positional arguments come before caller-supplied ones;
    caller keyword arguments override injected keyword arguments.
    """
    injected_args, injected_kwargs = self.prepare_callable(fn)
    call_args = injected_args + a
    call_kwargs = dict(injected_kwargs, **kw)
    return fn(*call_args, **call_kwargs)
python
{ "resource": "" }
q43557
Injector.partial
train
def partial(self, fn, *user_args, **user_kwargs):
    """Return function with closure to lazily inject annotated callable.

    Repeat calls to the resulting function will reuse injections from the
    first call.

    Positional arguments are provided in this order:

    1. positional arguments provided by injector
    2. positional arguments provided in `partial_fn = partial(fn, *args)`
    3. positional arguments provided in `partial_fn(*args)`

    Keyword arguments are resolved in this order (later override earlier):

    1. keyword arguments provided by injector
    2. keyword arguments provided in `partial_fn = partial(fn, **kwargs)`
    3. keyword arguments provided in `partial_fn(**kargs)`

    Note that Python function annotations (in Python 3) are injected as
    keyword arguments, as documented in `annotate`, which affects the
    argument order here. `annotate.partial` accepts arguments in same
    manner as this `partial`.
    """
    self.get_annotations(fn)  # Assert has annotations.
    def lazy_injection_fn(*run_args, **run_kwargs):
        # Injection is deferred to the first call; the resolved arguments
        # are cached as an attribute on this closure function so repeat
        # calls skip `prepare_callable`.
        arg_pack = getattr(lazy_injection_fn, 'arg_pack', None)
        if arg_pack is not None:
            pack_args, pack_kwargs = arg_pack
        else:
            jeni_args, jeni_kwargs = self.prepare_callable(fn, partial=True)
            # Injector args first, then args bound at partial() time.
            pack_args = jeni_args + user_args
            pack_kwargs = {}
            pack_kwargs.update(jeni_kwargs)
            pack_kwargs.update(user_kwargs)
            lazy_injection_fn.arg_pack = (pack_args, pack_kwargs)
        # Call-time arguments come last and override cached kwargs.
        final_args = pack_args + run_args
        final_kwargs = {}
        final_kwargs.update(pack_kwargs)
        final_kwargs.update(run_kwargs)
        return fn(*final_args, **final_kwargs)
    return lazy_injection_fn
python
{ "resource": "" }
q43558
Injector.eager_partial
train
def eager_partial(self, fn, *a, **kw):
    """Partially apply annotated callable, returning a partial function.

    Unlike `partial`, injection happens immediately (at the time the
    partial function is built) rather than on first call; argument
    resolution for the final call is delegated to `functools.partial`.
    """
    injected_args, injected_kwargs = self.prepare_callable(fn, partial=True)
    merged_args = injected_args + a
    merged_kwargs = dict(injected_kwargs, **kw)
    return functools.partial(fn, *merged_args, **merged_kwargs)
python
{ "resource": "" }
q43559
Injector.apply_regardless
train
def apply_regardless(self, fn, *a, **kw):
    """Like `apply`, but also works when the callable is not annotated."""
    if not self.has_annotations(fn):
        return fn(*a, **kw)
    return self.apply(fn, *a, **kw)
python
{ "resource": "" }
q43560
Injector.partial_regardless
train
def partial_regardless(self, fn, *a, **kw):
    """Like `partial`, but also works when the callable is not annotated."""
    if not self.has_annotations(fn):
        return functools.partial(fn, *a, **kw)
    return self.partial(fn, *a, **kw)
python
{ "resource": "" }
q43561
Injector.eager_partial_regardless
train
def eager_partial_regardless(self, fn, *a, **kw):
    """Like `eager_partial`, but also works when `fn` is not annotated."""
    if not self.has_annotations(fn):
        return functools.partial(fn, *a, **kw)
    return self.eager_partial(fn, *a, **kw)
python
{ "resource": "" }
q43562
Injector.get
train
def get(self, note):
    """Resolve a single note into an object.

    Raises RuntimeError when the injector is already closed, LookupError
    when no provider is registered for the note, and DependencyCycleError
    when resolution recurses back into an in-flight (basenote, name).
    """
    if self.closed:
        raise RuntimeError('{!r} already closed'.format(self))
    # Record request for note even if it fails to resolve.
    self.stats[note] += 1
    # Handle injection of partially applied annotated functions.  These
    # notes are 2-tuples tagged with the partial mode; the payload is
    # (fn, positional_args, tuple_of_kwarg_items).
    if isinstance(note, tuple) and len(note) == 2:
        if note[0] == PARTIAL:
            fn, a, kw_items = note[1]
            return self.partial(fn, *a, **dict(kw_items))
        elif note[0] == PARTIAL_REGARDLESS:
            fn, a, kw_items = note[1]
            return self.partial_regardless(fn, *a, **dict(kw_items))
        elif note[0] == EAGER_PARTIAL:
            fn, a, kw_items = note[1]
            return self.eager_partial(fn, *a, **dict(kw_items))
        elif note[0] == EAGER_PARTIAL_REGARDLESS:
            fn, a, kw_items = note[1]
            return self.eager_partial_regardless(fn, *a, **dict(kw_items))
    basenote, name = self.parse_note(note)
    # Unnamed notes may be served from the value cache.
    if name is None and basenote in self.values:
        return self.values[basenote]
    try:
        provider_factory = self.lookup(basenote)
    except LookupError:
        msg = "Unable to resolve '{}'"
        raise LookupError(msg.format(note))
    # Track in-flight resolutions to detect dependency cycles.
    self.instantiating.append((basenote, name))
    try:
        if self.instantiating.count((basenote, name)) > 1:
            stack = ' <- '.join(repr(note) for note in self.instantiating)
            notes = tuple(self.instantiating)
            raise DependencyCycleError(stack, notes=notes)
        return self.handle_provider(provider_factory, note)
    finally:
        self.instantiating.pop()
python
{ "resource": "" }
q43563
Injector.close
train
def close(self):
    """Close injector & injected Provider instances, including generators.

    Finalizers run in the reverse of the order they were registered, and
    each runs exactly once.  Providers are closed if the injector touched
    them at all, even when a dependency was never successfully provided,
    so a provider's close method should decide for itself whether any
    teardown is actually needed.
    """
    if self.closed:
        raise RuntimeError('{!r} already closed'.format(self))
    for finalizer in self.finalizers[::-1]:
        # Note: Unable to apply injector on close method.
        finalizer()
    self.closed = True
    self.instances.clear()
    self.values.clear()
python
{ "resource": "" }
q43564
Injector.prepare_callable
train
def prepare_callable(self, fn, partial=False):
    """Resolve the notes annotating `fn` into (args, kwargs) for a call."""
    positional_notes, keyword_notes = self.get_annotations(fn)
    return self.prepare_notes(
        *positional_notes, __partial=partial, **keyword_notes)
python
{ "resource": "" }
q43565
Injector.prepare_notes
train
def prepare_notes(self, *notes, **keyword_notes):
    """Resolve positional and keyword notes into injection values.

    A keyword note wrapped as ``(MAYBE, note)`` is optional: a failed
    lookup simply omits that keyword.  When called with
    ``__partial=True`` every keyword note becomes optional.
    """
    allow_missing = keyword_notes.pop('__partial', False)
    positional = tuple(self.get(note) for note in notes)
    resolved = {}
    for arg, note in keyword_notes.items():
        is_maybe = (isinstance(note, tuple) and len(note) == 2
                    and note[0] == MAYBE)
        if is_maybe:
            note = note[1]
        if is_maybe or allow_missing:
            # Optional dependency: silently skip unresolvable notes.
            try:
                resolved[arg] = self.get(note)
            except LookupError:
                continue
        else:
            resolved[arg] = self.get(note)
    return positional, resolved
python
{ "resource": "" }
q43566
Injector.parse_note
train
def parse_note(cls, note):
    """Parse an annotation into a (base, name) pair; name may be None.

    Tuple annotations must already be (base, name) pairs; string
    annotations are split by the class's `re_note` pattern; any other
    Python object is a bare note with no name component.
    """
    if isinstance(note, tuple):
        if len(note) != 2:
            raise ValueError('tuple annotations must be length 2')
        return note
    try:
        parsed = cls.re_note.match(note)
    except TypeError:
        # Note is not a string. Support any Python object as a note.
        return note, None
    return parsed.groups()
python
{ "resource": "" }
q43567
Injector.handle_provider
train
def handle_provider(self, provider_factory, note): """Get value from provider as requested by note.""" # Implementation in separate method to support accurate book-keeping. basenote, name = self.parse_note(note) # _handle_provider could be even shorter if # Injector.apply() worked with classes, issue #9. if basenote not in self.instances: if (isinstance(provider_factory, type) and self.has_annotations(provider_factory.__init__)): args, kwargs = self.prepare_callable(provider_factory.__init__) self.instances[basenote] = provider_factory(*args, **kwargs) else: self.instances[basenote] = self.apply_regardless( provider_factory) provider = self.instances[basenote] if hasattr(provider, 'close'): self.finalizers.append(self.instances[basenote].close) provider = self.instances[basenote] get = self.partial_regardless(provider.get) try: if name is not None: return get(name=name) self.values[basenote] = get() return self.values[basenote] except UnsetError: # Use sys.exc_info to support both Python 2 and Python 3. exc_type, exc_value, tb = sys.exc_info() exc_msg = str(exc_value) if exc_msg: msg = '{}: {!r}'.format(exc_msg, note) else: msg = repr(note) six.reraise(exc_type, exc_type(msg, note=note), tb)
python
{ "resource": "" }
q43568
Injector.register
train
def register(cls, note, provider):
    """Map a basenote to its provider on this class.

    Used by `provider` and `factory`; each class gets its own registry
    dict rather than mutating one inherited from a base class.
    """
    basenote, _ = cls.parse_note(note)
    if 'provider_registry' not in vars(cls):
        cls.provider_registry = {}
    cls.provider_registry[basenote] = provider
python
{ "resource": "" }
q43569
Injector.lookup
train
def lookup(cls, basenote):
    """Find the provider registered for `basenote`, searching the MRO.

    Only registries defined directly on each class are consulted; mixins,
    bases that never registered anything, and classes without their own
    `provider_registry` are skipped.  Raises LookupError when no class in
    the MRO has the note.
    """
    for klass in cls.mro():
        registry = vars(klass).get('provider_registry')
        if registry is not None and basenote in registry:
            return registry[basenote]
    raise LookupError(repr(basenote))
python
{ "resource": "" }
q43570
Injector.sub
train
def sub(cls, *mixins_and_dicts, **values):
    """Create and instantiate a sub-injector.

    Positional arguments may be mixin classes and/or dicts of local
    values; keyword arguments are additional local values.  Keyword
    values take precedence over dict values, and later dicts take
    precedence over earlier ones.
    """
    class SubInjector(cls):
        pass
    mixins = tuple(arg for arg in mixins_and_dicts if isinstance(arg, type))
    if mixins:
        SubInjector.__bases__ = mixins + SubInjector.__bases__
    value_dicts = [arg for arg in mixins_and_dicts
                   if not isinstance(arg, type)]
    for mapping in reversed(value_dicts):
        for key, val in mapping.items():
            values.setdefault(key, val)
    for key, val in values.items():
        SubInjector.value(key, val)
    return SubInjector()
python
{ "resource": "" }
q43571
_getFuncArgs
train
def _getFuncArgs(func): r"""Gives the details on the args of the given func. Args: func (function): The function to get details on. """ code = func.func_code Defaults = func.func_defaults nargs = code.co_argcount ArgNames = code.co_varnames[:nargs] Args = OrderedDict() argCount = len(ArgNames) defCount = len(Defaults) if Defaults else 0 diff = argCount - defCount for i in range(0, diff): Args[ArgNames[i]] = {} for i in range(diff, argCount): Args[ArgNames[i]] = {'default': Defaults[i - diff]} return Args
python
{ "resource": "" }
q43572
FormLabel.get_form_label
train
def get_form_label(self, request=None, obj=None, model=None, form=None):
    """Return the custom form label when the condition is met.

    Falls back to the field's default label when ``self.condition_cls``
    (a CustomLabelCondition) does not hold.  Raises CustomFormLabelError
    when the custom label template references unknown keys.
    """
    default_label = form.base_fields[self.field].label
    condition = self.condition_cls(request=request, obj=obj, model=model)
    if not condition.check():
        return default_label
    extra = condition.get_additional_options(
        request=request, obj=obj, model=model
    )
    if obj:
        visit = getattr(obj, obj.visit_model_attr())
        visit_datetime = visit.report_datetime.strftime("%B %Y")
    else:
        visit_datetime = ""
    format_kwargs = dict(
        appointment=condition.appointment,
        previous_appointment=condition.previous_appointment,
        previous_obj=condition.previous_obj,
        previous_visit=condition.previous_visit,
        visit_datetime=visit_datetime,
        **extra,
    )
    try:
        return self.custom_label.format(**format_kwargs)
    except KeyError as e:
        raise CustomFormLabelError(
            f"Custom label template has invalid keys. See {default_label}. Got {e}."
        )
python
{ "resource": "" }
q43573
AwsRoot.do_stack
train
def do_stack(self, args):
    """Go to the specified stack. stack -h for detailed help"""
    parser = CommandArgumentParser("stack")
    parser.add_argument(dest='stack', help='stack index or name')
    parser.add_argument('-a', '--asg', dest='asg',
                        help='descend into specified asg')
    args = vars(parser.parse_args(args))

    try:
        # Numeric argument: index into the cached stack list, refreshing
        # the cache silently if needed.
        index = int(args['stack'])
        if self.stackList is None:
            self.do_stacks('-s')
        stack = AwsConnectionFactory.instance.getCfResource().Stack(
            self.stackList[index]['StackName'])
    except ValueError:
        # Non-numeric argument: treat it as a stack name.
        stack = AwsConnectionFactory.instance.getCfResource().Stack(args['stack'])

    # Bug fix: `'asg' in args` was always true because argparse stores a
    # None default for --asg, so every invocation previously ran
    # "asg None" before entering the command loop.  Only descend into the
    # ASG when one was actually supplied.
    if args['asg'] is not None:
        AwsProcessor.processorFactory.Stack(stack, stack.name, self).onecmd(
            'asg {}'.format(args['asg']))
    AwsProcessor.processorFactory.Stack(stack, stack.name, self).cmdloop()
python
{ "resource": "" }
q43574
AwsRoot.do_delete_stack
train
def do_delete_stack(self,args):
    """Delete specified stack. delete_stack -h for detailed help.

    Prints the stack's identifying details and requires the user to
    re-type the exact stack name before deletion proceeds.
    """
    parser = CommandArgumentParser("delete_stack")
    parser.add_argument(dest='stack',help='stack index or name');
    args = vars(parser.parse_args(args))
    try:
        # Numeric argument: index into the cached stack list, refreshing
        # the cache silently if needed.
        index = int(args['stack'])
        if self.stackList == None:
            self.do_stacks('-s')
        stack = AwsConnectionFactory.instance.getCfResource().Stack(self.stackList[index]['StackName'])
    except ValueError:
        # Non-numeric argument: treat it as a stack name.
        stack = AwsConnectionFactory.instance.getCfResource().Stack(args['stack'])
    print "Here are the details of the stack you are about to delete:"
    print "Stack.name: {}".format(stack.name)
    print "Stack.stack_id: {}".format(stack.stack_id)
    print "Stack.creation_time: {}".format(stack.creation_time)
    # Confirmation gate: deletion only happens on an exact name match.
    confirmation = raw_input("If you are sure, enter the Stack.name here: ")
    if stack.name == confirmation:
        stack.delete()
        print "Stack deletion in progress"
    else:
        print "Stack deletion canceled: '{}' != '{}'".format(stack.name,confirmation)
python
{ "resource": "" }
q43575
AwsRoot.do_stacks
train
def do_stacks(self,args):
    """List available stacks. stacks -h for detailed help.

    Pages through CloudFormation list_stacks results, filters by status
    (module-global `stackStatusFilter`, adjusted by -i/-e) and by name
    globs, then caches the numbered result in `self.stackList` for use
    by `do_stack`/`do_delete_stack`.
    """
    parser = CommandArgumentParser()
    parser.add_argument('-s','--silent',dest='silent',action='store_true',help='Run silently')
    parser.add_argument('-i','--include',nargs='*',dest='includes',default=[],help='Add statuses')
    parser.add_argument('-e','--exclude',nargs='*',dest='excludes',default=[],help='Remove statuses')
    parser.add_argument('--summary',dest='summary',action='store_true',default=False,help='Show just a summary')
    parser.add_argument(dest='filters',nargs='*',default=["*"],help='Filter stacks')
    args = vars(parser.parse_args(args))
    nextToken = None
    includes = args['includes']
    excludes = args['excludes']
    filters = args['filters']
    # NOTE(review): -i/-e mutate the module-global filter list, so the
    # adjustments persist across subsequent `stacks` invocations.
    global stackStatusFilter
    for i in includes:
        if not i in stackStatusFilter:
            stackStatusFilter.append(i)
    for e in excludes:
        stackStatusFilter.remove(e)
    # Page through results until no NextToken is returned.
    complete = False;
    stackSummaries = []
    while not complete:
        if None == nextToken:
            stacks = AwsConnectionFactory.getCfClient().list_stacks(StackStatusFilter=stackStatusFilter)
        else:
            stacks = AwsConnectionFactory.getCfClient().list_stacks(NextToken=nextToken,StackStatusFilter=stackStatusFilter)
        #pprint(stacks)
        if not 'NextToken' in stacks:
            complete = True;
        else:
            nextToken = stacks['NextToken']
        if 'StackSummaries' in stacks:
            stackSummaries.extend(stacks['StackSummaries'])
    # Name-glob filter, then sort by name and assign stable indexes.
    stackSummaries = filter( lambda x: fnmatches(x['StackName'],filters),stackSummaries)
    stackSummaries = sorted(stackSummaries, key= lambda entry: entry['StackName'])
    index = 0;
    stackSummariesByIndex = {}
    for summary in stackSummaries:
        summary['Index'] = index
        stackSummariesByIndex[index] = summary
        index += 1
    self.stackList = stackSummariesByIndex
    if not (args['silent'] or args['summary']):
        for index,summary in stackSummariesByIndex.items():
            print '{0:3d}: {2:20} {1:40} {3}'.format(summary['Index'],summary['StackName'],summary['StackStatus'],defaultifyDict(summary,'StackStatusReason',''))
    if args['summary'] and not args['silent']:
        print '{} stacks'.format(len(stackSummariesByIndex))
python
{ "resource": "" }
q43576
AwsRoot.do_stack_resource
train
def do_stack_resource(self, args):
    """Use specified stack resource. stack_resource -h for detailed help."""
    parser = CommandArgumentParser()
    parser.add_argument('-s', '--stack-name', dest='stack-name',
                        help='name of the stack resource')
    parser.add_argument('-i', '--logical-id', dest='logical-id',
                        help='logical id of the child resource')
    parsed = vars(parser.parse_args(args))
    self.stackResource(parsed['stack-name'], parsed['logical-id'])
python
{ "resource": "" }
q43577
Monitor.configure
train
def configure(self, config):
    """
    Configure Monitor, pull list of what to monitor, initialize threads

    Starts the worker/result daemon thread pools sized from the config,
    sends an immediate heartbeat, and schedules the recurring refresh,
    heartbeat and reporting jobs (config intervals are in seconds;
    set_interval takes milliseconds).  Returns self for chaining.
    """
    self.config = config
    self.update_monitors()
    # initialize thread pools (daemon threads: they die with the process)
    for profile in ('worker', 'result'):
        for _ in range(config['threads'][profile]['number']):
            worker = threading.Thread(target=config['threads'][profile]['function'])
            worker.daemon = True
            worker.start()
    # send a heartbeat right away
    self.heartbeat()
    # setup interval jobs; keep the stoppers so start_agent can cancel them
    self.refresh_stopper = set_interval(config['interval']['refresh']*1000, self.update_monitors)
    self.heartbeat_stopper = set_interval(config['interval']['heartbeat']*1000, self.heartbeat)
    self.reporting_stopper = set_interval(config['interval']['reporting']*1000, self.reporting)
    return self
python
{ "resource": "" }
q43578
Monitor.start
train
def start(self):
    """
    The main loop, run forever.

    Each interval: feed all monitors to the worker queue, wait for the
    queue to drain, then sleep out the remainder of the configured test
    interval.  When a pass overruns the interval, the overrun is recorded
    in `self.stats.procwin` and no sleep happens.
    """
    while True:
        self.thread_debug("Interval starting")
        for thr in threading.enumerate():
            self.thread_debug(" " + str(thr))
        self.feed_monitors()
        start = time.time()
        # wait fore queue to empty
        self.workers_queue.join()
        end = time.time()
        diff = self.config['interval']['test'] - (end - start)
        if diff <= 0: # alarm
            # NOTE(review): procwin is set on overrun but never reset on a
            # healthy pass — confirm that is intentional.
            self.stats.procwin = -diff
            self.thread_debug("Cannot keep up with tests! {} seconds late"
                              .format(abs(diff)))
        else:
            self.thread_debug("waiting {} seconds...".format(diff))
            time.sleep(diff)
python
{ "resource": "" }
q43579
Monitor.update_monitors
train
def update_monitors(self):
    """
    Periodically check in with Reflex Engine and refresh the list of
    what to monitor.

    Builds one monitor entry per (pipeline monitor, active instance)
    pair, then swaps the new list into `self.monitors` and refreshes the
    instance/service/pipeline caches.
    """
    self.thread_debug("Starting monitor refresh", module="update_monitors")
    # need to make a more efficient way of doing this via Reflex Engine
    monitors = []
    self.rcs.cache_reset()
    svcs = self.rcs.cache_list('service',
                               cols=['pipeline', 'name', 'active-instances'])
    for svc in svcs:
        try:
            pipeline = self.rcs.cache_get('pipeline', svc['pipeline'])
            for mon in pipeline.get('monitor', []):
                self.DEBUG("monitor {}".format(mon))
                mon['service'] = svc['name']
                mon['pipeline'] = svc['pipeline']
                for inst_name in svc.get('active-instances', []):
                    inst = self.rcs.cache_get('instance', inst_name)
                    # todo: insert: macro flatten
                    mymon = mon.copy()
                    mymon['instance'] = inst_name
                    mymon['target'] = inst['address']
                    mymon['title'] = svc['name'] + ": " + mon['name']
                    monitors.append(mymon)
        except Exception:
            # Best-effort: one broken service/pipeline record must not
            # abort the refresh of the rest.  Fix: `except Exception`
            # replaces the previous bare `except:` so KeyboardInterrupt
            # and SystemExit propagate instead of being swallowed.
            self.NOTIFY("Error in processing monitor:",
                        err=traceback.format_exc())
    self.NOTIFY("Refreshed monitors", total_monitors=len(monitors))
    self.DEBUG("Monitors", monitors=monitors)
    # mutex / threadsafe?
    self.monitors = monitors
    cache = self.rcs._cache  # pylint: disable=protected-access
    self.instances = cache['instance']
    self.services = cache['service']
    self.pipelines = cache['pipeline']
    self.thread_debug("Refresh complete", module="update_monitors")
python
{ "resource": "" }
q43580
Monitor.thread_debug
train
def thread_debug(self, *args, **kwargs):
    """Wrap DEBUG() so messages are tagged with the current thread name."""
    module = kwargs.get('module', "Monitor")
    kwargs['module'] = module
    if module != 'Monitor' and self.do_DEBUG(module='Monitor'):
        # Debugging 'Monitor' implicitly enables debugging for submodules.
        self.debug[module] = True
    if not self.do_DEBUG(module=module):
        return
    tagged = "[" + threading.current_thread().name + "] " + module
    if not self.debug.get(tagged):
        # Enable the thread-tagged pseudo-module the first time we see it.
        self.debug[tagged] = True
    kwargs['module'] = tagged
    self.DEBUG(*args, **kwargs)
python
{ "resource": "" }
q43581
Monitor._worker_http
train
def _worker_http(self, monitor):
    """
    Process an http monitor.

    Issues the configured HTTP request against the monitor's target and
    returns a result dict (url/status/result/monitor/message/elapsedms/
    code).  Status becomes 'ok' only if every configured expectation
    (response-code, content substring, regex) passes; the first failed
    expectation short-circuits with status 'failed'.
    """
    self.thread_debug("process_http", data=monitor, module='handler')
    query = monitor['query']
    method = query['method'].lower()
    self.stats.http_run += 1
    try:
        target = monitor['target']
        # assumes target carries 'host' and 'port' keys — TODO confirm
        url = 'http://{host}:{port}{path}'.format(path=query['path'], **target)
        # Pessimistic defaults; overwritten below on a completed request.
        response = {
            'url': url,
            'status': 'failed',
            'result': {},
            'monitor': monitor,
            'message': 'did not meet expected result or no expected result defined',
            'elapsedms': monitor['timeout']*1000,
            'code':0
        }
        # not sed_env_dict -- we do not want to xref headers
        headers = query.get('headers', {})
        for elem in headers:
            headers[elem] = self.sed_env(headers[elem], {}, '')
        res = response['result'] = getattr(requests, method)(url, headers=headers, timeout=monitor['timeout'])
        response['code'] = res.status_code
        response['elapsedms'] = res.elapsed.total_seconds() * 1000
        if 'response-code' in monitor['expect']:
            if int(monitor['expect']['response-code']) == res.status_code:
                response['message'] = ''
                response['status'] = 'ok'
            else: # abort with failure, do not pass go
                return response
        if 'content' in monitor['expect']:
            if monitor['expect']['content'] in res.text:
                response['message'] = ''
                response['status'] = 'ok'
            else: # abort with failure, do not pass go
                return response
        if 'regex' in monitor['expect']:
            if re.search(monitor['expect']['regex'], res.text):
                response['message'] = ''
                response['status'] = 'ok'
            else: # abort with failure, do not pass go
                return response
    except requests.exceptions.Timeout:
        response['message'] = 'timeout'
    except requests.exceptions.ConnectionError:
        response['message'] = 'connect-failed'
        response['elapsedms'] = -1
    return response
python
{ "resource": "" }
q43582
Monitor._handler_http
train
def _handler_http(self, result):
    """
    Handle the result of an http monitor.

    Emits a metric log line for the test and, when the instance's status
    changed, patches the new status back to Reflex Engine.
    """
    monitor = result['monitor']
    self.thread_debug("process_http", data=monitor, module='handler')
    self.stats.http_handled += 1
    # splunk will pick this up
    logargs = {
        'type':"metric",
        'endpoint': result['url'],
        'pipeline': monitor['pipeline'],
        'service': monitor['service'],
        'instance': monitor['instance'],
        'status': result['status'],
        'elapsed-ms': round(result['elapsedms'], 5),
        'code': result['code']
    }
    self.NOTIFY(result['message'], **logargs)
    # if our status has changed, also update Reflex Engine
    if result['status'] != self.instances[monitor['instance']]['status']:
        # do some retry/counter steps on failure?
        self.instances[monitor['instance']]['status'] = result['status']
        self.rcs.patch('instance', monitor['instance'],
                       {'status': result['status']})
python
{ "resource": "" }
q43583
Monitor.reporting
train
def reporting(self):
    """Log this agent's own resource consumption as an internal metric.

    Memory figures from getrusage are converted from KB to MB (rounded
    to two decimals).
    """
    self.thread_debug("reporting")
    usage = resource.getrusage(resource.RUSAGE_SELF)
    self.NOTIFY("",
                type='internal-usage',
                maxrss=round(usage.ru_maxrss / 1024, 2),
                ixrss=round(usage.ru_ixrss / 1024, 2),
                idrss=round(usage.ru_idrss / 1024, 2),
                isrss=round(usage.ru_isrss / 1024, 2),
                threads=threading.active_count(),
                proctot=len(self.monitors),
                procwin=self.stats.procwin)
python
{ "resource": "" }
q43584
Monitor.start_agent
train
def start_agent(self, cfgin=True):
    """
    CLI interface to start 12-factor service.

    Config comes from stdin JSON (default), from the
    REFLEX_MONITOR_CONFIG env var (raw or base64-encoded JSON), or
    falls back to the built-in defaults.  Runs the main loop until
    KeyboardInterrupt, then stops all interval jobs.
    """
    default_conf = {
        "threads": {
            "result": {
                "number": 0,
                "function": None
            },
            "worker": {
                "number": 0,
                "function": None
            },
        },
        "interval": {
            "refresh": 900,
            "heartbeat": 300,
            "reporting": 300,
            "test": 60
        },
        "heartbeat-hook": False
    }
    indata = {}
    if cfgin:
        indata = json.load(sys.stdin)
    elif os.environ.get("REFLEX_MONITOR_CONFIG"):
        raw = os.environ.get("REFLEX_MONITOR_CONFIG")
        if raw[0] != "{":
            raw = base64.b64decode(raw)
        # Bug fix: the env-var payload was previously handed to
        # dictlib.union() as a raw (possibly base64-decoded) string;
        # it must be parsed as JSON like the stdin path.
        indata = json.loads(raw)
    else:
        self.NOTIFY("Using default configuration")
    conf = dictlib.union(default_conf, indata)
    conf['threads']['result']['function'] = self.handler_thread
    conf['threads']['worker']['function'] = self.worker_thread
    self.NOTIFY("Starting monitor Agent")
    try:
        self.configure(conf).start()
    except KeyboardInterrupt:
        # Shut down all interval jobs on Ctrl-C.
        self.thread_stopper.set()
        if self.refresh_stopper:
            self.refresh_stopper.set()
        if self.heartbeat_stopper:
            self.heartbeat_stopper.set()
        if self.reporting_stopper:
            self.reporting_stopper.set()
python
{ "resource": "" }
q43585
start
train
def start():
    r"""Starts ec.

    Entry point for the command core: resolves the main module's member
    tree, then runs either dispatch mode (CLI args present) or shell
    mode (no args), and finally runs any registered exit hooks.
    """
    processPendingModules()
    if not state.main_module_name in ModuleMembers: # don't start the core when main is not Ec-ed
        return
    MainModule = sys.modules[state.main_module_name]
    if not MainModule.__ec_member__.Members: # there was some error while loading script(s)
        return
    # The main module's group becomes the root of command resolution.
    global BaseGroup
    BaseGroup = MainModule.__ec_member__
    Argv = sys.argv[1:]
    global mode
    mode = 'd' if Argv else 's' # dispatch / shell mode
    if mode == 's':
        import shell
        shell.init()
    else:
        import dispatch
        dispatch.init(Argv)
    processExitHooks()
python
{ "resource": "" }
q43586
execCommand
train
def execCommand(Argv, collect_missing):
    r"""Executes the given task with parameters.

    Delegates to _execCommand and funnels any failure through the
    configured error handler / debugger / logging behavior, always
    surfacing errors to the caller as a HandledException.
    """
    try:
        return _execCommand(Argv, collect_missing)
    except Exception as e:
        if Settings['errorHandler']:
            Settings['errorHandler'](e)
        if Settings['debug']: # #ToDo: Have an option to debug through stderr. The issue is, the way to make pdb.post_mortem, to use stderr, like pdb.set_trace is unknown.
            import pdb
            pdb.post_mortem(sys.exc_info()[2])
        if not Settings['silent']: # Debug, then log the trace.
            import traceback
            etype, value, tb = sys.exc_info()
            tb = tb.tb_next.tb_next # remove the ec - calls from the traceback, to make it more understandable
            # Full (trimmed) traceback text, minus the trailing newline.
            message = ''.join(traceback.format_exception(etype, value, tb))[:-1]
        else:
            if isinstance(e, HandledException): # let the modes handle the HandledException
                raise e
            message = str(e) # provide a succinct error message
        raise HandledException(message)
python
{ "resource": "" }
q43587
getDescendant
train
def getDescendant(Ancestor, RouteParts):
    r"""Walk `RouteParts` down from `Ancestor` and return the member found.

    Consumes `RouteParts` (a list, popped from the front) while each
    resolved member is a Group; stops early at the first non-Group member
    (including None from a missing part, via dict.get).
    """
    Node = Ancestor
    while RouteParts:
        Node = Node.Members.get(RouteParts.pop(0))
        if not isinstance(Node, Group):
            break
    return Node
python
{ "resource": "" }
q43588
setActiveModule
train
def setActiveModule(Module):
    r"""Helps with collecting the members of the imported modules.

    On first sight of a module, registers it in the processing queue and
    brands it with an __ec_member__ Group; then points the active member
    queue at that module's member list.
    """
    module_name = Module.__name__
    if module_name not in ModuleMembers:
        ModuleMembers[module_name] = []
        ModulesQ.append(module_name)
        Group(Module, {}) # brand the module with __ec_member__
    state.ActiveModuleMemberQ = ModuleMembers[module_name]
python
{ "resource": "" }
q43589
processModule
train
def processModule(module_name):
    r"""Builds a command tree out of the configured members of a module.

    Walks the module's collected members in order, grouping consecutive
    class children under their class's group (tracked via ClassQ) and
    gathering top-level members into the module group's Members dict.
    Aliases are inserted ahead of the primary name.
    """
    Module = sys.modules[module_name]
    MembersTarget = []
    ClassQ = []          # stack of groups for classes currently being filled
    Cls = None           # underlying class of the group on top of ClassQ
    ClsGroup = None
    ClsGrpMembers = []   # members collected for the current class group
    for Member in ModuleMembers[module_name]:
        Underlying = Member.Underlying
        member_name = Member.Config['name']
        member_alias = Member.Config.get('alias', None)
        if ClassQ:
            ClsGroup = ClassQ[-1]
            Cls = ClsGroup.Underlying
        if getattr(Cls, Underlying.__name__, None) is Underlying: # we got a member tht is a child of the previous class
            if isclass(Underlying):
                # Nested class: push its group so following members nest.
                ClassQ.append(Underlying.__ec_member__)
            elif not isunderlying(Underlying):
                continue
            if member_alias:
                ClsGrpMembers.insert(0, (member_alias, Member))
            ClsGrpMembers.insert(0, (member_name, Member))
            continue
        elif Cls: # we've finished adding children to the previous class
            ClsGroup.Members = OrderedDict(ClsGrpMembers)
            ClsGrpMembers = []
            ClassQ.pop()
            Cls = None
            ClsGroup = None
        if isunderlying(Underlying):
            if member_alias:
                MembersTarget.insert(0, (member_alias, Member))
            MembersTarget.insert(0, (member_name, Member))
        if isclass(Underlying):
            ClassQ.append(Underlying.__ec_member__)
    if ClsGroup:
        # Flush the last class group when the loop ends mid-class.
        ClsGroup.Members = OrderedDict(ClsGrpMembers)
    ModuleMembers[module_name] = [] # remove the existing members from the cache so that they won't be processed again
    if not hasattr(Module.__ec_member__, 'Members'):
        Module.__ec_member__.Members = OrderedDict(MembersTarget)
python
{ "resource": "" }
q43590
_execCommand
train
def _execCommand(Argv, collect_missing):
    r"""Worker of execCommand.

    Resolves Argv[0] (a '/'-separated route) against BaseGroup, digests
    the remaining argv into (Args, KwArgs), and invokes the resolved
    Task — via __collect_n_call__ when collect_missing is set, so missing
    arguments can be gathered interactively.
    """
    if not Argv:
        raise HandledException('Please specify a command!')
    RouteParts = Argv[0].split('/')
    Args, KwArgs = getDigestableArgs(Argv[1:])
    # Copy RouteParts: getDescendant consumes the list it is given.
    ResolvedMember = getDescendant(BaseGroup, RouteParts[:])
    if isinstance(ResolvedMember, Group):
        raise HandledException('Please specify a task.', Member=ResolvedMember)
    if not isinstance(ResolvedMember, Task):
        raise HandledException('No such task.', Member=BaseGroup)
    return ResolvedMember.__collect_n_call__(*Args, **KwArgs) if collect_missing else ResolvedMember(*Args, **KwArgs)
python
{ "resource": "" }
q43591
memoize
train
def memoize(fn): '''Cache the results of a function that only takes positional arguments.''' cache = {} @wraps(fn) def wrapped_function(*args): if args in cache: return cache[args] else: result = fn(*args) cache[args] = result return result return wrapped_function
python
{ "resource": "" }
q43592
setup_config
train
def setup_config(epab_version: str):
    """
    Set up the elib_config package for EPAB.

    Writes an example config file, verifies that pyproject.toml exists,
    and validates the loaded configuration.

    :param epab_version: installed EPAB version string
    """
    log = logging.getLogger('EPAB')
    log.debug('setting up config')
    elib_config.ELIBConfig.setup(
        app_name='EPAB',
        app_version=epab_version,
        config_file_path='pyproject.toml',
        config_sep_str='__',
        root_path=['tool', 'epab'],
    )
    elib_config.write_example_config('pyproject.toml.example')
    config_file = pathlib.Path('pyproject.toml')
    if not config_file.exists():
        raise FileNotFoundError('pyproject.toml')
    elib_config.validate_config()
python
{ "resource": "" }
q43593
get_month_list
train
def get_month_list(to_date, from_date):
    """
    Generate a list of (year, month) tuples between two dates, inclusive.

    Returns: [(2013, 11), (2013, 12), (2014, 1)]
    """
    num_months = get_months_apart(to_date, from_date)
    month_offset = from_date.month
    month_list = []
    for month in range(month_offset - 1, month_offset + num_months):
        # Bug fix: use floor division — plain `/` produces float years
        # under Python 3 (e.g. (2013.83, 11)).
        year = from_date.year + month // 12
        real_month = (month % 12) + 1
        month_list.append((year, real_month))
    return month_list
python
{ "resource": "" }
q43594
find_amplitude
train
def find_amplitude(chunk):
    """
    Calculate the 0-1 amplitude of an ndarray chunk of audio samples.

    Samples in the ndarray chunk are signed int16 values oscillating
    anywhere between -32768 and 32767.  The amplitude is the chunk's
    peak-to-peak range (max minus min) divided by ``config.SAMPLE_RANGE``.

    Args:
        chunk (numpy.ndarray): An array of int16 audio samples

    Returns:
        float: The amplitude of the sample, nominally between 0 and 1.
            Note that this is not a decibel representation of the
            amplitude.
    """
    return (abs(int(chunk.max() - chunk.min())) / config.SAMPLE_RANGE)
python
{ "resource": "" }
q43595
AmplitudeHandler.step_amp
train
def step_amp(self):
    """
    Move `self.value` one step toward `self.drift_target`.

    Steps by at most `self.change_rate` per call (in the direction of
    the target) and snaps exactly onto the target once within one step
    of it.

    Returns:
        None
    """
    gap = self.drift_target - self._raw_value
    if abs(gap) < self.change_rate:
        self.value = self.drift_target
    else:
        self.value = self._raw_value + self.change_rate * numpy.sign(gap)
python
{ "resource": "" }
q43596
_LoaderBasics.create_module
train
def create_module(self, spec):
    """Create the module for `spec`, registering it in sys.modules.

    An already-registered module is reused rather than replaced, adding
    py3-style loader semantics onto the py2 import logic (see
    https://docs.python.org/3/reference/import.html#loaders).
    """
    name = spec.name
    if name not in sys.modules:
        sys.modules[name] = types.ModuleType(name)
    return sys.modules[name]
python
{ "resource": "" }
q43597
_LoaderBasics.exec_module
train
def exec_module(self, module):
    """Execute the module's code object in the module's own namespace.

    Raises ImportError when `get_code` has no code for the module.
    """
    code = self.get_code(module.__name__)
    if code is None:
        raise ImportError('cannot load module {!r} when get_code() '
                          'returns None'.format(module.__name__))
    exec(code, vars(module))
python
{ "resource": "" }
q43598
_LoaderBasics.load_module
train
def load_module(self, fullname):
    """Load the specified module into sys.modules and return it.

    This method is for python2 only, but implemented with backported
    py3 methods: it builds a spec, creates the module from it (which
    registers it in sys.modules), then executes it.  On failure during
    a fresh load, the partially-initialized module is removed from
    sys.modules before re-raising.
    """
    if fullname in sys.modules:
        # Reload case: re-execute in place, and keep the module
        # registered even if execution fails.
        mod = sys.modules[fullname]
        self.exec_module(mod)
        # In this case we do not want to remove the module in case of error
        # Ref : https://docs.python.org/3/reference/import.html#loaders
    else:
        try:
            # Retrieving the spec to help creating module properly
            spec = spec_from_loader(fullname, self)
            # this will call create_module and also initialize the module properly (like for py3)
            mod = module_from_spec(spec)
            # as per https://docs.python.org/3/reference/import.html#loaders
            assert mod.__name__ in sys.modules
            self.exec_module(mod)
            # We don't ensure that the import-related module attributes get
            # set in the sys.modules replacement case. Such modules are on
            # their own.
        except Exception as exc:
            # TODO : log exception !
            # as per https://docs.python.org/3/reference/import.html#loaders
            if fullname in sys.modules:
                del sys.modules[fullname]
            raise
    return sys.modules[fullname]
python
{ "resource": "" }
q43599
NamespaceLoader2.create_module
train
def create_module(self, spec):
    """Improve python2 semantics for module creation.

    Thin wrapper: delegates creation/registration to the base loader and
    deliberately does not set __file__.
    """
    mod = super(NamespaceLoader2, self).create_module(spec)
    # Set a few properties required by PEP 302
    # mod.__file__ = [p for p in self.path]  # this will set mod.__repr__ to not builtin... shouldnt break anything in py2...
    # CAREFUL : get_filename present implies the module has ONE location, which is not true with namespaces
    return mod
python
{ "resource": "" }