_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q45500
CapakeyRestGateway.get_sectie_by_id_and_afdeling
train
def get_sectie_by_id_and_afdeling(self, id, afdeling):
    '''
    Get a `sectie`.

    :param id: An id of a sectie. eg. "A"
    :param afdeling: The :class:`Afdeling` for in which the `sectie` can \
        be found. Can also be the id of and `afdeling`.
    :rtype: A :class:`Sectie`.
    '''
    try:
        # EAFP: assume we received an Afdeling object and read its id.
        aid = afdeling.id
    except AttributeError:
        # A bare id was passed; fetch the full Afdeling object, which is
        # needed below for its `gemeente` attribute.
        aid = afdeling
        afdeling = self.get_kadastrale_afdeling_by_id(aid)
    # Detach the gateway so the object can be stored in the cache safely.
    afdeling.clear_gateway()

    def creator():
        # Request full geometry; srs 31370 (presumably Belgian Lambert 72
        # — TODO confirm against the service docs).
        url = self.base_url + '/municipality/%s/department/%s/section/%s' % (afdeling.gemeente.id, afdeling.id, id)
        h = self.base_headers
        p = {
            'geometry': 'full',
            'srs': '31370'
        }
        res = capakey_rest_gateway_request(url, h, p).json()
        return Sectie(
            res['sectionCode'],
            afdeling,
            self._parse_centroid(res['geometry']['center']),
            self._parse_bounding_box(res['geometry']['boundingBox']),
            res['geometry']['shape'],
        )

    # Use the long-lived cache when configured, otherwise fetch directly.
    if self.caches['long'].is_configured:
        key = 'get_sectie_by_id_and_afdeling_rest#%s#%s' % (id, aid)
        sectie = self.caches['long'].get_or_create(key, creator)
    else:
        sectie = creator()
    # Re-attach this gateway on the (possibly cached) result.
    sectie.set_gateway(self)
    return sectie
python
{ "resource": "" }
q45501
CapakeyRestGateway.list_percelen_by_sectie
train
def list_percelen_by_sectie(self, sectie):
    '''
    List all percelen in a `sectie`.

    :param sectie: The :class:`Sectie` for which the percelen are wanted.
    :rtype: A :class:`list` of :class:`Perceel`.
    '''
    # Capture the ids before detaching the gateway from the sectie.
    sid = sectie.id
    aid = sectie.afdeling.id
    gid = sectie.afdeling.gemeente.id
    sectie.clear_gateway()

    def creator():
        url = self.base_url + '/municipality/%s/department/%s/section/%s/parcel' % (gid, aid, sid)
        h = self.base_headers
        p = {
            'data': 'adp'
        }
        res = capakey_rest_gateway_request(url, h, p).json()
        return [
            Perceel(
                r['perceelnummer'],
                sectie,
                r['capakey'],
                self.parse_percid(r['capakey']),
            ) for r in res['parcels']
        ]

    # Uses the short-lived cache for parcel listings.
    if self.caches['short'].is_configured:
        key = 'list_percelen_by_sectie_rest#%s#%s#%s' % (gid, aid, sid)
        percelen = self.caches['short'].get_or_create(key, creator)
    else:
        percelen = creator()
    # Re-attach this gateway to each (possibly cached) perceel.
    for p in percelen:
        p.set_gateway(self)
    return percelen
python
{ "resource": "" }
q45502
Perceel._split_capakey
train
def _split_capakey(self): ''' Split a capakey into more readable elements. Splits a capakey into it's grondnummer, bisnummer, exponent and macht. ''' import re match = re.match( r"^[0-9]{5}[A-Z]{1}([0-9]{4})\/([0-9]{2})([A-Z\_]{1})([0-9]{3})$", self.capakey ) if match: self.grondnummer = match.group(1) self.bisnummer = match.group(2) self.exponent = match.group(3) self.macht = match.group(4) else: raise ValueError( "Invalid Capakey %s can't be parsed" % self.capakey )
python
{ "resource": "" }
q45503
requires_libsodium
train
def requires_libsodium(func):
    """
    Mark a function as requiring libsodium.

    If no libsodium support is detected, a `RuntimeError` is thrown.
    """
    @wraps(func)
    def checked(*call_args, **call_kwargs):
        # Fail fast before running the wrapped function.
        libsodium_check()
        return func(*call_args, **call_kwargs)

    return checked
python
{ "resource": "" }
q45504
NamespaceAdmin.has_delete_permission
train
def has_delete_permission(self, request, obj=None):
    """
    Default namespaces cannot be deleted.
    """
    is_fixed = obj is not None and obj.fixed
    if is_fixed:
        # Fixed (default) namespaces are protected from deletion.
        return False
    return super(NamespaceAdmin, self).has_delete_permission(request, obj)
python
{ "resource": "" }
q45505
EntryAdminWYMEditorMixin.get_urls
train
def get_urls(self):
    """
    Overload the admin's urls for WYMEditor.
    """
    inherited = super(EntryAdminWYMEditorMixin, self).get_urls()
    custom = [
        url(r'^wymeditor/$',
            self.admin_site.admin_view(self.wymeditor),
            name='zinnia_entry_wymeditor'),
    ]
    # Custom urls must come first so they are matched before the defaults.
    return custom + inherited
python
{ "resource": "" }
q45506
MockOpen._get_child_mock
train
def _get_child_mock(self, **kws):
    """Create a new FileLikeMock instance.

    The new mock will inherit the parent's side_effect and read_data
    attributes.
    """
    # Overwrite (rather than merge) these keys so children always track
    # the parent's current state.
    kws['_new_parent'] = self
    kws['side_effect'] = self._mock_side_effect
    kws['read_data'] = self.__read_data
    return FileLikeMock(**kws)
python
{ "resource": "" }
q45507
ZAPAuthenticator.on_request
train
async def on_request(
    self,
    domain,
    address,
    identity,
    mechanism,
    credentials,
):
    """
    Handle a ZAP request.

    :returns: a ``(user_id, metadata)`` tuple on success.
    :raises ZAPAuthenticationFailure: when the peer is black-listed,
        not white-listed, or fails PLAIN/CURVE authentication.
    """
    logger.debug(
        "Request in domain %s for %s (%r): %r (%r)",
        domain,
        address,
        identity,
        mechanism,
        credentials,
    )
    user_id = None
    metadata = {}

    # IP filtering: a configured whitelist takes precedence over the
    # blacklist (the blacklist is only consulted when no whitelist is set).
    if self.whitelist:
        if address not in self.whitelist:
            raise ZAPAuthenticationFailure(
                "IP address is not in the whitelist",
            )
    elif self.blacklist:
        if address in self.blacklist:
            raise ZAPAuthenticationFailure("IP address is blacklisted")

    if mechanism == b'PLAIN':
        # credentials = [username, password] as UTF-8 bytes.
        username = credentials[0].decode('utf-8')
        password = credentials[1].decode('utf-8')
        ref_password = self.passwords.get(username)
        if not ref_password:
            raise ZAPAuthenticationFailure("No such user %r" % username)
        if password != ref_password:
            raise ZAPAuthenticationFailure(
                "Invalid password for user %r" % username,
            )
        user_id = username
    elif mechanism == b'CURVE':
        # credentials = [public_key]; CURVE does not set a user_id here.
        public_key = credentials[0]
        if public_key not in self.authorized_keys:
            raise ZAPAuthenticationFailure(
                "Unauthorized key %r" % public_key,
            )
    # NOTE(review): unknown mechanisms fall through and succeed with
    # user_id=None — confirm that is intended.
    return user_id, metadata
python
{ "resource": "" }
q45508
APIResource.factory
train
def factory(data):
    """
    Try to reconstruct the APIResource from its data.

    :param data: The APIResource data
    :type data: dict

    :return: The guessed APIResource
    :raise exceptions.UnknownAPIResource: when it's impossible to
        reconstruct the APIResource from its data.
    """
    if 'object' not in data:
        raise exceptions.UnknownAPIResource('Missing `object` key in resource.')
    # Search direct subclasses for the one whose declared object_type
    # matches the payload's `object` marker.
    for reconstituable_api_resource_type in ReconstituableAPIResource.__subclasses__():
        if reconstituable_api_resource_type.object_type == data['object']:
            return reconstituable_api_resource_type(**data)
    raise exceptions.UnknownAPIResource('Unknown object `' + data['object'] + '`.')
python
{ "resource": "" }
q45509
APIResource._initialize
train
def _initialize(self, **resource_attributes): """ Initialize a resource. Default behavior is just to set all the attributes. You may want to override this. :param resource_attributes: The resource attributes """ self._set_attributes(**resource_attributes) for attribute, attribute_type in list(self._mapper.items()): if attribute in resource_attributes and isinstance(resource_attributes[attribute], dict): setattr(self, attribute, attribute_type(**resource_attributes[attribute]))
python
{ "resource": "" }
q45510
Payment._mapper
train
def _mapper(self):
    """
    Maps payment attributes to their specific types.

    :see :func:`~APIResource._mapper`
    """
    mapping = dict(
        card=Payment.Card,
        customer=Payment.Customer,
        hosted_payment=Payment.HostedPayment,
        notification=Payment.Notification,
        failure=Payment.Failure,
    )
    return mapping
python
{ "resource": "" }
q45511
Customer.list_cards
train
def list_cards(self, *args, **kwargs):
    """ List the cards of the customer.

    :param page: the page number
    :type page: int|None
    :param per_page: number of customers per page. It's a good practice to
        increase this number if you know that you will need a lot of payments.
    :type per_page: int|None

    :return: The cards of the customer
    :rtype APIResourceCollection
    """
    # Delegate straight to the Card resource listing for this customer.
    cards = payplug.Card.list(self, *args, **kwargs)
    return cards
python
{ "resource": "" }
q45512
APIResourceCollection._initialize
train
def _initialize(self, **resource_attributes):
    """
    Initialize the collection.

    :param resource_attributes: API resource parameters
    """
    super(APIResourceCollection, self)._initialize(**resource_attributes)
    # Convert each raw dict into a typed API resource. A single
    # comprehension replaces the clear-then-append loop (ruff PERF401).
    self.data = [
        self._expected_api_resource(**resource)
        for resource in self.data
    ]
python
{ "resource": "" }
q45513
retry
train
def retry(retries=10, wait=5, catch=None):
    """
    Decorator to retry on exceptions raised.

    :param retries: maximum number of attempts.
    :param wait: seconds to sleep between attempts.
    :param catch: tuple of exception types that trigger a retry
        (default: ``(Exception,)``). Other exceptions propagate
        immediately.
    :raises DSBException: when all attempts failed.
    """
    catch = catch or (Exception,)

    def real_retry(function):
        @wraps(function)  # preserve the wrapped function's metadata
        def wrapper(*args, **kwargs):
            for attempt in range(retries):
                try:
                    return function(*args, **kwargs)
                except catch:
                    # Don't waste `wait` seconds after the final attempt;
                    # the old code slept once more before raising.
                    if attempt < retries - 1:
                        time.sleep(wait)
            # NOTE: the former `except Exception as e: raise e` clause was
            # a no-op re-raise and has been removed — uncaught exception
            # types propagate exactly as before.
            raise DSBException('Retries limit exceded.')
        return wrapper
    return real_retry
python
{ "resource": "" }
q45514
BaseConnection.discard_incoming_messages
train
def discard_incoming_messages(self):
    """
    Discard all incoming messages for the time of the context manager.
    """
    # Drop anything that was already queued.
    self.inbox.clear()

    # Remember the current flag so nested uses restore correctly.
    saved_flag = self._discard_incoming_messages
    self._discard_incoming_messages = True
    try:
        yield
    finally:
        # Restore (instead of forcing False) to support nesting.
        self._discard_incoming_messages = saved_flag
python
{ "resource": "" }
q45515
MediaType.provides
train
def provides(self, imt):
    """
    Returns True iff the self is at least as specific as other.

    Examples:
    application/xhtml+xml provides application/xml, application/*, */*
    text/html provides text/*, but not application/xhtml+xml or
    application/html
    """
    # Compare only the prefix that `imt` actually specifies.
    prefix_len = imt.specifity
    return self.type[:prefix_len] == imt.type[:prefix_len]
python
{ "resource": "" }
q45516
MediaType.resolve
train
def resolve(cls, accept, available_renderers):
    """
    Resolves a list of accepted MediaTypes and available renderers to the
    preferred renderer. Call as MediaType.resolve([MediaType], [renderer]).
    """
    assert isinstance(available_renderers, tuple)
    # Sort ascending and pop the most-preferred media type to seed the
    # first group. NOTE(review): this raises IndexError on an empty
    # `accept` list — confirm callers never pass one.
    accept = sorted(accept)
    renderers, seen = [], set()
    accept_groups = [[accept.pop()]]
    # Bucket the remaining media types by equivalence with the group head.
    for imt in accept:
        if imt.equivalent(accept_groups[-1][0]):
            accept_groups[-1].append(imt)
        else:
            accept_groups.append([imt])
    # Collect renderers group by group, skipping ones already selected.
    for accept_group in accept_groups:
        for renderer in available_renderers:
            if renderer in seen:
                continue
            for mimetype in renderer.mimetypes:
                for imt in accept_group:
                    if mimetype.provides(imt):
                        renderers.append(renderer)
                        seen.add(renderer)
                        # Only exits the innermost (imt) loop; the
                        # mimetype loop keeps going. NOTE(review): a
                        # renderer with several matching mimetypes may be
                        # appended more than once within one group, since
                        # the `seen` check happens one level up — confirm.
                        break
    return renderers
python
{ "resource": "" }
q45517
StaticCompilerFileStorage.get_available_name
train
def get_available_name(self, name):
    """
    Deletes the given file if it exists.
    """
    # Overwrite semantics: reuse the same name instead of generating
    # an alternative one.
    already_there = self.exists(name)
    if already_there:
        self.delete(name)
    return name
python
{ "resource": "" }
q45518
Multiplexer.add_socket
train
def add_socket(self, socket): """ Add a socket to the multiplexer. :param socket: The socket. If it was added already, it won't be added a second time. """ if socket not in self._sockets: self._sockets.add(socket) socket.on_closed.connect(self.remove_socket)
python
{ "resource": "" }
q45519
Multiplexer.remove_socket
train
def remove_socket(self, socket): """ Remove a socket from the multiplexer. :param socket: The socket. If it was removed already or if it wasn't added, the call does nothing. """ if socket in self._sockets: socket.on_closed.disconnect(self.remove_socket) self._sockets.remove(socket)
python
{ "resource": "" }
q45520
Multiplexer.recv_multipart
train
async def recv_multipart(self):
    """
    Read from all the associated sockets.

    :returns: A list of tuples (socket, frames) for each socket that
        returned a result.
    """
    if not self._sockets:
        # Nothing registered: nothing to wait for.
        return []

    results = []

    async def recv_and_store(socket):
        frames = await socket.recv_multipart()
        results.append((socket, frames))

    # One receive task per socket.
    # NOTE(review): the `loop` argument to ensure_future/asyncio.wait is
    # deprecated since Python 3.8 and removed in 3.10 — confirm the
    # supported Python range.
    tasks = [
        asyncio.ensure_future(recv_and_store(socket), loop=self.loop)
        for socket in self._sockets
    ]

    try:
        # Return as soon as at least one socket produced frames; any
        # other receives that completed meanwhile are also in `results`.
        await asyncio.wait(
            tasks,
            return_when=asyncio.FIRST_COMPLETED,
            loop=self.loop,
        )
    finally:
        # Cancel the receives that are still pending.
        for task in tasks:
            task.cancel()

    return results
python
{ "resource": "" }
q45521
modified_data_decorator
train
def modified_data_decorator(function):
    """
    Decorator to initialise the modified_data if necessary. To be used in
    list functions to modify the list
    """
    @wraps(function)
    def func(self, *args, **kwargs):
        """Decorator function"""
        # Mutations are allowed when the model is writable or unlocked.
        if not self.get_read_only() or not self.is_locked():
            self.initialise_modified_data()
            return function(self, *args, **kwargs)
        # Read-only and locked: skip the mutation entirely.
        # NOTE(review): a no-op lambda is returned instead of None —
        # presumably so callers get a callable-ish sentinel; confirm.
        return lambda: None

    return func
python
{ "resource": "" }
q45522
ListModel.initialise_modified_data
train
def initialise_modified_data(self):
    """
    Initialise the modified_data if necessary
    """
    if self.__modified_data__ is not None:
        # Already initialised; keep the pending modifications.
        return
    original = self.__original_data__
    # Start from a shallow copy of the original data when present.
    self.__modified_data__ = list(original) if original else []
python
{ "resource": "" }
q45523
ListModel.append
train
def append(self, item):
    """
    Appending elements to our list
    """
    value = self.get_validated_object(item)
    if value is None:
        # Items that fail validation are silently dropped.
        return
    self.__modified_data__.append(value)
python
{ "resource": "" }
q45524
ListModel.insert
train
def insert(self, index, p_object):
    """
    Insert an element to a list
    """
    value = self.get_validated_object(p_object)
    if value is not None:
        # Only validated objects make it into the list.
        self.__modified_data__.insert(index, value)
python
{ "resource": "" }
q45525
ListModel.index
train
def index(self, value):
    """
    Gets the index in the list for a value
    """
    # Prefer the working copy when one exists.
    data = self.__modified_data__
    if data is None:
        data = self.__original_data__
    return data.index(value)
python
{ "resource": "" }
q45526
ListModel.count
train
def count(self, value):
    """
    Gives the number of occurrencies of a value in the list
    """
    # Prefer the working copy when one exists.
    data = self.__modified_data__
    if data is None:
        data = self.__original_data__
    return data.count(value)
python
{ "resource": "" }
q45527
ListModel.flat_data
train
def flat_data(self):
    """
    Function to pass our modified values to the original ones
    """
    def _flatten(item):
        """Recursively flatten nested models; plain values pass through."""
        try:
            item.flat_data()
        except AttributeError:
            pass
        return item

    # Use the working copy when one exists, else re-flatten the original.
    source = self.__modified_data__
    if source is None:
        source = self.__original_data__
    if source is not None:
        self.__original_data__ = [_flatten(item) for item in source]
        self.__modified_data__ = None
python
{ "resource": "" }
q45528
ListModel.export_data
train
def export_data(self):
    """
    Retrieves the data in a jsoned form
    """
    def _export(item):
        """Export nested models; plain values pass through."""
        try:
            return item.export_data()
        except AttributeError:
            return item

    # The working copy wins over the original data when present.
    source = self.__modified_data__
    if source is None:
        source = self.__original_data__
    return [_export(item) for item in source]
python
{ "resource": "" }
q45529
ListModel.export_modified_data
train
def export_modified_data(self):
    """
    Retrieves the modified data in a jsoned form
    """
    def export_modfield(value, is_modified_seq=True):
        """Export modified item"""
        try:
            return value.export_modified_data()
        except AttributeError:
            # Plain (non-model) values are returned as-is.
            # NOTE(review): is_modified_seq is never passed as False
            # anywhere in this function, so the implicit `return None`
            # branch looks like dead code — confirm before relying on
            # the None-filtering below.
            if is_modified_seq:
                return value

    if self.__modified_data__ is not None:
        return [export_modfield(value) for value in self.__modified_data__]
    # For unmodified lists, drop entries whose export returned None.
    return list(x for x in [export_modfield(value) for value in self.__original_data__] if x is not None)
python
{ "resource": "" }
q45530
ListModel.export_modifications
train
def export_modifications(self):
    """
    Returns list modifications.

    When the list itself was modified, the full exported data is
    returned. Otherwise a dict maps "index.field" paths (or bare
    indexes) to the modifications of each changed child model.
    """
    if self.__modified_data__ is not None:
        # The whole list changed: export everything.
        return self.export_data()
    result = {}
    for key, value in enumerate(self.__original_data__):
        try:
            if not value.is_modified():
                continue
            modifications = value.export_modifications()
        except AttributeError:
            # Plain values can't carry modifications; skip them.
            continue
        try:
            # Child returned a dict: prefix its keys with our index.
            result.update({'{}.{}'.format(key, f): v for f, v in modifications.items()})
        except AttributeError:
            # Child returned a plain value (e.g. a list export).
            result[key] = modifications
    return result
python
{ "resource": "" }
q45531
ListModel.export_original_data
train
def export_original_data(self):
    """
    Retrieves the original_data
    """
    def _export(item):
        """Export nested original data; plain values pass through."""
        try:
            return item.export_original_data()
        except AttributeError:
            return item

    return [_export(item) for item in self.__original_data__]
python
{ "resource": "" }
q45532
ListModel.export_deleted_fields
train
def export_deleted_fields(self):
    """
    Returns a list with any deleted fields form original data.
    In tree models, deleted fields on children will be appended.
    """
    result = []
    if self.__modified_data__ is not None:
        # A modified list tracks no per-child deletions.
        return result
    for position, item in enumerate(self):
        try:
            child_deleted = item.export_deleted_fields()
        except AttributeError:
            # Plain values cannot have deleted fields.
            continue
        result.extend('{}.{}'.format(position, key) for key in child_deleted)
    return result
python
{ "resource": "" }
q45533
ListModel.is_modified
train
def is_modified(self):
    """
    Returns whether list is modified or not
    """
    def _child_modified(item):
        """Plain values are never modified."""
        try:
            return item.is_modified()
        except AttributeError:
            return False

    # Any pending working copy means the list itself changed.
    if self.__modified_data__ is not None:
        return True
    return any(_child_modified(item) for item in self.__original_data__)
python
{ "resource": "" }
q45534
ListModel._get_indexes_by_path
train
def _get_indexes_by_path(self, field):
    """
    Returns a list of indexes by field path.

    :param field: Field structure as following:
        *.subfield_2 would apply the function to the every subfield_2 of the elements
        1.subfield_2 would apply the function to the subfield_2 of the element 1
        * would apply the function to every element
        1 would apply the function to element 1
    :returns: a ``(indexes, next_field)`` tuple; ``([], None)`` or
        ``(None, None)`` when nothing matches.
    """
    try:
        # Split off the first path segment; the rest is handled deeper.
        field, next_field = field.split('.', 1)
    except ValueError:
        # No dot: this is the last path segment.
        next_field = ''
    if field == '*':
        index_list = []
        for item in self:
            # NOTE(review): self.index() returns the FIRST occurrence, so
            # duplicate items yield repeated indexes — confirm intended.
            index_list.append(self.index(item))
        if index_list:
            return index_list, next_field
        return [], None
    elif field.isnumeric():
        index = int(field)
        if index >= len(self):
            # Out of range: nothing to address.
            return None, None
        return [index], next_field
    # NOTE(review): non-'*', non-numeric segments fall through and return
    # an implicit None (not a tuple) — confirm callers handle that.
python
{ "resource": "" }
q45535
example
train
def example(index):
    """Index page."""
    pid = PersistentIdentifier.query.filter_by(id=index).one()
    record = RecordMetadata.query.filter_by(id=pid.object_uuid).first()
    context = dict(
        record=record.json,
        pid=pid,
        title="Demosite Invenio Org",
    )
    return render_template("app/detail.html", **context)
python
{ "resource": "" }
q45536
rebin
train
def rebin(a, factor, func=None): u"""Aggregate data from the input array ``a`` into rectangular tiles. The output array results from tiling ``a`` and applying `func` to each tile. ``factor`` specifies the size of the tiles. More precisely, the returned array ``out`` is such that:: out[i0, i1, ...] = func(a[f0*i0:f0*(i0+1), f1*i1:f1*(i1+1), ...]) If ``factor`` is an integer-like scalar, then ``f0 = f1 = ... = factor`` in the above formula. If ``factor`` is a sequence of integer-like scalars, then ``f0 = factor[0]``, ``f1 = factor[1]``, ... and the length of ``factor`` must equal the number of dimensions of ``a``. The reduction function ``func`` must accept an ``axis`` argument. Examples of such function are - ``numpy.mean`` (default), - ``numpy.sum``, - ``numpy.product``, - ... The following example shows how a (4, 6) array is reduced to a (2, 2) array >>> import numpy >>> from rebin import rebin >>> a = numpy.arange(24).reshape(4, 6) >>> rebin(a, factor=(2, 3), func=numpy.sum) array([[ 24, 42], [ 96, 114]]) If the elements of `factor` are not integer multiples of the dimensions of `a`, the remainding cells are discarded. >>> rebin(a, factor=(2, 2), func=numpy.sum) array([[16, 24, 32], [72, 80, 88]]) """ a = np.asarray(a) dim = a.ndim if np.isscalar(factor): factor = dim*(factor,) elif len(factor) != dim: raise ValueError('length of factor must be {} (was {})' .format(dim, len(factor))) if func is None: func = np.mean for f in factor: if f != int(f): raise ValueError('factor must be an int or a tuple of ints ' '(got {})'.format(f)) new_shape = [n//f for n, f in zip(a.shape, factor)]+list(factor) new_strides = [s*f for s, f in zip(a.strides, factor)]+list(a.strides) aa = as_strided(a, shape=new_shape, strides=new_strides) return func(aa, axis=tuple(range(-dim, 0)))
python
{ "resource": "" }
q45537
can_use_enum
train
def can_use_enum(func):
    """
    Decorator to use Enum value on type checks.
    """
    @wraps(func)
    def inner(self, value):
        if not isinstance(value, Enum):
            return func(self, value)
        # Unwrap the Enum and check/convert its underlying value.
        raw = value.value
        return self.check_value(raw) or func(self, raw)

    return inner
python
{ "resource": "" }
q45538
convert_enum
train
def convert_enum(func):
    """
    Decorator to use Enum value on type casts.
    """
    @wraps(func)
    def inner(self, value):
        try:
            # Enum members expose the underlying value via `.value`.
            if self.check_value(value.value):
                return value.value
            converted = func(self, value.value)
            return converted
        except AttributeError:
            # Not Enum-like: fall through and cast the value directly.
            pass
        return func(self, value)

    return inner
python
{ "resource": "" }
q45539
BaseField.use_value
train
def use_value(self, value):
    """Converts value to field type or use original"""
    # Values that already pass the type check are kept untouched.
    return value if self.check_value(value) else self.convert_value(value)
python
{ "resource": "" }
q45540
StringIdField.set_value
train
def set_value(self, obj, value):
    """Sets value to model if not empty"""
    if not value:
        # Empty/falsy values clear the field instead of storing it.
        self.delete_value(obj)
        return
    obj.set_field_value(self.name, value)
python
{ "resource": "" }
q45541
ArgumentParser.action
train
def action(self):
    """
    Invoke functions according to the supplied flags
    """
    user = self.args['--user'] or None
    reset = bool(self.args['--reset'])
    # Dispatch to exactly one subcommand.
    if self.args['generate']:
        generate_network(user, reset)
    elif self.args['publish']:
        publish_network(user, reset)
python
{ "resource": "" }
q45542
renderer
train
def renderer(format, mimetypes=(), priority=0, name=None, test=None):
    """
    Decorates a view method to say that it renders a particular format and
    mimetypes.

    Use as::

        @renderer(format="foo")
        def render_foo(self, request, context, template_name): ...

    or::

        @renderer(format="foo", mimetypes=("application/x-foo",))
        def render_foo(self, request, context, template_name): ...

    The former case will inherit mimetypes from the previous renderer for
    that format in the MRO. Where there isn't one, it will default to the
    empty tuple. Takes an optional priority argument to resolve ties
    between renderers.
    """
    def decorate(f):
        # Wrap the view function in a Renderer descriptor.
        return Renderer(f, format, mimetypes, priority, name, test)

    return decorate
python
{ "resource": "" }
q45543
AsyncList.wait_change
train
async def wait_change(self):
    """
    Wait for the list to change.
    """
    waiter = asyncio.Future(loop=self.loop)
    self._change_futures.add(waiter)
    # Whatever completes the future also removes it from the set.
    waiter.add_done_callback(self._change_futures.discard)
    await waiter
python
{ "resource": "" }
q45544
FairListProxy.shift
train
def shift(self, count=1):
    """
    Shift the view a specified number of times.

    :param count: The count of times to shift the view.
    """
    if not self:
        # An empty view always points at the start.
        self._index = 0
        return
    # Wrap around so the index stays in range.
    self._index = (self._index + count) % len(self)
python
{ "resource": "" }
q45545
render
train
def render(value):
    """
    This function finishes the url pattern creation by adding starting
    character ^ and possibly by adding the end character at the end.

    :param value: naive URL value
    :return: raw string
    """
    # Empty urls (use case: wild card imports).
    if not value:
        return r'^$'
    # Anchor the start and end of the pattern when missing.
    anchored = value if value[0] == beginning else beginning + value
    if anchored[-1] != end:
        anchored += end
    return anchored
python
{ "resource": "" }
q45546
get_conjunctive_graph
train
def get_conjunctive_graph(store_id=None):
    """
    Returns an open conjunctive graph.

    :param store_id: identifier of the backing store; defaults to
        ``DEFAULT_STORE``.
    :raises ValueError: if the store cannot be opened as a valid store.
    """
    if not store_id:
        store_id = DEFAULT_STORE
    # BUG FIX: the store must be built for `store_id`; it previously
    # always used DEFAULT_STORE, silently ignoring the caller's argument
    # (compare get_named_graph, which uses store_id correctly).
    store = DjangoStore(store_id)
    graph = ConjunctiveGraph(store=store, identifier=store_id)
    if graph.open(None) != VALID_STORE:
        raise ValueError("The store identified by {0} is not a valid store".format(store_id))
    return graph
python
{ "resource": "" }
q45547
get_named_graph
train
def get_named_graph(identifier, store_id=DEFAULT_STORE, create=True):
    """
    Returns an open named graph.
    """
    # Normalize plain strings to URIRef identifiers.
    uri = identifier if isinstance(identifier, URIRef) else URIRef(identifier)
    graph = Graph(DjangoStore(store_id), identifier=uri)
    if graph.open(None, create=create) != VALID_STORE:
        raise ValueError("The store identified by {0} is not a valid store".format(store_id))
    return graph
python
{ "resource": "" }
q45548
User.languages
train
def languages(self):
    """
    A list of strings describing the user's languages.
    """
    # Wrap each cached entry in a Structure.
    return [
        Structure(id=entry['id'], name=entry['name'])
        for entry in self.cache['languages']
    ]
python
{ "resource": "" }
q45549
User.interested_in
train
def interested_in(self):
    """
    A list of strings describing the genders the user is interested in.
    """
    # A manual append loop was just a copy (ruff PERF402); return a fresh
    # list so callers can't mutate the cached one.
    return list(self.cache['interested_in'])
python
{ "resource": "" }
q45550
User.education
train
def education(self):
    """
    A list of structures describing the user's education history.

    Each structure has attributes ``school``, ``year``, ``concentration``
    and ``type``.

    ``school``, ``year`` reference ``Page`` instances, while
    ``concentration`` is a list of ``Page`` instances. ``type`` is just a
    string that describes the education level.

    .. note:: ``concentration`` may be ``False`` if the user has not
        specified his/her concentration for the given school.
    """
    educations = []
    for education in self.cache['education']:
        school = Page(**education.get('school'))
        year = Page(**education.get('year'))
        type = education.get('type')
        if 'concentration' in education:
            # NOTE(review): under Python 3 map() yields a lazy iterator,
            # not the list the docstring promises — confirm target
            # Python / wrap in list() if py3 is supported.
            concentration = map(lambda c: Page(**c), education.get('concentration'))
        else:
            concentration = False
        # Rebinds the loop variable on purpose; the raw dict is no
        # longer needed at this point.
        education = Structure(
            school = school,
            year = year,
            concentration = concentration,
            type = type
        )
        educations.append(education)
    return educations
python
{ "resource": "" }
q45551
User.permissions
train
def permissions(self):
    """
    A list of strings describing permissions.

    See Facebook's exhaustive `Permissions Reference
    <http://developers.facebook.com/docs/authentication/permissions/>`_ for a
    list of available permissions.
    """
    response = self.graph.get('%s/permissions' % self.id)
    # Only the permission names are needed: iterate the dict's keys
    # directly instead of unpacking unused state values (ruff PERF102).
    return list(response['data'][0])
python
{ "resource": "" }
q45552
User.accounts
train
def accounts(self):
    """
    A list of structures describing apps and pages owned by this user.
    """
    response = self.graph.get('%s/accounts' % self.id)
    # One Structure per owned app/page.
    return [
        Structure(
            page=Page(
                id=item['id'],
                name=item['name'],
                category=item['category'],
            ),
            access_token=item['access_token'],
            permissions=item['perms'],
        )
        for item in response['data']
    ]
python
{ "resource": "" }
q45553
Benchmark.update_xml_element
train
def update_xml_element(self):
    """
    Updates the XML element contents to matches the instance contents.

    :returns: Updated XML element.
    :rtype: lxml.etree._Element
    """
    # Lazily create the element on first use.
    if not hasattr(self, 'xml_element'):
        self.xml_element = etree.Element(self.name, nsmap=NSMAP)
    # Rebuild the element from scratch each time.
    self.xml_element.clear()
    # Optional attributes are only emitted when set on the instance.
    if hasattr(self, 'resolved'):
        self.xml_element.set('resolved', self.resolved)
    if hasattr(self, 'style'):
        self.xml_element.set('style', self.style)
    if hasattr(self, 'style_href'):
        self.xml_element.set('style-href', self.style_href)
    if hasattr(self, 'lang'):
        # xml:lang lives in the XML namespace, hence the Clark notation.
        self.xml_element.set(
            '{http://www.w3.org/XML/1998/namespace}lang', self.lang)
    self.xml_element.set('id', self.id)
    # Recursively refresh and re-attach child elements.
    for child in self.children:
        if hasattr(child, 'update_xml_element'):
            child.update_xml_element()
        if hasattr(child, 'xml_element'):
            self.xml_element.append(child.xml_element)
    return self.xml_element
python
{ "resource": "" }
q45554
post_message
train
def post_message(plugin, polled_time, identity, message):
    """Post single message

    :type plugin: errbot.BotPlugin
    :type polled_time: datetime.datetime
    :type identity: str
    :type message: str
    """
    # Resolve the identity string to a bot identifier, then send.
    recipient = plugin.build_identifier(identity)
    return plugin.send(recipient, message)
python
{ "resource": "" }
q45555
open_pipe_connection
train
async def open_pipe_connection(
    path=None,
    *,
    loop=None,
    limit=DEFAULT_LIMIT,
    **kwargs
):
    """
    Connect to a server using a Windows named pipe.

    :param path: pipe path; forward slashes are normalized to backslashes.
    :param loop: event loop to use (defaults to the current one).
    :param limit: buffer limit for the StreamReader.
    :returns: a ``(reader, writer)`` pair of asyncio streams.
    """
    # Named pipes use backslashes; accept either separator.
    path = path.replace('/', '\\')
    loop = loop or asyncio.get_event_loop()
    reader = asyncio.StreamReader(limit=limit, loop=loop)
    protocol = asyncio.StreamReaderProtocol(reader, loop=loop)
    # NOTE(review): create_pipe_connection only exists on the Windows
    # ProactorEventLoop — confirm this is never reached on other
    # platforms.
    transport, _ = await loop.create_pipe_connection(
        lambda: protocol,
        path,
        **kwargs
    )
    writer = asyncio.StreamWriter(transport, protocol, reader, loop)
    return reader, writer
python
{ "resource": "" }
q45556
BaseData.set_read_only
train
def set_read_only(self, value):
    """
    Sets whether model could be modified or not
    """
    if self.__read_only__ == value:
        # No change: skip the propagation entirely.
        return
    self.__read_only__ = value
    # Propagate the new flag to dependents.
    self._update_read_only()
python
{ "resource": "" }
q45557
BaseData.is_locked
train
def is_locked(self):
    """
    Returns whether model is locked
    """
    if not self.__locked__:
        return False
    # A locked child defers to its parent when one exists.
    parent = self.get_parent()
    if parent:
        return parent.is_locked()
    return True
python
{ "resource": "" }
q45558
generate_gml
train
def generate_gml(username, nodes, edges, cache=False):
    """
    Generate a GML format file representing the given graph attributes

    :param username: owner of the graph; determines the output filename.
    :param nodes: list of node labels.
    :param edges: list of (source_label, target_label) pairs.
    :param cache: when True, the generated file is also cached.
    """
    # Map each label to its 1-based GML id once, instead of calling
    # nodes.index() inside the edge loop (O(n^2) -> O(n)). setdefault
    # keeps the FIRST index for duplicate labels, matching the old
    # nodes.index() behaviour.
    node_ids = {}

    # file segment that represents all the nodes in graph
    node_content = ""
    for i, label in enumerate(nodes):
        node_ids.setdefault(label, i + 1)
        node_id = "\t\tid %d\n" % (i + 1)
        node_label = "\t\tlabel \"%s\"\n" % (label)
        node_content += format_node(node_id, node_label)

    # file segment that represents all the edges in graph
    edge_content = ""
    for source_label, target_label in edges:
        edge_source = "\t\tsource %d\n" % node_ids[source_label]
        edge_target = "\t\ttarget %d\n" % node_ids[target_label]
        edge_content += format_edge(edge_source, edge_target)

    # formatted file content
    content = format_content(node_content, edge_content)
    with open(username_to_file(username), 'w') as f:
        f.write(content)

    # save the file for further use
    if cache:
        cache_file(username_to_file(username))
python
{ "resource": "" }
q45559
config_extensions
train
def config_extensions(app):
    " Init application with extensions. "
    # Order matters only in that babel is configured last, as before.
    for extension in (cache, db, main, collect):
        extension.init_app(app)
    config_babel(app)
python
{ "resource": "" }
q45560
config_babel
train
def config_babel(app):
    " Init application with babel. "
    babel.init_app(app)

    def select_locale():
        # Pick the best match among the configured languages.
        return request.accept_languages.best_match(app.config['BABEL_LANGUAGES'])

    babel.localeselector(select_locale)
python
{ "resource": "" }
q45561
CrontabMixin.activate_crontab
train
def activate_crontab(self):
    """Activate polling function and register first crontab
    """
    self._crontab = []
    if hasattr(self, 'CRONTAB'):
        for crontab_spec in self.CRONTAB:
            args = cronjob.parse_crontab(crontab_spec)
            job = cronjob.CronJob()
            # A spec is either datetime-triggered or crontab-triggered.
            if args['_timer'] == 'datetime':
                job.set_triggers(args['trigger_format'], args['trigger_time'])
            if args['_timer'] == 'crontab':
                job.set_crontab(args['crontab'])
            # Actions starting with '.' are resolved as methods on this
            # class; anything else is used as-is.
            if args['action'].startswith('.'):
                action_name = args['action'][1:]
                action_ = getattr(self.__class__, action_name)
            else:
                action_ = args['action']
            job.set_action(action_, *args['args'])
            self._crontab.append(job)
    # Poll every 30 seconds; poll_crontab only fires jobs during the
    # first half of each minute.
    self.start_poller(30, self.poll_crontab)
python
{ "resource": "" }
q45562
CrontabMixin.poll_crontab
train
def poll_crontab(self):
    """Check crontab and run target jobs
    """
    polled_time = self._get_current_time()
    # Only the first half-minute tick fires jobs, so each minute's
    # jobs run at most once despite the 30s polling interval.
    if polled_time.second >= 30:
        self.log.debug('Skip cronjobs in {}'.format(polled_time))
        return
    for job in self._crontab:
        if job.is_runnable(polled_time):
            job.do_action(self, polled_time)
python
{ "resource": "" }
q45563
RouteMap.include
train
def include(self, location, namespace=None, app_name=None):
    """
    Return an object suitable for url_patterns.

    :param location: root URL for all URLs from this router
    :param namespace: passed to url()
    :param app_name: passed to url()
    """
    # Highest sort key first, then strip the keys off the entries.
    ordered = sorted(self.routes, key=operator.itemgetter(0), reverse=True)
    patterns = [entry for _, entry in ordered]
    return url(location, urls.include(
        arg=patterns, namespace=namespace, app_name=app_name))
python
{ "resource": "" }
q45564
Installer.clone_source
train
def clone_source(self):
    " Clone source and prepare templates "
    print_header('Clone src: %s' % self.src, '-')

    # Get source
    source_dir = self._get_source()

    # Append settings from source
    self.read(op.join(source_dir, settings.CFGNAME))
    # CLI-provided templates override the configured default.
    self.templates += (self.args.template or self.template).split(',')
    self.templates = OrderedSet(self._gen_templates(self.templates))
    self['template'] = ','.join(str(x[0]) for x in self.templates)
    print_header('Deploy templates: %s' % self.template, sep='-')

    # Persist the chosen template list alongside the deployment.
    with open(op.join(self.deploy_dir, settings.TPLNAME), 'w') as f:
        f.write(self.template)

    # Write the config with deploy_dir temporarily pointing at the final
    # target dir, then restore it.
    with open(op.join(self.deploy_dir, settings.CFGNAME), 'w') as f:
        self['deploy_dir'], tmp_dir = self.target_dir, self.deploy_dir
        self.write(f)
        self['deploy_dir'] = tmp_dir

    # Create site
    site = Site(self.deploy_dir)

    # Prepare templates
    for template_name, template in self.templates:
        site.paste_template(template_name, template, tmp_dir)

    # Info-only mode: dump the project context and bail out.
    if self.args.info:
        print_header('Project context', sep='-')
        LOGGER.debug(site.get_info(full=True))
        return None

    # Check requirements
    call('sudo chmod +x %s/*.sh' % self.service_dir)
    site.run_check(service_dir=self.service_dir)

    # Save options
    site.write()

    return site
python
{ "resource": "" }
q45565
Installer._get_source
train
def _get_source(self):
    """Fetch the project source into ``<deploy_dir>/source``.

    If ``self.src`` starts with a known VCS prefix (``<vcs>+...``), the
    matching clone command from ``settings.SRC_CLONE`` is rendered and
    run as the configured source user; otherwise ``self.src`` is treated
    as a local directory and copied.  In both cases the corresponding
    ``src-*`` template name is appended to ``self.templates``.

    :returns: path of the directory containing the fetched source.
    """
    source_dir = op.join(self.deploy_dir, 'source')

    for tp, cmd in settings.SRC_CLONE:
        # e.g. "git+https://..." selects the "git" clone command.
        if self.src.startswith(tp + '+'):
            program = which(tp)
            # NOTE(review): assert is stripped under ``python -O``;
            # presumably acceptable for a CLI tool — verify.
            assert program, '%s not found.' % tp
            cmd = cmd % dict(src=self.src[len(tp) + 1:],
                             source_dir=source_dir, branch=self.branch)
            # Clone as the dedicated source user.
            cmd = "sudo -u %s %s" % (self['src_user'], cmd)
            call(cmd, shell=True)
            self.templates.append('src-%s' % tp)
            break
    else:
        # No VCS prefix matched: treat self.src as a plain directory.
        self.templates.append('src-dir')
        copytree(self.src, source_dir)

    return source_dir
python
{ "resource": "" }
q45566
quote
train
def quote(text, ws=plain):
    r"""Quote special characters in shell command arguments.

    Every character of *text* that is not in the whitelist *ws* is
    prefixed with a backslash, e.g. ``--foo bar>=10.1`` becomes
    ``--foo bar\>\=10\.1``.

    :param text: the argument string to escape.
    :param ws: characters that are safe to leave unescaped (defaults to
        the module-level ``plain`` whitelist).
    :returns: the escaped string.
    """
    # The original used ``chr`` as the loop variable (shadowing the
    # builtin) and the archaic ``cond and a or b`` ternary; a plain
    # conditional expression is equivalent since characters are truthy.
    return "".join(ch if ch in ws else '\\' + ch for ch in text)
python
{ "resource": "" }
q45567
AbstractResponseMixin.render_to_response
train
def render_to_response(self, context):
    """Render *context* through the template and wrap it in an HttpResponse."""
    body = self.render_template(context)
    return http.HttpResponse(body, content_type=self.mimetype)
python
{ "resource": "" }
q45568
prepare_plot_data
train
def prepare_plot_data(data_file):
    """
    Return a list of Plotly elements representing the network graph.

    Reads a GML graph from *data_file*, lays it out with igraph's
    "graphopt" algorithm, detects communities with multilevel (Louvain)
    clustering, and returns one edge trace followed by one node trace
    per community (each community gets its own colour).
    """
    G = ig.Graph.Read_GML(data_file)
    layout = G.layout('graphopt')

    labels = list(G.vs['label'])
    N = len(labels)
    E = [e.tuple for e in G.es]

    # Community index per vertex; one colour per community.
    community = G.community_multilevel().membership
    communities = len(set(community))
    color_list = community_colors(communities)

    # NOTE(review): Xn/Yn appear to be unused below (node coordinates
    # are rebuilt per-community) — candidates for removal.
    Xn = [layout[k][0] for k in range(N)]
    Yn = [layout[k][1] for k in range(N)]

    # Edge coordinates: each edge contributes its two endpoints plus a
    # None separator so Plotly draws disconnected line segments.
    Xe = []
    Ye = []
    for e in E:
        Xe += [layout[e[0]][0], layout[e[1]][0], None]
        Ye += [layout[e[0]][1], layout[e[1]][1], None]

    lines = Scatter(x=Xe,
                    y=Ye,
                    mode='lines',
                    line=Line(color='rgb(210,210,210)', width=1),
                    hoverinfo='none'
                    )

    plot_data = [lines]

    # Bucket node coordinates and labels by community index.
    node_x = [[] for i in range(communities)]
    node_y = [[] for i in range(communities)]
    node_labels = [[] for i in range(communities)]

    for j in range(len(community)):
        index = community[j]
        node_x[index].append(layout[j][0])
        node_y[index].append(layout[j][1])
        node_labels[index].append(labels[j])

    # One marker trace per community so each gets a distinct colour.
    for i in range(communities):
        trace = Scatter(x=node_x[i],
                        y=node_y[i],
                        mode='markers',
                        name='ntw',
                        marker=Marker(symbol='dot',
                                      size=5,
                                      color=color_list[i],
                                      line=Line(
                                          color='rgb(50,50,50)',
                                          width=0.5)
                                      ),
                        text=node_labels[i],
                        hoverinfo='text'
                        )
        plot_data.append(trace)

    return plot_data
python
{ "resource": "" }
q45569
publish_network
train
def publish_network(user=None, reset=False):
    """
    Generate graph network for a user and plot it using Plotly.

    Builds (or reuses) the cached network file for *user*, prepares the
    Plotly traces and publishes an 800x800 figure.  Falls back to the
    "octogrid" bot credentials when the user isn't authenticated.

    NOTE: this module is Python 2 code (``print`` statement,
    ``except Exception, e`` syntax, ``string.lowercase``).

    :param user: GitHub username, resolved by ``generate_network``.
    :param reset: force regeneration of the cached network data.
    :raises Exception: re-raises any error from the Plotly upload.
    """
    username = generate_network(user, reset)
    network_file = username_to_file(username)

    plot_data = prepare_plot_data(network_file)
    data = Data(plot_data)

    # hide axis line, grid, ticklabels and title
    axis = dict(showline=False,
                zeroline=False,
                showgrid=False,
                showticklabels=False,
                title=''
                )
    width = 800
    height = 800
    layout = Layout(title='GitHub Network for "{0}"'.format(username),
                    font=Font(size=12),
                    showlegend=False,
                    autosize=False,
                    width=width,
                    height=height,
                    xaxis=XAxis(axis),
                    yaxis=YAxis(axis),
                    margin=Margin(
                        l=40,
                        r=40,
                        b=85,
                        t=100,
                    ),
                    hovermode='closest',
                    annotations=Annotations([
                        Annotation(
                            showarrow=False,
                            text='This igraph.Graph has the graphopt layout',
                            xref='paper',
                            yref='paper',
                            x=0,
                            y=-0.1,
                            xanchor='left',
                            yanchor='bottom',
                            font=Font(
                                size=14
                            )
                        )
                    ]),
                    )

    fig = Figure(data=data, layout=layout)

    # use credentials of the bot "octogrid", if user isn't authenticated
    login_as_bot()

    try:
        # Random 5-letter suffix to keep plot filenames unique
        # (not security sensitive, so random.choice is fine here).
        plot_id = ''.join(choice(string.lowercase) for i in range(5))
        plot_url = plotly.plot(
            fig, filename='Octogrid: GitHub communities for {0} [v{1}]'.format(username, plot_id))
        print 'Published the network graph at {0}'.format(plot_url)
    except Exception, e:
        raise e
python
{ "resource": "" }
q45570
array_split
train
def array_split(
        ary,
        indices_or_sections=None,
        axis=None,
        tile_shape=None,
        max_tile_bytes=None,
        max_tile_shape=None,
        sub_tile_shape=None,
        halo=None
):
    """Split the array *ary* into sub-arrays (tiles).

    Thin convenience wrapper around :func:`shape_split`: computes the
    tile slices for ``ary.shape`` and returns the corresponding
    sub-array views as a flat :obj:`list`.

    :param ary: the array to be split.
    :param indices_or_sections: split indices, or number of sections,
        as per :func:`numpy.array_split`.
    :param axis: axis (or per-axis section counts) for the split.
    :param tile_shape: explicit shape for the tiles.
    :param max_tile_bytes: upper bound on the number of bytes per tile
        (uses ``ary.itemsize``).
    :param max_tile_shape: per-axis upper bound on the tile shape.
    :param sub_tile_shape: tile shapes are constrained to multiples of
        this shape.
    :param halo: per-axis halo (overlap) added to each tile; tiles are
        clipped to the array bounds (``ARRAY_BOUNDS``).
    :returns: :obj:`list` of sub-array views of *ary*.
    """
    return [
        ary[slyce]
        for slyce in shape_split(
            array_shape=ary.shape,
            indices_or_sections=indices_or_sections,
            axis=axis,
            array_start=None,
            array_itemsize=ary.itemsize,
            tile_shape=tile_shape,
            max_tile_bytes=max_tile_bytes,
            max_tile_shape=max_tile_shape,
            sub_tile_shape=sub_tile_shape,
            halo=halo,
            tile_bounds_policy=ARRAY_BOUNDS
        ).flatten()
    ]
python
{ "resource": "" }
q45571
ShapeSplitter.check_consistent_parameter_grouping
train
def check_consistent_parameter_grouping(self):
    """
    Ensures this object does not have conflicting groups of parameters.

    Exactly one of the following groups must be specified:
    ``indices_per_axis``; ``split_size``/``split_num_slices_per_axis``;
    ``tile_shape``; or ``max_tile_bytes`` (optionally refined by
    ``max_tile_shape``/``sub_tile_shape``).

    :raises ValueError: If more than one group is specified, or none is.
    """
    parameter_groups = {}
    if self.indices_per_axis is not None:
        parameter_groups["indices_per_axis"] = \
            {"self.indices_per_axis": self.indices_per_axis}
    if (self.split_size is not None) or (self.split_num_slices_per_axis is not None):
        parameter_groups["split_size"] = \
            {
                "self.split_size": self.split_size,
                "self.split_num_slices_per_axis": self.split_num_slices_per_axis,
            }
    if self.tile_shape is not None:
        parameter_groups["tile_shape"] = \
            {"self.tile_shape": self.tile_shape}
    if self.max_tile_bytes is not None:
        parameter_groups["max_tile_bytes"] = \
            {"self.max_tile_bytes": self.max_tile_bytes}
    # max_tile_shape and sub_tile_shape only refine the max_tile_bytes
    # group, so they are folded into that same group entry.
    if self.max_tile_shape is not None:
        parameter_groups.setdefault("max_tile_bytes", {})["self.max_tile_shape"] = \
            self.max_tile_shape
    if self.sub_tile_shape is not None:
        parameter_groups.setdefault("max_tile_bytes", {})["self.sub_tile_shape"] = \
            self.sub_tile_shape

    self.logger.debug("parameter_groups=%s", parameter_groups)

    if len(parameter_groups) > 1:
        group_keys = sorted(parameter_groups)
        raise ValueError(
            "Got conflicting parameter groups specified, "
            +
            "should only specify one group to define the split:\n"
            +
            (
                "\n".join(
                    [
                        (
                            ("Group %18s: " % ("'%s'" % group_key))
                            +
                            str(parameter_groups[group_key])
                        )
                        for group_key in group_keys
                    ]
                )
            )
        )
    if not parameter_groups:
        raise ValueError(
            "No split parameters specified, need parameters from one of the groups: "
            +
            "'indices_per_axis', 'split_size', 'tile_shape' or 'max_tile_bytes'"
        )
python
{ "resource": "" }
q45572
Context.socket
train
def socket(self, socket_type, identity=None, mechanism=None):
    """
    Create and register a new socket.

    This is the preferred method to create new sockets.

    :param socket_type: The type of the socket.
    :param identity: Optional socket identity.
    :param mechanism: Optional security mechanism.
    :returns: The newly created, registered socket.
    """
    new_socket = Socket(
        context=self,
        socket_type=socket_type,
        identity=identity,
        mechanism=mechanism,
        loop=self.loop,
    )
    self.register_child(new_socket)
    return new_socket
python
{ "resource": "" }
q45573
Context.set_zap_authenticator
train
def set_zap_authenticator(self, zap_authenticator):
    """
    Setup a ZAP authenticator.

    :param zap_authenticator: A ZAP authenticator instance to use. The
        context takes ownership of the specified instance: it will close
        it automatically when it stops. If `None` is specified, any
        previously owned instance is disowned and returned; it becomes
        the caller's responsibility to close it.
    :returns: The previous ZAP authenticator instance (or `None`).
    """
    result = self._zap_authenticator

    if result:
        # Disown the previous authenticator so the caller gets it back
        # without this context closing it.
        self.unregister_child(result)

    self._zap_authenticator = zap_authenticator

    # The ZAP client is tied to the authenticator's lifetime: always
    # close the old one before (possibly) creating a replacement.
    if self.zap_client:
        self.zap_client.close()

    if self._zap_authenticator:
        self.register_child(zap_authenticator)
        # Fresh client for the new authenticator, owned by this context.
        self.zap_client = ZAPClient(context=self)
        self.register_child(self.zap_client)
    else:
        self.zap_client = None

    return result
python
{ "resource": "" }
q45574
GitRepo.get_vcs_directory
train
def get_vcs_directory(context, directory):
    """Get the pathname of the directory containing the version control metadata files.

    Returns ``<directory>/.git`` when that nested directory exists,
    otherwise *directory* itself (bare repository layout).
    """
    candidate = os.path.join(directory, '.git')
    if context.is_directory(candidate):
        return candidate
    return directory
python
{ "resource": "" }
q45575
GitRepo.expand_branch_name
train
def expand_branch_name(self, name):
    """
    Expand branch names to their unambiguous form.

    :param name: The name of a local or remote branch (a string).
    :returns: The unambiguous form of the branch name (a string).

    Used internally (e.g. by ``find_revision_id()`` and
    ``find_revision_number()``) to expand remote branch names into the
    unambiguous form accepted by commands like ``git rev-parse`` and
    ``git rev-list --count``.
    """
    if not name:
        # No name given: fall back to the repository's default revision.
        return self.default_revision
    # Run `git for-each-ref' once and reuse the results for both passes.
    known_branches = list(self.find_branches_raw())
    # Pass 1: an exact local branch match is already unambiguous.
    for ref_prefix, branch_name, _revision_id in known_branches:
        if ref_prefix == 'refs/heads/' and branch_name == name:
            logger.debug("Branch name %r matches local branch.", name)
            return name
    # Pass 2: expand a remote branch name into the full %(refname)
    # emitted by `git for-each-ref'.
    for ref_prefix, branch_name, _revision_id in known_branches:
        if ref_prefix.startswith('refs/remotes/') and branch_name == name:
            expanded = ref_prefix + name
            logger.debug("Branch name %r matches remote branch %r.", name, expanded)
            return expanded
    # Fall back: return the given name unchanged. Possibly unnecessary,
    # but kept for conservativeness / backwards compatibility.
    logger.debug("Failed to expand branch name %r.", name)
    return name
python
{ "resource": "" }
q45576
GitRepo.find_author
train
def find_author(self):
    """Get the author information from the version control system."""
    capture = self.context.capture
    author_name = capture('git', 'config', 'user.name', check=False, silent=True)
    author_email = capture('git', 'config', 'user.email', check=False, silent=True)
    return Author(name=author_name, email=author_email)
python
{ "resource": "" }
q45577
GitRepo.get_create_command
train
def get_create_command(self):
    """Get the command to create the local repository.

    Produces ``git clone [--bare] <remote> <local>`` when a remote is
    configured, otherwise ``git init [--bare] <local>``.
    """
    subcommand = 'clone' if self.remote else 'init'
    command = ['git', subcommand]
    if self.bare:
        command.append('--bare')
    if self.remote:
        command.append(self.remote)
    command.append(self.local)
    return command
python
{ "resource": "" }
q45578
GitRepo.get_export_command
train
def get_export_command(self, directory, revision):
    """Get the command to export the complete tree from the local repository."""
    pipeline = 'git archive %s | tar --extract --directory=%s' % (
        quote(revision), quote(directory))
    return [pipeline]
python
{ "resource": "" }
q45579
Instance.new
train
def new(cls, settings, *args, **kwargs):
    """
    Create a new Cloud instance based on the Settings

    :param settings: mapping whose ``'CLOUD'`` key selects the backend
        (``'bare'``, ``'aws'`` or ``'gcp'``).
    :raises DSBException: for an unsupported ``'CLOUD'`` value.
    :returns: the constructed instance.
    """
    logger.debug('Initializing new "%s" Instance object' % settings['CLOUD'])
    cloud = settings['CLOUD']
    if cloud == 'bare':
        return BareInstance(settings=settings, *args, **kwargs)
    if cloud == 'aws':
        return AWSInstance(settings=settings, *args, **kwargs)
    if cloud == 'gcp':
        return GCPInstance(settings=settings, *args, **kwargs)
    raise DSBException('Cloud "%s" not supported' % cloud)
python
{ "resource": "" }
q45580
initialise_loggers
train
def initialise_loggers(names, log_level=_builtin_logging.WARNING,
                       handler_class=SplitStreamHandler):
    """
    Initialises the named loggers to emit output at the given level.

    Loggers that do not exist yet are created. Each gets a new handler
    of *handler_class* using the module's shared formatter.

    :type names: :obj:`list` of :obj:`str`
    :param names: List of logger names.
    :type log_level: :obj:`int`
    :param log_level: Log level for messages, e.g. :obj:`logging.DEBUG`,
        :obj:`logging.INFO`, :obj:`logging.WARN`, :obj:`logging.ERROR`
        or :obj:`logging.CRITICAL`.
    :type handler_class: One of the :obj:`logging.handlers` classes.
    :param handler_class: Handler class for log output, for example
        :obj:`SplitStreamHandler` or :obj:`logging.StreamHandler`.
    """
    formatter = get_formatter()
    for logger_name in names:
        named_logger = _builtin_logging.getLogger(logger_name)
        new_handler = handler_class()
        new_handler.setFormatter(formatter)
        named_logger.addHandler(new_handler)
        named_logger.setLevel(log_level)
python
{ "resource": "" }
q45581
ClientController.close
train
def close(self):
    """Shut down the socket connection, client and controller"""
    self._sock = None
    self._controller = None
    # Return a previously allocated port to the pool (if any).
    allocated_port = getattr(self, "_port", None)
    if allocated_port:
        portpicker.return_port(allocated_port)
        self._port = None
python
{ "resource": "" }
q45582
ClientController.connect
train
def connect(self, url=c.LOCALHOST, port=None, timeout=c.INITIAL_TIMEOUT, debug=False):
    """socket connect to an already running starcraft2 process

    Retries the websocket connection roughly once per second for up to
    *timeout* attempts, since SC2 may still be starting up.

    :param url: host where the SC2 process is listening.
    :param port: explicit port; when None, reuses a previously
        allocated port or picks a fresh one via portpicker.
    :param timeout: number of connection attempts, also used as the
        per-attempt websocket timeout.
    :param debug: print progress of each attempt.
    :returns: self, once connected.
    :raises websocket.WebSocketException: if no connection was made.
    """
    if port != None: # force a selection to a new port
        if self._port!=None: # if previously allocated port, return it
            portpicker.return_port(self._port)
        self._port = port
    elif self._port==None: # no connection exists
        self._port = portpicker.pick_unused_port()
    self._url = url
    if ":" in url and not url.startswith("["): # Support ipv6 addresses.
        url = "[%s]" % url
    for i in range(timeout):
        startTime = time.time()
        if debug:
            print("attempt #%d to websocket connect to %s:%s"%(i, url, port))
        try:
            finalUrl = "ws://%s:%s/sc2api" %(url, self._port)
            ws = websocket.create_connection(finalUrl, timeout=timeout)
            #print("ws:", ws)
            self._client = protocol.StarcraftProtocol(ws)
            #super(ClientController, self).__init__(client) # ensure RemoteController initializtion is performed
            #if self.ping(): print("init ping()") # ensure the latest state is synced
            # ping returns:
            #     game_version: "4.1.2.60604"
            #     data_version: "33D9FE28909573253B7FC352CE7AEA40"
            #     data_build: 60604
            #     base_build: 60321
            return self
        except socket.error:
            pass  # SC2 hasn't started listening yet.
        except websocket.WebSocketException as err:
            print(err, type(err))
            if "Handshake Status 404" in str(err):
                pass  # SC2 is listening, but hasn't set up the /sc2api endpoint yet.
            else:
                raise
        except Exception as e:
            print(type(e), e)
        # Pad each failed attempt out to ~1 second total before retrying.
        sleepTime = max(0, 1 - (time.time() - startTime)) # try to wait for up to 1 second total
        if sleepTime:
            time.sleep(sleepTime)
    raise websocket.WebSocketException("Could not connect to game at %s on port %s"%(url, port))
python
{ "resource": "" }
q45583
ClientController.debug
train
def debug(self, *debugReqs):
    """Send a debug command to control the game state's setup."""
    request = sc2api_pb2.RequestDebug(debug=debugReqs)
    return self._client.send(debug=request)
python
{ "resource": "" }
q45584
GoogleCloudProvider.submit
train
def submit(self, command="", blocksize=1, job_name="parsl.auto"):
    '''Provision one instance to execute the given command.

    Args:
        - command (str) : The bash command string to be executed.
        - blocksize (int) : Blocksize to be requested (one instance is
          created per call regardless of this value).
        - job_name (str) : Human friendly name to be assigned to the
          job request.

    Returns:
        - The provider-assigned instance name (job identifier).

    Raises:
        - ExecutionProviderException or its subclasses
    '''
    instance, instance_name = self.create_instance(command=command)
    self.provisioned_blocks += 1
    self.resources[instance_name] = {
        "job_id": instance_name,
        "status": translate_table[instance['status']],
    }
    return instance_name
python
{ "resource": "" }
q45585
unpack
train
def unpack(rv):
    """Unpack the response from a view.

    :param rv: the view response
    :type rv: either a :class:`werkzeug.wrappers.Response` or a tuple of
        (data, status_code, headers)
    :returns: ``(data, status_code, headers)`` with defaults filled in.
    :raises ValueError: if the view produced no response data.
    """
    if isinstance(rv, ResponseBase):
        # Already a full response object: pass it through untouched.
        return rv
    if isinstance(rv, tuple):
        # Pad short tuples with None so all three slots are present.
        body, status, headers = rv + (None,) * (3 - len(rv))
    else:
        body, status, headers = rv, None, None
    if body is None:
        raise ValueError('View function did not return a response')
    return body, 200 if status is None else status, headers or {}
python
{ "resource": "" }
q45586
Api.init_app
train
def init_app(self, app):
    """Initialize actions with the app or blueprint.

    :param app: the Flask application or blueprint object
    :type app: :class:`~flask.Flask` or :class:`~flask.Blueprint`

    Examples::

        api = Api()
        api.add_resource(...)
        api.init_app(blueprint)
    """
    try:
        # Blueprints expose `_got_registered_once`; a plain Flask app
        # does not, so touching it raises AttributeError and we fall
        # through to direct initialization in the except clause.
        if app._got_registered_once is True:
            # NOTE: this ValueError is *not* swallowed by the except
            # clause below (which only catches AttributeError); it
            # propagates to the caller on purpose.
            raise ValueError("""Blueprint is already registered with an app.""")
        # Blueprint: defer initialization until it is registered.
        app.record(self._deferred_blueprint_init)
    except AttributeError:
        # Plain Flask application: initialize immediately.
        self._init_app(app)
    else:
        # Remember the blueprint so urls can be prefixed correctly.
        self.blueprint = app
python
{ "resource": "" }
q45587
Api._deferred_blueprint_init
train
def _deferred_blueprint_init(self, setup_state):
    """Bind resources to the app as recorded in blueprint.

    Synchronize prefix between blueprint/api and registration options,
    then perform initialization with setup_state.app
    :class:`flask.Flask` object.

    When a :class:`flask.ext.resteasy.Api` object is initialized with a
    blueprint, this method is recorded on the blueprint to be run when
    the blueprint is later registered to a :class:`flask.Flask` object.
    This method also monkeypatches BlueprintSetupState.add_url_rule with
    _add_url_rule_patch.

    :param setup_state: The setup state object passed to deferred
        functions during blueprint registration
    :type setup_state: :class:`flask.blueprints.BlueprintSetupState`
    """
    self.blueprint_setup = setup_state
    # Monkeypatch add_url_rule exactly once (guard on the function name
    # so re-running this init doesn't wrap the patch again), keeping the
    # original around for the patch to delegate to.
    if setup_state.add_url_rule.__name__ != '_add_url_rule_patch':
        setup_state._original_add_url_rule = setup_state.add_url_rule
        setup_state.add_url_rule = MethodType(Api._add_url_rule_patch,
                                              setup_state)
    if not setup_state.first_registration:
        raise ValueError('flask-RESTEasy blueprints can only be registered once.')
    self._init_app(setup_state.app)
python
{ "resource": "" }
q45588
Api._register_view
train
def _register_view(self, app, resource, *urls, **kwargs):
    """Bind a resource and its urls to the app.

    :param app: an actual :class:`flask.Flask` app
    :param resource: the :class:`Resource` class to register
    :param urls: one or more url rules routed to the resource
    :param endpoint: endpoint name (defaults to
        :meth:`Resource.__name__.lower`); can be used to reference this
        route in :meth:`flask.url_for`
    :type endpoint: str

    Additional keyword arguments not specified above will be passed
    as-is to :meth:`flask.Flask.add_url_rule`.

    SIDE EFFECT: sets ``resource.endpoint`` when not already present.
    """
    endpoint = kwargs.pop('endpoint', None) or resource.__name__.lower()
    self.endpoints.add(endpoint)

    # Guard against re-registering the same endpoint with a different
    # resource class. (Fixed: the original consulted the non-existent
    # ``app.view_class`` attribute, which made this check dead code;
    # registered views live in ``app.view_functions``, as the lookup on
    # the next line already assumed.)
    if endpoint in getattr(app, 'view_functions', {}):
        existing_view_class = app.view_functions[endpoint].__dict__['view_class']
        # if you override the endpoint with a different class, avoid the
        # collision by raising an exception
        if existing_view_class != resource:
            raise ValueError('Endpoint {!r} is already set to {!r}.'
                             .format(endpoint, existing_view_class.__name__))

    if not hasattr(resource, 'endpoint'):
        # Don't replace existing endpoint
        resource.endpoint = endpoint
    resource_func = self.output(resource.as_view(endpoint))

    for decorator in chain(kwargs.pop('decorators', ()), self.decorators):
        resource_func = decorator(resource_func)

    for url in urls:
        # If this Api has a blueprint
        if self.blueprint:
            # And this Api has been setup
            if self.blueprint_setup:
                # The blueprint is already registered: add the rule
                # through the setup state right away (it applies the
                # blueprint's own prefix).
                self.blueprint_setup.add_url_rule(self._make_url(url, None),
                                                  view_func=resource_func,
                                                  **kwargs)
                continue
            else:
                # Set the rule to a function that expects the blueprint
                # prefix to construct the final url. Allows deferment of
                # url finalization in the case that the Blueprint has
                # not yet been registered to an application, so we can
                # wait for the registration prefix.
                rule = partial(self._make_url, url)
        else:
            # If we've got no Blueprint, just build a url with no prefix
            rule = self._make_url(url, None)
        # Add the url to the application or blueprint
        app.add_url_rule(rule, view_func=resource_func, **kwargs)
python
{ "resource": "" }
q45589
Api._add_url_rule_patch
train
def _add_url_rule_patch(blueprint_setup, rule, endpoint=None, view_func=None, **options):
    """Patch BlueprintSetupState.add_url_rule for delayed creation.

    Method used for setup state instance corresponding to this Api
    instance. Exists primarily to enable _make_url's function.

    :param blueprint_setup: The BlueprintSetupState instance (self)
    :param rule: A string, or a callable that takes the blueprint's url
        prefix and returns the final url rule string (_make_url)
    :param endpoint: See :meth:`flask.BlueprintSetupState.add_url_rule`
    :param view_func: See :meth:`flask.BlueprintSetupState.add_url_rule`
    :param **options: See :meth:`flask.BlueprintSetupState.add_url_rule`
    """
    # A callable rule is a deferred url builder: resolve it now that the
    # blueprint's prefix is known; otherwise prepend the prefix ourselves.
    if callable(rule):
        rule = rule(blueprint_setup.url_prefix)
    elif blueprint_setup.url_prefix:
        rule = blueprint_setup.url_prefix + rule
    options.setdefault('subdomain', blueprint_setup.subdomain)
    if endpoint is None:
        endpoint = _endpoint_from_view_func(view_func)
    # Merge per-rule defaults on top of the blueprint-wide defaults.
    defaults = blueprint_setup.url_defaults
    if 'defaults' in options:
        defaults = dict(defaults, **options.pop('defaults'))
    # Register on the app under the blueprint-qualified endpoint name.
    blueprint_setup.app.add_url_rule(rule, '%s.%s' % (blueprint_setup.blueprint.name, endpoint),
                                     view_func, defaults=defaults, **options)
python
{ "resource": "" }
q45590
Api._make_url
train
def _make_url(self, url_part, blueprint_prefix): """Create URL from blueprint_prefix, api prefix and resource url. This method is used to defer the construction of the final url in the case that the Api is created with a Blueprint. :param url_part: The part of the url the endpoint is registered with :param blueprint_prefix: The part of the url contributed by the blueprint. Generally speaking, BlueprintSetupState.url_prefix """ parts = (blueprint_prefix, self.prefix, url_part) return ''.join(_ for _ in parts if _)
python
{ "resource": "" }
q45591
Api.url_for
train
def url_for(self, resource, **kwargs):
    """Create a url for the given resource.

    :param resource: The resource
    :type resource: :class:`Resource`
    :param kwargs: Same arguments you would give :class:`flask.url_for`
    """
    endpoint = resource.endpoint
    if self.blueprint:
        # Blueprint-relative endpoint lookup.
        endpoint = '.' + endpoint
    return flask.url_for(endpoint, **kwargs)
python
{ "resource": "" }
q45592
Deployer.deploy
train
def deploy(self, job_name, command='', blocksize=1):
    """Deploy the template to a resource group.

    Ensures the resource group exists, loads the bundled ARM template,
    and launches *blocksize* deployments, waiting for each to finish.

    :param job_name: human friendly job name — currently unused (TODO
        confirm intended use).
    :param command: startup command — currently unused by the template
        (TODO confirm intended use).
    :param blocksize: number of deployments to launch.
    :returns: list with the result of each completed deployment.
    """
    # Fixed: the original placed this docstring *after* ``instances = []``,
    # which made it a stray string expression rather than a docstring.
    instances = []
    self.client.resource_groups.create_or_update(
        self.resource_group,
        {
            'location': self.location,
        }
    )

    template_path = os.path.join(os.path.dirname(
        __file__), 'templates', 'template.json')
    with open(template_path, 'r') as template_file_fd:
        template = json.load(template_file_fd)

    parameters = {
        'sshKeyData': self.pub_ssh_key,
        'vmName': 'azure-deployment-sample-vm',
        'dnsLabelPrefix': self.dns_label_prefix
    }
    parameters = {k: {'value': v} for k, v in parameters.items()}

    deployment_properties = {
        'mode': DeploymentMode.incremental,
        'template': template,
        'parameters': parameters
    }
    for i in range(blocksize):
        deployment_async_operation = self.client.deployments.create_or_update(
            self.resource_group,
            'azure-sample',
            deployment_properties
        )
        instances.append(deployment_async_operation.wait())
    return instances
python
{ "resource": "" }
q45593
Deployer.destroy
train
def destroy(self, job_ids):
    """Destroy the given resource group.

    Note: issues one delete of ``self.resource_group`` per entry in
    *job_ids* (the individual ids are not used).
    """
    for _job_id in job_ids:
        self.client.resource_groups.delete(self.resource_group)
python
{ "resource": "" }
q45594
Deployer.get_vm
train
def get_vm(self, resource_group_name, vm_name):
    """Return all the data about a virtual machine, including its instance view.

    Callers may need to retry this (e.g. via a retry decorator) in case
    the credentials token expires.
    """
    return self.client.virtual_machines.get(
        resource_group_name,
        vm_name,
        expand='instanceView',
    )
python
{ "resource": "" }
q45595
Russound._retrieve_cached_zone_variable
train
def _retrieve_cached_zone_variable(self, zone_id, name):
    """Look up the cached state of the named variable for a zone.

    :raises UncachedVariable: if the variable has not been cached yet.
    """
    try:
        value = self._zone_state[zone_id][name.lower()]
        logger.debug("Zone Cache retrieve %s.%s = %s",
                     zone_id.device_str(), name, value)
        return value
    except KeyError:
        raise UncachedVariable
python
{ "resource": "" }
q45596
Russound._store_cached_zone_variable
train
def _store_cached_zone_variable(self, zone_id, name, value):
    """Record the current value of a zone variable in the cache.

    Notifies every registered zone callback with the (lower-cased)
    variable name.
    """
    key = name.lower()
    self._zone_state.setdefault(zone_id, {})[key] = value
    logger.debug("Zone Cache store %s.%s = %s",
                 zone_id.device_str(), key, value)
    for callback in self._zone_callbacks:
        callback(zone_id, key, value)
python
{ "resource": "" }
q45597
Russound._retrieve_cached_source_variable
train
def _retrieve_cached_source_variable(self, source_id, name):
    """Look up the cached state of the named variable for a source.

    :raises UncachedVariable: if the variable has not been cached yet.
    """
    try:
        value = self._source_state[source_id][name.lower()]
        logger.debug("Source Cache retrieve S[%d].%s = %s",
                     source_id, name, value)
        return value
    except KeyError:
        raise UncachedVariable
python
{ "resource": "" }
q45598
Russound._store_cached_source_variable
train
def _store_cached_source_variable(self, source_id, name, value):
    """Record the current value of a source variable in the cache.

    Notifies every registered source callback with the (lower-cased)
    variable name.
    """
    key = name.lower()
    self._source_state.setdefault(source_id, {})[key] = value
    logger.debug("Source Cache store S[%d].%s = %s",
                 source_id, key, value)
    for callback in self._source_callbacks:
        callback(source_id, key, value)
python
{ "resource": "" }
q45599
Russound.connect
train
def connect(self):
    """
    Connect to the controller and start processing responses.

    Generator-based coroutine (pre-``async def`` style, driven via
    ``yield from``): opens a TCP connection to ``self._host`` /
    ``self._port`` on ``self._loop`` and schedules the I/O loop as a
    background future.
    """
    logger.info("Connecting to %s:%s", self._host, self._port)
    reader, writer = yield from asyncio.open_connection(
        self._host, self._port, loop=self._loop)
    # Run the read/write loop in the background; the future is kept so
    # it can be awaited or cancelled elsewhere.
    self._ioloop_future = ensure_future(
        self._ioloop(reader, writer), loop=self._loop)
    logger.info("Connected")
python
{ "resource": "" }