sentence1
stringlengths
52
3.87M
sentence2
stringlengths
1
47.2k
label
stringclasses
1 value
def _check_middleware_dependencies(concerned_object, required_middleware):
    """
    Verify that required middleware is configured, in the expected order.

    Args:
        concerned_object (object): Object on whose behalf the check runs;
            used only when building the error message.
        required_middleware (list of str): Ordered middleware paths that
            must all appear, in this relative order, in settings.

    Usage:
        Call from a Middleware class's __init__ to have its dependencies
        checked on startup.

    Raises:
        AssertionError: If the required middleware entries are missing or
            out of order in MIDDLEWARE (or MIDDLEWARE_CLASSES on Django 1.8).
    """
    configured = getattr(settings, 'MIDDLEWARE', None)
    if configured is None:
        # Fall back to the pre-Django-1.10 setting name.
        configured = settings.MIDDLEWARE_CLASSES

    # Keep only the entries we care about, preserving configured order.
    found = [name for name in configured if name in required_middleware]
    if found != required_middleware:
        raise AssertionError(
            "{} requires middleware order {} but matching middleware was {}".format(
                concerned_object, required_middleware, found
            )
        )
Check required middleware dependencies exist and in the correct order. Args: concerned_object (object): The object for which the required middleware is being checked. This is used for error messages only. required_middleware (list of String): An ordered list representing the required middleware to be checked. Usage: Add in __init__ method to a Middleware class to have its dependencies checked on startup. def __init__(self): super(SomeMiddleware, self).__init__() _check_middleware_dependencies(self, required_middleware=[ 'edx_django_utils.cache.middleware.RequestCacheMiddleware', ]) Raises: AssertionError if the provided dependencies don't appear in MIDDLEWARE_CLASSES in the correct order.
entailment
def is_valid_request(self, request, parameters=None, fake_method=None, handle_error=True):
    '''
    Validates an OAuth request using the python-oauth2 library:
    https://github.com/simplegeo/python-oauth2

    Args:
        request: Incoming framework request object.
        parameters (dict): Optional OAuth parameters; falls back to
            self.params when empty/omitted.
        fake_method: Optional HTTP-method override passed to parse_request.
        handle_error (bool): When True, a missing signature returns False
            instead of raising.

    Returns:
        bool: True when the OAuth signature verifies.

    Raises:
        oauth2.MissingSignature: Only when handle_error is False.
    '''
    # BUG FIX: default was a shared mutable dict (parameters={}); use a
    # None sentinel instead. Also `except X, e` was Python-2-only syntax.
    if parameters is None:
        parameters = {}
    try:
        # Set the parameters to be what we were passed earlier
        # if we didn't get any passed to us now
        if not parameters and hasattr(self, 'params'):
            parameters = self.params
        method, url, headers, parameters = self.parse_request(
            request, parameters, fake_method)
        oauth_request = oauth2.Request.from_request(
            method, url, headers=headers, parameters=parameters)
        self.oauth_server.verify_request(
            oauth_request, self.oauth_consumer, {})
    except oauth2.MissingSignature:
        if handle_error:
            return False
        raise  # bare raise preserves the original traceback
    # Signature was valid
    return True
Validates an OAuth request using the python-oauth2 library: https://github.com/simplegeo/python-oauth2
entailment
def parse_request(self, request, parameters=None, fake_method=None):
    '''
    Parse Flask request

    Returns a (method, url, headers, form-data copy) tuple.
    '''
    method = request.method
    url = request.url
    headers = request.headers
    form_data = request.form.copy()
    return (method, url, headers, form_data)
Parse Flask request
entailment
def parse_request(self, request, parameters, fake_method=None):
    '''
    Parse Django request

    Returns a (method, url, headers, params) tuple; POST requests take
    their params from the request body, otherwise the supplied
    ``parameters`` are passed through.

    Args:
        request: Django HttpRequest.
        parameters (dict): Parameters used for non-POST requests.
        fake_method: Optional override for the HTTP method.
    '''
    if request.method == 'POST':
        # BUG FIX: was request.POST.iteritems(), which is Python-2-only;
        # dict(...items()) works on both and copies so callers cannot
        # mutate request.POST.
        params = dict(request.POST.items())
    else:
        params = parameters
    return (fake_method or request.method,
            request.build_absolute_uri(),
            request.META,
            params)
Parse Django request
entailment
def parse_request(self, request, parameters=None, fake_method=None):
    '''
    Parse WebOb request

    Returns a (method, url, headers, POST-params) tuple.
    '''
    return (
        request.method,
        request.url,
        request.headers,
        request.POST.mixed(),
    )
Parse WebOb request
entailment
def from_post_response(post_response, content):
    '''
    Convenience method for creating a new OutcomeResponse from a
    response object and its body content.
    '''
    outcome = OutcomeResponse()
    outcome.post_response = post_response
    outcome.response_code = post_response.status
    outcome.process_xml(content)
    return outcome
Convenience method for creating a new OutcomeResponse from a response object.
entailment
def process_xml(self, xml):
    '''
    Parse OutcomeResponse data from XML.

    Populates the message identifier, status fields and (for readResult
    responses) the score. Parsing is best-effort: malformed or partial
    XML leaves the unparsed fields untouched.
    '''
    try:
        root = objectify.fromstring(xml)
        # Get message identifier from header info
        self.message_identifier = root.imsx_POXHeader.\
            imsx_POXResponseHeaderInfo.\
            imsx_messageIdentifier
        status_node = root.imsx_POXHeader.\
            imsx_POXResponseHeaderInfo.\
            imsx_statusInfo
        # Get status parameters from header info status
        self.code_major = status_node.imsx_codeMajor
        self.severity = status_node.imsx_severity
        self.description = status_node.imsx_description
        self.message_ref_identifier = str(
            status_node.imsx_messageRefIdentifier)
        self.operation = status_node.imsx_operationRefIdentifier
        try:
            # Try to get the score
            self.score = str(root.imsx_POXBody.readResultResponse.
                             result.resultScore.textString)
        except AttributeError:
            # Not a readResult, just ignore!
            pass
    except Exception:
        # BUG FIX: was a bare `except:` which also swallowed SystemExit /
        # KeyboardInterrupt; keep the best-effort contract but narrow it.
        pass
Parse OutcomeResponse data from XML.
entailment
def generate_response_xml(self):
    '''
    Generate XML based on the current configuration.

    Builds an imsx_POXEnvelopeResponse document from this instance's
    message/status fields and, when a score is present, a result block.
    Returns the serialized document prefixed with an XML declaration.
    '''
    root = etree.Element(
        'imsx_POXEnvelopeResponse',
        xmlns='http://www.imsglobal.org/services/ltiv1p1/xsd/imsoms_v1p0')

    # Header: version, message identifier and status information.
    header = etree.SubElement(root, 'imsx_POXHeader')
    header_info = etree.SubElement(header, 'imsx_POXResponseHeaderInfo')
    version = etree.SubElement(header_info, 'imsx_version')
    version.text = 'V1.0'
    message_identifier = etree.SubElement(header_info,
                                          'imsx_messageIdentifier')
    message_identifier.text = str(self.message_identifier)
    status_info = etree.SubElement(header_info, 'imsx_statusInfo')
    code_major = etree.SubElement(status_info, 'imsx_codeMajor')
    code_major.text = str(self.code_major)
    severity = etree.SubElement(status_info, 'imsx_severity')
    severity.text = str(self.severity)
    description = etree.SubElement(status_info, 'imsx_description')
    description.text = str(self.description)
    message_ref_identifier = etree.SubElement(
        status_info, 'imsx_messageRefIdentifier')
    message_ref_identifier.text = str(self.message_ref_identifier)
    operation_ref_identifier = etree.SubElement(
        status_info, 'imsx_operationRefIdentifier')
    operation_ref_identifier.text = str(self.operation)

    # Body: the element name is derived from the operation, e.g.
    # 'readResult' -> 'readResultResponse'.
    body = etree.SubElement(root, 'imsx_POXBody')
    response = etree.SubElement(body, '%s%s' % (self.operation, 'Response'))
    # Only emit a result block when a score is set.
    if self.score:
        result = etree.SubElement(response, 'result')
        result_score = etree.SubElement(result, 'resultScore')
        language = etree.SubElement(result_score, 'language')
        language.text = 'en'
        text_string = etree.SubElement(result_score, 'textString')
        text_string.text = str(self.score)
    # NOTE(review): str + etree.tostring(...) assumes Python 2 (tostring
    # returns bytes on Python 3) — confirm the target interpreter.
    return '<?xml version="1.0" encoding="UTF-8"?>' + etree.tostring(root)
Generate XML based on the current configuration.
entailment
def profiled_thread(func):
    """Decorator to profile a thread or function.

    Profiling output is written to
    'profile_<process_id>.<thread_name>.<thread_id>.log' (the previous
    docstring advertised a different filename than the code produced).

    Returns:
        The wrapped callable. It now propagates ``func``'s return value
        (the original wrapper silently discarded it).
    """
    import functools  # local import: the file's import block is outside this chunk

    @functools.wraps(func)  # preserve the wrapped callable's name/docstring
    def wrapper(*args, **kwargs):
        profile = Profile()
        profile.enable()
        try:
            # BUG FIX: return the result instead of dropping it.
            return func(*args, **kwargs)
        finally:
            profile.disable()
            try:
                thread = current_thread()
                profile.dump_stats('profile_%s.%s.%s.log' % (
                    getpid(), thread.name, thread.ident))
            except Exception:  # best-effort: dumping must never mask the result
                logger.exception('Failed to dump stats')
    return wrapper
decorator to profile a thread or function. Profiling output will be written to 'agent_profile_<process_id>.<thread_id>.<thread_name>.log'
entailment
def _minimally_quoted_parameter_value(value):
    """
    Quote a media-type parameter value only when necessary.

    Per RFC 7231 (https://tools.ietf.org/html/rfc7231#section-3.1.1.1),
    parameter values need no quoting when they are a "token"; token
    characters are defined by RFC 7230
    (https://tools.ietf.org/html/rfc7230#section-3.2.6). Anything else is
    emitted as a "quoted-string".
    """
    token_pattern = "^[{charset}]*$".format(
        charset=MediaType.RFC7320_TOKEN_CHARSET)
    if re.match(token_pattern, value) is None:
        return MediaType._quote(value)
    return value
Per RFC 7321 (https://tools.ietf.org/html/rfc7231#section-3.1.1.1): Parameters values don't need to be quoted if they are a "token". Token characters are defined by RFC 7320 (https://tools.ietf.org/html/rfc7230#section-3.2.6). Otherwise, parameters values can be a "quoted-string". So we will quote values that contain characters other than the standard token characters.
entailment
def set_custom_metrics_for_course_key(course_key):
    """
    Set monitoring custom metrics related to a course key.

    Reports 'course_id' and 'org' as New Relic custom parameters. Not
    cached; only supports reporting to New Relic Insights. No-op when the
    newrelic package is unavailable.
    """
    if newrelic:
        agent = newrelic.agent
        agent.add_custom_parameter('course_id', six.text_type(course_key))
        agent.add_custom_parameter('org', six.text_type(course_key.org))
Set monitoring custom metrics related to a course key. This is not cached, and only support reporting to New Relic Insights.
entailment
def set_monitoring_transaction_name(name, group=None, priority=None):
    """
    Sets the transaction name for monitoring.

    Not cached; only supports reporting to New Relic. No-op when the
    newrelic package is unavailable.
    """
    if newrelic:
        newrelic.agent.set_transaction_name(name, group, priority)
Sets the transaction name for monitoring. This is not cached, and only support reporting to New Relic.
entailment
def function_trace(function_name):
    """
    Wraps a chunk of code so it appears as a separate, explicit segment
    in our monitoring tools (a New Relic FunctionTrace when available;
    otherwise a plain pass-through).
    """
    if not newrelic:
        yield
        return
    transaction = newrelic.agent.current_transaction()
    with newrelic.agent.FunctionTrace(transaction, function_name):
        yield
Wraps a chunk of code that we want to appear as a separate, explicit, segment in our monitoring tools.
entailment
def channel(self, channel_id=None, auto_encode_decode=True):
    """Fetch the Channel object identified by the numeric channel_id,
    creating it if it doesn't already exist. See Channel for the meaning
    of auto_encode_decode; if the channel already exists, the auto_* flag
    is left untouched."""
    if channel_id in self.channels:
        return self.channels[channel_id]
    return self.Channel(self, channel_id,
                        auto_encode_decode=auto_encode_decode)
Fetch a Channel object identified by the numeric channel_id, or create that object if it doesn't already exist. See Channel for meaning of auto_encode_decode. If the channel already exists, the auto_* flag will not be updated.
entailment
def drain_events(self, timeout=None):
    """Wait for an event on a channel.

    Blocks (up to ``timeout``) for the next incoming AMQP method and
    dispatches it to the owning channel's handler, returning the
    handler's result.

    Raises:
        AMQPNotImplementedError: If the received method is unknown.
    """
    chanmap = self.channels
    chanid, method_sig, args, content = self._wait_multiple(
        chanmap, None, timeout=timeout,
    )

    channel = chanmap[chanid]

    # Best-effort decode of the message body using its declared encoding;
    # a failed decode leaves the raw bytes in place.
    if (content and
            channel.auto_encode_decode and
            hasattr(content, 'content_encoding')):
        try:
            content.body = content.body.decode(content.content_encoding)
        except Exception:
            pass

    # Connection-level method overrides take precedence over the
    # channel's standard dispatch table.
    amqp_method = (self._method_override.get(method_sig) or
                   channel._METHOD_MAP.get(method_sig, None))

    if amqp_method is None:
        raise AMQPNotImplementedError(
            'Unknown AMQP method {0!r}'.format(method_sig))

    if content is None:
        return amqp_method(channel, args)
    else:
        return amqp_method(channel, args, content)
Wait for an event on a channel.
entailment
def close(self, reply_code=0, reply_text='', method_sig=(0, 0)):
    """Request a connection close

    This method indicates that the sender wants to close the connection.
    This may be due to internal conditions (e.g. a forced shut-down) or
    due to an error handling a specific method, i.e. an exception. When a
    close is due to an exception, the sender provides the class and
    method id of the method which caused the exception.

    RULE: After sending this method any received method except the
        Close-OK method MUST be discarded.
    RULE: The peer sending this method MAY use a counter or timeout to
        detect failure of the other peer to respond correctly with the
        Close-OK method.
    RULE: When a server receives the Close method from a client it MUST
        delete all server-side resources associated with the client's
        context. A client CANNOT reconnect to a context after sending or
        receiving a Close method.

    PARAMETERS:
        reply_code: short
            The reply code. The AMQ reply codes are defined in AMQ RFC 011.
        reply_text: shortstr
            The localised reply text. This text can be logged as an aid
            to resolving issues.
        method_sig: (class_id, method_id) of the failing method when the
            close is provoked by a method exception.
    """
    if self.transport is None:
        # already closed
        return

    args = AMQPWriter()
    args.write_short(reply_code)
    args.write_shortstr(reply_text)
    args.write_short(method_sig[0])  # class_id
    args.write_short(method_sig[1])  # method_id
    try:
        self._send_method((10, 50), args)
        return self.wait(allowed_methods=[
            (10, 50),  # Connection.close
            (10, 51),  # Connection.close_ok
        ])
    except (socket.timeout, socket.error) as e:
        # no point in waiting anymore
        if isinstance(e, socket.timeout):
            # Shrink the socket timeout so the tidy-up below cannot hang.
            try:
                self.sock.settimeout(0.1)
            except:
                pass  # lack of communication should not prevent tidy-up
        self._do_close()
Request a connection close This method indicates that the sender wants to close the connection. This may be due to internal conditions (e.g. a forced shut-down) or due to an error handling a specific method, i.e. an exception. When a close is due to an exception, the sender provides the class and method id of the method which caused the exception. RULE: After sending this method any received method except the Close-OK method MUST be discarded. RULE: The peer sending this method MAY use a counter or timeout to detect failure of the other peer to respond correctly with the Close-OK method. RULE: When a server receives the Close method from a client it MUST delete all server-side resources associated with the client's context. A client CANNOT reconnect to a context after sending or receiving a Close method. PARAMETERS: reply_code: short The reply code. The AMQ reply codes are defined in AMQ RFC 011. reply_text: shortstr The localised reply text. This text can be logged as an aid to resolving issues. class_id: short failing method class When the close is provoked by a method exception, this is the class of the method. method_id: short failing method ID When the close is provoked by a method exception, this is the ID of the method.
entailment
def filter_by(self, types=(), units=()):
    """Return the set of value labels, filtered by type and/or unit.

    An empty sequence for either argument matches as long as the other
    argument matches any values; both empty returns the empty set.

    Args:
        types: Sequence of value types to match.
        units: Sequence of value units to match.

    Returns:
        set/frozenset of matching labels.

    Raises:
        TypeError: If ``types`` or ``units`` is not a sequence.
    """
    if not (isinstance(types, Sequence) and isinstance(units, Sequence)):
        raise TypeError('types/units must be a sequence')
    empty = frozenset()
    # BUG FIX: when both filters were empty, the original fell through to
    # the final expression and raised NameError on undefined locals.
    if not types and not units:
        return empty
    if types:
        type_names = set()
        for type_ in types:
            type_names |= self.by_type.get(type_, empty)
        if not units:
            return type_names
    if units:
        unit_names = set()
        for unit in units:
            unit_names |= self.by_unit.get(unit, empty)
        if not types:
            return unit_names
    # Both filters supplied: labels must satisfy both.
    return type_names & unit_names
Return list of value labels, filtered by either or both type and unit. An empty sequence for either argument will match as long as the other argument matches any values.
entailment
def get_template(self, data=None):  # noqa (complexity)
    """Get new template which represents the values of this point in a
    [PointDataObject](./Point.m.html#IoticAgent.IOT.Point.PointDataObject).
    If data is set (to a dictionary), use this to populate the created
    template.

    Raises:
        ValueError: If the point has no values.
        RefreshException: (Propagated from refresh) if value metadata
            cannot be fetched.
    """
    with self.__lock:
        # Lazily (re)fetch value metadata — but only if the last parse
        # succeeded, so a point known to be unusable isn't refetched.
        if self.__value_templates is None and self.__last_parse_ok:
            try:
                self.__refresh()
            except RefreshException:
                # Point has no (useable) values - don't try to refetch again
                self.__last_parse_ok = False
                raise
        if self.__value_templates is None:
            raise ValueError('Point has no values')
        if data is None:
            template = PointDataObject(self.__value_templates, self.__filter)
        else:
            # Parsing may fail because cached value metadata is stale; on
            # the first failure since a refresh, refetch and retry once.
            while True:
                try:
                    template = PointDataObject._from_dict(self.__value_templates, self.__filter, data)
                except:
                    # parsing has failed for first time since refresh so try again
                    if self.__last_parse_ok:
                        logger.debug('Failed to parse data from for point %s, refreshing', self.__point)
                        self.__last_parse_ok = False
                        try:
                            self.__refresh()
                        except RefreshException:
                            # NOTE(review): breaking here leaves `template`
                            # unbound, so the return below would raise
                            # UnboundLocalError — confirm intended.
                            break
                    else:
                        raise
                else:
                    self.__last_parse_ok = True
                    break
        return template
Get new template which represents the values of this point in a [PointDataObject](./Point.m.html#IoticAgent.IOT.Point.PointDataObject). If data is set (to a dictionary), use this to populate the created template.
entailment
def __refresh(self):
    """Update local knowledge of values (to be used to create new
    skeletal instances). MUST be called within lock.

    Raises:
        RefreshException: If the point has no values, or a value label
            cannot be used as a wrapper attribute name.
    """
    raw_values = self.__get_values()
    if not raw_values:
        raise RefreshException('Point has no values')
    # individual templates
    templates = []
    # lookup tables by type and unit of value
    by_type = {}
    by_unit = {}
    for raw_value in raw_values:
        label = raw_value['label']
        # Labels become attribute names on the wrapper object, so they
        # must be valid identifiers and must not look like dunder names.
        if not valid_identifier(label) or label.startswith('__'):
            raise RefreshException('Value "%s" unsuitable for object wrapper' % label)
        value = Value(label, raw_value['type'], raw_value['unit'], raw_value['comment'])
        templates.append(value)
        # EAFP: add to the existing label set, create it on first sight.
        try:
            by_type[value.type_].add(label)
        except KeyError:
            by_type[value.type_] = {label}
        if value.unit:
            try:
                by_unit[value.unit].add(label)
            except KeyError:
                by_unit[value.unit] = {label}
    self.__value_templates = templates
    self.__filter = _ValueFilter(by_type, by_unit)
Update local knowledge of values (to be used to create new skeletal instances). MUST be called within lock.
entailment
def __get_values(self):
    """Retrieve value information either via describe or point value
    listing. MUST be called within lock.

    Returns:
        list of value dicts; for the listing path, 'comment' is unwrapped
        to the default-language string (or None).

    Raises:
        IOTUnknown: If a describe of a remote point is not a Point.
    """
    values = []
    if self.__remote:
        description = self.__client.describe(self.__point)
        if description is not None:
            if description['type'] != 'Point':
                raise IOTUnknown('%s is not a Point' % self.__point)
            values = description['meta']['values']
    else:
        # Page through the point's value listing.
        limit = 100
        offset = 0
        while True:
            new = self.__point.list(limit=limit, offset=offset)
            values += new
            if len(new) < limit:
                break
            offset += limit
        # Unlike for describe, value comments are keyed by language here, so unwrap to have same layout as for
        # describe call (default language only, if available).
        lang = self.__client.default_lang
        for value in values:
            value['comment'] = value['comment'].get(lang, None) if value['comment'] else None
    return values
Retrieve value information either via describe or point value listing. MUST be called within lock.
entailment
def get_s3_multipart_chunk_size(filesize):
    """Return the S3 multipart chunk size (a whole number of MiB) for a
    file of the given size: never below AWS_MIN_CHUNK_SIZE, and large
    enough that the upload fits in AWS_MAX_MULTIPART_COUNT parts."""
    if filesize <= AWS_MAX_MULTIPART_COUNT * AWS_MIN_CHUNK_SIZE:
        return AWS_MIN_CHUNK_SIZE
    # Ceiling-divide the size over the maximum part count, then round the
    # per-part size up to a whole MiB.
    per_part = -(-filesize // AWS_MAX_MULTIPART_COUNT)
    return -(-per_part // MiB) * MiB
Returns the chunk size of the S3 multipart object, given a file's size.
entailment
def set_ext_param(self, ext_key, param_key, val):
    '''
    Set the provided parameter in a set of extension parameters, creating
    the per-extension parameter dict on first use.
    '''
    params = self.extensions[ext_key]
    if not params:
        params = defaultdict(lambda: None)
        self.extensions[ext_key] = params
    params[param_key] = val
Set the provided parameter in a set of extension parameters.
entailment
def get_ext_param(self, ext_key, param_key):
    '''
    Get a specific param from the given extension's parameters, or None
    when the extension has no parameters at all.
    '''
    params = self.extensions[ext_key]
    if params:
        return params[param_key]
    return None
Get specific param in set of provided extension parameters.
entailment
def process_xml(self, xml):
    '''
    Parse tool configuration data out of the Common Cartridge LTI link XML.

    Populates title/description/launch URLs/icons, cartridge bundle and
    icon identifiers, vendor information, custom parameters and
    per-platform extension parameters.
    '''
    root = objectify.fromstring(xml, parser=etree.XMLParser())
    # Parse all children of the root node
    for child in root.getchildren():
        if 'title' in child.tag:
            self.title = child.text
        if 'description' in child.tag:
            self.description = child.text
        if 'secure_launch_url' in child.tag:
            self.secure_launch_url = child.text
        elif 'launch_url' in child.tag:
            self.launch_url = child.text
        # Match the most specific tags first: 'icon' is a substring of
        # both 'secure_icon' and 'cartridge_icon', so unchained checks
        # would clobber self.icon for those tags.
        if 'cartridge_bundle' in child.tag:
            self.cartridge_bundle = child.attrib['identifierref']
        elif 'cartridge_icon' in child.tag:
            # BUG FIX: tag was misspelled 'catridge_icon' (never matched)
            # and the attribute access was 'atrib' (AttributeError).
            self.cartridge_icon = child.attrib['identifierref']
        elif 'secure_icon' in child.tag:
            self.secure_icon = child.text
        elif 'icon' in child.tag:
            self.icon = child.text
        if 'vendor' in child.tag:
            # Parse vendor tag
            for v_child in child.getchildren():
                if 'code' in v_child.tag:
                    self.vendor_code = v_child.text
                if 'description' in v_child.tag:
                    self.vendor_description = v_child.text
                if 'name' in v_child.tag:
                    self.vendor_name = v_child.text
                if 'url' in v_child.tag:
                    self.vendor_url = v_child.text
                if 'contact' in v_child.tag:
                    # Parse contact tag for email and name
                    for c_child in v_child:
                        if 'name' in c_child.tag:
                            self.vendor_contact_name = c_child.text
                        if 'email' in c_child.tag:
                            self.vendor_contact_email = c_child.text
        if 'custom' in child.tag:
            # Parse custom tags
            for custom_child in child.getchildren():
                self.custom_params[custom_child.attrib['name']] = \
                    custom_child.text
        if 'extensions' in child.tag:
            platform = child.attrib['platform']
            properties = {}
            # Parse extension tags
            for ext_child in child.getchildren():
                if 'property' in ext_child.tag:
                    properties[ext_child.attrib['name']] = ext_child.text
                elif 'options' in ext_child.tag:
                    opt_name = ext_child.attrib['name']
                    options = {}
                    for option_child in ext_child.getchildren():
                        options[option_child.attrib['name']] = \
                            option_child.text
                    properties[opt_name] = options
            self.set_ext_params(platform, properties)
Parse tool configuration data out of the Common Cartridge LTI link XML.
entailment
def to_xml(self, opts=defaultdict(lambda: None)):
    '''
    Generate XML from the current settings.

    Raises:
        InvalidLTIConfigError: If launch_url or secure_launch_url is unset.
    '''
    if not self.launch_url or not self.secure_launch_url:
        raise InvalidLTIConfigError('Invalid LTI configuration')

    root = etree.Element('cartridge_basiclti_link', attrib={
        '{%s}%s' % (NSMAP['xsi'], 'schemaLocation'): 'http://www.imsglobal.org/xsd/imslticc_v1p0 http://www.imsglobal.org/xsd/lti/ltiv1p0/imslticc_v1p0.xsd http://www.imsglobal.org/xsd/imsbasiclti_v1p0 http://www.imsglobal.org/xsd/lti/ltiv1p0/imsbasiclti_v1p0p1.xsd http://www.imsglobal.org/xsd/imslticm_v1p0 http://www.imsglobal.org/xsd/lti/ltiv1p0/imslticm_v1p0.xsd http://www.imsglobal.org/xsd/imslticp_v1p0 http://www.imsglobal.org/xsd/lti/ltiv1p0/imslticp_v1p0.xsd',
        'xmlns': 'http://www.imsglobal.org/xsd/imslticc_v1p0'
    }, nsmap=NSMAP)

    for key in ['title', 'description', 'launch_url', 'secure_launch_url']:
        option = etree.SubElement(root, '{%s}%s' % (NSMAP['blti'], key))
        option.text = getattr(self, key)

    vendor_keys = ['name', 'code', 'description', 'url']
    # BUG FIX: the original tested any('vendor_' + key for key in ...),
    # a generator of non-empty strings that is always True, so an empty
    # vendor block was always emitted. Test the attribute values instead.
    if any(getattr(self, 'vendor_' + key) is not None
           for key in vendor_keys) or self.vendor_contact_email:
        vendor_node = etree.SubElement(root,
                                       '{%s}%s' % (NSMAP['blti'], 'vendor'))
        for key in vendor_keys:
            if getattr(self, 'vendor_' + key) is not None:
                v_node = etree.SubElement(vendor_node,
                                          '{%s}%s' % (NSMAP['lticp'], key))
                v_node.text = getattr(self, 'vendor_' + key)
        if getattr(self, 'vendor_contact_email'):
            v_node = etree.SubElement(vendor_node,
                                      '{%s}%s' % (NSMAP['lticp'], 'contact'))
            c_name = etree.SubElement(v_node,
                                      '{%s}%s' % (NSMAP['lticp'], 'name'))
            c_name.text = self.vendor_contact_name
            c_email = etree.SubElement(v_node,
                                       '{%s}%s' % (NSMAP['lticp'], 'email'))
            c_email.text = self.vendor_contact_email

    # Custom params
    if len(self.custom_params) != 0:
        custom_node = etree.SubElement(root,
                                       '{%s}%s' % (NSMAP['blti'], 'custom'))
        for (key, val) in sorted(self.custom_params.items()):
            c_node = etree.SubElement(custom_node,
                                      '{%s}%s' % (NSMAP['lticm'], 'property'))
            c_node.set('name', key)
            c_node.text = val

    # Extension params
    if len(self.extensions) != 0:
        for (key, params) in sorted(self.extensions.items()):
            extension_node = etree.SubElement(
                root, '{%s}%s' % (NSMAP['blti'], 'extensions'), platform=key)
            self.recursive_options(extension_node, params)

    if getattr(self, 'cartridge_bundle'):
        identifierref = etree.SubElement(root, 'cartridge_bundle',
                                         identifierref=self.cartridge_bundle)
    if getattr(self, 'cartridge_icon'):
        identifierref = etree.SubElement(root, 'cartridge_icon',
                                         identifierref=self.cartridge_icon)

    return '<?xml version="1.0" encoding="UTF-8"?>' + etree.tostring(root)
Generate XML from the current settings.
entailment
def create_widget(self):
    """ Create the underlying widget: a vertical ScrollView or a
    HorizontalScrollView, based on the declaration's orientation.
    """
    d = self.declaration
    widget_cls = (ScrollView if d.orientation == 'vertical'
                  else HorizontalScrollView)
    self.widget = widget_cls(self.get_context(), None, d.style)
Create the underlying widget.
entailment
def create_widget(self):
    """ Create the underlying CalendarView widget, falling back to the
    platform's default calendar style when none is declared.
    """
    d = self.declaration
    style = d.style or "@attr/calendarViewStyle"
    self.widget = CalendarView(self.get_context(), None, style)
Create the underlying widget.
entailment
def init_widget(self):
    """ Initialize the underlying widget and hook up the date-change
    listener.
    """
    super(AndroidCalendarView, self).init_widget()
    widget = self.widget
    #: Setup listener
    widget.setOnDateChangeListener(widget.getId())
    widget.onSelectedDayChange.connect(self.on_selected_day_change)
Initialize the underlying widget.
entailment
def init_widget(self):
    """ Initialize the underlying widget and subscribe to the fragment's
    view lifecycle events.
    """
    super(AndroidFragment, self).init_widget()
    fragment = self.fragment
    fragment.setFragmentListener(fragment.getId())
    fragment.onCreateView.connect(self.on_create_view)
    fragment.onDestroyView.connect(self.on_destroy_view)
Initialize the underlying widget.
entailment
def init_layout(self):
    """ Initialize the layout of the toolkit widget.

    Called during the bottom-up pass, when child widgets are fully
    initialized and laid out. Registers this fragment with the parent's
    adapter (no-op when there is no parent).
    """
    parent = self.parent()
    if parent is None:
        return
    self.adapter = parent.adapter
    self.adapter.addFragment(self.fragment)
Initialize the layout of the toolkit widget. This method is called during the bottom-up pass. This method should initialize the layout of the widget. The child widgets will be fully initialized and layed out when this is called.
entailment
def destroy(self):
    """ Custom destructor that deletes the fragment and removes it from
    the adapter it was added to.
    """
    fragment = self.fragment
    if fragment:
        #: Stop listening
        fragment.setFragmentListener(None)
        #: Remove from the adapter, if any
        if self.adapter is not None:
            self.adapter.removeFragment(self.fragment)
        del self.fragment
    super(AndroidFragment, self).destroy()
Custom destructor that deletes the fragment and removes itself from the adapter it was added to.
entailment
def on_create_view(self):
    """ Build and return the view, marking the declaration's condition as
    met and resolving the ready future on the first invocation only.
    """
    d = self.declaration
    first_time = not d.condition
    if first_time:
        d.condition = True
    view = self.get_view()
    if first_time:
        self.ready.set_result(True)
    return view
Trigger the click
entailment
def get_view(self):
    """ Get the page to display.

    Returns the cached view when caching is enabled and one exists;
    otherwise builds it. With defer_loading, an empty FrameLayout is
    returned immediately and the real view is added via a deferred call.
    """
    d = self.declaration
    if d.cached and self.widget:
        return self.widget
    if not d.defer_loading:
        self.widget = self.load_view()
        return self.widget
    self.widget = FrameLayout(self.get_context())
    app = self.get_context()
    app.deferred_call(
        lambda: self.widget.addView(self.load_view(), 0))
    return self.widget
Get the page to display. If a view has already been created and is cached, use that otherwise initialize the view and proxy. If defer loading is used, wrap the view in a FrameLayout and defer add view until later.
entailment
def init_widget(self):
    """ Initialize the underlying widget, applying any declared title and
    icon.
    """
    super(AndroidPagerFragment, self).init_widget()
    d = self.declaration
    for value, setter in ((d.title, self.set_title),
                          (d.icon, self.set_icon)):
        if value:
            setter(value)
Initialize the underlying widget.
entailment
def engine(func):
    """Callback-oriented decorator for asynchronous generators.

    This is an older interface; for new code that does not need to be
    compatible with versions of Tornado older than 3.0 the `coroutine`
    decorator is recommended instead.

    This decorator is similar to `coroutine`, except it does not return a
    `.Future` and the ``callback`` argument is not treated specially.

    In most cases, functions decorated with `engine` should take a
    ``callback`` argument and invoke it with their result when they are
    finished. One notable exception is the `~tornado.web.RequestHandler`
    :ref:`HTTP verb methods <verbs>`, which use ``self.finish()`` in
    place of a callback argument.
    """
    func = _make_coroutine_wrapper(func, replace_callback=False)

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        future = func(*args, **kwargs)

        def final_callback(future):
            # engine functions may only signal completion, never a value.
            if future.result() is not None:
                raise ReturnValueIgnoredError(
                    "@gen.engine functions cannot return values: %r" %
                    (future.result(),))
        # The engine interface doesn't give us any way to return
        # errors but to raise them into the stack context.
        # Save the stack context here to use when the Future has resolved.
        future.add_done_callback(stack_context.wrap(final_callback))
    return wrapper
Callback-oriented decorator for asynchronous generators. This is an older interface; for new code that does not need to be compatible with versions of Tornado older than 3.0 the `coroutine` decorator is recommended instead. This decorator is similar to `coroutine`, except it does not return a `.Future` and the ``callback`` argument is not treated specially. In most cases, functions decorated with `engine` should take a ``callback`` argument and invoke it with their result when they are finished. One notable exception is the `~tornado.web.RequestHandler` :ref:`HTTP verb methods <verbs>`, which use ``self.finish()`` in place of a callback argument.
entailment
def _make_coroutine_wrapper(func, replace_callback):
    """The inner workings of ``@gen.coroutine`` and ``@gen.engine``.

    The two decorators differ in their treatment of the ``callback``
    argument, so we cannot simply implement ``@engine`` in terms of
    ``@coroutine``.
    """
    # On Python 3.5, set the coroutine flag on our generator, to allow it
    # to be used with 'await'.
    wrapped = func
    if hasattr(types, 'coroutine'):
        func = types.coroutine(func)

    @functools.wraps(wrapped)
    def wrapper(*args, **kwargs):
        future = TracebackFuture()

        if replace_callback and 'callback' in kwargs:
            # Legacy callback interface: invoke it when the future resolves.
            callback = kwargs.pop('callback')
            IOLoop.current().add_future(
                future, lambda future: callback(future.result()))

        try:
            result = func(*args, **kwargs)
        except (Return, StopIteration) as e:
            result = _value_from_stopiteration(e)
        except Exception:
            future.set_exc_info(sys.exc_info())
            return future
        else:
            if isinstance(result, GeneratorType):
                # Inline the first iteration of Runner.run. This lets us
                # avoid the cost of creating a Runner when the coroutine
                # never actually yields, which in turn allows us to
                # use "optional" coroutines in critical path code without
                # performance penalty for the synchronous case.
                try:
                    orig_stack_contexts = stack_context._state.contexts
                    yielded = next(result)
                    if stack_context._state.contexts is not orig_stack_contexts:
                        yielded = TracebackFuture()
                        yielded.set_exception(
                            stack_context.StackContextInconsistentError(
                                'stack_context inconsistency (probably caused '
                                'by yield within a "with StackContext" block)'))
                except (StopIteration, Return) as e:
                    future.set_result(_value_from_stopiteration(e))
                except Exception:
                    future.set_exc_info(sys.exc_info())
                else:
                    # Coroutine actually yielded: hand off to a Runner.
                    _futures_to_runners[future] = Runner(result, future, yielded)
                yielded = None
                try:
                    return future
                finally:
                    # Subtle memory optimization: if next() raised an exception,
                    # the future's exc_info contains a traceback which
                    # includes this stack frame. This creates a cycle,
                    # which will be collected at the next full GC but has
                    # been shown to greatly increase memory usage of
                    # benchmarks (relative to the refcount-based scheme
                    # used in the absence of cycles). We can avoid the
                    # cycle by clearing the local variable after we return it.
                    future = None
        # Non-generator result: the "coroutine" completed synchronously.
        future.set_result(result)
        return future
    wrapper.__wrapped__ = wrapped
    wrapper.__tornado_coroutine__ = True
    return wrapper
The inner workings of ``@gen.coroutine`` and ``@gen.engine``. The two decorators differ in their treatment of the ``callback`` argument, so we cannot simply implement ``@engine`` in terms of ``@coroutine``.
entailment
def Task(func, *args, **kwargs): """Adapts a callback-based asynchronous function for use in coroutines. Takes a function (and optional additional arguments) and runs it with those arguments plus a ``callback`` keyword argument. The argument passed to the callback is returned as the result of the yield expression. .. versionchanged:: 4.0 ``gen.Task`` is now a function that returns a `.Future`, instead of a subclass of `YieldPoint`. It still behaves the same way when yielded. """ future = Future() def handle_exception(typ, value, tb): if future.done(): return False future.set_exc_info((typ, value, tb)) return True def set_result(result): if future.done(): return future.set_result(result) with stack_context.ExceptionStackContext(handle_exception): func(*args, callback=_argument_adapter(set_result), **kwargs) return future
Adapts a callback-based asynchronous function for use in coroutines. Takes a function (and optional additional arguments) and runs it with those arguments plus a ``callback`` keyword argument. The argument passed to the callback is returned as the result of the yield expression. .. versionchanged:: 4.0 ``gen.Task`` is now a function that returns a `.Future`, instead of a subclass of `YieldPoint`. It still behaves the same way when yielded.
entailment
def _contains_yieldpoint(children): """Returns True if ``children`` contains any YieldPoints. ``children`` may be a dict or a list, as used by `MultiYieldPoint` and `multi_future`. """ if isinstance(children, dict): return any(isinstance(i, YieldPoint) for i in children.values()) if isinstance(children, list): return any(isinstance(i, YieldPoint) for i in children) return False
Returns True if ``children`` contains any YieldPoints. ``children`` may be a dict or a list, as used by `MultiYieldPoint` and `multi_future`.
entailment
def multi(children, quiet_exceptions=()): """Runs multiple asynchronous operations in parallel. ``children`` may either be a list or a dict whose values are yieldable objects. ``multi()`` returns a new yieldable object that resolves to a parallel structure containing their results. If ``children`` is a list, the result is a list of results in the same order; if it is a dict, the result is a dict with the same keys. That is, ``results = yield multi(list_of_futures)`` is equivalent to:: results = [] for future in list_of_futures: results.append(yield future) If any children raise exceptions, ``multi()`` will raise the first one. All others will be logged, unless they are of types contained in the ``quiet_exceptions`` argument. If any of the inputs are `YieldPoints <YieldPoint>`, the returned yieldable object is a `YieldPoint`. Otherwise, returns a `.Future`. This means that the result of `multi` can be used in a native coroutine if and only if all of its children can be. In a ``yield``-based coroutine, it is not normally necessary to call this function directly, since the coroutine runner will do it automatically when a list or dict is yielded. However, it is necessary in ``await``-based coroutines, or to pass the ``quiet_exceptions`` argument. This function is available under the names ``multi()`` and ``Multi()`` for historical reasons. .. versionchanged:: 4.2 If multiple yieldables fail, any exceptions after the first (which is raised) will be logged. Added the ``quiet_exceptions`` argument to suppress this logging for selected exception types. .. versionchanged:: 4.3 Replaced the class ``Multi`` and the function ``multi_future`` with a unified function ``multi``. Added support for yieldables other than `YieldPoint` and `.Future`. """ if _contains_yieldpoint(children): return MultiYieldPoint(children, quiet_exceptions=quiet_exceptions) else: return multi_future(children, quiet_exceptions=quiet_exceptions)
Runs multiple asynchronous operations in parallel. ``children`` may either be a list or a dict whose values are yieldable objects. ``multi()`` returns a new yieldable object that resolves to a parallel structure containing their results. If ``children`` is a list, the result is a list of results in the same order; if it is a dict, the result is a dict with the same keys. That is, ``results = yield multi(list_of_futures)`` is equivalent to:: results = [] for future in list_of_futures: results.append(yield future) If any children raise exceptions, ``multi()`` will raise the first one. All others will be logged, unless they are of types contained in the ``quiet_exceptions`` argument. If any of the inputs are `YieldPoints <YieldPoint>`, the returned yieldable object is a `YieldPoint`. Otherwise, returns a `.Future`. This means that the result of `multi` can be used in a native coroutine if and only if all of its children can be. In a ``yield``-based coroutine, it is not normally necessary to call this function directly, since the coroutine runner will do it automatically when a list or dict is yielded. However, it is necessary in ``await``-based coroutines, or to pass the ``quiet_exceptions`` argument. This function is available under the names ``multi()`` and ``Multi()`` for historical reasons. .. versionchanged:: 4.2 If multiple yieldables fail, any exceptions after the first (which is raised) will be logged. Added the ``quiet_exceptions`` argument to suppress this logging for selected exception types. .. versionchanged:: 4.3 Replaced the class ``Multi`` and the function ``multi_future`` with a unified function ``multi``. Added support for yieldables other than `YieldPoint` and `.Future`.
entailment
def multi_future(children, quiet_exceptions=()): """Wait for multiple asynchronous futures in parallel. This function is similar to `multi`, but does not support `YieldPoints <YieldPoint>`. .. versionadded:: 4.0 .. versionchanged:: 4.2 If multiple ``Futures`` fail, any exceptions after the first (which is raised) will be logged. Added the ``quiet_exceptions`` argument to suppress this logging for selected exception types. .. deprecated:: 4.3 Use `multi` instead. """ if isinstance(children, dict): keys = list(children.keys()) children = children.values() else: keys = None children = list(map(convert_yielded, children)) assert all(is_future(i) for i in children) unfinished_children = set(children) future = Future() if not children: future.set_result({} if keys is not None else []) def callback(f): unfinished_children.remove(f) if not unfinished_children: result_list = [] for f in children: try: result_list.append(f.result()) except Exception as e: if future.done(): if not isinstance(e, quiet_exceptions): app_log.error("Multiple exceptions in yield list", exc_info=True) else: future.set_exc_info(sys.exc_info()) if not future.done(): if keys is not None: future.set_result(dict(zip(keys, result_list))) else: future.set_result(result_list) listening = set() for f in children: if f not in listening: listening.add(f) f.add_done_callback(callback) return future
Wait for multiple asynchronous futures in parallel. This function is similar to `multi`, but does not support `YieldPoints <YieldPoint>`. .. versionadded:: 4.0 .. versionchanged:: 4.2 If multiple ``Futures`` fail, any exceptions after the first (which is raised) will be logged. Added the ``quiet_exceptions`` argument to suppress this logging for selected exception types. .. deprecated:: 4.3 Use `multi` instead.
entailment
def maybe_future(x): """Converts ``x`` into a `.Future`. If ``x`` is already a `.Future`, it is simply returned; otherwise it is wrapped in a new `.Future`. This is suitable for use as ``result = yield gen.maybe_future(f())`` when you don't know whether ``f()`` returns a `.Future` or not. .. deprecated:: 4.3 This function only handles ``Futures``, not other yieldable objects. Instead of `maybe_future`, check for the non-future result types you expect (often just ``None``), and ``yield`` anything unknown. """ if is_future(x): return x else: fut = Future() fut.set_result(x) return fut
Converts ``x`` into a `.Future`. If ``x`` is already a `.Future`, it is simply returned; otherwise it is wrapped in a new `.Future`. This is suitable for use as ``result = yield gen.maybe_future(f())`` when you don't know whether ``f()`` returns a `.Future` or not. .. deprecated:: 4.3 This function only handles ``Futures``, not other yieldable objects. Instead of `maybe_future`, check for the non-future result types you expect (often just ``None``), and ``yield`` anything unknown.
entailment
def with_timeout(timeout, future, quiet_exceptions=()): """Wraps a `.Future` (or other yieldable object) in a timeout. Raises `tornado.util.TimeoutError` if the input future does not complete before ``timeout``, which may be specified in any form allowed by `.IOLoop.add_timeout` (i.e. a `datetime.timedelta` or an absolute time relative to `.IOLoop.time`) If the wrapped `.Future` fails after it has timed out, the exception will be logged unless it is of a type contained in ``quiet_exceptions`` (which may be an exception type or a sequence of types). Does not support `YieldPoint` subclasses. .. versionadded:: 4.0 .. versionchanged:: 4.1 Added the ``quiet_exceptions`` argument and the logging of unhandled exceptions. .. versionchanged:: 4.4 Added support for yieldable objects other than `.Future`. """ # TODO: allow YieldPoints in addition to other yieldables? # Tricky to do with stack_context semantics. # # It's tempting to optimize this by cancelling the input future on timeout # instead of creating a new one, but A) we can't know if we are the only # one waiting on the input future, so cancelling it might disrupt other # callers and B) concurrent futures can only be cancelled while they are # in the queue, so cancellation cannot reliably bound our waiting time. future = convert_yielded(future) result = Future() chain_future(future, result) io_loop = IOLoop.current() def error_callback(future): try: future.result() except Exception as e: if not isinstance(e, quiet_exceptions): app_log.error("Exception in Future %r after timeout", future, exc_info=True) def timeout_callback(): result.set_exception(TimeoutError("Timeout")) # In case the wrapped future goes on to fail, log it. 
future.add_done_callback(error_callback) timeout_handle = io_loop.add_timeout( timeout, timeout_callback) if isinstance(future, Future): # We know this future will resolve on the IOLoop, so we don't # need the extra thread-safety of IOLoop.add_future (and we also # don't care about StackContext here. future.add_done_callback( lambda future: io_loop.remove_timeout(timeout_handle)) else: # concurrent.futures.Futures may resolve on any thread, so we # need to route them back to the IOLoop. io_loop.add_future( future, lambda future: io_loop.remove_timeout(timeout_handle)) return result
Wraps a `.Future` (or other yieldable object) in a timeout. Raises `tornado.util.TimeoutError` if the input future does not complete before ``timeout``, which may be specified in any form allowed by `.IOLoop.add_timeout` (i.e. a `datetime.timedelta` or an absolute time relative to `.IOLoop.time`) If the wrapped `.Future` fails after it has timed out, the exception will be logged unless it is of a type contained in ``quiet_exceptions`` (which may be an exception type or a sequence of types). Does not support `YieldPoint` subclasses. .. versionadded:: 4.0 .. versionchanged:: 4.1 Added the ``quiet_exceptions`` argument and the logging of unhandled exceptions. .. versionchanged:: 4.4 Added support for yieldable objects other than `.Future`.
entailment
def sleep(duration): """Return a `.Future` that resolves after the given number of seconds. When used with ``yield`` in a coroutine, this is a non-blocking analogue to `time.sleep` (which should not be used in coroutines because it is blocking):: yield gen.sleep(0.5) Note that calling this function on its own does nothing; you must wait on the `.Future` it returns (usually by yielding it). .. versionadded:: 4.1 """ f = Future() IOLoop.current().call_later(duration, lambda: f.set_result(None)) return f
Return a `.Future` that resolves after the given number of seconds. When used with ``yield`` in a coroutine, this is a non-blocking analogue to `time.sleep` (which should not be used in coroutines because it is blocking):: yield gen.sleep(0.5) Note that calling this function on its own does nothing; you must wait on the `.Future` it returns (usually by yielding it). .. versionadded:: 4.1
entailment
def _argument_adapter(callback): """Returns a function that when invoked runs ``callback`` with one arg. If the function returned by this function is called with exactly one argument, that argument is passed to ``callback``. Otherwise the args tuple and kwargs dict are wrapped in an `Arguments` object. """ def wrapper(*args, **kwargs): if kwargs or len(args) > 1: callback(Arguments(args, kwargs)) elif args: callback(args[0]) else: callback(None) return wrapper
Returns a function that when invoked runs ``callback`` with one arg. If the function returned by this function is called with exactly one argument, that argument is passed to ``callback``. Otherwise the args tuple and kwargs dict are wrapped in an `Arguments` object.
entailment
def convert_yielded(yielded): """Convert a yielded object into a `.Future`. The default implementation accepts lists, dictionaries, and Futures. If the `~functools.singledispatch` library is available, this function may be extended to support additional types. For example:: @convert_yielded.register(asyncio.Future) def _(asyncio_future): return tornado.platform.asyncio.to_tornado_future(asyncio_future) .. versionadded:: 4.1 """ # Lists and dicts containing YieldPoints were handled earlier. if yielded is None: return moment elif isinstance(yielded, (list, dict)): return multi(yielded) elif is_future(yielded): return yielded elif isawaitable(yielded): return _wrap_awaitable(yielded) else: raise BadYieldError("yielded unknown object %r" % (yielded,))
Convert a yielded object into a `.Future`. The default implementation accepts lists, dictionaries, and Futures. If the `~functools.singledispatch` library is available, this function may be extended to support additional types. For example:: @convert_yielded.register(asyncio.Future) def _(asyncio_future): return tornado.platform.asyncio.to_tornado_future(asyncio_future) .. versionadded:: 4.1
entailment
def done(self): """Returns True if this iterator has no more results.""" if self._finished or self._unfinished: return False # Clear the 'current' values when iteration is done. self.current_index = self.current_future = None return True
Returns True if this iterator has no more results.
entailment
def next(self): """Returns a `.Future` that will yield the next available result. Note that this `.Future` will not be the same object as any of the inputs. """ self._running_future = TracebackFuture() if self._finished: self._return_result(self._finished.popleft()) return self._running_future
Returns a `.Future` that will yield the next available result. Note that this `.Future` will not be the same object as any of the inputs.
entailment
def _return_result(self, done): """Called set the returned future's state that of the future we yielded, and set the current future for the iterator. """ chain_future(done, self._running_future) self.current_future = done self.current_index = self._unfinished.pop(done)
Called set the returned future's state that of the future we yielded, and set the current future for the iterator.
entailment
def register_callback(self, key): """Adds ``key`` to the list of callbacks.""" if self.pending_callbacks is None: # Lazily initialize the old-style YieldPoint data structures. self.pending_callbacks = set() self.results = {} if key in self.pending_callbacks: raise KeyReuseError("key %r is already pending" % (key,)) self.pending_callbacks.add(key)
Adds ``key`` to the list of callbacks.
entailment
def is_ready(self, key): """Returns true if a result is available for ``key``.""" if self.pending_callbacks is None or key not in self.pending_callbacks: raise UnknownKeyError("key %r is not pending" % (key,)) return key in self.results
Returns true if a result is available for ``key``.
entailment
def set_result(self, key, result): """Sets the result for ``key`` and attempts to resume the generator.""" self.results[key] = result if self.yield_point is not None and self.yield_point.is_ready(): try: self.future.set_result(self.yield_point.get_result()) except: self.future.set_exc_info(sys.exc_info()) self.yield_point = None self.run()
Sets the result for ``key`` and attempts to resume the generator.
entailment
def pop_result(self, key): """Returns the result for ``key`` and unregisters it.""" self.pending_callbacks.remove(key) return self.results.pop(key)
Returns the result for ``key`` and unregisters it.
entailment
def run(self): """Starts or resumes the generator, running until it reaches a yield point that is not ready. """ if self.running or self.finished: return try: self.running = True while True: future = self.future if not future.done(): return self.future = None try: orig_stack_contexts = stack_context._state.contexts exc_info = None try: value = future.result() except Exception: self.had_exception = True exc_info = sys.exc_info() future = None if exc_info is not None: try: yielded = self.gen.throw(*exc_info) finally: # Break up a reference to itself # for faster GC on CPython. exc_info = None else: yielded = self.gen.send(value) if stack_context._state.contexts is not orig_stack_contexts: self.gen.throw( stack_context.StackContextInconsistentError( 'stack_context inconsistency (probably caused ' 'by yield within a "with StackContext" block)')) except (StopIteration, Return) as e: self.finished = True self.future = _null_future if self.pending_callbacks and not self.had_exception: # If we ran cleanly without waiting on all callbacks # raise an error (really more of a warning). If we # had an exception then some callbacks may have been # orphaned, so skip the check in that case. raise LeakedCallbackError( "finished without waiting for callbacks %r" % self.pending_callbacks) self.result_future.set_result(_value_from_stopiteration(e)) self.result_future = None self._deactivate_stack_context() return except Exception: self.finished = True self.future = _null_future self.result_future.set_exc_info(sys.exc_info()) self.result_future = None self._deactivate_stack_context() return if not self.handle_yield(yielded): return yielded = None finally: self.running = False
Starts or resumes the generator, running until it reaches a yield point that is not ready.
entailment
def create_widget(self): """ Create the underlying widget. """ d = self.declaration self.widget = RadioButton(self.get_context(), None, d.style or '@attr/radioButtonStyle')
Create the underlying widget.
entailment
def write_message(self, data, binary=False): """ Write a message to the client """ self.connection.write_message(data, binary)
Write a message to the client
entailment
def render_files(self, root=None): """ Render the file path as accordions """ if root is None: tmp = os.environ.get('TMP') root = sys.path[1 if tmp and tmp in sys.path else 0] items = [] for filename in os.listdir(root): # for subdirname in dirnames: # path = os.path.join(dirname, subdirname) # items.append(FOLDER_TMPL.format( # name=subdirname, # id=path, # items=self.render_files(path) # )) #for filename in filenames: f,ext = os.path.splitext(filename) if ext in ['.py', '.enaml']: items.append(FILE_TMPL.format( name=filename, id=filename )) return "".join(items)
Render the file path as accordions
entailment
def render_code(self): """ Try to load the previous code (if we had a crash or something) I should allow saving. """ tmp_dir = os.environ.get('TMP','') view_code = os.path.join(tmp_dir,'view.enaml') if os.path.exists(view_code): try: with open(view_code) as f: return f.read() except: pass return DEFAULT_CODE
Try to load the previous code (if we had a crash or something) I should allow saving.
entailment
def render_component(self, declaration): """ Render a row of all the attributes """ items = ["""<tr><td>{name}</td><td>{type}</td></tr>""" .format(name=m.name, type=self.render_component_types(declaration, m)) for m in self.get_component_members(declaration)] info = [] parent = declaration.__mro__[1] #: Superclass info.append("<tr><td>extends component</td>" "<td><a href='#component-{id}'>{name}</a></td></td>" .format(id=parent.__name__.lower(), name=parent.__name__)) #: Source and example, only works with enamlnative builtins source_path = inspect.getfile(declaration).replace( ".pyo", ".py").replace(".pyc", ".py") if 'enamlnative' in source_path: source_link = "https://github.com/frmdstryr/" \ "enaml-native/tree/master/src/{}".format( source_path.split("assets/python")[1] ) info.append("<tr><td>source code</td>" "<td><a href='{}' target='_blank'>show</a></td></td>" .format(source_link)) #: Examples link example_link = "https://www.codelv.com/projects/" \ "enaml-native/docs/components#{}" \ .format(declaration.__name__.lower()) info.append("<tr><td>example usage</td>" "<td><a href='{}' target='_blank'>view</a></td></td>" .format(example_link)) return COMPONENT_TMPL.format(id=declaration.__name__.lower(), name=declaration.__name__, info="".join(info), items="".join(items))
Render a row of all the attributes
entailment
def start(self): """ Start the dev session. Attempt to use tornado first, then try twisted """ print("Starting debug client cwd: {}".format(os.getcwd())) print("Sys path: {}".format(sys.path)) #: Initialize the hotswapper self.hotswap = Hotswapper(debug=False) if self.mode == 'server': self.server.start(self) else: self.client.start(self)
Start the dev session. Attempt to use tornado first, then try twisted
entailment
def _default_url(self): """ Websocket URL to connect to and listen for reload requests """ host = 'localhost' if self.mode == 'remote' else self.host return 'ws://{}:{}/dev'.format(host, self.port)
Websocket URL to connect to and listen for reload requests
entailment
def write_message(self, data, binary=False): """ Write a message to the active client """ self.client.write_message(data, binary=binary)
Write a message to the active client
entailment
def handle_message(self, data): """ When we get a message """ msg = json.loads(data) print("Dev server message: {}".format(msg)) handler_name = 'do_{}'.format(msg['type']) if hasattr(self, handler_name): handler = getattr(self, handler_name) result = handler(msg) return {'ok': True, 'result': result} else: err = "Warning: Unhandled message: {}".format(msg) print(err) return {'ok': False, 'message': err}
When we get a message
entailment
def do_reload(self, msg): """ Called when the dev server wants to reload the view. """ #: TODO: This should use the autorelaoder app = self.app #: Show loading screen try: self.app.widget.showLoading("Reloading... Please wait.", now=True) #self.app.widget.restartPython(now=True) #sys.exit(0) except: #: TODO: Implement for iOS... pass self.save_changed_files(msg) if app.load_view is None: print("Warning: Reloading the view is not implemented. " "Please set `app.load_view` to support this.") return if app.view is not None: try: app.view.destroy() except: pass def wrapped(f): def safe_reload(*args, **kwargs): try: return f(*args, **kwargs) except: #: Display the error app.send_event(Command.ERROR, traceback.format_exc()) return safe_reload app.deferred_call(wrapped(app.load_view), app)
Called when the dev server wants to reload the view.
entailment
def do_hotswap(self, msg): """ Attempt to hotswap the code """ #: Show hotswap tooltip try: self.app.widget.showTooltip("Hot swapping...", now=True) except: pass self.save_changed_files(msg) hotswap = self.hotswap app = self.app try: print("Attempting hotswap....") with hotswap.active(): hotswap.update(app.view) except: #: Display the error app.send_event(Command.ERROR, traceback.format_exc())
Attempt to hotswap the code
entailment
def _update_proxy(self, change): """ An observer which sends the state change to the proxy. """ if change['type'] == 'event': name = 'do_'+change['name'] if hasattr(self.proxy, name): handler = getattr(self.proxy, name) handler() else: super(WebView, self)._update_proxy(change)
An observer which sends the state change to the proxy.
entailment
def create_widget(self): """ Create the underlying widget. A dialog is not a subclass of view, hence we don't set name as widget or children will try to use it as their parent. """ d = self.declaration style = d.style or '@style/Widget.DeviceDefault.PopupMenu' self.window = PopupWindow(self.get_context(), None, 0, style) self.showing = False
Create the underlying widget. A dialog is not a subclass of view, hence we don't set name as widget or children will try to use it as their parent.
entailment
def init_widget(self): """ Set the listeners """ w = self.window d = self.declaration self.set_background_color(d.background_color) self.set_touchable(d.touchable) self.set_outside_touchable(d.outside_touchable) # Listen for events w.setOnDismissListener(w.getId()) w.onDismiss.connect(self.on_dismiss) super(AndroidPopupWindow, self).init_widget()
Set the listeners
entailment
def init_layout(self): """ If a view is given show it """ super(AndroidPopupWindow, self).init_layout() #: Set the content for view in self.child_widgets(): self.window.setContentView(view) break #: Show it if needed d = self.declaration if d.show: self.set_show(d.show)
If a view is given show it
entailment
def child_added(self, child): """ Overwrite the content view """ view = child.widget if view is not None: self.window.setContentView(view)
Overwrite the content view
entailment
def destroy(self): """ A reimplemented destructor that cancels the dialog before destroying. """ super(AndroidPopupWindow, self).destroy() window = self.window if window: #: Clear the dismiss listener #: (or we get an error during the callback) window.setOnDismissListener(None) #window.dismiss() del self.window
A reimplemented destructor that cancels the dialog before destroying.
entailment
def update(self): """ Update the PopupWindow if it is currently showing. This avoids calling update during initialization. """ if not self.showing: return d = self.declaration self.set_show(d.show)
Update the PopupWindow if it is currently showing. This avoids calling update during initialization.
entailment
def refresh_items(self): """ Refresh the items of the pattern. This method destroys the old items and creates and initializes the new items. It is overridden to NOT insert the children to the parent. The Fragment adapter handles this. """ items = [] if self.condition: for nodes, key, f_locals in self.pattern_nodes: with new_scope(key, f_locals): for node in nodes: child = node(None) if isinstance(child, list): items.extend(child) else: items.append(child) for old in self.items: if not old.is_destroyed: old.destroy() #: Insert items into THIS node, NOT the PARENT #if len(items) > 0: # self.parent.insert_children(self, items) self.items = items
Refresh the items of the pattern. This method destroys the old items and creates and initializes the new items. It is overridden to NOT insert the children to the parent. The Fragment adapter handles this.
entailment
def init_widget(self): """ Initialize the underlying widget. """ super(AndroidSwipeRefreshLayout, self).init_widget() d = self.declaration w = self.widget if not d.enabled: self.set_enabled(d.enabled) if d.indicator_background_color: self.set_indicator_background_color(d.indicator_background_color) if d.indicator_color: self.set_indicator_color(d.indicator_color) if d.trigger_distance: self.set_trigger_distance(d.trigger_distance) w.onRefresh.connect(self.on_refresh) w.setOnRefreshListener(w.getId())
Initialize the underlying widget.
entailment
def load_module(self, mod): """ Load the extension using the load_dynamic method. """ try: return sys.modules[mod] except KeyError: pass lib = ExtensionImporter.extension_modules[mod] m = imp.load_dynamic(mod, lib) sys.modules[mod] = m return m
Load the extension using the load_dynamic method.
entailment
def create_widget(self): """ Create the underlying widget. """ d = self.declaration self.widget = TabLayout(self.get_context(), None, d.style)
Create the underlying widget.
entailment
def init_widget(self): """ Initialize the underlying widget. """ super(AndroidTabLayout, self).init_widget() w = self.widget w.addOnTabSelectedListener(w.getId()) w.onTabSelected.connect(self.on_tab_selected) w.onTabUnselected.connect(self.on_tab_unselected)
Initialize the underlying widget.
entailment
def destroy(self): """ Destroy all tabs when destroyed """ super(AndroidTabLayout, self).destroy() if self.tabs: del self.tabs
Destroy all tabs when destroyed
entailment
def update_atom_members(old, new): """ Update an atom member """ old_keys = old.members().keys() new_keys = new.members().keys() for key in old_keys: old_obj = getattr(old, key) try: new_obj = getattr(new, key) if old_obj == new_obj: continue except AttributeError: # Remove any obsolete members try: delattr(old, key) except (AttributeError, TypeError): pass continue try: #: Update any changed members #: TODO: We have to somehow know if this was changed by the user or the code! #: and ONLY update if it's due to the code changing! Without this, the entire concept #: is broken and useless... setattr(old, key, getattr(new, key)) except (AttributeError, TypeError): pass # skip non-writable attributes #: Add any new members for key in set(new_keys)-set(old_keys): try: setattr(old, key, getattr(new, key)) except (AttributeError, TypeError): pass
Update an atom member
entailment
def update_class_by_type(old, new): """ Update declarative classes or fallback on default """ autoreload.update_class(old, new) if isinstance2(old, new, AtomMeta): update_atom_members(old, new)
Update declarative classes or fallback on default
entailment
def check(self, check_all=True, do_reload=True): """Check whether some modules need to be reloaded.""" with enaml.imports(): super(EnamlReloader, self).check(check_all=check_all, do_reload=do_reload)
Check whether some modules need to be reloaded.
entailment
def update(self, old, new=None): """ Update given view declaration with new declaration Parameters ----------- old: Declarative The existing view instance that needs to be updated new: Declarative or None The new or reloaded view instance to that will be used to update the existing view. If none is given, one of the same type as old will be created and initialized with no attributes passed. """ #: Create and initialize if not new: new = type(old)() if not new.is_initialized: new.initialize() if self.debug: print("Updating {} with {}".format(old, new)) #: Update attrs, funcs, and bindings of this node self.update_attrs(old, new) self.update_funcs(old, new) self.update_bindings(old, new) #: Update any child pattern nodes before the children self.update_pattern_nodes(old, new) #: Now update any children self.update_children(old, new)
Update given view declaration with new declaration Parameters ----------- old: Declarative The existing view instance that needs to be updated new: Declarative or None The new or reloaded view instance to that will be used to update the existing view. If none is given, one of the same type as old will be created and initialized with no attributes passed.
entailment
def find_best_matching_node(self, new, old_nodes): """ Find the node that best matches the new node given the old nodes. If no good match exists return `None`. """ name = new.__class__.__name__ #: TODO: We should pick the BEST one from this list #: based on some "matching" criteria (such as matching ref name or params) matches = [c for c in old_nodes if name == c.__class__.__name__] if self.debug: print("Found matches for {}: {} ".format(new, matches)) return matches[0] if matches else None
Find the node that best matches the new node given the old nodes. If no good match exists return `None`.
entailment
def update_attrs(self, old, new):
    """ Update any `attr` members.

    Parameters
    -----------
    old: Declarative
        The existing view instance that needs to be updated
    new: Declarative
        The new view instance that should be used for updating
    """
    # Adopt the new node's storage dict when it has one
    storage = new._d_storage
    if storage:
        old._d_storage = storage
Update any `attr` members. Parameters ----------- old: Declarative The existing view instance that needs to be updated new: Declarative The new view instance that should be used for updating
entailment
def update_bindings(self, old, new):
    """ Update any enaml operator bindings.

    Copies the expression engine from the new node onto the old one,
    then re-runs every read expression so any dependent writes fire
    against the updated view.

    Parameters
    -----------
    old: Declarative
        The existing view instance that needs to be updated
    new: Declarative
        The new view instance that should be used for updating
    """
    #: Copy the Expression Engine
    if new._d_engine:
        old._d_engine = new._d_engine

    # NOTE(review): assumes old._d_engine is set at this point (either
    # copied above or already present) — a node with no engine at all
    # would raise AttributeError below. TODO confirm with callers.
    engine = old._d_engine

    #: Rerun any read expressions which should trigger
    #: any dependent writes
    for k in engine._handlers.keys():
        try:
            engine.update(old, k)
        except Exception:
            # Was a bare `except:` — narrowed so KeyboardInterrupt and
            # SystemExit still propagate. A failing binding is logged
            # (in debug mode) and skipped so one bad expression doesn't
            # abort the whole reload.
            if self.debug:
                print(traceback.format_exc())
Update any enaml operator bindings. Parameters ----------- old: Declarative The existing view instance that needs to be updated new: Declarative The new view instance that should be used for updating
entailment
def init_widget(self):
    """ Initialize the state of the toolkit widget.

    Called during the top-down pass, right after `create_widget()`.
    Pushes the declared progress value onto the widget; child widgets
    do not exist yet at this point.
    """
    super(UiKitProgressView, self).init_widget()
    progress = self.declaration.progress
    if progress:
        self.set_progress(progress)
Initialize the state of the toolkit widget. This method is called during the top-down pass, just after the 'create_widget()' method is called. This method should init the state of the widget. The child widgets will not yet be created.
entailment
def destroy(self):
    """ A reimplemented destructor.

    Drops the reference to the toolkit widget before delegating to the
    base destructor (which detaches the parent).
    """
    if self.widget is not None:
        del self.widget
    super(UiKitToolkitObject, self).destroy()
A reimplemented destructor. This destructor will clear the reference to the toolkit widget and set its parent to None.
entailment
def create_widget(self):
    """ Create the underlying label widget. """
    declaration = self.declaration
    context = self.get_context()
    self.widget = View(context, None, declaration.style)
Create the underlying label widget.
entailment
def init_widget(self):
    """ Initialize the underlying widget.

    Pushes only the values that were explicitly declared in the
    enamldef block for this node; everything else keeps its toolkit
    default. Updating just the declared members avoids forcing atom to
    materialize every property, saving initialization time and memory.
    Override `get_declared_keys` to return an empty list to opt out.
    """
    super(AndroidView, self).init_widget()
    # Only apply members that have read expressions declared
    for name, value in self.get_declared_items():
        setter = getattr(self, 'set_' + name, None)
        if setter is not None:
            setter(value)
Initialize the underlying widget. This reads all items declared in the enamldef block for this node and sets only the values that have been specified. All other values will be left as default. Doing it this way makes atom to only create the properties that need to be overridden from defaults thus greatly reducing the number of initialization checks, saving time and memory. If you don't want this to happen override `get_declared_keys` to return an empty list.
entailment
def get_declared_items(self):
    """ Yield the members that were set in the enamldef block for this
    Declaration.

    Layout keys are collected and yielded last as a single
    ``('layout', dict)`` pair so applying them triggers only one
    layout update.

    Returns
    -------
    result: Iterator of (k, v) pairs declared for this widget in enaml
    """
    d = self.declaration
    engine = d._d_engine
    if not engine:
        return
    deferred_layout = {}
    for name, handler in engine._handlers.items():
        # Only handlers with read operations were actually declared
        if not handler.read_pair:
            continue
        value = getattr(d, name)
        if name in LAYOUT_KEYS:
            deferred_layout[name] = value
        else:
            yield (name, value)
    if deferred_layout:
        yield ('layout', deferred_layout)
Get the members that were set in the enamldef block for this Declaration. Layout keys are grouped together until the end so as to avoid triggering multiple updates. Returns ------- result: List of (k,v) pairs that were defined for this widget in enaml List of keys and values
entailment
def on_key(self, view, key, event):
    """ Trigger the key event

    Parameters
    ----------
    view: int
        The ID of the view that sent this event
    key: int
        The code of the key that was pressed
    event: bytes
        The msgpack encoded key event

    Returns
    -------
    bool: whether the declaration's handler consumed the event
    """
    result = {'key': key, 'result': False}
    self.declaration.key_event(result)
    return result['result']
Trigger the key event Parameters ---------- view: int The ID of the view that sent this event key: int The code of the key that was pressed data: bytes The msgpack encoded key event
entailment
def on_touch(self, view, event):
    """ Trigger the touch event

    Parameters
    ----------
    view: int
        The ID of the view that sent this event
    event: bytes
        The msgpack encoded touch event

    Returns
    -------
    bool: whether the declaration's handler consumed the event
    """
    result = {'event': event, 'result': False}
    self.declaration.touch_event(result)
    return result['result']
Trigger the touch event Parameters ---------- view: int The ID of the view that sent this event data: bytes The msgpack encoded key event
entailment
def set_visible(self, visible):
    """ Set the visibility of the widget. """
    if visible:
        self.widget.setVisibility(View.VISIBILITY_VISIBLE)
    else:
        # GONE (not INVISIBLE): the widget also gives up its layout space
        self.widget.setVisibility(View.VISIBILITY_GONE)
Set the visibility of the widget.
entailment
def set_layout(self, layout):
    """ Set the LayoutParams of this widget.

    The properties that may be set depend on the parent, so actual
    creation of the params is delegated to the parent.

    Parameters
    ----------
    layout: Dict
        A dict of layout parameters the parent should use to lay out
        this child. Widget defaults are updated with the user values.
    """
    # Merge user-specified values over the widget defaults
    params = self.default_layout.copy()
    params.update(layout)

    # Only re-apply params to the widget if they already existed
    needs_update = self.layout_params is not None

    parent = self.parent()
    if not isinstance(parent, AndroidView):
        # Root node: apply our own layout and force a params update
        parent = self
        needs_update = True
    parent.apply_layout(self, params)

    if needs_update:
        self.widget.setLayoutParams(self.layout_params)
Sets the LayoutParams of this widget. Since the available properties that may be set for the layout params depends on the parent, actual creation of the params is delegated to the parent Parameters ---------- layout: Dict A dict of layout parameters the parent should used to layout this child. The widget defaults are updated with user passed values.
entailment
def create_layout_params(self, child, layout):
    """ Create the LayoutParams for a child with its requested layout
    parameters.

    Subclasses should override this as needed for layout-specific
    parameter types.

    Parameters
    ----------
    child: AndroidView
        A view to create layout params for.
    layout: Dict
        A dict of layout parameters to use to create the layout.

    Returns
    -------
    layout_params: LayoutParams
        A LayoutParams bridge object with the requested layout options.
    """
    dp = self.dp
    width = coerce_size(layout.get('width', 'wrap_content'))
    height = coerce_size(layout.get('height', 'wrap_content'))
    # Negative values are special constants (match/wrap); non-negative
    # values are densities-independent pixels to be scaled
    if width >= 0:
        width = int(width * dp)
    if height >= 0:
        height = int(height * dp)
    params = self.layout_param_type(width, height)
    margin = layout.get('margin')
    if margin:
        l, t, r, b = margin
        params.setMargins(int(l * dp), int(t * dp),
                          int(r * dp), int(b * dp))
    return params
Create the LayoutParams for a child with it's requested layout parameters. Subclasses should override this as needed to handle layout specific needs. Parameters ---------- child: AndroidView A view to create layout params for. layout: Dict A dict of layout parameters to use to create the layout. Returns ------- layout_params: LayoutParams A LayoutParams bridge object with the requested layout options.
entailment
def apply_layout(self, child, layout):
    """ Apply a layout to a child.

    Sets the layout_params of the child, which is later used during
    the `init_layout` pass. Subclasses should override this as needed
    for ViewGroup-specific layout handling.

    Parameters
    ----------
    child: AndroidView
        A view to create layout params for.
    layout: Dict
        A dict of layout parameters to use to create the layout.
    """
    layout_params = child.layout_params
    if not layout_params:
        layout_params = self.create_layout_params(child, layout)
    w = child.widget
    if w:
        dp = self.dp
        # padding
        if 'padding' in layout:
            l, t, r, b = layout['padding']
            w.setPadding(int(l * dp), int(t * dp),
                         int(r * dp), int(b * dp))
        # left, top, right, bottom (integer dp positions)
        for key, setter in (('left', 'setLeft'), ('top', 'setTop'),
                            ('right', 'setRight'), ('bottom', 'setBottom')):
            if key in layout:
                getattr(w, setter)(int(layout[key] * dp))
        # x, y, z (float dp translations)
        for key, setter in (('x', 'setX'), ('y', 'setY'), ('z', 'setZ')):
            if key in layout:
                getattr(w, setter)(layout[key] * dp)
        # min width and height only; maximum is not supported by
        # AndroidViews (without flexbox)
        if 'min_height' in layout:
            w.setMinimumHeight(int(layout['min_height'] * dp))
        if 'min_width' in layout:
            w.setMinimumWidth(int(layout['min_width'] * dp))
    child.layout_params = layout_params
Apply a layout to a child. This sets the layout_params of the child which is later used during the `init_layout` pass. Subclasses should override this as needed to handle layout specific needs of the ViewGroup. Parameters ---------- child: AndroidView A view to create layout params for. layout: Dict A dict of layout parameters to use to create the layout.
entailment
def default(cls):
    """ Get the first available event loop implementation based on
    which packages are installed.

    Preference order is tornado, twisted, then the builtin loop.
    """
    candidates = (TornadoEventLoop, TwistedEventLoop, BuiltinEventLoop)
    with enamlnative.imports():
        for impl in candidates:
            if impl.available():
                print("Using {} event loop!".format(impl))
                return impl()
        raise RuntimeError("No event loop implementation is available. "
                           "Install tornado or twisted.")
Get the first available event loop implementation based on which packages are installed.
entailment
def log_error(self, callback, error=None):
    """ Log the error that occurred when running the given callback. """
    for line in ("Uncaught error during callback: {}".format(callback),
                 "Error: {}".format(error)):
        print(line)
Log the error that occurred when running the given callback.
entailment
def deferred_call(self, callback, *args, **kwargs):
    """ Schedule a callback to run on the next reactor iteration.

    The reactor is explicitly woken after scheduling: otherwise it may
    have computed a long sleep interval, and events arriving during
    that period would not be processed until it wakes on its own.
    """
    loop = self.loop
    handle = loop.callLater(0, callback, *args, **kwargs)
    loop.wakeUp()
    return handle
We have to wake up the reactor after every call because it may calculate a long delay where it can sleep which causes events that happen during this period to seem really slow as they do not get processed until after the reactor "wakes up"
entailment
def timed_call(self, ms, callback, *args, **kwargs):
    """ Schedule a callback to run after `ms` milliseconds.

    The reactor is explicitly woken after scheduling: otherwise it may
    have computed a long sleep interval, and events arriving during
    that period would not be processed until it wakes on its own.
    """
    loop = self.loop
    # Reactor API takes seconds, not milliseconds
    handle = loop.callLater(ms / 1000.0, callback, *args, **kwargs)
    loop.wakeUp()
    return handle
We have to wake up the reactor after every call because it may calculate a long delay where it can sleep which causes events that happen during this period to seem really slow as they do not get processed until after the reactor "wakes up"
entailment
def init_widget(self):
    """ Initialize the underlying widget.

    Enables yoga layout on the widget, then pushes only the values
    that were explicitly declared in the enamldef block for this node;
    everything else keeps its toolkit default. This avoids forcing
    atom to materialize every property, saving initialization time and
    memory. Override `get_declared_keys` to return an empty list to
    opt out.
    """
    super(UiKitView, self).init_widget()
    self.widget.yoga.isEnabled = True
    # Only apply members that have read expressions declared
    for name, value in self.get_declared_items():
        setter = getattr(self, 'set_' + name, None)
        if setter is not None:
            setter(value)
Initialize the underlying widget. This reads all items declared in the enamldef block for this node and sets only the values that have been specified. All other values will be left as default. Doing it this way makes atom to only create the properties that need to be overridden from defaults thus greatly reducing the number of initialization checks, saving time and memory. If you don't want this to happen override `get_declared_keys` to return an empty list.
entailment