| _id (string, 2-7 chars) | title (string, 1-88 chars) | partition (3 classes) | text (string, 75-19.8k chars) | language (1 class) | meta_information (dict) |
|---|---|---|---|---|---|
q9700
|
RemoteServiceAdminEvent.fromexportreg
|
train
|
def fromexportreg(cls, bundle, export_reg):
# type: (Bundle, ExportRegistration) -> RemoteServiceAdminEvent
"""
Creates a RemoteServiceAdminEvent object from an ExportRegistration
"""
exc = export_reg.get_exception()
if exc:
return RemoteServiceAdminEvent(
RemoteServiceAdminEvent.EXPORT_ERROR,
bundle,
export_reg.get_export_container_id(),
export_reg.get_remoteservice_id(),
None,
None,
exc,
export_reg.get_description(),
)
return RemoteServiceAdminEvent(
RemoteServiceAdminEvent.EXPORT_REGISTRATION,
bundle,
export_reg.get_export_container_id(),
export_reg.get_remoteservice_id(),
None,
export_reg.get_export_reference(),
None,
export_reg.get_description(),
)
|
python
|
{
"resource": ""
}
|
q9701
|
RemoteServiceAdminEvent.fromexportupdate
|
train
|
def fromexportupdate(cls, bundle, export_reg):
# type: (Bundle, ExportRegistration) -> RemoteServiceAdminEvent
"""
Creates a RemoteServiceAdminEvent object from the update of an
ExportRegistration
"""
exc = export_reg.get_exception()
if exc:
return RemoteServiceAdminEvent(
RemoteServiceAdminEvent.EXPORT_ERROR,
bundle,
export_reg.get_export_container_id(),
export_reg.get_remoteservice_id(),
None,
export_reg.get_export_reference(),
exc,
export_reg.get_description(),
)
return RemoteServiceAdminEvent(
RemoteServiceAdminEvent.EXPORT_UPDATE,
bundle,
export_reg.get_export_container_id(),
export_reg.get_remoteservice_id(),
None,
export_reg.get_export_reference(),
None,
export_reg.get_description(),
)
|
python
|
{
"resource": ""
}
|
q9702
|
RemoteServiceAdminEvent.fromimportupdate
|
train
|
def fromimportupdate(cls, bundle, import_reg):
# type: (Bundle, ImportRegistration) -> RemoteServiceAdminEvent
"""
Creates a RemoteServiceAdminEvent object from the update of an
ImportRegistration
"""
exc = import_reg.get_exception()
if exc:
return RemoteServiceAdminEvent(
RemoteServiceAdminEvent.IMPORT_ERROR,
bundle,
import_reg.get_import_container_id(),
import_reg.get_remoteservice_id(),
None,
None,
exc,
import_reg.get_description(),
)
return RemoteServiceAdminEvent(
RemoteServiceAdminEvent.IMPORT_UPDATE,
bundle,
import_reg.get_import_container_id(),
import_reg.get_remoteservice_id(),
import_reg.get_import_reference(),
None,
None,
import_reg.get_description(),
)
|
python
|
{
"resource": ""
}
|
q9703
|
RemoteServiceAdminEvent.fromimportunreg
|
train
|
def fromimportunreg(
cls, bundle, cid, rsid, import_ref, exception, endpoint
):
# type: (Bundle, Tuple[str, str], Tuple[Tuple[str, str], int], ImportReference, Optional[Tuple[Any, Any, Any]], EndpointDescription) -> RemoteServiceAdminEvent
"""
Creates a RemoteServiceAdminEvent object from the departure of an
ImportRegistration
"""
return RemoteServiceAdminEvent(
typ=RemoteServiceAdminEvent.IMPORT_UNREGISTRATION,
bundle=bundle,
cid=cid,
rsid=rsid,
import_ref=import_ref,
exception=exception,
endpoint=endpoint,
)
|
python
|
{
"resource": ""
}
|
q9704
|
RemoteServiceAdminEvent.fromexportunreg
|
train
|
def fromexportunreg(
cls, bundle, exporterid, rsid, export_ref, exception, endpoint
):
# type: (Bundle, Tuple[str, str], Tuple[Tuple[str, str], int], ExportReference, Optional[Tuple[Any, Any, Any]], EndpointDescription) -> RemoteServiceAdminEvent
"""
Creates a RemoteServiceAdminEvent object from the departure of an
ExportRegistration
"""
return RemoteServiceAdminEvent(
typ=RemoteServiceAdminEvent.EXPORT_UNREGISTRATION,
bundle=bundle,
cid=exporterid,
rsid=rsid,
export_ref=export_ref,
exception=exception,
endpoint=endpoint,
)
|
python
|
{
"resource": ""
}
|
q9705
|
RemoteServiceAdminEvent.fromimporterror
|
train
|
def fromimporterror(cls, bundle, importerid, rsid, exception, endpoint):
# type: (Bundle, Tuple[str, str], Tuple[Tuple[str, str], int], Optional[Tuple[Any, Any, Any]], EndpointDescription) -> RemoteServiceAdminEvent
"""
Creates a RemoteServiceAdminEvent object from an import error
"""
return RemoteServiceAdminEvent(
RemoteServiceAdminEvent.IMPORT_ERROR,
bundle,
importerid,
rsid,
None,
None,
exception,
endpoint,
)
|
python
|
{
"resource": ""
}
|
q9706
|
RemoteServiceAdminEvent.fromexporterror
|
train
|
def fromexporterror(cls, bundle, exporterid, rsid, exception, endpoint):
# type: (Bundle, Tuple[str, str], Tuple[Tuple[str, str], int], Optional[Tuple[Any, Any, Any]], EndpointDescription) -> RemoteServiceAdminEvent
"""
Creates a RemoteServiceAdminEvent object from an export error
"""
return RemoteServiceAdminEvent(
RemoteServiceAdminEvent.EXPORT_ERROR,
bundle,
exporterid,
rsid,
None,
None,
exception,
endpoint,
)
|
python
|
{
"resource": ""
}
|
q9707
|
IPopoWaitingList._try_instantiate
|
train
|
def _try_instantiate(self, ipopo, factory, component):
# type: (Any, str, str) -> None
"""
Tries to instantiate a component from the queue. Hides all exceptions.
:param ipopo: The iPOPO service
:param factory: Component factory
:param component: Component name
"""
try:
# Get component properties
with self.__lock:
properties = self.__queue[factory][component]
except KeyError:
# Component not in queue
return
else:
try:
# Try instantiation
ipopo.instantiate(factory, component, properties)
except TypeError:
# Unknown factory: try later
pass
except ValueError as ex:
# Already known component
_logger.error("Component already running: %s", ex)
except Exception as ex:
# Other error
_logger.exception("Error instantiating component: %s", ex)
|
python
|
{
"resource": ""
}
|
q9708
|
IPopoWaitingList.service_changed
|
train
|
def service_changed(self, event):
# type: (ServiceEvent) -> None
"""
Handles an event about the iPOPO service
"""
kind = event.get_kind()
if kind == ServiceEvent.REGISTERED:
# iPOPO service registered: register to factory events
with use_ipopo(self.__context) as ipopo:
ipopo.add_listener(self)
|
python
|
{
"resource": ""
}
|
q9709
|
IPopoWaitingList.handle_ipopo_event
|
train
|
def handle_ipopo_event(self, event):
# type: (IPopoEvent) -> None
"""
Handles an iPOPO event
:param event: iPOPO event bean
"""
kind = event.get_kind()
if kind == IPopoEvent.REGISTERED:
# A factory has been registered
try:
with use_ipopo(self.__context) as ipopo:
factory = event.get_factory_name()
with self.__lock:
# Copy the list of components names for this factory
components = self.__queue[factory].copy()
for component in components:
self._try_instantiate(ipopo, factory, component)
except BundleException:
# iPOPO not yet started
pass
except KeyError:
# No components for this new factory
pass
|
python
|
{
"resource": ""
}
|
q9710
|
IPopoWaitingList.add
|
train
|
def add(self, factory, component, properties=None):
# type: (str, str, dict) -> None
"""
Enqueues the instantiation of the given component
:param factory: Factory name
:param component: Component name
:param properties: Component properties
:raise ValueError: Component name already reserved in the queue
:raise Exception: Error instantiating the component
"""
with self.__lock:
if component in self.__names:
raise ValueError(
"Component name already queued: {0}".format(component)
)
# Normalize properties
if properties is None:
properties = {}
# Store component description
self.__names[component] = factory
self.__queue.setdefault(factory, {})[component] = properties
try:
with use_ipopo(self.__context) as ipopo:
# Try to instantiate the component right now
self._try_instantiate(ipopo, factory, component)
except BundleException:
# iPOPO not yet started
pass
|
python
|
{
"resource": ""
}
|
q9711
|
_create_server
|
train
|
def _create_server(
shell,
server_address,
port,
cert_file=None,
key_file=None,
key_password=None,
ca_file=None,
):
"""
Creates the TCP console on the given address and port
:param shell: The remote shell handler
:param server_address: Server bound address
:param port: Server port
:param cert_file: Path to the server certificate
:param key_file: Path to the server private key
:param key_password: Password for the key file
:param ca_file: Path to Certificate Authority to authenticate clients
:return: A tuple: Server thread, TCP server object, Server active flag
"""
# Set up the request handler creator
active_flag = SharedBoolean(True)
def request_handler(*rh_args):
"""
Constructs a RemoteConsole as TCP request handler
"""
return RemoteConsole(shell, active_flag, *rh_args)
# Set up the server
server = ThreadingTCPServerFamily(
(server_address, port),
request_handler,
cert_file,
key_file,
key_password,
ca_file,
)
# Set flags
server.daemon_threads = True
server.allow_reuse_address = True
# Activate the server
server.server_bind()
server.server_activate()
# Serve clients
server_thread = threading.Thread(
target=server.serve_forever, name="RemoteShell-{0}".format(port)
)
server_thread.daemon = True
server_thread.start()
return server_thread, server, active_flag
|
python
|
{
"resource": ""
}
|
q9712
|
_run_interpreter
|
train
|
def _run_interpreter(variables, banner):
"""
Runs a Python interpreter console and blocks until the user exits it.
:param variables: Interpreters variables (locals)
:param banner: Start-up banners
"""
# Script-only imports
import code
try:
import readline
import rlcompleter
readline.set_completer(rlcompleter.Completer(variables).complete)
readline.parse_and_bind("tab: complete")
except ImportError:
# readline is not available: ignore
pass
# Start the console
shell = code.InteractiveConsole(variables)
shell.interact(banner)
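# A minimal usage sketch (hypothetical variables; blocks until the user exits):
#   _run_interpreter({"context": bundle_context}, "Pelix interactive console")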
|
python
|
{
"resource": ""
}
|
q9713
|
RemoteConsole.send
|
train
|
def send(self, data):
"""
Tries to send data to the client.
:param data: Data to be sent
:return: True if the data was sent, False on error
"""
if data is not None:
data = data.encode("UTF-8")
try:
self.wfile.write(data)
self.wfile.flush()
return True
except IOError:
# An error occurred, mask it
# -> This lets the command be handled even if the client has already
# disconnected (e.g. "echo stop 0 | nc localhost 9000")
return False
|
python
|
{
"resource": ""
}
|
q9714
|
RemoteConsole.handle
|
train
|
def handle(self):
"""
Handles a TCP client
"""
_logger.info(
"RemoteConsole client connected: [%s]:%d",
self.client_address[0],
self.client_address[1],
)
# Prepare the session
session = beans.ShellSession(
beans.IOHandler(self.rfile, self.wfile),
{"remote_client_ip": self.client_address[0]},
)
# Print the banner
def get_ps1():
"""
Gets the prompt string from the session of the shell service
:return: The prompt string
"""
try:
return session.get("PS1")
except KeyError:
return self._shell.get_ps1()
self.send(self._shell.get_banner())
self.send(get_ps1())
try:
while self._active.get_value():
# Wait for data
rlist = select([self.connection], [], [], .5)[0]
if not rlist:
# Nothing to do (poll timed out)
continue
data = self.rfile.readline()
if not data:
# End of stream (client gone)
break
# Strip the line
line = data.strip()
if not line:
# Empty line
continue
# Execute it
try:
self._shell.handle_line(line, session)
except KeyboardInterrupt:
# Stop there on interruption
self.send("\nInterruption received.")
return
except IOError as ex:
# I/O errors are fatal
_logger.exception(
"Error communicating with a client: %s", ex
)
break
except Exception as ex:
# Other exceptions are not important
import traceback
self.send("\nError during last command: {0}\n".format(ex))
self.send(traceback.format_exc())
# Print the prompt
self.send(get_ps1())
finally:
_logger.info(
"RemoteConsole client gone: [%s]:%d",
self.client_address[0],
self.client_address[1],
)
try:
# Be polite
self.send("\nSession closed. Good bye.\n")
self.finish()
except IOError as ex:
_logger.warning("Error cleaning up connection: %s", ex)
|
python
|
{
"resource": ""
}
|
q9715
|
ThreadingTCPServerFamily.get_request
|
train
|
def get_request(self):
"""
Accepts a new client. Sets up SSL wrapping if necessary.
:return: A tuple: (client socket, client address tuple)
"""
# Accept the client
client_socket, client_address = self.socket.accept()
if ssl is not None and self.cert_file:
# Setup an SSL context to accept clients with a certificate
# signed by a known chain of authority.
# Other clients will be rejected during handshake.
context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
try:
# Force a valid/signed client-side certificate
context.verify_mode = ssl.CERT_REQUIRED
# Load the server certificate
context.load_cert_chain(
certfile=self.cert_file,
keyfile=self.key_file,
password=self.key_password,
)
if self.ca_file:
# Load the given authority chain
context.load_verify_locations(self.ca_file)
else:
# Load the default chain if none given
context.load_default_certs(ssl.Purpose.CLIENT_AUTH)
except Exception as ex:
# Explicitly log the error as the default behaviour hides it
_logger.error("Error setting up the SSL context: %s", ex)
raise
try:
# SSL handshake
client_stream = context.wrap_socket(
client_socket, server_side=True
)
except ssl.SSLError as ex:
# Explicitly log the exception before re-raising it
_logger.warning(
"Error during SSL handshake with %s: %s", client_address, ex
)
raise
else:
# Nothing to do, use the raw socket
client_stream = client_socket
return client_stream, client_address
|
python
|
{
"resource": ""
}
|
q9716
|
ThreadingTCPServerFamily.process_request
|
train
|
def process_request(self, request, client_address):
"""
Starts a new thread to process the request, adding the client address
in its name.
"""
thread = threading.Thread(
name="RemoteShell-{0}-Client-{1}".format(
self.server_address[1], client_address[:2]
),
target=self.process_request_thread,
args=(request, client_address),
)
thread.daemon = self.daemon_threads
thread.start()
|
python
|
{
"resource": ""
}
|
q9717
|
_HTTPServletRequest.get_header
|
train
|
def get_header(self, name, default=None):
"""
Retrieves the value of a header
"""
return self._handler.headers.get(name, default)
|
python
|
{
"resource": ""
}
|
q9718
|
_HTTPServletResponse.end_headers
|
train
|
def end_headers(self):
"""
Ends the headers part
"""
# Send them all at once
for name, value in self._headers.items():
self._handler.send_header(name, value)
self._handler.end_headers()
|
python
|
{
"resource": ""
}
|
q9719
|
_RequestHandler.log_error
|
train
|
def log_error(self, message, *args, **kwargs):
# pylint: disable=W0221
"""
Log server error
"""
self._service.log(logging.ERROR, message, *args, **kwargs)
|
python
|
{
"resource": ""
}
|
q9720
|
_RequestHandler.log_request
|
train
|
def log_request(self, code="-", size="-"):
"""
Logs a request to the server
"""
self._service.log(logging.DEBUG, '"%s" %s', self.requestline, code)
|
python
|
{
"resource": ""
}
|
q9721
|
_RequestHandler.send_no_servlet_response
|
train
|
def send_no_servlet_response(self):
"""
Default response sent when no servlet is found for the requested path
"""
# Use the helper to send the error page
response = _HTTPServletResponse(self)
response.send_content(404, self._service.make_not_found_page(self.path))
|
python
|
{
"resource": ""
}
|
q9722
|
_RequestHandler.send_exception
|
train
|
def send_exception(self, response):
"""
Sends an exception page with a 500 error code.
Must be called from inside the exception handling block.
:param response: The response handler
"""
# Get a formatted stack trace
stack = traceback.format_exc()
# Log the error
self.log_error(
"Error handling request upon: %s\n%s\n", self.path, stack
)
# Send the page
response.send_content(
500, self._service.make_exception_page(self.path, stack)
)
|
python
|
{
"resource": ""
}
|
q9723
|
_HttpServerFamily.server_bind
|
train
|
def server_bind(self):
"""
Override server_bind to store the server name, even in IronPython.
See https://ironpython.codeplex.com/workitem/29477
"""
TCPServer.server_bind(self)
host, port = self.socket.getsockname()[:2]
self.server_port = port
try:
self.server_name = socket.getfqdn(host)
except ValueError:
# Use the local host name in case of error, like CPython does
self.server_name = socket.gethostname()
|
python
|
{
"resource": ""
}
|
q9724
|
encode_list
|
train
|
def encode_list(key, list_):
# type: (str, Iterable) -> Dict[str, str]
"""
Converts a list into a space-separated string and puts it in a dictionary
:param key: Dictionary key to store the list
:param list_: A list of objects
:return: A dictionary key->string or an empty dictionary
"""
if not list_:
return {}
return {key: " ".join(str(i) for i in list_)}
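# A minimal usage sketch of encode_list:
#   encode_list("objectClass", ["IFoo", "IBar"])  -> {"objectClass": "IFoo IBar"}
#   encode_list("objectClass", [])                -> {}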
|
python
|
{
"resource": ""
}
|
q9725
|
package_name
|
train
|
def package_name(package):
# type: (str) -> str
"""
Returns the package name of the given module name
"""
if not package:
return ""
lastdot = package.rfind(".")
if lastdot == -1:
return package
return package[:lastdot]
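# A minimal usage sketch of package_name:
#   package_name("org.example.api.IFoo") -> "org.example.api"
#   package_name("IFoo")                 -> "IFoo"  (no dot: returned as-is)
#   package_name("")                     -> ""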
|
python
|
{
"resource": ""
}
|
q9726
|
encode_osgi_props
|
train
|
def encode_osgi_props(ed):
# type: (EndpointDescription) -> Dict[str, str]
"""
Prepares a dictionary of OSGi properties for the given EndpointDescription
"""
result_props = {}
intfs = ed.get_interfaces()
result_props[OBJECTCLASS] = " ".join(intfs)
for intf in intfs:
pkg_name = package_name(intf)
ver = ed.get_package_version(pkg_name)
if ver and not ver == (0, 0, 0):
result_props[ENDPOINT_PACKAGE_VERSION_ + pkg_name] = ".".join(
str(v) for v in ver
)
result_props[ENDPOINT_ID] = ed.get_id()
result_props[ENDPOINT_SERVICE_ID] = "{0}".format(ed.get_service_id())
result_props[ENDPOINT_FRAMEWORK_UUID] = ed.get_framework_uuid()
imp_configs = ed.get_imported_configs()
if imp_configs:
result_props[SERVICE_IMPORTED_CONFIGS] = " ".join(
ed.get_imported_configs()
)
intents = ed.get_intents()
if intents:
result_props[SERVICE_INTENTS] = " ".join(intents)
remote_configs = ed.get_remote_configs_supported()
if remote_configs:
result_props[REMOTE_CONFIGS_SUPPORTED] = " ".join(remote_configs)
remote_intents = ed.get_remote_intents_supported()
if remote_intents:
result_props[REMOTE_INTENTS_SUPPORTED] = " ".join(remote_intents)
return result_props
|
python
|
{
"resource": ""
}
|
q9727
|
decode_list
|
train
|
def decode_list(input_props, name):
# type: (Dict[str, str], str) -> List[str]
"""
Decodes a space-separated list
"""
val_str = input_props.get(name, None)
if val_str:
return val_str.split(" ")
return []
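# A minimal usage sketch of decode_list (the inverse of encode_list above):
#   decode_list({"objectClass": "IFoo IBar"}, "objectClass") -> ["IFoo", "IBar"]
#   decode_list({}, "objectClass")                           -> []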
|
python
|
{
"resource": ""
}
|
q9728
|
decode_osgi_props
|
train
|
def decode_osgi_props(input_props):
# type: (Dict[str, Any]) -> Dict[str, Any]
"""
Decodes the OSGi properties of the given endpoint properties
"""
result_props = {}
intfs = decode_list(input_props, OBJECTCLASS)
result_props[OBJECTCLASS] = intfs
for intf in intfs:
package_key = ENDPOINT_PACKAGE_VERSION_ + package_name(intf)
intfversionstr = input_props.get(package_key, None)
if intfversionstr:
result_props[package_key] = intfversionstr
result_props[ENDPOINT_ID] = input_props[ENDPOINT_ID]
result_props[ENDPOINT_SERVICE_ID] = input_props[ENDPOINT_SERVICE_ID]
result_props[ENDPOINT_FRAMEWORK_UUID] = input_props[ENDPOINT_FRAMEWORK_UUID]
imp_configs = decode_list(input_props, SERVICE_IMPORTED_CONFIGS)
if imp_configs:
result_props[SERVICE_IMPORTED_CONFIGS] = imp_configs
intents = decode_list(input_props, SERVICE_INTENTS)
if intents:
result_props[SERVICE_INTENTS] = intents
remote_configs = decode_list(input_props, REMOTE_CONFIGS_SUPPORTED)
if remote_configs:
result_props[REMOTE_CONFIGS_SUPPORTED] = remote_configs
remote_intents = decode_list(input_props, REMOTE_INTENTS_SUPPORTED)
if remote_intents:
result_props[REMOTE_INTENTS_SUPPORTED] = remote_intents
return result_props
|
python
|
{
"resource": ""
}
|
q9729
|
decode_endpoint_props
|
train
|
def decode_endpoint_props(input_props):
# type: (Dict) -> Dict[str, Any]
"""
Decodes the endpoint properties from the given dictionary
"""
ed_props = decode_osgi_props(input_props)
ed_props[ECF_ENDPOINT_CONTAINERID_NAMESPACE] = input_props[
ECF_ENDPOINT_CONTAINERID_NAMESPACE
]
ed_props[ECF_RSVC_ID] = int(input_props[ECF_RSVC_ID])
ed_props[ECF_ENDPOINT_ID] = input_props[ECF_ENDPOINT_ID]
ed_props[ECF_ENDPOINT_TIMESTAMP] = int(input_props[ECF_ENDPOINT_TIMESTAMP])
target_id = input_props.get(ECF_ENDPOINT_CONNECTTARGET_ID, None)
if target_id:
ed_props[ECF_ENDPOINT_CONNECTTARGET_ID] = target_id
id_filters = decode_list(input_props, ECF_ENDPOINT_IDFILTER_IDS)
if id_filters:
ed_props[ECF_ENDPOINT_IDFILTER_IDS] = id_filters
rs_filter = input_props.get(ECF_ENDPOINT_REMOTESERVICE_FILTER, None)
if rs_filter:
ed_props[ECF_ENDPOINT_REMOTESERVICE_FILTER] = rs_filter
async_intfs = input_props.get(ECF_SERVICE_EXPORTED_ASYNC_INTERFACES, None)
if async_intfs:
if async_intfs == "*":
ed_props[ECF_SERVICE_EXPORTED_ASYNC_INTERFACES] = async_intfs
else:
async_intfs = decode_list(
input_props, ECF_SERVICE_EXPORTED_ASYNC_INTERFACES
)
if async_intfs:
ed_props[ECF_SERVICE_EXPORTED_ASYNC_INTERFACES] = async_intfs
for key in input_props.keys():
if not is_reserved_property(key):
val = input_props.get(key, None)
if val:
ed_props[key] = val
return ed_props
|
python
|
{
"resource": ""
}
|
q9730
|
encode_endpoint_props
|
train
|
def encode_endpoint_props(ed):
"""
Encodes the properties of the given EndpointDescription
"""
props = encode_osgi_props(ed)
props[ECF_RSVC_ID] = "{0}".format(ed.get_remoteservice_id()[1])
props[ECF_ENDPOINT_ID] = "{0}".format(ed.get_container_id()[1])
props[ECF_ENDPOINT_CONTAINERID_NAMESPACE] = "{0}".format(
ed.get_container_id()[0]
)
props[ECF_ENDPOINT_TIMESTAMP] = "{0}".format(ed.get_timestamp())
ctid = ed.get_connect_target_id()
if ctid:
props[ECF_ENDPOINT_CONNECTTARGET_ID] = "{0}".format(ctid)
id_filters = ed.get_id_filters()
if id_filters:
props[ECF_ENDPOINT_IDFILTER_IDS] = " ".join([x[1] for x in id_filters])
rs_filter = ed.get_remoteservice_filter()
if rs_filter:
props[ECF_ENDPOINT_REMOTESERVICE_FILTER] = ed.get_remoteservice_filter()
async_intfs = ed.get_async_interfaces()
if async_intfs:
props[ECF_SERVICE_EXPORTED_ASYNC_INTERFACES] = " ".join(async_intfs)
all_props = ed.get_properties()
other_props = {
key: all_props[key]
for key in all_props.keys()
if not is_reserved_property(key)
}
return merge_dicts(props, other_props)
|
python
|
{
"resource": ""
}
|
q9731
|
EndpointDescription.is_same_service
|
train
|
def is_same_service(self, endpoint):
# type: (EndpointDescription) -> bool
"""
Tests if this endpoint and the given one have the same framework UUID
and service ID
:param endpoint: Another endpoint
:return: True if both endpoints represent the same remote service
"""
return (
self.get_framework_uuid() == endpoint.get_framework_uuid()
and self.get_service_id() == endpoint.get_service_id()
)
|
python
|
{
"resource": ""
}
|
q9732
|
MqttClient.generate_id
|
train
|
def generate_id(cls, prefix="pelix-"):
"""
Generates a random MQTT client ID
:param prefix: Client ID prefix (truncated to 8 chars)
:return: A client ID of 22 or 23 characters
"""
if not prefix:
# Normalize string
prefix = ""
else:
# Truncate long prefixes
prefix = prefix[:8]
# Prepare the missing part
nb_bytes = (23 - len(prefix)) // 2
random_bytes = os.urandom(nb_bytes)
if sys.version_info[0] >= 3:
random_ints = [char for char in random_bytes]
else:
random_ints = [ord(char) for char in random_bytes]
random_id = "".join("{0:02x}".format(value) for value in random_ints)
return "{0}{1}".format(prefix, random_id)
|
python
|
{
"resource": ""
}
|
q9733
|
MqttClient.set_will
|
train
|
def set_will(self, topic, payload, qos=0, retain=False):
"""
Sets up the will message
:param topic: Topic of the will message
:param payload: Content of the message
:param qos: Quality of Service
:param retain: The message will be retained
:raise ValueError: Invalid topic
:raise TypeError: Invalid payload
"""
self.__mqtt.will_set(topic, payload, qos, retain=retain)
|
python
|
{
"resource": ""
}
|
q9734
|
MqttClient.connect
|
train
|
def connect(self, host="localhost", port=1883, keepalive=60):
"""
Connects to the MQTT server. The client will automatically try to
reconnect to this server when the connection is lost.
:param host: MQTT server host
:param port: MQTT server port
:param keepalive: Maximum period in seconds between communications with
the broker
:raise ValueError: Invalid host or port
"""
# Disconnect first (it also stops the timer)
self.disconnect()
# Prepare the connection
self.__mqtt.connect(host, port, keepalive)
# Start the MQTT loop
self.__mqtt.loop_start()
|
python
|
{
"resource": ""
}
|
q9735
|
MqttClient.disconnect
|
train
|
def disconnect(self):
"""
Disconnects from the MQTT server
"""
# Stop the timer
self.__stop_timer()
# Unlock all publishers
for event in self.__in_flight.values():
event.set()
# Disconnect from the server
self.__mqtt.disconnect()
# Stop the MQTT loop thread
# Use a thread to avoid a dead lock in Paho
thread = threading.Thread(target=self.__mqtt.loop_stop)
thread.daemon = True
thread.start()
# Give it some time
thread.join(4)
|
python
|
{
"resource": ""
}
|
q9736
|
MqttClient.publish
|
train
|
def publish(self, topic, payload, qos=0, retain=False, wait=False):
"""
Sends a message through the MQTT connection
:param topic: Message topic
:param payload: Message content
:param qos: Quality of Service
:param retain: Retain flag
:param wait: If True, prepares an event to wait for the message to be
published
:return: The local message ID, None on error
"""
result = self.__mqtt.publish(topic, payload, qos, retain)
if wait and not result[0]:
# Publish packet sent, wait for it to return
self.__in_flight[result[1]] = threading.Event()
_logger.debug("Waiting for publication of %s", topic)
return result[1]
|
python
|
{
"resource": ""
}
|
q9737
|
MqttClient.__start_timer
|
train
|
def __start_timer(self, delay):
"""
Starts the reconnection timer
:param delay: Delay (in seconds) before calling the reconnection method
"""
self.__timer = threading.Timer(delay, self.__reconnect)
self.__timer.daemon = True
self.__timer.start()
|
python
|
{
"resource": ""
}
|
q9738
|
MqttClient.__reconnect
|
train
|
def __reconnect(self):
"""
Tries to connect to the MQTT server
"""
# Cancel the timer, if any
self.__stop_timer()
try:
# Try to reconnect the server
result_code = self.__mqtt.reconnect()
if result_code:
# Something wrong happened
message = "Error connecting the MQTT server: {0} ({1})".format(
result_code, paho.error_string(result_code)
)
_logger.error(message)
raise ValueError(message)
except Exception as ex:
# Something went wrong: log it
_logger.error("Exception connecting server: %s", ex)
finally:
# Prepare a reconnection timer. It will be cancelled by the
# on_connect callback
self.__start_timer(10)
|
python
|
{
"resource": ""
}
|
q9739
|
MqttClient.__on_connect
|
train
|
def __on_connect(self, client, userdata, flags, result_code):
# pylint: disable=W0613
"""
Client connected to the server
:param client: Connected Paho client
:param userdata: User data (unused)
:param flags: Response flags sent by the broker
:param result_code: Connection result code (0: success, others: error)
"""
if result_code:
# result_code != 0: something wrong happened
_logger.error(
"Error connecting the MQTT server: %s (%d)",
paho.connack_string(result_code),
result_code,
)
else:
# Connection is OK: stop the reconnection timer
self.__stop_timer()
# Notify the caller, if any
if self.on_connect is not None:
try:
self.on_connect(self, result_code)
except Exception as ex:
_logger.exception("Error notifying MQTT listener: %s", ex)
|
python
|
{
"resource": ""
}
|
q9740
|
MqttClient.__on_disconnect
|
train
|
def __on_disconnect(self, client, userdata, result_code):
# pylint: disable=W0613
"""
Client has been disconnected from the server
:param client: Client that received the message
:param userdata: User data (unused)
:param result_code: Disconnection reason (0: expected, 1: error)
"""
if result_code:
# rc != 0: unexpected disconnection
_logger.error(
"Unexpected disconnection from the MQTT server: %s (%d)",
paho.connack_string(result_code),
result_code,
)
# Try to reconnect
self.__stop_timer()
self.__start_timer(2)
# Notify the caller, if any
if self.on_disconnect is not None:
try:
self.on_disconnect(self, result_code)
except Exception as ex:
_logger.exception("Error notifying MQTT listener: %s", ex)
|
python
|
{
"resource": ""
}
|
q9741
|
MqttClient.__on_message
|
train
|
def __on_message(self, client, userdata, msg):
# pylint: disable=W0613
"""
A message has been received from a server
:param client: Client that received the message
:param userdata: User data (unused)
:param msg: A MQTTMessage bean
"""
# Notify the caller, if any
if self.on_message is not None:
try:
self.on_message(self, msg)
except Exception as ex:
_logger.exception("Error notifying MQTT listener: %s", ex)
|
python
|
{
"resource": ""
}
|
q9742
|
MqttClient.__on_publish
|
train
|
def __on_publish(self, client, userdata, mid):
# pylint: disable=W0613
"""
A message has been published by a server
:param client: Client that received the message
:param userdata: User data (unused)
:param mid: Message ID
"""
try:
self.__in_flight[mid].set()
except KeyError:
pass
|
python
|
{
"resource": ""
}
|
q9743
|
to_import_properties
|
train
|
def to_import_properties(properties):
# type: (dict) -> dict
"""
Returns a dictionary where export properties have been replaced by import
ones
:param properties: A dictionary of service properties (with export keys)
:return: A dictionary with import properties
"""
# Copy the given dictionary
props = properties.copy()
# Add the "imported" property
props[pelix.remote.PROP_IMPORTED] = True
# Remote service ID
try:
props[pelix.remote.PROP_ENDPOINT_SERVICE_ID] = props.pop(
pelix.constants.SERVICE_ID
)
except KeyError:
# No service ID
pass
# Replace the "export configs"
configs = props.pop(pelix.remote.PROP_EXPORTED_CONFIGS, None)
if configs:
props[pelix.remote.PROP_IMPORTED_CONFIGS] = configs
# Clear other export properties
for key in (
pelix.remote.PROP_EXPORTED_INTENTS,
pelix.remote.PROP_EXPORTED_INTENTS_EXTRA,
pelix.remote.PROP_EXPORTED_INTERFACES,
):
try:
del props[key]
except KeyError:
# Key wasn't there
pass
return props
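# A minimal usage sketch (illustrative values; keys are the Pelix constants
# used above):
#   props = {pelix.constants.SERVICE_ID: 42,
#            pelix.remote.PROP_EXPORTED_CONFIGS: ["some.config"],
#            pelix.remote.PROP_EXPORTED_INTERFACES: "*"}
#   to_import_properties(props) -> the service ID moves to the endpoint
#   service ID key, exported configs become imported configs, the other
#   export keys are dropped and the "imported" flag is set to True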
|
python
|
{
"resource": ""
}
|
q9744
|
compute_exported_specifications
|
train
|
def compute_exported_specifications(svc_ref):
# type: (pelix.framework.ServiceReference) -> List[str]
"""
Computes the list of specifications exported by the given service
:param svc_ref: A ServiceReference
:return: The list of exported specifications (or an empty list)
"""
if svc_ref.get_property(pelix.remote.PROP_EXPORT_NONE):
# The export of this service is explicitly forbidden, stop here
return []
# Service specifications
specs = svc_ref.get_property(pelix.constants.OBJECTCLASS)
# Exported specifications
exported_specs = svc_ref.get_property(pelix.remote.PROP_EXPORTED_INTERFACES)
if exported_specs and exported_specs != "*":
# A set of specifications is exported, replace "objectClass"
iterable_exports = pelix.utilities.to_iterable(exported_specs, False)
all_exported_specs = [
spec for spec in specs if spec in iterable_exports
]
else:
# Export everything
all_exported_specs = pelix.utilities.to_iterable(specs)
# Authorized and rejected specifications
export_only_specs = pelix.utilities.to_iterable(
svc_ref.get_property(pelix.remote.PROP_EXPORT_ONLY), False
)
if export_only_specs:
# Filter specifications (keep authorized specifications)
return [
spec for spec in all_exported_specs if spec in export_only_specs
]
# Filter specifications (reject)
rejected_specs = pelix.utilities.to_iterable(
svc_ref.get_property(pelix.remote.PROP_EXPORT_REJECT), False
)
return [spec for spec in all_exported_specs if spec not in rejected_specs]
|
python
|
{
"resource": ""
}
|
q9745
|
format_specifications
|
train
|
def format_specifications(specifications):
# type: (Iterable[str]) -> List[str]
"""
Transforms the interfaces names into URI strings, with the interface
implementation language as a scheme.
:param specifications: Specifications to transform
:return: The transformed names
"""
transformed = set()
for original in specifications:
try:
lang, spec = _extract_specification_parts(original)
transformed.add(_format_specification(lang, spec))
except ValueError:
# Ignore invalid specifications
pass
return list(transformed)
|
python
|
{
"resource": ""
}
|
q9746
|
ExportEndpoint.get_properties
|
train
|
def get_properties(self):
# type: () -> dict
"""
Returns merged properties
:return: Endpoint merged properties
"""
# Get service properties
properties = self.__reference.get_properties()
# Merge with local properties
properties.update(self.__properties)
# Some properties can't be merged
for key in pelix.constants.OBJECTCLASS, pelix.constants.SERVICE_ID:
properties[key] = self.__reference.get_property(key)
# Force the exported configurations
properties[pelix.remote.PROP_EXPORTED_CONFIGS] = self.configurations
return properties
|
python
|
{
"resource": ""
}
|
q9747
|
ExportEndpoint.make_import_properties
|
train
|
def make_import_properties(self):
# type: () -> dict
"""
Returns the properties of this endpoint where export properties have
been replaced by import ones
:return: A dictionary with import properties
"""
# Convert merged properties
props = to_import_properties(self.get_properties())
# Add the framework UID
props[pelix.remote.PROP_ENDPOINT_FRAMEWORK_UUID] = self.__fw_uid
return props
|
python
|
{
"resource": ""
}
|
q9748
|
EndpointDescription.__check_properties
|
train
|
def __check_properties(props):
# type: (dict) -> None
"""
Checks that the given dictionary doesn't have export keys and has
import keys
:param props: Properties to validate
:raise ValueError: Invalid properties
"""
# Mandatory properties
mandatory = (
pelix.remote.PROP_ENDPOINT_ID,
pelix.remote.PROP_IMPORTED_CONFIGS,
pelix.constants.OBJECTCLASS,
)
for key in mandatory:
if key not in props:
raise ValueError("Missing property: {0}".format(key))
# Export/Import properties
props_export = (
pelix.remote.PROP_EXPORTED_CONFIGS,
pelix.remote.PROP_EXPORTED_INTERFACES,
)
for key in props_export:
if key in props:
raise ValueError("Export property found: {0}".format(key))
|
python
|
{
"resource": ""
}
|
q9749
|
EndpointDescription.matches
|
train
|
def matches(self, ldap_filter):
# type: (Union[str, pelix.ldapfilter.LDAPFilter]) -> bool
"""
Tests the properties of this EndpointDescription against the given
filter
:param ldap_filter: A filter
:return: True if properties matches the filter
"""
return pelix.ldapfilter.get_ldap_filter(ldap_filter).matches(
self.__properties
)
|
python
|
{
"resource": ""
}
|
q9750
|
EndpointDescription.to_import
|
train
|
def to_import(self):
# type: () -> ImportEndpoint
"""
Converts an EndpointDescription bean to an ImportEndpoint
:return: An ImportEndpoint bean
"""
# Properties
properties = self.get_properties()
# Framework UUID
fw_uid = self.get_framework_uuid()
# Endpoint name
try:
# From Pelix UID
name = properties[pelix.remote.PROP_ENDPOINT_NAME]
except KeyError:
# Generated
name = "{0}.{1}".format(fw_uid, self.get_service_id())
# Configuration / kind
configurations = self.get_configuration_types()
# Interfaces
specifications = self.get_interfaces()
return ImportEndpoint(
self.get_id(),
fw_uid,
configurations,
name,
specifications,
properties,
)
|
python
|
{
"resource": ""
}
|
q9751
|
EndpointDescription.from_export
|
train
|
def from_export(cls, endpoint):
# type: (ExportEndpoint) -> EndpointDescription
"""
Converts an ExportEndpoint bean to an EndpointDescription
:param endpoint: An ExportEndpoint bean
:return: An EndpointDescription bean
"""
assert isinstance(endpoint, ExportEndpoint)
# Service properties
properties = endpoint.get_properties()
# Set import keys
properties[pelix.remote.PROP_ENDPOINT_ID] = endpoint.uid
properties[pelix.remote.PROP_IMPORTED_CONFIGS] = endpoint.configurations
properties[
pelix.remote.PROP_EXPORTED_INTERFACES
] = endpoint.specifications
# Remove export keys
for key in (
pelix.remote.PROP_EXPORTED_CONFIGS,
pelix.remote.PROP_EXPORTED_INTERFACES,
pelix.remote.PROP_EXPORTED_INTENTS,
pelix.remote.PROP_EXPORTED_INTENTS_EXTRA,
):
try:
del properties[key]
except KeyError:
pass
# Other information
properties[pelix.remote.PROP_ENDPOINT_NAME] = endpoint.name
properties[
pelix.remote.PROP_ENDPOINT_FRAMEWORK_UUID
] = endpoint.framework
return EndpointDescription(None, properties)
|
python
|
{
"resource": ""
}
|
q9752
|
EDEFReader._parse_description
|
train
|
def _parse_description(self, node):
# type: (ElementTree.Element) -> EndpointDescription
"""
Parse an endpoint description node
:param node: The endpoint description node
:return: The parsed EndpointDescription bean
:raise KeyError: Attribute missing
:raise ValueError: Invalid description
"""
endpoint = {}
for prop_node in node.findall(TAG_PROPERTY):
name, value = self._parse_property(prop_node)
endpoint[name] = value
return EndpointDescription(None, endpoint)
|
python
|
{
"resource": ""
}
|
q9753
|
EDEFReader._parse_property
|
train
|
def _parse_property(self, node):
# type: (ElementTree.Element) -> Tuple[str, Any]
"""
Parses a property node
:param node: The property node
:return: A (name, value) tuple
:raise KeyError: Attribute missing
"""
# Get information
name = node.attrib[ATTR_NAME]
vtype = node.attrib.get(ATTR_VALUE_TYPE, TYPE_STRING)
# Look for a value as a single child node
try:
value_node = next(iter(node))
value = self._parse_value_node(vtype, value_node)
except StopIteration:
# Value is an attribute
value = self._convert_value(vtype, node.attrib[ATTR_VALUE])
return name, value
|
python
|
{
"resource": ""
}
|
q9754
|
EDEFReader._parse_value_node
|
train
|
def _parse_value_node(self, vtype, node):
# type: (str, ElementTree.Element) -> Any
"""
Parses a value node
:param vtype: The value type
:param node: The value node
:return: The parsed value
"""
kind = node.tag
if kind == TAG_XML:
# Raw XML value
return next(iter(node))
elif kind == TAG_LIST or kind == TAG_ARRAY:
# List
return [
self._convert_value(vtype, value_node.text)
for value_node in node.findall(TAG_VALUE)
]
elif kind == TAG_SET:
# Set
return set(
self._convert_value(vtype, value_node.text)
for value_node in node.findall(TAG_VALUE)
)
else:
# Unknown
raise ValueError("Unknown value tag: {0}".format(kind))
|
python
|
{
"resource": ""
}
|
q9755
|
EDEFReader.parse
|
train
|
def parse(self, xml_str):
# type: (str) -> List[EndpointDescription]
"""
Parses an EDEF XML string
:param xml_str: An XML string
:return: The list of parsed EndpointDescription
"""
# Parse the document
root = ElementTree.fromstring(xml_str)
if root.tag != TAG_ENDPOINT_DESCRIPTIONS:
raise ValueError("Not an EDEF XML: {0}".format(root.tag))
# Parse content
return [
self._parse_description(node)
for node in root.findall(TAG_ENDPOINT_DESCRIPTION)
]
|
python
|
{
"resource": ""
}
|
q9756
|
EDEFWriter._make_endpoint
|
train
|
def _make_endpoint(self, root_node, endpoint):
# type: (ElementTree.Element, EndpointDescription) -> None
"""
Converts the given endpoint bean to an XML Element
:param root_node: The XML root Element
:param endpoint: An EndpointDescription bean
"""
endpoint_node = ElementTree.SubElement(
root_node, TAG_ENDPOINT_DESCRIPTION
)
for name, value in endpoint.get_properties().items():
# Compute value type
vtype = self._get_type(name, value)
# Prepare the property node
prop_node = ElementTree.SubElement(
endpoint_node, TAG_PROPERTY, {ATTR_NAME: name}
)
if vtype == XML_VALUE:
# Special case, we have to store the value as a child
# without a value-type attribute
prop_node.append(value)
continue
# Set the value type
prop_node.set(ATTR_VALUE_TYPE, vtype)
# Compute value node or attribute
if isinstance(value, tuple):
# Array
self._add_container(prop_node, TAG_ARRAY, value)
elif isinstance(value, list):
# List
self._add_container(prop_node, TAG_LIST, value)
elif isinstance(value, set):
# Set
self._add_container(prop_node, TAG_SET, value)
elif isinstance(value, type(root_node)):
# XML (direct addition)
prop_node.append(value)
else:
# Simple value -> Attribute
prop_node.set(ATTR_VALUE, str(value))
|
python
|
{
"resource": ""
}
|
q9757
|
EDEFWriter._make_xml
|
train
|
def _make_xml(self, endpoints):
# type: (List[EndpointDescription]) -> ElementTree.Element
"""
Converts the given endpoint description beans into an XML Element
:param endpoints: A list of EndpointDescription beans
:return: A string containing an XML document
"""
root = ElementTree.Element(TAG_ENDPOINT_DESCRIPTIONS)
for endpoint in endpoints:
self._make_endpoint(root, endpoint)
# Prepare pretty-printing
self._indent(root)
return root
|
python
|
{
"resource": ""
}
|
q9758
|
EDEFWriter.write
|
train
|
def write(self, endpoints, filename):
# type: (List[EndpointDescription], str) -> None
"""
Writes the given endpoint descriptions to the given file
:param endpoints: A list of EndpointDescription beans
:param filename: Name of the file where to write the XML
:raise IOError: Error writing the file
"""
with open(filename, "w") as filep:
filep.write(self.to_string(endpoints))
|
python
|
{
"resource": ""
}
|
q9759
|
is_from_parent
|
train
|
def is_from_parent(cls, attribute_name, value=None):
# type: (type, str, bool) -> bool
"""
Tests if the current attribute value is shared by a parent of the given
class.
Returns None if the attribute value is None.
:param cls: Child class with the requested attribute
:param attribute_name: Name of the attribute to be tested
:param value: The exact value in the child class (optional)
:return: True if the attribute value is shared with a parent class
"""
if value is None:
try:
# Get the current value
value = getattr(cls, attribute_name)
except AttributeError:
# No need to go further: the attribute does not exist
return False
for base in cls.__bases__:
# Look for the value in each parent class
try:
return getattr(base, attribute_name) is value
except AttributeError:
pass
# Attribute value not found in parent classes
return False
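# A minimal usage sketch with hypothetical classes:
#   class Parent(object):
#       FACTORY = "my.factory"
#   class Child(Parent):
#       pass
#   is_from_parent(Child, "FACTORY")  -> True   (inherited from Parent)
#   is_from_parent(Parent, "FACTORY") -> False  (not shared with a parent)
#   is_from_parent(Child, "MISSING")  -> False  (attribute does not exist)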
|
python
|
{
"resource": ""
}
|
q9760
|
get_factory_context
|
train
|
def get_factory_context(cls):
# type: (type) -> FactoryContext
"""
Retrieves the factory context object associated to a factory. Creates it
if needed
:param cls: The factory class
:return: The factory class context
"""
context = getattr(cls, constants.IPOPO_FACTORY_CONTEXT, None)
if context is None:
# Class not yet manipulated
context = FactoryContext()
elif is_from_parent(cls, constants.IPOPO_FACTORY_CONTEXT):
# Create a copy of the context
context = context.copy(True)
# * Manipulation has not been applied yet
context.completed = False
else:
# Nothing special to do
return context
# Context has been created or copied, inject the new bean
setattr(cls, constants.IPOPO_FACTORY_CONTEXT, context)
return context
|
python
|
{
"resource": ""
}
|
q9761
|
get_method_description
|
train
|
def get_method_description(method):
# type: (Callable) -> str
"""
Retrieves a description of the given method. If possible, the description
contains the source file name and line.
:param method: A method
:return: A description of the method (at least its name)
:raise AttributeError: Given object has no __name__ attribute
"""
try:
try:
line_no = inspect.getsourcelines(method)[1]
except IOError:
# Error reading the source file
line_no = -1
return "'{method}' ({file}:{line})".format(
method=method.__name__, file=inspect.getfile(method), line=line_no
)
except TypeError:
# Method can't be inspected
return "'{0}'".format(method.__name__)
|
python
|
{
"resource": ""
}
|
q9762
|
validate_method_arity
|
train
|
def validate_method_arity(method, *needed_args):
# type: (Callable, *str) -> None
"""
Tests if the decorated method has a sufficient number of parameters.
:param method: The method to be tested
:param needed_args: The name (for description only) of the needed
arguments, without "self".
:return: Nothing
:raise TypeError: Invalid number of parameter
"""
nb_needed_args = len(needed_args)
# Test the number of parameters
arg_spec = get_method_arguments(method)
method_args = arg_spec.args
try:
# Remove the self argument when present
if method_args[0] == "self":
del method_args[0]
except IndexError:
pass
nb_args = len(method_args)
if arg_spec.varargs is not None:
# Variable arguments
if nb_args != 0:
# Other arguments detected
raise TypeError(
"When using '*args', the decorated {0} method must only "
"accept the 'self' argument".format(
get_method_description(method)
)
)
elif arg_spec.keywords is not None:
raise TypeError("Methods using '**kwargs' are not handled")
elif nb_args != nb_needed_args:
# "Normal" arguments
raise TypeError(
"The decorated method {0} must accept exactly {1} parameters: "
"(self, {2})".format(
get_method_description(method),
nb_needed_args + 1,
", ".join(needed_args),
)
)
|
python
|
{
"resource": ""
}
|
q9763
|
_ipopo_setup_callback
|
train
|
def _ipopo_setup_callback(cls, context):
# type: (type, FactoryContext) -> None
"""
Sets up the class _callback dictionary
:param cls: The class to handle
:param context: The factory class context
"""
assert inspect.isclass(cls)
assert isinstance(context, FactoryContext)
if context.callbacks is not None:
callbacks = context.callbacks.copy()
else:
callbacks = {}
functions = inspect.getmembers(cls, inspect.isroutine)
for _, func in functions:
if not hasattr(func, constants.IPOPO_METHOD_CALLBACKS):
# No attribute, get the next member
continue
method_callbacks = getattr(func, constants.IPOPO_METHOD_CALLBACKS)
if not isinstance(method_callbacks, list):
# Invalid content
_logger.warning(
"Invalid callback information %s in %s",
constants.IPOPO_METHOD_CALLBACKS,
get_method_description(func),
)
continue
# Keeping the attribute allows inheritance: removing it would mean only
# the first child class sees it -> don't remove it
# Store the call backs
for _callback in method_callbacks:
if _callback in callbacks and not is_from_parent(
cls, callbacks[_callback].__name__, callbacks[_callback]
):
_logger.warning(
"Redefining the callback %s in class '%s'.\n"
"\tPrevious callback : %s\n"
"\tNew callback : %s",
_callback,
cls.__name__,
get_method_description(callbacks[_callback]),
get_method_description(func),
)
callbacks[_callback] = func
# Update the factory context
context.callbacks.clear()
context.callbacks.update(callbacks)
|
python
|
{
"resource": ""
}
|
q9764
|
_ipopo_setup_field_callback
|
train
|
def _ipopo_setup_field_callback(cls, context):
# type: (type, FactoryContext) -> None
"""
Sets up the class _field_callback dictionary
:param cls: The class to handle
:param context: The factory class context
"""
assert inspect.isclass(cls)
assert isinstance(context, FactoryContext)
if context.field_callbacks is not None:
callbacks = context.field_callbacks.copy()
else:
callbacks = {}
functions = inspect.getmembers(cls, inspect.isroutine)
for name, func in functions:
if not hasattr(func, constants.IPOPO_METHOD_FIELD_CALLBACKS):
# No attribute, get the next member
continue
method_callbacks = getattr(func, constants.IPOPO_METHOD_FIELD_CALLBACKS)
if not isinstance(method_callbacks, list):
# Invalid content
_logger.warning(
"Invalid attribute %s in %s",
constants.IPOPO_METHOD_FIELD_CALLBACKS,
name,
)
continue
# Keeping the attribute allows inheritance: removing it would mean only
# the first child class sees it -> don't remove it
# Store the call backs
for kind, field, if_valid in method_callbacks:
fields_cbs = callbacks.setdefault(field, {})
if kind in fields_cbs and not is_from_parent(
cls, fields_cbs[kind][0].__name__
):
_logger.warning(
"Redefining the callback %s in '%s'. "
"Previous callback : '%s' (%s). "
"New callback : %s",
kind,
name,
fields_cbs[kind][0].__name__,
fields_cbs[kind][0],
func,
)
fields_cbs[kind] = (func, if_valid)
# Update the factory context
context.field_callbacks.clear()
context.field_callbacks.update(callbacks)
|
python
|
{
"resource": ""
}
|
q9765
|
_append_object_entry
|
train
|
def _append_object_entry(obj, list_name, entry):
# type: (Any, str, Any) -> None
"""
Appends the given entry in the given object list.
Creates the list field if needed.
:param obj: The object that contains the list
:param list_name: The name of the list member in *obj*
:param entry: The entry to be added to the list
:raise ValueError: Invalid attribute content
"""
# Get the list
obj_list = getattr(obj, list_name, None)
if obj_list is None:
# We'll have to create it
obj_list = []
setattr(obj, list_name, obj_list)
assert isinstance(obj_list, list)
# Set up the property, if needed
if entry not in obj_list:
obj_list.append(entry)
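# A minimal usage sketch with a hypothetical function object:
#   def dummy():
#       pass
#   _append_object_entry(dummy, "_callbacks", "validate")
#   _append_object_entry(dummy, "_callbacks", "validate")  # duplicate ignored
#   dummy._callbacks  -> ["validate"]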
|
python
|
{
"resource": ""
}
|
q9766
|
_get_specifications
|
train
|
def _get_specifications(specifications):
"""
Computes the list of strings corresponding to the given specifications
:param specifications: A string, a class or a list of specifications
:return: A list of strings
:raise ValueError: Invalid specification found
"""
if not specifications or specifications is object:
raise ValueError("No specifications given")
elif inspect.isclass(specifications):
if Provides.USE_MODULE_QUALNAME:
if sys.version_info < (3, 3, 0):
raise ValueError(
"Qualified name capability requires Python 3.3+"
)
# Get the name of the class
if not specifications.__module__:
return [specifications.__qualname__]
return [
"{0}.{1}".format(
specifications.__module__, specifications.__qualname__
)
]
else:
# Legacy behavior
return [specifications.__name__]
elif is_string(specifications):
# Specification name
specifications = specifications.strip()
if not specifications:
raise ValueError("Empty specification given")
return [specifications]
elif isinstance(specifications, (list, tuple)):
# List given: normalize its content
results = []
for specification in specifications:
results.extend(_get_specifications(specification))
return results
else:
raise ValueError(
"Unhandled specifications type : {0}".format(
type(specifications).__name__
)
)
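# A minimal usage sketch, assuming the legacy (non-qualified-name) behaviour:
#   class IDictionary(object):
#       pass
#   _get_specifications("my.spec")                -> ["my.spec"]
#   _get_specifications(IDictionary)              -> ["IDictionary"]
#   _get_specifications(["my.spec", IDictionary]) -> ["my.spec", "IDictionary"]
#   _get_specifications(None)  raises ValueError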
|
python
|
{
"resource": ""
}
|
q9767
|
PostRegistration
|
train
|
def PostRegistration(method):
# pylint: disable=C0103
"""
The service post-registration callback decorator is called after a service
of the component has been registered to the framework.
The decorated method must accept the
:class:`~pelix.framework.ServiceReference` of the registered
service as argument::
@PostRegistration
def callback_method(self, service_reference):
'''
service_reference: The ServiceReference of the provided service
'''
# ...
:param method: The decorated method
:raise TypeError: The decorated element is not a valid function
"""
if not isinstance(method, types.FunctionType):
raise TypeError("@PostRegistration can only be applied on functions")
# Tests the number of parameters
validate_method_arity(method, "service_reference")
_append_object_entry(
method,
constants.IPOPO_METHOD_CALLBACKS,
constants.IPOPO_CALLBACK_POST_REGISTRATION,
)
return method
|
python
|
{
"resource": ""
}
|
q9768
|
PostUnregistration
|
train
|
def PostUnregistration(method):
# pylint: disable=C0103
"""
The service post-unregistration callback decorator is called after a service
of the component has been unregistered from the framework.
The decorated method must accept the
:class:`~pelix.framework.ServiceReference` of the registered
service as argument::
@PostUnregistration
def callback_method(self, service_reference):
'''
service_reference: The ServiceReference of the provided service
'''
# ...
:param method: The decorated method
:raise TypeError: The decorated element is not a valid function
"""
if not isinstance(method, types.FunctionType):
raise TypeError("@PostUnregistration can only be applied on functions")
# Tests the number of parameters
validate_method_arity(method, "service_reference")
_append_object_entry(
method,
constants.IPOPO_METHOD_CALLBACKS,
constants.IPOPO_CALLBACK_POST_UNREGISTRATION,
)
return method
|
python
|
{
"resource": ""
}
|
q9769
|
_ShellUtils.bundlestate_to_str
|
train
|
def bundlestate_to_str(state):
"""
Converts a bundle state integer to a string
"""
states = {
pelix.Bundle.INSTALLED: "INSTALLED",
pelix.Bundle.ACTIVE: "ACTIVE",
pelix.Bundle.RESOLVED: "RESOLVED",
pelix.Bundle.STARTING: "STARTING",
pelix.Bundle.STOPPING: "STOPPING",
pelix.Bundle.UNINSTALLED: "UNINSTALLED",
}
return states.get(state, "Unknown state ({0})".format(state))
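# A minimal usage sketch:
#   bundlestate_to_str(pelix.Bundle.ACTIVE) -> "ACTIVE"
#   bundlestate_to_str(-1)                  -> "Unknown state (-1)"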
|
python
|
{
"resource": ""
}
|
q9770
|
_ShellUtils.make_table
|
train
|
def make_table(headers, lines, prefix=None):
"""
Generates an ASCII table according to the given headers and lines
:param headers: List of table headers (N-tuple)
:param lines: List of table lines (N-tuples)
:param prefix: Optional prefix for each line
:return: The ASCII representation of the table
:raise ValueError: Different number of columns between headers and
lines
"""
# Normalize the prefix
prefix = str(prefix or "")
# Maximum lengths
lengths = [len(title) for title in headers]
# Store the number of columns (0-based)
nb_columns = len(lengths) - 1
# Lines
str_lines = []
for idx, line in enumerate(lines):
# Recompute lengths
str_line = []
str_lines.append(str_line)
column = -1
try:
for column, entry in enumerate(line):
str_entry = str(entry)
str_line.append(str_entry)
if len(str_entry) > lengths[column]:
lengths[column] = len(str_entry)
except IndexError:
# Line too small/big
raise ValueError(
"Different sizes for header and lines "
"(line {0})".format(idx + 1)
)
except (TypeError, AttributeError):
# Invalid type of line
raise ValueError(
"Invalid type of line: %s", type(line).__name__
)
else:
if column != nb_columns:
# Check if all lines have the same number of columns
raise ValueError(
"Different sizes for header and lines "
"(line {0})".format(idx + 1)
)
# Prepare the head (centered text)
format_str = "{0}|".format(prefix)
for column, length in enumerate(lengths):
format_str += " {%d:^%d} |" % (column, length)
head_str = format_str.format(*headers)
# Prepare the separator, according the length of the headers string
separator = "{0}{1}".format(prefix, "-" * (len(head_str) - len(prefix)))
idx = head_str.find("|")
while idx != -1:
separator = "+".join((separator[:idx], separator[idx + 1 :]))
idx = head_str.find("|", idx + 1)
# Prepare the output
output = [separator, head_str, separator.replace("-", "=")]
# Compute the lines
format_str = format_str.replace("^", "<")
for line in str_lines:
output.append(format_str.format(*line))
output.append(separator)
# Force the last end of line
output.append("")
# Join'em
return "\n".join(output)
|
python
|
{
"resource": ""
}
|
q9771
|
_ShellService.bind_handler
|
train
|
def bind_handler(self, svc_ref):
"""
Called if a command service has been found.
Registers the methods of this service.
:param svc_ref: A reference to the found service
:return: True if the commands have been registered
"""
if svc_ref in self._bound_references:
# Already bound service
return False
# Get the service
handler = self._context.get_service(svc_ref)
# Get its name space
namespace = handler.get_namespace()
commands = []
# Register all service methods directly
for command, method in handler.get_methods():
self.register_command(namespace, command, method)
commands.append(command)
# Store the reference
self._bound_references[svc_ref] = handler
self._reference_commands[svc_ref] = (namespace, commands)
return True
|
python
|
{
"resource": ""
}
|
q9772
|
_ShellService.unbind_handler
|
train
|
def unbind_handler(self, svc_ref):
"""
Called if a command service is gone.
Unregisters its commands.
:param svc_ref: A reference to the unbound service
:return: True if the commands have been unregistered
"""
if svc_ref not in self._bound_references:
# Unknown reference
return False
# Unregister its commands
namespace, commands = self._reference_commands[svc_ref]
for command in commands:
self.unregister(namespace, command)
# Release the service
self._context.unget_service(svc_ref)
del self._bound_references[svc_ref]
del self._reference_commands[svc_ref]
return True
|
python
|
{
"resource": ""
}
|
q9773
|
_ShellService.bundle_details
|
train
|
def bundle_details(self, io_handler, bundle_id):
"""
Prints the details of the bundle with the given ID or name
"""
bundle = None
try:
# Convert the given ID into an integer
bundle_id = int(bundle_id)
except ValueError:
# Not an integer, suppose it's a bundle name
for bundle in self._context.get_bundles():
if bundle.get_symbolic_name() == bundle_id:
break
else:
# Bundle not found
bundle = None
else:
# Integer ID: direct access
try:
bundle = self._context.get_bundle(bundle_id)
except constants.BundleException:
pass
if bundle is None:
# No matching bundle
io_handler.write_line("Unknown bundle ID: {0}", bundle_id)
return False
lines = [
"ID......: {0}".format(bundle.get_bundle_id()),
"Name....: {0}".format(bundle.get_symbolic_name()),
"Version.: {0}".format(bundle.get_version()),
"State...: {0}".format(
self._utils.bundlestate_to_str(bundle.get_state())
),
"Location: {0}".format(bundle.get_location()),
"Published services:",
]
try:
services = bundle.get_registered_services()
if services:
for svc_ref in services:
lines.append("\t{0}".format(svc_ref))
else:
lines.append("\tn/a")
except constants.BundleException as ex:
# Bundle in an invalid state
lines.append("\tError: {0}".format(ex))
lines.append("Services used by this bundle:")
try:
services = bundle.get_services_in_use()
if services:
for svc_ref in services:
lines.append("\t{0}".format(svc_ref))
else:
lines.append("\tn/a")
except constants.BundleException as ex:
# Bundle in an invalid state
lines.append("\tError: {0}".format(ex))
lines.append("")
io_handler.write("\n".join(lines))
return None
|
python
|
{
"resource": ""
}
|
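The ID-or-name resolution above can be read as a standalone pattern: try int() first, and only fall back to a name lookup when that fails. The helper below is a sketch over plain objects with get_bundle_id() and get_symbolic_name() methods, not the actual bundle context API.

def resolve_bundle(bundles, id_or_name):
    # Accept either a numeric bundle ID or a symbolic name
    try:
        bundle_id = int(id_or_name)
    except ValueError:
        # Not an integer: look the bundle up by its symbolic name
        for bundle in bundles:
            if bundle.get_symbolic_name() == id_or_name:
                return bundle
        return None
    # Integer ID: match on the bundle ID instead
    for bundle in bundles:
        if bundle.get_bundle_id() == bundle_id:
            return bundle
    return None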
q9774
|
_ShellService.bundles_list
|
train
|
def bundles_list(self, io_handler, name=None):
"""
Lists the bundles in the framework and their state. The list can be
filtered by bundle name.
"""
# Head of the table
headers = ("ID", "Name", "State", "Version")
# Get the bundles
bundles = self._context.get_bundles()
# The framework is not in the result of get_bundles()
bundles.insert(0, self._context.get_framework())
if name is not None:
# Filter the list
bundles = [
bundle
for bundle in bundles
if name in bundle.get_symbolic_name()
]
# Make the entries
lines = [
[
str(entry)
for entry in (
bundle.get_bundle_id(),
bundle.get_symbolic_name(),
self._utils.bundlestate_to_str(bundle.get_state()),
bundle.get_version(),
)
]
for bundle in bundles
]
# Print'em all
io_handler.write(self._utils.make_table(headers, lines))
if name is None:
io_handler.write_line("{0} bundles installed", len(lines))
else:
io_handler.write_line("{0} filtered bundles", len(lines))
|
python
|
{
"resource": ""
}
|
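A sketch of the data shape bundles_list hands to make_table: one tuple of headers plus one list of stringified cells per bundle, in the same column order. The bundle values below are invented.

headers = ("ID", "Name", "State", "Version")
bundles = [
    (0, "pelix.framework", "ACTIVE", "1.0.0"),
    (1, "pelix.shell.core", "ACTIVE", "1.0.1"),
]
# One row per bundle; every cell is converted to str before formatting
lines = [[str(entry) for entry in bundle] for bundle in bundles]
# io_handler.write(make_table(headers, lines)) would then render the table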
q9775
|
_ShellService.service_details
|
train
|
def service_details(self, io_handler, service_id):
"""
Prints the details of the service with the given ID
"""
svc_ref = self._context.get_service_reference(
None, "({0}={1})".format(constants.SERVICE_ID, service_id)
)
if svc_ref is None:
io_handler.write_line("Service not found: {0}", service_id)
return False
lines = [
"ID............: {0}".format(
svc_ref.get_property(constants.SERVICE_ID)
),
"Rank..........: {0}".format(
svc_ref.get_property(constants.SERVICE_RANKING)
),
"Specifications: {0}".format(
svc_ref.get_property(constants.OBJECTCLASS)
),
"Bundle........: {0}".format(svc_ref.get_bundle()),
"Properties....:",
]
for key, value in sorted(svc_ref.get_properties().items()):
lines.append("\t{0} = {1}".format(key, value))
lines.append("Bundles using this service:")
for bundle in svc_ref.get_using_bundles():
lines.append("\t{0}".format(bundle))
lines.append("")
io_handler.write("\n".join(lines))
return None
|
python
|
{
"resource": ""
}
|
q9776
|
_ShellService.services_list
|
train
|
def services_list(self, io_handler, specification=None):
"""
Lists the services registered in the framework. The list can be
filtered by an exact specification.
"""
# Head of the table
headers = ("ID", "Specifications", "Bundle", "Ranking")
# Lines
references = (
self._context.get_all_service_references(specification, None) or []
)
# Construct the list of services
lines = [
[
str(entry)
for entry in (
ref.get_property(constants.SERVICE_ID),
ref.get_property(constants.OBJECTCLASS),
ref.get_bundle(),
ref.get_property(constants.SERVICE_RANKING),
)
]
for ref in references
]
if not lines and specification:
# No matching service found
io_handler.write_line("No service provides '{0}'", specification)
return False
# Print'em all
io_handler.write(self._utils.make_table(headers, lines))
io_handler.write_line("{0} services registered", len(lines))
return None
|
python
|
{
"resource": ""
}
|
q9777
|
_ShellService.properties_list
|
train
|
def properties_list(self, io_handler):
"""
Lists the properties of the framework
"""
# Get the framework
framework = self._context.get_framework()
# Head of the table
headers = ("Property Name", "Value")
# Lines
lines = [item for item in framework.get_properties().items()]
# Sort lines
lines.sort()
# Print the table
io_handler.write(self._utils.make_table(headers, lines))
|
python
|
{
"resource": ""
}
|
q9778
|
_ShellService.property_value
|
train
|
def property_value(self, io_handler, name):
"""
Prints the value of the given property, looking it up in the
framework properties and then in the environment variables.
"""
value = self._context.get_property(name)
if value is None:
# Avoid printing "None"
value = ""
io_handler.write_line(str(value))
|
python
|
{
"resource": ""
}
|
q9779
|
_ShellService.environment_list
|
train
|
def environment_list(self, io_handler):
"""
Lists the framework process environment variables
"""
# Head of the table
headers = ("Environment Variable", "Value")
# Lines
lines = [item for item in os.environ.items()]
# Sort lines
lines.sort()
# Print the table
io_handler.write(self._utils.make_table(headers, lines))
|
python
|
{
"resource": ""
}
|
q9780
|
_ShellService.change_dir
|
train
|
def change_dir(self, session, path):
"""
Changes the working directory
"""
if path == "-":
# Previous directory
path = self._previous_path or "."
try:
previous = os.getcwd()
os.chdir(path)
except IOError as ex:
# Can't change directory
session.write_line("Error changing directory: {0}", ex)
else:
# Store previous path
self._previous_path = previous
session.write_line(os.getcwd())
|
python
|
{
"resource": ""
}
|
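The "cd -" shortcut above only needs the previously visited directory to be remembered across calls; a standalone sketch using the standard os module (the module-level _previous holder stands in for the component attribute):

import os

_previous = None

def change_dir(path):
    # Jump back to the previous directory when "-" is given,
    # defaulting to the current directory on the first call
    global _previous
    if path == "-":
        path = _previous or "."
    previous = os.getcwd()
    os.chdir(path)  # raises OSError if the path is invalid
    _previous = previous
    return os.getcwd()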
q9781
|
_ShellService.start
|
train
|
def start(self, io_handler, bundle_id, *bundles_ids):
"""
Starts the bundles with the given IDs. Stops on first failure.
"""
for bid in (bundle_id,) + bundles_ids:
try:
# Got an int => it's a bundle ID
bid = int(bid)
except ValueError:
# Got something else, we will try to install it first
bid = self.install(io_handler, bid)
bundle = self.__get_bundle(io_handler, bid)
if bundle is not None:
io_handler.write_line(
"Starting bundle {0} ({1})...",
bid,
bundle.get_symbolic_name(),
)
bundle.start()
else:
return False
return None
|
python
|
{
"resource": ""
}
|
q9782
|
_ShellService.stop
|
train
|
def stop(self, io_handler, bundle_id, *bundles_ids):
"""
Stops the bundles with the given IDs. Stops on first failure.
"""
for bid in (bundle_id,) + bundles_ids:
bundle = self.__get_bundle(io_handler, bid)
if bundle is not None:
io_handler.write_line(
"Stopping bundle {0} ({1})...",
bid,
bundle.get_symbolic_name(),
)
bundle.stop()
else:
return False
return None
|
python
|
{
"resource": ""
}
|
q9783
|
_ShellService.install
|
train
|
def install(self, io_handler, module_name):
"""
Installs the bundle with the given module name
"""
bundle = self._context.install_bundle(module_name)
io_handler.write_line("Bundle ID: {0}", bundle.get_bundle_id())
return bundle.get_bundle_id()
|
python
|
{
"resource": ""
}
|
q9784
|
TemporalDependency.__cancel_timer
|
train
|
def __cancel_timer(self):
"""
Cancels the timer, and calls its target method immediately
"""
if self.__timer is not None:
self.__timer.cancel()
self.__unbind_call(True)
self.__timer_args = None
self.__timer = None
|
python
|
{
"resource": ""
}
|
q9785
|
TemporalDependency.__unbind_call
|
train
|
def __unbind_call(self, still_valid):
"""
Calls the iPOPO unbind method
"""
with self._lock:
if self.__timer is not None:
# Timeout expired, we're not valid anymore
self.__timer = None
self.__still_valid = still_valid
self._ipopo_instance.unbind(
self, self.__timer_args[0], self.__timer_args[1]
)
|
python
|
{
"resource": ""
}
|
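A hedged sketch of the cancel-then-fire-immediately pattern implemented by __cancel_timer and __unbind_call, reduced to the standard threading.Timer; the DelayedCall class and its method names are illustrative.

import threading

class DelayedCall(object):
    # Schedules a callback after a delay, but allows cancelling the
    # delay and invoking the callback right away instead
    def __init__(self, delay, callback, *args):
        self._callback = callback
        self._args = args
        self._timer = threading.Timer(delay, self._fire)
        self._timer.start()

    def _fire(self):
        # Normal path: the delay expired
        self._timer = None
        self._callback(*self._args)

    def cancel_and_fire(self):
        # Mirror of __cancel_timer: stop the pending timer, then call
        # the target method immediately
        if self._timer is not None:
            self._timer.cancel()
            self._timer = None
            self._callback(*self._args)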
q9786
|
_JabsorbRpcServlet.do_POST
|
train
|
def do_POST(self, request, response):
# pylint: disable=C0103
"""
Handle a POST request
:param request: The HTTP request bean
:param response: The HTTP response handler
"""
# Get the request JSON content
data = jsonrpclib.loads(to_str(request.read_data()))
# Convert from Jabsorb
data = jabsorb.from_jabsorb(data)
# Dispatch
try:
result = self._unmarshaled_dispatch(data, self._simple_dispatch)
except NoMulticallResult:
# No result (never happens, but who knows...)
result = None
if result is not None:
# Convert result to Jabsorb
if "result" in result:
result["result"] = jabsorb.to_jabsorb(result["result"])
# Store JSON
result = jsonrpclib.jdumps(result)
else:
# It was a notification
result = ""
# Send the result
response.send_content(200, result, "application/json-rpc")
|
python
|
{
"resource": ""
}
|
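The servlet above exchanges JSON-RPC payloads (with an extra Jabsorb conversion step that is omitted here). A minimal illustration of the request and response shapes involved, using only the standard json module; the method name and values are invented.

import json

# What a client would POST to the servlet
request_body = json.dumps({
    "jsonrpc": "2.0",
    "method": "echo",
    "params": ["hello"],
    "id": 1,
})

# What the servlet would send back with status 200 and the
# "application/json-rpc" content type
response_body = json.dumps({
    "jsonrpc": "2.0",
    "result": "hello",
    "id": 1,
})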
q9787
|
RestDispatcher._rest_dispatch
|
train
|
def _rest_dispatch(self, request, response):
# type: (AbstractHTTPServletRequest, AbstractHTTPServletResponse) -> None
"""
Dispatches the request
:param request: Request bean
:param response: Response bean
"""
# Extract request information
http_verb = request.get_command()
sub_path = request.get_sub_path()
# Find the best matching method, according to the number of
# readable arguments
max_valid_args = -1
best_method = None
best_args = None
best_match = None
for route, method in self.__routes.get(http_verb, {}).items():
# Parse the request path
match = route.match(sub_path)
if not match:
continue
# Count the number of valid arguments
method_args = self.__methods_args[method]
nb_valid_args = 0
for name in method_args:
try:
match.group(name)
nb_valid_args += 1
except IndexError:
# Argument not found
pass
if nb_valid_args > max_valid_args:
# Found a better match
max_valid_args = nb_valid_args
best_method = method
best_args = method_args
best_match = match
if best_method is None:
# No match: return a 404 plain text error
response.send_content(
404,
"No method to handle path {0}".format(sub_path),
"text/plain",
)
else:
# Found a method
# ... convert arguments
kwargs = {}
if best_args:
for name, converter in best_args.items():
try:
str_value = best_match.group(name)
except IndexError:
# Argument is missing: do nothing
pass
else:
if str_value:
# Keep the default value when an argument is
# missing, i.e. don't give it in kwargs
if converter is not None:
# Convert the argument
kwargs[name] = converter(str_value)
else:
# Use the string value as is
kwargs[name] = str_value
# Prepare positional arguments
extra_pos_args = []
if kwargs:
# Ignore the first two parameters (request and response)
method_args = get_method_arguments(best_method).args[:2]
for pos_arg in method_args:
try:
extra_pos_args.append(kwargs.pop(pos_arg))
except KeyError:
pass
# ... call the method (exceptions will be handled by the server)
best_method(request, response, *extra_pos_args, **kwargs)
|
python
|
{
"resource": ""
}
|
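The route selection above boils down to "keep the matching route that captured the most named arguments". A standalone sketch with plain re patterns; the routes and handler names are invented.

import re

ROUTES = {
    re.compile(r"^/items$"): "list_items",
    re.compile(r"^/items/(?P<item_id>\d+)$"): "get_item",
}

def find_best(sub_path):
    best_method, best_score = None, -1
    for route, method in ROUTES.items():
        match = route.match(sub_path)
        if not match:
            continue
        # Prefer the route that captured the most named groups
        score = len(match.groupdict())
        if score > best_score:
            best_method, best_score = method, score
    return best_method

print(find_best("/items/42"))  # -> get_item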
q9788
|
RestDispatcher._setup_rest_dispatcher
|
train
|
def _setup_rest_dispatcher(self):
"""
Finds all methods to call when handling a route
"""
for _, method in inspect.getmembers(self, inspect.isroutine):
try:
config = getattr(method, HTTP_ROUTE_ATTRIBUTE)
except AttributeError:
# Not a REST method
continue
for route in config["routes"]:
pattern, arguments = self.__convert_route(route)
self.__methods_args.setdefault(method, {}).update(arguments)
for http_verb in config["methods"]:
self.__routes.setdefault(http_verb, {})[pattern] = method
|
python
|
{
"resource": ""
}
|
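For context, a sketch of how a method could end up carrying the configuration that _setup_rest_dispatcher looks up with getattr(). The attribute name mirrors HTTP_ROUTE_ATTRIBUTE from the code above, but its value, the http_route decorator and the route syntax are assumptions, not the real API.

HTTP_ROUTE_ATTRIBUTE = "_rest_http_route_"  # assumed value

def http_route(route, methods=("GET",)):
    # Stores the route configuration on the decorated method so that
    # _setup_rest_dispatcher can later discover it
    def decorator(method):
        setattr(method, HTTP_ROUTE_ATTRIBUTE,
                {"routes": [route], "methods": list(methods)})
        return method
    return decorator

class Api(object):
    @http_route("/items/<item_id:int>", methods=("GET",))
    def get_item(self, request, response, item_id=None):
        # Would be called by _rest_dispatch with the converted argument
        response.send_content(200, str(item_id), "text/plain")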
q9789
|
StoredInstance.check_event
|
train
|
def check_event(self, event):
# type: (ServiceEvent) -> bool
"""
Tests if the given service event must be handled or ignored, based
on the state of the iPOPO service and on the content of the event.
:param event: A service event
:return: True if the event can be handled, False if it must be ignored
"""
with self._lock:
if self.state == StoredInstance.KILLED:
# This call may have been blocked by the internal state lock,
# ignore it
return False
return self.__safe_handlers_callback("check_event", event)
|
python
|
{
"resource": ""
}
|
q9790
|
StoredInstance.bind
|
train
|
def bind(self, dependency, svc, svc_ref):
# type: (Any, Any, ServiceReference) -> None
"""
Called by a dependency manager to inject a new service and update the
component life cycle.
"""
with self._lock:
self.__set_binding(dependency, svc, svc_ref)
self.check_lifecycle()
|
python
|
{
"resource": ""
}
|
q9791
|
StoredInstance.update
|
train
|
def update(self, dependency, svc, svc_ref, old_properties, new_value=False):
# type: (Any, Any, ServiceReference, dict, bool) -> None
"""
Called by a dependency manager when the properties of an injected
dependency have been updated.
:param dependency: The dependency handler
:param svc: The injected service
:param svc_ref: The reference of the injected service
:param old_properties: Previous properties of the dependency
:param new_value: If True, inject the new value of the handler
"""
with self._lock:
self.__update_binding(
dependency, svc, svc_ref, old_properties, new_value
)
self.check_lifecycle()
|
python
|
{
"resource": ""
}
|
q9792
|
StoredInstance.unbind
|
train
|
def unbind(self, dependency, svc, svc_ref):
# type: (Any, Any, ServiceReference) -> None
"""
Called by a dependency manager to remove an injected service and to
update the component life cycle.
"""
with self._lock:
# Invalidate first (if needed)
self.check_lifecycle()
# Call unbind() and remove the injection
self.__unset_binding(dependency, svc, svc_ref)
# Try a new configuration
if self.update_bindings():
self.check_lifecycle()
|
python
|
{
"resource": ""
}
|
q9793
|
StoredInstance.set_controller_state
|
train
|
def set_controller_state(self, name, value):
# type: (str, bool) -> None
"""
Sets the state of the controller with the given name
:param name: The name of the controller
:param value: The new value of the controller
"""
with self._lock:
self._controllers_state[name] = value
self.__safe_handlers_callback("on_controller_change", name, value)
|
python
|
{
"resource": ""
}
|
q9794
|
StoredInstance.update_property
|
train
|
def update_property(self, name, old_value, new_value):
# type: (str, Any, Any) -> None
"""
Handles a property changed event
:param name: The changed property name
:param old_value: The previous property value
:param new_value: The new property value
"""
with self._lock:
self.__safe_handlers_callback(
"on_property_change", name, old_value, new_value
)
|
python
|
{
"resource": ""
}
|
q9795
|
StoredInstance.update_hidden_property
|
train
|
def update_hidden_property(self, name, old_value, new_value):
# type: (str, Any, Any) -> None
"""
Handles a hidden property changed event
:param name: The changed property name
:param old_value: The previous property value
:param new_value: The new property value
"""
with self._lock:
self.__safe_handlers_callback(
"on_hidden_property_change", name, old_value, new_value
)
|
python
|
{
"resource": ""
}
|
q9796
|
StoredInstance.get_handlers
|
train
|
def get_handlers(self, kind=None):
"""
Retrieves the handlers of the given kind. If kind is None, all handlers
are returned.
:param kind: The kind of the handlers to return
:return: A list of handlers, or an empty list
"""
with self._lock:
if kind is not None:
try:
return self._handlers[kind][:]
except KeyError:
return []
return self.__all_handlers.copy()
|
python
|
{
"resource": ""
}
|
q9797
|
StoredInstance.check_lifecycle
|
train
|
def check_lifecycle(self):
"""
Tests if the state of the component must be updated, based on its own
state and on the state of its dependencies
"""
with self._lock:
# Validation flags
was_valid = self.state == StoredInstance.VALID
can_validate = self.state not in (
StoredInstance.VALIDATING,
StoredInstance.VALID,
)
# Test the validity of all handlers
handlers_valid = self.__safe_handlers_callback(
"is_valid", break_on_false=True
)
if was_valid and not handlers_valid:
# A dependency is missing
self.invalidate(True)
elif (
can_validate and handlers_valid and self._ipopo_service.running
):
# We're all good
self.validate(True)
|
python
|
{
"resource": ""
}
|
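The decision taken by check_lifecycle can be summarised as a small pure function of the current state and two flags; the sketch below uses string labels instead of the real StoredInstance constants.

def next_action(state, handlers_valid, ipopo_running):
    was_valid = state == "VALID"
    can_validate = state not in ("VALIDATING", "VALID")
    if was_valid and not handlers_valid:
        # A dependency went away: the component must be invalidated
        return "invalidate"
    if can_validate and handlers_valid and ipopo_running:
        # Everything is bound and iPOPO is running: validate
        return "validate"
    return "no-op"

assert next_action("VALID", False, True) == "invalidate"
assert next_action("INVALID", True, True) == "validate"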
q9798
|
StoredInstance.update_bindings
|
train
|
def update_bindings(self):
# type: () -> bool
"""
Updates the bindings of the given component
:return: True if the component can be validated
"""
with self._lock:
all_valid = True
for handler in self.get_handlers(handlers_const.KIND_DEPENDENCY):
# Try to bind
self.__safe_handler_callback(handler, "try_binding")
# Update the validity flag
all_valid &= self.__safe_handler_callback(
handler, "is_valid", only_boolean=True, none_as_true=True
)
return all_valid
|
python
|
{
"resource": ""
}
|
q9799
|
StoredInstance.retry_erroneous
|
train
|
def retry_erroneous(self, properties_update):
# type: (dict) -> int
"""
Removes the ERRONEOUS state from a component and retries a validation
:param properties_update: A dictionary to update component properties
:return: The new state of the component
"""
with self._lock:
if self.state != StoredInstance.ERRONEOUS:
# Not in erroneous state: ignore
return self.state
# Update properties
if properties_update:
self.context.properties.update(properties_update)
# Reset state
self.state = StoredInstance.INVALID
self.error_trace = None
# Retry
self.check_lifecycle()
# Report the new state (still ERRONEOUS if the retry failed)
return self.state
|
python
|
{
"resource": ""
}
|