_id
stringlengths 2
7
| title
stringlengths 1
88
| partition
stringclasses 3
values | text
stringlengths 75
19.8k
| language
stringclasses 1
value | meta_information
dict |
|---|---|---|---|---|---|
q12800
|
chmod_native
|
train
|
def chmod_native(path, mode_expression, recursive=False):
    """Apply a chmod mode expression to *path* by shelling out to chmod(1).

    The built-in os.chmod neither understands symbolic expressions such as
    "+X" nor offers fast recursive application, so the external POSIX tool
    is used instead.

    :param path: file or directory to change.
    :param mode_expression: chmod mode string (octal or symbolic).
    :param recursive: apply recursively with -R when True.
    """
    argv = ["chmod"]
    if recursive:
        argv += ["-R"]
    argv += [mode_expression, path]
    subprocess.check_call(argv)
|
python
|
{
"resource": ""
}
|
q12801
|
file_sha1
|
train
|
def file_sha1(path):
    """Return the hex-encoded SHA1 digest of the file at *path*.

    Reads the file in 1 KiB chunks so arbitrarily large files can be
    hashed without loading them into memory.
    """
    digest = hashlib.sha1()
    with open(path, "rb") as handle:
        for chunk in iter(lambda: handle.read(1024), b""):
            digest.update(chunk)
    return digest.hexdigest()
|
python
|
{
"resource": ""
}
|
q12802
|
ImageFrame.get_image
|
train
|
async def get_image(
    self,
    input_source: str,
    output_format: str = IMAGE_JPEG,
    extra_cmd: Optional[str] = None,
    timeout: int = 15,
) -> Optional[bytes]:
    """Open FFmpeg process as capture 1 frame.

    Returns the encoded frame bytes, or None on startup failure/timeout.
    """
    # Grab exactly one video frame, no audio, in the requested codec.
    opened = await self.open(
        cmd=["-an", "-frames:v", "1", "-c:v", output_format],
        input_source=input_source,
        output="-f image2pipe -",
        extra_cmd=extra_cmd,
    )
    if not opened:
        _LOGGER.warning("Error starting FFmpeg.")
        return None
    try:
        # communicate() blocks, so run it on the default executor.
        read_frame = functools.partial(self._proc.communicate, timeout=timeout)
        image, _ = await self._loop.run_in_executor(None, read_frame)
    except (subprocess.TimeoutExpired, ValueError):
        _LOGGER.warning("Timeout reading image.")
        self.kill()
        return None
    return image
|
python
|
{
"resource": ""
}
|
q12803
|
FFVersion.get_version
|
train
|
async def get_version(self, timeout: int = 15) -> Optional[str]:
    """Execute FFmpeg process and parse the version information.
    Return full FFmpeg version string. Such as 3.4.2-tessus
    """
    # Only the banner is needed, so run ffmpeg with -version alone.
    is_open = await self.open(cmd=["-version"], input_source=None, output="")
    if not is_open:
        _LOGGER.warning("Error starting FFmpeg.")
        return
    try:
        # decode() stays inside the try: UnicodeDecodeError is a ValueError.
        communicate = functools.partial(self._proc.communicate, timeout=timeout)
        output, _ = await self._loop.run_in_executor(None, communicate)
        found = re.search(r"ffmpeg version (\S*)", output.decode())
        if found is not None:
            return found.group(1)
    except (subprocess.TimeoutExpired, ValueError):
        _LOGGER.warning("Timeout reading stdout.")
        self.kill()
    return None
|
python
|
{
"resource": ""
}
|
q12804
|
CameraMjpeg.open_camera
|
train
|
def open_camera(
    self, input_source: str, extra_cmd: Optional[str] = None
) -> Coroutine:
    """Open FFmpeg process as mjpeg video stream.
    Return A coroutine.
    """
    # Video-only mjpeg transcode, multiplexed as mpjpeg to stdout.
    return self.open(
        cmd=["-an", "-c:v", "mjpeg"],
        input_source=input_source,
        output="-f mpjpeg -",
        extra_cmd=extra_cmd,
    )
|
python
|
{
"resource": ""
}
|
q12805
|
NVRTCInterface._load_nvrtc_lib
|
train
|
def _load_nvrtc_lib(self, lib_path):
    """
    Loads the NVRTC shared library, with an optional search path in
    lib_path.

    :param lib_path: explicit path/name of the NVRTC library; when empty,
        a platform-specific default library name is used.
    :raises NVRTCException: when running under 32-bit Python.
    """
    # NVRTC is 64-bit only; a 64-bit process has 8-byte pointers.
    if sizeof(c_void_p) == 8:
        if system() == 'Windows':
            # NOTE(review): hard-coded CUDA 9.2 DLL name - newer toolkits
            # ship e.g. nvrtc64_110.dll; confirm supported CUDA versions.
            def_lib_name = 'nvrtc64_92.dll'
        elif system() == 'Darwin':
            def_lib_name = 'libnvrtc.dylib'
        else:
            def_lib_name = 'libnvrtc.so'
    else:
        raise NVRTCException('NVRTC is not supported on 32-bit platforms.')
    # An explicitly supplied lib_path overrides the default library name.
    if len(lib_path) == 0:
        name = def_lib_name
    else:
        name = lib_path
    self._lib = cdll.LoadLibrary(name)
    # Declare argument/return prototypes for every NVRTC entry point this
    # wrapper calls, so ctypes marshals values correctly.
    self._lib.nvrtcCreateProgram.argtypes = [
        POINTER(c_void_p),  # prog
        c_char_p,  # src
        c_char_p,  # name
        c_int,  # numHeaders
        POINTER(c_char_p),  # headers
        POINTER(c_char_p)  # include_names
    ]
    self._lib.nvrtcCreateProgram.restype = c_int
    self._lib.nvrtcDestroyProgram.argtypes = [
        POINTER(c_void_p)  # prog
    ]
    self._lib.nvrtcDestroyProgram.restype = c_int
    self._lib.nvrtcCompileProgram.argtypes = [
        c_void_p,  # prog
        c_int,  # numOptions
        POINTER(c_char_p)  # options
    ]
    self._lib.nvrtcCompileProgram.restype = c_int
    self._lib.nvrtcGetPTXSize.argtypes = [
        c_void_p,  # prog
        POINTER(c_size_t)  # ptxSizeRet
    ]
    self._lib.nvrtcGetPTXSize.restype = c_int
    self._lib.nvrtcGetPTX.argtypes = [
        c_void_p,  # prog
        c_char_p  # ptx
    ]
    self._lib.nvrtcGetPTX.restype = c_int
    self._lib.nvrtcGetProgramLogSize.argtypes = [
        c_void_p,  # prog
        POINTER(c_size_t)  # logSizeRet
    ]
    self._lib.nvrtcGetProgramLogSize.restype = c_int
    self._lib.nvrtcGetProgramLog.argtypes = [
        c_void_p,  # prog
        c_char_p  # log
    ]
    self._lib.nvrtcGetProgramLog.restype = c_int
    self._lib.nvrtcAddNameExpression.argtypes = [
        c_void_p,  # prog
        c_char_p  # nameExpression
    ]
    self._lib.nvrtcAddNameExpression.restype = c_int
    self._lib.nvrtcGetLoweredName.argtypes = [
        c_void_p,  # prog
        c_char_p,  # nameExpression
        POINTER(c_char_p)  # loweredName
    ]
    self._lib.nvrtcGetLoweredName.restype = c_int
    # nvrtcGetErrorString returns a static C string, hence c_char_p restype.
    self._lib.nvrtcGetErrorString.argtypes = [
        c_int  # result
    ]
    self._lib.nvrtcGetErrorString.restype = c_char_p
    self._lib.nvrtcVersion.argtypes = [
        POINTER(c_int),  # major
        POINTER(c_int)  # minor
    ]
    self._lib.nvrtcVersion.restype = c_int
|
python
|
{
"resource": ""
}
|
q12806
|
NVRTCInterface.nvrtcCreateProgram
|
train
|
def nvrtcCreateProgram(self, src, name, headers, include_names):
    """
    Creates and returns a new NVRTC program object.
    """
    prog = c_void_p()
    # Build parallel C string arrays for headers and their include names.
    headers_arr = (c_char_p * len(headers))(*encode_str_list(headers))
    include_arr = (c_char_p * len(include_names))(*encode_str_list(include_names))
    status = self._lib.nvrtcCreateProgram(
        byref(prog),
        c_char_p(encode_str(src)),
        c_char_p(encode_str(name)),
        len(headers),
        headers_arr,
        include_arr,
    )
    self._throw_on_error(status)
    return prog
|
python
|
{
"resource": ""
}
|
q12807
|
NVRTCInterface.nvrtcDestroyProgram
|
train
|
def nvrtcDestroyProgram(self, prog):
    """
    Destroys the given NVRTC program object.
    """
    self._throw_on_error(self._lib.nvrtcDestroyProgram(byref(prog)))
|
python
|
{
"resource": ""
}
|
q12808
|
NVRTCInterface.nvrtcCompileProgram
|
train
|
def nvrtcCompileProgram(self, prog, options):
    """
    Compiles the NVRTC program object into PTX, using the provided options
    array. See the NVRTC API documentation for accepted options.
    """
    # Marshal the option strings into a C array of char pointers.
    options_arr = (c_char_p * len(options))(*encode_str_list(options))
    self._throw_on_error(
        self._lib.nvrtcCompileProgram(prog, len(options), options_arr))
|
python
|
{
"resource": ""
}
|
q12809
|
NVRTCInterface.nvrtcGetPTX
|
train
|
def nvrtcGetPTX(self, prog):
    """
    Returns the compiled PTX for the NVRTC program object.
    """
    # Query the size first, then fetch into an exactly-sized buffer.
    ptx_size = c_size_t()
    self._throw_on_error(self._lib.nvrtcGetPTXSize(prog, byref(ptx_size)))
    ptx_buf = create_string_buffer(ptx_size.value)
    self._throw_on_error(self._lib.nvrtcGetPTX(prog, ptx_buf))
    return ptx_buf.value.decode('utf-8')
|
python
|
{
"resource": ""
}
|
q12810
|
NVRTCInterface.nvrtcGetProgramLog
|
train
|
def nvrtcGetProgramLog(self, prog):
    """
    Returns the log for the NVRTC program object.
    Only useful after calls to nvrtcCompileProgram or nvrtcVerifyProgram.
    """
    # Two-step fetch: ask for the log size, then read it into a buffer.
    log_size = c_size_t()
    self._throw_on_error(self._lib.nvrtcGetProgramLogSize(prog, byref(log_size)))
    log_buf = create_string_buffer(log_size.value)
    self._throw_on_error(self._lib.nvrtcGetProgramLog(prog, log_buf))
    return log_buf.value.decode('utf-8')
|
python
|
{
"resource": ""
}
|
q12811
|
NVRTCInterface.nvrtcGetErrorString
|
train
|
def nvrtcGetErrorString(self, code):
    """
    Returns a text identifier for the given NVRTC status code.
    """
    # restype is c_char_p, so the call yields bytes to decode.
    return self._lib.nvrtcGetErrorString(c_int(code)).decode('utf-8')
|
python
|
{
"resource": ""
}
|
q12812
|
HAFFmpeg.is_running
|
train
|
def is_running(self) -> bool:
    """Return True if ffmpeg is running."""
    proc = self._proc
    # running means: a process exists and has not yet exited
    return proc is not None and proc.returncode is None
|
python
|
{
"resource": ""
}
|
q12813
|
HAFFmpeg._generate_ffmpeg_cmd
|
train
|
def _generate_ffmpeg_cmd(
self,
cmd: List[str],
input_source: Optional[str],
output: Optional[str],
extra_cmd: Optional[str] = None,
) -> None:
"""Generate ffmpeg command line."""
self._argv = [self._ffmpeg]
# start command init
if input_source is not None:
self._put_input(input_source)
self._argv.extend(cmd)
# exists a extra cmd from customer
if extra_cmd is not None:
self._argv.extend(shlex.split(extra_cmd))
self._merge_filters()
self._put_output(output)
|
python
|
{
"resource": ""
}
|
q12814
|
HAFFmpeg._put_input
|
train
|
def _put_input(self, input_source: str) -> None:
"""Put input string to ffmpeg command."""
input_cmd = shlex.split(str(input_source))
if len(input_cmd) > 1:
self._argv.extend(input_cmd)
else:
self._argv.extend(["-i", input_source])
|
python
|
{
"resource": ""
}
|
q12815
|
HAFFmpeg._put_output
|
train
|
def _put_output(self, output: Optional[str]) -> None:
"""Put output string to ffmpeg command."""
if output is None:
self._argv.extend(["-f", "null", "-"])
return
output_cmd = shlex.split(str(output))
if len(output_cmd) > 1:
self._argv.extend(output_cmd)
else:
self._argv.append(output)
|
python
|
{
"resource": ""
}
|
q12816
|
HAFFmpeg._merge_filters
|
train
|
def _merge_filters(self) -> None:
"""Merge all filter config in command line."""
for opts in (["-filter:a", "-af"], ["-filter:v", "-vf"]):
filter_list = []
new_argv = []
cmd_iter = iter(self._argv)
for element in cmd_iter:
if element in opts:
filter_list.insert(0, next(cmd_iter))
else:
new_argv.append(element)
# update argv if changes
if filter_list:
new_argv.extend([opts[0], ",".join(filter_list)])
self._argv = new_argv.copy()
|
python
|
{
"resource": ""
}
|
q12817
|
HAFFmpeg.open
|
train
|
async def open(
    self,
    cmd: List[str],
    input_source: Optional[str],
    output: Optional[str] = "-",
    extra_cmd: Optional[str] = None,
    stdout_pipe: bool = True,
    stderr_pipe: bool = False,
) -> bool:
    """Start a ffmpeg instance and pipe output.

    :param cmd: ffmpeg options placed between input and output.
    :param input_source: input URL/options, or None for no input argument.
    :param output: output spec; None means discard output.
    :param stdout_pipe: pipe stdout when True, else send to devnull.
    :param stderr_pipe: pipe stderr when True, else send to devnull.
    :returns: True when a process is running after this call.
    """
    stdout = subprocess.PIPE if stdout_pipe else subprocess.DEVNULL
    stderr = subprocess.PIPE if stderr_pipe else subprocess.DEVNULL
    # NOTE(review): `is_running` is referenced without parentheses -
    # presumably a property on this class; confirm the decorator.
    if self.is_running:
        _LOGGER.warning("FFmpeg is already running!")
        return True
    # set command line
    self._generate_ffmpeg_cmd(cmd, input_source, output, extra_cmd)
    # start ffmpeg
    _LOGGER.debug("Start FFmpeg with %s", str(self._argv))
    try:
        # Popen blocks on fork/exec, so run it on the default executor.
        proc_func = functools.partial(
            subprocess.Popen,
            self._argv,
            bufsize=0,
            stdin=subprocess.PIPE,
            stdout=stdout,
            stderr=stderr,
        )
        self._proc = await self._loop.run_in_executor(None, proc_func)
    except Exception as err:  # pylint: disable=broad-except
        _LOGGER.exception("FFmpeg fails %s", err)
        self._clear()
        return False
    return self._proc is not None
|
python
|
{
"resource": ""
}
|
q12818
|
HAFFmpeg.kill
|
train
|
def kill(self) -> None:
    """Kill ffmpeg job.

    Safe to call when no process was ever started; the dead process is
    reaped on the executor so this call never blocks.
    """
    # Guard against kill() before/after a failed open(): _proc may be None,
    # and the original code raised AttributeError in that case.
    if self._proc is None:
        return
    self._proc.kill()
    # reap the child in the background so kill() stays non-blocking
    self._loop.run_in_executor(None, self._proc.communicate)
|
python
|
{
"resource": ""
}
|
q12819
|
HAFFmpeg.get_reader
|
train
|
async def get_reader(self, source=FFMPEG_STDOUT) -> asyncio.StreamReader:
    """Create and return streamreader."""
    reader = asyncio.StreamReader(loop=self._loop)
    protocol = asyncio.StreamReaderProtocol(reader)
    # pick the ffmpeg pipe the caller wants to consume
    pipe = self._proc.stdout if source == FFMPEG_STDOUT else self._proc.stderr
    # Attach stream
    await self._loop.connect_read_pipe(lambda: protocol, pipe)
    return reader
|
python
|
{
"resource": ""
}
|
q12820
|
HAFFmpegWorker._process_lines
|
train
|
async def _process_lines(self, pattern: Optional[str] = None) -> None:
    """Read line from pipe they match with pattern.

    Lines matching *pattern* (every line when pattern is None) are put on
    the queue; a final None sentinel signals end of stream to consumers.
    """
    if pattern is not None:
        cmp = re.compile(pattern)
        _LOGGER.debug("Start working with pattern '%s'.", pattern)
    # read lines
    while self.is_running:
        try:
            line = await self._input.readline()
            if not line:
                # EOF - ffmpeg closed its pipe
                break
            line = line.decode()
        except Exception:  # pylint: disable=broad-except
            # best-effort reader: any pipe/decode error ends the loop
            break
        match = True if pattern is None else cmp.search(line)
        if match:
            _LOGGER.debug("Process: %s", line)
            await self._que.put(line)
    try:
        # wait for process exit so the child is reaped
        await self._loop.run_in_executor(None, self._proc.wait)
    finally:
        # sentinel: tell consumers the stream is finished
        await self._que.put(None)
        _LOGGER.debug("Close read ffmpeg output.")
|
python
|
{
"resource": ""
}
|
q12821
|
HAFFmpegWorker.start_worker
|
train
|
async def start_worker(
    self,
    cmd: List[str],
    input_source: str,
    output: Optional[str] = None,
    extra_cmd: Optional[str] = None,
    pattern: Optional[str] = None,
    reading: str = FFMPEG_STDERR,
) -> None:
    """Start ffmpeg do process data from output.

    :param pattern: optional regex; only matching lines reach the queue.
    :param reading: which ffmpeg stream to read (stderr by default).
    """
    if self.is_running:
        # fixed typo in log message: "allready" -> "already"
        _LOGGER.warning("Can't start worker. It is already running!")
        return
    # pipe only the stream we intend to read
    if reading == FFMPEG_STDERR:
        stdout = False
        stderr = True
    else:
        stdout = True
        stderr = False
    # start ffmpeg and reading to queue
    await self.open(
        cmd=cmd,
        input_source=input_source,
        output=output,
        extra_cmd=extra_cmd,
        stdout_pipe=stdout,
        stderr_pipe=stderr,
    )
    self._input = await self.get_reader(reading)
    # start background processing
    self._read_task = self._loop.create_task(self._process_lines(pattern))
    self._loop.create_task(self._worker_process())
|
python
|
{
"resource": ""
}
|
q12822
|
Program.compile
|
train
|
def compile(self, options=None):
    """
    Compiles the program object to PTX using the compiler options
    specified in `options`.

    :param options: optional list of NVRTC compiler option strings.
    :returns: generated PTX source as a string.
    :raises ProgramException: on compilation failure, carrying the log.
    """
    # Mutable default ([]) replaced by None to avoid the shared-default
    # pitfall; behavior for callers is unchanged.
    if options is None:
        options = []
    try:
        self._interface.nvrtcCompileProgram(self._program, options)
        return self._interface.nvrtcGetPTX(self._program)
    except NVRTCException as err:
        log = self._interface.nvrtcGetProgramLog(self._program)
        # chain the original NVRTC error for easier debugging
        raise ProgramException(log) from err
|
python
|
{
"resource": ""
}
|
q12823
|
SensorNoise.open_sensor
|
train
|
def open_sensor(
    self,
    input_source: str,
    output_dest: Optional[str] = None,
    extra_cmd: Optional[str] = None,
) -> Coroutine:
    """Open FFmpeg process for reading an audio stream.
    Return a coroutine.
    """
    # drop video, run silencedetect with the configured noise floor
    silence_filter = "silencedetect=n={}dB:d=1".format(self._peak)
    # run ffmpeg, read output
    return self.start_worker(
        cmd=["-vn", "-filter:a", silence_filter],
        input_source=input_source,
        output=output_dest,
        extra_cmd=extra_cmd,
        pattern="silence",
    )
|
python
|
{
"resource": ""
}
|
q12824
|
SensorMotion.open_sensor
|
train
|
def open_sensor(
    self, input_source: str, extra_cmd: Optional[str] = None
) -> Coroutine:
    """Open FFmpeg process a video stream for motion detection.
    Return a coroutine.
    """
    # keep only frames whose scene-change score exceeds the threshold
    scene_filter = "select=gt(scene\\,{0})".format(self._changes / 100)
    # run ffmpeg, read output
    return self.start_worker(
        cmd=["-an", "-filter:v", scene_filter],
        input_source=input_source,
        output="-f framemd5 -",
        extra_cmd=extra_cmd,
        pattern=self.MATCH,
        reading=FFMPEG_STDOUT,
    )
|
python
|
{
"resource": ""
}
|
q12825
|
DeviceData.ca_id
|
train
|
def ca_id(self, ca_id):
    """
    Sets the ca_id of this DeviceData.
    The certificate issuer's ID.
    :param ca_id: The ca_id of this DeviceData.
    :type: str
    """
    # reject issuer IDs above the 500-character API limit; None is allowed
    too_long = ca_id is not None and len(ca_id) > 500
    if too_long:
        raise ValueError("Invalid value for `ca_id`, length must be less than or equal to `500`")
    self._ca_id = ca_id
|
python
|
{
"resource": ""
}
|
q12826
|
DeviceData.device_class
|
train
|
def device_class(self, device_class):
    """
    Sets the device_class of this DeviceData.
    An ID representing the model and hardware revision of the device.
    :param device_class: The device_class of this DeviceData.
    :type: str
    """
    # API limits the class identifier to 32 characters; None is allowed
    too_long = device_class is not None and len(device_class) > 32
    if too_long:
        raise ValueError("Invalid value for `device_class`, length must be less than or equal to `32`")
    self._device_class = device_class
|
python
|
{
"resource": ""
}
|
q12827
|
DeviceData.device_key
|
train
|
def device_key(self, device_key):
    """
    Sets the device_key of this DeviceData.
    The fingerprint of the device certificate.
    :param device_key: The device_key of this DeviceData.
    :type: str
    """
    # certificate fingerprints may be at most 512 characters; None is allowed
    too_long = device_key is not None and len(device_key) > 512
    if too_long:
        raise ValueError("Invalid value for `device_key`, length must be less than or equal to `512`")
    self._device_key = device_key
|
python
|
{
"resource": ""
}
|
q12828
|
DeviceData.endpoint_type
|
train
|
def endpoint_type(self, endpoint_type):
    """
    Sets the endpoint_type of this DeviceData.
    The endpoint type of the device. For example, the device is a gateway.
    :param endpoint_type: The endpoint_type of this DeviceData.
    :type: str
    """
    # endpoint types are capped at 64 characters; None is allowed
    too_long = endpoint_type is not None and len(endpoint_type) > 64
    if too_long:
        raise ValueError("Invalid value for `endpoint_type`, length must be less than or equal to `64`")
    self._endpoint_type = endpoint_type
|
python
|
{
"resource": ""
}
|
q12829
|
DeviceData.mechanism
|
train
|
def mechanism(self, mechanism):
    """
    Sets the mechanism of this DeviceData.
    The ID of the channel used to communicate with the device.
    :param mechanism: The mechanism of this DeviceData.
    :type: str
    """
    allowed_values = ["connector", "direct"]
    if mechanism in allowed_values:
        self._mechanism = mechanism
        return
    raise ValueError(
        "Invalid value for `mechanism` ({0}), must be one of {1}"
        .format(mechanism, allowed_values)
    )
|
python
|
{
"resource": ""
}
|
q12830
|
BaseAPI._verify_filters
|
train
|
def _verify_filters(self, kwargs, obj, encode=False):
    """Legacy entrypoint with 'encode' flag"""
    # choose the legacy (URL-encoding) or the modern formatter
    formatter = filters.legacy_filter_formatter if encode else filters.filter_formatter
    return formatter(kwargs, obj._get_attributes_map())
|
python
|
{
"resource": ""
}
|
q12831
|
BaseAPI.get_last_api_metadata
|
train
|
def get_last_api_metadata(self):
    """Get meta data for the last Mbed Cloud API call.
    :returns: meta data of the last Mbed Cloud API call
    :rtype: ApiMetadata
    """
    newest = None
    for _, api in iteritems(self.apis):
        client = api.api_client
        if client is None:
            continue
        candidate = client.get_last_metadata()
        if candidate is None or candidate.get('timestamp', None) is None:
            continue
        # keep whichever call happened most recently
        if newest is None or candidate["timestamp"] >= newest["timestamp"]:
            newest = candidate
    if newest is None:
        return None
    return ApiMetadata(newest.get("url"),
                       newest.get("method"),
                       newest.get("response", None),
                       newest.get("return_data", None),
                       newest.get("exception", None))
|
python
|
{
"resource": ""
}
|
q12832
|
StubAPI.success
|
train
|
def success(self, **kwargs):
    """Returns all arguments received in init and this method call"""
    response = dict(success=True)
    # call-time kwargs first, then init kwargs (init values win on clash)
    response.update(kwargs)
    response.update(self.kwargs)
    # check dates can be manipulated
    response['test_argument3'] = response['test_argument3'] + datetime.timedelta(days=1)
    return response
|
python
|
{
"resource": ""
}
|
q12833
|
BaseObject.update_attributes
|
train
|
def update_attributes(self, updates):
    """Update attributes."""
    # accept either a plain dict or an API object exposing to_dict()
    if not isinstance(updates, dict):
        updates = updates.to_dict()
    # fill only attributes present in updates and not already set
    for sdk_key, spec_key in self._get_attributes_map().items():
        private_name = '_%s' % sdk_key
        if spec_key in updates and not hasattr(self, private_name):
            setattr(self, private_name, updates[spec_key])
|
python
|
{
"resource": ""
}
|
q12834
|
BulkResponse.etag
|
train
|
def etag(self, etag):
    """
    Sets the etag of this BulkResponse.
    etag
    :param etag: The etag of this BulkResponse.
    :type: str
    """
    if etag is None:
        raise ValueError("Invalid value for `etag`, must not be `None`")
    # NOTE(review): an unanchored search of `[A-Za-z0-9]{0,256}` matches any
    # string (zero-length match), so this check can never fire - kept as-is
    # to preserve the generated API behavior.
    if not re.search('[A-Za-z0-9]{0,256}', etag):
        raise ValueError("Invalid value for `etag`, must be a follow pattern or equal to `/[A-Za-z0-9]{0,256}/`")
    self._etag = etag
|
python
|
{
"resource": ""
}
|
q12835
|
BulkResponse.id
|
train
|
def id(self, id):
    """
    Sets the id of this BulkResponse.
    Bulk ID
    :param id: The id of this BulkResponse.
    :type: str
    """
    if id is None:
        raise ValueError("Invalid value for `id`, must not be `None`")
    # value must begin with exactly 32 alphanumeric characters
    if not re.search('^[A-Za-z0-9]{32}', id):
        raise ValueError("Invalid value for `id`, must be a follow pattern or equal to `/^[A-Za-z0-9]{32}/`")
    self._id = id
|
python
|
{
"resource": ""
}
|
q12836
|
AccountInfo.mfa_status
|
train
|
def mfa_status(self, mfa_status):
    """
    Sets the mfa_status of this AccountInfo.
    The enforcement status of the multi-factor authentication, either 'enforced' or 'optional'.
    :param mfa_status: The mfa_status of this AccountInfo.
    :type: str
    """
    allowed_values = ["enforced", "optional"]
    if mfa_status in allowed_values:
        self._mfa_status = mfa_status
        return
    raise ValueError(
        "Invalid value for `mfa_status` ({0}), must be one of {1}"
        .format(mfa_status, allowed_values)
    )
|
python
|
{
"resource": ""
}
|
q12837
|
ServicePackageQuotaHistoryReservation.account_id
|
train
|
def account_id(self, account_id):
    """
    Sets the account_id of this ServicePackageQuotaHistoryReservation.
    Account ID.
    :param account_id: The account_id of this ServicePackageQuotaHistoryReservation.
    :type: str
    """
    if account_id is None:
        raise ValueError("Invalid value for `account_id`, must not be `None`")
    # value is non-None past this point: enforce the 1..250 length bounds
    if len(account_id) > 250:
        raise ValueError("Invalid value for `account_id`, length must be less than or equal to `250`")
    if len(account_id) < 1:
        raise ValueError("Invalid value for `account_id`, length must be greater than or equal to `1`")
    self._account_id = account_id
|
python
|
{
"resource": ""
}
|
q12838
|
ServicePackageQuotaHistoryReservation.campaign_name
|
train
|
def campaign_name(self, campaign_name):
    """
    Sets the campaign_name of this ServicePackageQuotaHistoryReservation.
    Textual campaign name for this reservation.
    :param campaign_name: The campaign_name of this ServicePackageQuotaHistoryReservation.
    :type: str
    """
    if campaign_name is None:
        raise ValueError("Invalid value for `campaign_name`, must not be `None`")
    # value is non-None past this point: enforce the 1..250 length bounds
    if len(campaign_name) > 250:
        raise ValueError("Invalid value for `campaign_name`, length must be less than or equal to `250`")
    if len(campaign_name) < 1:
        raise ValueError("Invalid value for `campaign_name`, length must be greater than or equal to `1`")
    self._campaign_name = campaign_name
|
python
|
{
"resource": ""
}
|
q12839
|
ServicePackageQuotaHistoryReservation.id
|
train
|
def id(self, id):
    """
    Sets the id of this ServicePackageQuotaHistoryReservation.
    Reservation ID.
    :param id: The id of this ServicePackageQuotaHistoryReservation.
    :type: str
    """
    if id is None:
        raise ValueError("Invalid value for `id`, must not be `None`")
    # value is non-None past this point: enforce the 1..250 length bounds
    if len(id) > 250:
        raise ValueError("Invalid value for `id`, length must be less than or equal to `250`")
    if len(id) < 1:
        raise ValueError("Invalid value for `id`, length must be greater than or equal to `1`")
    self._id = id
|
python
|
{
"resource": ""
}
|
q12840
|
ApiClient.metadata_wrapper
|
train
|
def metadata_wrapper(fn):
    """Save metadata of last api call.

    Records the request URL, HTTP method and timestamp on the instance
    before dispatching; on failure, the raised exception is stored too.
    """
    @functools.wraps(fn)
    def wrapped_f(self, *args, **kwargs):
        # record before dispatch so even failed calls leave metadata
        self.last_metadata = {
            "url": self.configuration.host + args[0],
            "method": args[1],
            "timestamp": time.time(),
        }
        try:
            return fn(self, *args, **kwargs)
        except Exception as exc:
            self.last_metadata["exception"] = exc
            raise
    return wrapped_f
|
python
|
{
"resource": ""
}
|
q12841
|
_normalise_key_values
|
train
|
def _normalise_key_values(filter_obj, attr_map=None):
    """Converts nested dictionary filters into django-style key value pairs
    Map filter operators and aliases to operator-land
    Additionally, perform replacements according to attribute map
    Automatically assumes __eq if not explicitly defined

    :param filter_obj: mapping of field -> constraint or {operator: constraint}
    :param attr_map: optional sdk-name -> api-name mapping; unknown keys raise
    :raises CloudValueError: on unknown field (when attr_map given) or operator
    """
    new_filter = {}
    for key, constraints in filter_obj.items():
        aliased_key = key
        if attr_map is not None:
            # map the sdk field name to its API spelling; unmapped -> invalid
            aliased_key = attr_map.get(key)
            if aliased_key is None:
                raise CloudValueError(
                    'Invalid key %r for filter attribute; must be one of:\n%s' % (
                        key,
                        attr_map.keys()
                    )
                )
        if not isinstance(constraints, dict):
            # bare value means equality
            constraints = {'eq': constraints}
        for operator, value in constraints.items():
            # FIXME: deprecate this $ nonsense
            canonical_operator = FILTER_OPERATOR_ALIASES.get(operator.lstrip('$'))
            if canonical_operator is None:
                raise CloudValueError(
                    'Invalid operator %r for filter key %s; must be one of:\n%s' % (
                        operator,
                        key,
                        FILTER_OPERATOR_ALIASES.keys()
                    )
                )
            canonical_key = str('%s__%s' % (aliased_key, canonical_operator))
            new_filter[canonical_key] = _normalise_value(value)
    return new_filter
|
python
|
{
"resource": ""
}
|
q12842
|
_get_filter
|
train
|
def _get_filter(sdk_filter, attr_map):
    """Common functionality for filter structures
    :param sdk_filter: {field:constraint, field:{operator:constraint}, ...}
    :return: {field__operator: constraint, ...}
    """
    if not isinstance(sdk_filter, dict):
        raise CloudValueError('filter value must be a dictionary, was %r' % (sdk_filter,))
    # NOTE: pop() mutates the passed-in dict; callers hand over a copy
    custom = sdk_filter.pop('custom_attributes', {})
    combined = _normalise_key_values(filter_obj=sdk_filter, attr_map=attr_map)
    # custom attributes bypass the attribute map and get a distinct prefix
    for key, value in _normalise_key_values(filter_obj=custom).items():
        combined['custom_attributes__%s' % key] = value
    return combined
|
python
|
{
"resource": ""
}
|
q12843
|
legacy_filter_formatter
|
train
|
def legacy_filter_formatter(kwargs, attr_map):
    """Builds a filter for update and device apis
    :param kwargs: expected to contain {'filter/filters': {filter dict}}
    :returns: {'filter': 'url-encoded-validated-filter-string'}
    """
    params = _depluralise_filters_key(copy.copy(kwargs))
    built = _get_filter(sdk_filter=params.pop('filter', {}), attr_map=attr_map)
    if built:
        # strip the implicit __eq suffix and produce a stable ordering
        pairs = sorted((key.rsplit('__eq')[0], value) for key, value in built.items())
        params['filter'] = urllib.parse.urlencode(pairs)
    return params
|
python
|
{
"resource": ""
}
|
q12844
|
filter_formatter
|
train
|
def filter_formatter(kwargs, attr_map):
    """Builds a filter according to the cross-api specification
    :param kwargs: expected to contain {'filter': {filter dict}}
    :returns: {validated filter dict}
    """
    params = _depluralise_filters_key(copy.copy(kwargs))
    built = _get_filter(sdk_filter=params.pop('filter', {}), attr_map=attr_map)
    params.update(built)
    return params
|
python
|
{
"resource": ""
}
|
q12845
|
PaginatedResponse.count
|
train
|
def count(self):
    """Approximate number of results, according to the API"""
    # lazily fetch the total once and memoise it
    total = self._total_count
    if total is None:
        total = self._get_total_count()
        self._total_count = total
    return total
|
python
|
{
"resource": ""
}
|
q12846
|
PaginatedResponse.first
|
train
|
def first(self):
    """Returns the first item from the query, or None if there are no results"""
    # serve from cache when a page has already been fetched
    if self._results_cache:
        return self._results_cache[0]
    # otherwise run a fresh query and pull a single element
    fresh = PaginatedResponse(func=self._func, lwrap_type=self._lwrap_type, **self._kwargs)
    return next(fresh, None)
|
python
|
{
"resource": ""
}
|
q12847
|
PaginatedResponse.data
|
train
|
def data(self):
    """Deprecated. Returns the data as a `list`"""
    import warnings
    message = (
        '`data` attribute is deprecated and will be removed in a future release, '
        'use %s as an iterable instead' % (PaginatedResponse,)
    )
    # stacklevel=2 reports the caller's line, not this one
    warnings.warn(message, category=DeprecationWarning, stacklevel=2)
    return list(self)
|
python
|
{
"resource": ""
}
|
q12848
|
main
|
train
|
def main():
    """Sends release notifications to interested parties
    Currently this is an arm-internal slack channel.
    1. assumes you've set a token for an authorised slack user/bot.
    2. assumes said user/bot is already in channel.
    otherwise: https://github.com/slackapi/python-slackclient#joining-a-channel
    """
    slack_token = os.environ.get('SLACK_API_TOKEN')
    # bail out early when no credentials are configured
    if not slack_token:
        print('no slack token')
        return
    channel_id = os.environ.get('SLACK_CHANNEL', '#mbed-cloud-sdk')
    message = os.environ.get('SLACK_MESSAGE', (
        ':checkered_flag: New version of :snake: Python SDK released: *{version}* '
        '(<https://pypi.org/project/mbed-cloud-sdk/{version}/|PyPI>)'
    ))
    version = os.environ.get('SLACK_NOTIFY_VERSION')
    if not version:
        # fall back to the installed package version, if available
        try:
            import mbed_cloud
        except ImportError:
            pass
        else:
            version = mbed_cloud.__version__
    payload = message.format(version=version) if version else message
    print('notifying slack channel %s with payload:\n%s' % (channel_id, payload))
    SlackClient(slack_token).api_call(
        'chat.postMessage',
        channel=channel_id,
        text=payload,
    )
|
python
|
{
"resource": ""
}
|
q12849
|
TrustedCertificateInternalResp.service
|
train
|
def service(self, service):
    """
    Sets the service of this TrustedCertificateInternalResp.
    Service name where the certificate is to be used.
    :param service: The service of this TrustedCertificateInternalResp.
    :type: str
    """
    if service is None:
        raise ValueError("Invalid value for `service`, must not be `None`")
    allowed_values = ["lwm2m", "bootstrap"]
    if service in allowed_values:
        self._service = service
        return
    raise ValueError(
        "Invalid value for `service` ({0}), must be one of {1}"
        .format(service, allowed_values)
    )
|
python
|
{
"resource": ""
}
|
q12850
|
ReportResponse.month
|
train
|
def month(self, month):
    """
    Sets the month of this ReportResponse.
    Month of requested billing report
    :param month: The month of this ReportResponse.
    :type: str
    """
    if month is None:
        raise ValueError("Invalid value for `month`, must not be `None`")
    # billing months are addressed as YYYY-MM
    if not re.search('^\\d{4}-\\d{2}$', month):
        raise ValueError("Invalid value for `month`, must be a follow pattern or equal to `/^\\d{4}-\\d{2}$/`")
    self._month = month
|
python
|
{
"resource": ""
}
|
q12851
|
ReportBillingData.active_devices
|
train
|
def active_devices(self, active_devices):
    """
    Sets the active_devices of this ReportBillingData.
    :param active_devices: The active_devices of this ReportBillingData.
    :type: int
    """
    if active_devices is None:
        raise ValueError("Invalid value for `active_devices`, must not be `None`")
    # device counts cannot be negative
    if active_devices < 0:
        raise ValueError("Invalid value for `active_devices`, must be a value greater than or equal to `0`")
    self._active_devices = active_devices
|
python
|
{
"resource": ""
}
|
q12852
|
ReportBillingData.firmware_updates
|
train
|
def firmware_updates(self, firmware_updates):
    """
    Sets the firmware_updates of this ReportBillingData.
    :param firmware_updates: The firmware_updates of this ReportBillingData.
    :type: int
    """
    if firmware_updates is None:
        raise ValueError("Invalid value for `firmware_updates`, must not be `None`")
    # update counts cannot be negative
    if firmware_updates < 0:
        raise ValueError("Invalid value for `firmware_updates`, must be a value greater than or equal to `0`")
    self._firmware_updates = firmware_updates
|
python
|
{
"resource": ""
}
|
q12853
|
main
|
train
|
def main():
    """Writes out newsfile if significant version bump.

    Compares the installed package version against the version recorded
    at the previous changelog build; when the first `sigfigs` components
    differ, runs towncrier and stores the new version in the metafile.
    """
    last_known = '0'
    if os.path.isfile(metafile):
        with open(metafile) as fh:
            last_known = fh.read()
    import mbed_cloud
    current = mbed_cloud.__version__
    # how significant a change in version scheme should trigger a new changelog entry
    # (api major, api minor, sdk major, sdk minor, sdk patch)
    sigfigs = 4
    current_version = LooseVersion(current).version
    last_known_version = LooseVersion(last_known).version
    should_towncrier = current_version[:sigfigs] != last_known_version[:sigfigs]
    print('%s -- %s :: current vs previous changelog build' % (current, last_known))
    if should_towncrier:
        print('%s >> %s :: running changelog build' % (current, last_known))
        subprocess.check_call(
            ['towncrier', '--yes'],
            cwd=os.path.join(PROJECT_ROOT, 'docs', 'changelog')
        )
        # remember this version so the next run can compare against it
        with open(metafile, 'w') as fh:
            fh.write(current)
|
python
|
{
"resource": ""
}
|
q12854
|
Config.paths
|
train
|
def paths(self):
    """Get list of paths to look in for configuration data"""
    filename = '.mbed_cloud_config.json'
    candidates = [
        "/etc/%s" % filename,                             # global config for *nix
        os.path.join(os.path.expanduser("~"), filename),  # home directory
        os.path.join(os.getcwd(), filename),              # current directory
        os.environ.get(self.path_from_env_key),           # env-var override
    ]
    return candidates
|
python
|
{
"resource": ""
}
|
q12855
|
Config.load
|
train
|
def load(self, updates):
    """Load configuration from files, dotenv, environment, then explicit updates.

    Later sources override earlier ones; ``updates`` (a mapping) wins overall.
    Calls ``validate`` once everything is merged.

    :param updates: explicit overrides applied last (falsy to skip)
    """
    # Go through in order and override the config (`.mbed_cloud_config.json` loader)
    for path in self.paths():
        if not path:
            continue
        abs_path = os.path.abspath(os.path.expanduser(path))
        if not os.path.isfile(abs_path):
            # Record where we looked, for diagnostics.
            self._using_paths.append('missing: %s' % abs_path)
            continue
        self._using_paths.append(' exists: %s' % abs_path)
        with open(abs_path) as fh:
            self.update(json.load(fh))
    # New dotenv loader - requires explicit instructions to use current working directory
    load_dotenv(find_dotenv(usecwd=True))
    # Pluck config values out of the environment (these override file values)
    for env_var, key in {ENVVAR_API_HOST: 'host', ENVVAR_API_KEY: 'api_key'}.items():
        env_value = os.getenv(env_var)
        if env_value is not None:
            self[key] = env_value
    if updates:
        self.update(updates)
    self.validate()
|
python
|
{
"resource": ""
}
|
q12856
|
expand_dict_as_keys
|
train
|
def expand_dict_as_keys(d):
    """Expand a dictionary into a list of immutable keys via cartesian product.

    :param d: dictionary (of strings or lists)
    :returns: cartesian product of list parts
    """
    pairs_per_key = []
    for key in sorted(d):
        # sorting the inputs keeps itertools.product output in a stable order
        candidates = [
            (key, value)
            for value in utils.ensure_listable(d[key])
            if value is not None
        ]
        if candidates:
            pairs_per_key.append(sorted(candidates))
    return list(itertools.product(*pairs_per_key))
|
python
|
{
"resource": ""
}
|
q12857
|
RoutingBase.create_route
|
train
|
def create_route(self, item, routes):
    """Register *item* under every routing key in *routes* and return it."""
    for key in routes:
        members = self._routes.setdefault(key, set())
        members.add(item)
    return item
|
python
|
{
"resource": ""
}
|
q12858
|
RoutingBase.remove_routes
|
train
|
def remove_routes(self, item, routes):
    """Remove *item* from each matching route, dropping routes left empty.

    :param item: the previously registered item
    :param routes: iterable of routing keys to detach the item from
    """
    for route in routes:
        items = self._routes.get(route)
        if items is None:
            # Route was never created (or already cleaned up): previously this
            # fell through to `None.remove(...)` and raised AttributeError.
            continue
        if item in items:
            # Routes hold sets, whose `.remove` raises KeyError for a missing
            # member - the old `except ValueError` never caught it.
            items.remove(item)
            LOG.debug('removed item from route %s', route)
        if not items:
            self._routes.pop(route)
            LOG.debug('removed route %s', route)
|
python
|
{
"resource": ""
}
|
q12859
|
SubscriptionsManager.get_channel
|
train
|
def get_channel(self, subscription_channel, **observer_params):
    """Get or start the requested channel.

    Registers the channel's routing keys so inbound notifications can be
    dispatched back to it, then configures the channel with this manager.

    :param subscription_channel: a subscription channel instance
    :param observer_params: extra parameters handed to the channel's observer
    :returns: the same channel, configured and routed
    """
    keys = subscription_channel.get_routing_keys()
    # watch keys are unique sets of keys that we will attempt to extract from inbound items
    self.watch_keys.add(frozenset({k_v[0] for key in keys for k_v in key}))
    self.create_route(subscription_channel, keys)
    # NOTE(review): _configure appears to bind the channel to this manager and
    # the connect API - confirm against the channel implementation.
    subscription_channel._configure(self, self.connect_api, observer_params)
    return subscription_channel
|
python
|
{
"resource": ""
}
|
q12860
|
SubscriptionsManager._notify_single_item
|
train
|
def _notify_single_item(self, item):
    """Route a single inbound item to every matching subscribed channel.

    :param item: a notification payload (mapping), already normalised
    :returns: the set of channels triggered by this item
    """
    # channels that this individual item has already triggered
    # (dont want to trigger them again)
    triggered_channels = set()
    for key_set in self.watch_keys:
        # only pluck keys if they exist
        plucked = {
            key_name: item[key_name]
            for key_name in key_set if key_name in item
        }
        # Expand the plucked values into every routing key they could match.
        route_keys = expand_dict_as_keys(plucked)
        for route in route_keys:
            channels = self.get_route_items(route) or {}
            LOG.debug('route table match: %s -> %s', route, channels)
            if not channels:
                LOG.debug(
                    'no subscribers for message.\nkey %s\nroutes: %s',
                    route,
                    self._routes
                )
            for channel in channels:
                if channel in triggered_channels:
                    LOG.debug('skipping dispatch to %s', channel)
                    continue
                LOG.debug('routing dispatch to %s: %s', channel, item)
                try:
                    # Only mark the channel as triggered when notify() returns
                    # truthy (i.e. it accepted the item).
                    channel.notify(item) and triggered_channels.add(channel)
                except Exception:  # noqa
                    LOG.exception('Channel notification failed')
    return triggered_channels
|
python
|
{
"resource": ""
}
|
q12861
|
SubscriptionsManager.notify
|
train
|
def notify(self, data):
    """Notify subscribers that data was received.

    :param data: mapping of channel name to a list of notification items
    :returns: list of channels triggered (may repeat across separate items)
    """
    triggered_channels = []
    for channel_name, items in data.items():
        for item in items or []:
            LOG.debug('notify received: %s', item)
            try:
                # some channels return strings rather than objects (e.g. de-registrations),
                # normalize them here
                item = {'value': item} if isinstance(item, six.string_types) else dict(item)
                # inject the channel name to the data (so channels can filter on it)
                item['channel'] = channel_name
                triggered_channels.extend(list(self._notify_single_item(item)))
            except Exception:  # noqa
                # One bad item must not stop delivery of the rest.
                LOG.exception('Subscription notification failed')
    return triggered_channels
|
python
|
{
"resource": ""
}
|
q12862
|
SubscriptionsManager.unsubscribe_all
|
train
|
def unsubscribe_all(self):
    """Stop every active channel, then halt the notification system."""
    channels = self.list_all()
    for channel in channels:
        channel.ensure_stopped()
    self.connect_api.stop_notifications()
|
python
|
{
"resource": ""
}
|
q12863
|
AggregatedQuotaUsageReport.type
|
train
|
def type(self, type):
"""
Sets the type of this AggregatedQuotaUsageReport.
Type of quota usage entry.
:param type: The type of this AggregatedQuotaUsageReport.
:type: str
"""
if type is None:
raise ValueError("Invalid value for `type`, must not be `None`")
allowed_values = ["reservation", "reservation_release", "reservation_termination", "package_renewal", "package_creation", "package_termination"]
if type not in allowed_values:
raise ValueError(
"Invalid value for `type` ({0}), must be one of {1}"
.format(type, allowed_values)
)
self._type = type
|
python
|
{
"resource": ""
}
|
q12864
|
UpdateAPI.list_campaigns
|
train
|
def list_campaigns(self, **kwargs):
    """List all update campaigns.

    :param int limit: number of campaigns to retrieve
    :param str order: sort direction of campaigns when ordered by creation time (desc|asc)
    :param str after: get campaigns after given campaign ID
    :param dict filters: Dictionary of filters to apply
    :return: List of :py:class:`Campaign` objects
    :rtype: PaginatedResponse
    """
    api = self._get_api(update_service.DefaultApi)
    options = self._verify_sort_options(kwargs)
    options = self._verify_filters(options, Campaign, True)
    return PaginatedResponse(api.update_campaign_list, lwrap_type=Campaign, **options)
|
python
|
{
"resource": ""
}
|
q12865
|
UpdateAPI.get_campaign
|
train
|
def get_campaign(self, campaign_id):
    """Fetch a single existing update campaign.

    :param str campaign_id: Campaign ID to retrieve (Required)
    :return: Update campaign object matching provided ID
    :rtype: Campaign
    """
    api = self._get_api(update_service.DefaultApi)
    raw = api.update_campaign_retrieve(campaign_id)
    return Campaign(raw)
|
python
|
{
"resource": ""
}
|
q12866
|
UpdateAPI.add_campaign
|
train
|
def add_campaign(self, name, device_filter, **kwargs):
    """Add new update campaign.

    Add an update campaign with a name and device filtering. Example:

    .. code-block:: python

        device_api, update_api = DeviceDirectoryAPI(), UpdateAPI()
        # Get a filter to use for update campaign
        query_obj = device_api.get_query(query_id="MYID")
        # Create the campaign
        new_campaign = update_api.add_campaign(
            name="foo",
            device_filter=query_obj.filter
        )

    :param str name: Name of the update campaign (Required)
    :param str device_filter: The device filter to use (Required)
    :param str manifest_id: ID of the manifest with description of the update
    :param str description: Description of the campaign
    :param int scheduled_at: The timestamp at which update campaign is scheduled to start
    :param str state: The state of the campaign. Values:
        "draft", "scheduled", "devicefetch", "devicecopy", "publishing",
        "deploying", "deployed", "manifestremoved", "expired"
    :return: newly created campaign object
    :rtype: Campaign
    """
    # Translate the SDK-style filter into the legacy wire format expected by
    # the update service.
    device_filter = filters.legacy_filter_formatter(
        dict(filter=device_filter),
        Device._get_attributes_map()
    )
    campaign = Campaign._create_request_map(kwargs)
    if 'when' in campaign:
        # FIXME: randomly validating an input here is a sure route to nasty surprises elsewhere
        campaign['when'] = force_utc(campaign['when'])
    body = UpdateCampaignPostRequest(
        name=name,
        device_filter=device_filter['filter'],
        **campaign)
    api = self._get_api(update_service.DefaultApi)
    return Campaign(api.update_campaign_create(body))
|
python
|
{
"resource": ""
}
|
q12867
|
UpdateAPI.update_campaign
|
train
|
def update_campaign(self, campaign_object=None, campaign_id=None, **kwargs):
    """Update an update campaign.

    Either pass an existing ``Campaign`` object (its id and changed fields
    are used), or pass ``campaign_id`` plus keyword fields to change.

    :param :class:`Campaign` campaign_object: Campaign object to update (Required)
    :param str campaign_id: ID of the campaign when no object is given
    :return: updated campaign object
    :rtype: Campaign
    """
    api = self._get_api(update_service.DefaultApi)
    if campaign_object:
        campaign_id = campaign_object.id
        # Build a patch payload containing only modified fields.
        campaign_object = campaign_object._create_patch_request()
    else:
        campaign_object = Campaign._create_request_map(kwargs)
    if 'device_filter' in campaign_object:
        # Translate the SDK filter into the legacy wire format.
        campaign_object["device_filter"] = filters.legacy_filter_formatter(
            dict(filter=campaign_object["device_filter"]),
            Device._get_attributes_map()
        )['filter']
    if 'when' in campaign_object:
        # FIXME: randomly validating an input here is a sure route to nasty surprises elsewhere
        campaign_object['when'] = force_utc(campaign_object['when'])
    return Campaign(api.update_campaign_update(campaign_id=campaign_id,
                                               campaign=campaign_object))
|
python
|
{
"resource": ""
}
|
q12868
|
UpdateAPI.delete_campaign
|
train
|
def delete_campaign(self, campaign_id):
    """Delete an update campaign.

    :param str campaign_id: Campaign ID to delete (Required)
    :return: void
    """
    self._get_api(update_service.DefaultApi).update_campaign_destroy(campaign_id)
|
python
|
{
"resource": ""
}
|
q12869
|
UpdateAPI.list_campaign_device_states
|
train
|
def list_campaign_device_states(self, campaign_id, **kwargs):
    """List the device states within an update campaign.

    :param str campaign_id: Id of the update campaign (Required)
    :param int limit: number of devices state to retrieve
    :param str order: sort direction of device state when ordered by creation time (desc|asc)
    :param str after: get devices state after given id
    :return: List of :py:class:`CampaignDeviceState` objects
    :rtype: PaginatedResponse
    """
    options = self._verify_sort_options(kwargs)
    options = self._verify_filters(options, CampaignDeviceState, True)
    options["campaign_id"] = campaign_id
    api = self._get_api(update_service.DefaultApi)
    return PaginatedResponse(
        api.update_campaign_metadata_list,
        lwrap_type=CampaignDeviceState,
        **options
    )
|
python
|
{
"resource": ""
}
|
q12870
|
UpdateAPI.get_firmware_image
|
train
|
def get_firmware_image(self, image_id):
    """Retrieve a firmware image by its ID.

    :param str image_id: The firmware ID for the image to retrieve (Required)
    :return: FirmwareImage
    """
    api = self._get_api(update_service.DefaultApi)
    raw = api.firmware_image_retrieve(image_id)
    return FirmwareImage(raw)
|
python
|
{
"resource": ""
}
|
q12871
|
UpdateAPI.list_firmware_images
|
train
|
def list_firmware_images(self, **kwargs):
    """List all firmware images.

    :param int limit: number of firmware images to retrieve
    :param str order: ordering of images when ordered by time. 'desc' or 'asc'
    :param str after: get firmware images after given `image_id`
    :param dict filters: Dictionary of filters to apply
    :return: list of :py:class:`FirmwareImage` objects
    :rtype: PaginatedResponse
    """
    api = self._get_api(update_service.DefaultApi)
    options = self._verify_sort_options(kwargs)
    options = self._verify_filters(options, FirmwareImage, True)
    return PaginatedResponse(api.firmware_image_list, lwrap_type=FirmwareImage, **options)
|
python
|
{
"resource": ""
}
|
q12872
|
UpdateAPI.add_firmware_image
|
train
|
def add_firmware_image(self, name, datafile, **kwargs):
    """Upload a new firmware image.

    :param str name: Firmware file short name (Required)
    :param str datafile: The file object or *path* to the firmware image file (Required)
    :param str description: Firmware file description
    :return: the newly created firmware file object
    :rtype: FirmwareImage
    """
    kwargs['name'] = name
    request = FirmwareImage._create_request_map(kwargs)
    # The datafile bypasses the request map and is attached directly.
    request['datafile'] = datafile
    api = self._get_api(update_service.DefaultApi)
    created = api.firmware_image_create(**request)
    return FirmwareImage(created)
|
python
|
{
"resource": ""
}
|
q12873
|
UpdateAPI.delete_firmware_image
|
train
|
def delete_firmware_image(self, image_id):
    """Delete a firmware image.

    :param str image_id: image ID for the firmware to remove/delete (Required)
    :return: void
    """
    self._get_api(update_service.DefaultApi).firmware_image_destroy(image_id=image_id)
|
python
|
{
"resource": ""
}
|
q12874
|
UpdateAPI.get_firmware_manifest
|
train
|
def get_firmware_manifest(self, manifest_id):
    """Retrieve a firmware manifest by its ID.

    :param str manifest_id: ID of manifest to retrieve (Required)
    :return: FirmwareManifest
    """
    api = self._get_api(update_service.DefaultApi)
    raw = api.firmware_manifest_retrieve(manifest_id=manifest_id)
    return FirmwareManifest(raw)
|
python
|
{
"resource": ""
}
|
q12875
|
UpdateAPI.list_firmware_manifests
|
train
|
def list_firmware_manifests(self, **kwargs):
    """List all firmware manifests.

    :param int limit: number of manifests to retrieve
    :param str order: sort direction of manifests when ordered by time. 'desc' or 'asc'
    :param str after: get manifests after given `image_id`
    :param dict filters: Dictionary of filters to apply
    :return: list of :py:class:`FirmwareManifest` objects
    :rtype: PaginatedResponse
    """
    api = self._get_api(update_service.DefaultApi)
    options = self._verify_sort_options(kwargs)
    options = self._verify_filters(options, FirmwareManifest, True)
    return PaginatedResponse(api.firmware_manifest_list, lwrap_type=FirmwareManifest, **options)
|
python
|
{
"resource": ""
}
|
q12876
|
UpdateAPI.add_firmware_manifest
|
train
|
def add_firmware_manifest(self, name, datafile, key_table_file=None, **kwargs):
    """Upload a new firmware manifest.

    :param str name: Manifest file short name (Required)
    :param str datafile: The file object or path to the manifest file (Required)
    :param str key_table_file: The file object or path to the key_table file (Optional)
    :param str description: Manifest file description
    :return: the newly created manifest file object
    :rtype: FirmwareManifest
    """
    # The backend field names are `url`/`key_table_url`, but they actually
    # carry the datafile / key table payloads.
    kwargs['name'] = name
    kwargs['url'] = datafile
    if key_table_file is not None:
        kwargs['key_table_url'] = key_table_file
    request = FirmwareManifest._create_request_map(kwargs)
    api = self._get_api(update_service.DefaultApi)
    return FirmwareManifest(api.firmware_manifest_create(**request))
|
python
|
{
"resource": ""
}
|
q12877
|
UpdateAPI.delete_firmware_manifest
|
train
|
def delete_firmware_manifest(self, manifest_id):
    """Delete an existing manifest.

    :param str manifest_id: Manifest file ID to delete (Required)
    :return: void
    """
    return self._get_api(update_service.DefaultApi).firmware_manifest_destroy(manifest_id)
|
python
|
{
"resource": ""
}
|
q12878
|
Campaign.device_filter
|
train
|
def device_filter(self):
    """The device filter to use.

    :rtype: dict
    """
    stored = self._device_filter
    if isinstance(stored, str):
        # Stored as an encoded query string - decode it back into a dict.
        stored = self._decode_query(stored)
    return stored
|
python
|
{
"resource": ""
}
|
q12879
|
ServicePackageQuotaHistoryItem.reason
|
train
|
def reason(self, reason):
    """Sets the reason of this ServicePackageQuotaHistoryItem.

    Type of quota usage entry.

    :param reason: The reason of this ServicePackageQuotaHistoryItem.
    :type: str
    :raises ValueError: when ``reason`` is ``None`` or not an allowed value
    """
    allowed_values = [
        "reservation",
        "reservation_release",
        "reservation_termination",
        "package_creation",
        "package_renewal",
        "package_termination",
    ]
    if reason is None:
        raise ValueError("Invalid value for `reason`, must not be `None`")
    if reason not in allowed_values:
        message = "Invalid value for `reason` ({0}), must be one of {1}".format(reason, allowed_values)
        raise ValueError(message)
    self._reason = reason
|
python
|
{
"resource": ""
}
|
q12880
|
PreSharedKey.secret_hex
|
train
|
def secret_hex(self, secret_hex):
    """Sets the secret_hex of this PreSharedKey.

    The secret of the pre-shared key in hexadecimal. It is not case sensitive;
    4a is same as 4A, and it is allowed with or without 0x in the beginning.
    The minimum length of the secret is 128 bits and maximum 256 bits.

    :param secret_hex: The secret_hex of this PreSharedKey.
    :type: str
    :raises ValueError: when ``secret_hex`` is ``None`` or malformed
    """
    if secret_hex is None:
        raise ValueError("Invalid value for `secret_hex`, must not be `None`")
    # 32-64 hex digits (128-256 bits), optionally prefixed with 0x/0X.
    pattern = '^(0[xX])?[0-9a-fA-F]{32,64}$'
    if not re.search(pattern, secret_hex):
        raise ValueError("Invalid value for `secret_hex`, must be a follow pattern or equal to `/^(0[xX])?[0-9a-fA-F]{32,64}$/`")
    self._secret_hex = secret_hex
|
python
|
{
"resource": ""
}
|
q12881
|
combine_bytes
|
train
|
def combine_bytes(bytearr):
    """Join bytes into a single big-endian integer.

    (e.g. ``00001000 00000000`` -> ``0000100000000000``)

    :param bytearr: iterable of byte values, most significant first
    :return: the combined integer (0 for an empty input)
    """
    combined = 0
    for byt in bytearr:
        combined = (combined << 8) | byt
    return combined
|
python
|
{
"resource": ""
}
|
q12882
|
binary_tlv_to_python
|
train
|
def binary_tlv_to_python(binary_string, result=None):
    """Recursively decode a binary string and store output in result object

    :param binary_string: a bytearray object of tlv data
    :param result: result store for recursion
    :return: dict mapping item ids (as strings) to decoded values
    """
    result = {} if result is None else result
    if not binary_string:
        return result
    # The first byte encodes the entry type plus the widths of the id and
    # length fields.
    byte = binary_string[0]
    kind = byte & type_mask
    id_length = get_id_length(byte)
    payload_length = get_value_length(byte)
    # start after the type indicator
    offset = 1
    item_id = str(combine_bytes(binary_string[offset:offset + id_length]))
    offset += id_length
    # get length of payload from specifier
    value_length = payload_length
    if byte & length_type_mask != LengthTypes.SET_BYTE:
        # Length is carried in a separate field rather than in the type byte.
        value_length = combine_bytes(binary_string[offset:offset + payload_length])
        offset += payload_length
    if kind == Types.MULTI:
        # Nested TLV structure: recurse into the payload under this id.
        binary_tlv_to_python(
            binary_string[offset:offset + value_length],
            result.setdefault(item_id, {})
        )
    else:
        value_binary = binary_string[offset: offset + value_length]
        # NOTE(review): `not all(value_binary)` treats any payload containing a
        # zero byte as an integer and everything else as utf8 text - confirm
        # this heuristic matches the intended value encoding.
        result[item_id] = (
            combine_bytes(value_binary) if not all(value_binary) else value_binary.decode('utf8')
        )
        offset += value_length
    # Continue with the remaining sibling entries.
    binary_tlv_to_python(binary_string[offset:], result)
    return result
|
python
|
{
"resource": ""
}
|
q12883
|
maybe_decode_payload
|
train
|
def maybe_decode_payload(payload, content_type='application/nanoservice-tlv', decode_b64=True):
    """Decode TLV payloads; pass anything else through.

    :param payload: some data
    :param content_type: http content type
    :param decode_b64: by default, payload is assumed to be b64 encoded
    :return: decoded dict for tlv content, raw binary otherwise, None if empty
    """
    if not payload:
        return None
    binary = payload
    if decode_b64:
        binary = b64decoder(payload)
    is_tlv = bool(content_type) and 'tlv' in content_type.lower()
    if is_tlv:
        return binary_tlv_to_python(bytearray(binary))
    return binary
|
python
|
{
"resource": ""
}
|
q12884
|
ServicePackageQuotaHistoryResponse.after
|
train
|
def after(self, after):
    """Sets the after of this ServicePackageQuotaHistoryResponse.

    After which quota history ID this paged response is fetched.

    :param after: The after of this ServicePackageQuotaHistoryResponse.
    :type: str
    :raises ValueError: when the value is not exactly 32 characters long
    """
    if after is not None:
        if len(after) > 32:
            raise ValueError("Invalid value for `after`, length must be less than or equal to `32`")
        if len(after) < 32:
            raise ValueError("Invalid value for `after`, length must be greater than or equal to `32`")
    self._after = after
|
python
|
{
"resource": ""
}
|
q12885
|
ServicePackageQuotaHistoryResponse.object
|
train
|
def object(self, object):
    """Sets the object of this ServicePackageQuotaHistoryResponse.

    Always set to 'service-package-quota-history'.

    :param object: The object of this ServicePackageQuotaHistoryResponse.
    :type: str
    :raises ValueError: when ``object`` is ``None`` or not an allowed value
    """
    allowed_values = ["service-package-quota-history"]
    if object is None:
        raise ValueError("Invalid value for `object`, must not be `None`")
    if object not in allowed_values:
        message = "Invalid value for `object` ({0}), must be one of {1}".format(object, allowed_values)
        raise ValueError(message)
    self._object = object
|
python
|
{
"resource": ""
}
|
q12886
|
ServicePackageQuotaHistoryResponse.total_count
|
train
|
def total_count(self, total_count):
    """Sets the total_count of this ServicePackageQuotaHistoryResponse.

    Sum of all quota history entries that should be returned.

    :param total_count: The total_count of this ServicePackageQuotaHistoryResponse.
    :type: int
    :raises ValueError: when ``total_count`` is ``None`` or negative
    """
    if total_count is None:
        raise ValueError("Invalid value for `total_count`, must not be `None`")
    if total_count < 0:
        raise ValueError("Invalid value for `total_count`, must be a value greater than or equal to `0`")
    self._total_count = total_count
|
python
|
{
"resource": ""
}
|
q12887
|
ErrorResponse.code
|
train
|
def code(self, code):
    """Sets the code of this ErrorResponse.

    Response code.

    :param code: The code of this ErrorResponse.
    :type: int
    :raises ValueError: when ``code`` is not one of 400, 401, 404
    """
    allowed_values = [400, 401, 404]
    if code not in allowed_values:
        message = "Invalid value for `code` ({0}), must be one of {1}".format(code, allowed_values)
        raise ValueError(message)
    self._code = code
|
python
|
{
"resource": ""
}
|
q12888
|
ErrorResponse.request_id
|
train
|
def request_id(self, request_id):
    """Sets the request_id of this ErrorResponse.

    Request ID.

    :param request_id: The request_id of this ErrorResponse.
    :type: str
    :raises ValueError: when the value does not match the 32-alphanumeric pattern
    """
    pattern = '^[A-Za-z0-9]{32}'
    if request_id is not None and not re.search(pattern, request_id):
        raise ValueError("Invalid value for `request_id`, must be a follow pattern or equal to `/^[A-Za-z0-9]{32}/`")
    self._request_id = request_id
|
python
|
{
"resource": ""
}
|
q12889
|
ConnectAPI.start_notifications
|
train
|
def start_notifications(self):
    """Start the notifications thread.

    If an external callback is not set up (using `update_webhook`) then
    calling this function is mandatory to get or set resource.

    .. code-block:: python

        >>> api.start_notifications()
        >>> print(api.get_resource_value(device, path))
        Some value
        >>> api.stop_notifications()

    :returns: void
    """
    with self._notifications_lock:
        # Idempotent: a second call while a thread is already running is a no-op.
        if self.has_active_notification_thread:
            return
        api = self._get_api(mds.NotificationsApi)
        self._notifications_thread = NotificationsThread(
            self._db,
            self._queues,
            b64decode=self.b64decode,
            notifications_api=api,
            subscription_manager=self.subscribe,
        )
        # Daemonise so the interpreter can exit without joining this thread.
        self._notifications_thread.daemon = True
        self._notifications_thread.start()
|
python
|
{
"resource": ""
}
|
q12890
|
ConnectAPI.stop_notifications
|
train
|
def stop_notifications(self):
    """Stop the notifications thread and tear down the long-poll channel.

    :returns: result of waiting for the thread to stop, or None if no
        thread was running
    """
    with self._notifications_lock:
        if not self.has_active_notification_thread:
            return
        thread = self._notifications_thread
        self._notifications_thread = None
        # Signal the thread to stop first, then close the server-side channel,
        # then wait for the thread to finish.
        stopping = thread.stop()
        api = self._get_api(mds.NotificationsApi)
        api.delete_long_poll_channel()
        return stopping.wait()
|
python
|
{
"resource": ""
}
|
q12891
|
ConnectAPI.list_connected_devices
|
train
|
def list_connected_devices(self, **kwargs):
    """List devices that are currently registered (connected).

    Example usage, listing all registered devices in the catalog:

    .. code-block:: python

        filters = {
            'created_at': {'$gte': datetime.datetime(2017,01,01),
                           '$lte': datetime.datetime(2017,12,31)}
        }
        devices = api.list_connected_devices(order='asc', filters=filters)
        for idx, device in enumerate(devices):
            print(device)

    Other example filters:

    - directly connected devices (not via gateways):
      ``{'host_gateway': {'$eq': ''}, 'device_type': {'$eq': ''}}``
    - devices connected via gateways: ``{'host_gateway': {'$neq': ''}}``
    - gateway devices: ``{'device_type': {'$eq': 'MBED_GW'}}``

    :param int limit: The number of devices to retrieve.
    :param str order: The ordering direction, ascending (asc) or
        descending (desc)
    :param str after: Get devices after/starting at given `device_id`
    :param filters: Dictionary of filters to apply.
    :returns: a list of connected :py:class:`Device` objects.
    :rtype: PaginatedResponse
    """
    # TODO(pick one of these)
    # Accept both the legacy 'filter' and the newer 'filters' keyword, and
    # force the registered-state constraint into whichever is in use.
    filter_key = 'filter' if 'filter' in kwargs else 'filters'
    kwargs.setdefault(filter_key, {}).setdefault('state', {'$eq': 'registered'})
    kwargs = self._verify_sort_options(kwargs)
    kwargs = self._verify_filters(kwargs, Device, True)
    api = self._get_api(device_directory.DefaultApi)
    return PaginatedResponse(api.device_list, lwrap_type=Device, **kwargs)
|
python
|
{
"resource": ""
}
|
q12892
|
ConnectAPI.list_resources
|
train
|
def list_resources(self, device_id):
    """List all resources registered to a connected device.

    .. code-block:: python

        >>> for r in api.list_resources(device_id):
                print(r.name, r.observable, r.uri)
        None,True,/3/0/1
        Update,False,/5/0/3

    :param str device_id: The ID of the device (Required)
    :returns: A list of :py:class:`Resource` objects for the device
    :rtype: list
    """
    api = self._get_api(mds.EndpointsApi)
    raw_resources = api.get_endpoint_resources(device_id)
    return [Resource(entry) for entry in raw_resources]
|
python
|
{
"resource": ""
}
|
q12893
|
ConnectAPI._mds_rpc_post
|
train
|
def _mds_rpc_post(self, device_id, _wrap_with_consumer=True, async_id=None, **params):
    """Issue an asynchronous device request over the RPC endpoint.

    :param device_id: target device id
    :param _wrap_with_consumer: when True, wrap the async id in an AsyncConsumer
    :param async_id: reuse an existing async id instead of generating a new one
    :param params: fields for the underlying DeviceRequest
    """
    self.ensure_notifications_thread()
    api = self._get_api(mds.DeviceRequestsApi)
    if not async_id:
        async_id = utils.new_async_id()
    api.create_async_request(
        device_id,
        async_id=async_id,
        body=mds.DeviceRequest(**params),
    )
    if _wrap_with_consumer:
        return AsyncConsumer(async_id, self._db)
    return async_id
|
python
|
{
"resource": ""
}
|
q12894
|
ConnectAPI.get_resource_value_async
|
train
|
def get_resource_value_async(self, device_id, resource_path, fix_path=True):
    """Get a resource value for a given device and resource path.

    Will not block, but instead return an AsyncConsumer. Example usage:

    .. code-block:: python

        a = api.get_resource_value_async(device, path)
        while not a.is_done:
            time.sleep(0.1)
        if a.error:
            print("Error", a.error)
        print("Current value", a.value)

    :param str device_id: The name/id of the device (Required)
    :param str resource_path: The resource path to get (Required)
    :param bool fix_path: strip leading / of path if present
    :returns: Consumer object to control asynchronous request
    :rtype: AsyncConsumer
    """
    # NOTE(review): `fix_path` is accepted but not applied here - the path is
    # forwarded verbatim, unlike sibling methods that strip a leading '/'.
    # Confirm whether the device-requests endpoint expects the leading slash.
    return self._mds_rpc_post(device_id=device_id, method='GET', uri=resource_path)
|
python
|
{
"resource": ""
}
|
q12895
|
ConnectAPI.get_resource_value
|
train
|
def get_resource_value(self, device_id, resource_path, fix_path=True, timeout=None):
    """Blocking read of a resource value for a given device and path.

    Example usage:

    .. code-block:: python

        try:
            v = api.get_resource_value(device_id, path)
            print("Current value", v)
        except CloudAsyncError, e:
            print("Error", e)

    :param str device_id: The name/id of the device (Required)
    :param str resource_path: The resource path to get (Required)
    :param fix_path: if True then the leading /, if found, will be stripped before
        doing request to backend. This is a requirement for the API to work properly
    :param timeout: Seconds to wait for the value before timing out. If not
        provided, the call may block indefinitely.
    :raises: CloudAsyncError, CloudTimeoutError
    :returns: The resource value for the requested resource path
    :rtype: str
    """
    consumer = self.get_resource_value_async(device_id, resource_path, fix_path)
    return consumer.wait(timeout)
|
python
|
{
"resource": ""
}
|
q12896
|
ConnectAPI.add_resource_subscription
|
train
|
def add_resource_subscription(self, device_id, resource_path, fix_path=True, queue_size=5):
    """Subscribe to resource updates.

    Sets up a subscription such that each update to the resource value pushes
    a new element onto the returned FIFO queue (a native Python Queue).

    :param device_id: Name of device to subscribe on (Required)
    :param resource_path: The resource path on device to observe (Required)
    :param fix_path: Removes leading / on resource_path if found
    :param queue_size: Sets the Queue size. If set to 0, no queue object will be created
    :returns: a queue of resource updates
    :rtype: Queue
    """
    # The backend cannot handle '//', so strip a leading slash for the request
    # while keeping the original path for queue registration.
    request_path = resource_path
    if fix_path and resource_path.startswith("/"):
        request_path = resource_path[1:]
    # FIXME: explicit behaviour on replacing an existing queue
    update_queue = queue.Queue(queue_size) if queue_size > 0 else None
    self._queues[device_id][resource_path] = update_queue
    # Send subscription request to the backend
    self._add_subscription(device_id, request_path)
    return update_queue
|
python
|
{
"resource": ""
}
|
q12897
|
ConnectAPI.add_resource_subscription_async
|
train
|
def add_resource_subscription_async(self, device_id, resource_path, callback_fn,
                                    fix_path=True, queue_size=5):
    """Subscribe to resource updates with callback function.

    Sets up the subscription and spawns a daemon thread that invokes
    ``callback_fn`` for every update received on the subscribed resource.

    :param device_id: Name of device to set the subscription on (Required)
    :param resource_path: The resource path on device to observe (Required)
    :param callback_fn: Callback function to be executed on update to subscribed resource
    :param fix_path: Removes leading / on resource_path if found
    :param queue_size: Sets the Queue size. If set to 0, no queue object will be created
    :returns: void
    """
    update_queue = self.add_resource_subscription(device_id, resource_path, fix_path, queue_size)
    worker = threading.Thread(
        target=self._subscription_handler,
        args=[update_queue, device_id, resource_path, callback_fn],
    )
    worker.daemon = True
    worker.start()
|
python
|
{
"resource": ""
}
|
q12898
|
ConnectAPI.get_resource_subscription
|
train
|
def get_resource_subscription(self, device_id, resource_path, fix_path=True):
    """Read subscription status.

    :param device_id: Name of device to set the subscription on (Required)
    :param resource_path: The resource path on device to observe (Required)
    :param fix_path: Removes leading / on resource_path if found
    :returns: True if a subscription exists, False otherwise
    """
    # When path starts with / we remove the slash, as the API can't handle //.
    # Keep the original path around however, as we use that for queue registration.
    fixed_path = resource_path
    if fix_path and resource_path.startswith("/"):
        fixed_path = resource_path[1:]
    api = self._get_api(mds.SubscriptionsApi)
    try:
        api.check_resource_subscription(device_id, fixed_path)
    except Exception as e:
        # Only treat a genuine 404 as "not subscribed". Previously `e.status`
        # raised AttributeError for exceptions without a `status` attribute,
        # masking the original error.
        if getattr(e, 'status', None) == 404:
            return False
        raise
    return True
|
python
|
{
"resource": ""
}
|
q12899
|
ConnectAPI.update_presubscriptions
|
train
|
def update_presubscriptions(self, presubscriptions):
    """Update pre-subscription data. Pre-subscription data will be removed for empty list.

    :param presubscriptions: list of `Presubscription` objects (Required)
    :returns: None
    """
    api = self._get_api(mds.SubscriptionsApi)
    presubscriptions_list = []
    for presubscription in presubscriptions:
        # Accept either Presubscription objects or plain dicts.
        if not isinstance(presubscription, dict):
            presubscription = presubscription.to_dict()
        # Map SDK field names onto the wire-format names of the mds API.
        # NOTE(review): the `_resource_path` key (with leading underscore)
        # looks unusual - confirm it matches PresubscriptionData's attribute map.
        presubscription = {
            "endpoint_name": presubscription.get("device_id", None),
            "endpoint_type": presubscription.get("device_type", None),
            "_resource_path": presubscription.get("resource_paths", None)
        }
        presubscriptions_list.append(PresubscriptionData(**presubscription))
    return api.update_pre_subscriptions(presubscriptions_list)
|
python
|
{
"resource": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.