code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def custom_observable_object_prefix_lax(instance): for key, obj in instance['objects'].items(): if ('type' in obj and obj['type'] not in enums.OBSERVABLE_TYPES and obj['type'] not in enums.OBSERVABLE_RESERVED_OBJECTS and not CUSTOM_TYPE_LAX_PREFIX_RE.match(obj['type'])): ...
Ensure custom observable objects follow naming style conventions.
def _timeout_thread(self, remain): time.sleep(remain) if not self._ended: self._ended = True self._release_all()
Timeout that waits for the remaining time, then releases everything if nothing was returned.
def _validate_j2k_colorspace(self, cparams, colorspace):
    """Raise IOError if a colorspace is specified while writing a raw J2K codestream."""
    writing_raw_codestream = cparams.codec_fmt == opj2.CODEC_J2K
    if writing_raw_codestream and colorspace is not None:
        raise IOError('Do not specify a colorspace when writing a raw codestream.')
Cannot specify a colorspace with J2K.
def xray_heartbeat_batch_handler(self, unused_channel, data): gcs_entries = ray.gcs_utils.GcsTableEntry.GetRootAsGcsTableEntry( data, 0) heartbeat_data = gcs_entries.Entries(0) message = (ray.gcs_utils.HeartbeatBatchTableData. GetRootAsHeartbeatBatchTableData(heart...
Handle an xray heartbeat batch message from Redis.
def solar_constant(self): if self.wavenumber is not None: return np.trapz(self.irradiance, self.wavenumber) elif self.wavelength is not None: return np.trapz(self.irradiance, self.wavelength) else: raise TypeError('Neither wavelengths nor wavenumbers available...
Calculate the solar constant
def sink_bigquery(client, to_delete): dataset = _sink_bigquery_setup(client) to_delete.append(dataset) SINK_NAME = "robots-bigquery-%d" % (_millis(),) FILTER = "textPayload:robot" DESTINATION = "bigquery.googleapis.com%s" % (dataset.path,) sink = client.sink(SINK_NAME, filter_=FILTER, destinatio...
Sink log entries to bigquery.
def passwordChallengeResponse(self, cnonce, response): return self._login(_AMPUsernamePassword( self.username, self.challenge, cnonce, response))
Verify the response to a challenge.
def option(self, key=None):
    """Get the value of a command option, or all options when no key is given."""
    if key is not None:
        return self._args.option(key)
    return self._args.options()
Get the value of a command option.
def delete(self, to_delete): if not isinstance(to_delete, list): to_delete = [to_delete] for zenpy_object in to_delete: object_type = get_object_type(zenpy_object) object_cache = self.mapping.get(object_type, None) if object_cache: removed_...
Purge one or more items from the relevant caches
def _collapse_subitems(base, items): out = [] for d in items: newd = _diff_dict(base, d) out.append(newd) return out
Collapse full data representations relative to a standard base.
def _create_class_proxy(cls, theclass): def make_method(name): def method(self, *args, **kw): return getattr(object.__getattribute__(self, "_obj"), name)(*args, **kw) return method namespace = {} for name in cls._special_names: if hasattr(thecl...
creates a proxy for the given class
def dataset_detail(request, dataset_id): active_dataset = get_object_or_404(Dataset, pk=dataset_id) datadict_id = active_dataset.data_dictionary_id datadict = DataDictionaryField.objects.filter( parent_dict=datadict_id ).order_by('columnIndex') uploader_name = grab_names_from_emails([active_...
Renders individual dataset detail page.
def versionok_for_gui():
    """Return True if the running Python is suitable for GUI event integration
    and deeper IPython integration."""
    version = sys.hexversion
    too_old = version < 0x02060000
    broken_py3 = 0x03000000 <= version < 0x03020000
    unsupported_platform = sys.platform.startswith(("java", "cli"))
    return not (too_old or broken_py3 or unsupported_platform)
Return True if running Python is suitable for GUI Event Integration and deeper IPython integration
def app_token(vault_client, app_id, user_id):
    """Return a vault client token obtained via app-id authentication.

    Raises aomi.exceptions.AomiCredentials when the response contains no token.
    """
    resp = vault_client.auth_app_id(app_id, user_id)
    if 'auth' not in resp or 'client_token' not in resp['auth']:
        raise aomi.exceptions.AomiCredentials('invalid apptoken')
    return resp['auth']['client_token']
Returns a vault token based on the app and user id.
def getCheck(self, checkid): check = PingdomCheck(self, {'id': checkid}) check.getDetails() return check
Returns a detailed description of a specified check.
def reset_mode(self): self.command(0x18, b"\x01", timeout=0.1) self.transport.write(Chipset.ACK) time.sleep(0.010)
Send a Reset command to set the operation mode to 0.
def run(self, name):
    """Run the function associated with the first entry named `name`, if any."""
    matching = (e for e in self.entries if e.name == name)
    entry = next(matching, None)
    if entry is not None:
        run_func(entry)
Runs the function associated with the given entry `name`.
def rados_df(self, host_list=None, remote_user=None, remote_pass=None): result, failed_hosts = self.runner.ansible_perform_operation( host_list=host_list, remote_user=remote_user, remote_pass=remote_pass, module="...
Invoke the rados df command and return its output to the user
def _extract_header(time_series): return TimeSeries( metric=time_series.metric, resource=time_series.resource, metric_kind=time_series.metric_kind, value_type=time_series.value_type, )
Return a copy of time_series with the points removed.
def swapaxes(self, axis1, axis2): if self.ndim <=1 or axis1 == axis2: return self ar = np.asarray(self).swapaxes(axis1, axis2) if axis1 != 0 and axis2 != 0: labels = self.labels[:] labels[axis1], labels[axis2] = labels[axis2], labels[axis1] return ...
Interchange two axes of a Timeseries.
def mode(self, target, *data):
    """Set a user or channel mode by sending a MODE line."""
    joined_args = ' '.join(data)
    self.send_line('MODE %s %s' % (target, joined_args), nowait=True)
set user or channel mode
def program_rtr(self, args, rout_id, namespace=None): if namespace is None: namespace = self.find_rtr_namespace(rout_id) if namespace is None: LOG.error("Unable to find namespace for router %s", rout_id) return False final_args = ['ip', 'netns', 'exec', namesp...
Execute the command against the namespace.
def _load_model(self): super()._load_model() self.mujoco_robot.set_base_xpos([0, 0, 0]) self.mujoco_arena = TableArena( table_full_size=self.table_full_size, table_friction=self.table_friction ) if self.use_indicator_object: self.mujoco_arena.add_pos_indic...
Loads the arena and pot object.
def option(self, opt):
    """Return options merged from config and pillar."""
    if 'config.merge' not in self.functions:
        # No merge function available: fall back to raw opts.
        return self.opts.get(opt, {})
    merge = self.functions['config.merge']
    return merge(opt, {}, omit_master=True)
Return options merged from config and pillar
def all(cls, connection=None, **params): request = cls._make_request('GET', cls._get_all_path(), connection, params=params) return cls._create_object(request, connection=connection)
Return objects from the GET-all endpoint as a list; only the first page is returned when no paging params are passed in.
def copy_from_csv_sql(qualified_name: str, delimiter=',', encoding='utf8', null_str='', header=True, escape_str='\\', quote_char='"', force_not_null=None, force_null=None): options = [] options.append("DELIMITER '%s'" % delimiter) options.append("NULL '%s'" % null...
Generate copy from csv statement.
def ip_check(*args, func=None):
    """Check that every argument is an IP address; raise IPError otherwise.

    `func` names the caller for the error message; it defaults to the
    calling function's name taken from the stack.
    """
    func = func or inspect.stack()[2][3]
    for var in args:
        if isinstance(var, ipaddress._IPAddressBase):
            continue
        name = type(var).__name__
        raise IPError(
            f'Function {func} expected IP address, {name} got instead.')
Check if arguments are IP addresses.
def _initialize(self, show_bounds, reset_camera, outline): self.plotter.subplot(*self.loc) if outline is None: self.plotter.add_mesh(self.input_dataset.outline_corners(), reset_camera=False, color=vtki.rcParams['outline_color'], loc=self.loc) e...
Outlines the input dataset and sets up the scene
def gauss_box_model(x, amplitude=1.0, mean=0.0, stddev=1.0, hpix=0.5):
    """Integrate a Gaussian profile over a pixel of half-width `hpix`."""
    half = hpix / stddev
    center = (x - mean) / stddev
    upper = norm.cdf(center + half)
    lower = norm.cdf(center - half)
    return amplitude * (upper - lower)
Integrate a Gaussian profile.
def _gen_etetoolkit_tree(self, node, subject_info_tree): for si_node in subject_info_tree.child_list: if si_node.type_str == TYPE_NODE_TAG: child = self._add_type_node(node, si_node.label_str) elif si_node.type_str == SUBJECT_NODE_TAG: child = self._add_su...
Copy SubjectInfoTree to a ETE Tree.
def document(schema_file):
    """Generate reStructuredText documentation from a confirm schema file.

    :param schema_file: path of the confirm schema file to document.

    Writes the generated documentation to stdout.
    """
    # Use a context manager so the file handle is always closed; the
    # original passed a bare open() handle and leaked it.
    with open(schema_file, 'r') as handle:
        schema = load_schema_file(handle)
    documentation = generate_documentation(schema)
    sys.stdout.write(documentation)
Generate reStructuredText documentation from a confirm schema.
def namespace_to_taxon() -> Dict[str, Node]: human_taxon = Node( id='NCBITaxon:9606', label='Homo sapiens' ) return { 'MGI': Node( id='NCBITaxon:10090', label='Mus musculus' ), 'MONDO': human_taxon, 'OMIM': human_taxon, 'MO...
namespace to taxon mapping
def dump_collection(cfg, f, indent=0): for i, value in enumerate(cfg): dump_value(None, value, f, indent) if i < len(cfg) - 1: f.write(u',\n')
Save a collection of attributes
def _write_file(self, slug, folderpath, html): if not os.path.isdir(folderpath): try: os.makedirs(folderpath) self.info("Creating directory " + folderpath) except Exception as e: self.err(e) return filepath = folderp...
Writes a chart's html to a file
def annotation_path(cls, project, incident, annotation):
    """Return a fully-qualified annotation resource path string."""
    template = "projects/{project}/incidents/{incident}/annotations/{annotation}"
    return google.api_core.path_template.expand(
        template,
        project=project,
        incident=incident,
        annotation=annotation,
    )
Return a fully-qualified annotation string.
def restart_stopped_apps(): cozy_apps = monitor.status(only_cozy=True) for app in cozy_apps.keys(): state = cozy_apps[app] if state == 'up': next elif state == 'down': print 'Start {}'.format(app) rebuild_app(app, force=False) monitor.start...
Restart all apps in stopped state
def clear_lock(self, remote=None, lock_type='update'): cleared = [] errors = [] for repo in self.remotes: if remote: try: if not fnmatch.fnmatch(repo.url, remote): continue except TypeError: ...
Clear update.lk for all remotes
def join(rasters): raster = rasters[0] mask_band = None nodata = None with raster._raster_opener(raster.source_file) as r: nodata = r.nodata mask_flags = r.mask_flag_enums per_dataset_mask = all([rasterio.enums.MaskFlags.per_dataset in flags for flags in mask_flags]) if per_datas...
This method takes a list of rasters and returns a raster that is constructed of all of them
def isConjSouthNode(self): node = self.chart.getObject(const.SOUTH_NODE) return aspects.hasAspect(self.obj, node, aspList=[0])
Returns if object is conjunct south node.
def node_from_nid(self, nid):
    """Return the node in the `Flow` with the given `nid` identifier.

    Raises ValueError when no node carries that id.
    """
    matches = (n for n in self.iflat_nodes() if n.node_id == nid)
    found = next(matches, None)
    if found is None:
        raise ValueError("Cannot find node with node id: %s" % nid)
    return found
Return the node in the `Flow` with the given `nid` identifier
def remove(self): self.canvas.get_first_view().unselect_item(self) for child in self.canvas.get_children(self)[:]: child.remove() self.remove_income() for outcome_v in self.outcomes[:]: self.remove_outcome(outcome_v) for input_port_v in self.inputs[:]: ...
Remove recursively all children and then the StateView itself
def calc_paired_insert_stats_save(in_bam, stat_file, nsample=1000000): if utils.file_exists(stat_file): with open(stat_file) as in_handle: return yaml.safe_load(in_handle) else: stats = calc_paired_insert_stats(in_bam, nsample) with open(stat_file, "w") as out_handle: ...
Calculate paired stats, saving to a file for re-runs.
def int_bytes(cls, string): if string[-1] in ('k', 'm'): value = cls.int_0_inf(string[:-1]) unit = string[-1] if unit == 'k': value *= 2 ** 10 else: value *= 2 ** 20 return value else: return cls.int_...
Convert string describing size to int.
def _get_receiver_name(self): if self._name is None and self._urls.mainzone is not None: name_tag = {"FriendlyName": None} try: root = self.get_status_xml(self._urls.mainzone) except (ValueError, requests.exceptions.RequestException): ...
Get name of receiver from web interface if not set.
def _shutdown(self): for exit_handler in self._exit_handlers: exit_handler() if self._socket: self._socket.close() self._socket = None
Shut down the server.
def _combine_transfers(self, result): transfers = {} for reaction_id, c1, c2, form in result: key = reaction_id, c1, c2 combined_form = transfers.setdefault(key, Formula()) transfers[key] = combined_form | form for (reaction_id, c1, c2), form in iteritems(tran...
Combine multiple pair transfers into one.
def pad_batch(features, batch_multiple): feature = list(features.items())[0][1] batch_size = tf.shape(feature)[0] mod = batch_size % batch_multiple has_mod = tf.cast(tf.cast(mod, tf.bool), tf.int32) batch_padding = batch_multiple * has_mod - mod padded_features = {} for k, feature in features.items(): ...
Pad batch dim of features to nearest multiple of batch_multiple.
def _reorder_columns(bed_file): new_bed = utils.splitext_plus(bed_file)[0] + '_order.bed' with open(bed_file) as in_handle: with open(new_bed, 'w') as out_handle: for line in in_handle: cols = line.strip().split("\t") cols[3] = _select_anno(cols[3]) + "_" + co...
Reorder columns to be compatible with CoRaL
def _get_uploaded_versions_pypicloud(project_name, index_url, requests_verify=True): api_url = index_url for suffix in ('/pypi', '/pypi/', '/simple', '/simple/'): if api_url.endswith(suffix): api_url = api_url[:len(suffix) * -1] + '/api/package' break url = '/'.join((api_url,...
Query the pypi index at index_url using pypicloud api to find all versions
def send(self, topic, message): if not message: Log.error("Expecting a message") message._prepare() if not self.connection: self.connect() producer = Producer( channel=self.connection, exchange=Exchange(self.settings.exchange, type='topic')...
Publishes a pulse message to the proper exchange.
def _fetch_features(self): if self.next_page_url is None: return response = get_json(self.next_page_url, post_values=self.query, headers=self.gpd_session.session_headers) self.features.extend(response['features']) self.next_page_url = response['pagination']['next'] se...
Retrieves a new page of features from Geopedia
def load_model_data(self, path, model): if os.path.isdir(path): if os.path.isfile(os.path.join(path, '_all.yml')): self.load_model_data_collection(path, model) self.load_model_data_from_files(path, model) self.session.commit()
Loads the data for the specified model from the given path.
def _parse_for_errors(self): error = self._response.find('{www.clusterpoint.com}error') if error is not None: if error.find('level').text.lower() in ('rejected', 'failed', 'error', 'fatal'): raise APIError(error) else: warnings.warn(APIWarning(erro...
Look for an error tag and raise APIError for fatal errors or APIWarning for nonfatal ones.
def add_role(ctx, role): if role is None: log('Specify the role with --role') return if ctx.obj['username'] is None: log('Specify the username with --username') return change_user = ctx.obj['db'].objectmodels['user'].find_one({ 'name': ctx.obj['username'] }) i...
Grant a role to an existing user
def assemble_rom_code(self, asm): stream = StringIO(asm) worker = assembler.Assembler(self.processor, stream) try: result = worker.assemble() except BaseException as e: return e, None self.rom.program(result) return None, result
assemble the given code and program the ROM
def _compute_results(self):
    """Compute the optimum location and its value from the evaluated points."""
    best_idx = np.argmin(self.Y)
    self.Y_best = best_value(self.Y)
    self.x_opt = self.X[best_idx, :]
    self.fx_opt = np.min(self.Y)
Computes the optimum and its value.
def visit_project(self, item): if not item.remote_id: command = CreateProjectCommand(self.settings, item) self.task_runner_add(None, item, command) else: self.settings.project_id = item.remote_id
Adds create project command to task runner if project doesn't already exist.
def _get_things(self, method, thing, thing_type, params=None, cacheable=True): limit = params.get("limit", 1) seq = [] for node in _collect_nodes( limit, self, self.ws_prefix + "." + method, cacheable, params ): title = _extract(node, "name") artist = ...
Returns a list of the most played thing_types by this thing.
def csv(self, output):
    """Write `output` rows to self.outfile as excel-compatible CSV."""
    import csv
    writer = csv.writer(self.outfile)
    writer.writerows(output)
Output data as excel-compatible CSV
def show_floatingip(self, floatingip, **_params):
    """Fetch information about the floating IP identified by `floatingip`."""
    path = self.floatingip_path % floatingip
    return self.get(path, params=_params)
Fetches information of a certain floatingip.
def save_npz(object, handle): log.warning("Saving npz files currently only works locally. :/") path = handle.name handle.close() if type(object) is dict: np.savez(path, **object) elif type(object) is list: np.savez(path, *object) else: log.warning("Saving non dict or list...
Save dict of numpy array as npz file.
def request_set_sensor_unreachable(self, req, sensor_name): sensor = self.get_sensor(sensor_name) ts, status, value = sensor.read() sensor.set_value(value, sensor.UNREACHABLE, ts) return('ok',)
Set sensor status to unreachable
def _define_absl_flag(self, flag_instance, suppress): flag_name = flag_instance.name short_name = flag_instance.short_name argument_names = ['--' + flag_name] if short_name: argument_names.insert(0, '-' + short_name) if suppress: helptext = argparse.SUPPRESS else: helptext = fl...
Defines a flag from the flag_instance.
def print_app_meta_data(self):
    """Print the app's meta data: every upper-case attribute and its value."""
    upper_names = [attr for attr in dir(self) if attr.isupper()]
    for attr_name in upper_names:
        print("%20s: %s" % (attr_name, getattr(self, attr_name)))
Print the app's meta data.
def _from_dict(cls, _dict): args = {} if 'text' in _dict: args['text'] = _dict.get('text') if 'keywords' in _dict: args['keywords'] = [ SemanticRolesKeyword._from_dict(x) for x in (_dict.get('keywords')) ] return cls(**a...
Initialize a SemanticRolesResultObject object from a json dictionary.
def initial_state(self) -> StateTensor: s0 = [] for fluent in self._compiler.compile_initial_state(self._batch_size): s0.append(self._output_size(fluent)) s0 = tuple(s0) return s0
Returns the initial state tensor.
def call_sync(func): @wraps(func) def wrapper(self, *args, **kw): if self.thread.ident == get_ident(): return func(self, *args, **kw) barrier = Barrier(2) result = None ex = None def call(): nonlocal result, ex try: resu...
Decorates a function to be called sync on the loop thread
def _stringify_na_values(na_values): result = [] for x in na_values: result.append(str(x)) result.append(x) try: v = float(x) if v == int(v): v = int(v) result.append("{value}.0".format(value=v)) result.append(str(v)...
return a stringified and numeric for these values
def exists(self, path):
    """Return True if `path` exists in HDFS, False otherwise."""
    import hdfs
    try:
        self.client.status(path)
    except hdfs.util.HdfsError as e:
        # A missing file is reported via the error message prefix.
        if str(e).startswith('File does not exist: '):
            return False
        raise e
    else:
        return True
Returns true if the path exists and false otherwise.
def generated_password_entropy(self) -> float: characters = self._get_password_characters() if ( self.passwordlen is None or not characters ): raise ValueError("Can't calculate the password entropy: character" " set is empt...
Calculate the entropy of a password that would be generated.
def shutdown(self): if not process.proc_alive(self.proc): return logger.info("Attempting to connect to %s", self.hostname) client = self.connection attempts = 2 for i in range(attempts): logger.info("Attempting to send shutdown command to %s", ...
Send shutdown command and wait for the process to exit.
def send_to_pipe_channel(channel_name, label, value): 'Send message through pipe to client component' async_to_sync(async_send_to_pipe_channel)(channel_name=channel_name, label=label, ...
Send message through pipe to client component
def Describe(self): result = ["\nUsername: %s" % self.urn.Basename()] labels = [l.name for l in self.GetLabels()] result.append("Labels: %s" % ",".join(labels)) if self.Get(self.Schema.PASSWORD) is None: result.append("Password: not set") else: result.append("Password: set") return "...
Return a description of this user.
def portals(self): char = self.character make_edge = self.engine._get_edge for (o, d) in self.engine._edges_cache.iter_keys( self.character.name, *self.engine._btt() ): yield make_edge(char, o, d)
Iterate over all portals.
def center_at(self, x, y):
    """Center the menu at (x, y) using its current width and height."""
    self.x = x - self.width / 2
    self.y = y - self.height / 2
Center the menu at x, y
async def send_rpc(self, msg, _context): service = msg.get('name') rpc_id = msg.get('rpc_id') payload = msg.get('payload') timeout = msg.get('timeout') response_id = await self.service_manager.send_rpc_command(service, rpc_id, payload, ...
Send an RPC to a service on behalf of a client.
def regex_span_tokenize(s, regex):
    """Yield (start, end) spans of tokens in `s`, splitting on `regex` matches.

    The separator matches themselves are excluded; a separator at the very
    start of the string does not produce an empty leading span.
    """
    left = 0
    for m in re.finditer(regex, s, re.U):
        # Renamed from (right, next): `next` shadowed the builtin.
        sep_start, sep_end = m.span()
        if sep_start != 0:
            yield left, sep_start
        left = sep_end
    yield left, len(s)
Return spans that identify tokens in s split using regex.
def averageOutsidePercentile(requestContext, seriesList, n): averages = [safeAvg(s) for s in seriesList] if n < 50: n = 100 - n lowPercentile = _getPercentile(averages, 100 - n) highPercentile = _getPercentile(averages, n) return [s for s in seriesList if not lowPercentile < safe...
Removes series whose average lies inside the given percentile interval
def _retry_usb_function(count, func, *args, **kwargs): helper = timeouts.RetryHelper(count) while True: try: return func(*args, **kwargs) except usb_exceptions.CommonUsbError: if not helper.retry_if_possible(): raise time.sleep(0.1) else: break
Helper function to retry USB.
def status(self, status_in): if isinstance(status_in, PIDStatus): status_in = [status_in, ] return self.filter( self._filtered_pid_class.status.in_(status_in) )
Filter the PIDs based on their status.
def remove_cluster(self, name): cluster = self.get_cluster(name) clusters = self.get_clusters() clusters.remove(cluster)
Remove a cluster from kubeconfig.
def _reachable_subsystems(network, indices, state): validate.is_network(network) for subset in utils.powerset(indices, nonempty=True, reverse=True): try: yield Subsystem(network, state, subset) except exceptions.StateUnreachableError: pass
A generator over all subsystems in a valid state.
def iterright(self):
    """Yield "rightward" siblings until None is reached."""
    offset = 1
    sibling = self.sibling(offset)
    while sibling is not None:
        yield sibling
        offset += 1
        sibling = self.sibling(offset)
Yield "rightward" siblings until None.
def _set_field_on_message(msg, key, value): if isinstance(value, (collections_abc.MutableSequence, tuple)): while getattr(msg, key): getattr(msg, key).pop() for item in value: if isinstance(item, collections_abc.Mapping): getattr(msg, key).add(**item) ...
Set helper for protobuf Messages.
def delete_license(license_id): response = utils.checked_api_call(pnc_api.licenses, 'delete', id=license_id) if response: return utils.format_json(response.content)
Delete a License by ID
def derive_title(self): title = super(SmartListView, self).derive_title() if not title: return force_text(self.model._meta.verbose_name_plural).title() else: return title
Derives our title from our list
def _count(self, X, Y): self.feature_count_ += safe_sparse_dot(Y.T, X) self.class_count_ += Y.sum(axis=0)
Count and smooth feature occurrences.
def statsd_middleware_factory(app, handler): @coroutine def middleware(request): timer = Timer() timer.start() statsd = yield from app.ps.metrics.client() pipe = statsd.pipe() pipe.incr('request.method.%s' % request.method) try: response = yield from h...
Send the application stats to statsd.
def delete_channel(self, chname): name = chname.lower() if len(self.channel_names) < 1: self.logger.error('Delete channel={0} failed. ' 'No channels left.'.format(chname)) return with self.lock: channel = self.channel[name] ...
Delete a given channel from viewer.
def loadtoc(self): self.toc = self.TOCTMPLT() self.lib.seek(self.pkgstart+self.tocpos) tocstr = self.lib.read(self.toclen) self.toc.frombinary(tocstr)
Load the table of contents into memory.
def log_url (self, url_data): node = self.get_node(url_data) if node: self.xml_starttag(u'node', attrs={u"name": u"%d" % node["id"]}) self.xml_tag(u"label", node["label"]) if self.has_part("realurl"): self.xml_tag(u"url", node["url"]) self....
Write one node and all possible edges.
def format_py2js(cls, datetime_format):
    """Convert a python datetime format string to a moment.js format string."""
    result = datetime_format
    for js_format, py_format in cls.format_map:
        result = result.replace(py_format, js_format)
    return result
Convert python datetime format to moment datetime format.
def ang_veltoaxisangledot(angle, axis, Omega):
    """Compute axis-angle kinematics (angle rate and axis rate) from angular velocity."""
    angle_dot = axis.dot(Omega)
    K = hat_map(axis)
    cot_half = 1 / np.tan(angle / 2)
    axis_dot = 0.5 * (K - cot_half * K.dot(K)).dot(Omega)
    return angle_dot, axis_dot
Compute kinematics for axis angle representation
def to_np(*args): if len(args) > 1: return (cp.asnumpy(x) for x in args) else: return cp.asnumpy(args[0])
Convert GPU arrays to numpy arrays and return them
def validate(token): token_url = TOKEN_URL_FMT.format(token=token) headers = { 'x-auth-token': token, 'accept': 'application/json', } resp = requests.get(token_url, headers=headers) if not resp.status_code == 200: raise HTTPError(status=401) return resp.json()
Validate token and return auth context.
def check_collections_are_supported(saved_model_handler, supported): for meta_graph in saved_model_handler.meta_graphs: used_collection_keys = set(meta_graph.collection_def.keys()) unsupported = used_collection_keys - supported if unsupported: raise ValueError("Unsupported collections in graph: %s\n...
Checks that SavedModelHandler only uses supported collections.
def create_named_notebook(fname, context): if os.path.exists(fname): return from nbformat import v4 as nbf text = "Welcome to *pyramid_notebook!* Use *File* *>* *Shutdown* to close this." cells = [nbf.new_markdown_cell(text)] greeting = context.get("greeting") if greeting: cells....
Create a named notebook if one doesn't exist.
def simToReg(self, sim):
    """Convert a simplified domain expression to a regular expression.

    Leading and trailing slashes are stripped, each '*' matches exactly one
    path segment ([^/]+), and the result anchors the whole path with
    optional surrounding slashes.
    """
    # Raw strings avoid the invalid '\*' escape warning in the original.
    res = re.sub(r'^/', '', sim)
    res = re.sub(r'/$', '', res)
    return '^/?' + re.sub(r'\*', '[^/]+', res) + '/?$'
Convert simplified domain expression to regular expression
def partition(a: Collection, sz: int) -> List[Collection]:
    "Split iterables `a` in equal parts of size `sz`"
    starts = range(0, len(a), sz)
    return [a[start:start + sz] for start in starts]
Split iterables `a` in equal parts of size `sz`
def delete(self): resource = self.RESOURCE.format( account_id=self.account.id, tailored_audience_id=self.tailored_audience_id, id=self.id) response = Request(self.account.client, 'delete', resource).perform() return self.from_response(response.body['data'])
Deletes the current tailored audience permission.