code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
server = self._storage.pop(server_id) server.stop() server.cleanup()
def remove(self, server_id)
remove server and its data Args: server_id - server identity
7.853461
8.852516
0.887145
server = self._storage[server_id] try: if args: result = getattr(server, command)(*args) else: result = getattr(server, command)() except AttributeError: raise ValueError("Cannot issue the command %r to server %s" % (command, server_id)) self._storage[server_id] = server return result
def command(self, server_id, command, *args)
run command Args: server_id - server identity command - command which applies to the server
2.830584
3.303869
0.856748
result = self._storage[server_id].info() result['id'] = server_id return result
def info(self, server_id)
return dictionary object with info about server Args: server_id - server identity
5.887286
5.557037
1.059429
if (self._storage is None or getattr(self, 'releases', {}) != releases or getattr(self, 'default_release', '') != default_release): self._storage = {} self.releases = releases or {} self.default_release = default_release
def set_settings(self, releases=None, default_release=None)
set path to storage
3.170539
2.854807
1.110596
if release: for r in self.releases: if release in r: return self.releases[r] raise MongoOrchestrationError("No such release '%s' in %r" % (release, self.releases)) if self.default_release: return self.releases[self.default_release] if self.releases: return list(self.releases.values())[0] return ''
def bin_path(self, release=None)
Get the bin path for a particular release.
3.196285
3.060687
1.044303
# Use 'rs_id' to set the id for consistency, but need to rename # to 'id' to use with ReplicaSets.create() rs_cfg['id'] = rs_cfg.pop('rs_id', None) for member in rs_cfg.setdefault('members', [{}]): member['procParams'] = self._strip_auth( member.get('procParams', {})) member['procParams']['configsvr'] = True if self.enable_ipv6: common.enable_ipv6_single(member['procParams']) rs_cfg['sslParams'] = self.sslParams self._configsvrs.append(ReplicaSets().create(rs_cfg))
def __init_configrs(self, rs_cfg)
Create and start a config replica set.
6.954052
6.236825
1.114999
self._configsvrs = [] for cfg in params: # Remove flags that turn on auth. cfg = self._strip_auth(cfg) server_id = cfg.pop('server_id', None) version = cfg.pop('version', self._version) cfg.update({'configsvr': True}) if self.enable_ipv6: common.enable_ipv6_single(cfg) self._configsvrs.append(Servers().create( 'mongod', cfg, sslParams=self.sslParams, autostart=True, version=version, server_id=server_id))
def __init_configsvrs(self, params)
create and start config servers
5.563202
5.418028
1.026795
if self.uses_rs_configdb: rs_id = self._configsvrs[0] mongodb_uri = ReplicaSets().info(rs_id)['mongodb_uri'] return [{'id': rs_id, 'mongodb_uri': mongodb_uri}] return [{'id': h_id, 'hostname': Servers().hostname(h_id)} for h_id in self._configsvrs]
def configsvrs(self)
return list of config servers
6.33138
5.901946
1.072762
for server in self._routers: info = Servers().info(server) if info['procInfo'].get('alive', False): return {'id': server, 'hostname': Servers().hostname(server)}
def router(self)
return first available router
9.876665
7.996215
1.235168
if self.uses_rs_configdb: # Replica set configdb. rs_id = self._configsvrs[0] config_members = ReplicaSets().members(rs_id) configdb = '%s/%s' % ( rs_id, ','.join(m['host'] for m in config_members)) else: configdb = ','.join(Servers().hostname(item) for item in self._configsvrs) server_id = params.pop('server_id', None) version = params.pop('version', self._version) params.update({'configdb': configdb}) if self.enable_ipv6: common.enable_ipv6_single(params) # Remove flags that turn auth on. params = self._strip_auth(params) self._routers.append(Servers().create( 'mongos', params, sslParams=self.sslParams, autostart=True, version=version, server_id=server_id)) return {'id': self._routers[-1], 'hostname': Servers().hostname(self._routers[-1])}
def router_add(self, params)
add new router (mongos) into existing configuration
5.73325
5.47431
1.047301
clients = [] for server in self._routers: if Servers().is_alive(server): client = self.create_connection(Servers().hostname(server)) clients.append(client) return clients
def router_connections(self)
Return a list of MongoClients, one for each mongos.
6.022977
5.254602
1.146229
result = Servers().remove(router_id) del self._routers[ self._routers.index(router_id) ] return { "ok": 1, "routers": self._routers }
def router_remove(self, router_id)
remove router from the sharded cluster
6.305648
6.240602
1.010423
return self.router_command("addShard", (shard_uri, {"name": name}), is_eval=False)
def _add(self, shard_uri, name)
execute addShard command
17.186081
10.899405
1.576791
member_id = member_id or str(uuid4()) if self.enable_ipv6: common.enable_ipv6_repl(params) if 'members' in params: # is replica set for member in params['members']: if not member.get('rsParams', {}).get('arbiterOnly', False): member.setdefault('procParams', {})['shardsvr'] = True rs_params = params.copy() # Turn 'rs_id' -> 'id', to be consistent with 'server_id' below. rs_params['id'] = rs_params.pop('rs_id', None) rs_params.update({'sslParams': self.sslParams}) rs_params['version'] = params.pop('version', self._version) rs_params['members'] = [ self._strip_auth(params) for params in rs_params['members']] rs_id = ReplicaSets().create(rs_params) members = ReplicaSets().members(rs_id) cfgs = rs_id + r"/" + ','.join([item['host'] for item in members]) result = self._add(cfgs, member_id) if result.get('ok', 0) == 1: self._shards[result['shardAdded']] = {'isReplicaSet': True, '_id': rs_id} # return self._shards[result['shardAdded']].copy() return self.member_info(member_id) else: # is single server params.setdefault('procParams', {})['shardsvr'] = True params.update({'autostart': True, 'sslParams': self.sslParams}) params = params.copy() params['procParams'] = self._strip_auth( params.get('procParams', {})) params.setdefault('version', self._version) logger.debug("servers create params: {params}".format(**locals())) server_id = Servers().create('mongod', **params) result = self._add(Servers().hostname(server_id), member_id) if result.get('ok', 0) == 1: self._shards[result['shardAdded']] = {'isServer': True, '_id': server_id} return self.member_info(member_id)
def member_add(self, member_id=None, params=None)
add new member into existing configuration
3.828596
3.775052
1.014184
info = self._shards[member_id].copy() info['id'] = member_id info['tags'] = self.tags.get(member_id, list()) return info
def member_info(self, member_id)
return info about member
5.163729
5.265115
0.980744
result = self.router_command("removeShard", shard_name, is_eval=False) if result['ok'] == 1 and result['state'] == 'completed': shard = self._shards.pop(shard_name) if shard.get('isServer', False): Servers().remove(shard['_id']) if shard.get('isReplicaSet', False): ReplicaSets().remove(shard['_id']) return result
def _remove(self, shard_name)
remove member from configuration
4.800986
4.684107
1.024952
# Ensure all shards by calling "reset" on each. for shard_id in self._shards: if self._shards[shard_id].get('isReplicaSet'): singleton = ReplicaSets() elif self._shards[shard_id].get('isServer'): singleton = Servers() singleton.command(self._shards[shard_id]['_id'], 'reset') # Ensure all config servers by calling "reset" on each. for config_id in self._configsvrs: self.configdb_singleton.command(config_id, 'reset') # Ensure all routers by calling "reset" on each. for router_id in self._routers: Servers().command(router_id, 'reset') return self.info()
def reset(self)
Ensure all shards, configs, and routers are running and available.
4.216338
3.508888
1.201617
uri = ','.join(x['hostname'] for x in self.routers) mongodb_uri = 'mongodb://' + uri result = {'id': self.id, 'shards': self.members, 'configsvrs': self.configsvrs, 'routers': self.routers, 'mongodb_uri': mongodb_uri, 'orchestration': 'sharded_clusters'} if self.login: result['mongodb_auth_uri'] = self.mongodb_auth_uri(uri) return result
def info(self)
return info about configuration
5.723048
5.485924
1.043224
for _id, shard in self._shards.items(): if shard.get('isServer', False): Servers().remove(shard['_id']) if shard.get('isReplicaSet', False): ReplicaSets().remove(shard['_id']) for mongos in self._routers: Servers().remove(mongos) for config_id in self._configsvrs: self.configdb_singleton.remove(config_id) self._configsvrs = [] self._routers = [] self._shards = {}
def cleanup(self)
cleanup configuration: stop and remove all servers
4.534913
3.908927
1.160143
super(ShardedClusters, self).set_settings(releases, default_release) ReplicaSets().set_settings(releases, default_release)
def set_settings(self, releases=None, default_release=None)
set path to storage
8.562983
8.465002
1.011575
sh_id = params.get('id', str(uuid4())) if sh_id in self: raise ShardedClusterError( "Sharded cluster with id %s already exists." % sh_id) params['id'] = sh_id cluster = ShardedCluster(params) self[cluster.id] = cluster return cluster.id
def create(self, params)
create new ShardedCluster Args: params - dictionary with specific params for instance Return cluster_id where cluster_id - id which can be used to take the cluster from servers collection
3.868543
3.35542
1.152924
cluster = self._storage.pop(cluster_id) cluster.cleanup()
def remove(self, cluster_id)
remove cluster and its data Args: cluster_id - cluster identity
10.782665
12.419349
0.868215
cluster = self._storage[cluster_id] result = cluster.router_add(params) self._storage[cluster_id] = cluster return result
def router_add(self, cluster_id, params)
add new router
3.688103
3.726944
0.989578
cluster = self._storage[cluster_id] result = cluster.router_remove(router_id) self._storage[cluster_id] = cluster return result
def router_del(self, cluster_id, router_id)
remove router from the ShardedCluster
3.867992
3.462698
1.117046
cluster = self._storage[cluster_id] return cluster.member_info(member_id)
def member_info(self, cluster_id, member_id)
return info about member
4.92917
4.973622
0.991063
cluster = self._storage[cluster_id] try: return getattr(cluster, command)(*args) except AttributeError: raise ValueError("Cannot issue the command %r to ShardedCluster %s" % (command, cluster_id))
def command(self, cluster_id, command, *args)
Call a ShardedCluster method.
4.781315
3.474889
1.375962
cluster = self._storage[cluster_id] result = cluster.member_remove(member_id) self._storage[cluster_id] = cluster return result
def member_del(self, cluster_id, member_id)
remove member from cluster cluster
3.696728
3.773023
0.979779
cluster = self._storage[cluster_id] result = cluster.member_add(params.get('id', None), params.get('shardParams', {})) self._storage[cluster_id] = cluster return result
def member_add(self, cluster_id, params)
add new member into configuration
4.968376
4.68122
1.061342
_dir = os.path.expanduser(os.path.expandvars(_dir)) if not os.path.isabs(_dir): _dir = os.path.normpath(os.path.join(cwd, _dir)) return _dir
def expand_dir(_dir, cwd=os.getcwd())
Return path with environment variables and tilde ~ expanded. :param _dir: :type _dir: str :param cwd: current working dir (for deciphering relative _dir paths) :type cwd: str :rtype: str
1.711142
1.889078
0.905808
configs = [] for directory, repos in config.items(): for repo, repo_data in repos.items(): conf = {} ''' repo_name: http://myrepo.com/repo.git to repo_name: { url: 'http://myrepo.com/repo.git' } also assures the repo is a :py:class:`dict`. ''' if isinstance(repo_data, string_types): conf['url'] = repo_data else: conf = update_dict(conf, repo_data) if 'repo' in conf: if 'url' not in conf: conf['url'] = conf.pop('repo') else: conf.pop('repo', None) ''' ``shell_command_after``: if str, turn to list. ''' if 'shell_command_after' in conf: if isinstance(conf['shell_command_after'], string_types): conf['shell_command_after'] = [conf['shell_command_after']] if 'name' not in conf: conf['name'] = repo if 'parent_dir' not in conf: conf['parent_dir'] = expand_dir(directory, cwd) if 'repo_dir' not in conf: conf['repo_dir'] = expand_dir( os.path.join(conf['parent_dir'], conf['name']), cwd ) if 'remotes' in conf: remotes = [] for remote_name, url in conf['remotes'].items(): remotes.append({'remote_name': remote_name, 'url': url}) conf['remotes'] = sorted( remotes, key=lambda x: sorted(x.get('remote_name')) ) configs.append(conf) return configs
def extract_repos(config, cwd=os.getcwd())
Return expanded configuration. end-user configuration permits inline configuration shortcuts; expand to identical format for parsing. :param config: the repo config in :py:class:`dict` format. :type config: dict :param cwd: current working dir (for deciphering relative paths) :type cwd: str :rtype: list
2.823228
2.6825
1.052462
configs = [] yaml_config = os.path.expanduser('~/.vcspull.yaml') has_yaml_config = os.path.exists(yaml_config) json_config = os.path.expanduser('~/.vcspull.json') has_json_config = os.path.exists(json_config) if not has_yaml_config and not has_json_config: log.debug( 'No config file found. Create a .vcspull.yaml or .vcspull.json' ' in your $HOME directory. http://vcspull.git-pull.com for a' ' quickstart.' ) else: if sum(filter(None, [has_json_config, has_yaml_config])) > int(1): raise exc.MultipleConfigWarning() if has_yaml_config: configs.append(yaml_config) if has_json_config: configs.append(json_config) return configs
def find_home_config_files(filetype=['json', 'yaml'])
Return configs of ``.vcspull.{yaml,json}`` in user's home directory.
2.863657
2.597465
1.102481
configs = [] if include_home is True: configs.extend(find_home_config_files()) if isinstance(path, list): for p in path: configs.extend(find_config_files(p, match, filetype)) return configs else: path = os.path.expanduser(path) if isinstance(match, list): for m in match: configs.extend(find_config_files(path, m, filetype)) else: if isinstance(filetype, list): for f in filetype: configs.extend(find_config_files(path, match, f)) else: match = os.path.join(path, match) match += ".{filetype}".format(filetype=filetype) configs = glob.glob(match) return configs
def find_config_files( path=['~/.vcspull'], match=['*'], filetype=['json', 'yaml'], include_home=False )
Return repos from a directory and match. Not recursive. :param path: list of paths to search :type path: list :param match: list of globs to search against :type match: list :param filetype: list of filetypes to search against :type filetype: list :param include_home: Include home configuration files :type include_home: bool :raises: - LoadConfigRepoConflict: There are two configs that have same path and name with different repo urls. :returns: list of absolute paths to config files. :rtype: list
2.039735
2.068507
0.98609
repos = [] for f in files: _, ext = os.path.splitext(f) conf = kaptan.Kaptan(handler=ext.lstrip('.')).import_config(f) newrepos = extract_repos(conf.export('dict'), cwd) if not repos: repos.extend(newrepos) continue dupes = detect_duplicate_repos(repos, newrepos) if dupes: msg = ('repos with same path + different VCS detected!', dupes) raise exc.VCSPullException(msg) repos.extend(newrepos) return repos
def load_configs(files, cwd=os.getcwd())
Return repos from a list of files. :todo: Validate scheme, check for duplicate destinations, VCS urls :param files: paths to config file :type files: list :param cwd: current path (pass down for :func:`extract_repos`) :type cwd: str :returns: expanded config dict item :rtype: list of dict
5.60994
5.341321
1.050291
dupes = [] path_dupe_repos = [] curpaths = [r['repo_dir'] for r in repos1] newpaths = [r['repo_dir'] for r in repos2] path_duplicates = list(set(curpaths).intersection(newpaths)) if not path_duplicates: return None path_dupe_repos.extend( [r for r in repos2 if any(r['repo_dir'] == p for p in path_duplicates)] ) if not path_dupe_repos: return None for n in path_dupe_repos: currepo = next((r for r in repos1 if r['repo_dir'] == n['repo_dir']), None) if n['url'] != currepo['url']: dupes += (n, currepo) return dupes
def detect_duplicate_repos(repos1, repos2)
Return duplicate repos dict if repo_dir same and vcs different. :param repos1: list of repo expanded dicts :type repos1: list of :py:dict :param repos2: list of repo expanded dicts :type repos2: list of :py:dict :rtype: list of dicts or None :returns: Duplicate lists
2.792642
2.858471
0.976971
configs = [] for filename in os.listdir(config_dir): if is_config_file(filename, extensions) and not filename.startswith('.'): configs.append(filename) return configs
def in_dir(config_dir=CONFIG_DIR, extensions=['.yml', '.yaml', '.json'])
Return a list of configs in ``config_dir``. :param config_dir: directory to search :type config_dir: str :param extensions: filetypes to check (e.g. ``['.yaml', '.json']``). :type extensions: list :rtype: list
2.886639
3.862875
0.747277
repo_list = [] if repo_dir: repo_list.extend( [r for r in config if fnmatch.fnmatch(r['parent_dir'], repo_dir)] ) if vcs_url: repo_list.extend( r for r in config if fnmatch.fnmatch(r.get('url', r.get('repo')), vcs_url) ) if name: repo_list.extend([r for r in config if fnmatch.fnmatch(r.get('name'), name)]) return repo_list
def filter_repos(config, repo_dir=None, vcs_url=None, name=None)
Return a :py:obj:`list` of repos from (expanded) config file. repo_dir, vcs_url and name all support fnmatch. :param config: the expanded repo config in :py:class:`dict` format. :type config: dict :param repo_dir: directory of checkout location, fnmatch pattern supported :type repo_dir: str or None :param vcs_url: url of vcs remote, fnmatch pattern supported :type vcs_url: str or None :param name: project name, fnmatch pattern supported :type name: str or None :rtype: list
2.22193
2.157263
1.029976
if not log: log = logging.getLogger() if not log.handlers: channel = logging.StreamHandler() channel.setFormatter(DebugLogFormatter()) log.setLevel(level) log.addHandler(channel) # setup styling for repo loggers repo_logger = logging.getLogger('libvcs') channel = logging.StreamHandler() channel.setFormatter(RepoLogFormatter()) channel.addFilter(RepoFilter()) repo_logger.setLevel(level) repo_logger.addHandler(channel)
def setup_logger(log=None, level='INFO')
Setup logging for CLI use. :param log: instance of logger :type log: :py:class:`Logger`
2.965349
3.149173
0.941628
for attr in ['lineno', 'fromlineno', 'tolineno', 'col_offset', 'parent']: if hasattr(src, attr): setattr(dest, attr, getattr(src, attr))
def copy_node_info(src, dest)
Copy information from src to dest Every node in the AST has to have line number information. Get the information from the old stmt.
3.47619
3.46375
1.003591
'''Convert a flask.ext.admin into flask_admin.''' match = re.match(r'flask\.ext\.(.*)', flask_ext_name) if match is None: raise LookupError("Module name `{}` doesn't match" "`flask.ext` style import.") from_name = match.group(1) actual_module_name = 'flask_{}'.format(from_name) return actual_module_name
def make_non_magical_flask_import(flask_ext_name)
Convert a flask.ext.admin into flask_admin.
4.929387
4.260039
1.157123
'''Translates a flask.ext from-style import into a non-magical import. Translates: from flask.ext import wtf, bcrypt as fcrypt Into: import flask_wtf as wtf, flask_bcrypt as fcrypt ''' new_names = [] # node.names is a list of 2-tuples. Each tuple consists of (name, as_name). # So, the import would be represented as: # # from flask.ext import wtf as ftw, admin # # node.names = [('wtf', 'ftw'), ('admin', None)] for (name, as_name) in node.names: actual_module_name = 'flask_{}'.format(name) new_names.append((actual_module_name, as_name or name)) new_node = nodes.Import() copy_node_info(node, new_node) new_node.names = new_names mark_transformed(new_node) return new_node
def transform_flask_from_import(node)
Translates a flask.ext from-style import into a non-magical import. Translates: from flask.ext import wtf, bcrypt as fcrypt Into: import flask_wtf as wtf, flask_bcrypt as fcrypt
4.436666
3.061303
1.449274
'''Translates a flask.ext.wtf from-style import into a non-magical import. Translates: from flask.ext.wtf import Form from flask.ext.admin.model import InlineFormAdmin Into: from flask_wtf import Form from flask_admin.model import InlineFormAdmin ''' actual_module_name = make_non_magical_flask_import(node.modname) new_node = nodes.ImportFrom(actual_module_name, node.names, node.level) copy_node_info(node, new_node) mark_transformed(new_node) return new_node
def transform_flask_from_long(node)
Translates a flask.ext.wtf from-style import into a non-magical import. Translates: from flask.ext.wtf import Form from flask.ext.admin.model import InlineFormAdmin Into: from flask_wtf import Form from flask_admin.model import InlineFormAdmin
4.688375
2.598016
1.804599
'''Translates a flask.ext.wtf bare import into a non-magical import. Translates: import flask.ext.admin as admin Into: import flask_admin as admin ''' new_names = [] for (name, as_name) in node.names: match = re.match(r'flask\.ext\.(.*)', name) from_name = match.group(1) actual_module_name = 'flask_{}'.format(from_name) new_names.append((actual_module_name, as_name)) new_node = nodes.Import() copy_node_info(node, new_node) new_node.names = new_names mark_transformed(new_node) return new_node
def transform_flask_bare_import(node)
Translates a flask.ext.wtf bare import into a non-magical import. Translates: import flask.ext.admin as admin Into: import flask_admin as admin
3.803038
2.882019
1.319574
frame = APIFrame(data, self._escaped).output() self.serial.write(frame)
def _write(self, data)
_write: binary data -> None Packages the given binary data in an API frame and writes the result to the serial port
22.209038
15.614113
1.422369
try: cmd_spec = self.api_commands[cmd] except AttributeError: raise NotImplementedError("API command specifications could not be " "found; use a derived class which defines" " 'api_commands'.") packet = b'' for field in cmd_spec: try: # Read this field's name from the function arguments dict data = kwargs[field['name']] if isinstance(data, str): data = stringToBytes(data) except KeyError: # Data wasn't given # Only a problem if the field has a specific length if field['len'] is not None: # Was a default value specified? default_value = field['default'] if default_value: # If so, use it data = default_value else: # Otherwise, fail raise KeyError( "The expected field {} of length {} " "was not provided".format( field['name'], field['len'] ) ) else: # No specific length, ignore it data = None # Ensure that the proper number of elements will be written if field['len'] and len(data) != field['len']: raise ValueError( "The data provided for '{}' was not {} " "bytes long".format(field['name'], field['len']) ) # Add the data to the packet, if it has been specified. # Otherwise, the parameter was of variable length, and not given. if data: packet += data return packet
def _build_command(self, cmd, **kwargs)
_build_command: string (binary data) ... -> binary data _build_command will construct a command packet according to the specified command's specification in api_commands. It will expect named arguments for all fields other than those with a default value or a length of 'None'. Each field will be written out in the order they are defined in the command definition.
4.492496
4.053385
1.108332
# Fetch the first byte, identify the packet # If the spec doesn't exist, raise exception packet_id = data[0:1] try: packet = self.api_responses[packet_id] except AttributeError: raise NotImplementedError("API response specifications could not " "be found; use a derived class which " "defines 'api_responses'.") except KeyError: # Check to see if this ID can be found among transmittable packets for cmd_name, cmd in list(self.api_commands.items()): if cmd[0]['default'] == data[0:1]: raise CommandFrameException("Incoming frame with id {} " "looks like a command frame of " "type '{}' (these should not be" " received). Are you sure your " "devices are in " "API mode?".format( data[0], cmd_name) ) raise KeyError( "Unrecognized response packet with id byte {0}".format(data[0])) # Current byte index in the data stream index = 1 # Result info info = {'id': packet['name']} packet_spec = packet['structure'] # Parse the packet in the order specified for field in packet_spec: if field['len'] == 'null_terminated': field_data = b'' while data[index:index+1] != b'\x00': field_data += data[index:index+1] index += 1 index += 1 info[field['name']] = field_data elif field['len'] is not None: # Store the number of bytes specified # Are we trying to read beyond the last data element? expected_len = index + field['len'] if expected_len > len(data): raise ValueError("Response packet was shorter than " "expected; expected: {}, got: {} " "bytes".format(expected_len, len(data)) ) field_data = data[index:index + field['len']] info[field['name']] = field_data index += field['len'] # If the data field has no length specified, store any # leftover bytes and quit else: field_data = data[index:] # Were there any remaining bytes? 
if field_data: # If so, store them info[field['name']] = field_data index += len(field_data) break # If there are more bytes than expected, raise an exception if index < len(data): raise ValueError("Response packet was longer than expected; " "expected: {}, got: {} bytes".format( index, len(data)) ) # Apply parsing rules if any exist if 'parsing' in packet: for parse_rule in packet['parsing']: # Only apply a rule if it is relevant (raw data is available) if parse_rule[0] in info: # Apply the parse function to the indicated field and # replace the raw data with the result info[parse_rule[0]] = parse_rule[1](self, info) return info
def _split_response(self, data)
_split_response: binary data -> {'id':str, 'param':binary data, ...} _split_response takes a data packet received from an XBee device and converts it into a dictionary. This dictionary provides names for each segment of binary data as specified in the api_responses spec.
4.154043
3.956249
1.049995
header_size = 3 # number of samples (always 1?) is the first byte sample_count = byteToInt(io_bytes[0]) # part of byte 1 and byte 2 are the DIO mask ( 9 bits ) dio_mask = (byteToInt(io_bytes[1]) << 8 | byteToInt(io_bytes[2])) \ & 0x01FF # upper 7 bits of byte 1 is the AIO mask aio_mask = (byteToInt(io_bytes[1]) & 0xFE) >> 1 # sorted lists of enabled channels; value is position of bit in mask dio_chans = [] aio_chans = [] for i in range(0, 9): if dio_mask & (1 << i): dio_chans.append(i) dio_chans.sort() for i in range(0, 7): if aio_mask & (1 << i): aio_chans.append(i) aio_chans.sort() return (sample_count, dio_chans, aio_chans, dio_mask, header_size)
def _parse_samples_header(self, io_bytes)
_parse_samples_header: binary data in XBee IO data format -> (int, [int ...], [int ...], int, int) _parse_samples_header will read the first three bytes of the binary data given and will return the number of samples which follow, a list of enabled digital inputs, a list of enabled analog inputs, the dio_mask, and the size of the header in bytes
3.484859
3.104258
1.122606
sample_count, dio_chans, aio_chans, dio_mask, header_size = \ self._parse_samples_header(io_bytes) samples = [] # split the sample data into a list, so it can be pop()'d sample_bytes = [byteToInt(c) for c in io_bytes[header_size:]] # repeat for every sample provided for sample_ind in range(0, sample_count): tmp_samples = {} if dio_chans: # we have digital data digital_data_set = (sample_bytes.pop(0) << 8 | sample_bytes.pop(0)) digital_values = dio_mask & digital_data_set for i in dio_chans: tmp_samples['dio-{0}'.format(i)] = True \ if (digital_values >> i) & 1 else False for i in aio_chans: analog_sample = (sample_bytes.pop(0) << 8 | sample_bytes.pop(0)) tmp_samples['adc-{0}'.format(i)] = analog_sample samples.append(tmp_samples) return samples
def _parse_samples(self, io_bytes)
_parse_samples: binary data in XBee IO data format -> [ {"dio-0":True, "dio-1":False, "adc-0":100"}, ...] _parse_samples reads binary data from an XBee device in the IO data format specified by the API. It will then return a dictionary indicating the status of each enabled IO port.
3.703024
3.502377
1.057289
# Pass through the keyword arguments self._write(self._build_command(cmd, **kwargs))
def send(self, cmd, **kwargs)
send: string param=binary data ... -> None When send is called with the proper arguments, an API command will be written to the serial port for this XBee device containing the proper instructions and data. This method must be called with named arguments in accordance with the api_command specification. Arguments matching all field names other than those in reserved_names (like 'id' and 'order') should be given, unless they are of variable length ('None' in the specification; those are optional).
9.861172
14.593808
0.675709
alarm = XBeeAlarm('/dev/ttyUSB0', '\x56\x78') routine = SimpleWakeupRoutine(alarm) from time import sleep while True: try: print "Waiting 5 seconds..." sleep(5) print "Firing" routine.trigger() except KeyboardInterrupt: break
def main()
Run through simple demonstration of alarm concept
7.453941
6.262103
1.190325
# Analog pin 0 self.hw.remote_at( dest_addr=self.remote_addr, command='D0', parameter='\x02') # Disengage remote LED, buzzer self.deactivate() self._set_send_samples(False)
def _reset(self)
reset: None -> None Resets the remote XBee device to a standard configuration
19.923391
17.204901
1.158007
# DIO pin 1 (LED), active low self.hw.remote_at( dest_addr=self.remote_addr, command='D1', parameter='\x04' if status else '\x05')
def _set_LED(self, status)
_set_LED: boolean -> None Sets the status of the remote LED
12.560422
14.777129
0.849991
# Receive samples from the remote device self._set_send_samples(True) while True: packet = self.hw.wait_read_frame() if 'adc-0' in packet['samples'][0]: # Stop receiving samples from the remote device self._set_send_samples(False) return packet['samples'][0]['adc-0'] > XBeeAlarm.DETECT_THRESH
def bed_occupied(self)
bed_occupied: None -> boolean Determines whether the bed is currently occupied by requesting data from the remote XBee and comparing the analog value with a threshold.
9.903178
7.436087
1.331773
total = 0 # Add together all bytes for byte in self.data: total += byteToInt(byte) # Only keep the last byte total = total & 0xFF return intToByte(0xFF - total)
def checksum(self)
checksum: None -> single checksum byte checksum adds all bytes of the binary, unescaped data in the frame, saves the last byte of the result, and subtracts it from 0xFF. The final result is the checksum
6.239002
5.970881
1.044905
total = 0 # Add together all bytes for byte in self.data: total += byteToInt(byte) # Add checksum too total += byteToInt(chksum) # Only keep low bits total &= 0xFF # Check result return total == 0xFF
def verify(self, chksum)
verify: 1 byte -> boolean verify checksums the frame, adds the expected checksum, and determines whether the result is correct. The result should be 0xFF.
7.184823
6.456395
1.112823
# start is one byte long, length is two bytes # data is n bytes long (indicated by length) # chksum is one byte long data = self.len_bytes() + self.data + self.checksum() # Only run the escaoe process if it hasn't been already if self.escaped and len(self.raw_data) < 1: self.raw_data = APIFrame.escape(data) if self.escaped: data = self.raw_data # Never escape start byte return APIFrame.START_BYTE + data
def output(self)
output: None -> valid API frame (binary data) output will produce a valid API frame for transmission to an XBee module.
10.41338
9.340141
1.114906
def escape(data):
    """Escape 'special' bytes in the given byte string.

    Every byte found in APIFrame.ESCAPE_BYTES is replaced by the
    escape byte followed by the original byte XORed with 0x20; all
    other bytes pass through unchanged.
    """
    result = b""
    for raw in data:
        value = byteToInt(raw)
        if intToByte(value) in APIFrame.ESCAPE_BYTES:
            result += APIFrame.ESCAPE_BYTE + intToByte(0x20 ^ value)
        else:
            result += intToByte(value)
    return result
escape: byte string -> byte string When a 'special' byte is encountered in the given data string, it is preceded by an escape byte and XORed with 0x20.
4.30798
3.468982
1.241857
def fill(self, byte):
    """Add the given raw byte to this APIFrame.

    If this frame is marked as escaped and the byte is the escape
    byte, it is consumed and the *next* byte passed to fill() is
    unescaped (XORed with 0x20) before being stored.
    """
    if self._unescape_next_byte:
        byte = intToByte(byteToInt(byte) ^ 0x20)
        self._unescape_next_byte = False
    elif self.escaped and byte == APIFrame.ESCAPE_BYTE:
        # Remember to unescape the following byte; store nothing now.
        self._unescape_next_byte = True
        return

    self.raw_data += intToByte(byteToInt(byte))
fill: byte -> None Adds the given raw byte to this APIFrame. If this APIFrame is marked as escaped and this byte is an escape byte, the next byte in a call to fill() will be unescaped.
5.457683
3.58674
1.521628
def parse(self):
    """Extract the data contained in this frame and verify its checksum.

    :raises ValueError: if fewer than 3 raw bytes are available, or if
        the frame's checksum does not match its data.
    """
    if len(self.raw_data) < 3:
        # BUG FIX: the ValueError was previously constructed but never
        # raised, so a short frame fell through to struct.unpack below
        # and failed there with a confusing error instead.
        raise ValueError("parse() may only be called on a frame containing at "
                         "least 3 bytes of raw data (see fill())")

    # First two bytes are the length of the data
    raw_len = self.raw_data[1:3]

    # Unpack it (big-endian short; whitespace in the format is ignored)
    data_len = struct.unpack("> h", raw_len)[0]

    # Read the data
    data = self.raw_data[3:3 + data_len]
    chksum = self.raw_data[-1]

    # Checksum check
    self.data = data
    if not self.verify(chksum):
        raise ValueError("Invalid checksum")
parse: None -> None Given a valid API frame, parse extracts the data contained inside it and verifies it against its checksum
4.34683
3.995742
1.087866
if packet_info['id'] in ('at_response', 'remote_at_response') and \ packet_info['command'].lower() == b'is' and \ packet_info['status'] == b'\x00': return self._parse_samples(packet_info['parameter']) else: return packet_info['parameter']
def _parse_IS_at_response(self, packet_info)
If the given packet is a successful remote AT response for an IS command, parse the parameter field as IO data.
4.791006
3.63738
1.317158
def _parse_ND_at_response(self, packet_info):
    """Parse the parameter field of a successful ND (node discovery)
    AT response into its component fields.

    Field layout: source_addr (2) + source_addr_long (8) +
    null-terminated node_identifier + parent_address (2) +
    device_type (1) + status (1) + profile_id (2) + manufacturer (2).
    Any other packet's 'parameter' field is returned unchanged.

    :raises ValueError: if the parameter length is inconsistent with
        the parsed node-identifier terminator position.
    """
    if packet_info['id'] == 'at_response' and \
            packet_info['command'].lower() == b'nd' and \
            packet_info['status'] == b'\x00':
        result = {}

        # Parse each field directly
        result['source_addr'] = packet_info['parameter'][0:2]
        result['source_addr_long'] = packet_info['parameter'][2:10]

        # Parse the null-terminated node identifier field
        null_terminator_index = 10
        while packet_info['parameter'][null_terminator_index:
                                       null_terminator_index+1] != b'\x00':
            null_terminator_index += 1

        # Parse each field thereafter directly
        result['node_identifier'] = \
            packet_info['parameter'][10:null_terminator_index]
        result['parent_address'] = \
            packet_info['parameter'][null_terminator_index+1:
                                     null_terminator_index+3]
        result['device_type'] = \
            packet_info['parameter'][null_terminator_index+3:
                                     null_terminator_index+4]
        result['status'] = \
            packet_info['parameter'][null_terminator_index+4:
                                     null_terminator_index+5]
        result['profile_id'] = \
            packet_info['parameter'][null_terminator_index+5:
                                     null_terminator_index+7]
        result['manufacturer'] = \
            packet_info['parameter'][null_terminator_index+7:
                                     null_terminator_index+9]

        # Simple check to ensure a good parse
        if null_terminator_index+9 != len(packet_info['parameter']):
            raise ValueError("Improper ND response length: expected {0}, "
                             "read {1} bytes".format(
                                 len(packet_info['parameter']),
                                 null_terminator_index+9)
                             )
        return result
    else:
        return packet_info['parameter']
If the given packet is a successful AT response for an ND command, parse the parameter field.
2.339528
2.259234
1.03554
def halt(self):
    """Stop the background frame-reading thread, if one is running.

    Blocks until the thread has finished cleaning up before returning.
    """
    if not self._callback:
        return
    self._thread_continue = False
    self._thread.join()
halt: None -> None If this instance has a separate thread running, it will be halted. This method will wait until the thread has cleaned up before returning.
9.946175
10.655383
0.933441
def run(self):
    """Thread body: read frames forever and hand each to the callback.

    Overrides threading.Thread.run() and is invoked automatically when
    the instance is created with threading enabled. Unexpected errors
    are forwarded to self._error_callback when one is registered.
    """
    while True:
        try:
            self._callback(self.wait_read_frame())
        except ThreadQuitException:
            # Expected termination of thread due to self.halt()
            break
        except Exception as e:
            # Unexpected thread quit.
            if self._error_callback:
                self._error_callback(e)
run: None -> None This method overrides threading.Thread.run() and is automatically called when an instance is created with threading enabled.
8.251287
7.986479
1.033157
def wait_read_frame(self, timeout=None):
    """Block until a valid API frame arrives, then return its parsed
    contents as a dictionary.

    :param timeout: optional seconds to wait for a frame.
    """
    incoming = self._wait_for_frame(timeout)
    return self._split_response(incoming.data)
wait_read_frame: None -> frame info dictionary wait_read_frame calls XBee._wait_for_frame() and waits until a valid frame appears on the serial port. Once it receives a frame, wait_read_frame attempts to parse the data contained within it and returns the resulting dictionary
7.375449
7.245634
1.017916
def _wait_for_frame(self, timeout=None):
    """Read from the serial port until one valid API frame arrives.

    :param timeout: optional seconds to wait for data; None or <= 0
        means wait forever.
    :returns: the completed APIFrame.
    :raises ThreadQuitException: when running on the background thread
        and self._thread_continue has been cleared by halt().
    :raises _TimeoutException: when the deadline elapses with no data.
    """
    frame = APIFrame(escaped=self._escaped)
    deadline = 0
    if timeout is not None and timeout > 0:
        deadline = time.time() + timeout

    while True:
        if self._callback and not self._thread_continue:
            raise ThreadQuitException

        if self.serial.inWaiting() == 0:
            if deadline and time.time() > deadline:
                raise _TimeoutException
            # Nothing buffered yet; poll again shortly.
            time.sleep(.01)
            continue

        byte = self.serial.read()

        if byte != APIFrame.START_BYTE:
            continue

        # Save all following bytes, if they are not empty
        if len(byte) == 1:
            frame.fill(byte)

        while(frame.remaining_bytes() > 0):
            byte = self.serial.read()

            if len(byte) == 1:
                frame.fill(byte)

        try:
            # Try to parse and return result
            frame.parse()

            # Ignore empty frames
            # NOTE(review): this restart drops the escaped=... flag
            # (unlike the ValueError path below) -- possibly
            # unintended; confirm.
            if len(frame.data) == 0:
                frame = APIFrame()
                continue

            return frame
        except ValueError:
            # Bad frame, so restart
            frame = APIFrame(escaped=self._escaped)
_wait_for_frame: None -> binary data _wait_for_frame will read from the serial port until a valid API frame arrives. It will then return the binary data contained within the frame. If this method is called as a separate thread and self.thread_continue is set to False, the thread will exit by raising a ThreadQuitException.
3.940941
3.662315
1.076079
def main():
    """Send API AT commands (DH, DL, MY, CE) to an XBee Series 1 on
    /dev/ttyUSB0 at 9600 baud and print each response.

    Python 2 script (uses the print statement).
    """
    try:
        # Open serial port
        ser = serial.Serial('/dev/ttyUSB0', 9600)

        # Create XBee Series 1 object
        xbee = XBee(ser)

        # Send AT packet
        xbee.send('at', frame_id='A', command='DH')
        # Wait for response
        response = xbee.wait_read_frame()
        print response

        # Send AT packet
        xbee.send('at', frame_id='B', command='DL')
        # Wait for response
        response = xbee.wait_read_frame()
        print response

        # Send AT packet
        xbee.send('at', frame_id='C', command='MY')
        # Wait for response
        response = xbee.wait_read_frame()
        print response

        # Send AT packet
        xbee.send('at', frame_id='D', command='CE')
        # Wait for response
        response = xbee.wait_read_frame()
        print response

    except KeyboardInterrupt:
        pass
    finally:
        # NOTE(review): if serial.Serial() itself raises, 'ser' is
        # unbound here and this close fails with NameError -- confirm.
        ser.close()
Sends an API AT command to read the lower-order address bits from an XBee Series 1 and looks for a response
2.306478
2.071261
1.113562
def halt(self):
    """Stop the event-loop-driven reader.

    Clears the running flag, removes the serial FD from the IOLoop,
    and resolves any outstanding frame future with None so that
    waiters wake up.
    """
    if self._callback:
        self._running.clear()
        self._ioloop.remove_handler(self.serial.fd)
        if self._frame_future is not None:
            self._frame_future.set_result(None)
            self._frame_future = None
halt: None -> None Stop the event, and remove the FD from the loop handler
4.688219
4.797456
0.97723
def process_frames(self):
    """Coroutine: wait for frames and dispatch each to the callback.

    Runs until halt() clears the running flag. Errors are forwarded to
    self._error_callback when one is registered.
    """
    while self._running.is_set():
        try:
            frame = yield self._get_frame()
            info = self._split_response(frame.data)
            if info is not None:
                self._callback(info)
        except Exception as e:
            # Unexpected quit.
            if self._error_callback:
                self._error_callback(e)
process_frames: None -> None Wait for a frame to become available, when resolved call the callback
4.756338
4.514608
1.053544
def _process_input(self, data, events):
    """IOLoop read handler: consume bytes from the serial connection.

    Builds an API frame from the buffered bytes; on a complete,
    non-empty frame either resolves the pending frame future or
    appends the frame to the processing queue. Non-start bytes and
    invalid frames are silently dropped.
    """
    frame = APIFrame(escaped=self._escaped)

    byte = self.serial.read()

    if byte != APIFrame.START_BYTE:
        return

    # Save all following bytes, if they are not empty
    if len(byte) == 1:
        frame.fill(byte)

    while(frame.remaining_bytes() > 0):
        byte = self.serial.read()

        if len(byte) == 1:
            frame.fill(byte)

    try:
        # Try to parse and return result
        frame.parse()

        # Ignore empty frames
        if len(frame.data) == 0:
            return

        if self._frame_future is not None:
            self._frame_future.set_result(frame)
            self._frame_future = None
        else:
            self._frame_queue.append(frame)
    except ValueError:
        return
_process_input: _process_input will be notified when there is data ready on the serial connection to be read. It will read and process the data into an API Frame and then either resolve a frame future, or push the frame into the queue of frames needing to be processed
4.077132
3.764361
1.083087
def do_serial(self, p):
    """Set and open the serial port, e.g. /dev/tty.usbserial-A4001ib8.

    Python 2 code (print statement, 'except Exception, e' syntax).
    Failures are reported on stdout rather than raised.
    """
    try:
        self.serial.port = p
        self.serial.open()
        print 'Opening serial port: %s' % p
    except Exception, e:
        print 'Unable to open serial port: %s' % p
Set the serial port, e.g.: /dev/tty.usbserial-A4001ib8
2.683016
2.519103
1.065068
def register(self, name, callback, filter):
    """Register a named callback together with its packet filter.

    When a packet arrives, each registered filter is called with the
    packet; if it returns true, the associated callback is invoked
    with the registered name and the packet.

    :param name: unique name for this handler.
    :param callback: function(name, packet) invoked on a match.
    :param filter: function(packet) -> bool deciding whether to call.
    :raises ValueError: if a handler is already registered under name.
    """
    if name in self.names:
        # BUG FIX: the message was previously split with a backslash
        # line-continuation *inside* the string literal, which embedded
        # a run of indentation spaces in the middle of the message.
        raise ValueError(
            "A callback has already been registered with "
            "the name '%s'" % name)

    self.handlers.append({
        'name': name,
        'callback': callback,
        'filter': filter
    })

    self.names.add(name)
register: string, function: string, data -> None, function: data -> boolean -> None Register will save the given name, callback, and filter function for use when a packet arrives. When one arrives, the filter function will be called to determine whether to call its associated callback function. If the filter method returns true, the callback method will be called with its associated name string and the packet which triggered the call.
2.884269
3.7938
0.760259
def run(self, oneshot=False):
    """Read packets from the XBee and dispatch each to the registered
    handlers.

    :param oneshot: when True, process a single packet and return.
    :raises ValueError: if no XBee device was supplied to __init__.
    """
    if not self.xbee:
        # BUG FIX: the message previously contained embedded
        # indentation from a backslash continuation inside the
        # string literal.
        raise ValueError(
            "Either a serial port or an XBee must be provided "
            "to __init__ to execute run()")

    while True:
        self.dispatch(self.xbee.wait_read_frame())
        if oneshot:
            break
run: boolean -> None run will read and dispatch any packet which arrives from the XBee device
8.054512
6.483038
1.242398
def dispatch(self, packet):
    """Offer the packet to every registered handler.

    Each handler's filter is tested against the packet; handlers whose
    filter returns true have their callback invoked with the handler's
    registered name and the packet.
    """
    matching = (entry for entry in self.handlers
                if entry['filter'](packet))
    for entry in matching:
        # Call the handler with its registered name and the packet
        # which passed its filter check.
        entry['callback'](entry['name'], packet)
dispatch: XBee data dict -> None When called, dispatch checks the given packet against each registered callback method and calls each callback whose filter function returns true.
8.229365
7.560724
1.088436
def byteToInt(byte):
    """Return the integer value of a byte.

    Accepts an int (returned unchanged), a length-one Python 2 str
    (via ord()), or a Python 3 bytes object (indexing yields an int).
    """
    if hasattr(byte, 'bit_length'):
        # Already an int
        return byte
    # Py2 str has .encode -> use ord(); py3 bytes indexing gives int.
    if hasattr(byte, 'encode'):
        return ord(byte)
    return byte[0]
byte -> int Determines whether to use ord() or not to get a byte's value.
5.14608
5.434193
0.946981
def get_data(search_string, search_by='ip'):
    """Download reputation data for the given IP from
    talosintelligence.com and return it as a dict.

    Performs three HTTP GETs (details, weighted score, blacklist)
    with browser-like headers, then flattens the interesting fields;
    missing detail fields are reported as "nodata".

    :param search_string: the IP address to look up.
    :param search_by: unused in this body -- presumably selects the
        lookup type; TODO confirm against callers.
    :returns: dict of reputation fields.
    """
    # Detailed reputation record for the IP.
    r_details = requests.get('https://talosintelligence.com/sb_api/query_lookup',
        headers={
            'Referer':'https://talosintelligence.com/reputation_center/lookup?search=%s'%search_string,
            'User-Agent':'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:64.0) Gecko/20100101 Firefox/64.0'
        },
        params = {
            'query':'/api/v2/details/ip/',
            'query_entry':search_string
        }).json()

    # Weighted web reputation score.
    r_wscore = requests.get('https://talosintelligence.com/sb_api/remote_lookup',
        headers={
            'Referer':'https://talosintelligence.com/reputation_center/lookup?search=%s'%search_string,
            'User-Agent':'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:64.0) Gecko/20100101 Firefox/64.0'
        },
        params = {'hostname':'SDS', 'query_string':'/score/wbrs/json?url=%s' % search_string}).json()

    # Talos blacklist status for the IP.
    r_talos_blacklist = requests.get('https://www.talosintelligence.com/sb_api/blacklist_lookup',
        headers={
            'Referer':'https://talosintelligence.com/reputation_center/lookup?search=%s'%search_string,
            'User-Agent':'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:64.0) Gecko/20100101 Firefox/64.0'
        },
        params = {'query_type':'ipaddr', 'query_entry':search_string}).json()

    # would be nice to plot this values
    #r_volume = requests.get('https://talosintelligence.com/sb_api/query_lookup',
    #    params = {
    #        'query':'/api/v2/volume/ip/',
    #        'query_entry':search_string
    #    }).json()

    # No used for now
    #r_related_ips = requests.get('https://talosintelligence.com/sb_api/query_lookup',
    #    params = {
    #        'query':'/api/v2/related_ips/ip/',
    #        'query_entry':search_string
    #    }).json()

    talos_blacklisted = {'status':False}
    if 'classifications' in r_talos_blacklist['entry']:
        talos_blacklisted['status'] = True
        talos_blacklisted['classifications'] = ", ".join(r_talos_blacklist['entry']['classifications'])
        talos_blacklisted['first_seen'] = r_talos_blacklist['entry']['first_seen'] + "UTC"
        talos_blacklisted['expiration'] = r_talos_blacklist['entry']['expiration'] + "UTC"

    data = {
        'address':search_string,
        'hostname':r_details['hostname'] if 'hostname' in r_details else "nodata",
        'volume_change':r_details['daychange'] if 'daychange' in r_details else "nodata",
        'lastday_volume':r_details['daily_mag'] if 'daily_mag' in r_details else "nodata",
        'month_volume':r_details['monthly_mag'] if 'monthly_mag' in r_details else "nodata",
        'email_reputation':r_details['email_score_name'] if 'email_score_name' in r_details else "nodata",
        'web_reputation':r_details['web_score_name'] if 'web_score_name' in r_details else "nodata",
        'weighted_reputation_score':r_wscore['response'],
        'talos_blacklisted':"Yes" if talos_blacklisted['status'] else "No"
        #'weighted_reputation_score':r_wscore[0]['response']['wbrs']['score'],
        #'volumes':zip(*r_volume['data'])
    }
    return data
Download data from talosintelligence.com for the given IP Return tabbed data text
2.40412
2.394827
1.00388
def insert_into_channel(api_key, api_secret, channel_key, video_key, **kwargs):
    """Insert a video into a channel/playlist.

    :param api_key: <string> JWPlatform api-key
    :param api_secret: <string> JWPlatform shared-secret
    :param channel_key: <string> Key of the channel to which add a video.
    :param video_key: <string> Key of the video that should be added.
    :param kwargs: extra channels/videos/create API parameters.
    :return: <dict> Dict which represents the JSON response.
    """
    client = jwplatform.Client(api_key, api_secret)
    logging.info("Inserting video into channel")
    try:
        result = client.channels.videos.create(
            channel_key=channel_key,
            video_key=video_key,
            **kwargs)
    except jwplatform.errors.JWPlatformError as e:
        logging.error("Encountered an error inserting {} into channel {}.\n{}".format(video_key, channel_key, e))
        sys.exit(e.message)
    return result
Function which inserts video into a channel/playlist. :param api_key: <string> JWPlatform api-key :param api_secret: <string> JWPlatform shared-secret :param channel_key: <string> Key of the channel to which add a video. :param video_key: <string> Key of the video that should be added to the channel. :param kwargs: Arguments conforming to standards found @ https://developer.jwplayer.com/jw-platform/reference/v1/methods/videos/create.html :return: <dict> Dict which represents the JSON response.
2.307591
2.489693
0.926858
def list_conversions(api_key, api_secret, video_key, **kwargs):
    """Retrieve the list of a video object's conversions.

    :param api_key: <string> JWPlatform api-key
    :param api_secret: <string> JWPlatform shared-secret
    :param video_key: <string> Video's object ID.
    :param kwargs: extra videos/conversions/list API parameters.
    :return: <dict> Dict which represents the JSON response.
    """
    client = jwplatform.Client(api_key, api_secret)
    logging.info("Querying for video conversions.")
    try:
        result = client.videos.conversions.list(video_key=video_key, **kwargs)
    except jwplatform.errors.JWPlatformError as e:
        logging.error("Encountered an error querying for video conversions.\n{}".format(e))
        sys.exit(e.message)
    return result
Function which retrieves a list of a video object's conversions. :param api_key: <string> JWPlatform api-key :param api_secret: <string> JWPlatform shared-secret :param video_key: <string> Video's object ID. Can be found within JWPlayer Dashboard. :param kwargs: Arguments conforming to standards found @ https://developer.jwplayer.com/jw-platform/reference/v1/methods/videos/conversions/list.html :return: <dict> Dict which represents the JSON response.
2.595023
2.599287
0.998359
def _build_request(self, path, params=None):
    """Build the full request URL and signed parameter dict for an
    API call (Python 2 code: relies on the builtin 'unicode').

    :param path: API path appended after the version segment.
    :param params: optional dict of caller parameters (not mutated).
    :returns: (url, params) tuple with 'api_signature' added.
    """
    _url = '{scheme}://{host}{port}/{version}{path}'.format(
        scheme=self._scheme,
        host=self._host,
        # Omit the port segment for the default HTTP port.
        port=':{}'.format(self._port) if self._port != 80 else '',
        version=self._api_version,
        path=path)

    if params is not None:
        _params = params.copy()
    else:
        _params = dict()

    # Add required API parameters
    _params['api_nonce'] = str(random.randint(0, 999999999)).zfill(9)
    _params['api_timestamp'] = int(time.time())
    _params['api_key'] = self.__key
    _params['api_format'] = 'json'
    _params['api_kit'] = 'py-{}{}'.format(
        __version__, '-{}'.format(self._agent) if self._agent else '')

    # Construct Signature Base String
    sbs = '&'.join(['{}={}'.format(
        quote((unicode(key).encode('utf-8')), safe='~'),
        quote((unicode(value).encode('utf-8')), safe='~')
    ) for key, value in sorted(_params.items())])

    # Add signature to the _params dict
    _params['api_signature'] = hashlib.sha1(
        '{}{}'.format(sbs, self.__secret).encode('utf-8')).hexdigest()

    return _url, _params
Build API request
2.490895
2.439181
1.021201
def create_video(api_key, api_secret, local_video_path, api_format='json', **kwargs):
    """Create a new video object via the single-file upload method and
    upload the local media file to the returned URL.

    :param api_key: <string> JWPlatform api-key
    :param api_secret: <string> JWPlatform shared-secret
    :param local_video_path: <string> Path to media on local machine.
    :param api_format: <string> one of 'py', 'xml', 'json', 'php'.
    :param kwargs: extra /videos/create API parameters.
    """
    # Setup API client
    jwplatform_client = jwplatform.Client(api_key, api_secret)

    # Make /videos/create API call
    logging.info("Registering new Video-Object")
    try:
        response = jwplatform_client.videos.create(upload_method='single', **kwargs)
    except jwplatform.errors.JWPlatformError as e:
        logging.error("Encountered an error creating a video\n{}".format(e))
        # BUG FIX: execution previously fell through with 'response'
        # unbound, causing a NameError below. Exit on failure like the
        # sibling helpers (replace_video, update_thumbnail) do.
        sys.exit(e.message)
    logging.info(response)

    # Construct base url for upload
    upload_url = '{}://{}{}'.format(
        response['link']['protocol'],
        response['link']['address'],
        response['link']['path']
    )

    # Query parameters for the upload
    query_parameters = response['link']['query']
    query_parameters['api_format'] = api_format

    with open(local_video_path, 'rb') as f:
        files = {'file': f}
        r = requests.post(upload_url, params=query_parameters, files=files)
        logging.info('uploading file {} to url {}'.format(local_video_path, r.url))
        logging.info('upload response: {}'.format(r.text))
        logging.info(r)
Function which creates new video object via singlefile upload method. :param api_key: <string> JWPlatform api-key :param api_secret: <string> JWPlatform shared-secret :param local_video_path: <string> Path to media on local machine. :param api_format: <string> Acceptable values include 'py','xml','json',and 'php' :param kwargs: Arguments conforming to standards found @ https://developer.jwplayer.com/jw-platform/reference/v1/methods/videos/create.html :return:
3.076677
3.012426
1.021329
def replace_video(api_key, api_secret, local_video_path, video_key, **kwargs):
    """Replace the content of an EXISTING video object via an s3 PUT
    upload.

    :param api_key: <string> JWPlatform api-key
    :param api_secret: <string> JWPlatform shared-secret
    :param local_video_path: <string> Path to media on local machine.
    :param video_key: <string> Video's object ID.
    :param kwargs: extra /videos/update API parameters.
    """
    filename = os.path.basename(local_video_path)

    # Setup API client
    jwplatform_client = jwplatform.Client(api_key, api_secret)

    logging.info("Updating Video")
    try:
        response = jwplatform_client.videos.update(
            video_key=video_key,
            upload_method='s3',
            update_file='True',
            **kwargs)
    except jwplatform.errors.JWPlatformError as e:
        logging.error("Encountered an error updating the video\n{}".format(e))
        sys.exit(e.message)
    logging.info(response)

    # Construct base url for upload
    upload_url = '{}://{}{}'.format(
        response['link']['protocol'],
        response['link']['address'],
        response['link']['path']
    )

    # Query parameters for the upload
    query_parameters = response['link']['query']

    # HTTP PUT upload using requests
    headers = {'Content-Disposition': 'attachment; filename="{}"'.format(filename)}
    with open(local_video_path, 'rb') as f:
        r = requests.put(upload_url, params=query_parameters, headers=headers, data=f)
        logging.info('uploading file {} to url {}'.format(local_video_path, r.url))
        logging.info('upload response: {}'.format(r.text))
        logging.info(r)
Function which allows to replace the content of an EXISTING video object. :param api_key: <string> JWPlatform api-key :param api_secret: <string> JWPlatform shared-secret :param local_video_path: <string> Path to media on local machine. :param video_key: <string> Video's object ID. Can be found within JWPlayer Dashboard. :param kwargs: Arguments conforming to standards found @ https://developer.jwplayer.com/jw-platform/reference/v1/methods/videos/create.html :return:
2.90731
3.0085
0.966365
def update_thumbnail(api_key, api_secret, video_key, position=7.0, **kwargs):
    """Update the thumbnail of an EXISTING video using the position
    parameter (seconds into the video).

    :param api_key: <string> JWPlatform api-key
    :param api_secret: <string> JWPlatform shared-secret
    :param video_key: <string> Video's object ID.
    :param position: <float> Seconds into the video's duration from
        which to extract the thumbnail.
    :param kwargs: extra videos/thumbnails/update API parameters.
    :return: <dict> Dict which represents the JSON response.
    """
    client = jwplatform.Client(api_key, api_secret)
    logging.info("Updating video thumbnail.")
    try:
        result = client.videos.thumbnails.update(
            video_key=video_key,
            # Seconds into the video to extract the thumbnail from.
            position=position,
            **kwargs)
    except jwplatform.errors.JWPlatformError as e:
        logging.error("Encountered an error updating thumbnail.\n{}".format(e))
        sys.exit(e.message)
    return result
Function which updates the thumbnail for an EXISTING video utilizing position parameter. This function is useful for selecting a new thumbnail from within the already existing video content. Instead of position parameter, user may opt to utilize thumbnail_index parameter. Please see documentation for further information. :param api_key: <string> JWPlatform api-key :param api_secret: <string> JWPlatform shared-secret :param video_key: <string> Video's object ID. Can be found within JWPlayer Dashboard. :param position: <float> Represents seconds into the duration of a video, for thumbnail extraction. :param kwargs: Arguments conforming to standards found @ https://developer.jwplayer.com/jw-platform/reference/v1/methods/videos/thumbnails/update.html :return: <dict> Dict which represents the JSON response.
3.382872
3.039984
1.112793
def update_thumbnail_via_upload(api_key, api_secret, video_key, local_video_image_path='', api_format='json', **kwargs):
    """Update the thumbnail of a video object with a locally saved
    image uploaded via HTTP POST.

    :param api_key: <string> JWPlatform api-key
    :param api_secret: <string> JWPlatform shared-secret
    :param video_key: <string> Video's object ID.
    :param local_video_image_path: <string> Local path to an image.
    :param api_format: <string> one of 'py', 'xml', 'json', 'php'.
    :param kwargs: extra videos/thumbnails/update API parameters.
    """
    jwplatform_client = jwplatform.Client(api_key, api_secret)
    logging.info("Updating video thumbnail.")
    try:
        response = jwplatform_client.videos.thumbnails.update(
            video_key=video_key,
            **kwargs)
    except jwplatform.errors.JWPlatformError as e:
        logging.error("Encountered an error updating thumbnail.\n{}".format(e))
        sys.exit(e.message)
    logging.info(response)

    # Construct base url for upload
    upload_url = '{}://{}{}'.format(
        response['link']['protocol'],
        response['link']['address'],
        response['link']['path']
    )

    # Query parameters for the upload
    query_parameters = response['link']['query']
    query_parameters['api_format'] = api_format

    with open(local_video_image_path, 'rb') as f:
        files = {'file': f}
        r = requests.post(upload_url, params=query_parameters, files=files)
        logging.info('uploading file {} to url {}'.format(local_video_image_path, r.url))
        logging.info('upload response: {}'.format(r.text))
Function which updates the thumbnail for a particular video object with a locally saved image. :param api_key: <string> JWPlatform api-key :param api_secret: <string> JWPlatform shared-secret :param video_key: <string> Video's object ID. Can be found within JWPlayer Dashboard. :param local_video_image_path: <string> Local system path to an image. :param api_format: <string> REQUIRED Acceptable values include 'py','xml','json',and 'php' :param kwargs: Arguments conforming to standards found @ https://developer.jwplayer.com/jw-platform/reference/v1/methods/videos/thumbnails/update.html :return: <dict> Dict which represents the JSON response.
2.549522
2.580223
0.988102
def run_upload(video_file_path):
    """Configure upload_parameters and perform a chunked multipart
    upload of the given video file.

    :param video_file_path: <str> the absolute path to the video file
    """
    upload_parameters = {
        'file_path': video_file_path,
        'file_size': os.stat(video_file_path).st_size,
        'file_name': os.path.basename(video_file_path)
    }

    try:
        # Setup API client
        jwplatform_client = Client(JW_API_KEY, JW_API_SECRET)
        # Make /videos/create API call with multipart parameter specified
        jwplatform_video_create_response = jwplatform_client.videos.create(
            upload_method='multipart',
            title=upload_parameters['file_name']
        )
    except JWPlatformError:
        logging.exception('An error occurred during the uploader setup. Check that your API keys are properly '
                          'set up in your environment, and ensure that the video file path exists.')
        return

    # Construct base url for upload
    upload_parameters['upload_url'] = '{protocol}://{address}{path}'.format(**jwplatform_video_create_response['link'])
    logging.info('Upload URL to be used: {}'.format(upload_parameters['upload_url']))

    upload_parameters['query_parameters'] = jwplatform_video_create_response['link']['query']
    upload_parameters['query_parameters']['api_format'] = 'json'

    upload_parameters['headers'] = {'X-Session-ID': jwplatform_video_create_response['session_id']}

    # The chunk offset will be updated several times during the course of the upload
    upload_parameters['chunk_offset'] = 0

    # Perform the multipart upload
    with open(upload_parameters['file_path'], 'rb') as file_to_upload:
        while True:
            chunk = file_to_upload.read(BYTES_TO_BUFFER)
            if len(chunk) <= 0:
                break
            try:
                upload_chunk(chunk, upload_parameters)
            # Log any exceptions that bubbled up
            except requests.exceptions.RequestException:
                logging.exception('Error posting data, stopping upload...')
                break
Configures all of the needed upload_parameters and sets up all information pertinent to the video to be uploaded. :param video_file_path: <str> the absolute path to the video file
3.517804
3.500424
1.004965
def upload_chunk(chunk, upload_parameters):
    """POST one portion of the video file to the upload URL.

    Updates upload_parameters['chunk_offset'] so the next call starts
    where this chunk ended.

    :param chunk: <byte[]> the raw bytes of data from the video file
    :param upload_parameters: <dict> upload_url, query_parameters,
        headers, file_size, file_name and chunk_offset.
    :raises requests.exceptions.RequestException: on HTTP failure.
    """
    begin_chunk = upload_parameters['chunk_offset']
    # The next chunk will begin at (begin_chunk + len(chunk)), so the -1
    # ensures that the ranges do not overlap
    end_chunk = begin_chunk + len(chunk) - 1
    file_size = upload_parameters['file_size']
    # BUG FIX: this previously read 'file_size', which sent the byte
    # count as the filename in the Content-Disposition header.
    filename = upload_parameters['file_name']
    logging.info("begin_chunk / end_chunk = {} / {}".format(begin_chunk, end_chunk))

    upload_parameters['headers'].update(
        {
            'X-Content-Range': 'bytes {}-{}/{}'.format(begin_chunk, end_chunk, file_size),
            'Content-Disposition': 'attachment; filename="{}"'.format(filename),
            'Content-Type': 'application/octet-stream',
            'Content-Length': str((end_chunk - begin_chunk) + 1)
        }
    )

    response = requests.post(
        upload_parameters['upload_url'],
        params=upload_parameters['query_parameters'],
        headers=upload_parameters['headers'],
        data=chunk
    )
    response.raise_for_status()

    # As noted before, the next chunk begins at (begin_chunk + len(chunk))
    upload_parameters['chunk_offset'] = begin_chunk + len(chunk)
Handles the POST request needed to upload a single portion of the video file. Serves as a helper method for upload_by_multipart(). The offset used to determine where a chunk begins and ends is updated in the course of this method's execution. :param chunk: <byte[]> the raw bytes of data from the video file :param upload_parameters: <dict> a collection of all pieces of info needed to upload the video
2.505422
2.583118
0.969922
def make_csv(api_key, api_secret, path_to_csv=None, result_limit=1000, **kwargs):
    """Fetch the whole video library and append each video object's
    metadata to a CSV file.

    Rate-limit errors are retried with exponential back-off up to
    max_retries times; any other API error is re-raised.

    :param api_key: <string> JWPlatform api-key
    :param api_secret: <string> JWPlatform shared-secret
    :param path_to_csv: <string> destination CSV path; defaults to
        ./video_list.csv.
    :param result_limit: <int> videos fetched per API request.
    :param kwargs: extra /videos/list API parameters.
    """
    path_to_csv = path_to_csv or os.path.join(os.getcwd(), 'video_list.csv')
    timeout_in_seconds = 2
    max_retries = 3
    retries = 0
    offset = 0
    videos = list()
    jwplatform_client = jwplatform.Client(api_key, api_secret)
    logging.info("Querying for video list.")
    while True:
        try:
            response = jwplatform_client.videos.list(result_limit=result_limit, result_offset=offset, **kwargs)
        except jwplatform.errors.JWPlatformRateLimitExceededError:
            logging.error("Encountered rate limiting error. Backing off on request time.")
            if retries == max_retries:
                raise jwplatform.errors.JWPlatformRateLimitExceededError()
            # Exponential back off: the timeout is squared each retry
            # (2 -> 4 -> 16 seconds).
            timeout_in_seconds *= timeout_in_seconds
            retries += 1
            time.sleep(timeout_in_seconds)
            continue
        except jwplatform.errors.JWPlatformError as e:
            logging.error("Encountered an error querying for videos list.\n{}".format(e))
            raise e

        # Reset retry flow-control variables upon a successful
        # (i.e. not rate-limited) query.
        retries = 0
        timeout_in_seconds = 2

        # Add all fetched video objects to our videos list.
        next_videos = response.get('videos', [])
        last_query_total = response.get('total', 0)
        videos.extend(next_videos)
        offset += len(next_videos)
        logging.info("Accumulated {} videos.".format(offset))
        # Condition which defines you've reached the end of the library
        if offset >= last_query_total:
            break

    # Section for writing video library to csv
    desired_fields = ['key', 'title', 'description', 'tags', 'date', 'link']
    should_write_header = not os.path.isfile(path_to_csv)
    with open(path_to_csv, 'a+') as path_to_csv:
        # Only write columns to the csv which are specified above.
        # Columns not specified are ignored.
        writer = csv.DictWriter(path_to_csv, fieldnames=desired_fields, extrasaction='ignore')
        if should_write_header:
            writer.writeheader()
        writer.writerows(videos)
Function which fetches a video library and writes each video_objects Metadata to CSV. Useful for CMS systems. :param api_key: <string> JWPlatform api-key :param api_secret: <string> JWPlatform shared-secret :param path_to_csv: <string> Local system path to desired CSV. Default will be within current working directory. :param result_limit: <int> Number of video results returned in response. (Suggested to leave at default of 1000) :param kwargs: Arguments conforming to standards found @ https://developer.jwplayer.com/jw-platform/reference/v1/methods/videos/list.html :return: <dict> Dict which represents the JSON response.
3.417412
3.257835
1.048983
def create_channel(api_key, api_secret, channel_type='manual', **kwargs):
    """Create a new channel (a container of video/media objects).

    :param api_key: <string> JWPlatform api-key
    :param api_secret: <string> JWPlatform shared-secret
    :param channel_type: <string> one of 'manual', 'dynamic',
        'trending', 'feed', 'search'.
    :param kwargs: extra channels/create API parameters.
    :return: <dict> Dict which represents the JSON response.
    """
    client = jwplatform.Client(api_key, api_secret)
    logging.info("Creating new channel with keyword args.")
    try:
        result = client.channels.create(type=channel_type, **kwargs)
    except jwplatform.errors.JWPlatformError as e:
        logging.error("Encountered an error creating new channel.\n{}".format(e))
        sys.exit(e.message)
    return result
Function which creates a new channel. Channels serve as containers of video/media objects. :param api_key: <string> JWPlatform api-key :param api_secret: <string> JWPlatform shared-secret :param channel_type: <string> REQUIRED Acceptable values include 'manual','dynamic','trending','feed','search' :param kwargs: Arguments conforming to standards found @ https://developer.jwplayer.com/jw-platform/reference/v1/methods/channels/create.html :return: <dict> Dict which represents the JSON response.
3.222862
3.18868
1.01072
def update_custom_params(api_key, api_secret, video_key, params):
    """Update a video's custom parameters.

    Custom params are passed to the platform as "custom.<key>" keys,
    so the given dict is prefixed before the API call.

    :param api_key: <string> JWPlatform api-key
    :param api_secret: <string> JWPlatform shared-secret
    :param video_key: <string> Video's object ID.
    :param params: dict of custom key/value pairs, e.g.
        {'year': '2017', 'category': 'comedy'}.
    :return: None
    """
    prefixed = {'custom.{}'.format(key): value
                for key, value in params.items()}

    # Setup API client
    client = jwplatform.Client(api_key, api_secret)

    logging.info("Updating Video")
    try:
        result = client.videos.update(
            video_key=video_key,
            **prefixed)
    except jwplatform.errors.JWPlatformError as e:
        logging.error("Encountered an error updating the video\n{}".format(e))
        sys.exit(e.message)
    logging.info(result)
Function which allows you to update a video's custom params. Custom params are indicated by key-values of "custom.<key>" = "<value>" so they must be provided as a dictionary and passed to the platform API call. :param api_key: <string> JWPlatform api-key :param api_secret: <string> JWPlatform shared-secret :param video_key: <string> Video's object ID. Can be found within JWPlayer Dashboard. :param params: Custom params in the format of a dictionary, e.g. >>> params = {'year': '2017', 'category': 'comedy'} >>> update_custom_params('XXXXXXXX', 'XXXXXXXXXXXXXXXXX', 'dfT6JSb2', params) :return: None
3.102036
3.175621
0.976828
def image(self):
    """Return a (CAPTCHA text, PIL Image) pair.

    The image is rendered on the fly: morphed text, a morphed line,
    optional white noise (blended 50/50 when _whiteNoise returns an
    image), then a resize to self.size.
    """
    # BUG FIX: a stray 'r' (the raw-string prefix of a stripped
    # docstring) preceded the first statement, making the body a
    # syntax error.
    text = self.text
    w, h = self.font.getsize(text)
    margin_x = round(self.margin_x * w / self.w)
    margin_y = round(self.margin_y * h / self.h)
    image = Image.new('RGB',
                      (w + 2*margin_x, h + 2*margin_y),
                      (255, 255, 255))
    # Text
    self._writeText(image, text, pos=(margin_x, margin_y))
    # Line
    self._drawLine(image)
    # White noise
    noise = self._whiteNoise(image.size)
    if noise is not None:
        image = Image.blend(image, noise, 0.5)
    # Resize
    image = image.resize(self.size, resample=self.resample)
    return (text, image)
r""" Tuple with a CAPTCHA text and an Image object. Images are generated on the fly, using given text source, TTF font and other parameters passable through __init__. All letters in used text are morphed. Also a line is morphed and pasted onto CAPTCHA text. Additionally, if self.noise > 1/255, a "snowy" image is merged with CAPTCHA image with a 50/50 ratio. Property returns a pair containing a string with text in returned image and image itself. :returns: ``tuple`` (CAPTCHA text, Image object)
2.905833
2.539462
1.144271
def bytes(self):
    """Return a (CAPTCHA text, BytesIO) pair.

    Renders self.image and saves its contents into an in-memory
    buffer, rewound to position 0 before returning.
    """
    # BUG FIX: a stray 'r' (leaked raw-string docstring prefix)
    # preceded the first statement, making the body a syntax error;
    # the local was also renamed to stop shadowing the builtin.
    text, image = self.image
    buffer = BytesIO()
    image.save(buffer, format=self.format)
    buffer.seek(0)
    return (text, buffer)
r""" Tuple with a CAPTCHA text and a BytesIO object. Property calls self.image and saves image contents in a BytesIO instance, returning CAPTCHA text and BytesIO as a tuple. See: image. :returns: ``tuple`` (CAPTCHA text, BytesIO object)
7.904338
4.440337
1.780121
def write(self, file):
    """Save the CAPTCHA image at the given filepath.

    :param file: path (or file object) where the image will be saved.
    :returns: ``tuple`` (CAPTCHA text, filepath)
    """
    # BUG FIX: a stray 'r' (leaked raw-string docstring prefix)
    # preceded the first statement, making the body a syntax error.
    text, image = self.image
    image.save(file, format=self.format)
    return (text, file)
r""" Save CAPTCHA image in given filepath. Property calls self.image and saves image contents in a file, returning CAPTCHA text and filepath as a tuple. See: image. :param file: Path to file, where CAPTCHA image will be saved. :returns: ``tuple`` (CAPTCHA text, filepath)
12.327855
7.605144
1.620989
def text(self):
    """Return text from self.source: either the string itself or, for
    a callable source, the result of calling it.
    """
    source = self.source
    return source if isinstance(source, str) else source()
Text received from self.source.
4.723984
3.532701
1.337216
def _writeText(self, image, text, pos):
    """Render each character of `text` into `image` at `pos`, morphing
    every letter individually before pasting it.
    """
    offset = 0
    x, y = pos
    for c in text:
        # Write letter
        c_size = self.font.getsize(c)
        c_image = Image.new('RGBA', c_size, (0, 0, 0, 0))
        c_draw = ImageDraw.Draw(c_image)
        c_draw.text((0, 0), c, font=self.font, fill=(0, 0, 0, 255))
        # Transform
        c_image = self._rndLetterTransform(c_image)
        # Paste onto image (the letter image doubles as its alpha mask)
        image.paste(c_image, (x+offset, y), c_image)

        offset += c_size[0]
Write morphed text in Image object.
2.306571
2.265855
1.017969
def _drawLine(self, image):
    """Draw a morphed line onto *image*.

    The line is drawn on a 5x oversampled transparent layer, morphed,
    scaled back down and pasted over the image.
    """
    big_w, big_h = image.size
    big_w *= 5
    big_h *= 5
    layer = Image.new('RGBA', (big_w, big_h), (0, 0, 0, 0))
    draw = ImageDraw.Draw(layer)
    # Endpoints: start near the left edge, end near the right edge.
    start = (int(big_w * random.uniform(0, 0.1)),
             int(big_h * random.uniform(0, 1)))
    end = (int(big_w * random.uniform(0.9, 1)),
           int(big_h * random.uniform(0, 1)))
    # Line width modifier was chosen as an educated guess
    # based on default image area.
    stroke = round((big_w * big_h) ** 0.5 * 2.284e-2)
    draw.line((start, end), fill=(0, 0, 0, 255), width=stroke)
    layer = self._rndLineTransform(layer)
    layer = layer.resize(image.size, resample=self.resample)
    image.paste(layer, (0, 0), layer)
if self.noise > 0.003921569: # 1./255. w, h = size pixel = (lambda noise: round(255 * random.uniform(1-noise, 1))) n_image = Image.new('RGB', size, (0, 0, 0, 0)) rnd_grid = map(lambda _: tuple([pixel(self.noise)]) * 3, [0] * w * h) n_image.putdata(list(rnd_grid)) return n_image else: return None
def _whiteNoise(self, size)
Generate white noise and merge it with given Image object.
4.877697
4.784313
1.019519
def _rndLetterTransform(self, image):
    """Randomly morph a single character with a QUAD transform.

    :param image: RGBA Image holding one rendered letter.
    :returns: transformed Image of the same size.
    """
    w, h = image.size
    dx = w * random.uniform(0.2, 0.7)
    dy = h * random.uniform(0.2, 0.7)
    x1, y1 = self.__class__._rndPointDisposition(dx, dy)
    x2, y2 = self.__class__._rndPointDisposition(dx, dy)
    # Grow the source quad by the absolute dispositions on each axis.
    # Fix: the height previously grew by the x-dispositions (copy/paste
    # slip); it should grow by the y-dispositions.
    w += abs(x1) + abs(x2)
    h += abs(y1) + abs(y2)
    quad = self.__class__._quadPoints((w, h), (x1, y1), (x2, y2))
    return image.transform(image.size, Image.QUAD,
                           data=quad, resample=self.resample)
x = int(random.uniform(-dx, dx)) y = int(random.uniform(-dy, dy)) return (x, y)
def _rndPointDisposition(dx, dy)
Return random disposition point.
2.695192
2.183927
1.234104
w, h = size x1, y1 = disp1 x2, y2 = disp2 return ( x1, -y1, -x1, h + y2, w + x2, h - y2, w - x2, y1 )
def _quadPoints(size, disp1, disp2)
Return points for QUAD transformation.
3.534172
3.359393
1.052027
def path_helper(self, operations, view, **kwargs):
    """Path helper that allows passing a bottle view function.

    Merges operations parsed from the view's docstring into
    *operations* and returns the OpenAPI path for the view's route.
    """
    operations.update(
        yaml_utils.load_operations_from_docstring(view.__doc__))
    target_app = kwargs.get('app', _default_app)
    matched_route = self._route_for_view(target_app, view)
    return self.bottle_path_to_openapi(matched_route.rule)
def path_helper(self, operations, view, app=None, **kwargs):
    """Path helper that allows passing a Flask view function.

    Merges operations from the view's docstring into *operations*;
    for MethodView classes, also collects per-HTTP-method operations
    from the individual method docstrings.  Returns the OpenAPI path.
    """
    rule = self._rule_for_view(view, app=app)
    operations.update(
        yaml_utils.load_operations_from_docstring(view.__doc__))
    if hasattr(view, 'view_class') and issubclass(view.view_class, MethodView):
        for http_method in view.methods:
            if http_method not in rule.methods:
                continue
            key = http_method.lower()
            handler = getattr(view.view_class, key)
            operations[key] = yaml_utils.load_yaml_from_docstring(
                handler.__doc__)
    return self.flaskpath2openapi(rule.rule)
def _operations_from_methods(handler_class):
    """Generator of operations described in handler's http methods.

    :param handler_class:
    :type handler_class: RequestHandler descendant
    """
    for http_method in yaml_utils.PATH_KEYS:
        doc = getattr(handler_class, http_method).__doc__
        operation_data = yaml_utils.load_yaml_from_docstring(doc)
        if operation_data:
            yield {http_method: operation_data}
def tornadopath2openapi(urlspec, method):
    """Convert Tornado URLSpec to an OpenAPI-compliant path.

    :param urlspec:
    :type urlspec: URLSpec
    :param method: Handler http method
    :type method: function
    """
    if sys.version_info >= (3, 3):
        # Skip the leading `self` parameter.
        arg_names = list(inspect.signature(method).parameters.keys())[1:]
    else:
        # Python 2: Tornado coroutines wrap the original function.
        if getattr(method, '__tornado_coroutine__', False):
            method = method.__wrapped__
        arg_names = inspect.getargspec(method).args[1:]

    placeholders = tuple('{{{}}}'.format(name) for name in arg_names)
    try:
        template = urlspec.matcher._path
    except AttributeError:  # tornado<4.5
        template = urlspec._path
    path = template % placeholders
    if path.count('/') > 1:
        path = path.rstrip('/?*')
    return path