Dataset columns (string lengths):
signature: 8 to 3.44k
body: 0 to 1.41M
docstring: 1 to 122k
id: 5 to 17
def default_setup():
service = LXCService<EOL>lxc_types = dict(LXC=LXC, LXCWithOverlays=LXCWithOverlays,<EOL>__default__=UnmanagedLXC)<EOL>loader = LXCLoader(lxc_types, service)<EOL>manager = LXCManager(loader, service)<EOL>return LXCAPI(manager=manager, service=service)<EOL>
The default API setup for lxc4u. This is the API that you access globally from lxc4u.
f3853:m0
@classmethod<EOL><INDENT>def list_names(cls):<DEDENT>
response = subwrap.run(['<STR_LIT>'])<EOL>output = response.std_out<EOL>return map(str.strip, output.splitlines())<EOL>
Lists all known LXC names
f3854:c0:m0
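The actual command literal is elided in the body above; a rough sketch of the same pattern (run a command and return its stdout as stripped lines), assuming the standard subprocess module rather than the subwrap wrapper and a hypothetical lxc-ls command:

import subprocess

def list_names_sketch(command=('lxc-ls',)):
    # 'lxc-ls' is an assumption; the real command string is elided in this record.
    result = subprocess.run(command, capture_output=True, text=True, check=True)
    # Strip surrounding whitespace from every output line, as list_names does.
    return [line.strip() for line in result.stdout.splitlines()]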
@classmethod<EOL><INDENT>def lxc_path(cls, *join_paths):<DEDENT>
response = subwrap.run(['<STR_LIT>', '<STR_LIT>'])<EOL>output = response.std_out<EOL>lxc_path = output.splitlines()[<NUM_LIT:0>]<EOL>lxc_path = lxc_path.strip()<EOL>return os.path.join(lxc_path, *join_paths)<EOL>
Returns the LXC path (the default on Ubuntu is /var/lib/lxc)
f3854:c0:m1
@classmethod<EOL><INDENT>def create(cls, name, template=None):<DEDENT>
command = ['<STR_LIT>', '<STR_LIT>', name]<EOL>if template:<EOL><INDENT>command.extend(['<STR_LIT>', template])<EOL><DEDENT>subwrap.run(command)<EOL>
Creates an LXC
f3854:c0:m2
@classmethod<EOL><INDENT>def start(cls, name):<DEDENT>
command = ['<STR_LIT>', '<STR_LIT>', name, '<STR_LIT>']<EOL>subwrap.run(command)<EOL>
Starts an LXC as a daemon. This cannot start an LXC as a non-daemon; that wouldn't make sense.
f3854:c0:m3
@classmethod<EOL><INDENT>def stop(cls, name):<DEDENT>
command = ['<STR_LIT>', '<STR_LIT>', name]<EOL>subwrap.run(command)<EOL>
Stops a running LXC
f3854:c0:m4
@classmethod<EOL><INDENT>def info(cls, name, get_state=True, get_pid=True):<DEDENT>
<EOL>command = ['<STR_LIT>', '<STR_LIT>', name]<EOL>response = subwrap.run(command)<EOL>lines = map(split_info_line, response.std_out.splitlines())<EOL>return dict(lines)<EOL>
Retrieves and parses info about an LXC
f3854:c0:m6
def select_actions(root, action_space, max_episode_steps):
node = root<EOL>acts = []<EOL>steps = <NUM_LIT:0><EOL>while steps < max_episode_steps:<EOL><INDENT>if node is None:<EOL><INDENT>act = action_space.sample()<EOL><DEDENT>else:<EOL><INDENT>epsilon = EXPLORATION_PARAM / np.log(node.visits + <NUM_LIT:2>)<EOL>if random.random() < epsilon:<EOL><INDENT>act = action_space.sample()<EOL><DEDENT>else:<EOL><INDENT>act_value = {}<EOL>for act in range(action_space.n):<EOL><INDENT>if node is not None and act in node.children:<EOL><INDENT>act_value[act] = node.children[act].value<EOL><DEDENT>else:<EOL><INDENT>act_value[act] = -np.inf<EOL><DEDENT><DEDENT>best_value = max(act_value.values())<EOL>best_acts = [<EOL>act for act, value in act_value.items() if value == best_value<EOL>]<EOL>act = random.choice(best_acts)<EOL><DEDENT>if act in node.children:<EOL><INDENT>node = node.children[act]<EOL><DEDENT>else:<EOL><INDENT>node = None<EOL><DEDENT><DEDENT>acts.append(act)<EOL>steps += <NUM_LIT:1><EOL><DEDENT>return acts<EOL>
Select actions from the tree. Normally we select the greedy action that has the highest reward associated with that subtree. At each step there is a small chance to select a random action instead, based on the exploration param and the visit count of the current node. We select actions for the longest possible episode, but normally these will not all be used; they will instead be truncated to the length of the actual episode and then used to update the tree.
f3874:m0
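A minimal sketch of the exploration rule described above, with a hypothetical EXPLORATION_PARAM value (the constant is defined elsewhere in the module and not shown in this record):

import math
import random

EXPLORATION_PARAM = 0.005  # hypothetical value

def pick_act(node_visits, greedy_act, n_actions):
    # Exploration probability shrinks as the current node accumulates visits.
    epsilon = EXPLORATION_PARAM / math.log(node_visits + 2)
    if random.random() < epsilon:
        return random.randrange(n_actions)  # explore: random action
    return greedy_act                       # exploit: keep the greedy choice

# Epsilon for a node with 0 visits vs. 1000 visits:
print(EXPLORATION_PARAM / math.log(2), EXPLORATION_PARAM / math.log(1002))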
def rollout(env, acts):
total_rew = <NUM_LIT:0><EOL>env.reset()<EOL>steps = <NUM_LIT:0><EOL>for act in acts:<EOL><INDENT>_obs, rew, done, _info = env.step(act)<EOL>steps += <NUM_LIT:1><EOL>total_rew += rew<EOL>if done:<EOL><INDENT>break<EOL><DEDENT><DEDENT>return steps, total_rew<EOL>
Perform a rollout using a preset collection of actions
f3874:m1
def update_tree(root, executed_acts, total_rew):
root.value = max(total_rew, root.value)<EOL>root.visits += <NUM_LIT:1><EOL>new_nodes = <NUM_LIT:0><EOL>node = root<EOL>for step, act in enumerate(executed_acts):<EOL><INDENT>if act not in node.children:<EOL><INDENT>node.children[act] = Node()<EOL>new_nodes += <NUM_LIT:1><EOL><DEDENT>node = node.children[act]<EOL>node.value = max(total_rew, node.value)<EOL>node.visits += <NUM_LIT:1><EOL><DEDENT>return new_nodes<EOL>
Given the tree, a list of actions that were executed before the game ended, and a reward, update the tree so that every node along the path formed by the executed actions is updated with the new reward.
f3874:m2
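The Node class itself is not shown in this record; a minimal sketch of the update described above, assuming Node only needs value, visits, and children:

class Node:
    def __init__(self):
        self.value = -float("inf")
        self.visits = 0
        self.children = {}

def update_path(root, executed_acts, total_rew):
    # Walk the executed actions from the root, creating missing nodes,
    # and record the best reward seen along the path.
    node = root
    node.value = max(total_rew, node.value)
    node.visits += 1
    for act in executed_acts:
        node = node.children.setdefault(act, Node())
        node.value = max(total_rew, node.value)
        node.visits += 1

root = Node()
update_path(root, [0, 2, 2], total_rew=10.0)
print(root.children[0].children[2].value)  # 10.0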
@abc.abstractmethod<EOL><INDENT>def get_image(self, obs, venv):<DEDENT>
pass<EOL>
Given an observation and the Env object, return an RGB array to display to the user.
f3875:c0:m4
@abc.abstractmethod<EOL><INDENT>def keys_to_act(self, keys):<DEDENT>
pass<EOL>
Given a list of keys that the user has input, produce a gym action to pass to the environment. For sync environments, keys is a list of keys that have been pressed since the last step. For async environments, keys is a list of keys currently held down.
f3875:c0:m5
def run(self):
<EOL>prev_frame_time = time.time()<EOL>while True:<EOL><INDENT>self._win.switch_to()<EOL>self._win.dispatch_events()<EOL>now = time.time()<EOL>self._update(now - prev_frame_time)<EOL>prev_frame_time = now<EOL>self._draw()<EOL>self._win.flip()<EOL><DEDENT>
Run the interactive window until the user quits
f3875:c0:m6
def make(game, state=State.DEFAULT, inttype=retro.data.Integrations.DEFAULT, **kwargs):
try:<EOL><INDENT>retro.data.get_romfile_path(game, inttype)<EOL><DEDENT>except FileNotFoundError:<EOL><INDENT>if not retro.data.get_file_path(game, "<STR_LIT>", inttype):<EOL><INDENT>raise<EOL><DEDENT>else:<EOL><INDENT>raise FileNotFoundError('<STR_LIT>' % game)<EOL><DEDENT><DEDENT>return RetroEnv(game, state, inttype=inttype, **kwargs)<EOL>
Create a Gym environment for the specified game
f3879:m3
def get_file_path(game, file, inttype=Integrations.DEFAULT):
base = path()<EOL>for t in inttype.paths:<EOL><INDENT>possible_path = os.path.join(base, t, game, file)<EOL>if os.path.exists(possible_path):<EOL><INDENT>return possible_path<EOL><DEDENT><DEDENT>return None<EOL>
Return the path to a given file within a game's directory, or None if it cannot be found
f3882:m4
def get_romfile_path(game, inttype=Integrations.DEFAULT):
for extension in EMU_EXTENSIONS.keys():<EOL><INDENT>possible_path = get_file_path(game, "<STR_LIT>" + extension, inttype)<EOL>if possible_path:<EOL><INDENT>return possible_path<EOL><DEDENT><DEDENT>raise FileNotFoundError("<STR_LIT>" % game)<EOL>
Return the path to a given game's romfile
f3882:m5
def mkdir_exist(directory):
if not os.path.exists(directory):<EOL><INDENT>try:<EOL><INDENT>os.makedirs(directory)<EOL><DEDENT>except OSError as e:<EOL><INDENT>if e.errno != errno.EEXIST:<EOL><INDENT>raise<EOL><DEDENT><DEDENT><DEDENT>
Create a directory if it does not already exist. Args: directory (str): path of the directory to create
f3892:m0
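On Python 3 the same behavior can be had in one call; a minimal equivalent sketch:

import os

def mkdir_exist_sketch(directory):
    # exist_ok=True tolerates the directory already existing,
    # which is what the errno.EEXIST check above guards against.
    os.makedirs(directory, exist_ok=True)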
def execute_by_options(args):
if args['<STR_LIT>'] == '<STR_LIT>':<EOL><INDENT>s = Sphinx(proj_info)<EOL>if args['<STR_LIT>']:<EOL><INDENT>s.quickstart()<EOL><DEDENT>elif args['<STR_LIT>']:<EOL><INDENT>s.gen_code_api()<EOL><DEDENT>elif args['<STR_LIT>']:<EOL><INDENT>s.rst2html()<EOL><DEDENT>pass<EOL><DEDENT>elif args['<STR_LIT>'] == '<STR_LIT>':<EOL><INDENT>pod = PyOfflineDist()<EOL>if args['<STR_LIT>']:<EOL><INDENT>pod.freeze_deps()<EOL><DEDENT>elif args['<STR_LIT>']:<EOL><INDENT>pod.download_deps()<EOL><DEDENT>elif args['<STR_LIT>']:<EOL><INDENT>pod.install_deps()<EOL><DEDENT>elif args['<STR_LIT>']:<EOL><INDENT>pod.clean_deps()<EOL><DEDENT>elif args['<STR_LIT>']:<EOL><INDENT>pod.pyinstaller_mkbinary(args['<STR_LIT>'])<EOL><DEDENT>elif args['<STR_LIT>']:<EOL><INDENT>pod.clean_binary()<EOL><DEDENT><DEDENT>pass<EOL>
Execute the selected action based on the argument dictionary. Args: args (dict): command line argument dictionary
f3892:m2
def __init__(self, fpath):
self._fpath = fpath<EOL>self._swp_lines = []<EOL>with open(fpath) as f:<EOL><INDENT>self._swp_lines = [s.rstrip() for s in f.read().splitlines()]<EOL><DEDENT>
Load the file at fpath into an in-memory list of lines for editing. Args: fpath (str): path of the file to edit
f3892:c0:m0
def editline_with_regex(self, regex_tgtline, to_replace):
for idx, line in enumerate(self._swp_lines):<EOL><INDENT>mobj = re.match(regex_tgtline, line)<EOL>if mobj:<EOL><INDENT>self._swp_lines[idx] = to_replace<EOL>return<EOL><DEDENT><DEDENT>
Find the first line matching the regular expression, then replace it. Args: regex_tgtline (str): regular expression used to match the target line. to_replace (str): replacement line.
f3892:c0:m2
def __init__(self, **kwinfo):
self._author_fakename = getpass.getuser()<EOL>self._author_truename = ProjectInfo.find_pakcage_info(<EOL>'<STR_LIT>', SRC_FOLDER, PROJECT_NAME, '<STR_LIT>')<EOL>self._email = ProjectInfo.find_pakcage_info(<EOL>'<STR_LIT:email>', SRC_FOLDER, PROJECT_NAME, '<STR_LIT>')<EOL>self._project_name = os.path.basename(<EOL>os.path.dirname(os.path.realpath(__file__)))<EOL>self._project_version = ProjectInfo.find_pakcage_info(<EOL>'<STR_LIT:version>', SRC_FOLDER, PROJECT_NAME, '<STR_LIT>')<EOL>for key, info in kwinfo.items():<EOL><INDENT>key = '<STR_LIT:_>' + key<EOL>setattr(self, key, info)<EOL><DEDENT>
Initialize project info. Args: author_fakename (str): TODO author_truename (str): TODO email (str): TODO project_name (str): TODO project_version (str): TODO
f3892:c1:m0
def __init__(self, proj_info):
self._proj_info = proj_info<EOL>self.__docfolder = DOC_FOLDER<EOL>self.__htmlfolder = HTML_FOLDER<EOL>self.conf_fpath = os.path.abspath(<EOL>os.path.join(self.__docfolder, '<STR_LIT>'))<EOL>self.code_fdpath = os.path.abspath(<EOL>os.path.join(SRC_FOLDER, self.proj_info.project_name))<EOL>self._sphinx_quickstart_cmd = [<EOL>'<STR_LIT>', self.__docfolder, '<STR_LIT>',<EOL>self.proj_info.project_name, '<STR_LIT>', self.proj_info.author_fakename,<EOL>'<STR_LIT>', self.proj_info.project_version, '<STR_LIT>',<EOL>self.proj_info.project_version, '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',<EOL>'<STR_LIT>', '<STR_LIT>'<EOL>]<EOL>self._sphinx_apidoc_cmd = [<EOL>'<STR_LIT>', self.code_fdpath, '<STR_LIT>', self.__docfolder, '<STR_LIT>',<EOL>'<STR_LIT>'<EOL>]<EOL>self._sphinx_buildhtml_cmd = [<EOL>'<STR_LIT>', '<STR_LIT>', '<STR_LIT:html>', self.__docfolder, self.__htmlfolder<EOL>]<EOL>mkdir_exist(self.__docfolder)<EOL>mkdir_exist(self.__htmlfolder)<EOL>
Set up the Sphinx quickstart, apidoc, and build-html commands and create the documentation folders. Args: proj_info (ProjectInfo): project information used to build the Sphinx commands
f3892:c5:m0
def quickstart(self):
subprocess.call(self.sphinx_quickstart_cmd)<EOL>pass<EOL>
Run sphinx-quickstart to scaffold the documentation folder.
f3892:c5:m3
def gen_code_api(self):
<EOL>conf_editor = Editor(self.conf_fpath)<EOL>conf_editor.editline_with_regex(r'<STR_LIT>', '<STR_LIT>')<EOL>conf_editor.editline_with_regex(r'<STR_LIT>', '<STR_LIT>')<EOL>conf_editor.editline_with_regex(<EOL>r'<STR_LIT>',<EOL>'<STR_LIT>'.format(self.code_fdpath))<EOL>conf_editor.editline_with_regex(<EOL>r"""<STR_LIT>""",<EOL>'<STR_LIT>'.format(self.code_fdpath))<EOL>conf_editor.finish_writing()<EOL>subprocess.call(self._sphinx_apidoc_cmd)<EOL>pass<EOL>
Edit the Sphinx conf file and run sphinx-apidoc to generate API documentation for the code.
f3892:c5:m4
def __init__(self, req_fpath='<STR_LIT>'):
self.__dep_folder = DEP_FOLDER<EOL>self.__req_fpath = req_fpath<EOL>self._srcpj_abfdpath = os.path.abspath(<EOL>os.path.join(SRC_FOLDER, PROJECT_NAME))<EOL>pass<EOL>
Set up the dependency folder and requirements file path. Args: req_fpath (str): path to the requirements file
f3892:c6:m0
def kill_proc_tree(pid,<EOL>sig=signal.SIGTERM,<EOL>include_parent=True,<EOL>timeout=None,<EOL>on_terminate=None):
if pid == os.getpid():<EOL><INDENT>raise RuntimeError("<STR_LIT>")<EOL><DEDENT>parent = psutil.Process(pid)<EOL>children = parent.children(recursive=True)<EOL>if include_parent:<EOL><INDENT>children.append(parent)<EOL><DEDENT>for p in children:<EOL><INDENT>p.send_signal(sig)<EOL><DEDENT>gone, alive = psutil.wait_procs(<EOL>children, timeout=timeout, callback=on_terminate)<EOL>return (gone, alive)<EOL>
Kill a process tree (including grandchildren) with signal "sig" and return a (gone, still_alive) tuple. "on_terminate", if specified, is a callback function which is called as soon as a child terminates.
f3893:m1
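A hypothetical usage sketch that calls the kill_proc_tree defined above, assuming psutil is installed, against a throwaway child process:

import signal
import subprocess
import sys

# Spawn a child that would otherwise run for a minute.
child = subprocess.Popen([sys.executable, "-c", "import time; time.sleep(60)"])

# Terminate the child (and any grandchildren) and wait up to 5 seconds for them to exit.
gone, alive = kill_proc_tree(child.pid, sig=signal.SIGTERM, timeout=5)
print(len(gone), "terminated,", len(alive), "still alive")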
def main():
<EOL>if (len(sys.argv) == <NUM_LIT:1>):<EOL><INDENT>(address, username, password, device, tstart, tend) = read_config()<EOL>r = Runner(device)<EOL>while True:<EOL><INDENT>now = datetime.datetime.now()<EOL>start = get_time_by_cfgtime(now, tstart)<EOL>end = get_time_by_cfgtime(now, tend)<EOL>logger.info('<STR_LIT>')<EOL>logger.info('<STR_LIT>' + now.strftime("<STR_LIT>"))<EOL>logger.info('<STR_LIT>' + start.strftime("<STR_LIT>"))<EOL>logger.info('<STR_LIT>' + end.strftime("<STR_LIT>"))<EOL>logger.info('<STR_LIT>')<EOL>if start > end:<EOL><INDENT>if now > start or now < end:<EOL><INDENT>logger.info('<STR_LIT>')<EOL>r.run_miner_if_free()<EOL><DEDENT>else:<EOL><INDENT>logger.info('<STR_LIT>')<EOL>r.kill_miner_if_exists()<EOL><DEDENT><DEDENT>else:<EOL><INDENT>if now > start and now < end:<EOL><INDENT>logger.info('<STR_LIT>')<EOL>r.run_miner_if_free()<EOL><DEDENT>else:<EOL><INDENT>logger.info('<STR_LIT>')<EOL>r.kill_miner_if_exists()<EOL><DEDENT><DEDENT>time.sleep(interval)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>save_and_test()<EOL><DEDENT>
Run the miner secretly on CPU or GPU within the configured time window.
f3893:m4
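The start/end comparison above handles windows that wrap around midnight; a small standalone sketch of that check, with hypothetical times:

import datetime

def in_window(now, start, end):
    # A window that wraps past midnight (e.g. 22:00 to 06:00) is active when the
    # current time is after start OR before end; a normal window requires the
    # current time to fall strictly between start and end.
    if start > end:
        return now > start or now < end
    return start < now < end

today = datetime.datetime(2023, 1, 1)
start = today.replace(hour=22)
end = today.replace(hour=6)
print(in_window(today.replace(hour=23), start, end))  # True: inside the overnight window
print(in_window(today.replace(hour=12), start, end))  # False: outside it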
def run_miner_if_free(self):
(address, username, password, device, tstart, tend) = read_config()<EOL>if self.dtype == <NUM_LIT:0>:<EOL><INDENT>self.run_miner_cmd = [<EOL>cpu_miner_path, '<STR_LIT>', address, '<STR_LIT>', '<STR_LIT>'.format(<EOL>username, password)<EOL>]<EOL><DEDENT>elif self.dtype == <NUM_LIT:1>:<EOL><INDENT>r = urlparse(address)<EOL>url = '<STR_LIT>'.format(r.scheme, username, password,<EOL>r.netloc)<EOL>self.run_miner_cmd = [gpu_miner_path, '<STR_LIT>', url, '<STR_LIT>']<EOL><DEDENT>if (len(self.run_miner_cmd) != <NUM_LIT:0>):<EOL><INDENT>logger.info('<STR_LIT:U+0020>'.join(self.run_miner_cmd))<EOL>if (self.is_device_free()):<EOL><INDENT>logger.info('<STR_LIT>')<EOL>self.run_cmd(self.run_miner_cmd)<EOL><DEDENT><DEDENT>
Build the miner command for the configured device and run it if the device is free.
f3893:c1:m3
def version():
return _VERSION<EOL>
Return the library version string.
f3897:m0
def zero_pad(buff, length):
return buff + '<STR_LIT:\x00>' * (length - len(buff))<EOL>
Pad `buff` with trailing zeros to a total of `length` bytes long.
f3897:m1
def __init__(self, libraryPath=DEFAULT_PATH, keylen=None, key=None):
self.initialized = False<EOL>self._loadlib(libraryPath)<EOL>if keylen and key:<EOL><INDENT>self.keygen(keylen, key)<EOL><DEDENT>
To instantiate an instance of this class, provide the full path to the camellia shared library (camellia.so) that it will reference. Raises an exception if the libraryPath is not specified or there is a problem loading the library.
f3897:c0:m0
def keygen(self, keyBitLength, rawKey):
if keyBitLength not in ACCEPTABLE_KEY_LENGTHS:<EOL><INDENT>raise Exception("<STR_LIT>")<EOL><DEDENT>self.bitlen = keyBitLength<EOL>if len(rawKey) <= <NUM_LIT:0> or len(rawKey) > self.bitlen/<NUM_LIT:8>:<EOL><INDENT>raise Exception("<STR_LIT>" % (self.bitlen/<NUM_LIT:8>))<EOL><DEDENT>rawKey = zero_pad(rawKey, self.bitlen/<NUM_LIT:8>)<EOL>keytable = ctypes.create_string_buffer(TABLE_BYTE_LEN)<EOL>self.ekeygen(self.bitlen, rawKey, keytable)<EOL>self.keytable = keytable<EOL>self.initialized = True<EOL>
This must be called on the object before any encryption or decryption can take place. Provide it the key bit length, which must be 128, 192, or 256, and the key, which may be a sequence of bytes or a simple string. Does not return any value. Raises an exception if the arguments are not sane.
f3897:c0:m2
def encrypt(self, plainText):
encryptedResult = '<STR_LIT>'<EOL>for index in range(<NUM_LIT:0>, len(plainText), BLOCK_SIZE):<EOL><INDENT>block = plainText[index:index + BLOCK_SIZE]<EOL>if len(block) < BLOCK_SIZE:<EOL><INDENT>block = zero_pad(block, BLOCK_SIZE)<EOL><DEDENT>encryptedResult += self.encrypt_block(block)<EOL><DEDENT>return encryptedResult<EOL>
Encrypt an arbitrary-length block of data. NOTE: This function formerly worked only on 16-byte blocks of `plainText`. Code that assumed this should still work fine, but can optionally be modified to call `encrypt_block` instead. Args: plainText (str): data to encrypt. If the data is not a multiple of 16 bytes long, it will be padded with null (0x00) bytes until it is. Returns: encrypted data. Note that this will always be a multiple of 16 bytes long.
f3897:c0:m3
def decrypt(self, cipherText):
decryptedResult = '<STR_LIT>'<EOL>for index in range(<NUM_LIT:0>, len(cipherText), BLOCK_SIZE):<EOL><INDENT>block = cipherText[index:index + BLOCK_SIZE]<EOL>if len(block) < BLOCK_SIZE:<EOL><INDENT>block = zero_pad(block, BLOCK_SIZE)<EOL><DEDENT>decryptedResult += self.decrypt_block(block)<EOL><DEDENT>return decryptedResult<EOL>
Decrypt an arbitrary-length block of data. NOTE: This function formerly worked only on 16-byte blocks of `cipherText`. Code that assumed this should still work fine, but can optionally be modified to call `decrypt_block` instead. Args: cipherText (str): data to decrypt. If the data is not a multiple of 16 bytes long, it will be padded with null (0x00) bytes until it is. WARNING: this should almost certainly never be needed for correctly-encrypted data. Returns: decrypted data. Note that this will always be a multiple of 16 bytes long. If the original data was not a multiple of 16 bytes, the result will contain trailing null bytes, which can be removed with `.rstrip('\x00')`
f3897:c0:m4
def encrypt_block(self, plainText):
if not self.initialized:<EOL><INDENT>raise TypeError("<STR_LIT>")<EOL><DEDENT>if len(plainText) != BLOCK_SIZE:<EOL><INDENT>raise ValueError("<STR_LIT>" %<EOL>(BLOCK_SIZE, len(plainText)))<EOL><DEDENT>cipher = ctypes.create_string_buffer(BLOCK_SIZE)<EOL>self.encblock(self.bitlen, plainText, self.keytable, cipher)<EOL>return cipher.raw<EOL>
Encrypt a 16-byte block of data. NOTE: This function was formerly called `encrypt`, but was changed when support for encrypting arbitrary-length strings was added. Args: plainText (str): 16-byte data. Returns: 16-byte str. Raises: TypeError if CamCrypt object has not been initialized. ValueError if `plainText` is not BLOCK_SIZE (i.e. 16) bytes.
f3897:c0:m5
def decrypt_block(self, cipherText):
if not self.initialized:<EOL><INDENT>raise TypeError("<STR_LIT>")<EOL><DEDENT>if len(cipherText) != BLOCK_SIZE:<EOL><INDENT>raise ValueError("<STR_LIT>" %<EOL>(BLOCK_SIZE, len(cipherText)))<EOL><DEDENT>plain = ctypes.create_string_buffer(BLOCK_SIZE)<EOL>self.decblock(self.bitlen, cipherText, self.keytable, plain)<EOL>return plain.raw<EOL>
Decrypt a 16-byte block of data. NOTE: This function was formerly called `decrypt`, but was changed when support for decrypting arbitrary-length strings was added. Args: cipherText (str): 16-byte data. Returns: 16-byte str. Raises: TypeError if CamCrypt object has not been initialized. ValueError if `cipherText` is not BLOCK_SIZE (i.e. 16) bytes.
f3897:c0:m6
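A hypothetical round-trip sketch using the class above; the shared-library path and key shown here are placeholders (the real literals are elided in these records):

# '/path/to/camellia.so' and the key are illustrative only.
crypt = CamCrypt(libraryPath='/path/to/camellia.so')
crypt.keygen(128, 'sixteen byte key')          # 128-bit key, padded internally if shorter

ciphertext = crypt.encrypt('hello world')      # padded to a 16-byte multiple internally
plaintext = crypt.decrypt(ciphertext)
print(plaintext.rstrip('\x00'))                # 'hello world'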
def _get_parser(description):
parser = argparse.ArgumentParser(description=description)<EOL>parser.add_argument('<STR_LIT:key>', help="<STR_LIT>")<EOL>parser.add_argument('<STR_LIT>', nargs='<STR_LIT:*>',<EOL>help="<STR_LIT>"<EOL>"<STR_LIT>")<EOL>parser.add_argument('<STR_LIT>', '<STR_LIT>',<EOL>help="<STR_LIT>")<EOL>parser.add_argument('<STR_LIT>', '<STR_LIT>', type=int, default=<NUM_LIT>,<EOL>help="<STR_LIT>"<EOL>"<STR_LIT>" % camcrypt.ACCEPTABLE_KEY_LENGTHS)<EOL>parser.add_argument('<STR_LIT>', '<STR_LIT>', action='<STR_LIT:store_true>',<EOL>help="<STR_LIT>")<EOL>return parser<EOL>
Build an ArgumentParser with common arguments for both operations.
f3898:m0
def _get_crypto(keylen, hexkey, key):
if keylen not in camcrypt.ACCEPTABLE_KEY_LENGTHS:<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>if hexkey:<EOL><INDENT>key = key.decode('<STR_LIT>')<EOL><DEDENT>return camcrypt.CamCrypt(keylen=keylen, key=key)<EOL>
Return a camcrypt.CamCrypt object based on keylen, hexkey, and key.
f3898:m1
def _get_data(filenames):
if filenames:<EOL><INDENT>data = "<STR_LIT>"<EOL>for filename in filenames:<EOL><INDENT>with open(filename, "<STR_LIT:rb>") as f:<EOL><INDENT>data += f.read()<EOL><DEDENT><DEDENT><DEDENT>else:<EOL><INDENT>data = sys.stdin.read()<EOL><DEDENT>return data<EOL>
Read data from file(s) or STDIN. Args: filenames (list): List of files to read to get data. If empty or None, read from STDIN.
f3898:m2
def _print_results(filename, data):
if filename:<EOL><INDENT>with open(filename, '<STR_LIT:wb>') as f:<EOL><INDENT>f.write(data)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>print(data)<EOL><DEDENT>
Print data to a file or STDOUT. Args: filename (str or None): If None, print to STDOUT; otherwise, print to the file with this name. data (str): Data to print.
f3898:m3
def create_config(allow_insecure_config_file=False):
config = Config()<EOL>config.add_properties(GLOBAL_CONFIG_FILENAME)<EOL>user_config_filename = get_user_config_filename()<EOL>if user_config_filename == LOCAL_CONFIG_FILENAME and not allow_insecure_config_file:<EOL><INDENT>verify_file_private(user_config_filename)<EOL><DEDENT>config.add_properties(user_config_filename)<EOL>return config<EOL>
Create config based on /etc/ddsclient.conf and ~/.ddsclient.conf ($DDSCLIENT_CONF). :param allow_insecure_config_file: bool: when true we will not check ~/.ddsclient.conf permissions. :return: Config with the configuration to use for DDSClient.
f3903:m1
def default_num_workers():
return min(multiprocessing.cpu_count(), MAX_DEFAULT_WORKERS)<EOL>
Return the number of workers to use as default if not specified by a config file. Returns the number of CPUs or MAX_DEFAULT_WORKERS (whichever is less).
f3903:m2
def add_properties(self, filename):
filename = os.path.expanduser(filename)<EOL>if os.path.exists(filename):<EOL><INDENT>with open(filename, '<STR_LIT:r>') as yaml_file:<EOL><INDENT>self.update_properties(yaml.safe_load(yaml_file))<EOL><DEDENT><DEDENT>
Add properties to config based on filename replacing previous values. :param filename: str path to YAML file to pull top level properties from
f3903:c0:m1
def update_properties(self, new_values):
self.values = dict(self.values, **new_values)<EOL>
Add items in new_values to the internal dict, replacing existing values. :param new_values: dict properties to set
f3903:c0:m2
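The dict(self.values, **new_values) merge means later property files win on conflicting keys; a quick illustration of that precedence with made-up values:

values = {'url': 'https://global.example.org', 'upload_workers': 4}
new_values = {'url': 'https://user.example.org'}

# Keys from new_values override keys already present in values.
merged = dict(values, **new_values)
print(merged)  # {'url': 'https://user.example.org', 'upload_workers': 4}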
@property<EOL><INDENT>def url(self):<DEDENT>
return self.values.get(Config.URL, DUKE_DATA_SERVICE_URL)<EOL>
Specifies the dataservice host we are connecting to. :return: str url to a dataservice host
f3903:c0:m3
def get_portal_url_base(self):
api_url = urlparse(self.url).hostname<EOL>portal_url = re.sub('<STR_LIT>', '<STR_LIT>', api_url)<EOL>portal_url = re.sub(r'<STR_LIT>', '<STR_LIT>', portal_url)<EOL>return portal_url<EOL>
Determine root url of the data service from the url specified. :return: str root url of the data service (eg: https://dataservice.duke.edu)
f3903:c0:m4
@property<EOL><INDENT>def user_key(self):<DEDENT>
return self.values.get(Config.USER_KEY, None)<EOL>
Contains the user key created from /api/v1/current_user/api_key, used to create a login token. :return: str user key that can be used to create an auth token
f3903:c0:m5
@property<EOL><INDENT>def agent_key(self):<DEDENT>
return self.values.get(Config.AGENT_KEY, None)<EOL>
Contains the agent key created from /api/v1/software_agents/{id}/api_key, used to create a login token. :return: str agent key that can be used to create an auth token
f3903:c0:m6
@property<EOL><INDENT>def auth(self):<DEDENT>
return self.values.get(Config.AUTH, os.environ.get(AUTH_ENV_KEY_NAME, None))<EOL>
Contains the auth token for use when connecting to the dataservice. :return: str auth token from the config file or the environment
f3903:c0:m7
@property<EOL><INDENT>def upload_bytes_per_chunk(self):<DEDENT>
value = self.values.get(Config.UPLOAD_BYTES_PER_CHUNK, DDS_DEFAULT_UPLOAD_CHUNKS)<EOL>return Config.parse_bytes_str(value)<EOL>
Return the bytes per chunk to be sent to external store. :return: int bytes per upload chunk
f3903:c0:m8
@property<EOL><INDENT>def upload_workers(self):<DEDENT>
return self.values.get(Config.UPLOAD_WORKERS, default_num_workers())<EOL>
Return the number of parallel workers to use when uploading a file. :return: int number of workers. Specify None or 1 to disable parallel uploading
f3903:c0:m9
@property<EOL><INDENT>def download_workers(self):<DEDENT>
<EOL>default_workers = int(math.ceil(default_num_workers() / <NUM_LIT:2>))<EOL>return self.values.get(Config.DOWNLOAD_WORKERS, default_workers)<EOL>
Return the number of parallel workers to use when downloading a file. :return: int number of workers. Specify None or 1 to disable parallel downloading
f3903:c0:m10
@property<EOL><INDENT>def debug_mode(self):<DEDENT>
return self.values.get(Config.DEBUG_MODE, False)<EOL>
Return true if we should show stack traces on error. :return: boolean True if debugging is enabled
f3903:c0:m12
@property<EOL><INDENT>def d4s2_url(self):<DEDENT>
return self.values.get(Config.D4S2_URL, D4S2_SERVICE_URL)<EOL>
Returns url for D4S2 service or '' if not setup. :return: str url
f3903:c0:m13
@staticmethod<EOL><INDENT>def parse_bytes_str(value):<DEDENT>
if type(value) == str:<EOL><INDENT>if "<STR_LIT>" in value:<EOL><INDENT>return int(value.replace("<STR_LIT>", "<STR_LIT>")) * MB_TO_BYTES<EOL><DEDENT>else:<EOL><INDENT>return int(value)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>return value<EOL><DEDENT>
Given a value, return the integer number of bytes it represents. A trailing "MB" causes the value to be multiplied by 1024*1024. :param value: str or int value to parse :return: int number of bytes represented by value.
f3903:c0:m14
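A quick worked illustration of the conversion described above, assuming MB_TO_BYTES is 1024*1024 as the docstring states:

MB_TO_BYTES = 1024 * 1024

def parse_bytes_str_sketch(value):
    # Mirrors the behavior above: '100MB' -> 104857600, '2048' -> 2048, ints pass through.
    if isinstance(value, str):
        if 'MB' in value:
            return int(value.replace('MB', '')) * MB_TO_BYTES
        return int(value)
    return value

print(parse_bytes_str_sketch('100MB'))  # 104857600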
@property<EOL><INDENT>def file_exclude_regex(self):<DEDENT>
return self.values.get(Config.FILE_EXCLUDE_REGEX, FILE_EXCLUDE_REGEX_DEFAULT)<EOL>
Returns regex that should be used to filter out filenames. :return: str: regex that, when it matches a filename, causes the file to be excluded from uploading.
f3903:c0:m15
@property<EOL><INDENT>def page_size(self):<DEDENT>
return self.values.get(Config.GET_PAGE_SIZE, GET_PAGE_SIZE_DEFAULT)<EOL>
Returns the page size used to fetch paginated lists from DukeDS. For DukeDS APIs that fail due to timeouts, lowering this value can help. :return: int page size
f3903:c0:m16
@property<EOL><INDENT>def storage_provider_id(self):<DEDENT>
return self.values.get(Config.STORAGE_PROVIDER_ID, None)<EOL>
Returns storage provider id from /api/v1/storage_providers DukeDS API or None to use default. :return: str: uuid of storage provider
f3903:c0:m17
def read_argument_file_contents(infile):
if infile:<EOL><INDENT>if infile == sys.stdin:<EOL><INDENT>print("<STR_LIT>")<EOL><DEDENT>return infile.read()<EOL><DEDENT>return "<STR_LIT>"<EOL>
Return the contents of a file, or "" if infile is None. If infile is STDIN, displays a message telling the user how to quit entering data. :param infile: file handle to read from :return: str: contents of the file
f3904:m1
def run_command(self, args):
parser = self._create_parser()<EOL>parser.run_command(args)<EOL>
Create a parser and have it parse the args then run the appropriate command. :param args: [str] command line args
f3904:c0:m1
def _create_parser(self):
parser = CommandParser(get_internal_version_str())<EOL>parser.register_list_command(self._setup_run_command(ListCommand))<EOL>parser.register_upload_command(self._setup_run_command(UploadCommand))<EOL>parser.register_add_user_command(self._setup_run_command(AddUserCommand))<EOL>parser.register_remove_user_command(self._setup_run_command(RemoveUserCommand))<EOL>parser.register_download_command(self._setup_run_command(DownloadCommand))<EOL>parser.register_share_command(self._setup_run_command(ShareCommand))<EOL>parser.register_deliver_command(self._setup_run_command(DeliverCommand))<EOL>parser.register_delete_command(self._setup_run_command(DeleteCommand))<EOL>parser.register_list_auth_roles_command(self._setup_run_command(ListAuthRolesCommand))<EOL>return parser<EOL>
Create a parser hooking up the command methods below to be run when chosen. :return: CommandParser parser with commands attached.
f3904:c0:m2
def _setup_run_command(self, command_constructor):
return lambda args: self._run_command(command_constructor, args)<EOL>
Create f(args) to run that will create the specified object and call run when invoked. The reason for this complexity is deferring the creation of expensive objects until we have decided to run a command; for instance, we avoid setting up the data service API if we are just running -h. :param command_constructor: class that specifies the object to create and (eventually) pass args to. :return: func function that will let the command created by command_constructor run with arguments.
f3904:c0:m3
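A minimal sketch of the deferral pattern the docstring describes, using a hypothetical command class: nothing expensive is built until the returned function is actually invoked.

class ExpensiveCommand:
    def __init__(self, config):
        print('building remote store for', config)  # pretend this is the expensive setup
    def run(self, args):
        print('running with', args)

def setup_run_command(command_constructor, config):
    # Construction is deferred into the closure; creating the lambda is cheap.
    return lambda args: command_constructor(config).run(args)

runner = setup_run_command(ExpensiveCommand, config='example-config')
# Only now is ExpensiveCommand constructed:
runner(['--flag'])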
def _check_pypi_version(self):
try:<EOL><INDENT>check_version()<EOL><DEDENT>except VersionException as err:<EOL><INDENT>print(str(err), file=sys.stderr)<EOL>time.sleep(TWO_SECONDS)<EOL><DEDENT>
When the version is out of date or we have trouble retrieving it, print an error to stderr and pause.
f3904:c0:m4
def _run_command(self, command_constructor, args):
verify_terminal_encoding(sys.stdout.encoding)<EOL>self._check_pypi_version()<EOL>config = create_config(allow_insecure_config_file=args.allow_insecure_config_file)<EOL>self.show_error_stack_trace = config.debug_mode<EOL>command = command_constructor(config)<EOL>command.run(args)<EOL>
Run command_constructor and call run(args) on the resulting object :param command_constructor: class of an object that implements run(args) :param args: object arguments for specific command created by CommandParser
f3904:c0:m5
def __init__(self, config):
self.remote_store = RemoteStore(config)<EOL>self.config = config<EOL>
Pass in the config containing remote_store/url so we can access the remote data. :param config: Config global configuration for use with this command.
f3904:c1:m0
def make_user_list(self, emails, usernames):
to_users = []<EOL>remaining_emails = [] if not emails else list(emails)<EOL>remaining_usernames = [] if not usernames else list(usernames)<EOL>for user in self.remote_store.fetch_users():<EOL><INDENT>if user.email in remaining_emails:<EOL><INDENT>to_users.append(user)<EOL>remaining_emails.remove(user.email)<EOL><DEDENT>elif user.username in remaining_usernames:<EOL><INDENT>to_users.append(user)<EOL>remaining_usernames.remove(user.username)<EOL><DEDENT><DEDENT>if remaining_emails or remaining_usernames:<EOL><INDENT>unable_to_find_users = '<STR_LIT:U+002C>'.join(remaining_emails + remaining_usernames)<EOL>msg = "<STR_LIT>".format(unable_to_find_users)<EOL>raise ValueError(msg)<EOL><DEDENT>return to_users<EOL>
Given a list of emails and usernames fetch DukeDS user info. Parameters that are None will be skipped. :param emails: [str]: list of emails (can be null) :param usernames: [str]: list of usernames (netid) :return: [RemoteUser]: details about any users referenced by the two parameters
f3904:c1:m3
def __init__(self, config):
super(UploadCommand, self).__init__(config)<EOL>
Pass in the config containing remote_store/url so we can access the remote data. :param config: Config global configuration for use with this command.
f3904:c2:m0
def run(self, args):
project_name_or_id = self.create_project_name_or_id_from_args(args)<EOL>folders = args.folders <EOL>follow_symlinks = args.follow_symlinks <EOL>dry_run = args.dry_run <EOL>project_upload = ProjectUpload(self.config, project_name_or_id, folders, follow_symlinks=follow_symlinks)<EOL>if dry_run:<EOL><INDENT>print(project_upload.dry_run_report())<EOL><DEDENT>else:<EOL><INDENT>print(project_upload.get_differences_summary())<EOL>if project_upload.needs_to_upload():<EOL><INDENT>project_upload.run()<EOL>print('<STR_LIT:\n>')<EOL>print(project_upload.get_upload_report())<EOL>print('<STR_LIT:\n>')<EOL><DEDENT>print(project_upload.get_url_msg())<EOL><DEDENT>
Upload contents of folders to a project with project_name on remote store. If follow_symlinks we will traverse symlinked directories. If content is already on remote site it will not be sent. :param args: Namespace arguments parsed from the command line.
f3904:c2:m1
def __init__(self, config):
super(DownloadCommand, self).__init__(config)<EOL>
Pass in the config, which can create a remote_store so we can access the remote data. :param config: Config global configuration for use with this command.
f3904:c3:m0
def run(self, args):
project_name_or_id = self.create_project_name_or_id_from_args(args)<EOL>folder = args.folder <EOL>if not folder:<EOL><INDENT>folder = replace_invalid_path_chars(project_name_or_id.value.replace('<STR_LIT:U+0020>', '<STR_LIT:_>'))<EOL><DEDENT>destination_path = format_destination_path(folder)<EOL>path_filter = PathFilter(args.include_paths, args.exclude_paths)<EOL>project = self.fetch_project(args, must_exist=True)<EOL>project_download = ProjectDownload(self.remote_store, project, destination_path, path_filter)<EOL>project_download.run()<EOL>
Download a project based on passed in args. :param args: Namespace arguments parsed from the command line.
f3904:c3:m1
def __init__(self, config):
super(AddUserCommand, self).__init__(config)<EOL>
Pass in the config, which can create a remote_store so we can access the remote data. :param config: Config global configuration for use with this command.
f3904:c4:m0
def run(self, args):
email = args.email <EOL>username = args.username <EOL>auth_role = args.auth_role <EOL>project = self.fetch_project(args, must_exist=True, include_children=False)<EOL>user = self.remote_store.lookup_or_register_user_by_email_or_username(email, username)<EOL>self.remote_store.set_user_project_permission(project, user, auth_role)<EOL>print(u'<STR_LIT>'.format(user.full_name, auth_role, project.name))<EOL>
Give the user with user_full_name the auth_role permissions on the remote project with project_name. :param args: Namespace arguments parsed from the command line
f3904:c4:m1
def __init__(self, config):
super(RemoveUserCommand, self).__init__(config)<EOL>
Pass in the config, which can create a remote_store so we can access the remote data. :param config: Config global configuration for use with this command.
f3904:c5:m0
def run(self, args):
email = args.email <EOL>username = args.username <EOL>project = self.fetch_project(args, must_exist=True, include_children=False)<EOL>user = self.remote_store.lookup_or_register_user_by_email_or_username(email, username)<EOL>self.remote_store.revoke_user_project_permission(project, user)<EOL>print(u'<STR_LIT>'.format(user.full_name, project.name))<EOL>
Remove permissions from the user with user_full_name or email on the remote project with project_name. :param args: Namespace arguments parsed from the command line
f3904:c5:m1
def __init__(self, config):
super(ShareCommand, self).__init__(config)<EOL>self.service = D4S2Project(config, self.remote_store, print_func=print)<EOL>
Pass in the config, which can create a remote_store so we can access the remote data. :param config: Config global configuration for use with this command.
f3904:c6:m0
def run(self, args):
email = args.email <EOL>username = args.username <EOL>force_send = args.resend <EOL>auth_role = args.auth_role <EOL>msg_file = args.msg_file <EOL>message = read_argument_file_contents(msg_file)<EOL>print("<STR_LIT>")<EOL>to_user = self.remote_store.lookup_or_register_user_by_email_or_username(email, username)<EOL>try:<EOL><INDENT>project = self.fetch_project(args, must_exist=True, include_children=False)<EOL>dest_email = self.service.share(project, to_user, force_send, auth_role, message)<EOL>print("<STR_LIT>" + dest_email)<EOL><DEDENT>except D4S2Error as ex:<EOL><INDENT>if ex.warning:<EOL><INDENT>print(ex.message)<EOL><DEDENT>else:<EOL><INDENT>raise<EOL><DEDENT><DEDENT>
Gives user permission based on auth_role arg and sends email to that user. :param args: Namespace arguments parsed from the command line
f3904:c6:m1
def __init__(self, config):
super(DeliverCommand, self).__init__(config)<EOL>self.service = D4S2Project(config, self.remote_store, print_func=print)<EOL>
Pass in the config, which can create a remote_store so we can access the remote data. :param config: Config global configuration for use with this command.
f3904:c7:m0
def run(self, args):
email = args.email <EOL>username = args.username <EOL>copy_project = args.copy_project <EOL>force_send = args.resend <EOL>msg_file = args.msg_file <EOL>share_usernames = args.share_usernames <EOL>share_emails = args.share_emails <EOL>message = read_argument_file_contents(msg_file)<EOL>project = self.fetch_project(args, must_exist=True, include_children=False)<EOL>share_users = self.make_user_list(share_emails, share_usernames)<EOL>print("<STR_LIT>")<EOL>new_project_name = None<EOL>if copy_project:<EOL><INDENT>new_project_name = self.get_new_project_name(project.name)<EOL><DEDENT>to_user = self.remote_store.lookup_or_register_user_by_email_or_username(email, username)<EOL>try:<EOL><INDENT>path_filter = PathFilter(args.include_paths, args.exclude_paths)<EOL>dest_email = self.service.deliver(project, new_project_name, to_user, share_users,<EOL>force_send, path_filter, message)<EOL>print("<STR_LIT>" + dest_email)<EOL><DEDENT>except D4S2Error as ex:<EOL><INDENT>if ex.warning:<EOL><INDENT>print(ex.message)<EOL><DEDENT>else:<EOL><INDENT>raise<EOL><DEDENT><DEDENT>
Begins process that will transfer the project to another user. Send delivery message to D4S2 service specifying a project and a user. When user accepts delivery they receive access and we lose admin privileges. :param args: Namespace arguments parsed from the command line
f3904:c7:m1
def get_new_project_name(self, project_name):
timestamp_str = datetime.datetime.utcnow().strftime('<STR_LIT>')<EOL>return "<STR_LIT>".format(project_name, timestamp_str)<EOL>
Return a unique project name for the copy. :param project_name: str: name of project we will copy :return: str
f3904:c7:m2
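The exact timestamp format string is elided above; a hypothetical sketch of the same idea, appending a per-second UTC timestamp to make the copied project name unique:

import datetime

def new_project_name_sketch(project_name):
    # Format string is an assumption; the record's literal is elided.
    timestamp_str = datetime.datetime.utcnow().strftime('%Y%m%d%H%M%S')
    return '{} {}'.format(project_name, timestamp_str)

print(new_project_name_sketch('mouse-rnaseq'))  # e.g. 'mouse-rnaseq 20240101120000'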
def __init__(self, config):
super(ListCommand, self).__init__(config)<EOL>
Pass in the config for which data service and user to list data for. :param config: Config global configuration for use with this command.
f3904:c8:m0
def run(self, args):
long_format = args.long_format<EOL>if args.project_name or args.project_id:<EOL><INDENT>project = self.fetch_project(args, must_exist=True, include_children=True)<EOL>self.print_project_details(project, long_format)<EOL><DEDENT>else:<EOL><INDENT>self.print_project_list_details(args.auth_role, long_format)<EOL><DEDENT>
Lists project names. :param args: Namespace arguments parsed from the command line
f3904:c8:m1
def print_project_list_details(self, filter_auth_role, long_format):
if filter_auth_role:<EOL><INDENT>projects_details = self.remote_store.get_projects_with_auth_role(auth_role=filter_auth_role)<EOL><DEDENT>else:<EOL><INDENT>projects_details = self.remote_store.get_projects_details()<EOL><DEDENT>if projects_details:<EOL><INDENT>for projects_detail in projects_details:<EOL><INDENT>print(self.get_project_info_line(projects_detail, long_format))<EOL><DEDENT><DEDENT>else:<EOL><INDENT>print(NO_PROJECTS_FOUND_MESSAGE)<EOL><DEDENT>
Prints project names to stdout for all projects or just those with the specified auth_role. :param filter_auth_role: str: optional auth_role to filter project list :param long_format: bool: when true print long format details
f3904:c8:m3
def __init__(self, config):
super(DeleteCommand, self).__init__(config)<EOL>
Pass in the config, which can create a remote_store so we can access the remote data. :param config: Config global configuration for use with this command.
f3904:c9:m0
def run(self, args):
project = self.fetch_project(args, must_exist=True, include_children=False)<EOL>if not args.force:<EOL><INDENT>delete_prompt = "<STR_LIT>".format(project.name)<EOL>if not boolean_input_prompt(delete_prompt):<EOL><INDENT>return<EOL><DEDENT><DEDENT>self.remote_store.delete_project(self.create_project_name_or_id_from_args(args))<EOL>
Deletes a single project specified by project_name in args. :param args: Namespace arguments parsed from the command line
f3904:c9:m1
def __init__(self, config):
super(ListAuthRolesCommand, self).__init__(config)<EOL>
Pass in the config, which can create a remote_store so we can access the remote data. :param config: Config global configuration for use with this command.
f3904:c10:m0
def run(self, args):
auth_roles = self.remote_store.get_active_auth_roles(RemoteAuthRole.PROJECT_CONTEXT)<EOL>if auth_roles:<EOL><INDENT>for auth_role in auth_roles:<EOL><INDENT>print(auth_role.id, "<STR_LIT:->", auth_role.description)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>print("<STR_LIT>")<EOL><DEDENT>
Prints out non-deprecated project-type auth roles. :param args: Namespace arguments parsed from the command line
f3904:c10:m1
def wait_for_processes(processes, size, progress_queue, watcher, item):
while size > <NUM_LIT:0>:<EOL><INDENT>progress_type, value = progress_queue.get()<EOL>if progress_type == ProgressQueue.PROCESSED:<EOL><INDENT>chunk_size = value<EOL>watcher.transferring_item(item, increment_amt=chunk_size)<EOL>size -= chunk_size<EOL><DEDENT>elif progress_type == ProgressQueue.START_WAITING:<EOL><INDENT>watcher.start_waiting()<EOL><DEDENT>elif progress_type == ProgressQueue.DONE_WAITING:<EOL><INDENT>watcher.done_waiting()<EOL><DEDENT>else:<EOL><INDENT>error_message = value<EOL>for process in processes:<EOL><INDENT>process.terminate()<EOL><DEDENT>raise ValueError(error_message)<EOL><DEDENT><DEDENT>for process in processes:<EOL><INDENT>process.join()<EOL><DEDENT>
Watch progress queue for errors or progress. Clean up processes on error or success. :param processes: [Process]: processes we are waiting to finish downloading a file :param size: int: how many values we expect to be processed by processes :param progress_queue: ProgressQueue: queue which will receive tuples of progress or error :param watcher: ProgressPrinter: object we notify of our progress :param item: object: RemoteFile/LocalFile we are transferring.
f3910:m0
def verify_terminal_encoding(encoding):
if encoding and not ("<STR_LIT>" in encoding.upper()):<EOL><INDENT>raise ValueError(TERMINAL_ENCODING_NOT_UTF_ERROR)<EOL><DEDENT>
Raises ValueError when the terminal encoding is not Unicode (i.e. does not contain "UTF", ignoring case). :param encoding: str: encoding we want to check
f3910:m1
def verify_file_private(filename):
if platform.system().upper() != '<STR_LIT>':<EOL><INDENT>filename = os.path.expanduser(filename)<EOL>if os.path.exists(filename):<EOL><INDENT>file_stat = os.stat(filename)<EOL>if mode_allows_group_or_other(file_stat.st_mode):<EOL><INDENT>raise ValueError(CONFIG_FILE_PERMISSIONS_ERROR)<EOL><DEDENT><DEDENT><DEDENT>
Raises ValueError if the file permissions allow group/other access. On Windows this never raises due to the implementation of stat.
f3910:m2
def mode_allows_group_or_other(st_mode):
return (st_mode & stat.S_IRWXO or st_mode & stat.S_IRWXG) != <NUM_LIT:0><EOL>
Returns True if st_mode bitset has group or other permissions :param st_mode: int: bit set from a file :return: bool: true when group or other has some permissions
f3910:m3
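A small check of the bit test above against typical permission modes:

import stat

def allows_group_or_other(st_mode):
    return (st_mode & stat.S_IRWXO or st_mode & stat.S_IRWXG) != 0

print(allows_group_or_other(0o600))  # False: owner-only, considered private
print(allows_group_or_other(0o644))  # True: group/other can read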
def __init__(self, total, msg_verb):
self.total = total<EOL>self.cnt = <NUM_LIT:0><EOL>self.max_width = <NUM_LIT:0><EOL>self.waiting = False<EOL>self.msg_verb = msg_verb<EOL>self.progress_bar = ProgressBar()<EOL>
Set up printer expecting to have transferring_item called total times. :param total: int the number of items we are expecting, used to determine progress :param msg_verb: str verb to use in the progress message
f3910:c1:m0
def transferring_item(self, item, increment_amt=<NUM_LIT:1>):
self.cnt += increment_amt<EOL>percent_done = int(float(self.cnt) / float(self.total) * <NUM_LIT>)<EOL>if KindType.is_project(item):<EOL><INDENT>details = '<STR_LIT>'<EOL><DEDENT>else:<EOL><INDENT>details = os.path.basename(item.path)<EOL><DEDENT>self.progress_bar.update(percent_done, '<STR_LIT>'.format(self.msg_verb, details))<EOL>self.progress_bar.show()<EOL>
Update progress that item is about to be transferred. :param item: LocalFile, LocalFolder, or LocalContent(project) that is about to be sent. :param increment_amt: int amount to increase our count(how much progress have we made)
f3910:c1:m1
def finished(self):
self.progress_bar.set_state(ProgressBar.STATE_DONE)<EOL>self.progress_bar.show()<EOL>
Must be called to print final progress label.
f3910:c1:m2
def show_warning(self, message):
print(message)<EOL>
Shows warnings to the user. :param message: str: Message to display
f3910:c1:m3
def start_waiting(self):
if not self.waiting:<EOL><INDENT>self.waiting = True<EOL>wait_msg = "<STR_LIT>".format(self.msg_verb)<EOL>self.progress_bar.show_waiting(wait_msg)<EOL><DEDENT>
Show waiting progress bar until done_waiting is called. Only has an effect if we are not already in the waiting state.
f3910:c1:m4
def done_waiting(self):
if self.waiting:<EOL><INDENT>self.waiting = False<EOL>self.progress_bar.show_running()<EOL><DEDENT>
Show running progress bar (only has an effect if we are in waiting state).
f3910:c1:m5
def show_running(self):
self.set_state(ProgressBar.STATE_RUNNING)<EOL>self.show()<EOL>
Show running progress bar
f3910:c2:m6
def show_waiting(self, wait_msg):
self.wait_msg = wait_msg<EOL>self.set_state(ProgressBar.STATE_WAITING)<EOL>self.show()<EOL>
Show waiting progress bar until done_waiting is called. Only has an effect if we are in waiting state. :param wait_msg: str: message describing what we are waiting for
f3910:c2:m7
@staticmethod<EOL><INDENT>def walk_project(project, visitor):<DEDENT>
ProjectWalker._visit_content(project, None, visitor)<EOL>
Visit all nodes in the project tree(project, folders, files). :param project: LocalProject project we want to visit all children of. :param visitor: object must implement visit_project, visit_folder, visit_file
f3910:c3:m0
@staticmethod<EOL><INDENT>def _visit_content(item, parent, visitor):<DEDENT>
if KindType.is_project(item):<EOL><INDENT>visitor.visit_project(item)<EOL><DEDENT>elif KindType.is_folder(item):<EOL><INDENT>visitor.visit_folder(item, parent)<EOL><DEDENT>else:<EOL><INDENT>visitor.visit_file(item, parent)<EOL><DEDENT>if not KindType.is_file(item):<EOL><INDENT>for child in item.children:<EOL><INDENT>ProjectWalker._visit_content(child, item, visitor)<EOL><DEDENT><DEDENT>
Recursively visit nodes in the project tree. :param item: LocalContent/LocalFolder/LocalFile we are traversing down from :param parent: LocalContent/LocalFolder parent or None :param visitor: object visiting the tree
f3910:c3:m1
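A hypothetical visitor sketch for the walker above; the real LocalProject/KindType classes are not shown in these records, so this only illustrates the interface walk_project expects (visit_project, visit_folder, visit_file):

class PrintingVisitor(object):
    # Implements the three hooks walk_project calls as it traverses the tree.
    def visit_project(self, item):
        print('project:', item.name)
    def visit_folder(self, item, parent):
        print('  folder:', item.name, 'in', parent.name)
    def visit_file(self, item, parent):
        print('    file:', item.path, 'in', parent.name)

# Usage (assuming a LocalProject tree named project is available):
# ProjectWalker.walk_project(project, PrintingVisitor())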