text_prompt
stringlengths
157
13.1k
code_prompt
stringlengths
7
19.8k
def start_process(self, key):
    """Start a specific process.

    A paused process is resumed with SIGCONT; a 'queued' or 'stashed'
    entry is spawned. Returns True on success, False otherwise
    (including for keys unknown to the queue).
    """
    if key in self.processes and key in self.paused:
        # Resume the whole process group so children continue as well.
        os.killpg(os.getpgid(self.processes[key].pid), signal.SIGCONT)
        self.queue[key]['status'] = 'running'
        self.paused.remove(key)
        return True
    elif key not in self.processes:
        # Guard with .get(): an unknown key previously raised KeyError.
        entry = self.queue.get(key)
        if entry and entry['status'] in ['queued', 'stashed']:
            self.spawn_new(key)
            return True
    return False
def pause_process(self, key):
    """Pause a specific process."""
    # Only a tracked, not-yet-paused process can be paused.
    if key not in self.processes or key in self.paused:
        return False
    # Stop the whole process group so child processes pause too.
    os.killpg(os.getpgid(self.processes[key].pid), signal.SIGSTOP)
    self.queue[key]['status'] = 'paused'
    self.paused.append(key)
    return True
def daemon_factory(path):
    """Create a closure which creates a running daemon.

    We need to create a closure that contains the correct path the
    daemon should be started with. This is needed as the `Daemonize`
    library requires a callable function for daemonization and doesn't
    accept any arguments. This function cleans up sockets and output
    files in case we encounter any exceptions.
    """
    def _run_daemon():
        config_dir = os.path.join(path, '.config/pueue')
        try:
            daemon = Daemon(root_dir=path)
            daemon.main()
        except KeyboardInterrupt:
            print('Keyboard interrupt. Shutting down')
            daemon.stop_daemon()
        except Exception:
            # Best-effort shutdown, then remove sockets/output files
            # before propagating the original error.
            try:
                daemon.stop_daemon()
            except Exception:
                pass
            cleanup(config_dir)
            raise

    return _run_daemon
def main():
    """Execute entry function."""
    args = parser.parse_args()
    args_dict = vars(args)
    root_dir = args_dict['root'] if 'root' in args else None

    if root_dir:
        # A root directory was specified: resolve it and make sure it
        # actually exists; abort otherwise.
        root_dir = os.path.abspath(root_dir)
        if not os.path.exists(root_dir):
            print("The specified directory doesn't exist!")
            sys.exit(1)
    else:
        # Default to the home directory when no root is specified.
        root_dir = os.path.expanduser('~')

    if args.stopdaemon:
        print_command_factory('STOPDAEMON')(vars(args), root_dir)
    elif args.nodaemon:
        # Run the daemon in the foreground.
        daemon_factory(root_dir)()
    elif args.daemon:
        config_dir = os.path.join(root_dir, '.config/pueue')
        os.makedirs(config_dir, exist_ok=True)
        daemon = Daemonize(app='pueue',
                           pid=os.path.join(config_dir, 'pueue.pid'),
                           action=daemon_factory(root_dir),
                           chdir=root_dir)
        daemon.start()
    elif hasattr(args, 'func'):
        try:
            args.func(args_dict, root_dir)
        except EOFError:
            print('Apparently the daemon just died. Sorry for that :/')
    else:
        print('Invalid Command. Please check -h')
def register(host=DFLT_ADDRESS[0], port=DFLT_ADDRESS[1], signum=signal.SIGUSR1):
    """Register a pdb handler for signal 'signum'.

    The handler sets pdb to listen on the ('host', 'port') internet
    address and to start a remote debugging session on accepting a
    socket connection.
    """
    # Delegate to the C extension helper.
    _pdbhandler._register(host, port, signum)
def get_handler():
    """Return the handler as a named tuple.

    The named tuple attributes are 'host', 'port', 'signum'.
    Return None when no handler has been registered.
    """
    host, port, signum = _pdbhandler._registered()
    if not signum:
        # No handler registered -- implicit None.
        return None
    return Handler(
        host if host else DFLT_ADDRESS[0].encode(),
        port if port else DFLT_ADDRESS[1],
        signum,
    )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def wait(self, timeout): '''Wait for the provided time to elapse''' logger.debug('Waiting for %fs', timeout) return self._event.wait(timeout)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def delay(self): '''How long to wait before the next check''' if self._last_checked: return self._interval - (time.time() - self._last_checked) return self._interval
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def callback(self): '''Run the callback''' self._callback(*self._args, **self._kwargs) self._last_checked = time.time()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def run(self): '''Run the callback periodically''' while not self.wait(self.delay()): try: logger.info('Invoking callback %s', self.callback) self.callback() except StandardError: logger.exception('Callback failed')
def login(self, email=None, password=None, app_id=None, api_key=None):
    """Login to MediaFire account.

    Keyword arguments:
    email -- account email
    password -- account password
    app_id -- application ID
    api_key -- API Key (optional)
    """
    session_token = self.api.user_get_session_token(
        app_id=app_id, email=email, password=password, api_key=api_key)
    # Install the session token back into the API client.
    self.api.session = session_token
def get_resource_by_uri(self, uri):
    """Return resource described by MediaFire URI.

    uri -- MediaFire URI

    Examples:

        Folder (using folderkey):
            mf:r5g3p2z0sqs3j
            mf:r5g3p2z0sqs3j/folder/file.ext

        File (using quickkey):
            mf:xkr43dadqa3o2p2

        Path:
            mf:///Documents/file.ext
    """
    location = self._parse_uri(uri)
    if location.startswith("/"):
        # Use path lookup only, root=myfiles
        result = self.get_resource_by_path(location)
    elif "/" in location:
        # mf:abcdefjhijklm/relative/path -- split off the key only.
        # maxsplit must be 1: with the previous maxsplit=2, a path
        # containing two or more slashes produced three parts and the
        # two-name unpacking raised ValueError.
        resource_key, path = location.split('/', 1)
        parent_folder = self.get_resource_by_key(resource_key)
        if not isinstance(parent_folder, Folder):
            raise NotAFolderError(resource_key)
        # perform additional lookup by path
        result = self.get_resource_by_path(
            path, folder_key=parent_folder['folderkey'])
    else:
        # mf:abcdefjhijklm
        result = self.get_resource_by_key(location)
    return result
def get_resource_by_path(self, path, folder_key=None):
    """Return resource by remote path.

    path -- remote path

    Keyword arguments:
    folder_key -- what to use as the root folder (None for root)
    """
    logger.debug("resolving %s", path)

    # Normalize and drop empty path components.
    path = posixpath.normpath(path)
    components = [t for t in path.split(posixpath.sep) if t != '']

    if not components:
        # Request for the root folder itself.
        return Folder(
            self.api.folder_get_info(folder_key)['folder_info']
        )

    resource = None
    for component in components:
        exists = False
        for item in self._folder_get_content_iter(folder_key):
            # Folders carry 'name', files carry 'filename'.
            name = item['name'] if 'name' in item else item['filename']
            if name == component:
                exists = True
                if components[-1] != component:
                    # Still have components to traverse.
                    if 'filename' in item:
                        # Found a file where a directory was expected.
                        raise NotAFolderError(item['filename'])
                    folder_key = item['folderkey']
                else:
                    # Found the leaf.
                    resource = item
                break
        if resource is not None:
            break
        if not exists:
            # Intermediate component does not exist -- bail out.
            break

    if resource is None:
        raise ResourceNotFoundError(path)

    if "quickkey" in resource:
        file_info = self.api.file_get_info(
            resource['quickkey'])['file_info']
        result = File(file_info)
    elif "folderkey" in resource:
        folder_info = self.api.folder_get_info(
            resource['folderkey'])['folder_info']
        result = Folder(folder_info)
    return result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _folder_get_content_iter(self, folder_key=None): """Iterator for api.folder_get_content"""
lookup_params = [ {'content_type': 'folders', 'node': 'folders'}, {'content_type': 'files', 'node': 'files'} ] for param in lookup_params: more_chunks = True chunk = 0 while more_chunks: chunk += 1 content = self.api.folder_get_content( content_type=param['content_type'], chunk=chunk, folder_key=folder_key)['folder_content'] # empty folder/file list if not content[param['node']]: break # no next page if content['more_chunks'] == 'no': more_chunks = False for resource_info in content[param['node']]: yield resource_info
def get_folder_contents_iter(self, uri):
    """Return iterator for directory contents.

    uri -- mediafire URI

    Example:

        for item in get_folder_contents_iter('mf:///Documents'):
            print(item)
    """
    resource = self.get_resource_by_uri(uri)
    if not isinstance(resource, Folder):
        raise NotAFolderError(uri)

    folder_key = resource['folderkey']
    for item in self._folder_get_content_iter(folder_key):
        if 'filename' in item:
            # Work around https://mediafire.mantishub.com/view.php?id=5
            # TODO: remove in 1.0
            if ".patch." in item['filename']:
                continue
            yield File(item)
        elif 'name' in item:
            yield Folder(item)
def create_folder(self, uri, recursive=False):
    """Create folder.

    uri -- MediaFire URI

    Keyword arguments:
    recursive -- set to True to create intermediate folders.
    """
    logger.info("Creating %s", uri)

    # Short-circuit if the folder already exists.
    try:
        resource = self.get_resource_by_uri(uri)
        if isinstance(resource, Folder):
            return resource
        raise NotAFolderError(uri)
    except ResourceNotFoundError:
        pass

    location = self._parse_uri(uri)
    folder_name = posixpath.basename(location)
    parent_uri = 'mf://' + posixpath.dirname(location)

    try:
        parent_node = self.get_resource_by_uri(parent_uri)
        if not isinstance(parent_node, Folder):
            raise NotAFolderError(parent_uri)
        parent_key = parent_node['folderkey']
    except ResourceNotFoundError:
        if not recursive:
            raise
        # Create missing intermediate folders first.
        result = self.create_folder(parent_uri, recursive=True)
        parent_key = result['folderkey']

    # We specify exact location, so don't allow duplicates
    result = self.api.folder_create(
        folder_name, parent_key=parent_key, action_on_duplicate='skip')

    logger.info("Created folder '%s' [mf:%s]",
                result['name'], result['folder_key'])
    return self.get_resource_by_key(result['folder_key'])
def delete_folder(self, uri, purge=False):
    """Delete folder.

    uri -- MediaFire folder URI

    Keyword arguments:
    purge -- delete the folder without sending it to Trash
    """
    try:
        resource = self.get_resource_by_uri(uri)
    except ResourceNotFoundError:
        # Nothing to remove.
        return None

    if not isinstance(resource, Folder):
        raise ValueError("Folder expected, got {}".format(type(resource)))

    func = self.api.folder_purge if purge else self.api.folder_delete

    try:
        result = func(resource['folderkey'])
    except MediaFireApiError as err:
        # NOTE(review): the log message below mentions error 900 while
        # the check is for code 100 -- confirm which code is intended.
        if err.code == 100:
            logger.debug(
                "Delete folder returns error 900 but folder is deleted: "
                "http://forum.mediafiredev.com/showthread.php?129")
            result = {}
        else:
            raise
    return result
def delete_file(self, uri, purge=False):
    """Delete file.

    uri -- MediaFire file URI

    Keyword arguments:
    purge -- delete the file without sending it to Trash.
    """
    try:
        resource = self.get_resource_by_uri(uri)
    except ResourceNotFoundError:
        # Nothing to remove.
        return None

    if not isinstance(resource, File):
        raise ValueError("File expected, got {}".format(type(resource)))

    func = self.api.file_purge if purge else self.api.file_delete
    return func(resource['quickkey'])
def delete_resource(self, uri, purge=False):
    """Delete file or folder

    uri -- mediafire URI

    Keyword arguments:
    purge -- delete the resource without sending it to Trash.
    """
    try:
        resource = self.get_resource_by_uri(uri)
    except ResourceNotFoundError:
        # Nothing to remove.
        return None

    # Dispatch on the resource type.
    if isinstance(resource, File):
        return self.delete_file(uri, purge)
    if isinstance(resource, Folder):
        return self.delete_folder(uri, purge)
    raise ValueError('Unsupported resource: {}'.format(type(resource)))
def _prepare_upload_info(self, source, dest_uri):
    """Prepare Upload object, resolve paths"""
    try:
        dest_resource = self.get_resource_by_uri(dest_uri)
    except ResourceNotFoundError:
        dest_resource = None

    is_fh = hasattr(source, 'read')

    folder_key = None
    name = None

    if dest_resource:
        if isinstance(dest_resource, File):
            # Overwriting an existing file.
            folder_key = dest_resource['parent_folderkey']
            name = dest_resource['filename']
        elif isinstance(dest_resource, Folder):
            if is_fh:
                # A bare filehandle carries no name to upload under.
                raise ValueError("Cannot determine target file name")
            basename = posixpath.basename(source)
            dest_uri = posixpath.join(dest_uri, basename)
            try:
                result = self.get_resource_by_uri(dest_uri)
                if isinstance(result, Folder):
                    raise ValueError("Target is a folder (file expected)")
                folder_key = result.get('parent_folderkey', None)
                name = result['filename']
            except ResourceNotFoundError:
                # ok, neither a file nor folder, proceed
                folder_key = dest_resource['folderkey']
                name = basename
        else:
            raise Exception("Unknown resource type")
    else:
        # Destination does not exist: resolve the parent folder.
        parent_uri = '/'.join(dest_uri.split('/')[0:-1])
        result = self.get_resource_by_uri(parent_uri)
        if not isinstance(result, Folder):
            raise NotAFolderError("Parent component is not a folder")
        folder_key = result['folderkey']
        name = posixpath.basename(dest_uri)

    return folder_key, name
def upload_file(self, source, dest_uri):
    """Upload file to MediaFire.

    source -- path to the file or a file-like object (e.g. io.BytesIO)
    dest_uri -- MediaFire Resource URI
    """
    folder_key, name = self._prepare_upload_info(source, dest_uri)

    is_fh = hasattr(source, 'read')
    fd = None
    try:
        # Re-use the caller's filehandle, or open the path ourselves.
        fd = source if is_fh else open(source, 'rb')
        return MediaFireUploader(self.api).upload(
            fd, name, folder_key=folder_key,
            action_on_duplicate='replace')
    finally:
        # Close the filehandle only if we opened it.
        if fd and not is_fh:
            fd.close()
def download_file(self, src_uri, target):
    """Download file from MediaFire.

    src_uri -- MediaFire file URI to download
    target -- download path or file-like object in write mode
    """
    resource = self.get_resource_by_uri(src_uri)
    if not isinstance(resource, File):
        raise MediaFireError("Only files can be downloaded")

    quick_key = resource['quickkey']
    result = self.api.file_get_links(quick_key=quick_key,
                                     link_type='direct_download')
    direct_download = result['links'][0]['direct_download']

    # Force download over HTTPS
    direct_download = direct_download.replace('http:', 'https:')

    name = resource['filename']
    target_is_filehandle = True if hasattr(target, 'write') else False

    if not target_is_filehandle:
        # Treat an existing directory (or trailing slash) as a folder
        # and append the remote file name.
        if (os.path.exists(target) and os.path.isdir(target)) or \
                target.endswith("/"):
            target = os.path.join(target, name)
        if not os.path.isdir(os.path.dirname(target)):
            os.makedirs(os.path.dirname(target))

    logger.info("Downloading %s to %s", src_uri, target)

    response = requests.get(direct_download, stream=True)
    try:
        out_fd = target if target_is_filehandle else open(target, 'wb')

        # Verify integrity while streaming chunks out.
        checksum = hashlib.sha256()
        for chunk in response.iter_content(chunk_size=4096):
            if chunk:
                out_fd.write(chunk)
                checksum.update(chunk)

        checksum_hex = checksum.hexdigest().lower()
        if checksum_hex != resource['hash']:
            raise DownloadError("Hash mismatch ({} != {})".format(
                resource['hash'], checksum_hex))

        logger.info("Download completed successfully")
    finally:
        if not target_is_filehandle:
            out_fd.close()
def update_file_metadata(self, uri, filename=None, description=None,
                         mtime=None, privacy=None):
    """Update file metadata.

    uri -- MediaFire file URI

    Supplying the following keyword arguments would change the
    metadata on the server side:

    filename -- rename file
    description -- set file description string
    mtime -- set file modification time
    privacy -- set file privacy - 'private' or 'public'
    """
    resource = self.get_resource_by_uri(uri)
    if not isinstance(resource, File):
        raise ValueError('Expected File, got {}'.format(type(resource)))

    return self.api.file_update(resource['quickkey'],
                                filename=filename,
                                description=description,
                                mtime=mtime,
                                privacy=privacy)
def update_folder_metadata(self, uri, foldername=None, description=None,
                           mtime=None, privacy=None,
                           privacy_recursive=None):
    """Update folder metadata.

    uri -- MediaFire folder URI

    Supplying the following keyword arguments would change the
    metadata on the server side:

    foldername -- rename folder
    description -- set folder description string
    mtime -- set folder modification time
    privacy -- set folder privacy - 'private' or 'public'
    privacy_recursive -- update folder privacy recursively
    """
    resource = self.get_resource_by_uri(uri)
    if not isinstance(resource, Folder):
        raise ValueError('Expected Folder, got {}'.format(type(resource)))

    return self.api.folder_update(resource['folderkey'],
                                  foldername=foldername,
                                  description=description,
                                  mtime=mtime,
                                  privacy=privacy,
                                  privacy_recursive=privacy_recursive)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _parse_uri(uri): """Parse and validate MediaFire URI."""
tokens = urlparse(uri) if tokens.netloc != '': logger.error("Invalid URI: %s", uri) raise ValueError("MediaFire URI format error: " "host should be empty - mf:///path") if tokens.scheme != '' and tokens.scheme != URI_SCHEME: raise ValueError("MediaFire URI format error: " "must start with 'mf:' or '/'") return posixpath.normpath(tokens.path)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def merged(self): '''The clean stats from all the hosts reporting to this host.''' stats = {} for topic in self.client.topics()['topics']: for producer in self.client.lookup(topic)['producers']: hostname = producer['broadcast_address'] port = producer['http_port'] host = '%s_%s' % (hostname, port) stats[host] = nsqd.Client( 'http://%s:%s/' % (hostname, port)).clean_stats() return stats
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def stats(self): '''Stats that have been aggregated appropriately.''' data = Counter() for name, value, aggregated in self.raw: if aggregated: data['%s.max' % name] = max(data['%s.max' % name], value) data['%s.total' % name] += value else: data[name] = value return sorted(data.items())
def get_curline():
    """Return the current python source line."""
    if Frame:
        frame = Frame.get_selected_python_frame()
        if frame:
            line = ''
            f = frame.get_pyop()
            if f and not f.is_optimized_out():
                # Strip the cwd prefix to keep the path short.
                cwd = os.path.join(os.getcwd(), '')
                fname = f.filename()
                if cwd in fname:
                    fname = fname[len(cwd):]
                try:
                    line = f.current_line()
                except IOError:
                    pass
                if line:
                    # Use repr(line) to avoid UnicodeDecodeError on the
                    # following print invocation.
                    line = repr(line).strip("'")
                    line = line[:-2] if line.endswith(r'\n') else line
                return ('-> %s(%s): %s'
                        % (fname, f.current_line_num(), line))
    return ''
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def reconnected(self, conn): '''Subscribe connection and manipulate its RDY state''' conn.sub(self._topic, self._channel) conn.rdy(1)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def distribute_ready(self): '''Distribute the ready state across all of the connections''' connections = [c for c in self.connections() if c.alive()] if len(connections) > self._max_in_flight: raise NotImplementedError( 'Max in flight must be greater than number of connections') else: # Distribute the ready count evenly among the connections for count, conn in distribute(self._max_in_flight, connections): # We cannot exceed the maximum RDY count for a connection if count > conn.max_rdy_count: logger.info( 'Using max_rdy_count (%i) instead of %i for %s RDY', conn.max_rdy_count, count, conn) count = conn.max_rdy_count logger.info('Sending RDY %i to %s', count, conn) conn.rdy(count)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def needs_distribute_ready(self): '''Determine whether or not we need to redistribute the ready state''' # Try to pre-empty starvation by comparing current RDY against # the last value sent. alive = [c for c in self.connections() if c.alive()] if any(c.ready <= (c.last_ready_sent * 0.25) for c in alive): return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def read(self): '''Read some number of messages''' found = Client.read(self) # Redistribute our ready state if necessary if self.needs_distribute_ready(): self.distribute_ready() # Finally, return all the results we've read return found
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def profiler(): '''Profile the block''' import cProfile import pstats pr = cProfile.Profile() pr.enable() yield pr.disable() ps = pstats.Stats(pr).sort_stats('tottime') ps.print_stats()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def messages(count, size): '''Generator for count messages of the provided size''' import string # Make sure we have at least 'size' letters letters = islice(cycle(chain(string.lowercase, string.uppercase)), size) return islice(cycle(''.join(l) for l in permutations(letters, size)), count)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def stats(): '''Read a stream of floats and give summary statistics''' import re import sys import math values = [] for line in sys.stdin: values.extend(map(float, re.findall(r'\d+\.?\d+', line))) mean = sum(values) / len(values) variance = sum((val - mean) ** 2 for val in values) / len(values) print '%3i items; mean: %10.5f; std-dev: %10.5f' % ( len(values), mean, math.sqrt(variance))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def ready(self): '''Whether or not enough time has passed since the last failure''' if self._last_failed: delta = time.time() - self._last_failed return delta >= self.backoff() return True
def execute_log(args, root_dir):
    """Print the current log file.

    Args:
        args['keys'] (int): If given, we only look at the specified
            processes.
        root_dir (string): The path to the root directory the daemon
            is running in.
    """
    if args.get('keys'):
        # Print the logs of all specified processes.
        config_dir = os.path.join(root_dir, '.config/pueue')
        queue_path = os.path.join(config_dir, 'queue')
        if os.path.exists(queue_path):
            queue_file = open(queue_path, 'rb')
            try:
                queue = pickle.load(queue_file)
            except Exception:
                print('Queue log file seems to be corrupted. Aborting.')
                return
            queue_file.close()
        else:
            print('There is no queue log file. Aborting.')
            return

        for key in args.get('keys'):
            # Check if there is an entry with this key.
            if queue.get(key) and queue[key]['status'] in ['failed', 'done']:
                entry = queue[key]
                print('Log of entry: {}'.format(key))
                print('Returncode: {}'.format(entry['returncode']))
                print('Command: {}'.format(entry['command']))
                print('Path: {}'.format(entry['path']))
                print('Start: {}, End: {} \n'.format(entry['start'],
                                                     entry['end']))
                # Write STDERR
                if len(entry['stderr']) > 0:
                    print(Color('{autored}Stderr output: {/autored}\n ')
                          + entry['stderr'])
                # Write STDOUT
                if len(entry['stdout']) > 0:
                    print(Color('{autogreen}Stdout output: {/autogreen}\n ')
                          + entry['stdout'])
            else:
                print('No finished process with key {}.'.format(key))
    else:
        # Print the log of all processes.
        log_path = os.path.join(root_dir, '.local/share/pueue/queue.log')
        log_file = open(log_path, 'r')
        print(log_file.read())
def execute_show(args, root_dir):
    """Print stderr and stdout of the current running process.

    Args:
        args['watch'] (bool): If True, we open a curses session and
            tail the output live in the console.
        root_dir (string): The path to the root directory the daemon
            is running in.
    """
    key = None
    if args.get('key'):
        key = args['key']
        status = command_factory('status')({}, root_dir=root_dir)
        if key not in status['data'] or status['data'][key]['status'] != 'running':
            print('No running process with this key, use `log` to show finished processes.')
            return
    else:
        # In case no key provided, we take the oldest running process.
        status = command_factory('status')({}, root_dir=root_dir)
        if isinstance(status['data'], str):
            print(status['data'])
            return
        for k in sorted(status['data'].keys()):
            if status['data'][k]['status'] == 'running':
                key = k
                break
        if key is None:
            print('No running process, use `log` to show finished processes.')
            return

    config_dir = os.path.join(root_dir, '.config/pueue')
    # Get current pueue stdout/stderr files for this process.
    stdoutFile = os.path.join(config_dir, 'pueue_process_{}.stdout'.format(key))
    stderrFile = os.path.join(config_dir, 'pueue_process_{}.stderr'.format(key))
    stdoutDescriptor = open(stdoutFile, 'r')
    stderrDescriptor = open(stderrFile, 'r')
    running = True

    # Continually print output with curses, or just print once.
    if args['watch']:
        # Initialize curses.
        stdscr = curses.initscr()
        curses.noecho()
        curses.cbreak()
        curses.curs_set(2)
        stdscr.keypad(True)
        stdscr.refresh()
        try:
            # Update output every two seconds.
            while running:
                stdscr.clear()
                stdoutDescriptor.seek(0)
                message = stdoutDescriptor.read()
                stdscr.addstr(0, 0, message)
                stdscr.refresh()
                time.sleep(2)
        except Exception:
            # Curses cleanup.
            curses.nocbreak()
            stdscr.keypad(False)
            curses.echo()
            curses.endwin()
    else:
        print('Stdout output:\n')
        stdoutDescriptor.seek(0)
        print(get_descriptor_output(stdoutDescriptor, key))
        print('\n\nStderr output:\n')
        stderrDescriptor.seek(0)
        print(get_descriptor_output(stderrDescriptor, key))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def fetch_track(self, track_id, terr=KKBOXTerritory.TAIWAN): ''' Fetches a song track by given ID. :param track_id: the track ID. :type track_id: str :return: API response. :rtype: dict See `https://docs-en.kkbox.codes/v1.1/reference#tracks-track_id`. ''' url = 'https://api.kkbox.com/v1.1/tracks/%s' % track_id url += '?' + url_parse.urlencode({'territory': terr}) return self.http._post_data(url, None, self.http._headers_with_access_token())
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def show(self, user, feed, id): """ Show a specific indicator by id :param user: feed username :param feed: feed name :param id: indicator endpoint id [INT] :return: dict Example: ret = Indicator.show('csirtgadgets','port-scanners', '1234') """
uri = '/users/{}/feeds/{}/indicators/{}'.format(user, feed, id) return self.client.get(uri)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create(self): """ Submit action on the Indicator object :return: Indicator Object """
uri = '/users/{0}/feeds/{1}/indicators'\ .format(self.user, self.feed) data = { "indicator": json.loads(str(self.indicator)), "comment": self.comment, "content": self.content } if self.attachment: attachment = self._file_to_attachment( self.attachment, filename=self.attachment_name) data['attachment'] = { 'data': attachment['data'], 'filename': attachment['filename'] } if not data['indicator'].get('indicator'): data['indicator']['indicator'] = attachment['sha1'] if not data['indicator'].get('indicator'): raise Exception('Missing indicator') return self.client.post(uri, data)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_bulk(self, indicators, user, feed): from .constants import API_VERSION if API_VERSION == '1': print("create_bulk currently un-avail with APIv1") raise SystemExit """ Submit action against the IndicatorBulk endpoint :param indicators: list of Indicator Objects :param user: feed username :param feed: feed name :return: list of Indicator Objects submitted from csirtgsdk.client import Client from csirtgsdk.indicator import Indicator remote = 'https://csirtg.io/api' token = '' verify_ssl = True i = { 'indicator': 'example.com', 'feed': 'test', 'user': 'admin', 'comment': 'this is a test', } data = [] cli = Client(remote=remote, token=token, verify_ssl=verify_ssl) for x in range(0, 5): data.append( Indicator(cli, i) ) ret = cli.submit_bulk(data, 'csirtgadgets', 'test-feed') """
uri = '/users/{0}/feeds/{1}/indicators_bulk'.format(user, feed) data = { 'indicators': [ { 'indicator': i.args.indicator, 'feed_id': i.args.feed, 'tag_list': i.args.tags, "description": i.args.description, "portlist": i.args.portlist, "protocol": i.args.protocol, 'firsttime': i.args.firsttime, 'lasttime': i.args.lasttime, 'portlist_src': i.args.portlist_src, 'comment': { 'content': i.args.comment }, 'rdata': i.args.rdata, 'rtype': i.args.rtype, 'content': i.args.content, 'provider': i.args.provider, } for i in indicators ] } return self.client.post(uri, data)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parse_keystring(conn, key_string): """ A utility function to turn strings like 'Mod1+Mod4+a' into a pair corresponding to its modifiers and keycode. :param key_string: String starting with zero or more modifiers followed by exactly one key press. Available modifiers: Control, Mod1, Mod2, Mod3, Mod4, Mod5, Shift, Lock :type key_string: str :return: Tuple of modifier mask and keycode :rtype: (mask, int) """
# FIXME this code is temporary hack, requires better abstraction from PyQt5.QtGui import QKeySequence from PyQt5.QtCore import Qt from .qt_keycodes import KeyTbl, ModsTbl keysequence = QKeySequence(key_string) ks = keysequence[0] # Calculate the modifiers mods = Qt.NoModifier qtmods = Qt.NoModifier modifiers = 0 if (ks & Qt.ShiftModifier == Qt.ShiftModifier): mods |= ModsTbl.index(Qt.ShiftModifier) qtmods |= Qt.ShiftModifier.real modifiers |= getattr(xproto.KeyButMask, "Shift", 0) if (ks & Qt.AltModifier == Qt.AltModifier): mods |= ModsTbl.index(Qt.AltModifier) qtmods |= Qt.AltModifier.real modifiers |= getattr(xproto.KeyButMask, "Mod1", 0) if (ks & Qt.ControlModifier == Qt.ControlModifier): mods |= ModsTbl.index(Qt.ControlModifier) qtmods |= Qt.ControlModifier.real modifiers |= getattr(xproto.KeyButMask, "Control", 0) # Calculate the keys qtkeys = ks ^ qtmods key = QKeySequence(Qt.Key(qtkeys)).toString().lower() keycode = lookup_string(conn, key) return modifiers, keycode # Fallback logic modifiers = 0 keycode = None key_string = "Shift+Control+A" for part in key_string.split('+'): if hasattr(xproto.KeyButMask, part): modifiers |= getattr(xproto.KeyButMask, part) else: if len(part) == 1: part = part.lower() keycode = lookup_string(conn, part) return modifiers, keycode
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def lookup_string(conn, kstr): """ Finds the keycode associated with a string representation of a keysym. :param kstr: English representation of a keysym. :return: Keycode, if one exists. :rtype: int """
if kstr in keysyms: return get_keycode(conn, keysyms[kstr]) elif len(kstr) > 1 and kstr.capitalize() in keysyms: return get_keycode(conn, keysyms[kstr.capitalize()]) return None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_keyboard_mapping(conn): """ Return a keyboard mapping cookie that can be used to fetch the table of keysyms in the current X environment. :rtype: xcb.xproto.GetKeyboardMappingCookie """
mn, mx = get_min_max_keycode(conn) return conn.core.GetKeyboardMapping(mn, mx - mn + 1)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_keyboard_mapping_unchecked(conn): """ Return an unchecked keyboard mapping cookie that can be used to fetch the table of keysyms in the current X environment. :rtype: xcb.xproto.GetKeyboardMappingCookie """
mn, mx = get_min_max_keycode() return conn.core.GetKeyboardMappingUnchecked(mn, mx - mn + 1)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_keycode(conn, keysym): """ Given a keysym, find the keycode mapped to it in the current X environment. It is necessary to search the keysym table in order to do this, including all columns. :param keysym: An X keysym. :return: A keycode or None if one could not be found. :rtype: int """
mn, mx = get_min_max_keycode(conn) cols = __kbmap.keysyms_per_keycode for i in range(mn, mx + 1): for j in range(0, cols): ks = get_keysym(conn, i, col=j) if ks == keysym: return i return None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def ungrab_key(conn, wid, modifiers, key): """ Ungrabs a key that was grabbed by ``grab_key``. Similarly, it will return True on success and False on failure. When ungrabbing a key, the parameters to this function should be *precisely* the same as the parameters to ``grab_key``. :param wid: A window identifier. :type wid: int :param modifiers: A modifier mask. :type modifiers: int :param key: A keycode. :type key: int :rtype: bool """
try: for mod in TRIVIAL_MODS: conn.core.UngrabKeyChecked(key, wid, modifiers | mod).check() return True except xproto.BadAccess: return False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def update_keyboard_mapping(conn, e): """ Whenever the keyboard mapping is changed, this function needs to be called to update xpybutil's internal representing of the current keysym table. Indeed, xpybutil will do this for you automatically. Moreover, if something is changed that affects the current keygrabs, xpybutil will initiate a regrab with the changed keycode. :param e: The MappingNotify event. :type e: xcb.xproto.MappingNotifyEvent :rtype: void """
global __kbmap, __keysmods newmap = get_keyboard_mapping(conn).reply() if e is None: __kbmap = newmap __keysmods = get_keys_to_mods(conn) return if e.request == xproto.Mapping.Keyboard: changes = {} for kc in range(*get_min_max_keycode(conn)): knew = get_keysym(kc, kbmap=newmap) oldkc = get_keycode(conn, knew) if oldkc != kc: changes[oldkc] = kc __kbmap = newmap __regrab(changes) elif e.request == xproto.Mapping.Modifier: __keysmods = get_keys_to_mods()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_storages(self, storage_type='normal'): """ Return a list of Storage objects from the API. Storage types: public, private, normal, backup, cdrom, template, favorite """
res = self.get_request('/storage/' + storage_type) return Storage._create_storage_objs(res['storages'], cloud_manager=self)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_storage(self, storage): """ Return a Storage object from the API. """
res = self.get_request('/storage/' + str(storage)) return Storage(cloud_manager=self, **res['storage'])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_storage(self, size=10, tier='maxiops', title='Storage disk', zone='fi-hel1', backup_rule={}): """ Create a Storage object. Returns an object based on the API's response. """
body = { 'storage': { 'size': size, 'tier': tier, 'title': title, 'zone': zone, 'backup_rule': backup_rule } } res = self.post_request('/storage', body) return Storage(cloud_manager=self, **res['storage'])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def modify_storage(self, storage, size, title, backup_rule={}): """ Modify a Storage object. Returns an object based on the API's response. """
res = self._modify_storage(str(storage), size, title, backup_rule) return Storage(cloud_manager=self, **res['storage'])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def attach_storage(self, server, storage, storage_type, address): """ Attach a Storage object to a Server. Return a list of the server's storages. """
body = {'storage_device': {}} if storage: body['storage_device']['storage'] = str(storage) if storage_type: body['storage_device']['type'] = storage_type if address: body['storage_device']['address'] = address url = '/server/{0}/storage/attach'.format(server) res = self.post_request(url, body) return Storage._create_storage_objs(res['server']['storage_devices'], cloud_manager=self)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def detach_storage(self, server, address): """ Detach a Storage object from a Server. Return a list of the server's storages. """
body = {'storage_device': {'address': address}} url = '/server/{0}/storage/detach'.format(server) res = self.post_request(url, body) return Storage._create_storage_objs(res['server']['storage_devices'], cloud_manager=self)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _reset(self, **kwargs): """ Reset after repopulating from API. """
# there are some inconsistenciens in the API regarding these # note: this could be written in fancier ways, but this way is simpler if 'uuid' in kwargs: self.uuid = kwargs['uuid'] elif 'storage' in kwargs: # let's never use storage.storage internally self.uuid = kwargs['storage'] if 'title' in kwargs: self.title = kwargs['title'] elif 'storage_title' in kwargs: self.title = kwargs['storage_title'] if 'size' in kwargs: self.size = kwargs['size'] elif 'storage_size' in kwargs: self.size = kwargs['storage_size'] # send the rest to super._reset filtered_kwargs = dict( (key, val) for key, val in kwargs.items() if key not in ['uuid', 'storage', 'title', 'storage_title', 'size', 'storage_size'] ) super(Storage, self)._reset(**filtered_kwargs)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def fetch_album(self, album_id, terr=KKBOXTerritory.TAIWAN): ''' Fetches an album by given ID. :param album_id: the album ID. :type album_id: str :param terr: the current territory. :return: API response. :rtype: dict See `https://docs-en.kkbox.codes/v1.1/reference#albums-album_id`. ''' url = 'https://api.kkbox.com/v1.1/albums/%s' % album_id url += '?' + url_parse.urlencode({'territory': terr}) return self.http._post_data(url, None, self.http._headers_with_access_token())
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def lookup(self): """ Prints name, author, size and age """
print "%s by %s, size: %s, uploaded %s ago" % (self.name, self.author, self.size, self.age)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_max_page(self, url): """ Open url and return the number of pages """
html = requests.get(url).text pq = PyQuery(html) try: tds = int(pq("h2").text().split()[-1]) if tds % 25: return tds / 25 + 1 return tds / 25 except ValueError: raise ValueError("No results found!")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def build(self, update=True): """ Build and return url. Also update max_page. """
ret = self.base + self.query page = "".join(("/", str(self.page), "/")) if self.category: category = " category:" + self.category else: category = "" if self.order: order = "".join(("?field=", self.order[0], "&sorder=", self.order[1])) else: order = "" ret = "".join((self.base, self.query, category, page, order)) if update: self.max_page = self._get_max_page(ret) return ret
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def build(self, update=True): """ Build and return url. Also update max_page. URL structure for user torrent lists differs from other result lists as the page number is part of the query string and not the URL path """
query_str = "?page={}".format(self.page) if self.order: query_str += "".join(("&field=", self.order[0], "&sorder=",self.order[1])) ret = "".join((self.base, self.user, "/uploads/", query_str)) if update: self.max_page = self._get_max_page(ret) return ret
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _items(self): """ Parse url and yield namedtuple Torrent for every torrent on page """
torrents = map(self._get_torrent, self._get_rows()) for t in torrents: yield t
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_torrent(self, row): """ Parse row into namedtuple """
td = row("td") name = td("a.cellMainLink").text() name = name.replace(" . ", ".").replace(" .", ".") author = td("a.plain").text() verified_author = True if td(".lightgrey>.ka-verify") else False category = td("span").find("strong").find("a").eq(0).text() verified_torrent = True if td(".icon16>.ka-green") else False comments = td(".iaconbox>.icommentjs>.iconvalue").text() torrent_link = "http://" + BASE.domain if td("a.cellMainLink").attr("href") is not None: torrent_link += td("a.cellMainLink").attr("href") magnet_link = td("a[data-nop]").eq(1).attr("href") download_link = td("a[data-download]").attr("href") td_centers = row("td.center") size = td_centers.eq(0).text() files = td_centers.eq(1).text() age = " ".join(td_centers.eq(2).text().split()) seed = td_centers.eq(3).text() leech = td_centers.eq(4).text() return Torrent(name, author, verified_author, category, size, files, age, seed, leech, verified_torrent, comments, torrent_link, magnet_link, download_link)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_rows(self): """ Return all rows on page """
html = requests.get(self.url.build()).text if re.search('did not match any documents', html): return [] pq = PyQuery(html) rows = pq("table.data").find("tr") return map(rows.eq, range(rows.size()))[1:]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def pages(self, page_from, page_to): """ Yield torrents in range from page_from to page_to """
if not all([page_from < self.url.max_page, page_from > 0, page_to <= self.url.max_page, page_to > page_from]): raise IndexError("Invalid page numbers") size = (page_to + 1) - page_from threads = ret = [] page_list = range(page_from, page_to+1) locks = [threading.Lock() for i in range(size)] for lock in locks[1:]: lock.acquire() def t_function(pos): """ Thread function that fetch page for list of torrents """ res = self.page(page_list[pos]).list() locks[pos].acquire() ret.extend(res) if pos != size-1: locks[pos+1].release() threads = [threading.Thread(target=t_function, args=(i,)) for i in range(size)] for thread in threads: thread.start() for thread in threads: thread.join() for torrent in ret: yield torrent
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def all(self): """ Yield torrents in range from current page to last page """
# Delegate to pages(), spanning from the current page through the last.
return self.pages(self.url.page, self.url.max_page)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def order(self, field, order=None): """ Set field and order set by arguments """
if not order: order = ORDER.DESC self.url.order = (field, order) self.url.set_page(1) return self
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def category(self, category): """ Change category of current search and return self """
self.url.category = category self.url.set_page(1) return self
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def destroy(self): """ Remove this FirewallRule from the API. This instance must be associated with a server for this method to work, which is done by instantiating via server.get_firewall_rules(). """
if not hasattr(self, 'server') or not self.server: raise Exception( """FirewallRule not associated with server; please use or server.get_firewall_rules() to get objects that are associated with a server. """) return self.server.cloud_manager.delete_firewall_rule( self.server.uuid, self.position )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def fetch_new_release_category(self, category_id, terr=KKBOXTerritory.TAIWAN): ''' Fetches new release categories by given ID. :param category_id: the station ID. :type category_id: str :param terr: the current territory. :return: API response. :rtype: list See `https://docs-en.kkbox.codes/v1.1/reference#newreleasecategories-category_id` ''' url = 'https://api.kkbox.com/v1.1/new-release-categories/%s' % category_id url += '?' + url_parse.urlencode({'territory': terr}) return self.http._post_data(url, None, self.http._headers_with_access_token())
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def fetch_top_tracks_of_artist(self, artist_id, terr=KKBOXTerritory.TAIWAN):
    '''
    Fetches the top tracks belonging to an artist by the given ID.

    :param artist_id: the artist ID.
    :type artist_id: str
    :param terr: the current territory.
    :return: API response.
    :rtype: dict

    See `https://docs-en.kkbox.codes/v1.1/reference#artists-artist_id-toptracks`.
    '''
    url = 'https://api.kkbox.com/v1.1/artists/%s/top-tracks' % artist_id
    url += '?' + url_parse.urlencode({'territory': terr})
    return self.http._post_data(url, None, self.http._headers_with_access_token())
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_tags(self): """List all tags as Tag objects."""
res = self.get_request('/tag') return [Tag(cloud_manager=self, **tag) for tag in res['tags']['tag']]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_tag(self, name): """Return the tag as Tag object."""
res = self.get_request('/tag/' + name) return Tag(cloud_manager=self, **res['tag'])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_tag(self, name, description=None, servers=[]): """ Create a new Tag. Only name is mandatory. Returns the created Tag object. """
servers = [str(server) for server in servers] body = {'tag': Tag(name, description, servers).to_dict()} res = self.request('POST', '/tag', body) return Tag(cloud_manager=self, **res['tag'])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def remove_tags(self, server, tags): """ Remove tags from a server. - server: Server object or UUID string - tags: list of Tag objects or strings """
uuid = str(server) tags = [str(tag) for tag in tags] url = '/server/{0}/untag/{1}'.format(uuid, ','.join(tags)) return self.post_request(url)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def assignIfExists(opts, default=None, **kwargs): """ Helper for assigning object attributes from API responses. """
for opt in opts: if(opt in kwargs): return kwargs[opt] return default
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def expand(self, info=b"", length=32): ''' Generate output key material based on an `info` value Arguments: - info - context to generate the OKM - length - length in bytes of the key to generate See the HKDF draft RFC for guidance. ''' return hkdf_expand(self._prk, info, length, self._hash)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def login_user_block(username, ssh_keys, create_password=True): """ Helper function for creating Server.login_user blocks. (see: https://www.upcloud.com/api/8-servers/#create-server) """
block = { 'create_password': 'yes' if create_password is True else 'no', 'ssh_keys': { 'ssh_key': ssh_keys } } if username: block['username'] = username return block
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _reset(self, server, **kwargs): """ Reset the server object with new values given as params. - server: a dict representing the server. e.g the API response. - kwargs: any meta fields such as cloud_manager and populated. Note: storage_devices and ip_addresses may be given in server as dicts or in kwargs as lists containing Storage and IPAddress objects. """
if server: # handle storage, ip_address dicts and tags if they exist Server._handle_server_subobjs(server, kwargs.get('cloud_manager')) for key in server: object.__setattr__(self, key, server[key]) for key in kwargs: object.__setattr__(self, key, kwargs[key])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def populate(self): """ Sync changes from the API to the local object. Note: syncs ip_addresses and storage_devices too (/server/uuid endpoint) """
server, IPAddresses, storages = self.cloud_manager.get_server_data(self.uuid) self._reset( server, ip_addresses=IPAddresses, storage_devices=storages, populated=True ) return self
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def save(self): """ Sync local changes in server's attributes to the API. Note: DOES NOT sync IPAddresses and storage_devices, use add_ip, add_storage, remove_ip, remove_storage instead. """
# dict comprehension that also works with 2.6 # http://stackoverflow.com/questions/21069668/alternative-to-dict-comprehension-prior-to-python-2-7 kwargs = dict( (field, getattr(self, field)) for field in self.updateable_fields if hasattr(self, field) ) self.cloud_manager.modify_server(self.uuid, **kwargs) self._reset(kwargs)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def restart(self, hard=False, timeout=30, force=True): """ Restart the server. By default, issue a soft restart with a timeout of 30s and a hard restart after the timeout. After the a timeout a hard restart is performed if the server has not stopped. Note: API responds immediately (unlike in start), with state: started. This client will, however, set state as 'maintenance' to signal that the server is neither started nor stopped. """
body = dict() body['restart_server'] = { 'stop_type': 'hard' if hard else 'soft', 'timeout': '{0}'.format(timeout), 'timeout_action': 'destroy' if force else 'ignore' } path = '/server/{0}/restart'.format(self.uuid) self.cloud_manager.post_request(path, body) object.__setattr__(self, 'state', 'maintenance')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def remove_ip(self, IPAddress): """ Release the specified IP-address from the server. """
# Release the address via the API first; only then drop it from the
# locally cached list so local state never claims an IP the API rejected.
self.cloud_manager.release_ip(IPAddress.address)
self.ip_addresses.remove(IPAddress)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_storage(self, storage=None, type='disk', address=None): """ Attach the given storage to the Server. Default address is next available. """
self.cloud_manager.attach_storage(server=self.uuid, storage=storage.uuid, storage_type=type, address=address) storage.address = address storage.type = type self.storage_devices.append(storage)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def remove_storage(self, storage): """ Remove Storage from a Server. The Storage must be a reference to an object in Server.storage_devices or the method will throw an Exception. A Storage from get_storage(uuid) will not work as it is missing the 'address' property. """
if not hasattr(storage, 'address'): raise Exception( ('Storage does not have an address. ' 'Access the Storage via Server.storage_devices ' 'so they include an address. ' '(This is due how the API handles Storages)') ) self.cloud_manager.detach_storage(server=self.uuid, address=storage.address) self.storage_devices.remove(storage)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def configure_firewall(self, FirewallRules): """ Helper function for automatically adding several FirewallRules in series. """
firewall_rule_bodies = [ FirewallRule.to_dict() for FirewallRule in FirewallRules ] return self.cloud_manager.configure_firewall(self, firewall_rule_bodies)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def prepare_post_body(self): """ Prepare a JSON serializable dict from a Server instance with nested. Storage instances. """
body = dict() # mandatory body['server'] = { 'hostname': self.hostname, 'zone': self.zone, 'title': self.title, 'storage_devices': {} } # optional fields for optional_field in self.optional_fields: if hasattr(self, optional_field): body['server'][optional_field] = getattr(self, optional_field) # set password_delivery default as 'none' to prevent API from sending # emails (with credentials) about each created server if not hasattr(self, 'password_delivery'): body['server']['password_delivery'] = 'none' # collect storage devices and create a unique title (see: Storage.title in API doc) # for each of them body['server']['storage_devices'] = { 'storage_device': [] } storage_title_id = 0 # running number for unique storage titles for storage in self.storage_devices: if not hasattr(storage, 'os') or storage.os is None: storage_title_id += 1 storage_body = storage.to_dict() # setup default titles for storages unless the user has specified # them at storage.title if not hasattr(storage, 'title') or not storage.title: if hasattr(storage, 'os') and storage.os: storage_body['title'] = self.hostname + ' OS disk' else: storage_body['title'] = self.hostname + ' storage disk ' + str(storage_title_id) # figure out the storage `action` parameter # public template if hasattr(storage, 'os') and storage.os: storage_body['action'] = 'clone' storage_body['storage'] = OperatingSystems.get_OS_UUID(storage.os) # private template elif hasattr(storage, 'uuid'): storage_body['action'] = 'clone' storage_body['storage'] = storage.uuid # create a new storage else: storage_body['action'] = 'create' body['server']['storage_devices']['storage_device'].append(storage_body) if hasattr(self, 'ip_addresses') and self.ip_addresses: body['server']['ip_addresses'] = { 'ip_address': [ ip.to_dict() for ip in self.ip_addresses ] } return body
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def to_dict(self): """ Prepare a JSON serializable dict for read-only purposes. Includes storages and IP-addresses. Use prepare_post_body for POST and .save() for PUT. """
fields = dict(vars(self).items()) if self.populated: fields['ip_addresses'] = [] fields['storage_devices'] = [] for ip in self.ip_addresses: fields['ip_addresses'].append({ 'address': ip.address, 'access': ip.access, 'family': ip.family }) for storage in self.storage_devices: fields['storage_devices'].append({ 'address': storage.address, 'storage': storage.uuid, 'storage_size': storage.size, 'storage_title': storage.title, 'type': storage.type, }) del fields['populated'] del fields['cloud_manager'] return fields
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_ip(self, access='public', addr_family=None, strict=None): """ Return the server's IP address. Params: - addr_family: IPv4, IPv6 or None. None prefers IPv4 but will return IPv6 if IPv4 addr was not available. - access: 'public' or 'private' """
if addr_family not in ['IPv4', 'IPv6', None]: raise Exception("`addr_family` must be 'IPv4', 'IPv6' or None") if access not in ['private', 'public']: raise Exception("`access` must be 'public' or 'private'") if not hasattr(self, 'ip_addresses'): self.populate() # server can have several public or private IPs ip_addrs = [ ip_addr for ip_addr in self.ip_addresses if ip_addr.access == access ] # prefer addr_family (or IPv4 if none given) preferred_family = addr_family if addr_family else 'IPv4' for ip_addr in ip_addrs: if ip_addr.family == preferred_family: return ip_addr.address # any IP (of the right access) will do if available and addr_family is None return ip_addrs[0].address if ip_addrs and not addr_family else None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _wait_for_state_change(self, target_states, update_interval=10): """ Blocking wait until target_state reached. update_interval is in seconds. Warning: state change must begin before calling this method. """
while self.state not in target_states: if self.state == 'error': raise Exception('server is in error state') # update server state every 10s sleep(update_interval) self.populate()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def stop_and_destroy(self, sync=True): """ Destroy a server and its storages. Stops the server before destroying. Syncs the server state from the API, use sync=False to disable. """
def _self_destruct(): """destroy the server and all storages attached to it.""" # try_it_n_times util is used as a convenience because # Servers and Storages can fluctuate between "maintenance" and their # original state due to several different reasons especially when # destroying infrastructure. # first destroy server try_it_n_times(operation=self.destroy, expected_error_codes=['SERVER_STATE_ILLEGAL'], custom_error='destroying server failed') # storages may be deleted instantly after server DELETE for storage in self.storage_devices: try_it_n_times(operation=storage.destroy, expected_error_codes=['STORAGE_STATE_ILLEGAL'], custom_error='destroying storage failed') if sync: self.populate() # server is either starting or stopping (or error) if self.state in ['maintenance', 'error']: self._wait_for_state_change(['stopped', 'started']) if self.state == 'started': try_it_n_times(operation=self.stop, expected_error_codes=['SERVER_STATE_ILLEGAL'], custom_error='stopping server failed') self._wait_for_state_change(['stopped']) if self.state == 'stopped': _self_destruct() else: raise Exception('unknown server state: ' + self.state)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def revert(self): """Revert the state to the version stored on disc."""
if self.filepath:
    if path.isfile(self.filepath):
        # Re-read the on-disc JSON, discarding any in-memory changes.
        serialised_file = open(self.filepath, "r")
        try:
            self.state = json.load(serialised_file)
        except ValueError:
            # File exists but holds no valid JSON (e.g. it is empty):
            # fall back to an empty state instead of failing.
            print("No JSON information could be read from the persistence file - could be empty: %s" % self.filepath)
            self.state = {}
        finally:
            # ensure the handle is closed whether or not parsing succeeded
            serialised_file.close()
    else:
        print("The persistence file has not yet been created or does not exist, so the state cannot be read from it yet.")
else:
    # NOTE(review): only the no-filepath path returns False explicitly;
    # the other paths return None — confirm callers don't rely on this.
    print("Filepath to the persistence file is not set. State cannot be read.")
    return False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def sync(self): """Synchronise and update the stored state to the in-memory state."""
if self.filepath:
    # Write the in-memory state out as JSON. A context manager
    # guarantees the handle is closed even if json.dump raises —
    # the original open()/close() pair leaked the handle on error.
    with open(self.filepath, "w") as serialised_file:
        json.dump(self.state, serialised_file)
else:
    print("Filepath to the persistence file is not set. State cannot be synced to disc.")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _require_bucket(self, bucket_name): """ Also try to create the bucket. """
if not self.exists(bucket_name) and not self.claim_bucket(bucket_name): raise OFSException("Invalid bucket: %s" % bucket_name) return self._get_bucket(bucket_name)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def del_stream(self, bucket, label): """ Will fail if the bucket or label don't exist """
# Resolve the bucket (creating it if needed), look up the key for the
# label — both raise if missing — then delete the key.
target_bucket = self._require_bucket(bucket)
self._require_key(target_bucket, label).delete()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def authenticate_request(self, method, bucket='', key='', headers=None):
    '''Authenticate a HTTP request by filling in Authorization field header.

    :param method: HTTP method (e.g. GET, PUT, POST)
    :param bucket: name of the bucket.
    :param key: name of key within bucket.
    :param headers: dictionary of additional HTTP headers.

    :return: boto.connection.HTTPRequest object with Authorization header
    filled (NB: will also have a Date field if none before and a User-Agent
    field will be set to Boto).
    '''
    # Mirrors what S3Connection.make_request does internally (via
    # AWSAuthConnection.make_request), but stops after signing so the
    # caller gets the authorized request object instead of a response.
    calling_format = self.conn.calling_format
    path = calling_format.build_path_base(bucket, key)
    auth_path = calling_format.build_auth_path(bucket, key)
    request = boto.connection.AWSAuthConnection.build_base_http_request(
        self.conn, method, path, auth_path, {}, headers
    )
    # Signing mutates the request in place (adds Authorization, Date, UA).
    request.authorize(connection=self.conn)
    return request
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_resources_to_check(client_site_url, apikey): """Return a list of resource IDs to check for broken links. Calls the client site's API to get a list of resource IDs. :raises CouldNotGetResourceIDsError: if getting the resource IDs fails for any reason """
url = client_site_url + u"deadoralive/get_resources_to_check" response = requests.get(url, headers=dict(Authorization=apikey)) if not response.ok: raise CouldNotGetResourceIDsError( u"Couldn't get resource IDs to check: {code} {reason}".format( code=response.status_code, reason=response.reason)) return response.json()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_url_for_id(client_site_url, apikey, resource_id): """Return the URL for the given resource ID. Contacts the client site's API to get the URL for the ID and returns it. :raises CouldNotGetURLError: if getting the URL fails for any reason """
# TODO: Handle invalid responses from the client site. url = client_site_url + u"deadoralive/get_url_for_resource_id" params = {"resource_id": resource_id} response = requests.get(url, headers=dict(Authorization=apikey), params=params) if not response.ok: raise CouldNotGetURLError( u"Couldn't get URL for resource {id}: {code} {reason}".format( id=resource_id, code=response.status_code, reason=response.reason)) return response.json()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def check_url(url): """Check whether the given URL is dead or alive. Returns a dict with four keys: "url": The URL that was checked (string) "alive": Whether the URL was working, True or False "status": The HTTP status code of the response from the URL, e.g. 200, 401, 500 (int) "reason": The reason for the success or failure of the check, e.g. "OK", "Unauthorized", "Internal Server Error" (string) The "status" may be None if we did not get a valid HTTP response, e.g. in the event of a timeout, DNS failure or invalid HTTP response. The "reason" will always be a string, but may be a requests library exception string rather than an HTTP reason string if we did not get a valid HTTP response. """
result = {"url": url} try: response = requests.get(url) result["status"] = response.status_code result["reason"] = response.reason response.raise_for_status() # Raise if status_code is not OK. result["alive"] = True except AttributeError as err: if err.message == "'NoneType' object has no attribute 'encode'": # requests seems to throw these for some invalid URLs. result["alive"] = False result["reason"] = "Invalid URL" result["status"] = None else: raise except requests.exceptions.RequestException as err: result["alive"] = False if "reason" not in result: result["reason"] = str(err) if "status" not in result: # This can happen if the response is invalid HTTP, if we get a DNS # failure, or a timeout, etc. result["status"] = None # We should always have these four fields in the result. assert "url" in result assert result.get("alive") in (True, False) assert "status" in result assert "reason" in result return result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def upsert_result(client_site_url, apikey, resource_id, result): """Post the given link check result to the client site."""
# TODO: Handle exceptions and unexpected results.
endpoint = client_site_url + u"deadoralive/upsert"
# Send a copy of the result with the resource ID folded in; the
# caller's dict is left untouched.
payload = dict(result, resource_id=resource_id)
requests.post(endpoint, headers={"Authorization": apikey}, params=payload)