def send_payload(self, params):
    """Performs the actual sending action and returns the result."""
    data = json.dumps({
        'jsonrpc': self.version,
        'method': self.service_name,
        'params': params,
        'id': text_type(uuid.uuid4())
    })
    data_binary = data.encode('utf-8')
    url_request = Request(self.service_url, data_binary, headers=self.headers)
    return urlopen(url_request).read()
def make_response(self, rv):
    """Converts the return value from a view function to a real
    response object that is an instance of :attr:`response_class`.
    """
    status_or_headers = headers = None
    if isinstance(rv, tuple):
        rv, status_or_headers, headers = rv + (None,) * (3 - len(rv))
    if rv is None:
        raise ValueError('View function did not return a response')
    if isinstance(status_or_headers, (dict, list)):
        headers, status_or_headers = status_or_headers, None
    D = json.loads(extract_raw_data_request(request))
    if type(D) is list:
        raise InvalidRequestError('JSON-RPC batch with decorator (make_response) is not supported')
    else:
        response_obj = self.empty_response(version=D['jsonrpc'])
        response_obj['id'] = D['id']
        response_obj['result'] = rv
        response_obj.pop('error', None)
        rv = jsonify(response_obj)
    if status_or_headers is not None:
        if isinstance(status_or_headers, string_types):
            rv.status = status_or_headers
        else:
            rv.status_code = status_or_headers
    if headers:
        rv.headers.extend(headers)
    return rv
def json_rpc_format(self):
    """Return the Exception data in a format for JSON-RPC."""
    error = {
        'name': text_type(self.__class__.__name__),
        'code': self.code,
        'message': '{0}'.format(text_type(self.message)),
        'data': self.data
    }
    if current_app.config['DEBUG']:
        import sys
        import traceback
        error['stack'] = traceback.format_exc()
        error['executable'] = sys.executable
    return error
def from_file(cls, file):
    """Try loading given config file.

    :param str file: full path to the config file to load
    """
    if not os.path.exists(file):
        raise ValueError("Config file not found.")
    try:
        config_parser = configparser.ConfigParser()
        config_parser.read(file)
        configuration = cls(file, config_parser)
        if not configuration.check_config_sanity():
            raise ValueError("Error in config file.")
        else:
            return configuration
    except configparser.Error:
        raise ValueError("Config file is invalid.")
def discover(cls):
    """Make a guess about the config file location and try loading it."""
    file = os.path.join(Config.config_dir, Config.config_name)
    return cls.from_file(file)
def create_config(cls, cfgfile, nick, twtfile, twturl, disclose_identity, add_news):
    """Create a new config file at the default location.

    :param str cfgfile: path to the config file
    :param str nick: nickname to use for own tweets
    :param str twtfile: path to the local twtxt file
    :param str twturl: URL to the remote twtxt file
    :param bool disclose_identity: if true the user's id will be disclosed
    :param bool add_news: if true follow the twtxt news feed
    """
    cfgfile_dir = os.path.dirname(cfgfile)
    if not os.path.exists(cfgfile_dir):
        os.makedirs(cfgfile_dir)
    cfg = configparser.ConfigParser()
    cfg.add_section("twtxt")
    cfg.set("twtxt", "nick", nick)
    cfg.set("twtxt", "twtfile", twtfile)
    cfg.set("twtxt", "twturl", twturl)
    cfg.set("twtxt", "disclose_identity", str(disclose_identity))
    cfg.set("twtxt", "character_limit", "140")
    cfg.set("twtxt", "character_warning", "140")
    cfg.add_section("following")
    if add_news:
        cfg.set("following", "twtxt", "https://buckket.org/twtxt_news.txt")
    conf = cls(cfgfile, cfg)
    conf.write_config()
    return conf
def write_config(self):
    """Writes `self.cfg` to `self.config_file`."""
    with open(self.config_file, "w") as config_file:
        self.cfg.write(config_file)
def following(self):
    """A :class:`list` of all :class:`Source` objects."""
    following = []
    try:
        for (nick, url) in self.cfg.items("following"):
            source = Source(nick, url)
            following.append(source)
    except configparser.NoSectionError as e:
        logger.debug(e)
    return following
def options(self):
    """A :class:`dict` of all config options."""
    try:
        return dict(self.cfg.items("twtxt"))
    except configparser.NoSectionError as e:
        logger.debug(e)
        return {}
def add_source(self, source):
    """Adds a new :class:`Source` to the config’s following section."""
    if not self.cfg.has_section("following"):
        self.cfg.add_section("following")
    self.cfg.set("following", source.nick, source.url)
    self.write_config()
def get_source_by_nick(self, nick):
    """Returns the :class:`Source` of the given nick.

    :param str nick: nickname to search for in the config
    """
    url = self.cfg.get("following", nick, fallback=None)
    return Source(nick, url) if url else None
def remove_source_by_nick(self, nick):
    """Removes a :class:`Source` from the config’s following section.

    :param str nick: nickname to search for in the config
    """
    if not self.cfg.has_section("following"):
        return False
    ret_val = self.cfg.remove_option("following", nick)
    self.write_config()
    return ret_val
def build_default_map(self):
    """Maps config options to the default values used by click, returns :class:`dict`."""
    default_map = {
        "following": {
            "check": self.check_following,
            "timeout": self.timeout,
            "porcelain": self.porcelain,
        },
        "tweet": {
            "twtfile": self.twtfile,
        },
        "timeline": {
            "pager": self.use_pager,
            "cache": self.use_cache,
            "limit": self.limit_timeline,
            "timeout": self.timeout,
            "sorting": self.sorting,
            "porcelain": self.porcelain,
            "twtfile": self.twtfile,
            "update_interval": self.timeline_update_interval,
        },
        "view": {
            "pager": self.use_pager,
            "cache": self.use_cache,
            "limit": self.limit_timeline,
            "timeout": self.timeout,
            "sorting": self.sorting,
            "porcelain": self.porcelain,
            "update_interval": self.timeline_update_interval,
        }
    }
    return default_map
def check_config_sanity(self):
    """Checks if the given values in the config file are sane."""
    is_sane = True
    # This also extracts some properties which cannot fail, like "nick",
    # but it is definitely better than writing the property names out
    # as string literals.
    properties = [property_name for property_name, obj
                  in self.__class__.__dict__.items()
                  if isinstance(obj, property)]
    for property_name in properties:
        try:
            getattr(self, property_name)
        except ValueError as e:
            click.echo("✗ Config error on {0} - {1}".format(property_name, e))
            is_sane = False
    return is_sane
def validate_config_key(ctx, param, value):
    """Validate a configuration key according to `section.item`."""
    if not value:
        return value
    try:
        section, item = value.split(".", 1)
    except ValueError:
        raise click.BadArgumentUsage("Given key does not contain a section name.")
    else:
        return section, item
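As a quick illustration of the `section.item` convention handled above (a standalone sketch, independent of click, with a hypothetical helper name): the key is split on the first dot only, so item names may themselves contain dots.

def split_config_key(value):  # hypothetical helper, mirrors the split above
    section, item = value.split(".", 1)  # raises ValueError if there is no dot
    return section, item

print(split_config_key("twtxt.nick"))         # ('twtxt', 'nick')
print(split_config_key("following.bob.url"))  # ('following', 'bob.url')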
def expand_mentions(text, embed_names=True):
    """Searches the given text for mentions and expands them.

    For example:
    "@source.nick" will be expanded to "@<source.nick source.url>".
    """
    if embed_names:
        mention_format = "@<{name} {url}>"
    else:
        mention_format = "@<{url}>"

    def handle_mention(match):
        source = get_source_by_name(match.group(1))
        if source is None:
            return "@{0}".format(match.group(1))
        return mention_format.format(
            name=source.nick,
            url=source.url)

    return short_mention_re.sub(handle_mention, text)
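A minimal self-contained sketch of the expansion behaviour; the regex and the source lookup below are simplified stand-ins for illustration, not twtxt's actual `short_mention_re` or config lookup.

import re

short_mention_re = re.compile(r"@(\w+)")            # simplified stand-in
sources = {"bob": "http://example.org/twtxt.txt"}   # assumed lookup table

def expand(text):
    def handle_mention(match):
        name = match.group(1)
        if name not in sources:
            return "@{0}".format(name)  # unknown nick: leave untouched
        return "@<{0} {1}>".format(name, sources[name])
    return short_mention_re.sub(handle_mention, text)

print(expand("hello @bob and @alice"))
# hello @<bob http://example.org/twtxt.txt> and @alice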
def format_mentions(text, format_callback=format_mention):
    """Searches the given text for mentions generated by `expand_mentions()`
    and returns a human-readable form.

    For example:
    "@<bob http://example.org/twtxt.txt>" will result in "@bob"

    If you follow a source: source.nick will be bold
    If you are the mentioned source: source.nick will be bold and coloured
    If neither of the above is true: nick will be unstyled
    If neither of the above is true and nick is not given: url will be used
    """
    def handle_mention(match):
        name, url = match.groups()
        return format_callback(name, url)

    return mention_re.sub(handle_mention, text)
def make_aware(dt):
    """Appends tzinfo and assumes UTC, if datetime object has no tzinfo already."""
    return dt if dt.tzinfo else dt.replace(tzinfo=timezone.utc)
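A quick check of the behaviour, assuming `make_aware` is in scope: naive datetimes get UTC attached, aware ones pass through unchanged.

from datetime import datetime, timezone

naive = datetime(2016, 2, 4, 13, 30)
aware = datetime(2016, 2, 4, 13, 30, tzinfo=timezone.utc)

print(make_aware(naive).isoformat())  # 2016-02-04T13:30:00+00:00
print(make_aware(aware) is aware)     # True - already aware, returned unchanged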
def parse_tweets(raw_tweets, source, now=None):
    """
    Parses a list of raw tweet lines from a twtxt file
    and returns a list of :class:`Tweet` objects.

    :param list raw_tweets: list of raw tweet lines
    :param Source source: the source of the given tweets
    :param Datetime now: the current datetime

    :returns: a list of parsed :class:`Tweet` objects
    :rtype: list
    """
    if now is None:
        now = datetime.now(timezone.utc)

    tweets = []
    for line in raw_tweets:
        try:
            tweet = parse_tweet(line, source, now)
        except (ValueError, OverflowError) as e:
            logger.debug("{0} - {1}".format(source.url, e))
        else:
            tweets.append(tweet)

    return tweets
def parse_tweet(raw_tweet, source, now=None):
    """
    Parses a single raw tweet line from a twtxt file
    and returns a :class:`Tweet` object.

    :param str raw_tweet: a single raw tweet line
    :param Source source: the source of the given tweet
    :param Datetime now: the current datetime

    :returns: the parsed tweet
    :rtype: Tweet
    """
    if now is None:
        now = datetime.now(timezone.utc)

    raw_created_at, text = raw_tweet.split("\t", 1)
    created_at = parse_iso8601(raw_created_at)

    if created_at > now:
        raise ValueError("Tweet is from the future")

    return Tweet(click.unstyle(text.strip()), created_at, source)
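For reference, the twtxt line format parsed above is an ISO 8601 timestamp, a tab, then the tweet text. A self-contained sketch, using `datetime.fromisoformat` as a stand-in for the project's `parse_iso8601`:

from datetime import datetime

raw = "2016-02-04T13:30:00+00:00\tHello twtxt world!"
raw_created_at, text = raw.split("\t", 1)  # split on the first tab only
created_at = datetime.fromisoformat(raw_created_at)
print(created_at.isoformat(), "-", text.strip())
# 2016-02-04T13:30:00+00:00 - Hello twtxt world!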
def from_file(cls, file, *args, **kwargs):
    """Try loading given cache file."""
    try:
        cache = shelve.open(file)
        return cls(file, cache, *args, **kwargs)
    except OSError as e:
        logger.debug("Loading {0} failed".format(file))
        raise e
def discover(cls, *args, **kwargs):
    """Make a guess about the cache file location and try loading it."""
    file = os.path.join(Cache.cache_dir, Cache.cache_name)
    return cls.from_file(file, *args, **kwargs)
def is_cached(self, url):
    """Checks if specified URL is cached."""
    try:
        return url in self.cache
    except TypeError:
        return False
def add_tweets(self, url, last_modified, tweets):
    """Adds new tweets to the cache."""
    try:
        self.cache[url] = {"last_modified": last_modified, "tweets": tweets}
        self.mark_updated()
        return True
    except TypeError:
        return False
def get_tweets(self, url, limit=None):
    """Retrieves tweets from the cache."""
    try:
        tweets = self.cache[url]["tweets"]
        self.mark_updated()
        return sorted(tweets, reverse=True)[:limit]
    except KeyError:
        return []
def remove_tweets(self, url):
    """Tries to remove cached tweets."""
    try:
        del self.cache[url]
        self.mark_updated()
        return True
    except KeyError:
        return False
def cli(ctx, config, verbose):
    """Decentralised, minimalist microblogging service for hackers."""
    init_logging(debug=verbose)

    if ctx.invoked_subcommand == "quickstart":
        return  # Skip initializing config file

    try:
        if config:
            conf = Config.from_file(config)
        else:
            conf = Config.discover()
    except ValueError as e:
        if "Error in config file." in str(e):
            click.echo("✗ Please correct the errors mentioned above and run twtxt again.")
        else:
            click.echo("✗ Config file not found or not readable. You may want to run twtxt quickstart.")
        sys.exit()

    ctx.default_map = conf.build_default_map()
    ctx.obj = {'conf': conf}
def tweet(ctx, created_at, twtfile, text):
    """Append a new tweet to your twtxt file."""
    text = expand_mentions(text)
    tweet = Tweet(text, created_at) if created_at else Tweet(text)

    pre_tweet_hook = ctx.obj["conf"].pre_tweet_hook
    if pre_tweet_hook:
        run_pre_tweet_hook(pre_tweet_hook, ctx.obj["conf"].options)

    if not add_local_tweet(tweet, twtfile):
        click.echo("✗ Couldn’t write to file.")
    else:
        post_tweet_hook = ctx.obj["conf"].post_tweet_hook
        if post_tweet_hook:
            run_post_tweet_hook(post_tweet_hook, ctx.obj["conf"].options)
def timeline(ctx, pager, limit, twtfile, sorting, timeout, porcelain, source, cache, force_update):
    """Retrieve your personal timeline."""
    if source:
        source_obj = ctx.obj["conf"].get_source_by_nick(source)
        if not source_obj:
            logger.debug("Not following {0}, trying as URL".format(source))
            source_obj = Source(source, source)
        sources = [source_obj]
    else:
        sources = ctx.obj["conf"].following

    tweets = []

    if cache:
        try:
            with Cache.discover(update_interval=ctx.obj["conf"].timeline_update_interval) as cache:
                force_update = force_update or not cache.is_valid
                if force_update:
                    tweets = get_remote_tweets(sources, limit, timeout, cache)
                else:
                    logger.debug("Multiple calls to 'timeline' within {0} seconds. Skipping update".format(
                        cache.update_interval))
                    # Behold, almighty list comprehensions! (I might have gone overboard here…)
                    tweets = list(chain.from_iterable([cache.get_tweets(source.url) for source in sources]))
        except OSError as e:
            logger.debug(e)
            tweets = get_remote_tweets(sources, limit, timeout)
    else:
        tweets = get_remote_tweets(sources, limit, timeout)

    if twtfile and not source:
        source = Source(ctx.obj["conf"].nick, ctx.obj["conf"].twturl, file=twtfile)
        tweets.extend(get_local_tweets(source, limit))

    if not tweets:
        return

    tweets = sort_and_truncate_tweets(tweets, sorting, limit)

    if pager:
        click.echo_via_pager(style_timeline(tweets, porcelain))
    else:
        click.echo(style_timeline(tweets, porcelain))
def following(ctx, check, timeout, porcelain):
    """Return the list of sources you’re following."""
    sources = ctx.obj['conf'].following

    if check:
        sources = get_remote_status(sources, timeout)
        for (source, status) in sources:
            click.echo(style_source_with_status(source, status, porcelain))
    else:
        sources = sorted(sources, key=lambda source: source.nick)
        for source in sources:
            click.echo(style_source(source, porcelain))
def follow(ctx, nick, url, force):
    """Add a new source to your followings."""
    source = Source(nick, url)
    sources = ctx.obj['conf'].following

    if not force:
        if source.nick in (source.nick for source in sources):
            click.confirm("➤ You’re already following {0}. Overwrite?".format(
                click.style(source.nick, bold=True)), default=False, abort=True)

        _, status = get_remote_status([source])[0]
        if not status or status.status_code != 200:
            click.confirm("➤ The feed of {0} at {1} is not available. Follow anyway?".format(
                click.style(source.nick, bold=True),
                click.style(source.url, bold=True)), default=False, abort=True)

    ctx.obj['conf'].add_source(source)
    click.echo("✓ You’re now following {0}.".format(
        click.style(source.nick, bold=True)))
def unfollow(ctx, nick):
    """Remove an existing source from your followings."""
    source = ctx.obj['conf'].get_source_by_nick(nick)

    try:
        with Cache.discover() as cache:
            cache.remove_tweets(source.url)
    except OSError as e:
        logger.debug(e)

    ret_val = ctx.obj['conf'].remove_source_by_nick(nick)
    if ret_val:
        click.echo("✓ You’ve unfollowed {0}.".format(
            click.style(source.nick, bold=True)))
    else:
        click.echo("✗ You’re not following {0}.".format(
            click.style(nick, bold=True)))
def quickstart():
    """Quickstart wizard for setting up twtxt."""
    width = click.get_terminal_size()[0]
    width = width if width <= 79 else 79

    click.secho("twtxt - quickstart", fg="cyan")
    click.secho("==================", fg="cyan")
    click.echo()

    help_text = "This wizard will generate a basic configuration file for twtxt with all mandatory options set. " \
                "You can change all of these later with either twtxt itself or by editing the config file manually. " \
                "Have a look at the docs to get information about the other available options and their meaning."
    click.echo(textwrap.fill(help_text, width))
    click.echo()

    nick = click.prompt("➤ Please enter your desired nick", default=os.environ.get("USER", ""))

    def overwrite_check(path):
        if os.path.isfile(path):
            click.confirm("➤ '{0}' already exists. Overwrite?".format(path), abort=True)

    cfgfile = click.prompt("➤ Please enter the desired location for your config file",
                           os.path.join(Config.config_dir, Config.config_name),
                           type=click.Path(readable=True, writable=True, file_okay=True))
    cfgfile = os.path.expanduser(cfgfile)
    overwrite_check(cfgfile)

    twtfile = click.prompt("➤ Please enter the desired location for your twtxt file",
                           os.path.expanduser("~/twtxt.txt"),
                           type=click.Path(readable=True, writable=True, file_okay=True))
    twtfile = os.path.expanduser(twtfile)
    overwrite_check(twtfile)

    twturl = click.prompt("➤ Please enter the URL your twtxt file will be accessible from",
                          default="https://example.org/twtxt.txt")

    disclose_identity = click.confirm("➤ Do you want to disclose your identity? Your nick and URL will be shared when "
                                      "making HTTP requests", default=False)

    click.echo()
    add_news = click.confirm("➤ Do you want to follow the twtxt news feed?", default=True)

    conf = Config.create_config(cfgfile, nick, twtfile, twturl, disclose_identity, add_news)

    twtfile_dir = os.path.dirname(twtfile)
    if not os.path.exists(twtfile_dir):
        os.makedirs(twtfile_dir)
    open(twtfile, "a").close()

    click.echo()
    click.echo("✓ Created config file at '{0}'.".format(click.format_filename(conf.config_file)))
    click.echo("✓ Created twtxt file at '{0}'.".format(click.format_filename(twtfile)))
def config(ctx, key, value, remove, edit):
    """Get or set config item."""
    conf = ctx.obj["conf"]

    if not edit and not key:
        raise click.BadArgumentUsage("You have to specify either a key or use --edit.")

    if edit:
        return click.edit(filename=conf.config_file)

    if remove:
        try:
            conf.cfg.remove_option(key[0], key[1])
        except Exception as e:
            logger.debug(e)
        else:
            conf.write_config()
        return

    if not value:
        try:
            click.echo(conf.cfg.get(key[0], key[1]))
        except Exception as e:
            logger.debug(e)
        return

    if not conf.cfg.has_section(key[0]):
        conf.cfg.add_section(key[0])

    conf.cfg.set(key[0], key[1], value)
    conf.write_config()
def relative_datetime(self):
    """Return human-readable relative time string."""
    now = datetime.now(timezone.utc)
    tense = "from now" if self.created_at > now else "ago"
    return "{0} {1}".format(humanize.naturaldelta(now - self.created_at), tense)
def save(url, *args, **kwargs):
    """
    Parse the options, set defaults and then fire up PhantomJS.
    """
    device = heimdallDevice(kwargs.get('device', None))

    kwargs['width'] = kwargs.get('width', None) or device.width
    kwargs['height'] = kwargs.get('height', None) or device.height
    kwargs['user_agent'] = kwargs.get('user_agent', None) or device.user_agent

    screenshot_image = screenshot(url, **kwargs)

    if kwargs.get('optimize'):
        image = Image.open(screenshot_image.path)
        image.save(screenshot_image.path, optimize=True)

    return screenshot_image
def screenshot(url, *args, **kwargs):
    """
    Call PhantomJS with the specified flags and options.
    """
    phantomscript = os.path.join(os.path.dirname(__file__),
                                 'take_screenshot.js')

    directory = kwargs.get('save_dir', '/tmp')
    image_name = kwargs.get('image_name', None) or _image_name_from_url(url)
    ext = kwargs.get('format', 'png').lower()
    save_path = os.path.join(directory, image_name) + '.' + ext
    crop_to_visible = kwargs.get('crop_to_visible', False)

    cmd_args = [
        'phantomjs',
        '--ssl-protocol=any',
        phantomscript,
        url,
        '--width', str(kwargs['width']),
        '--height', str(kwargs['height']),
        '--useragent', str(kwargs['user_agent']),
        '--dir', directory,
        '--ext', ext,
        '--name', str(image_name),
    ]
    if crop_to_visible:
        cmd_args.append('--croptovisible')

    # TODO:
    # - quality
    # - renderafter
    # - maxexecutiontime
    # - resourcetimeout

    output = subprocess.Popen(cmd_args, stdout=subprocess.PIPE).communicate()[0]

    return Screenshot(save_path, directory, image_name + '.' + ext, ext)
def _image_name_from_url(url):
    """
    Create a nice image name from the url.
    """
    find = r'https?://|[^\w]'
    replace = '_'
    return re.sub(find, replace, url).strip('_')
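Since the function is pure, the substitution is easy to check: the scheme and every non-word character collapse to underscores, and leading or trailing underscores are stripped. A runnable copy for illustration:

import re

def _image_name_from_url(url):  # copied from above for a runnable demo
    return re.sub(r'https?://|[^\w]', '_', url).strip('_')

print(_image_name_from_url("https://example.org/blog/post-1"))
# example_org_blog_post_1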
def worker(f):
    """
    Decorator. Abortable worker. If the wrapped task is cancelled by the
    dispatcher, the decorator sends the FTP codes for a successful
    interrupt.

    ::

        >>> @worker
        ... async def worker(self, connection, rest):
        ...     ...

    """
    @functools.wraps(f)
    async def wrapper(cls, connection, rest):
        try:
            await f(cls, connection, rest)
        except asyncio.CancelledError:
            connection.response("426", "transfer aborted")
            connection.response("226", "abort successful")

    return wrapper
def get_permissions(self, path):
    """
    Return nearest parent permission for `path`.

    :param path: path which permission you want to know
    :type path: :py:class:`str` or :py:class:`pathlib.PurePosixPath`

    :rtype: :py:class:`aioftp.Permission`
    """
    path = pathlib.PurePosixPath(path)
    parents = filter(lambda p: p.is_parent(path), self.permissions)
    perm = min(
        parents,
        key=lambda p: len(path.relative_to(p.path).parts),
        default=Permission(),
    )
    return perm
def release(self):
    """
    Release, incrementing the internal counter by one.
    """
    if self.value is not None:
        self.value += 1
        if self.value > self.maximum_value:
            raise ValueError("Too many releases")
async def start(self, host=None, port=0, **kwargs):
    """
    :py:func:`asyncio.coroutine`

    Start server.

    :param host: ip address to bind for listening.
    :type host: :py:class:`str`

    :param port: port number to bind for listening.
    :type port: :py:class:`int`

    :param kwargs: keyword arguments passed to
        :py:func:`asyncio.start_server`
    """
    self._start_server_extra_arguments = kwargs
    self.connections = {}
    self.server_host = host
    self.server_port = port
    self.server = await asyncio.start_server(
        self.dispatcher,
        host,
        port,
        ssl=self.ssl,
        **self._start_server_extra_arguments,
    )
    for sock in self.server.sockets:
        if sock.family in (socket.AF_INET, socket.AF_INET6):
            host, port, *_ = sock.getsockname()
            if not self.server_port:
                self.server_port = port
            if not self.server_host:
                self.server_host = host
            logger.info("serving on %s:%s", host, port)
async def close(self):
    """
    :py:func:`asyncio.coroutine`

    Shutdown the server and close all connections.
    """
    self.server.close()
    tasks = [self.server.wait_closed()]
    for connection in self.connections.values():
        connection._dispatcher.cancel()
        tasks.append(connection._dispatcher)
    logger.info("waiting for %d tasks", len(tasks))
    await asyncio.wait(tasks)
async def write_response(self, stream, code, lines="", list=False):
    """
    :py:func:`asyncio.coroutine`

    Complex method for sending response.

    :param stream: command connection stream
    :type stream: :py:class:`aioftp.StreamIO`

    :param code: server response code
    :type code: :py:class:`str`

    :param lines: line or lines, which are response information
    :type lines: :py:class:`str` or :py:class:`collections.Iterable`

    :param list: if true, then lines will be sent without code prefix.
        This is useful for the **LIST** FTP command and some others.
    :type list: :py:class:`bool`
    """
    lines = wrap_with_container(lines)
    write = functools.partial(self.write_line, stream)
    if list:
        head, *body, tail = lines
        await write(code + "-" + head)
        for line in body:
            await write(" " + line)
        await write(code + " " + tail)
    else:
        *body, tail = lines
        for line in body:
            await write(code + "-" + line)
        await write(code + " " + tail)
async def parse_command(self, stream):
    """
    :py:func:`asyncio.coroutine`

    Complex method for getting command.

    :param stream: connection stream
    :type stream: :py:class:`asyncio.StreamIO`

    :return: (code, rest)
    :rtype: (:py:class:`str`, :py:class:`str`)
    """
    line = await stream.readline()
    if not line:
        raise ConnectionResetError
    s = line.decode(encoding=self.encoding).rstrip()
    logger.info(s)
    cmd, _, rest = s.partition(" ")
    return cmd.lower(), rest
async def response_writer(self, stream, response_queue):
    """
    :py:func:`asyncio.coroutine`

    Worker for write_response on the current connection. Gets response
    data from the queue to preserve the correct order of responses.
    Exits if :py:class:`None` is received.

    :param stream: command connection stream
    :type stream: :py:class:`aioftp.StreamIO`

    :param response_queue:
    :type response_queue: :py:class:`asyncio.Queue`
    """
    while True:
        args = await response_queue.get()
        try:
            await self.write_response(stream, *args)
        finally:
            response_queue.task_done()
def get_paths(self, connection, path):
    """
    Return *real* and *virtual* paths, resolving ".." with an "up"
    action. The *real* path is the path for path_io, while the
    *virtual* path deals with the "user view" and user requests.

    :param connection: internal options for current connected user
    :type connection: :py:class:`dict`

    :param path: received path from user
    :type path: :py:class:`str` or :py:class:`pathlib.PurePosixPath`

    :return: (real_path, virtual_path)
    :rtype: (:py:class:`pathlib.Path`, :py:class:`pathlib.PurePosixPath`)
    """
    virtual_path = pathlib.PurePosixPath(path)
    if not virtual_path.is_absolute():
        virtual_path = connection.current_directory / virtual_path
    resolved_virtual_path = pathlib.PurePosixPath("/")
    for part in virtual_path.parts[1:]:
        if part == "..":
            resolved_virtual_path = resolved_virtual_path.parent
        else:
            resolved_virtual_path /= part
    base_path = connection.user.base_path
    real_path = base_path / resolved_virtual_path.relative_to("/")
    return real_path, resolved_virtual_path
def bytes2human(n, format="%(value).1f%(symbol)s"):
    """
    >>> bytes2human(10000)
    '9.8K'
    >>> bytes2human(100001221)
    '95.4M'
    """
    symbols = ('B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
    prefix = {}
    for i, s in enumerate(symbols[1:]):
        prefix[s] = 1 << (i + 1) * 10
    for symbol in reversed(symbols[1:]):
        if n >= prefix[symbol]:
            value = float(n) / prefix[symbol]
            return format % locals()
    return format % dict(symbol=symbols[0], value=n)
def human2bytes(s):
    """
    >>> human2bytes('1M')
    1048576
    >>> human2bytes('1G')
    1073741824
    """
    symbols = ('B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
    letter = s[-1:].strip().upper()
    num = s[:-1]
    assert num.isdigit() and letter in symbols, s
    num = float(num)
    prefix = {symbols[0]: 1}
    for i, symbol in enumerate(symbols[1:]):
        prefix[symbol] = 1 << (i + 1) * 10
    return int(num * prefix[letter])
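Assuming both helpers above are in scope, a short demonstration. Note the round trip is lossy, and `human2bytes` only accepts integer prefixes, so a value like '9.8K' would fail its assertion:

print(bytes2human(10000))      # 9.8K
print(bytes2human(100001221))  # 95.4M
print(human2bytes('9K'))       # 9216
print(human2bytes('1G'))       # 1073741824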
def register_memory():
    """Register an approximation of memory used by FTP server process
    and all of its children.
    """
    # XXX How to get a reliable representation of memory being used is
    # not clear. (rss - shared) seems kind of ok but we might also use
    # the private working set via get_memory_maps().private*.
    def get_mem(proc):
        if os.name == 'posix':
            mem = proc.memory_info_ex()
            counter = mem.rss
            if 'shared' in mem._fields:
                counter -= mem.shared
            return counter
        else:
            # TODO figure out what to do on Windows
            return proc.get_memory_info().rss

    if SERVER_PROC is not None:
        mem = get_mem(SERVER_PROC)
        for child in SERVER_PROC.children():
            mem += get_mem(child)
        server_memory.append(bytes2human(mem))
def timethis(what):
    """Utility function for making simple benchmarks (measures call time).
    It can be used either as a context manager or as a decorator.
    """
    @contextlib.contextmanager
    def benchmark():
        timer = time.clock if sys.platform == "win32" else time.time
        start = timer()
        yield
        stop = timer()
        res = (stop - start)
        print_bench(what, res, "secs")

    if hasattr(what, "__call__"):
        def timed(*args, **kwargs):
            with benchmark():
                return what(*args, **kwargs)
        return timed
    else:
        return benchmark()
def connect():
    """Connect to FTP server, login and return an ftplib.FTP instance."""
    ftp_class = ftplib.FTP if not SSL else ftplib.FTP_TLS
    ftp = ftp_class(timeout=TIMEOUT)
    ftp.connect(HOST, PORT)
    ftp.login(USER, PASSWORD)
    if SSL:
        ftp.prot_p()  # secure data connection
    return ftp
def retr(ftp):
    """Same as ftplib's retrbinary() but discard the received data."""
    ftp.voidcmd('TYPE I')
    with contextlib.closing(ftp.transfercmd("RETR " + TESTFN)) as conn:
        recv_bytes = 0
        while True:
            data = conn.recv(BUFFER_LEN)
            if not data:
                break
            recv_bytes += len(data)
    ftp.voidresp()
def stor(ftp=None):
    """Same as ftplib's storbinary() but just sends dummy data
    instead of reading it from a real file.
    """
    if ftp is None:
        ftp = connect()
        quit = True
    else:
        quit = False
    ftp.voidcmd('TYPE I')
    with contextlib.closing(ftp.transfercmd("STOR " + TESTFN)) as conn:
        chunk = b'x' * BUFFER_LEN
        total_sent = 0
        while True:
            sent = conn.send(chunk)
            total_sent += sent
            if total_sent >= FILE_SIZE:
                break
    ftp.voidresp()
    if quit:
        ftp.quit()
    return ftp
def bytes_per_second(ftp, retr=True):
    """Return the number of bytes transmitted in 1 second."""
    tot_bytes = 0
    if retr:
        def request_file():
            ftp.voidcmd('TYPE I')
            conn = ftp.transfercmd("retr " + TESTFN)
            return conn

        with contextlib.closing(request_file()) as conn:
            register_memory()
            stop_at = time.time() + 1.0
            while stop_at > time.time():
                chunk = conn.recv(BUFFER_LEN)
                if not chunk:
                    a = time.time()
                    ftp.voidresp()
                    conn.close()
                    conn = request_file()
                    stop_at += time.time() - a
                tot_bytes += len(chunk)

        try:
            while chunk:
                chunk = conn.recv(BUFFER_LEN)
            ftp.voidresp()
            conn.close()
        except (ftplib.error_temp, ftplib.error_perm):
            pass
    else:
        ftp.voidcmd('TYPE I')
        with contextlib.closing(ftp.transfercmd("STOR " + TESTFN)) as conn:
            register_memory()
            chunk = b'x' * BUFFER_LEN
            stop_at = time.time() + 1
            while stop_at > time.time():
                tot_bytes += conn.send(chunk)
        ftp.voidresp()
    return tot_bytes
def universal_exception(coro):
    """
    Decorator. Reraising any exception (except `CancelledError` and
    `NotImplementedError`) with universal exception
    :py:class:`aioftp.PathIOError`
    """
    @functools.wraps(coro)
    async def wrapper(*args, **kwargs):
        try:
            return await coro(*args, **kwargs)
        except (asyncio.CancelledError, NotImplementedError,
                StopAsyncIteration):
            raise
        except Exception:
            raise errors.PathIOError(reason=sys.exc_info())

    return wrapper
def defend_file_methods(coro):
    """
    Decorator. Raises an exception when file methods are called with a
    file object wrapped by :py:class:`aioftp.AsyncPathIOContext`.
    """
    @functools.wraps(coro)
    async def wrapper(self, file, *args, **kwargs):
        if isinstance(file, AsyncPathIOContext):
            raise ValueError("Native path io file methods can not be used "
                             "with wrapped file object")
        return await coro(self, file, *args, **kwargs)

    return wrapper
def async_enterable(f):
    """
    Decorator. Bring coroutine result up, so it can be used as async context

    ::

        >>> async def foo():
        ...     ...
        ...     return AsyncContextInstance(...)
        ...
        ... ctx = await foo()
        ... async with ctx:
        ...     # do

    ::

        >>> @async_enterable
        ... async def foo():
        ...     ...
        ...     return AsyncContextInstance(...)
        ...
        ... async with foo() as ctx:
        ...     # do
        ...
        ... ctx = await foo()
        ... async with ctx:
        ...     # do

    """
    @functools.wraps(f)
    def wrapper(*args, **kwargs):

        class AsyncEnterableInstance:

            async def __aenter__(self):
                self.context = await f(*args, **kwargs)
                return await self.context.__aenter__()

            async def __aexit__(self, *args, **kwargs):
                await self.context.__aexit__(*args, **kwargs)

            def __await__(self):
                return f(*args, **kwargs).__await__()

        return AsyncEnterableInstance()

    return wrapper
def setlocale(name):
    """
    Context manager (holding a threading lock) that sets the locale on
    enter and restores the original locale on exit.

    ::

        >>> with setlocale("C"):
        ...     ...
    """
    with LOCALE_LOCK:
        old_locale = locale.setlocale(locale.LC_ALL)
        try:
            yield locale.setlocale(locale.LC_ALL, name)
        finally:
            locale.setlocale(locale.LC_ALL, old_locale)
async def wait(self):
    """
    :py:func:`asyncio.coroutine`

    Wait until IO is possible again.
    """
    if self._limit is not None and self._limit > 0 and \
            self._start is not None:
        now = _now()
        end = self._start + self._sum / self._limit
        await asyncio.sleep(max(0, end - now))
def append(self, data, start):
    """
    Count `data` for throttle

    :param data: bytes of data for count
    :type data: :py:class:`bytes`

    :param start: start of read/write time from
        :py:meth:`asyncio.BaseEventLoop.time`
    :type start: :py:class:`float`
    """
    if self._limit is not None and self._limit > 0:
        if self._start is None:
            self._start = start
        if start - self._start > self.reset_rate:
            self._sum -= round((start - self._start) * self._limit)
            self._start = start
        self._sum += len(data)
def limit(self, value):
    """
    Set throttle limit

    :param value: bytes per second
    :type value: :py:class:`int` or :py:class:`None`
    """
    self._limit = value
    self._start = None
    self._sum = 0
def clone(self):
    """
    Clone throttles without memory
    """
    return StreamThrottle(
        read=self.read.clone(),
        write=self.write.clone(),
    )
def from_limits(cls, read_speed_limit=None, write_speed_limit=None):
    """
    Simple wrapper for creating :py:class:`aioftp.StreamThrottle`

    :param read_speed_limit: stream read speed limit in bytes or
        :py:class:`None` for unlimited
    :type read_speed_limit: :py:class:`int` or :py:class:`None`

    :param write_speed_limit: stream write speed limit in bytes or
        :py:class:`None` for unlimited
    :type write_speed_limit: :py:class:`int` or :py:class:`None`
    """
    return cls(read=Throttle(limit=read_speed_limit),
               write=Throttle(limit=write_speed_limit))
async def wait(self, name):
    """
    :py:func:`asyncio.coroutine`

    Wait for all throttles

    :param name: name of throttle to acquire ("read" or "write")
    :type name: :py:class:`str`
    """
    waiters = []
    for throttle in self.throttles.values():
        curr_throttle = getattr(throttle, name)
        if curr_throttle.limit:
            waiters.append(curr_throttle.wait())
    if waiters:
        await asyncio.wait(waiters)
def append(self, name, data, start):
    """
    Update timeout for all throttles

    :param name: name of throttle to append to ("read" or "write")
    :type name: :py:class:`str`

    :param data: bytes of data for count
    :type data: :py:class:`bytes`

    :param start: start of read/write time from
        :py:meth:`asyncio.BaseEventLoop.time`
    :type start: :py:class:`float`
    """
    for throttle in self.throttles.values():
        getattr(throttle, name).append(data, start)
async def read(self, count=-1):
    """
    :py:func:`asyncio.coroutine`

    :py:meth:`aioftp.StreamIO.read` proxy
    """
    await self.wait("read")
    start = _now()
    data = await super().read(count)
    self.append("read", data, start)
    return data
async def readline(self):
    """
    :py:func:`asyncio.coroutine`

    :py:meth:`aioftp.StreamIO.readline` proxy
    """
    await self.wait("read")
    start = _now()
    data = await super().readline()
    self.append("read", data, start)
    return data
async def write(self, data):
    """
    :py:func:`asyncio.coroutine`

    :py:meth:`aioftp.StreamIO.write` proxy
    """
    await self.wait("write")
    start = _now()
    await super().write(data)
    self.append("write", data, start)
def matches(self, mask):
    """
    :param mask: Template for comparison. If a mask symbol is not a
        digit then it always matches.
    :type mask: :py:class:`str`

    ::

        >>> Code("123").matches("1")
        True
        >>> Code("123").matches("1x3")
        True
    """
    return all(map(lambda m, c: not m.isdigit() or m == c, mask, self))
async def finish(self, expected_codes="2xx", wait_codes="1xx"):
    """
    :py:func:`asyncio.coroutine`

    Close connection and wait for `expected_codes` response from server
    passing `wait_codes`.

    :param expected_codes: tuple of expected codes or expected code
    :type expected_codes: :py:class:`tuple` of :py:class:`str` or
        :py:class:`str`

    :param wait_codes: tuple of wait codes or wait code
    :type wait_codes: :py:class:`tuple` of :py:class:`str` or
        :py:class:`str`
    """
    self.close()
    await self.client.command(None, expected_codes, wait_codes)
async def parse_line(self):
    """
    :py:func:`asyncio.coroutine`

    Parsing server response line.

    :return: (code, line)
    :rtype: (:py:class:`aioftp.Code`, :py:class:`str`)

    :raises ConnectionResetError: if received data is empty (this
        means that the connection is closed)
    :raises asyncio.TimeoutError: if there was no data for the
        `timeout` period
    """
    line = await self.stream.readline()
    if not line:
        self.stream.close()
        raise ConnectionResetError
    s = line.decode(encoding=self.encoding).rstrip()
    logger.info(s)
    return Code(s[:3]), s[3:]
async def parse_response(self):
    """
    :py:func:`asyncio.coroutine`

    Parsing full server response (all lines).

    :return: (code, lines)
    :rtype: (:py:class:`aioftp.Code`, :py:class:`list` of :py:class:`str`)

    :raises aioftp.StatusCodeError: if the received code does not match
        all the codes received so far
    """
    code, rest = await self.parse_line()
    info = [rest]
    curr_code = code
    while rest.startswith("-") or not curr_code.isdigit():
        curr_code, rest = await self.parse_line()
        if curr_code.isdigit():
            info.append(rest)
            if curr_code != code:
                raise errors.StatusCodeError(code, curr_code, info)
        else:
            info.append(curr_code + rest)
    return code, info
def check_codes(self, expected_codes, received_code, info):
    """
    Checks if any expected code matches the received one.

    :param expected_codes: tuple of expected codes
    :type expected_codes: :py:class:`tuple`

    :param received_code: received code for matching
    :type received_code: :py:class:`aioftp.Code`

    :param info: list of response lines from server
    :type info: :py:class:`list`

    :raises aioftp.StatusCodeError: if the received code does not match
        any expected code
    """
    if not any(map(received_code.matches, expected_codes)):
        raise errors.StatusCodeError(expected_codes, received_code, info)
async def command(self, command=None, expected_codes=(), wait_codes=()):
    """
    :py:func:`asyncio.coroutine`

    Basic command logic.

    1. Send command if not omitted.
    2. Yield response until no wait code matches.
    3. Check code for expected.

    :param command: command line
    :type command: :py:class:`str`

    :param expected_codes: tuple of expected codes or expected code
    :type expected_codes: :py:class:`tuple` of :py:class:`str` or
        :py:class:`str`

    :param wait_codes: tuple of wait codes or wait code
    :type wait_codes: :py:class:`tuple` of :py:class:`str` or
        :py:class:`str`
    """
    expected_codes = wrap_with_container(expected_codes)
    wait_codes = wrap_with_container(wait_codes)
    if command:
        logger.info(command)
        message = command + END_OF_LINE
        await self.stream.write(message.encode(encoding=self.encoding))
    if expected_codes or wait_codes:
        code, info = await self.parse_response()
        while any(map(code.matches, wait_codes)):
            code, info = await self.parse_response()
        if expected_codes:
            self.check_codes(expected_codes, code, info)
        return code, info
def parse_epsv_response(s):
    """
    Parsing `EPSV` (`message (|||port|)`) response.

    :param s: response line
    :type s: :py:class:`str`

    :return: (ip, port)
    :rtype: (:py:class:`None`, :py:class:`int`)
    """
    matches = tuple(re.finditer(r"\((.)\1\1\d+\1\)", s))
    s = matches[-1].group()
    port = int(s[4:-2])
    return None, port
def parse_pasv_response(s):
    """
    Parsing `PASV` server response.

    :param s: response line
    :type s: :py:class:`str`

    :return: (ip, port)
    :rtype: (:py:class:`str`, :py:class:`int`)
    """
    sub, *_ = re.findall(r"[^(]*\(([^)]*)", s)
    nums = tuple(map(int, sub.split(",")))
    ip = ".".join(map(str, nums[:4]))
    port = (nums[4] << 8) | nums[5]
    return ip, port
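Assuming the two parsers above are in scope (in aioftp they live on the client as staticmethods), here is how typical replies break down; PASV packs the port into the last two numbers as `high * 256 + low`:

print(parse_epsv_response("229 Entering Extended Passive Mode (|||6446|)"))
# (None, 6446)
print(parse_pasv_response("227 Entering Passive Mode (192,168,1,2,19,137)"))
# ('192.168.1.2', 5001) - port = 19 * 256 + 137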
def parse_directory_response(s):
    """
    Parsing directory server response.

    :param s: response line
    :type s: :py:class:`str`

    :rtype: :py:class:`pathlib.PurePosixPath`
    """
    seq_quotes = 0
    start = False
    directory = ""
    for ch in s:
        if not start:
            if ch == "\"":
                start = True
        else:
            if ch == "\"":
                seq_quotes += 1
            else:
                if seq_quotes == 1:
                    break
                elif seq_quotes == 2:
                    seq_quotes = 0
                    directory += '"'
                directory += ch
    return pathlib.PurePosixPath(directory)
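Per RFC 959 the PWD reply quotes the path and doubles any embedded quote, which is exactly what the quote-counting loop above undoes. Assuming the function is in scope:

print(parse_directory_response('257 "/foo/bar" is the current directory'))
# /foo/bar
print(parse_directory_response('257 "/with""quote" created'))
# /with"quote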
def parse_unix_mode(s):
    """
    Parsing unix mode strings ("rwxr-x--t") into an integer mode
    (conventionally written in octal notation).

    :param s: mode string
    :type s: :py:class:`str`

    :return mode:
    :rtype: :py:class:`int`
    """
    parse_rw = {"rw": 6, "r-": 4, "-w": 2, "--": 0}
    mode = 0
    mode |= parse_rw[s[0:2]] << 6
    mode |= parse_rw[s[3:5]] << 3
    mode |= parse_rw[s[6:8]]
    if s[2] == "s":
        mode |= 0o4100
    elif s[2] == "x":
        mode |= 0o0100
    elif s[2] != "-":
        raise ValueError
    if s[5] == "s":
        mode |= 0o2010
    elif s[5] == "x":
        mode |= 0o0010
    elif s[5] != "-":
        raise ValueError
    if s[8] == "t":
        mode |= 0o1000
    elif s[8] == "x":
        mode |= 0o0001
    elif s[8] != "-":
        raise ValueError
    return mode
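Assuming the function is in scope, a few checks of the bit arithmetic (printed in octal for readability):

print(oct(parse_unix_mode("rwxr-xr-x")))  # 0o755
print(oct(parse_unix_mode("rw-r--r--")))  # 0o644
print(oct(parse_unix_mode("rwxr-sr--")))  # 0o2754 - setgid bit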
def parse_ls_date(self, s, *, now=None):
    """
    Parsing dates from the ls unix utility. For example,
    "Nov 18 1958" and "Nov 18 12:29".

    :param s: ls date
    :type s: :py:class:`str`

    :rtype: :py:class:`str`
    """
    with setlocale("C"):
        try:
            if now is None:
                now = datetime.datetime.now()
            d = datetime.datetime.strptime(s, "%b %d %H:%M")
            d = d.replace(year=now.year)
            diff = (now - d).total_seconds()
            if diff > HALF_OF_YEAR_IN_SECONDS:
                d = d.replace(year=now.year + 1)
            elif diff < -HALF_OF_YEAR_IN_SECONDS:
                d = d.replace(year=now.year - 1)
        except ValueError:
            d = datetime.datetime.strptime(s, "%b %d %Y")
    return self.format_date_time(d)
def parse_list_line_unix(self, b):
    """
    Attempt to parse a LIST line (similar to unix ls utility).

    :param b: response line
    :type b: :py:class:`bytes` or :py:class:`str`

    :return: (path, info)
    :rtype: (:py:class:`pathlib.PurePosixPath`, :py:class:`dict`)
    """
    s = b.decode(encoding=self.encoding).rstrip()
    info = {}
    if s[0] == "-":
        info["type"] = "file"
    elif s[0] == "d":
        info["type"] = "dir"
    elif s[0] == "l":
        info["type"] = "link"
    else:
        info["type"] = "unknown"

    # TODO: handle symlinks (beware the symlink loop)
    info["unix.mode"] = self.parse_unix_mode(s[1:10])
    s = s[10:].lstrip()
    i = s.index(" ")
    info["unix.links"] = s[:i]
    if not info["unix.links"].isdigit():
        raise ValueError

    s = s[i:].lstrip()
    i = s.index(" ")
    info["unix.owner"] = s[:i]
    s = s[i:].lstrip()
    i = s.index(" ")
    info["unix.group"] = s[:i]
    s = s[i:].lstrip()
    i = s.index(" ")
    info["size"] = s[:i]
    if not info["size"].isdigit():
        raise ValueError

    s = s[i:].lstrip()
    info["modify"] = self.parse_ls_date(s[:12])
    s = s[12:].strip()
    if info["type"] == "link":
        i = s.rindex(" -> ")
        link_dst = s[i + 4:]
        link_src = s[:i]
        i = -2 if link_dst[-1] == "\'" or link_dst[-1] == "\"" else -1
        info["type"] = "dir" if link_dst[i] == "/" else "file"
        s = link_src
    return pathlib.PurePosixPath(s), info
def parse_list_line_windows(self, b):
    """
    Parsing Microsoft Windows `dir` output

    :param b: response line
    :type b: :py:class:`bytes` or :py:class:`str`

    :return: (path, info)
    :rtype: (:py:class:`pathlib.PurePosixPath`, :py:class:`dict`)
    """
    line = b.decode(encoding=self.encoding).rstrip("\r\n")
    date_time_end = line.index("M")
    date_time_str = line[:date_time_end + 1].strip().split(" ")
    date_time_str = " ".join([x for x in date_time_str if len(x) > 0])
    line = line[date_time_end + 1:].lstrip()
    with setlocale("C"):
        strptime = datetime.datetime.strptime
        date_time = strptime(date_time_str, "%m/%d/%Y %I:%M %p")
    info = {}
    info["modify"] = self.format_date_time(date_time)
    next_space = line.index(" ")
    if line.startswith("<DIR>"):
        info["type"] = "dir"
    else:
        info["type"] = "file"
        info["size"] = line[:next_space].replace(",", "")
        if not info["size"].isdigit():
            raise ValueError
    # This here could cause a problem if a filename started with
    # whitespace, but if we were to try to detect such a condition
    # we would have to make strong assumptions about the input format
    filename = line[next_space:].lstrip()
    if filename == "." or filename == "..":
        raise ValueError
    return pathlib.PurePosixPath(filename), info
def parse_list_line(self, b):
    """
    Parse LIST response with both the Microsoft Windows® parser and
    the UNIX parser

    :param b: response line
    :type b: :py:class:`bytes` or :py:class:`str`

    :return: (path, info)
    :rtype: (:py:class:`pathlib.PurePosixPath`, :py:class:`dict`)
    """
    ex = []
    parsers = (self.parse_list_line_unix, self.parse_list_line_windows)
    for parser in parsers:
        try:
            return parser(b)
        except (ValueError, KeyError, IndexError) as e:
            ex.append(e)
    raise ValueError("All parsers failed to parse", b, ex)
def parse_mlsx_line(self, b):
    """
    Parsing MLS(T|D) response.

    :param b: response line
    :type b: :py:class:`bytes` or :py:class:`str`

    :return: (path, info)
    :rtype: (:py:class:`pathlib.PurePosixPath`, :py:class:`dict`)
    """
    if isinstance(b, bytes):
        s = b.decode(encoding=self.encoding)
    else:
        s = b
    line = s.rstrip()
    facts_found, _, name = line.partition(" ")
    entry = {}
    for fact in facts_found[:-1].split(";"):
        key, _, value = fact.partition("=")
        entry[key.lower()] = value
    return pathlib.PurePosixPath(name), entry
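The MLSx fact parsing needs no client state, so it can be sketched inline; facts are semicolon-separated `key=value` pairs, and the name follows the first space:

line = "type=file;size=1024;modify=20240101000000; foo.txt"
facts_found, _, name = line.partition(" ")
entry = {}
for fact in facts_found[:-1].split(";"):  # [:-1] drops the trailing ";"
    key, _, value = fact.partition("=")
    entry[key.lower()] = value
print(name, entry)
# foo.txt {'type': 'file', 'size': '1024', 'modify': '20240101000000'}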
async def connect(self, host, port=DEFAULT_PORT):
    """
    :py:func:`asyncio.coroutine`

    Connect to server.

    :param host: host name for connection
    :type host: :py:class:`str`

    :param port: port number for connection
    :type port: :py:class:`int`
    """
    await super().connect(host, port)
    code, info = await self.command(None, "220", "120")
    return info
async def login(self, user=DEFAULT_USER, password=DEFAULT_PASSWORD,
                account=DEFAULT_ACCOUNT):
    """
    :py:func:`asyncio.coroutine`

    Server authentication.

    :param user: username
    :type user: :py:class:`str`

    :param password: password
    :type password: :py:class:`str`

    :param account: account (almost always blank)
    :type account: :py:class:`str`

    :raises aioftp.StatusCodeError: if unknown code received
    """
    code, info = await self.command("USER " + user, ("230", "33x"))
    while code.matches("33x"):
        if code == "331":
            cmd = "PASS " + password
        elif code == "332":
            cmd = "ACCT " + account
        else:
            raise errors.StatusCodeError("33x", code, info)
        code, info = await self.command(cmd, ("230", "33x"))
async def get_current_directory(self):
    """
    :py:func:`asyncio.coroutine`

    Getting current working directory.

    :rtype: :py:class:`pathlib.PurePosixPath`
    """
    code, info = await self.command("PWD", "257")
    directory = self.parse_directory_response(info[-1])
    return directory
async def change_directory(self, path=".."):
    """
    :py:func:`asyncio.coroutine`

    Change current directory. Goes «up» if no parameters passed.

    :param path: new directory, goes «up» if omitted
    :type path: :py:class:`str` or :py:class:`pathlib.PurePosixPath`
    """
    path = pathlib.PurePosixPath(path)
    if path == pathlib.PurePosixPath(".."):
        cmd = "CDUP"
    else:
        cmd = "CWD " + str(path)
    await self.command(cmd, "2xx")
async def make_directory(self, path, *, parents=True):
    """
    :py:func:`asyncio.coroutine`

    Make directory.

    :param path: path to directory to create
    :type path: :py:class:`str` or :py:class:`pathlib.PurePosixPath`

    :param parents: create parent directories if they do not exist
    :type parents: :py:class:`bool`
    """
    path = pathlib.PurePosixPath(path)
    need_create = []
    while path.name and not await self.exists(path):
        need_create.append(path)
        path = path.parent
        if not parents:
            break
    need_create.reverse()
    for path in need_create:
        await self.command("MKD " + str(path), "257")
def list(self, path="", *, recursive=False, raw_command=None):
    """
    :py:func:`asyncio.coroutine`

    List all files and directories in "path".

    :param path: directory or file path
    :type path: :py:class:`str` or :py:class:`pathlib.PurePosixPath`

    :param recursive: list recursively
    :type recursive: :py:class:`bool`

    :param raw_command: optional ftp command to use in place of
        fallback logic (must be one of "MLSD", "LIST")
    :type raw_command: :py:class:`str`

    :rtype: :py:class:`list` or `async for` context

    ::

        >>> # lazy list
        >>> async for path, info in client.list():
        ...     # no interaction with client should be here(!)

        >>> # eager list
        >>> for path, info in (await client.list()):
        ...     # interaction with client allowed, since all paths are
        ...     # collected already

    ::

        >>> stats = await client.list()
    """
    class AsyncLister(AsyncListerMixin):
        stream = None

        async def _new_stream(cls, local_path):
            cls.path = local_path
            cls.parse_line = self.parse_mlsx_line
            if raw_command not in [None, "MLSD", "LIST"]:
                raise ValueError("raw_command must be one of MLSD or "
                                 f"LIST, but got {raw_command}")
            if raw_command in [None, "MLSD"]:
                try:
                    command = ("MLSD " + str(cls.path)).strip()
                    return await self.get_stream(command, "1xx")
                except errors.StatusCodeError as e:
                    code = e.received_codes[-1]
                    if not code.matches("50x") or raw_command is not None:
                        raise
            if raw_command in [None, "LIST"]:
                cls.parse_line = self.parse_list_line
                command = ("LIST " + str(cls.path)).strip()
                return await self.get_stream(command, "1xx")

        def __aiter__(cls):
            cls.directories = collections.deque()
            return cls

        async def __anext__(cls):
            if cls.stream is None:
                cls.stream = await cls._new_stream(path)
            while True:
                line = await cls.stream.readline()
                while not line:
                    await cls.stream.finish()
                    if cls.directories:
                        current_path, info = cls.directories.popleft()
                        cls.stream = await cls._new_stream(current_path)
                        line = await cls.stream.readline()
                    else:
                        raise StopAsyncIteration
                try:
                    name, info = cls.parse_line(line)
                except Exception:
                    continue
                stat = cls.path / name, info
                if info["type"] == "dir" and recursive:
                    cls.directories.append(stat)
                return stat

    return AsyncLister()
async def stat(self, path):
    """
    :py:func:`asyncio.coroutine`

    Getting path stats.

    :param path: path for getting info
    :type path: :py:class:`str` or :py:class:`pathlib.PurePosixPath`

    :return: path info
    :rtype: :py:class:`dict`
    """
    path = pathlib.PurePosixPath(path)
    try:
        code, info = await self.command("MLST " + str(path), "2xx")
        name, info = self.parse_mlsx_line(info[1].lstrip())
        return info
    except errors.StatusCodeError as e:
        if not e.received_codes[-1].matches("50x"):
            raise

    for p, info in await self.list(path.parent):
        if p.name == path.name:
            return info
    else:
        raise errors.StatusCodeError(
            Code("2xx"),
            Code("550"),
            "path does not exist",
        )
async def exists(self, path):
    """
    :py:func:`asyncio.coroutine`

    Check path for existence.

    :param path: path to check
    :type path: :py:class:`str` or :py:class:`pathlib.PurePosixPath`

    :rtype: :py:class:`bool`
    """
    try:
        await self.stat(path)
        return True
    except errors.StatusCodeError as e:
        if e.received_codes[-1].matches("550"):
            return False
        raise
async def rename(self, source, destination):
    """
    :py:func:`asyncio.coroutine`

    Rename (move) file or directory.

    :param source: path to rename
    :type source: :py:class:`str` or :py:class:`pathlib.PurePosixPath`

    :param destination: path new name
    :type destination: :py:class:`str` or
        :py:class:`pathlib.PurePosixPath`
    """
    await self.command("RNFR " + str(source), "350")
    await self.command("RNTO " + str(destination), "2xx")
async def remove(self, path):
    """
    :py:func:`asyncio.coroutine`

    High level remove method for removing path recursively (file or
    directory).

    :param path: path to remove
    :type path: :py:class:`str` or :py:class:`pathlib.PurePosixPath`
    """
    if await self.exists(path):
        info = await self.stat(path)
        if info["type"] == "file":
            await self.remove_file(path)
        elif info["type"] == "dir":
            for name, info in (await self.list(path)):
                if info["type"] in ("dir", "file"):
                    await self.remove(name)
            await self.remove_directory(path)
def upload_stream(self, destination, *, offset=0):
    """
    Create stream for writing data to the `destination` file.

    :param destination: destination path of file on server side
    :type destination: :py:class:`str` or
        :py:class:`pathlib.PurePosixPath`

    :param offset: byte offset for stream start position
    :type offset: :py:class:`int`

    :rtype: :py:class:`aioftp.DataConnectionThrottleStreamIO`
    """
    return self.get_stream(
        "STOR " + str(destination),
        "1xx",
        offset=offset,
    )
def append_stream(self, destination, *, offset=0):
    """
    Create stream for appending (writing) data to the `destination` file.

    :param destination: destination path of file on server side
    :type destination: :py:class:`str` or
        :py:class:`pathlib.PurePosixPath`

    :param offset: byte offset for stream start position
    :type offset: :py:class:`int`

    :rtype: :py:class:`aioftp.DataConnectionThrottleStreamIO`
    """
    return self.get_stream(
        "APPE " + str(destination),
        "1xx",
        offset=offset,
    )
async def upload(self, source, destination="", *, write_into=False,
                 block_size=DEFAULT_BLOCK_SIZE):
    """
    :py:func:`asyncio.coroutine`

    High level upload method for uploading files and directories
    recursively from the file system.

    :param source: source path of file or directory on client side
    :type source: :py:class:`str` or :py:class:`pathlib.Path`

    :param destination: destination path of file or directory on
        server side
    :type destination: :py:class:`str` or
        :py:class:`pathlib.PurePosixPath`

    :param write_into: write source into destination (if you want to
        upload a file under a different name; works for directories
        as well)
    :type write_into: :py:class:`bool`

    :param block_size: block size for transaction
    :type block_size: :py:class:`int`
    """
    source = pathlib.Path(source)
    destination = pathlib.PurePosixPath(destination)
    if not write_into:
        destination = destination / source.name
    if await self.path_io.is_file(source):
        await self.make_directory(destination.parent)
        async with self.path_io.open(source, mode="rb") as file_in, \
                self.upload_stream(destination) as stream:
            async for block in file_in.iter_by_block(block_size):
                await stream.write(block)
    elif await self.path_io.is_dir(source):
        await self.make_directory(destination)
        sources = collections.deque([source])
        while sources:
            src = sources.popleft()
            async for path in self.path_io.list(src):
                if write_into:
                    relative = destination.name / path.relative_to(source)
                else:
                    relative = path.relative_to(source.parent)
                if await self.path_io.is_dir(path):
                    await self.make_directory(relative)
                    sources.append(path)
                else:
                    await self.upload(path, relative, write_into=True,
                                      block_size=block_size)
def download_stream(self, source, *, offset=0):
    """
    :py:func:`asyncio.coroutine`

    Create stream for reading data from the `source` file.

    :param source: source path of file on server side
    :type source: :py:class:`str` or :py:class:`pathlib.PurePosixPath`

    :param offset: byte offset for stream start position
    :type offset: :py:class:`int`

    :rtype: :py:class:`aioftp.DataConnectionThrottleStreamIO`
    """
    return self.get_stream("RETR " + str(source), "1xx", offset=offset)
async def download(self, source, destination="", *, write_into=False,
                   block_size=DEFAULT_BLOCK_SIZE):
    """
    :py:func:`asyncio.coroutine`

    High level download method for downloading files and directories
    recursively and saving them to the file system.

    :param source: source path of file or directory on server side
    :type source: :py:class:`str` or :py:class:`pathlib.PurePosixPath`

    :param destination: destination path of file or directory on
        client side
    :type destination: :py:class:`str` or :py:class:`pathlib.Path`

    :param write_into: write source into destination (if you want to
        download a file under a different name; works for directories
        as well)
    :type write_into: :py:class:`bool`

    :param block_size: block size for transaction
    :type block_size: :py:class:`int`
    """
    source = pathlib.PurePosixPath(source)
    destination = pathlib.Path(destination)
    if not write_into:
        destination = destination / source.name
    if await self.is_file(source):
        await self.path_io.mkdir(destination.parent,
                                 parents=True, exist_ok=True)
        async with self.path_io.open(destination, mode="wb") as file_out, \
                self.download_stream(source) as stream:
            async for block in stream.iter_by_block(block_size):
                await file_out.write(block)
    elif await self.is_dir(source):
        await self.path_io.mkdir(destination, parents=True, exist_ok=True)
        for name, info in (await self.list(source)):
            full = destination / name.relative_to(source)
            if info["type"] in ("file", "dir"):
                await self.download(name, full, write_into=True,
                                    block_size=block_size)
async def get_passive_connection(self, conn_type="I",
                                 commands=("epsv", "pasv")):
    """
    :py:func:`asyncio.coroutine`

    Getting pair of reader, writer for passive connection with server.

    :param conn_type: connection type ("I", "A", "E", "L")
    :type conn_type: :py:class:`str`

    :param commands: sequence of commands to try to initiate passive
        server creation. First success wins. Default is EPSV, then PASV.
    :type commands: :py:class:`list`

    :rtype: (:py:class:`asyncio.StreamReader`,
        :py:class:`asyncio.StreamWriter`)
    """
    functions = {
        "epsv": self._do_epsv,
        "pasv": self._do_pasv,
    }
    if not commands:
        raise ValueError("No passive commands provided")
    await self.command("TYPE " + conn_type, "200")
    for i, name in enumerate(commands, start=1):
        name = name.lower()
        if name not in functions:
            raise ValueError(f"{name!r} not in {set(functions)!r}")
        try:
            ip, port = await functions[name]()
            break
        except errors.StatusCodeError as e:
            is_last = i == len(commands)
            if is_last or not e.received_codes[-1].matches("50x"):
                raise
    if ip in ("0.0.0.0", None):
        ip = self.server_host
    reader, writer = await open_connection(ip, port,
                                           self.create_connection,
                                           self.ssl)
    return reader, writer