def filter_files(self, path):
    """
    Exclude files based on blueprint and project configuration as well as
    hidden files.
    """
    # r'$.' is a pattern that never matches, so an empty EXCLUDES list
    # excludes nothing
    excludes = r'|'.join([fnmatch.translate(x) for x in self.project.EXCLUDES]) or r'$.'
    for root, dirs, files in os.walk(path, topdown=True):
        dirs[:] = [d for d in dirs if not re.match(excludes, d)]
        dirs[:] = [os.path.join(root, d) for d in dirs]
        rel_path = os.path.relpath(root, path)
        paths = []
        for f in files:
            if rel_path == '.':
                file_path = f
            else:
                file_path = os.path.join(rel_path, f)
            if not re.match(excludes, file_path):
                paths.append(f)
        files[:] = paths
        yield root, dirs, files

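# A quick illustration of how the exclude pattern above behaves; the
# EXCLUDES values here are hypothetical:
#
#   import fnmatch, re
#   excludes = r'|'.join([fnmatch.translate(x) for x in ['*.pyc', '.*']])
#   bool(re.match(excludes, 'module.pyc'))   # True - excluded
#   bool(re.match(excludes, '.hidden'))      # True - excluded
#   bool(re.match(excludes, 'index.html'))   # False - kept
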
def find_files(self):
    """
    Find all file paths for publishing, yield (urlname, kwargs)
    """
    # yield blueprint paths first
    if getattr(self, 'blueprint_name', None):
        for path in walk_directory(os.path.join(self.path, self.blueprint_name), ignore=self.project.EXCLUDES):
            yield 'preview', {'path': path}

    # then yield project paths
    for path in walk_directory(self.path, ignore=self.project.EXCLUDES):
        yield 'preview', {'path': path}

def deploy_to_s3(self):
    """
    Deploy a directory to an s3 bucket.
    """
    self.tempdir = tempfile.mkdtemp('s3deploy')

    for keyname, absolute_path in self.find_file_paths():
        self.s3_upload(keyname, absolute_path)

    shutil.rmtree(self.tempdir, True)
    return True

def s3_upload(self, keyname, absolute_path):
    """
    Upload a file to s3
    """
    mimetype = mimetypes.guess_type(absolute_path)
    options = {'Content-Type': mimetype[0]}

    # gzip text files before uploading and serve them with a gzip
    # Content-Encoding header
    if mimetype[0] is not None and mimetype[0].startswith('text/'):
        upload = open(absolute_path, 'rb')
        options['Content-Encoding'] = 'gzip'
        key_parts = keyname.split('/')
        filename = key_parts.pop()
        temp_path = os.path.join(self.tempdir, filename)
        gzfile = gzip.GzipFile(temp_path, 'wb', 9, None, GZIP_TIMESTAMP)
        gzfile.write(upload.read())
        gzfile.close()
        absolute_path = temp_path

    # compare the local md5 to the remote key's ETag and skip the upload
    # when they match, unless force is set
    hash = '"{0}"'.format(hashlib.md5(open(absolute_path, 'rb').read()).hexdigest())
    key = "{0}/{1}".format(self.bucket.path, keyname)
    existing = self.connection.get_key(key)

    if self.force or not existing or (existing.etag != hash):
        k = Key(self.connection)
        k.key = key
        puts("+ Uploading {0}/{1}".format(self.bucket, keyname))
        k.set_contents_from_filename(absolute_path, options, policy='public-read')
    else:
        puts("- Skipping {0}/{1}, files match".format(self.bucket, keyname))

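# The ETag comparison above only works as a cache check because
# GZIP_TIMESTAMP pins the mtime field in the gzip header: with a fixed
# timestamp, recompressing unchanged content yields byte-identical output,
# so the quoted md5 matches the ETag S3 reports for single-part uploads.
# A minimal standalone sketch of that property (the GZIP_TIMESTAMP value
# here is hypothetical):
import gzip
import hashlib
import io

GZIP_TIMESTAMP = 1000000000  # hypothetical fixed mtime constant

def gzip_bytes(data):
    # compress with a pinned mtime so output is deterministic
    buf = io.BytesIO()
    gz = gzip.GzipFile(fileobj=buf, mode='wb', compresslevel=9,
                       mtime=GZIP_TIMESTAMP)
    gz.write(data)
    gz.close()
    return buf.getvalue()

# identical input always yields an identical digest, so uploads can be skipped
assert hashlib.md5(gzip_bytes(b'hello')).hexdigest() == \
       hashlib.md5(gzip_bytes(b'hello')).hexdigest()
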
def find_file_paths(self):
    """
    Recursively find all files in the upload directory, returning a list of
    (keyname, absolute path) tuples.
    """
    paths = []

    for root, dirs, files in os.walk(self.directory, topdown=True):
        rel_path = os.path.relpath(root, self.directory)
        for f in files:
            if rel_path == '.':
                path = (f, os.path.join(root, f))
            else:
                path = (os.path.join(rel_path, f), os.path.join(root, f))
            paths.append(path)

    return paths

def get_drive_api():
    """
    Get drive API client based on settings.
    """
    settings = Settings()
    if settings.credentials:
        return get_drive_api_from_file(settings.credentials_path)
    if settings.client_secrets:
        return get_drive_api_from_client_secrets(settings.client_secrets_path)

def get_drive_api_from_client_secrets(path, reset_creds=False):
    """
    Reads the local client secrets file if available (otherwise, opens a
    browser tab to walk through the OAuth 2.0 process, and stores the client
    secrets for future use) and then authorizes those credentials. Returns a
    Google Drive API service object.
    """
    storage = keyring_storage.Storage('tarbell', getpass.getuser())
    credentials = None
    if not reset_creds:
        credentials = storage.get()
    if path and not credentials:
        flow = client.flow_from_clientsecrets(path, scope=OAUTH_SCOPE)
        credentials = tools.run_flow(flow, storage, flags)
        storage.put(credentials)
    return _get_drive_api(credentials)

def get_drive_api_from_file(path):
    """
    Open file with OAuth tokens.
    """
    with open(path) as f:
        credentials = client.OAuth2Credentials.from_json(f.read())
    return _get_drive_api(credentials)

def _get_drive_api(credentials):
    """
    For a given set of credentials, return a drive API object.
    """
    http = httplib2.Http()
    http = credentials.authorize(http)
    service = discovery.build('drive', 'v2', http=http)
    service.credentials = credentials  # duck punch service obj. with credentials
    return service

def main():
    """
    Primary Tarbell command dispatch.
    """
    command = Command.lookup(args.get(0))

    if len(args) == 0 or args.contains(('-h', '--help', 'help')):
        display_info(args)
        sys.exit(1)
    elif args.contains(('-v', '--version')):
        display_version()
        sys.exit(1)
    elif command:
        arg = args.get(0)
        args.remove(arg)
        command.__call__(command, args)
        sys.exit()
    else:
        show_error(colored.red('Error! Unknown command \'{0}\'.\n'
                               .format(args.get(0))))
        display_info(args)
        sys.exit(1)

def display_info(args):
    """
    Displays Tarbell info.
    """
    puts('\nTarbell: Simple web publishing\n')
    puts('Usage: {0}\n'.format(colored.cyan('tarbell <command>')))
    puts('Commands:\n')
    for command in Command.all_commands():
        usage = command.usage or command.name
        help = command.help or ''
        puts('{0} {1}'.format(
            colored.yellow('{0: <37}'.format(usage)),
            split_sentences(help, 37)
        ))
    puts("")

    settings = Settings()
    if settings.file_missing:
        puts('---\n{0}: {1}'.format(
            colored.red("Warning"),
            "No Tarbell configuration found. Run:"
        ))
        puts('\n{0}'.format(
            colored.green("tarbell configure")
        ))
        puts('\n{0}\n---'.format(
            "to configure Tarbell."
        ))

def tarbell_generate(command, args, skip_args=False, extra_context=None, quiet=False):
    """
    Generate static files.
    """
    output_root = None
    with ensure_settings(command, args) as settings, ensure_project(command, args) as site:
        if not skip_args:
            output_root = list_get(args, 0, False)
            if output_root:
                is_folder = os.path.exists(output_root)
            else:
                puts("\nYou must specify an output directory (e.g. `{0}`)".format(
                    colored.cyan("tarbell generate _out")
                ))
                sys.exit()

        if quiet:
            site.quiet = True

        if not output_root:
            output_root = tempfile.mkdtemp(prefix="{0}-".format(site.project.__name__))
            is_folder = False

        if args.contains('--context'):
            site.project.CONTEXT_SOURCE_FILE = args.value_after('--context')

        if args.contains('--overwrite'):
            is_folder = False

        # check to see if the folder we're trying to create already exists
        if is_folder:
            output_file = raw_input(("\nA folder named {0} already exists! Do you want to delete it? (selecting 'N' will quit) [y/N] ").format(
                output_root
            ))
            if output_file and output_file.lower() == "y":
                puts(("\nDeleting {0}...\n").format(
                    colored.cyan(output_root)
                ))
                _delete_dir(output_root)
            else:
                puts("\nNot overwriting. See ya!")
                sys.exit()

        site.generate_static_site(output_root, extra_context)
        if not quiet:
            puts("\nCreated site in {0}".format(colored.cyan(output_root)))
        return output_root

def tarbell_install(command, args):
    """
    Install a project.
    """
    with ensure_settings(command, args) as settings:
        project_url = args.get(0)
        puts("\n- Getting project information for {0}".format(project_url))
        project_name = project_url.split("/").pop()
        error = None

        # Create a tempdir and clone
        tempdir = tempfile.mkdtemp()
        try:
            testgit = sh.git.bake(_cwd=tempdir, _tty_in=True, _tty_out=False)  # _err_to_out=True)
            testclone = testgit.clone(project_url, '.', '--depth=1', '--bare')
            puts(testclone)
            config = testgit.show("HEAD:tarbell_config.py")
            puts("\n- Found tarbell_config.py")
            path = _get_path(_clean_suffix(project_name, ".git"), settings)
            _mkdir(path)
            git = sh.git.bake(_cwd=path)
            clone = git.clone(project_url, '.', _tty_in=True, _tty_out=False, _err_to_out=True)
            puts(clone)
            puts(git.submodule.update('--init', '--recursive', _tty_in=True, _tty_out=False, _err_to_out=True))
            _install_requirements(path)

            # Get site, run hook
            with ensure_project(command, args, path) as site:
                site.call_hook("install", site, git)
        except sh.ErrorReturnCode_128 as e:
            if e.message.endswith('Device not configured\n'):
                error = 'Git tried to prompt for a username or password.\n\nTarbell doesn\'t support interactive sessions. Please configure ssh key access to your Git repository. (See https://help.github.com/articles/generating-ssh-keys/)'
            else:
                error = 'Not a valid repository or Tarbell project'
        finally:
            _delete_dir(tempdir)

        if error:
            show_error(error)
        else:
            puts("\n- Done installing project in {0}".format(colored.yellow(path)))

def tarbell_install_blueprint(command, args):
    """
    Install a project template.
    """
    with ensure_settings(command, args) as settings:
        name = None
        error = None
        template_url = args.get(0)
        matches = [template for template in settings.config["project_templates"] if template.get("url") == template_url]
        tempdir = tempfile.mkdtemp()

        if matches:
            puts("\n{0} already exists. Nothing more to do.\n".format(
                colored.yellow(template_url)
            ))
            sys.exit()

        try:
            puts("\nInstalling {0}".format(colored.cyan(template_url)))
            puts("\n- Cloning repo")
            git = sh.git.bake(_cwd=tempdir, _tty_in=True, _tty_out=False, _err_to_out=True)
            puts(git.clone(template_url, '.'))
            _install_requirements(tempdir)
            filename, pathname, description = imp.find_module('blueprint', [tempdir])
            blueprint = imp.load_module('blueprint', filename, pathname, description)
            puts("\n- Found _blueprint/blueprint.py")
            name = blueprint.NAME
            puts("\n- Name specified in blueprint.py: {0}".format(colored.yellow(name)))
            settings.config["project_templates"].append({"name": name, "url": template_url})
            settings.save()
        except AttributeError:
            name = template_url.split("/")[-1]
            error = "\n- No name specified in blueprint.py, using '{0}'".format(colored.yellow(name))
        except ImportError:
            error = 'No blueprint.py found'
        except sh.ErrorReturnCode_128 as e:
            if e.stdout.strip('\n').endswith('Device not configured'):
                error = 'Git tried to prompt for a username or password.\n\nTarbell doesn\'t support interactive sessions. Please configure ssh key access to your Git repository. (See https://help.github.com/articles/generating-ssh-keys/)'
            else:
                error = 'Not a valid repository or Tarbell project'
        finally:
            _delete_dir(tempdir)

        if error:
            show_error(error)
        else:
            puts("\n+ Added new project template: {0}".format(colored.yellow(name)))

def tarbell_list(command, args):
    """
    List tarbell projects.
    """
    with ensure_settings(command, args) as settings:
        projects_path = settings.config.get("projects_path")
        if not projects_path:
            show_error("{0} does not exist".format(projects_path))
            sys.exit()

        puts("Listing projects in {0}\n".format(
            colored.yellow(projects_path)
        ))

        longest_title = 0
        projects = []
        for directory in os.listdir(projects_path):
            project_path = os.path.join(projects_path, directory)
            try:
                filename, pathname, description = imp.find_module('tarbell_config', [project_path])
                config = imp.load_module(directory, filename, pathname, description)
                title = config.DEFAULT_CONTEXT.get("title", directory)
                projects.append((directory, title))
                if len(title) > longest_title:
                    longest_title = len(title)
            except ImportError:
                pass

        if len(projects):
            fmt = "{0: <" + str(longest_title + 1) + "} {1}"
            puts(fmt.format('title', 'project name'))
            for projectname, title in projects:
                title = codecs.encode(title, 'utf8')
                puts(colored.yellow(fmt.format(title, colored.cyan(projectname))))

            puts("\nUse {0} to switch to a project".format(
                colored.green("tarbell switch <project name>")
            ))
        else:
            puts("No projects found")

def tarbell_list_templates(command, args):
    """
    List available Tarbell blueprints.
    """
    with ensure_settings(command, args) as settings:
        puts("\nAvailable project templates\n")
        _list_templates(settings)
        puts("")

def tarbell_publish(command, args):
    """
    Publish to s3.
    """
    with ensure_settings(command, args) as settings, ensure_project(command, args) as site:
        bucket_name = list_get(args, 0, "staging")

        try:
            bucket_url = S3Url(site.project.S3_BUCKETS[bucket_name])
        except KeyError:
            show_error(
                "\nThere's no bucket configuration called '{0}' in "
                "tarbell_config.py.".format(colored.yellow(bucket_name)))
            sys.exit(1)

        extra_context = {
            "ROOT_URL": bucket_url,
            "S3_BUCKET": bucket_url.root,
            "BUCKET_NAME": bucket_name,
        }

        tempdir = "{0}/".format(tarbell_generate(command, args, extra_context=extra_context, skip_args=True, quiet=True))
        try:
            title = site.project.DEFAULT_CONTEXT.get("title", "")
            puts("\nDeploying {0} to {1} ({2})\n".format(
                colored.yellow(title),
                colored.red(bucket_name),
                colored.green(bucket_url)
            ))

            # Get creds
            if settings.config:
                # If settings has a config section, use it
                kwargs = settings.config['s3_credentials'].get(bucket_url.root)
                if not kwargs:
                    kwargs = {
                        'access_key_id': settings.config.get('default_s3_access_key_id'),
                        'secret_access_key': settings.config.get('default_s3_secret_access_key'),
                    }
                    puts("Using default bucket credentials")
                else:
                    puts("Using custom bucket configuration for {0}".format(bucket_url.root))
            else:
                # If no configuration exists, read from environment variables if possible
                puts("Attempting to use AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY")
                kwargs = {
                    'access_key_id': os.environ["AWS_ACCESS_KEY_ID"],
                    'secret_access_key': os.environ["AWS_SECRET_ACCESS_KEY"],
                }

            if not kwargs.get('access_key_id') and not kwargs.get('secret_access_key'):
                show_error('S3 access is not configured. Set up S3 with {0} to publish.'
                           .format(colored.green('tarbell configure')))
                sys.exit()

            s3 = S3Sync(tempdir, bucket_url, **kwargs)
            s3.deploy_to_s3()

            site.call_hook("publish", site, s3)

            puts("\nIf you have website hosting enabled, you can see your project at:")
            puts(colored.green("http://{0}\n".format(bucket_url)))
        except KeyboardInterrupt:
            show_error("ctrl-c pressed, bailing out!")
        finally:
            _delete_dir(tempdir)

def tarbell_newproject(command, args):
    """
    Create new Tarbell project.
    """
    with ensure_settings(command, args) as settings:
        # Set it up and make the directory
        name = _get_project_name(args)
        puts("Creating {0}".format(colored.cyan(name)))
        path = _get_path(name, settings)
        _mkdir(path)

        try:
            _newproject(command, path, name, settings)
        except KeyboardInterrupt:
            _delete_dir(path)
            show_error("ctrl-c pressed, not creating new project.")
        except:
            _delete_dir(path)
            show_error("Unexpected error: {0}".format(sys.exc_info()[0]))
            raise

def tarbell_serve(command, args):
    """
    Serve the current Tarbell project.
    """
    with ensure_project(command, args) as site:
        with ensure_settings(command, args) as settings:
            address = list_get(args, 0, "").split(":")
            ip = list_get(address, 0, settings.config['default_server_ip'])
            port = int(list_get(address, 1, settings.config['default_server_port']))
            puts("\n * Running local server. Press {0} to stop the server".format(colored.red("ctrl-c")))
            puts(" * Edit this project's templates at {0}".format(colored.yellow(site.path)))

            try:
                if not is_werkzeug_process():
                    site.call_hook("server_start", site)
                site.app.run(ip, port=port)
                if not is_werkzeug_process():
                    site.call_hook("server_stop", site)
            except socket.error:
                show_error("Address {0} is already in use, please try another port or address."
                           .format(colored.yellow("{0}:{1}".format(ip, port))))

def tarbell_switch(command, args):
    """
    Switch to a project.
    """
    with ensure_settings(command, args) as settings:
        projects_path = settings.config.get("projects_path")
        if not projects_path:
            show_error("{0} does not exist".format(projects_path))
            sys.exit()
        project = args.get(0)
        args.remove(project)
        project_path = os.path.join(projects_path, project)
        if os.path.isdir(project_path):
            os.chdir(project_path)
            puts("\nSwitching to {0}".format(colored.red(project)))
            tarbell_serve(command, args)
        else:
            show_error("{0} isn't a tarbell project".format(project_path))

def tarbell_update(command, args):
    """
    Update the current tarbell project.
    """
    with ensure_settings(command, args) as settings, ensure_project(command, args) as site:
        puts("Updating to latest blueprint\n")

        git = sh.git.bake(_cwd=site.base.base_dir)
        # stash then pull
        puts(colored.yellow("Stashing local changes"))
        puts(git.stash())
        puts(colored.yellow("Pull latest changes"))
        puts(git.pull())
        # need to pop any local changes back to get back on the original branch
        # this may behave oddly if you have old changes stashed
        if git.stash.list():
            puts(git.stash.pop())

def tarbell_unpublish(command, args):
    """
    Delete a project.
    """
    with ensure_settings(command, args) as settings, ensure_project(command, args) as site:
        show_error("Not implemented!")

def tarbell_spreadsheet(command, args):
    """
    Open context spreadsheet
    """
    with ensure_settings(command, args) as settings, ensure_project(command, args) as site:
        try:
            # First, try to get the Google Spreadsheet URL
            spreadsheet_url = _google_spreadsheet_url(site.project.SPREADSHEET_KEY)
        except AttributeError:
            # The project doesn't seem to be using a Google Spreadsheet.
            # Try the URL or path specified in the CONTEXT_SOURCE_FILE setting
            try:
                spreadsheet_url = _context_source_file_url(
                    site.project.CONTEXT_SOURCE_FILE)
                print(spreadsheet_url)
            except AttributeError:
                puts(colored.red("No Google spreadsheet or context source file "
                                 "has been configured.\n"))
                return

        # Use the webbrowser package to try to open the file whether it's a
        # remote URL on the web, or a local file. On some platforms it will
        # successfully open local files in the default application.
        # This seems preferable to trying to do os detection and calling
        # the system-specific command for opening files in default
        # applications.
        # See
        # http://stackoverflow.com/questions/434597/open-document-with-default-application-in-python
        webbrowser.open(spreadsheet_url)

def _context_source_file_url(path_or_url):
    """
    Returns a URL for a remote or local context CSV file
    """
    if path_or_url.startswith('http'):
        # Remote CSV. Just return the URL
        return path_or_url

    if path_or_url.startswith('/'):
        # Absolute path
        return "file://" + path_or_url

    return "file://" + os.path.join(os.path.realpath(os.getcwd()), path_or_url)

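# Example outputs for the three branches above (hypothetical paths):
#   _context_source_file_url("http://example.com/data.csv")
#       -> "http://example.com/data.csv"
#   _context_source_file_url("/tmp/data.csv")
#       -> "file:///tmp/data.csv"
#   _context_source_file_url("data.csv")
#       -> "file://" + <current working directory> + "/data.csv"
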
def _newproject(command, path, name, settings):
    """
    Helper to create new project.
    """
    key = None
    title = _get_project_title()
    template = _get_template(settings)

    # Init repo
    git = sh.git.bake(_cwd=path)
    puts(git.init())

    if template.get("url"):
        # Create submodule
        puts(git.submodule.add(template['url'], '_blueprint'))
        puts(git.submodule.update(*['--init']))

        # Create spreadsheet
        key = _create_spreadsheet(name, title, path, settings)

        # Copy html files
        puts(colored.green("\nCopying html files..."))
        files = glob.iglob(os.path.join(path, "_blueprint", "*.html"))
        for file in files:
            if os.path.isfile(file):
                dir, filename = os.path.split(file)
                if not filename.startswith("_") and not filename.startswith("."):
                    puts("Copying {0} to {1}".format(filename, path))
                    shutil.copy2(file, path)
        ignore = os.path.join(path, "_blueprint", ".gitignore")
        if os.path.isfile(ignore):
            shutil.copy2(ignore, path)
    else:
        empty_index_path = os.path.join(path, "index.html")
        open(empty_index_path, "w")

    # Create config file
    _copy_config_template(name, title, template, path, key, settings)

    # Commit
    puts(colored.green("\nInitial commit"))
    puts(git.add('.'))
    puts(git.commit(m='Created {0} from {1}'.format(name, template['name'])))

    _install_requirements(path)

    # Get site, run hook
    # NOTE: `args` is not a parameter of this helper; the code relies on it
    # being available in the enclosing module scope.
    with ensure_project(command, args, path) as site:
        site.call_hook("newproject", site, git)

    # Messages
    puts("\nAll done! To preview your new project, type:\n")
    puts("{0} {1}".format(colored.green("tarbell switch"), colored.green(name)))
    puts("\nor\n")
    puts("{0}".format(colored.green("cd %s" % path)))
    puts("{0}".format(colored.green("tarbell serve\n")))
    puts("\nYou got this!\n")

def _install_requirements(path):
    """
    Install a blueprint's requirements.txt
    """
    locations = [os.path.join(path, "_blueprint"), os.path.join(path, "_base"), path]
    success = True

    for location in locations:
        try:
            with open(os.path.join(location, "requirements.txt")):
                puts("\nRequirements file found at {0}".format(os.path.join(location, "requirements.txt")))
                install_reqs = raw_input("Install requirements now with pip install -r requirements.txt? [Y/n] ")
                if not install_reqs or install_reqs.lower() == 'y':
                    pip = sh.pip.bake(_cwd=location)
                    puts("\nInstalling requirements...")
                    puts(pip("install", "-r", "requirements.txt"))
                else:
                    success = False
                    puts("Not installing requirements. This may break everything! Vaya con dios.")
        except IOError:
            pass

    return success

def _get_project_name(args):
    """
    Get project name.
    """
    name = args.get(0)
    puts("")
    while not name:
        name = raw_input("What is the project's short directory name? (e.g. my_project) ")
    return name

def _clean_suffix(string, suffix):
    """
    If the string ends with the suffix, remove it; otherwise leave it alone.
    """
    suffix_len = len(suffix)

    if len(string) < suffix_len:
        # the string param was shorter than the suffix
        raise ValueError("A suffix cannot be longer than the string argument.")

    if string.endswith(suffix):
        # return everything from the beginning up to, but not including,
        # the first character of the suffix
        return string[0:-suffix_len]
    else:
        # leave unharmed
        return string

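# Example behavior (hypothetical values):
#   _clean_suffix("myproject.git", ".git")  -> "myproject"
#   _clean_suffix("myproject", ".git")      -> "myproject"  (no suffix, unchanged)
#   _clean_suffix("a", ".git")              -> raises ValueError
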
def _get_path(name, settings, mkdir=True):
    """
    Generate a project path.
    """
    default_projects_path = settings.config.get("projects_path")
    path = None

    if default_projects_path:
        path = raw_input("\nWhere would you like to create this project? [{0}/{1}] ".format(default_projects_path, name))
        if not path:
            path = os.path.join(default_projects_path, name)
    else:
        while not path:
            path = raw_input("\nWhere would you like to create this project? (e.g. ~/tarbell/) ")

    return os.path.expanduser(path)

def _mkdir(path):
    """
    Make a directory or bail.
    """
    try:
        os.mkdir(path)
    except OSError as e:
        if e.errno == 17:  # EEXIST: the directory already exists
            show_error("ABORTING: Directory {0} already exists.".format(path))
        else:
            show_error("ABORTING: OSError {0}".format(e))
        sys.exit()

def _get_template(settings):
    """
    Prompt user to pick template from a list.
    """
    puts("\nPick a template\n")
    template = None
    while not template:
        _list_templates(settings)
        index = raw_input("\nWhich template would you like to use? [1] ")
        if not index:
            index = "1"
        try:
            index = int(index) - 1
            return settings.config["project_templates"][index]
        except (ValueError, IndexError):
            puts("\"{0}\" isn't a valid option!".format(colored.red("{0}".format(index))))

def _list_templates(settings):
    """
    List templates from settings.
    """
    for idx, option in enumerate(settings.config.get("project_templates"), start=1):
        puts(" {0!s:5} {1!s:36}".format(
            colored.yellow("[{0}]".format(idx)),
            colored.cyan(option.get("name"))
        ))
        if option.get("url"):
            puts(" {0}\n".format(option.get("url")))

def _create_spreadsheet(name, title, path, settings):
    """
    Create Google spreadsheet.
    """
    if not settings.client_secrets:
        return None

    create = raw_input("Would you like to create a Google spreadsheet? [Y/n] ")
    if create and not create.lower() == "y":
        return puts("Not creating spreadsheet.")

    email_message = (
        "What Google account(s) should have access to this "
        "spreadsheet? (Use a full email address, such as "
        "your.name@gmail.com. Separate multiple addresses with commas.)")

    if settings.config.get("google_account"):
        emails = raw_input("\n{0}(Default: {1}) ".format(
            email_message,
            settings.config.get("google_account")
        ))
        if not emails:
            emails = settings.config.get("google_account")
    else:
        emails = None
        while not emails:
            emails = raw_input(email_message)

    try:
        media_body = _MediaFileUpload(os.path.join(path, '_blueprint/_spreadsheet.xlsx'),
                                      mimetype='application/vnd.ms-excel')
    except IOError:
        show_error("_blueprint/_spreadsheet.xlsx doesn't exist!")
        return None

    service = get_drive_api()
    body = {
        'title': '{0} (Tarbell)'.format(title),
        'description': '{0} ({1})'.format(title, name),
        'mimeType': 'application/vnd.ms-excel',
    }
    try:
        newfile = service.files()\
            .insert(body=body, media_body=media_body, convert=True).execute()
        for email in emails.split(","):
            _add_user_to_file(newfile['id'], service, user_email=email.strip())
        puts("\n{0!s}! View the spreadsheet at {1!s}".format(
            colored.green("Success"),
            colored.yellow("https://docs.google.com/spreadsheet/ccc?key={0}"
                           .format(newfile['id']))
        ))
        return newfile['id']
    except errors.HttpError as error:
        show_error('An error occurred creating spreadsheet: {0}'.format(error))
        return None

def _add_user_to_file(file_id, service, user_email, perm_type='user', role='writer'):
    """
    Grants the given set of permissions for a given file_id. service is an
    already-credentialed Google Drive service instance.
    """
    new_permission = {
        'value': user_email,
        'type': perm_type,
        'role': role
    }
    try:
        service.permissions()\
            .insert(fileId=file_id, body=new_permission)\
            .execute()
    except errors.HttpError as error:
        show_error('An error occurred adding users to spreadsheet: {0}'.format(error))

def _copy_config_template(name, title, template, path, key, settings):
    """
    Get and render tarbell_config.py.template from Tarbell default.
    """
    puts("\nCopying configuration file")
    context = settings.config
    context.update({
        "default_context": {
            "name": name,
            "title": title,
        },
        "name": name,
        "title": title,
        "template_repo_url": template.get('url'),
        "key": key,
    })

    # @TODO refactor this a bit
    if not key:
        spreadsheet_path = os.path.join(path, '_blueprint/', '_spreadsheet.xlsx')
        try:
            with open(spreadsheet_path, "rb") as f:
                puts("Copying _blueprint/_spreadsheet.xlsx to tarbell_config.py's DEFAULT_CONTEXT")
                data = process_xlsx(f.read())
                if 'values' in data:
                    data = copy_global_values(data)
                context["default_context"].update(data)
        except IOError:
            pass

    s3_buckets = settings.config.get("s3_buckets")
    if s3_buckets:
        puts("")
        for bucket, bucket_conf in s3_buckets.items():
            puts("Configuring {0!s} bucket at {1!s}\n".format(
                colored.green(bucket),
                colored.yellow("{0}/{1}".format(bucket_conf['uri'], name))
            ))

    puts("\n- Creating {0!s} project configuration file".format(
        colored.cyan("tarbell_config.py")
    ))
    template_dir = os.path.dirname(pkg_resources.resource_filename("tarbell", "templates/tarbell_config.py.template"))
    loader = jinja2.FileSystemLoader(template_dir)
    env = jinja2.Environment(loader=loader)
    env.filters["pprint_lines"] = pprint_lines  # For dumping context
    content = env.get_template('tarbell_config.py.template').render(context)
    codecs.open(os.path.join(path, "tarbell_config.py"), "w", encoding="utf-8").write(content)
    puts("\n- Done copying configuration file")

def _delete_dir(dir):
    """
    Delete a directory.
    """
    try:
        shutil.rmtree(dir)  # delete directory
    except OSError as exc:
        if exc.errno != 2:  # code 2 - no such file or directory
            raise  # re-raise exception
    except UnboundLocalError:
        pass

def def_cmd(name=None, short=None, fn=None, usage=None, help=None):
    """
    Define a command.
    """
    command = Command(name=name, short=short, fn=fn, usage=usage, help=help)
    Command.register(command)

def save(self):
    """
    Save settings.
    """
    with open(self.path, "w") as f:
        self.config["project_templates"] = list(filter(
            lambda template: template.get("url"),
            self.config["project_templates"]))
        yaml.dump(self.config, f, default_flow_style=False)

def read_file(path, absolute=False, encoding='utf-8'):
    """
    Read the file at `path`. If `absolute` is True, use absolute path,
    otherwise path is assumed to be relative to Tarbell template root dir.

    For example:

    .. code-block:: html+jinja

        <div class="chapter">
            {{ read_file('_chapters/one.txt')|linebreaks }}
        </div>
    """
    site = g.current_site
    if not absolute:
        path = os.path.join(site.path, path)

    try:
        return codecs.open(path, 'r', encoding).read()
    except IOError:
        return None

def render_file(context, path, absolute=False):
    """
    Like :py:func:`read_file`, except that the file is rendered as a Jinja
    template using the current context. If `absolute` is True, use absolute
    path, otherwise path is assumed to be relative to Tarbell template root
    dir.

    For example:

    .. code-block:: html+jinja

        <div class="chapter">
            {{ render_file('_chapters/one.txt') }}
        </div>
    """
    site = g.current_site
    if not absolute:
        path = os.path.join(site.path, path)

    return render_template(path, **context)

def format_date(value, format='%b %d, %Y', convert_tz=None):
    """
    Format an Excel date or date string, returning a formatted date.

    To return a Python :py:class:`datetime.datetime` object, pass ``None``
    as a ``format`` argument.

    >>> format_date(42419.82163)
    'Feb 19, 2016'

    .. code-block:: html+jinja

        {{ row.date|format_date('%Y-%m-%d') }}
    """
    if isinstance(value, float) or isinstance(value, int):
        seconds = (value - 25569) * 86400.0
        parsed = datetime.datetime.utcfromtimestamp(seconds)
    else:
        parsed = dateutil.parser.parse(value)
    if convert_tz:
        local_zone = dateutil.tz.gettz(convert_tz)
        parsed = parsed.astimezone(tz=local_zone)

    if format:
        return parsed.strftime(format)
    else:
        return parsed

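# How the Excel conversion above works: Excel stores dates as fractional
# days since its 1900 epoch, and serial 25569 corresponds to 1970-01-01,
# so (value - 25569) * 86400 yields a Unix timestamp. For the docstring
# example:
#   (42419.82163 - 25569) * 86400 ~= 1455910988.8  ->  2016-02-19 UTC
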
def pprint_lines(value):
    """
    Pretty print lines
    """
    pformatted = pformat(value, width=1, indent=4)
    formatted = "{0}\n {1}\n{2}".format(
        pformatted[0],
        pformatted[1:-1],
        pformatted[-1]
    )
    return Markup(formatted)

def plot_composition(df, intervals, axes=None):
    """
    Plot time series of generics and label underlying instruments which
    these series are composed of.

    Parameters:
    -----------
    df: pd.DataFrame
        DataFrame of time series to be plotted. Each column is a generic
        time series.
    intervals: pd.DataFrame
        A DataFrame including information for when each contract is used in
        the generic series. Columns are ['contract', 'generic',
        'start_date', 'end_date']
    axes: list
        List of matplotlib.axes.Axes

    Example
    -------
    >>> import mapping.plot as mplot
    >>> import pandas as pd
    >>> from pandas import Timestamp as TS
    >>> idx = pd.date_range("2017-01-01", "2017-01-15")
    >>> rets_data = pd.np.random.randn(len(idx))
    >>> rets = pd.DataFrame({"CL1": rets_data, "CL2": rets_data}, index=idx)
    >>> intervals = pd.DataFrame(
    ...     [(TS("2017-01-01"), TS("2017-01-05"), "2017_CL_F", "CL1"),
    ...      (TS("2017-01-05"), TS("2017-01-15"), "2017_CL_G", "CL1"),
    ...      (TS("2017-01-01"), TS("2017-01-12"), "2017_CL_G", "CL2"),
    ...      (TS("2017-01-10"), TS("2017-01-15"), "2017_CL_H", "CL2")],
    ...     columns=["start_date", "end_date", "contract", "generic"])
    >>> mplot.plot_composition(rets, intervals)
    """
    generics = df.columns
    if (axes is not None) and (len(axes) != len(generics)):
        raise ValueError("If 'axes' is not None then it must be the same "
                         "length as 'df.columns'")
    if axes is None:
        _, axes = plt.subplots(nrows=len(generics), ncols=1)
        if len(generics) == 1:
            axes = [axes]
    for ax, generic in zip(axes, generics):
        ax.plot(df.loc[:, generic], label=generic)
        # no legend line to avoid clutter
        ax.legend(loc='center right', handlelength=0)
        dates = intervals.loc[intervals.loc[:, "generic"] == generic,
                              ["start_date", "end_date", "contract"]]
        date_ticks = set(
            dates.loc[:, "start_date"].tolist() +
            dates.loc[:, "end_date"].tolist()
        )
        xticks = [ts.toordinal() for ts in date_ticks]
        xlabels = [ts.strftime("%Y-%m-%d") for ts in date_ticks]
        ax.set_xticks(xticks)
        ax.set_xticklabels(xlabels)
        y_top = ax.get_ylim()[1]
        count = 0
        # label and colour each underlying
        for _, dt1, dt2, instr in dates.itertuples():
            if count % 2:
                fc = "b"
            else:
                fc = "r"
            count += 1
            ax.axvspan(dt1, dt2, facecolor=fc, alpha=0.2)
            x_mid = dt1 + (dt2 - dt1) / 2
            ax.text(x_mid, y_top, instr, rotation=45)
    return axes

def intervals(weights):
    """
    Extract intervals where generics are composed of different tradeable
    instruments.

    Parameters
    ----------
    weights: DataFrame or dict
        A DataFrame or dictionary of DataFrames with columns representing
        generics and a MultiIndex of date and contract. Values represent
        weights on tradeables for each generic.

    Returns
    -------
    A DataFrame with columns ['contract', 'generic', 'start_date',
    'end_date']
    """
    intrvls = []
    if isinstance(weights, dict):
        for root in weights:
            wts = weights[root]
            intrvls.append(_intervals(wts))
        intrvls = pd.concat(intrvls, axis=0)
    else:
        intrvls = _intervals(weights)

    intrvls = intrvls.reset_index(drop=True)
    return intrvls

def synchronize(self):
    """Synchronizes Router DB from Neutron DB with EOS.

    Walks through the Neutron DB and ensures that all the routers created
    in Neutron DB match with EOS. After creating appropriate routers, it
    ensures to add interfaces as well. Uses idempotent properties of EOS
    configuration, which means same commands can be repeated.
    """
    LOG.info(_LI('Syncing Neutron Router DB <-> EOS'))

    routers, router_interfaces = self.get_routers_and_interfaces()
    expected_vrfs = set()
    if self._use_vrf:
        expected_vrfs.update(self.driver._arista_router_name(r['id'], r['name'])
                             for r in routers)
    expected_vlans = set(r['seg_id'] for r in router_interfaces)
    if self._enable_cleanup:
        self.do_cleanup(expected_vrfs, expected_vlans)
    self.create_routers(routers)
    self.create_router_interfaces(router_interfaces)

def create_router(self, context, router):
    """Create a new router entry in DB, and create it in Arista HW."""
    # Add router to the DB
    new_router = super(AristaL3ServicePlugin, self).create_router(
        context, router)

    # create router on the Arista Hw
    try:
        self.driver.create_router(context, new_router)
        return new_router
    except Exception:
        with excutils.save_and_reraise_exception():
            LOG.error(_LE("Error creating router on Arista HW router=%s "),
                      new_router)
            super(AristaL3ServicePlugin, self).delete_router(
                context, new_router['id']
            )

def update_router(self, context, router_id, router):
    """Update an existing router in DB, and update it in Arista HW."""
    # Read existing router record from DB
    original_router = self.get_router(context, router_id)

    # Update router DB
    new_router = super(AristaL3ServicePlugin, self).update_router(
        context, router_id, router)

    # Modify router on the Arista Hw
    try:
        self.driver.update_router(context, router_id,
                                  original_router, new_router)
        return new_router
    except Exception:
        LOG.error(_LE("Error updating router on Arista HW router=%s "),
                  new_router)

def delete_router(self, context, router_id):
    """Delete an existing router from Arista HW as well as from the DB."""
    router = self.get_router(context, router_id)

    # Delete router on the Arista Hw
    try:
        self.driver.delete_router(context, router_id, router)
    except Exception as e:
        LOG.error(_LE("Error deleting router on Arista HW "
                      "router %(r)s exception=%(e)s"),
                  {'r': router, 'e': e})

    super(AristaL3ServicePlugin, self).delete_router(context, router_id)

def add_router_interface(self, context, router_id, interface_info):
    """Add a subnet of a network to an existing router."""
    new_router = super(AristaL3ServicePlugin, self).add_router_interface(
        context, router_id, interface_info)

    core = directory.get_plugin()

    # Get network info for the subnet that is being added to the router.
    # Check if the interface information is by port-id or subnet-id
    add_by_port, add_by_sub = self._validate_interface_info(interface_info)
    if add_by_sub:
        subnet = core.get_subnet(context, interface_info['subnet_id'])
    elif add_by_port:
        port = core.get_port(context, interface_info['port_id'])
        subnet_id = port['fixed_ips'][0]['subnet_id']
        subnet = core.get_subnet(context, subnet_id)
    network_id = subnet['network_id']

    # To create SVI's in Arista HW, the segmentation Id is required
    # for this network.
    ml2_db = NetworkContext(self, context, {'id': network_id})
    seg_id = ml2_db.network_segments[0]['segmentation_id']

    # Package all the info needed for Hw programming
    router = self.get_router(context, router_id)
    router_info = copy.deepcopy(new_router)
    router_info['seg_id'] = seg_id
    router_info['name'] = router['name']
    router_info['cidr'] = subnet['cidr']
    router_info['gip'] = subnet['gateway_ip']
    router_info['ip_version'] = subnet['ip_version']

    try:
        self.driver.add_router_interface(context, router_info)
        return new_router
    except Exception:
        with excutils.save_and_reraise_exception():
            LOG.error(_LE("Error Adding subnet %(subnet)s to "
                          "router %(router_id)s on Arista HW"),
                      {'subnet': subnet, 'router_id': router_id})
            super(AristaL3ServicePlugin, self).remove_router_interface(
                context, router_id, interface_info)

def remove_router_interface(self, context, router_id, interface_info):
    """Remove a subnet of a network from an existing router."""
    router_to_del = (
        super(AristaL3ServicePlugin, self).remove_router_interface(
            context, router_id, interface_info)
    )

    # Get network information of the subnet that is being removed
    core = directory.get_plugin()
    subnet = core.get_subnet(context, router_to_del['subnet_id'])
    network_id = subnet['network_id']

    # For SVI removal from Arista HW, segmentation ID is needed
    ml2_db = NetworkContext(self, context, {'id': network_id})
    seg_id = ml2_db.network_segments[0]['segmentation_id']

    router = self.get_router(context, router_id)
    router_info = copy.deepcopy(router_to_del)
    router_info['seg_id'] = seg_id
    router_info['name'] = router['name']

    try:
        self.driver.remove_router_interface(context, router_info)
        return router_to_del
    except Exception as exc:
        LOG.error(_LE("Error removing interface %(interface)s from "
                      "router %(router_id)s on Arista HW "
                      "Exception=%(exc)s"),
                  {'interface': interface_info, 'router_id': router_id,
                   'exc': exc})

def initialize_switch_endpoints(self):
    """Initialize endpoints for switch communication"""
    self._switches = {}
    self._port_group_info = {}
    self._validate_config()
    for s in cfg.CONF.ml2_arista.switch_info:
        switch_ip, switch_user, switch_pass = s.split(":")
        if switch_pass == "''":
            switch_pass = ''
        self._switches[switch_ip] = api.EAPIClient(
            switch_ip,
            switch_user,
            switch_pass,
            verify=False,
            timeout=cfg.CONF.ml2_arista.conn_timeout)
    self._check_dynamic_acl_support()

def _check_dynamic_acl_support(self):
    """Log an error if any switches don't support dynamic ACLs"""
    cmds = ['ip access-list openstack-test dynamic',
            'no ip access-list openstack-test']
    for switch_ip, switch_client in self._switches.items():
        try:
            self.run_openstack_sg_cmds(cmds, switch_client)
        except Exception:
            LOG.error("Switch %s does not support dynamic ACLs. SG "
                      "support will not be enabled on this switch.",
                      switch_ip)

def _validate_config(self):
    """Ensure at least one switch is configured"""
    if len(cfg.CONF.ml2_arista.get('switch_info')) < 1:
        msg = _('Required option - when "sec_group_support" is enabled, '
                'at least one switch must be specified ')
        LOG.exception(msg)
        raise arista_exc.AristaConfigError(msg=msg)

def run_openstack_sg_cmds(self, commands, switch):
    """Execute/sends a CAPI (Command API) command to EOS.

    In this method, the list of commands is wrapped with prefix and postfix
    commands to make it understandable by EOS.

    :param commands : List of command to be executed on EOS.
    :param switch: Endpoint on the Arista switch to be configured
    """
    if not switch:
        LOG.exception("No client found for switch")
        return []
    if len(commands) == 0:
        return []
    command_start = ['enable', 'configure']
    command_end = ['exit']
    full_command = command_start + commands + command_end
    return self._run_eos_cmds(full_command, switch)

def _run_eos_cmds(self, commands, switch):
    """Execute/sends a CAPI (Command API) command to EOS.

    This method is useful for running show commands that require no prefix
    or postfix commands.

    :param commands : List of commands to be executed on EOS.
    :param switch: Endpoint on the Arista switch to be configured
    """
    LOG.info(_LI('Executing command on Arista EOS: %s'), commands)

    try:
        # this returns array of return values for every command in
        # commands list
        ret = switch.execute(commands)
        LOG.info(_LI('Results of execution on Arista EOS: %s'), ret)
        return ret
    except Exception:
        msg = (_('Error occurred while trying to execute '
                 'commands %(cmd)s on EOS %(host)s') %
               {'cmd': commands, 'host': switch})
        LOG.exception(msg)

def _get_switchports(profile):
    """Return list of (switch_ip, interface) tuples from local_link_info"""
    switchports = []
    if profile.get('local_link_information'):
        for link in profile['local_link_information']:
            if 'switch_info' in link and 'port_id' in link:
                switch = link['switch_info']
                interface = link['port_id']
                switchports.append((switch, interface))
            else:
                LOG.warning("Incomplete link information: %s", link)
    return switchports

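# The binding profile shape this parser expects (hypothetical values):
#   {'local_link_information': [
#       {'switch_info': '10.0.0.1', 'port_id': 'Ethernet1'},
#       {'switch_info': '10.0.0.2', 'port_id': 'Ethernet2'}]}
# _get_switchports(profile)
#   -> [('10.0.0.1', 'Ethernet1'), ('10.0.0.2', 'Ethernet2')]
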
def _update_port_group_info(self, switches=None):
    """Refresh data on switch interfaces' port group membership"""
    if switches is None:
        switches = self._switches.keys()
    for switch_ip in switches:
        client = self._switches.get(switch_ip)
        ret = self._run_eos_cmds(['show interfaces'], client)
        if not ret or len(ret) == 0:
            LOG.warning("Unable to retrieve interface info for %s",
                        switch_ip)
            continue
        intf_info = ret[0]
        self._port_group_info[switch_ip] = intf_info.get('interfaces', {})

def _get_port_for_acl(self, port_id, switch):
    """Gets interface name for ACLs

    Finds the Port-Channel name if port_id is in a Port-Channel, otherwise
    ACLs are applied to Ethernet interface.

    :param port_id: Name of port from ironic db
    :param switch: Endpoint on the Arista switch to be configured
    """
    all_intf_info = self._port_group_info.get(switch, {})
    intf_info = all_intf_info.get(port_id, {})
    member_info = intf_info.get('interfaceMembership', '')
    port_group_info = re.search(r'Member of (?P<port_group>\S+)',
                                member_info)
    if port_group_info:
        port_id = port_group_info.group('port_group')
    return port_id

def _supported_rule(protocol, ethertype):
    """Checks that the rule is an IPv4 rule of a supported protocol"""
    if not protocol or protocol not in utils.SUPPORTED_SG_PROTOCOLS:
        return False
    if ethertype != n_const.IPv4:
        return False
    return True

def _format_rule(self, protocol, cidr, min_port, max_port, direction):
    """Get EOS formatted rule"""
    if cidr is None:
        cidr = 'any'
    if direction == n_const.INGRESS_DIRECTION:
        dst_ip = 'any'
        src_ip = cidr
    elif direction == n_const.EGRESS_DIRECTION:
        dst_ip = cidr
        src_ip = 'any'
    if protocol == n_const.PROTO_NAME_ICMP:
        # for ICMP rules, the port range fields carry the ICMP type and code
        rule = "permit icmp %s %s" % (src_ip, dst_ip)
        if min_port:
            rule += " %s" % (min_port)
            if max_port:
                rule += " %s" % (max_port)
    else:
        rule = "permit %s %s %s" % (protocol, src_ip, dst_ip)
        if min_port and max_port:
            rule += " range %s %s" % (min_port, max_port)
        elif min_port and not max_port:
            rule += " eq %s" % min_port
    return rule

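# Example EOS rules the formatter produces (hypothetical arguments; the
# neutron direction constants evaluate to 'ingress'/'egress'):
#   _format_rule('tcp', '10.0.0.0/24', 80, 443, 'ingress')
#       -> "permit tcp 10.0.0.0/24 any range 80 443"
#   _format_rule('icmp', None, 8, None, 'egress')
#       -> "permit icmp any any 8"
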
def _format_rules_for_eos(self, rules):
    """Format list of rules for EOS and sort into ingress/egress rules"""
    in_rules = []
    eg_rules = []
    for rule in rules:
        protocol = rule.get('protocol')
        cidr = rule.get('remote_ip_prefix', 'any')
        min_port = rule.get('port_range_min')
        max_port = rule.get('port_range_max')
        direction = rule.get('direction')
        ethertype = rule.get('ethertype')

        if not self._supported_rule(protocol, ethertype):
            continue

        formatted_rule = self._format_rule(protocol, cidr, min_port,
                                           max_port, direction)
        if rule['direction'] == n_const.INGRESS_DIRECTION:
            in_rules.append(formatted_rule)
        elif rule['direction'] == n_const.EGRESS_DIRECTION:
            eg_rules.append(formatted_rule)
    return in_rules, eg_rules

def run_cmds_on_all_switches(self, cmds):
    """Runs all cmds on all configured switches

    This helper is used for ACL and rule creation/deletion as ACLs and
    rules must exist on all switches.
    """
    for switch in self._switches.values():
        self.run_openstack_sg_cmds(cmds, switch)

def run_per_switch_cmds(self, switch_cmds):
    """Applies cmds to appropriate switches

    This takes in a switch->cmds mapping and runs only the set of cmds
    specified for a switch on that switch. This helper is used for
    applying/removing ACLs to/from interfaces as this config will vary
    from switch to switch.
    """
    for switch_ip, cmds in switch_cmds.items():
        switch = self._switches.get(switch_ip)
        self.run_openstack_sg_cmds(cmds, switch)

def _get_switches(self, profile):
    """Get set of switches referenced in a port binding profile"""
    switchports = self._get_switchports(profile)
    switches = set([switchport[0] for switchport in switchports])
    return switches

def get_create_security_group_commands(self, sg_id, sg_rules):
    """Commands for creating ACL"""
    cmds = []
    in_rules, eg_rules = self._format_rules_for_eos(sg_rules)
    cmds.append("ip access-list %s dynamic" %
                self._acl_name(sg_id, n_const.INGRESS_DIRECTION))
    for in_rule in in_rules:
        cmds.append(in_rule)
    cmds.append("exit")
    cmds.append("ip access-list %s dynamic" %
                self._acl_name(sg_id, n_const.EGRESS_DIRECTION))
    for eg_rule in eg_rules:
        cmds.append(eg_rule)
    cmds.append("exit")
    return cmds

def get_delete_security_group_commands(self, sg_id):
    """Commands for deleting ACL"""
    cmds = []
    cmds.append("no ip access-list %s" %
                self._acl_name(sg_id, n_const.INGRESS_DIRECTION))
    cmds.append("no ip access-list %s" %
                self._acl_name(sg_id, n_const.EGRESS_DIRECTION))
    return cmds

def _get_rule_cmds(self, sg_id, sg_rule, delete=False):
    """Helper for getting add/delete ACL rule commands"""
    rule_prefix = ""
    if delete:
        rule_prefix = "no "
    in_rules, eg_rules = self._format_rules_for_eos([sg_rule])
    cmds = []
    if in_rules:
        cmds.append("ip access-list %s dynamic" %
                    self._acl_name(sg_id, n_const.INGRESS_DIRECTION))
        for in_rule in in_rules:
            cmds.append(rule_prefix + in_rule)
        cmds.append("exit")
    if eg_rules:
        cmds.append("ip access-list %s dynamic" %
                    self._acl_name(sg_id, n_const.EGRESS_DIRECTION))
        for eg_rule in eg_rules:
            cmds.append(rule_prefix + eg_rule)
        cmds.append("exit")
    return cmds

def get_delete_security_group_rule_commands(self, sg_id, sg_rule):
    """Commands for removing a rule from ACLs"""
    return self._get_rule_cmds(sg_id, sg_rule, delete=True)

def _get_interface_commands(self, sg_id, profile, delete=False):
    """Helper for getting interface ACL apply/remove commands"""
    rule_prefix = ""
    if delete:
        rule_prefix = "no "
    switch_cmds = {}
    switchports = self._get_switchports(profile)
    for switch_ip, intf in switchports:
        cmds = []
        intf_id = self._get_port_for_acl(intf, switch_ip)
        cmds.append("interface %s" % intf_id)
        name = self._acl_name(sg_id, n_const.INGRESS_DIRECTION)
        cmds.append(rule_prefix + "ip access-group %s %s" %
                    (name, a_const.INGRESS_DIRECTION))
        name = self._acl_name(sg_id, n_const.EGRESS_DIRECTION)
        cmds.append(rule_prefix + "ip access-group %s %s" %
                    (name, a_const.EGRESS_DIRECTION))
        cmds.append("exit")
        if switch_ip not in switch_cmds:
            switch_cmds[switch_ip] = []
        switch_cmds[switch_ip].extend(cmds)
    return switch_cmds

def get_remove_security_group_commands(self, sg_id, profile):
    """Commands for removing ACL from interface"""
    return self._get_interface_commands(sg_id, profile, delete=True)

def _parse_acl_config(self, acl_config):
    """Parse configured ACLs and rules

    ACLs are returned as a dict of rule sets:
    {<eos_acl1_name>: set([<eos_acl1_rules>]),
     <eos_acl2_name>: set([<eos_acl2_rules>]),
     ...,
    }
    """
    parsed_acls = dict()
    for acl in acl_config['aclList']:
        parsed_acls[acl['name']] = set()
        for rule in acl['sequence']:
            parsed_acls[acl['name']].add(rule['text'])
    return parsed_acls

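# Shape of the eAPI payload this consumes and the dict it returns
# (hypothetical names and rules):
#   {'aclList': [{'name': 'SG-IN-abc123',
#                 'sequence': [{'text': 'permit tcp any any eq 22'}]}]}
#   -> {'SG-IN-abc123': set(['permit tcp any any eq 22'])}
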
def _parse_binding_config(self, binding_config):
    """Parse configured interface -> ACL bindings

    Bindings are returned as a set of (intf, name, direction) tuples:
    set([(intf1, acl_name, direction),
         (intf2, acl_name, direction),
         ...,
        ])
    """
    parsed_bindings = set()
    for acl in binding_config['aclList']:
        for intf in acl['configuredIngressIntfs']:
            parsed_bindings.add((intf['name'], acl['name'],
                                 a_const.INGRESS_DIRECTION))
        for intf in acl['configuredEgressIntfs']:
            parsed_bindings.add((intf['name'], acl['name'],
                                 a_const.EGRESS_DIRECTION))
    return parsed_bindings

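# Corresponding binding payload and parsed result (hypothetical names; the
# direction values are the a_const constants used above):
#   {'aclList': [{'name': 'SG-IN-abc123',
#                 'configuredIngressIntfs': [{'name': 'Ethernet1'}],
#                 'configuredEgressIntfs': []}]}
#   -> set([('Ethernet1', 'SG-IN-abc123', <ingress direction constant>)])
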
def _get_dynamic_acl_info(self, switch_ip):
    """Retrieve ACLs, ACL rules and interface bindings from switch"""
    cmds = ["enable",
            "show ip access-lists dynamic",
            "show ip access-lists summary dynamic"]
    switch = self._switches.get(switch_ip)
    _, acls, bindings = self._run_eos_cmds(cmds, switch)
    parsed_acls = self._parse_acl_config(acls)
    parsed_bindings = self._parse_binding_config(bindings)
    return parsed_acls, parsed_bindings

def get_expected_acls(self):
    """Query the neutron DB for Security Groups and Rules

    Groups and rules are returned as a dict of rule sets:
    {<eos_acl1_name>: set([<eos_acl1_rules>]),
     <eos_acl2_name>: set([<eos_acl2_rules>]),
     ...,
    }
    """
    security_groups = db_lib.get_security_groups()
    expected_acls = collections.defaultdict(set)
    for sg in security_groups:
        in_rules, out_rules = self._format_rules_for_eos(sg['rules'])
        ingress_acl_name = self._acl_name(sg['id'],
                                          n_const.INGRESS_DIRECTION)
        egress_acl_name = self._acl_name(sg['id'],
                                         n_const.EGRESS_DIRECTION)
        expected_acls[ingress_acl_name].update(in_rules)
        expected_acls[egress_acl_name].update(out_rules)
    return expected_acls

def get_expected_bindings(self):
    """Query the neutron DB for SG->switch interface bindings

    Bindings are returned as a dict of bindings for each switch:
    {<switch1>: set([(intf1, acl_name, direction),
                     (intf2, acl_name, direction)]),
     <switch2>: set([(intf1, acl_name, direction)]),
     ...,
    }
    """
    sg_bindings = db_lib.get_baremetal_sg_bindings()

    all_expected_bindings = collections.defaultdict(set)
    for sg_binding, port_binding in sg_bindings:
        sg_id = sg_binding['security_group_id']
        try:
            binding_profile = json.loads(port_binding.profile)
        except ValueError:
            binding_profile = {}
        switchports = self._get_switchports(binding_profile)
        for switch, intf in switchports:
            ingress_name = self._acl_name(sg_id, n_const.INGRESS_DIRECTION)
            egress_name = self._acl_name(sg_id, n_const.EGRESS_DIRECTION)
            all_expected_bindings[switch].add(
                (intf, ingress_name, a_const.INGRESS_DIRECTION))
            all_expected_bindings[switch].add(
                (intf, egress_name, a_const.EGRESS_DIRECTION))
    return all_expected_bindings

def adjust_bindings_for_lag(self, switch_ip, bindings):
    """Adjusting interface names for expected bindings where LAGs exist"""
    # Get latest LAG info for switch
    self._update_port_group_info([switch_ip])
    # Update bindings to account for LAG info
    adjusted_bindings = set()
    for binding in bindings:
        adjusted_bindings.add(
            (self._get_port_for_acl(binding[0], switch_ip),) + binding[1:])
    return adjusted_bindings

def get_sync_acl_cmds(self, switch_acls, expected_acls):
        """Returns the list of commands required to synchronize switch ACLs

        1. Identify unexpected ACLs and delete them
        2. Iterate over expected ACLs
           a. Add missing ACLs + all rules
           b. Delete unexpected rules
           c. Add missing rules
        """
        switch_cmds = list()
        # Delete any stale ACLs
        acls_to_delete = (set(switch_acls.keys()) - set(expected_acls.keys()))
        for acl in acls_to_delete:
            switch_cmds.append('no ip access-list %s' % acl)
        # Update or create ACLs and rules
        for acl, expected_rules in expected_acls.items():
            switch_rules = switch_acls.get(acl, set())
            rules_to_delete = switch_rules - expected_rules
            rules_to_add = expected_rules - switch_rules
            # Check if ACL requires create or rule changes
            if (acl in switch_acls and
                    len(rules_to_add | rules_to_delete) == 0):
                continue
            switch_cmds.append('ip access-list %s dynamic' % acl)
            # Delete any stale rules
            for rule in rules_to_delete:
                switch_cmds.append('no ' + rule)
            # Add any missing rules
            for rule in rules_to_add:
                switch_cmds.append(rule)
            switch_cmds.append('exit')
        return switch_cmds
Returns the list of commands required to synchronize switch ACLs

1. Identify unexpected ACLs and delete them
2. Iterate over expected ACLs
   a. Add missing ACLs + all rules
   b. Delete unexpected rules
   c. Add missing rules
entailment
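A worked example of the set arithmetic above, with hypothetical ACL names and rules; the resulting command list is shown in comments.

# Hypothetical inputs: one stale ACL and one ACL missing a rule
switch_acls = {'SG-IN-1': {'permit tcp any any eq 22'},
               'SG-STALE': {'permit ip any any'}}
expected_acls = {'SG-IN-1': {'permit tcp any any eq 22',
                             'permit tcp any any eq 443'}}
# The method would emit:
# ['no ip access-list SG-STALE',
#  'ip access-list SG-IN-1 dynamic',
#  'permit tcp any any eq 443',
#  'exit']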
def get_sync_binding_cmds(self, switch_bindings, expected_bindings): """Returns the list of commands required to synchronize ACL bindings 1. Delete any unexpected bindings 2. Add any missing bindings """ switch_cmds = list() # Update any necessary switch interface ACLs bindings_to_delete = switch_bindings - expected_bindings bindings_to_add = expected_bindings - switch_bindings for intf, acl, direction in bindings_to_delete: switch_cmds.extend(['interface %s' % intf, 'no ip access-group %s %s' % (acl, direction), 'exit']) for intf, acl, direction in bindings_to_add: switch_cmds.extend(['interface %s' % intf, 'ip access-group %s %s' % (acl, direction), 'exit']) return switch_cmds
Returns the list of commands required to synchronize ACL bindings 1. Delete any unexpected bindings 2. Add any missing bindings
entailment
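A companion example for the binding diff, again with made-up interfaces and ACL names; set ordering within each group is arbitrary.

# Hypothetical bindings: one stale, one missing
switch_bindings = {('Ethernet1', 'SG-IN-1', 'in'),
                   ('Ethernet3', 'SG-STALE', 'in')}
expected_bindings = {('Ethernet1', 'SG-IN-1', 'in'),
                     ('Ethernet2', 'SG-IN-1', 'in')}
# The method would emit:
# ['interface Ethernet3', 'no ip access-group SG-STALE in', 'exit',
#  'interface Ethernet2', 'ip access-group SG-IN-1 in', 'exit']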
def read_price_data(files, name_func=None):
    """
    Convenience function for reading in pricing data from csv files

    Parameters
    ----------
    files: list
        List of strings referring to csv files to read data in from, first
        column should be dates
    name_func: func
        A function to apply to the file strings to infer the instrument name,
        used in the second level of the MultiIndex index. Default is the file
        name excluding the pathname and file ending,
        e.g. /path/to/file/name.csv -> name

    Returns
    -------
    A pandas.DataFrame with a pandas.MultiIndex where the top level is
    pandas.Timestamps and the second level is instrument names. Columns are
    given by the csv file columns.
    """
    if name_func is None:
        def name_func(x):
            return os.path.split(x)[1].split(".")[0]

    dfs = []
    for f in files:
        name = name_func(f)
        df = pd.read_csv(f, index_col=0, parse_dates=True)
        df.sort_index(inplace=True)
        df.index = pd.MultiIndex.from_product([df.index, [name]],
                                              names=["date", "contract"])
        dfs.append(df)

    return pd.concat(dfs, axis=0, sort=False).sort_index()
Convenience function for reading in pricing data from csv files

Parameters
----------
files: list
    List of strings referring to csv files to read data in from, first
    column should be dates
name_func: func
    A function to apply to the file strings to infer the instrument name,
    used in the second level of the MultiIndex index. Default is the file
    name excluding the pathname and file ending,
    e.g. /path/to/file/name.csv -> name

Returns
-------
A pandas.DataFrame with a pandas.MultiIndex where the top level is
pandas.Timestamps and the second level is instrument names. Columns are
given by the csv file columns.
entailment
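A usage sketch, assuming a hypothetical directory of per-contract CSV files (one file per contract, dates in the first column).

import glob
import mapping.util as util

# Hypothetical layout: /path/to/prices/CLF5.csv, /path/to/prices/CLG5.csv, ...
files = glob.glob("/path/to/prices/*.csv")
prices = util.read_price_data(files)
# prices now has a (date, contract) MultiIndex; a custom name_func could
# instead derive names like "CME-CLF5" from the full file path.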
def flatten(weights):
    """
    Flatten weights into a long DataFrame.

    Parameters
    ----------
    weights: pandas.DataFrame or dict
        A DataFrame of instrument weights with a MultiIndex where the top
        level contains pandas.Timestamps and the second level is instrument
        names. The columns consist of generic names. If dict is given this
        should be a dict of pandas.DataFrame in the above format, with keys
        for different root generics, e.g. 'CL'

    Returns
    -------
    A long DataFrame of weights, where columns are "date", "contract",
    "generic" and "weight". If a dictionary is passed, DataFrame will contain
    additional column "key" containing the key value and be sorted according
    to this key value.

    Example
    -------
    >>> import pandas as pd
    >>> import mapping.util as util
    >>> vals = [[1, 0], [0, 1], [1, 0], [0, 1]]
    >>> widx = pd.MultiIndex.from_tuples([(pd.Timestamp('2015-01-03'), 'CLF5'),
    ...                                   (pd.Timestamp('2015-01-03'), 'CLG5'),
    ...                                   (pd.Timestamp('2015-01-04'), 'CLG5'),
    ...                                   (pd.Timestamp('2015-01-04'), 'CLH5')])
    >>> weights = pd.DataFrame(vals, index=widx, columns=["CL1", "CL2"])
    >>> util.flatten(weights)
    """  # NOQA
    if isinstance(weights, pd.DataFrame):
        wts = weights.stack().reset_index()
        wts.columns = ["date", "contract", "generic", "weight"]
    elif isinstance(weights, dict):
        wts = []
        for key in sorted(weights.keys()):
            wt = weights[key].stack().reset_index()
            wt.columns = ["date", "contract", "generic", "weight"]
            wt.loc[:, "key"] = key
            wts.append(wt)
        wts = pd.concat(wts, axis=0).reset_index(drop=True)
    else:
        raise ValueError("weights must be pd.DataFrame or dict")

    return wts
Flatten weights into a long DataFrame.

Parameters
----------
weights: pandas.DataFrame or dict
    A DataFrame of instrument weights with a MultiIndex where the top
    level contains pandas.Timestamps and the second level is instrument
    names. The columns consist of generic names. If dict is given this
    should be a dict of pandas.DataFrame in the above format, with keys
    for different root generics, e.g. 'CL'

Returns
-------
A long DataFrame of weights, where columns are "date", "contract",
"generic" and "weight". If a dictionary is passed, DataFrame will contain
additional column "key" containing the key value and be sorted according
to this key value.

Example
-------
>>> import pandas as pd
>>> import mapping.util as util
>>> vals = [[1, 0], [0, 1], [1, 0], [0, 1]]
>>> widx = pd.MultiIndex.from_tuples([(pd.Timestamp('2015-01-03'), 'CLF5'),
...                                   (pd.Timestamp('2015-01-03'), 'CLG5'),
...                                   (pd.Timestamp('2015-01-04'), 'CLG5'),
...                                   (pd.Timestamp('2015-01-04'), 'CLH5')])
>>> weights = pd.DataFrame(vals, index=widx, columns=["CL1", "CL2"])
>>> util.flatten(weights)
entailment
def unflatten(flat_weights):
    """
    Pivot weights from long DataFrame into weighting matrix.

    Parameters
    ----------
    flat_weights: pandas.DataFrame
        A long DataFrame of weights, where columns are "date", "contract",
        "generic", "weight" and optionally "key". If "key" column is present
        a dictionary of unflattened DataFrames is returned with the
        dictionary keys corresponding to the "key" column and each sub
        DataFrame containing rows for this key.

    Returns
    -------
    A DataFrame or dict of DataFrames of instrument weights with a MultiIndex
    where the top level contains pandas.Timestamps and the second level is
    instrument names. The columns consist of generic names. If dict is
    returned the dict keys correspond to the "key" column of the input.

    Example
    -------
    >>> import pandas as pd
    >>> from pandas import Timestamp as TS
    >>> import mapping.util as util
    >>> long_wts = pd.DataFrame(
    ...     {"date": [TS('2015-01-03')] * 4 + [TS('2015-01-04')] * 4,
    ...      "contract": ['CLF5'] * 2 + ['CLG5'] * 4 + ['CLH5'] * 2,
    ...      "generic": ["CL1", "CL2"] * 4,
    ...      "weight": [1, 0, 0, 1, 1, 0, 0, 1]}
    ... ).loc[:, ["date", "contract", "generic", "weight"]]
    >>> util.unflatten(long_wts)

    See also: calc_rets()
    """  # NOQA
    # membership test avoids the deprecated Index.contains()
    if "key" in flat_weights.columns:
        weights = {}
        for key in flat_weights.loc[:, "key"].unique():
            flt_wts = flat_weights.loc[flat_weights.loc[:, "key"] == key, :]
            flt_wts = flt_wts.drop(labels="key", axis=1)
            wts = flt_wts.pivot_table(index=["date", "contract"],
                                      columns=["generic"],
                                      values=["weight"])
            wts.columns = wts.columns.droplevel(0)
            weights[key] = wts
    else:
        weights = flat_weights.pivot_table(index=["date", "contract"],
                                           columns=["generic"],
                                           values=["weight"])
        weights.columns = weights.columns.droplevel(0)

    return weights
Pivot weights from long DataFrame into weighting matrix. Parameters ---------- flat_weights: pandas.DataFrame A long DataFrame of weights, where columns are "date", "contract", "generic", "weight" and optionally "key". If "key" column is present a dictionary of unflattened DataFrames is returned with the dictionary keys corresponding to the "key" column and each sub DataFrame containing rows for this key. Returns ------- A DataFrame or dict of DataFrames of instrument weights with a MultiIndex where the top level contains pandas.Timestamps and the second level is instrument names. The columns consist of generic names. If dict is returned the dict keys correspond to the "key" column of the input. Example ------- >>> import pandas as pd >>> from pandas import Timestamp as TS >>> import mapping.util as util >>> long_wts = pd.DataFrame( ... {"date": [TS('2015-01-03')] * 4 + [TS('2015-01-04')] * 4, ... "contract": ['CLF5'] * 2 + ['CLG5'] * 4 + ['CLH5'] * 2, ... "generic": ["CL1", "CL2"] * 4, ... "weight": [1, 0, 0, 1, 1, 0, 0, 1]} ... ).loc[:, ["date", "contract", "generic", "weight"]] >>> util.unflatten(long_wts) See also: calc_rets()
entailment
def calc_rets(returns, weights):
    """
    Calculate continuous return series for futures instruments. These
    consist of weighted underlying instrument returns, whose weights can
    vary over time.

    Parameters
    ----------
    returns: pandas.Series or dict
        A Series of instrument returns with a MultiIndex where the top level
        is pandas.Timestamps and the second level is instrument names.
        Values correspond to one period instrument returns. returns should
        be available for all Timestamps and instruments provided in weights.
        If dict is given this should be a dict of pandas.Series in the above
        format, with keys which are a subset of the keys given in weights
    weights: pandas.DataFrame or dict
        A DataFrame of instrument weights with a MultiIndex where the top
        level contains pandas.Timestamps and the second level is instrument
        names. The columns consist of generic names. If dict is given this
        should be a dict of pandas.DataFrame in the above format, with keys
        for different root generics, e.g. 'CL'

    Returns
    -------
    A pandas.DataFrame of continuous returns for generics. The index is
    pandas.Timestamps and the columns are generic names, corresponding to
    weights.columns

    Examples
    --------
    >>> import pandas as pd
    >>> import mapping.util as util
    >>> idx = pd.MultiIndex.from_tuples([(pd.Timestamp('2015-01-02'), 'CLF5'),
    ...                                  (pd.Timestamp('2015-01-03'), 'CLF5'),
    ...                                  (pd.Timestamp('2015-01-03'), 'CLG5'),
    ...                                  (pd.Timestamp('2015-01-04'), 'CLF5'),
    ...                                  (pd.Timestamp('2015-01-04'), 'CLG5'),
    ...                                  (pd.Timestamp('2015-01-05'), 'CLG5')])
    >>> price = pd.Series([45.63, 45.85, 46.13, 46.05, 46.25, 46.20], index=idx)
    >>> vals = [1, 1/2, 1/2, 1]
    >>> widx = pd.MultiIndex.from_tuples([(pd.Timestamp('2015-01-03'), 'CLF5'),
    ...                                   (pd.Timestamp('2015-01-04'), 'CLF5'),
    ...                                   (pd.Timestamp('2015-01-04'), 'CLG5'),
    ...                                   (pd.Timestamp('2015-01-05'), 'CLG5')])
    >>> weights = pd.DataFrame(vals, index=widx, columns=["CL1"])
    >>> irets = price.groupby(level=-1).pct_change()
    >>> util.calc_rets(irets, weights)
    """  # NOQA
    if not isinstance(returns, dict):
        returns = {"": returns}
    if not isinstance(weights, dict):
        weights = {"": weights}

    generic_superset = []
    for root in weights:
        generic_superset.extend(weights[root].columns.tolist())
    if len(set(generic_superset)) != len(generic_superset):
        raise ValueError("Columns for weights must all be unique")

    _check_indices(returns, weights)

    grets = []
    cols = []
    for root in returns:
        root_wts = weights[root]
        root_rets = returns[root]
        for generic in root_wts.columns:
            gnrc_wts = root_wts.loc[:, generic]
            # drop generics where weight is 0, this avoids potential KeyError
            # in later indexing of rets even when ret has weight of 0
            gnrc_wts = gnrc_wts.loc[gnrc_wts != 0]
            rets = root_rets.loc[gnrc_wts.index]
            # groupby time
            group_rets = (rets * gnrc_wts).groupby(level=0)
            grets.append(group_rets.apply(pd.DataFrame.sum, skipna=False))
        cols.extend(root_wts.columns.tolist())
    rets = pd.concat(grets, axis=1, keys=cols).sort_index(axis=1)
    return rets
Calculate continuous return series for futures instruments. These
consist of weighted underlying instrument returns, whose weights can
vary over time.

Parameters
----------
returns: pandas.Series or dict
    A Series of instrument returns with a MultiIndex where the top level
    is pandas.Timestamps and the second level is instrument names.
    Values correspond to one period instrument returns. returns should
    be available for all Timestamps and instruments provided in weights.
    If dict is given this should be a dict of pandas.Series in the above
    format, with keys which are a subset of the keys given in weights
weights: pandas.DataFrame or dict
    A DataFrame of instrument weights with a MultiIndex where the top
    level contains pandas.Timestamps and the second level is instrument
    names. The columns consist of generic names. If dict is given this
    should be a dict of pandas.DataFrame in the above format, with keys
    for different root generics, e.g. 'CL'

Returns
-------
A pandas.DataFrame of continuous returns for generics. The index is
pandas.Timestamps and the columns are generic names, corresponding to
weights.columns

Examples
--------
>>> import pandas as pd
>>> import mapping.util as util
>>> idx = pd.MultiIndex.from_tuples([(pd.Timestamp('2015-01-02'), 'CLF5'),
...                                  (pd.Timestamp('2015-01-03'), 'CLF5'),
...                                  (pd.Timestamp('2015-01-03'), 'CLG5'),
...                                  (pd.Timestamp('2015-01-04'), 'CLF5'),
...                                  (pd.Timestamp('2015-01-04'), 'CLG5'),
...                                  (pd.Timestamp('2015-01-05'), 'CLG5')])
>>> price = pd.Series([45.63, 45.85, 46.13, 46.05, 46.25, 46.20], index=idx)
>>> vals = [1, 1/2, 1/2, 1]
>>> widx = pd.MultiIndex.from_tuples([(pd.Timestamp('2015-01-03'), 'CLF5'),
...                                   (pd.Timestamp('2015-01-04'), 'CLF5'),
...                                   (pd.Timestamp('2015-01-04'), 'CLG5'),
...                                   (pd.Timestamp('2015-01-05'), 'CLG5')])
>>> weights = pd.DataFrame(vals, index=widx, columns=["CL1"])
>>> irets = price.groupby(level=-1).pct_change()
>>> util.calc_rets(irets, weights)
entailment
def reindex(prices, index, limit):
    """
    Reindex a pd.Series of prices such that when instrument level returns
    are calculated they are compatible with a pd.MultiIndex of instrument
    weights in calc_rets(). This amounts to reindexing the series by an
    augmented version of index which includes the preceding date for the
    first appearance of each instrument. Fill forward missing values with
    previous price up to some limit.

    Parameters
    ----------
    prices: pandas.Series
        A Series of instrument prices with a MultiIndex where the top level
        is pandas.Timestamps and the second level is instrument names.
    index: pandas.MultiIndex
        A MultiIndex where the top level contains pandas.Timestamps and the
        second level is instrument names.
    limit: int
        Number of periods to fill prices forward.

    Returns
    -------
    A pandas.Series of reindexed prices where the top level is
    pandas.Timestamps and the second level is instrument names.

    See also: calc_rets()

    Example
    -------
    >>> import pandas as pd
    >>> from pandas import Timestamp as TS
    >>> import mapping.util as util
    >>> idx = pd.MultiIndex.from_tuples([(TS('2015-01-04'), 'CLF5'),
    ...                                  (TS('2015-01-05'), 'CLF5'),
    ...                                  (TS('2015-01-05'), 'CLH5'),
    ...                                  (TS('2015-01-06'), 'CLF5'),
    ...                                  (TS('2015-01-06'), 'CLH5'),
    ...                                  (TS('2015-01-07'), 'CLF5'),
    ...                                  (TS('2015-01-07'), 'CLH5')])
    >>> prices = pd.Series([100.12, 101.50, 102.51, 103.51, 102.73, 102.15,
    ...                     104.37], index=idx)
    >>> widx = pd.MultiIndex.from_tuples([(TS('2015-01-05'), 'CLF5'),
    ...                                   (TS('2015-01-05'), 'CLH5'),
    ...                                   (TS('2015-01-07'), 'CLF5'),
    ...                                   (TS('2015-01-07'), 'CLH5')])
    >>> util.reindex(prices, widx, limit=0)
    """
    if not index.is_unique:
        raise ValueError("'index' must be unique")
    index = index.sort_values()
    index.names = ["date", "instrument"]
    price_dts = prices.sort_index().index.unique(level=0)
    index_dts = index.unique(level=0)
    mask = price_dts < index_dts[0]
    leading_price_dts = price_dts[mask]
    if len(leading_price_dts) == 0:
        raise ValueError("'prices' must have a date preceding first date in "
                         "'index'")
    prev_dts = index_dts.tolist()
    prev_dts.insert(0, leading_price_dts[-1])
    # avoid just lagging to preserve the calendar
    previous_date = dict(zip(index_dts, prev_dts))
    first_instr = index.to_frame(index=False)
    first_instr = (
        first_instr.drop_duplicates(subset=["instrument"], keep="first")
    )
    first_instr.loc[:, "prev_date"] = (
        first_instr.loc[:, "date"].apply(lambda x: previous_date[x])
    )
    additional_indices = pd.MultiIndex.from_tuples(
        first_instr.loc[:, ["prev_date", "instrument"]].values.tolist()
    )
    augmented_index = index.union(additional_indices).sort_values()
    prices = prices.reindex(augmented_index)
    if limit != 0:
        prices = prices.groupby(level=1).fillna(method="ffill", limit=limit)
    return prices
Reindex a pd.Series of prices such that when instrument level returns
are calculated they are compatible with a pd.MultiIndex of instrument
weights in calc_rets(). This amounts to reindexing the series by an
augmented version of index which includes the preceding date for the
first appearance of each instrument. Fill forward missing values with
previous price up to some limit.

Parameters
----------
prices: pandas.Series
    A Series of instrument prices with a MultiIndex where the top level
    is pandas.Timestamps and the second level is instrument names.
index: pandas.MultiIndex
    A MultiIndex where the top level contains pandas.Timestamps and the
    second level is instrument names.
limit: int
    Number of periods to fill prices forward.

Returns
-------
A pandas.Series of reindexed prices where the top level is
pandas.Timestamps and the second level is instrument names.

See also: calc_rets()

Example
-------
>>> import pandas as pd
>>> from pandas import Timestamp as TS
>>> import mapping.util as util
>>> idx = pd.MultiIndex.from_tuples([(TS('2015-01-04'), 'CLF5'),
...                                  (TS('2015-01-05'), 'CLF5'),
...                                  (TS('2015-01-05'), 'CLH5'),
...                                  (TS('2015-01-06'), 'CLF5'),
...                                  (TS('2015-01-06'), 'CLH5'),
...                                  (TS('2015-01-07'), 'CLF5'),
...                                  (TS('2015-01-07'), 'CLH5')])
>>> prices = pd.Series([100.12, 101.50, 102.51, 103.51, 102.73, 102.15,
...                     104.37], index=idx)
>>> widx = pd.MultiIndex.from_tuples([(TS('2015-01-05'), 'CLF5'),
...                                   (TS('2015-01-05'), 'CLH5'),
...                                   (TS('2015-01-07'), 'CLF5'),
...                                   (TS('2015-01-07'), 'CLH5')])
>>> util.reindex(prices, widx, limit=0)
entailment
def calc_trades(current_contracts, desired_holdings, trade_weights, prices,
                multipliers, **kwargs):
    """
    Calculate the number of tradeable contracts for rebalancing from a set
    of current contract holdings to a set of desired generic notional
    holdings based on prevailing prices and mapping from generics to
    tradeable instruments. Differences between current holdings and desired
    holdings are treated as 0. Zero trades are dropped.

    Parameters
    ----------
    current_contracts: pandas.Series
        Series of current number of contracts held for tradeable instruments.
        Can pass 0 if all holdings are 0.
    desired_holdings: pandas.Series
        Series of desired holdings in base notional currency of generics.
        Index is generic contracts, these should be the same generics as in
        trade_weights.
    trade_weights: pandas.DataFrame or dict
        A pandas.DataFrame of loadings of generic contracts on tradeable
        instruments **for a given date**. The columns refer to generic
        contracts and the index is strings representing instrument names.
        If dict is given keys should be root generic names, e.g. 'CL', and
        values should be pandas.DataFrames of loadings. The union of all
        columns should be a superset of the desired_holdings.index
    prices: pandas.Series
        Series of instrument prices. Index is instrument name and values are
        instrument prices. Extra instrument prices will be ignored.
    multipliers: pandas.Series
        Series of instrument multipliers. Index is instrument name and
        values are the multiplier associated with the contract.
        multipliers.index should be a superset of mapped desired_holdings
        instruments.
    kwargs: keyword arguments
        Keyword arguments to be passed to to_contracts()

    Returns
    -------
    A pandas.Series of instrument contract trades, lexicographically sorted.

    Example
    -------
    >>> import pandas as pd
    >>> import mapping.util as util
    >>> wts = pd.DataFrame([[0.5, 0], [0.5, 0.5], [0, 0.5]],
    ...                    index=["CLX16", "CLZ16", "CLF17"],
    ...                    columns=["CL1", "CL2"])
    >>> desired_holdings = pd.Series([200000, -50000], index=["CL1", "CL2"])
    >>> current_contracts = pd.Series([0, 1, 0],
    ...                               index=['CLX16', 'CLZ16', 'CLF17'])
    >>> prices = pd.Series([50.32, 50.41, 50.48],
    ...                    index=['CLX16', 'CLZ16', 'CLF17'])
    >>> multipliers = pd.Series([100, 100, 100],
    ...                         index=['CLX16', 'CLZ16', 'CLF17'])
    >>> trades = util.calc_trades(current_contracts, desired_holdings, wts,
    ...                           prices, multipliers)
    """
    if not isinstance(trade_weights, dict):
        trade_weights = {"": trade_weights}

    generics = []
    for key in trade_weights:
        generics.extend(trade_weights[key].columns)

    if not set(desired_holdings.index).issubset(set(generics)):
        raise ValueError("'desired_holdings.index' contains values which "
                         "cannot be mapped to tradeables.\n"
                         "Received: 'desired_holdings.index'\n {0}\n"
                         "Expected in 'trade_weights' set of columns:\n {1}\n"
                         .format(sorted(desired_holdings.index),
                                 sorted(generics)))

    desired_contracts = []
    for root_key in trade_weights:
        gnrc_weights = trade_weights[root_key]

        subset = gnrc_weights.columns.intersection(desired_holdings.index)
        gnrc_des_hlds = desired_holdings.loc[subset]
        gnrc_weights = gnrc_weights.loc[:, subset]
        # drop indexes where all non zero weights were in columns dropped
        # above
        gnrc_weights = gnrc_weights.loc[~(gnrc_weights == 0).all(axis=1)]

        instr_des_hlds = gnrc_des_hlds * gnrc_weights
        instr_des_hlds = instr_des_hlds.sum(axis=1)
        wprices = prices.loc[instr_des_hlds.index]
        desired_contracts.append(to_contracts(instr_des_hlds, wprices,
                                              multipliers, **kwargs))

    desired_contracts = pd.concat(desired_contracts, axis=0)

    trades = desired_contracts.subtract(current_contracts, fill_value=0)
    trades = trades.loc[trades != 0]
    trades = trades.sort_index()
    return trades
Calculate the number of tradeable contracts for rebalancing from a set
of current contract holdings to a set of desired generic notional
holdings based on prevailing prices and mapping from generics to
tradeable instruments. Differences between current holdings and desired
holdings are treated as 0. Zero trades are dropped.

Parameters
----------
current_contracts: pandas.Series
    Series of current number of contracts held for tradeable instruments.
    Can pass 0 if all holdings are 0.
desired_holdings: pandas.Series
    Series of desired holdings in base notional currency of generics.
    Index is generic contracts, these should be the same generics as in
    trade_weights.
trade_weights: pandas.DataFrame or dict
    A pandas.DataFrame of loadings of generic contracts on tradeable
    instruments **for a given date**. The columns refer to generic
    contracts and the index is strings representing instrument names.
    If dict is given keys should be root generic names, e.g. 'CL', and
    values should be pandas.DataFrames of loadings. The union of all
    columns should be a superset of the desired_holdings.index
prices: pandas.Series
    Series of instrument prices. Index is instrument name and values are
    instrument prices. Extra instrument prices will be ignored.
multipliers: pandas.Series
    Series of instrument multipliers. Index is instrument name and
    values are the multiplier associated with the contract.
    multipliers.index should be a superset of mapped desired_holdings
    instruments.
kwargs: keyword arguments
    Keyword arguments to be passed to to_contracts()

Returns
-------
A pandas.Series of instrument contract trades, lexicographically sorted.

Example
-------
>>> import pandas as pd
>>> import mapping.util as util
>>> wts = pd.DataFrame([[0.5, 0], [0.5, 0.5], [0, 0.5]],
...                    index=["CLX16", "CLZ16", "CLF17"],
...                    columns=["CL1", "CL2"])
>>> desired_holdings = pd.Series([200000, -50000], index=["CL1", "CL2"])
>>> current_contracts = pd.Series([0, 1, 0],
...                               index=['CLX16', 'CLZ16', 'CLF17'])
>>> prices = pd.Series([50.32, 50.41, 50.48],
...                    index=['CLX16', 'CLZ16', 'CLF17'])
>>> multipliers = pd.Series([100, 100, 100],
...                         index=['CLX16', 'CLZ16', 'CLF17'])
>>> trades = util.calc_trades(current_contracts, desired_holdings, wts,
...                           prices, multipliers)
entailment
def to_notional(instruments, prices, multipliers, desired_ccy=None,
                instr_fx=None, fx_rates=None):
    """
    Convert number of contracts of tradeable instruments to notional value of
    tradeable instruments in a desired currency.

    Parameters
    ----------
    instruments: pandas.Series
        Series of instrument holdings. Index is instrument name and values
        are number of contracts.
    prices: pandas.Series
        Series of instrument prices. Index is instrument name and values are
        instrument prices. prices.index should be a superset of
        instruments.index otherwise NaN is returned for instruments without
        prices
    multipliers: pandas.Series
        Series of instrument multipliers. Index is instrument name and
        values are the multiplier associated with the contract.
        multipliers.index should be a superset of instruments.index
    desired_ccy: str
        Three letter string representing desired currency to convert notional
        values to, e.g. 'USD'. If None is given currency conversion is
        ignored.
    instr_fx: pandas.Series
        Series of instrument fx denominations. Index is instrument name and
        values are three letter strings representing the currency the
        instrument is denominated in. instr_fx.index should match
        prices.index
    fx_rates: pandas.Series
        Series of fx rates used for conversion to desired_ccy. Index is
        strings representing the FX pair, e.g. 'AUDUSD' or 'USDCAD'. Values
        are the corresponding exchange rates.

    Returns
    -------
    pandas.Series of notional amounts of instruments with Index of
    instrument names

    Example
    -------
    >>> import pandas as pd
    >>> import mapping.util as util
    >>> current_contracts = pd.Series([-1, 1], index=['CLX16', 'CLZ16'])
    >>> prices = pd.Series([50.32, 50.41], index=['CLX16', 'CLZ16'])
    >>> multipliers = pd.Series([100, 100], index=['CLX16', 'CLZ16'])
    >>> ntln = util.to_notional(current_contracts, prices, multipliers)
    """
    notionals = _instr_conv(instruments, prices, multipliers, True,
                            desired_ccy, instr_fx, fx_rates)
    return notionals
Convert number of contracts of tradeable instruments to notional value of
tradeable instruments in a desired currency.

Parameters
----------
instruments: pandas.Series
    Series of instrument holdings. Index is instrument name and values
    are number of contracts.
prices: pandas.Series
    Series of instrument prices. Index is instrument name and values are
    instrument prices. prices.index should be a superset of
    instruments.index otherwise NaN is returned for instruments without
    prices
multipliers: pandas.Series
    Series of instrument multipliers. Index is instrument name and
    values are the multiplier associated with the contract.
    multipliers.index should be a superset of instruments.index
desired_ccy: str
    Three letter string representing desired currency to convert notional
    values to, e.g. 'USD'. If None is given currency conversion is
    ignored.
instr_fx: pandas.Series
    Series of instrument fx denominations. Index is instrument name and
    values are three letter strings representing the currency the
    instrument is denominated in. instr_fx.index should match
    prices.index
fx_rates: pandas.Series
    Series of fx rates used for conversion to desired_ccy. Index is
    strings representing the FX pair, e.g. 'AUDUSD' or 'USDCAD'. Values
    are the corresponding exchange rates.

Returns
-------
pandas.Series of notional amounts of instruments with Index of
instrument names

Example
-------
>>> import pandas as pd
>>> import mapping.util as util
>>> current_contracts = pd.Series([-1, 1], index=['CLX16', 'CLZ16'])
>>> prices = pd.Series([50.32, 50.41], index=['CLX16', 'CLZ16'])
>>> multipliers = pd.Series([100, 100], index=['CLX16', 'CLZ16'])
>>> ntln = util.to_notional(current_contracts, prices, multipliers)
entailment
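A currency-conversion sketch with made-up instruments, prices and rates; the 'AUDUSD' pair orientation simply follows the docstring's example, and the exact internal conversion convention lives in _instr_conv().

import pandas as pd
import mapping.util as util

# Hypothetical USD- and AUD-denominated instruments
holdings = pd.Series([-1, 1], index=['CLX16', 'APZ16'])
prices = pd.Series([50.32, 5200.0], index=['CLX16', 'APZ16'])
multipliers = pd.Series([1000, 25], index=['CLX16', 'APZ16'])
instr_fx = pd.Series(['USD', 'AUD'], index=['CLX16', 'APZ16'])
fx_rates = pd.Series([0.80], index=['AUDUSD'])
ntl_usd = util.to_notional(holdings, prices, multipliers, desired_ccy='USD',
                           instr_fx=instr_fx, fx_rates=fx_rates)
# CLX16: -1 * 50.32 * 1000 = -50320 USD (already in USD)
# APZ16:  1 * 5200 * 25 converted from AUD at 0.80 = 104000 USD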
def to_contracts(instruments, prices, multipliers, desired_ccy=None,
                 instr_fx=None, fx_rates=None, rounder=None):
    """
    Convert notional amount of tradeable instruments to number of instrument
    contracts, rounding to nearest integer number of contracts.

    Parameters
    ----------
    instruments: pandas.Series
        Series of instrument holdings. Index is instrument name and values
        are notional amount on instrument.
    prices: pandas.Series
        Series of instrument prices. Index is instrument name and values are
        instrument prices. prices.index should be a superset of
        instruments.index
    multipliers: pandas.Series
        Series of instrument multipliers. Index is instrument name and
        values are the multiplier associated with the contract.
        multipliers.index should be a superset of instruments.index
    desired_ccy: str
        Three letter string representing desired currency to convert notional
        values to, e.g. 'USD'. If None is given currency conversion is
        ignored.
    instr_fx: pandas.Series
        Series of instrument fx denominations. Index is instrument name and
        values are three letter strings representing the currency the
        instrument is denominated in. instr_fx.index should match
        prices.index
    fx_rates: pandas.Series
        Series of fx rates used for conversion to desired_ccy. Index is
        strings representing the FX pair, e.g. 'AUDUSD' or 'USDCAD'. Values
        are the corresponding exchange rates.
    rounder: function
        Function to round pd.Series contracts to integers, if None default
        pd.Series.round is used.

    Returns
    -------
    pandas.Series of contract numbers of instruments with Index of
    instrument names
    """
    contracts = _instr_conv(instruments, prices, multipliers, False,
                            desired_ccy, instr_fx, fx_rates)
    if rounder is None:
        rounder = pd.Series.round

    contracts = rounder(contracts)
    contracts = contracts.astype(int)
    return contracts
Convert notional amount of tradeable instruments to number of instrument
contracts, rounding to nearest integer number of contracts.

Parameters
----------
instruments: pandas.Series
    Series of instrument holdings. Index is instrument name and values
    are notional amount on instrument.
prices: pandas.Series
    Series of instrument prices. Index is instrument name and values are
    instrument prices. prices.index should be a superset of
    instruments.index
multipliers: pandas.Series
    Series of instrument multipliers. Index is instrument name and
    values are the multiplier associated with the contract.
    multipliers.index should be a superset of instruments.index
desired_ccy: str
    Three letter string representing desired currency to convert notional
    values to, e.g. 'USD'. If None is given currency conversion is
    ignored.
instr_fx: pandas.Series
    Series of instrument fx denominations. Index is instrument name and
    values are three letter strings representing the currency the
    instrument is denominated in. instr_fx.index should match
    prices.index
fx_rates: pandas.Series
    Series of fx rates used for conversion to desired_ccy. Index is
    strings representing the FX pair, e.g. 'AUDUSD' or 'USDCAD'. Values
    are the corresponding exchange rates.
rounder: function
    Function to round pd.Series contracts to integers, if None default
    pd.Series.round is used.

Returns
-------
pandas.Series of contract numbers of instruments with Index of
instrument names
entailment
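A usage sketch of the notional-to-contracts conversion, with made-up prices and multipliers and the default rounding.

import pandas as pd
import mapping.util as util

notionals = pd.Series([200000, -50000], index=['CLX16', 'CLZ16'])
prices = pd.Series([50.32, 50.41], index=['CLX16', 'CLZ16'])
multipliers = pd.Series([100, 100], index=['CLX16', 'CLZ16'])
contracts = util.to_contracts(notionals, prices, multipliers)
# 200000 / (50.32 * 100) ~= 39.75 -> 40 contracts
# -50000 / (50.41 * 100) ~= -9.92 -> -10 contracts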
def get_multiplier(weights, root_generic_multiplier):
    """
    Determine tradeable instrument multiplier based on generic asset
    multipliers and weights mapping from generics to tradeables.

    Parameters
    ----------
    weights: pandas.DataFrame or dict
        A pandas.DataFrame of loadings of generic contracts on tradeable
        instruments **for a given date**. The columns are integers referring
        to generic number indexed from 0, e.g. [0, 1], and the index is
        strings representing instrument names. If dict is given keys should
        be generic instrument names, e.g. 'CL', and values should be
        pandas.DataFrames of loadings. The union of all indexes should be a
        superset of the instruments.index
    root_generic_multiplier: pandas.Series
        Series of multipliers for generic instruments lexicographically
        sorted. If a dictionary of weights is given,
        root_generic_multiplier.index should correspond to the weights keys.

    Returns
    -------
    A pandas.Series of multipliers for tradeable instruments.

    Examples
    --------
    >>> import pandas as pd
    >>> import mapping.util as util
    >>> wts = pd.DataFrame([[0.5, 0], [0.5, 0.5], [0, 0.5]],
    ...                    index=["CLX16", "CLZ16", "CLF17"],
    ...                    columns=[0, 1])
    >>> ast_mult = pd.Series([1000], index=["CL"])
    >>> util.get_multiplier(wts, ast_mult)
    """
    if len(root_generic_multiplier) > 1 and not isinstance(weights, dict):
        raise ValueError("For multiple generic instruments weights must be a "
                         "dictionary")

    mults = []
    intrs = []
    # items() replaces the deprecated Series.iteritems()
    for ast, multiplier in root_generic_multiplier.items():
        if isinstance(weights, dict):
            weights_ast = weights[ast].index
        else:
            weights_ast = weights.index
        mults.extend(np.repeat(multiplier, len(weights_ast)))
        intrs.extend(weights_ast)

    imults = pd.Series(mults, intrs)
    imults = imults.sort_index()
    return imults
Determine tradeable instrument multiplier based on generic asset
multipliers and weights mapping from generics to tradeables.

Parameters
----------
weights: pandas.DataFrame or dict
    A pandas.DataFrame of loadings of generic contracts on tradeable
    instruments **for a given date**. The columns are integers referring
    to generic number indexed from 0, e.g. [0, 1], and the index is
    strings representing instrument names. If dict is given keys should
    be generic instrument names, e.g. 'CL', and values should be
    pandas.DataFrames of loadings. The union of all indexes should be a
    superset of the instruments.index
root_generic_multiplier: pandas.Series
    Series of multipliers for generic instruments lexicographically
    sorted. If a dictionary of weights is given,
    root_generic_multiplier.index should correspond to the weights keys.

Returns
-------
A pandas.Series of multipliers for tradeable instruments.

Examples
--------
>>> import pandas as pd
>>> import mapping.util as util
>>> wts = pd.DataFrame([[0.5, 0], [0.5, 0.5], [0, 0.5]],
...                    index=["CLX16", "CLZ16", "CLF17"],
...                    columns=[0, 1])
>>> ast_mult = pd.Series([1000], index=["CL"])
>>> util.get_multiplier(wts, ast_mult)
entailment
def weighted_expiration(weights, contract_dates): """ Calculate the days to expiration for generic futures, weighted by the composition of the underlying tradeable instruments. Parameters: ----------- weights: pandas.DataFrame A DataFrame of instrument weights with a MultiIndex where the top level contains pandas.Timestamps and the second level is instrument names. The columns consist of generic names. contract_dates: pandas.Series Series with index of tradeable contract names and pandas.Timestamps representing the last date of the roll as values Returns: -------- A pandas.DataFrame with columns of generic futures and index of dates. Values are the weighted average of days to expiration for the underlying contracts. Examples: --------- >>> import pandas as pd >>> import mapping.util as util >>> vals = [[1, 0, 1/2, 1/2, 0, 1, 0], [0, 1, 0, 1/2, 1/2, 0, 1]] >>> widx = pd.MultiIndex.from_tuples([(pd.Timestamp('2015-01-03'), 'CLF15'), ... (pd.Timestamp('2015-01-03'), 'CLG15'), ... (pd.Timestamp('2015-01-04'), 'CLF15'), ... (pd.Timestamp('2015-01-04'), 'CLG15'), ... (pd.Timestamp('2015-01-04'), 'CLH15'), ... (pd.Timestamp('2015-01-05'), 'CLG15'), ... (pd.Timestamp('2015-01-05'), 'CLH15')]) >>> weights = pd.DataFrame({"CL1": vals[0], "CL2": vals[1]}, index=widx) >>> contract_dates = pd.Series([pd.Timestamp('2015-01-20'), ... pd.Timestamp('2015-02-21'), ... pd.Timestamp('2015-03-20')], ... index=['CLF15', 'CLG15', 'CLH15']) >>> util.weighted_expiration(weights, contract_dates) """ # NOQA cols = weights.columns weights = weights.reset_index(level=-1) expiries = contract_dates.to_dict() weights.loc[:, "expiry"] = weights.iloc[:, 0].apply(lambda x: expiries[x]) diffs = (pd.DatetimeIndex(weights.expiry) - pd.Series(weights.index, weights.index)).apply(lambda x: x.days) weights = weights.loc[:, cols] wexp = weights.mul(diffs, axis=0).groupby(level=0).sum() return wexp
Calculate the days to expiration for generic futures, weighted by the composition of the underlying tradeable instruments. Parameters: ----------- weights: pandas.DataFrame A DataFrame of instrument weights with a MultiIndex where the top level contains pandas.Timestamps and the second level is instrument names. The columns consist of generic names. contract_dates: pandas.Series Series with index of tradeable contract names and pandas.Timestamps representing the last date of the roll as values Returns: -------- A pandas.DataFrame with columns of generic futures and index of dates. Values are the weighted average of days to expiration for the underlying contracts. Examples: --------- >>> import pandas as pd >>> import mapping.util as util >>> vals = [[1, 0, 1/2, 1/2, 0, 1, 0], [0, 1, 0, 1/2, 1/2, 0, 1]] >>> widx = pd.MultiIndex.from_tuples([(pd.Timestamp('2015-01-03'), 'CLF15'), ... (pd.Timestamp('2015-01-03'), 'CLG15'), ... (pd.Timestamp('2015-01-04'), 'CLF15'), ... (pd.Timestamp('2015-01-04'), 'CLG15'), ... (pd.Timestamp('2015-01-04'), 'CLH15'), ... (pd.Timestamp('2015-01-05'), 'CLG15'), ... (pd.Timestamp('2015-01-05'), 'CLH15')]) >>> weights = pd.DataFrame({"CL1": vals[0], "CL2": vals[1]}, index=widx) >>> contract_dates = pd.Series([pd.Timestamp('2015-01-20'), ... pd.Timestamp('2015-02-21'), ... pd.Timestamp('2015-03-20')], ... index=['CLF15', 'CLG15', 'CLH15']) >>> util.weighted_expiration(weights, contract_dates)
entailment
def _valid_baremetal_port(port):
    """Check if port is a baremetal port with exactly one security group"""
    if port.get(portbindings.VNIC_TYPE) != portbindings.VNIC_BAREMETAL:
        return False
    sgs = port.get('security_groups', [])
    if len(sgs) == 0:
        # Nothing to do
        return False
    if len(sgs) > 1:
        LOG.warning('SG provisioning failed for %(port)s. Only one '
                    'SG may be applied per port.',
                    {'port': port['id']})
        return False
    return True
Check if port is a baremetal port with exactly one security group
entailment
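Illustrative port dicts for the check above, assuming the standard neutron_lib portbindings constants; the ids and SG names are made up.

from neutron_lib.api.definitions import portbindings

port_ok = {'id': 'p1',
           portbindings.VNIC_TYPE: portbindings.VNIC_BAREMETAL,
           'security_groups': ['sg-1']}             # -> True
port_vm = {'id': 'p2',
           portbindings.VNIC_TYPE: portbindings.VNIC_NORMAL,
           'security_groups': ['sg-1']}             # -> False, not baremetal
port_multi = {'id': 'p3',
              portbindings.VNIC_TYPE: portbindings.VNIC_BAREMETAL,
              'security_groups': ['sg-1', 'sg-2']}  # -> False, logs a warning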
def synchronize_resources(self):
        """Synchronize worker with CVX

        All database queries must occur while the sync lock is held.
        This tightly couples reads with writes and ensures that an older
        read does not result in the last write.
        Eg:
        Worker 1 reads (P1 created)
        Worker 2 reads (P1 deleted)
        Worker 2 writes (Delete P1 from CVX)
        Worker 1 writes (Create P1 on CVX)
        By ensuring that all reads occur with the sync lock held, we ensure
        that Worker 1 completes its writes before Worker 2 is allowed to
        read. A failure to write results in a full resync and purges all
        reads from memory.

        It is also important that we compute resources to sync in reverse
        sync order in order to avoid missing dependencies on creation. Eg:
        If we query in sync order
        1. Query Instances -> I1 isn't there
        2. Query Port table -> Port P1 is there, connected to I1
        3. We send P1 to CVX without sending I1 -> Error raised
        But if we query P1 first:
        1. Query Ports P1 -> P1 is not there
        2. Query Instances -> find I1
        3. We create I1, not P1 -> harmless, mech driver creates P1
        Missing dependencies on deletion will helpfully result in the
        dependent resource not being created:
        1. Query Ports -> P1 is found
        2. Query Instances -> I1 not found
        3. Creating P1 fails on CVX
        """
        # Grab the sync lock
        if not self._rpc.sync_start():
            LOG.info("%(pid)s Failed to grab the sync lock",
                     {'pid': os.getpid()})
            greenthread.sleep(1)
            return

        for resource in self._resources_to_update:
            self.update_neutron_resource(resource)
        self._resources_to_update = list()

        # Sync any necessary resources.
        # We delete in reverse order and create in order to ensure that
        # dependent resources are deleted before the resources they depend
        # on and created after them
        for resource_type in reversed(self.sync_order):
            resource_type.delete_cvx_resources()
        for resource_type in self.sync_order:
            resource_type.create_cvx_resources()

        # Release the sync lock
        self._rpc.sync_end()

        # Update local uuid if this was a full sync
        if self._synchronizing_uuid:
            LOG.info("%(pid)s Full sync for cvx uuid %(uuid)s complete",
                     {'uuid': self._synchronizing_uuid, 'pid': os.getpid()})
            self._cvx_uuid = self._synchronizing_uuid
            self._synchronizing_uuid = None
Synchronize worker with CVX

All database queries must occur while the sync lock is held.
This tightly couples reads with writes and ensures that an older
read does not result in the last write.
Eg:
Worker 1 reads (P1 created)
Worker 2 reads (P1 deleted)
Worker 2 writes (Delete P1 from CVX)
Worker 1 writes (Create P1 on CVX)
By ensuring that all reads occur with the sync lock held, we ensure
that Worker 1 completes its writes before Worker 2 is allowed to
read. A failure to write results in a full resync and purges all
reads from memory.

It is also important that we compute resources to sync in reverse
sync order in order to avoid missing dependencies on creation. Eg:
If we query in sync order
1. Query Instances -> I1 isn't there
2. Query Port table -> Port P1 is there, connected to I1
3. We send P1 to CVX without sending I1 -> Error raised
But if we query P1 first:
1. Query Ports P1 -> P1 is not there
2. Query Instances -> find I1
3. We create I1, not P1 -> harmless, mech driver creates P1
Missing dependencies on deletion will helpfully result in the
dependent resource not being created:
1. Query Ports -> P1 is found
2. Query Instances -> I1 not found
3. Creating P1 fails on CVX
entailment
def register(self, resource, event, trigger, **kwargs): """Called in trunk plugin's AFTER_INIT""" super(AristaTrunkDriver, self).register(resource, event, trigger, kwargs) registry.subscribe(self.subport_create, resources.SUBPORTS, events.AFTER_CREATE) registry.subscribe(self.subport_delete, resources.SUBPORTS, events.AFTER_DELETE) registry.subscribe(self.trunk_create, resources.TRUNK, events.AFTER_CREATE) registry.subscribe(self.trunk_update, resources.TRUNK, events.AFTER_UPDATE) registry.subscribe(self.trunk_delete, resources.TRUNK, events.AFTER_DELETE) self.core_plugin = directory.get_plugin() LOG.debug("Arista trunk driver initialized.")
Called in trunk plugin's AFTER_INIT
entailment
def create_router_on_eos(self, router_name, rdm, server): """Creates a router on Arista HW Device. :param router_name: globally unique identifier for router/VRF :param rdm: A value generated by hashing router name :param server: Server endpoint on the Arista switch to be configured """ cmds = [] rd = "%s:%s" % (rdm, rdm) for c in self.routerDict['create']: cmds.append(c.format(router_name, rd)) if self._mlag_configured: mac = VIRTUAL_ROUTER_MAC for c in self._additionalRouterCmdsDict['create']: cmds.append(c.format(mac)) self._run_config_cmds(cmds, server)
Creates a router on Arista HW Device. :param router_name: globally unique identifier for router/VRF :param rdm: A value generated by hashing router name :param server: Server endpoint on the Arista switch to be configured
entailment
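A sketch of the template expansion above, assuming a hypothetical 'create' template list; the real templates live in the driver's command dictionaries, so both the template strings and the example values are illustrative only.

import hashlib

# Hypothetical command templates: {0} is the router/VRF name, {1} the RD
routerDict = {'create': ['vrf definition {0}',
                         'rd {1}',
                         'exit']}
router_name = '__OpenStack__tenant1-router1'  # hypothetical name format
hashed = hashlib.sha256(router_name.encode('utf-8'))
rdm = str(int(hashed.hexdigest(), 16) % 65536)
rd = "%s:%s" % (rdm, rdm)
cmds = [c.format(router_name, rd) for c in routerDict['create']]
# cmds == ['vrf definition __OpenStack__tenant1-router1',
#          'rd <rdm>:<rdm>',
#          'exit']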
def delete_router_from_eos(self, router_name, server): """Deletes a router from Arista HW Device. :param router_name: globally unique identifier for router/VRF :param server: Server endpoint on the Arista switch to be configured """ cmds = [] for c in self.routerDict['delete']: cmds.append(c.format(router_name)) if self._mlag_configured: for c in self._additionalRouterCmdsDict['delete']: cmds.append(c) self._run_config_cmds(cmds, server)
Deletes a router from Arista HW Device. :param router_name: globally unique identifier for router/VRF :param server: Server endpoint on the Arista switch to be configured
entailment
def add_interface_to_router(self, segment_id, router_name, gip, router_ip,
                                mask, server):
        """Adds an interface to existing HW router on Arista HW device.

        :param segment_id: VLAN Id associated with interface that is added
        :param router_name: globally unique identifier for router/VRF
        :param gip: Gateway IP associated with the subnet
        :param router_ip: IP address of the router
        :param mask: subnet mask to be used
        :param server: Server endpoint on the Arista switch to be configured
        """
        if not segment_id:
            segment_id = DEFAULT_VLAN

        cmds = []
        for c in self._interfaceDict['add']:
            if self._mlag_configured:
                # In VARP config, use the router IP; else, use the gateway
                # IP address.
                ip = router_ip
            else:
                ip = gip + '/' + mask
            cmds.append(c.format(segment_id, router_name, ip))

        if self._mlag_configured:
            for c in self._additionalInterfaceCmdsDict['add']:
                cmds.append(c.format(gip))

        self._run_config_cmds(cmds, server)
Adds an interface to existing HW router on Arista HW device. :param segment_id: VLAN Id associated with interface that is added :param router_name: globally unique identifier for router/VRF :param gip: Gateway IP associated with the subnet :param router_ip: IP address of the router :param mask: subnet mask to be used :param server: Server endpoint on the Arista switch to be configured
entailment
def delete_interface_from_router(self, segment_id, router_name, server):
        """Deletes an interface from existing HW router on Arista HW device.

        :param segment_id: VLAN Id associated with interface that is removed
        :param router_name: globally unique identifier for router/VRF
        :param server: Server endpoint on the Arista switch to be configured
        """
        if not segment_id:
            segment_id = DEFAULT_VLAN
        cmds = []
        for c in self._interfaceDict['remove']:
            cmds.append(c.format(segment_id))

        self._run_config_cmds(cmds, server)
Deletes an interface from existing HW router on Arista HW device.

:param segment_id: VLAN Id associated with interface that is removed
:param router_name: globally unique identifier for router/VRF
:param server: Server endpoint on the Arista switch to be configured
entailment
def create_router(self, context, router):
        """Creates a router on Arista Switch.

        Deals with multiple configurations - such as Router per VRF,
        a router in default VRF, Virtual Router in MLAG configurations
        """
        if router:
            router_name = self._arista_router_name(router['id'],
                                                   router['name'])

            hashed = hashlib.sha256(router_name.encode('utf-8'))
            rdm = str(int(hashed.hexdigest(), 16) % 65536)

            mlag_peer_failed = False
            for s in self._servers:
                try:
                    self.create_router_on_eos(router_name, rdm, s)
                    mlag_peer_failed = False
                except Exception:
                    if self._mlag_configured and not mlag_peer_failed:
                        # With paired switches, it is OK to fail on one switch
                        mlag_peer_failed = True
                    else:
                        msg = (_('Failed to create router %s on EOS') %
                               router_name)
                        LOG.exception(msg)
                        raise arista_exc.AristaServicePluginRpcError(msg=msg)
Creates a router on Arista Switch. Deals with multiple configurations - such as Router per VRF, a router in default VRF, Virtual Router in MLAG configurations
entailment
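A worked example of the route-distinguisher modifier derivation used above; the router name format shown is a hypothetical output of _arista_router_name.

import hashlib

router_name = '__OpenStack__1234-myrouter'  # hypothetical name format
hashed = hashlib.sha256(router_name.encode('utf-8'))
rdm = str(int(hashed.hexdigest(), 16) % 65536)
# rdm is a stable value in [0, 65535] derived only from the router name,
# so every switch (including both MLAG peers) computes the same RD
# without any coordination.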
def delete_router(self, context, router_id, router):
        """Deletes a router from Arista Switch."""
        if router:
            router_name = self._arista_router_name(router_id, router['name'])
            mlag_peer_failed = False
            for s in self._servers:
                try:
                    self.delete_router_from_eos(router_name, s)
                    mlag_peer_failed = False
                except Exception:
                    if self._mlag_configured and not mlag_peer_failed:
                        # With paired switches, it is OK to fail on one switch
                        mlag_peer_failed = True
                    else:
                        msg = (_('Failed to delete router %s on EOS') %
                               router_name)
                        LOG.exception(msg)
                        raise arista_exc.AristaServicePluginRpcError(msg=msg)
Deletes a router from Arista Switch.
entailment
def add_router_interface(self, context, router_info): """Adds an interface to a router created on Arista HW router. This deals with both IPv6 and IPv4 configurations. """ if router_info: self._select_dicts(router_info['ip_version']) cidr = router_info['cidr'] subnet_mask = cidr.split('/')[1] router_name = self._arista_router_name(router_info['id'], router_info['name']) if self._mlag_configured: # For MLAG, we send a specific IP address as opposed to cidr # For now, we are using x.x.x.253 and x.x.x.254 as virtual IP mlag_peer_failed = False for i, server in enumerate(self._servers): # Get appropriate virtual IP address for this router router_ip = self._get_router_ip(cidr, i, router_info['ip_version']) try: self.add_interface_to_router(router_info['seg_id'], router_name, router_info['gip'], router_ip, subnet_mask, server) mlag_peer_failed = False except Exception: if not mlag_peer_failed: mlag_peer_failed = True else: msg = (_('Failed to add interface to router ' '%s on EOS') % router_name) LOG.exception(msg) raise arista_exc.AristaServicePluginRpcError( msg=msg) else: for s in self._servers: self.add_interface_to_router(router_info['seg_id'], router_name, router_info['gip'], None, subnet_mask, s)
Adds an interface to a router created on Arista HW router. This deals with both IPv6 and IPv4 configurations.
entailment
def remove_router_interface(self, context, router_info):
        """Removes previously configured interface from router on Arista HW.

        This deals with both IPv6 and IPv4 configurations.
        """
        if router_info:
            router_name = self._arista_router_name(router_info['id'],
                                                   router_info['name'])
            mlag_peer_failed = False
            for s in self._servers:
                try:
                    self.delete_interface_from_router(router_info['seg_id'],
                                                      router_name, s)
                    if self._mlag_configured:
                        mlag_peer_failed = False
                except Exception:
                    if self._mlag_configured and not mlag_peer_failed:
                        mlag_peer_failed = True
                    else:
                        msg = (_('Failed to remove interface from router '
                                 '%s on EOS') % router_name)
                        LOG.exception(msg)
                        raise arista_exc.AristaServicePluginRpcError(msg=msg)
Removes previously configured interface from router on Arista HW. This deals with both IPv6 and IPv4 configurations.
entailment
def _run_config_cmds(self, commands, server):
        """Executes/sends a CAPI (Command API) command to EOS.

        In this method, the list of commands is wrapped with prefix and
        postfix commands - to make it understandable by EOS.

        :param commands : List of command to be executed on EOS.
        :param server: Server endpoint on the Arista switch to be configured
        """
        command_start = ['enable', 'configure']
        command_end = ['exit']
        full_command = command_start + commands + command_end
        self._run_eos_cmds(full_command, server)
Executes/sends a CAPI (Command API) command to EOS.

In this method, the list of commands is wrapped with prefix and
postfix commands - to make it understandable by EOS.

:param commands : List of command to be executed on EOS.
:param server: Server endpoint on the Arista switch to be configured
entailment
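A worked example of the command wrapping, using hypothetical VRF config commands as the body.

commands = ['vrf definition myvrf', 'rd 1:1', 'exit']
full_command = ['enable', 'configure'] + commands + ['exit']
# full_command == ['enable', 'configure',
#                  'vrf definition myvrf', 'rd 1:1', 'exit',
#                  'exit']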