idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
55,100
def get_subclass_from_module(module, parent_class):
    """Return the first strict subclass of *parent_class* defined in *module*.

    Raises SprinterException when the module contains no such subclass.
    """
    r = __recursive_import(module)
    member_dict = dict(inspect.getmembers(r))
    found = None
    for v in member_dict.values():
        if inspect.isclass(v) and issubclass(v, parent_class) and v is not parent_class:
            # Keep the first matching subclass, as the original intended.
            found = v
            break
    if found is None:
        # BUG FIX: the original initialized its result to parent_class and
        # tested it against None, so this error could never fire and the
        # parent class itself was silently returned.
        raise SprinterException("No subclass %s that extends %s exists in classpath!"
                                % (module, str(parent_class)))
    return found
Get a subclass of parent_class from the module named by module .
55,101
def __recursive_import(module_name):
    """Import the dotted *module_name* one component at a time and return
    the leaf module.

    NOTE(review): relies on the deprecated ``imp`` module (removed in
    Python 3.12); ``importlib.import_module`` is the modern replacement.
    """
    names = module_name.split(".")
    path = None
    module = None
    while len(names) > 0:
        if module:
            path = module.__path__
        name = names.pop(0)
        (module_file, pathname, description) = imp.find_module(name, path)
        try:
            module = imp.load_module(name, module_file, pathname, description)
        finally:
            # BUG FIX: imp.find_module may return an open file object which
            # the caller is responsible for closing (None for packages);
            # the original leaked it.
            if module_file is not None:
                module_file.close()
    return module
Recursively looks for and imports the names returning the module desired
55,102
def err_exit(msg, rc=1):
    """Write *msg* to stderr, then terminate the process with status *rc*."""
    sys.stderr.write("{0}\n".format(msg))
    sys.exit(rc)
Print msg to stderr and exit with rc .
55,103
def read_file(self, infile):
    """Read the reST file *infile* and return its contents as a string.

    On read errors the process exits via err_exit.
    """
    try:
        with open(infile, 'rt') as stream:
            return stream.read()
    except UnicodeDecodeError as exc:
        err_exit('Error reading %s: %s' % (infile, exc))
    except (IOError, OSError) as exc:
        err_exit('Error reading %s: %s' % (infile, exc.strerror or exc))
Read a reST file into a string .
55,104
def write_file(self, html, outfile):
    """Write the HTML string *html* to *outfile*; exit via err_exit on failure."""
    try:
        with open(outfile, 'wt') as stream:
            stream.write(html)
    except (IOError, OSError) as exc:
        err_exit('Error writing %s: %s' % (outfile, exc.strerror or exc))
Write an HTML string to a file .
55,105
def convert_string(self, rest):
    """Convert the reST string *rest* to HTML; exit via err_exit on failure."""
    try:
        html = publish_string(rest, writer_name='html')
    except SystemExit as exc:
        err_exit('HTML conversion failed with error: %s' % exc.code)
    else:
        if sys.version_info[0] >= 3:
            # docutils returns bytes under Python 3
            html = html.decode('utf-8')
        return html
Convert a reST string to an HTML string .
55,106
def apply_styles(self, html, styles):
    """Insert *styles* just before the closing </head> tag.

    Returns *html* unchanged when no </head> tag is present.
    """
    head_end = html.find('</head>')
    if head_end < 0:
        return html
    return html[:head_end] + styles + html[head_end:]
Insert style information into the HTML string .
55,107
def publish_string(self, rest, outfile, styles=''):
    """Render the reST string *rest* as HTML, write it to *outfile*, and
    return *outfile*."""
    html = self.convert_string(rest)
    html = self.apply_styles(self.strip_xml_header(html), styles)
    self.write_file(html, outfile)
    return outfile
Render a reST string as HTML .
55,108
def publish_file(self, infile, outfile, styles=''):
    """Render the reST file *infile* as HTML into *outfile*; return *outfile*."""
    return self.publish_string(self.read_file(infile), outfile, styles)
Render a reST file as HTML .
55,109
def upgrade(self):
    """Back up the current config file, then write a fresh default one.

    Returns False when the backup (or the write) fails.
    """
    warn('Upgrading ' + self.filename)
    if not self.backup_config(self.filename):
        return False
    return self.write_default_config(self.filename)
Upgrade the config file .
55,110
def backup_config(self, filename):
    """Copy *filename* to a version-suffixed backup; return True on success."""
    backup_name = '%s-%s' % (filename, self.version)
    warn('Moving current configuration to ' + backup_name)
    try:
        shutil.copy2(filename, backup_name)
    except (IOError, OSError) as exc:
        print('Error copying %s: %s' % (filename, exc.strerror or exc),
              file=sys.stderr)
        return False
    return True
Backup the current config file .
55,111
def write_default_config(self, filename):
    """Write DEFAULT_CONFIG to *filename*; return True on success."""
    try:
        with open(filename, 'wt') as stream:
            stream.write(DEFAULT_CONFIG)
    except (IOError, OSError) as exc:
        print('Error writing %s: %s' % (filename, exc.strerror or exc),
              file=sys.stderr)
        return False
    return True
Write the default config file .
55,112
def reset_defaults(self, config_file):
    """Validate that *config_file* is a readable regular file, then reload
    the defaults from it. Exits via err_exit on any validation failure."""
    problem = None
    if not exists(config_file):
        problem = 'No such file: %(config_file)s'
    elif not isfile(config_file):
        problem = 'Not a file: %(config_file)s'
    elif not os.access(config_file, os.R_OK):
        problem = 'File cannot be read: %(config_file)s'
    if problem is not None:
        err_exit(problem % locals())
    self.set_defaults(config_file)
Reset defaults .
55,113
def write_defaults(self):
    """Create the default config file, then reload it."""
    defaults = self.defaults
    defaults.write()
    self.reset_defaults(defaults.filename)
Create default config file and reload .
55,114
def upgrade_defaults(self):
    """Upgrade the config file, then reload it."""
    defaults = self.defaults
    defaults.upgrade()
    self.reset_defaults(defaults.filename)
Upgrade config file and reload .
55,115
def list_styles(self):
    """Print the known styles (marking the default one) and exit with 0.

    Exits via err_exit (also status 0) when no styles are known.
    """
    known = sorted(self.defaults.known_styles)
    if not known:
        err_exit('No styles', 0)
    default = self.defaults.default_style
    for style in known:
        suffix = ' (default)' if style == default else ''
        print(style + suffix)
    sys.exit(0)
Print available styles and exit .
55,116
def render_file(self, filename):
    """Convert the reST file *filename* to a hidden .html sibling; return its path."""
    dirname, basename = split(filename)
    with changedir(dirname):
        target = abspath('.%s.html' % basename)
        self.docutils.publish_file(abspath(basename), target, self.styles)
        return target
Convert a reST file to HTML .
55,117
def render_long_description(self, dirname):
    """Render the package's long_description as HTML; return the output path."""
    with changedir(dirname):
        self.setuptools.check_valid_package()
        description = self.setuptools.get_long_description()
        target = abspath('.long-description.html')
        self.docutils.publish_string(description, target, self.styles)
        return target
Convert a package's long description to HTML .
55,118
def open_in_browser(self, outfile):
    """Display *outfile* in the configured browser ('default' = system default)."""
    url = 'file://%s' % outfile
    if self.browser == 'default':
        webbrowser.open(url)
    else:
        webbrowser.get(self.browser).open(url)
Open the given HTML file in a browser .
55,119
def run(self):
    """Render package documentation (a reST file or a package dir) and open it."""
    os.environ['JARN_RUN'] = '1'
    self.python.check_valid_python()
    args = self.parse_options(self.args)
    # Default to the current directory when no argument was given.
    arg = args[0] if args else os.curdir
    if arg:
        arg = expanduser(arg)
        if isfile(arg):
            outfile = self.render_file(arg)
        elif isdir(arg):
            outfile = self.render_long_description(arg)
        else:
            err_exit('No such file or directory: %s' % arg)
        self.open_in_browser(outfile)
Render and display Python package documentation .
55,120
def preprocess_cell(self, cell: "NotebookNode", resources: dict, cell_index: int) -> Tuple["NotebookNode", dict]:
    """Extract the cell's attachments into ``resources["outputs"]`` and
    rewrite ``attachment:<name>`` references in the cell source to the
    extracted file names.

    NOTE(review): SOURCE is whitespace-flattened; the placement of the
    ``attachment:`` rewrite inside the inner (per-mime) loop is inferred —
    confirm against the original file.
    """
    output_files_dir = resources.get("output_files_dir", None)
    # Make sure the key exists and is a dict before we write into it.
    if not isinstance(resources["outputs"], dict):
        resources["outputs"] = {}
    for name, attach in cell.get("attachments", {}).items():
        orig_name = name
        # Collapse percent-escape sequences (e.g. "%20") to "-" for filenames.
        name = re.sub(r"%[\w\d][\w\d]", "-", name)
        for mime, data in attach.items():
            if mime not in self.extract_output_types:
                continue
            if mime in {"image/png", "image/jpeg", "application/pdf"}:
                # Binary payloads are stored base64-encoded in the notebook.
                data = a2b_base64(data)
            elif sys.platform == "win32":
                data = data.replace("\n", "\r\n").encode("UTF-8")
            else:
                data = data.encode("UTF-8")
            filename = self.output_filename_template.format(
                cell_index=cell_index,
                name=name,
                unique_key=resources.get("unique_key", ""),
            )
            if output_files_dir is not None:
                filename = os.path.join(output_files_dir, filename)
            if name.endswith(".gif") and mime == "image/png":
                filename = filename.replace(".gif", ".png")
            resources["outputs"][filename] = data
            # Point the markdown reference at the extracted file.
            attach_str = "attachment:" + orig_name
            if attach_str in cell.source:
                cell.source = cell.source.replace(attach_str, filename)
    return cell, resources
Apply a transformation on each cell .
55,121
def combine_pdf_as_bytes(pdfs: List[BytesIO]) -> bytes:
    """Concatenate the pages of every PDF in *pdfs*; return one PDF byte string."""
    writer = PdfWriter()
    for source in pdfs:
        writer.addpages(PdfReader(source).pages)
    buffer = BytesIO()
    writer.write(buffer)
    data = buffer.getvalue()
    buffer.close()
    return data
Combine PDFs and return a byte - string with the result .
55,122
def split(self, granularity_after_split, exclude_partial=True):
    """Split this period into sub-periods of the requested granularity.

    *exclude_partial* drops partial periods at the edges (ignored for days).
    Raises Exception for an unknown granularity.
    """
    g = granularity_after_split
    if g == Granularity.DAY:
        return self.get_days()
    if g == Granularity.WEEK:
        return self.get_weeks(exclude_partial)
    if g == Granularity.MONTH:
        return self.get_months(exclude_partial)
    if g == Granularity.QUARTER:
        return self.get_quarters(exclude_partial)
    if g == Granularity.HALF_YEAR:
        return self.get_half_years(exclude_partial)
    if g == Granularity.YEAR:
        return self.get_years(exclude_partial)
    raise Exception("Invalid granularity: %s" % g)
Split a period into a given granularity . Optionally include partial periods at the start and end of the period .
55,123
def apply_calibration(df, calibration_df, calibration):
    """Apply fitted feedback calibration values to *calibration*, then
    recompute the capacitance column ``C`` of *df* in place.
    """
    from dmf_control_board_firmware import FeedbackResults
    # Copy fitted R/C values per feedback resistor into the calibration object.
    for i, (fb_resistor, R_fb, C_fb) in calibration_df[['fb_resistor', 'R_fb', 'C_fb']].iterrows():
        calibration.R_fb[int(fb_resistor)] = R_fb
        calibration.C_fb[int(fb_resistor)] = C_fb
    cleaned_df = df.dropna()
    grouped = cleaned_df.groupby(['frequency', 'test_capacitor', 'repeat_index'])
    # NOTE(review): the second group key is named `channel` but is actually
    # the `test_capacitor` value.
    for (f, channel, repeat_index), group in grouped:
        r = FeedbackResults(group.V_actuation.iloc[0], f, 5.0,
                            group.V_hv.values, group.hv_resistor.values,
                            group.V_fb.values, group.fb_resistor.values,
                            calibration)
        # Write the recomputed capacitance back to the original frame rows.
        df.loc[group.index, 'C'] = r.capacitance()
Apply calibration values from fit_fb_calibration result to calibration object .
55,124
def config_dict(config):
    """Return a plain dict mapping each Sphinx config value name to its value."""
    return {key: getattr(config, key) for key in config.values}
Given a Sphinx config object return a dictionary of config values .
55,125
def from_defn(cls, defn):
    """Return the first instance of a Repl subclass that accepts *defn*
    (i.e. whose instance is truthy). Raises StopIteration when none does."""
    candidates = (subclass(defn) for subclass in cls.__subclasses__())
    return next(instance for instance in candidates if instance)
Return the first Repl subclass that works with this
55,126
def show_colormap(cls, names=[], N=10, show=True, *args, **kwargs):
    """Build (and optionally show) a colormap dialog; return the dialog.

    NOTE(review): `names=[]` is a mutable default argument — safe only if
    safe_list copies rather than mutates its input; confirm.
    """
    names = safe_list(names)
    obj = cls(names, N, *args, **kwargs)
    vbox = obj.layout()
    # Add a Close button wired to the dialog's close slot.
    buttons = QDialogButtonBox(QDialogButtonBox.Close, parent=obj)
    buttons.rejected.connect(obj.close)
    vbox.addWidget(buttons)
    if show:
        obj.show()
    return obj
Show a colormap dialog
55,127
def cmd_list(args):
    """Print each pen list together with its note count."""
    for list_name in penStore.data:
        puts("%s (%s)" % (list_name, len(penStore.data[list_name])))
List all elements in pen
55,128
def cmd_all(args):
    """Print every pen list and, indented beneath it, every note it contains."""
    for list_name in penStore.data:
        puts(list_name)
        with indent(4, ' -'):
            for note_name in penStore.data[list_name]:
                puts(note_name)
List everything recursively
55,129
def cmd_create(args):
    """Create a pen list named by the first positional argument."""
    name = args.get(0)
    if not name:
        puts("not valid")
    else:
        penStore.createList(name)
Creates a list
55,130
def cmd_touch_note(args):
    """Open a note (creating it first if needed); with no note name, list
    the notes of the given list instead."""
    major = args.get(0)
    minor = args.get(1)
    if major not in penStore.data:
        puts("No list of that name.")
        return
    if minor is None:
        for note in penStore.data[major]:
            puts(note)
    else:
        if minor not in penStore.data[major]:
            penStore.createNote(major, minor)
        penStore.openNote(major, minor)
Create a note
55,131
def cmd_delete(args):
    """Delete a whole list (asking for confirmation when it still has notes)
    or a single note.

    NOTE(review): Python 2 only — uses raw_input(); the trailing bare
    ``print`` (no-list-name fallthrough) is a py2 print statement.
    """
    major = args.get(0)
    minor = args.get(1)
    if major is not None:
        if major in penStore.data:
            if minor is None:
                # Deleting an entire list: confirm when it is not empty.
                if len(penStore.data[major]) > 0:
                    if raw_input("are you sure (y/n)? ") not in ['y', 'Y', 'yes', 'Yes']:
                        return ExitStatus.ABORT
                penStore.deleteList(major)
                puts("list deleted")
            elif minor in penStore.data[major]:
                penStore.deleteNote(major, minor)
                puts("note deleted")
            else:
                puts("no such note, sorry! (%s)" % minor)
        else:
            puts("no such list, sorry! (%s)" % major)
    else:
        print
Deletes a list or a note
55,132
def restclient_admin_required(view_func):
    """View decorator that checks whether the user may view proxy restclients.

    Requires settings.RESTCLIENTS_ADMIN_AUTH_MODULE to name an authorization
    callable; renders access_denied.html with status 401 otherwise. The
    whole wrapper is itself wrapped in login_required.
    """
    def wrapper(request, *args, **kwargs):
        template = 'access_denied.html'
        if hasattr(settings, 'RESTCLIENTS_ADMIN_AUTH_MODULE'):
            auth_func = import_string(settings.RESTCLIENTS_ADMIN_AUTH_MODULE)
        else:
            context = {'error_msg': (
                "Your application must define an authorization function as "
                "RESTCLIENTS_ADMIN_AUTH_MODULE in settings.py.")}
            return render(request, template, context=context, status=401)
        # Positional args are (service, url) when present.
        service = args[0] if len(args) > 0 else None
        url = args[1] if len(args) > 1 else None
        if auth_func(request, service, url):
            return view_func(request, *args, **kwargs)
        return render(request, template, status=401)
    return login_required(function=wrapper)
View decorator that checks whether the user is permitted to view proxy restclients . Calls login_required in case the user is not authenticated .
55,133
def destination_heuristic(data):
    """Return the directory holding the majority of the files referenced by
    the bib entries in *data*.

    Raises click.ClickException when no entry has a file field, or when two
    or more directories are tied for the most files.
    """
    counter = collections.Counter()
    for entry in data:
        file_field = entry['fields'].get('file')
        if not file_field:
            continue
        counter[os.path.dirname(file_field)] += 1

    if not counter:
        raise click.ClickException(
            'Path finding heuristics failed: no paths in the database')

    # BUG FIX: the original sorted the paths lexicographically and grouped
    # them by string *length*, so a clear majority directory could be
    # rejected. A majority vote ranks by count instead.
    ranked = counter.most_common()
    top_path, top_count = ranked[0]
    if len(ranked) > 1 and ranked[1][1] == top_count:
        raise click.ClickException(
            'Path finding heuristics failed: '
            'there are multiple equally valid paths in the database')
    return top_path
A heuristic to get the folder with all other files from bib using majority vote .
55,134
def remove_entry(data, entry):
    """Delete the entry's file from disk (when present), then drop the entry
    from *data* in place."""
    path = entry['fields'].get('file')
    if path:
        try:
            os.remove(path)
        except IOError:
            click.echo('This entry\'s file was missing')
    data.remove(entry)
Remove an entry in place .
55,135
def string_to_basename(s):
    """Lowercase *s*, drop non-word characters, and hyphenate whitespace runs."""
    cleaned = re.sub(r'[^\w\s-]', '', s.strip().lower())
    return re.sub(r'[\s-]+', '-', cleaned)
Converts to lowercase , removes non - alpha characters , and converts spaces to hyphens .
55,136
def editor(*args, **kwargs):
    """Call click.edit and raise ClickException when the editor exits unsaved."""
    result = click.edit(*args, **kwargs)
    if result is not None:
        return result
    msg = 'Editor exited without saving, command aborted'
    raise click.ClickException(msg)
Wrapper for click . edit that raises an error when None is returned .
55,137
def terms(self, facet_name, field, size=10, order=None, all_terms=False,
          exclude=None, regex='', regex_flags=''):
    """Register a terms facet returning the *size* most frequent terms of
    *field*; optional arguments refine the facet. Returns self (chainable).
    """
    # `exclude` defaults to None instead of a shared mutable [] — behavior
    # is unchanged because an empty list was treated as falsy anyway.
    if exclude is None:
        exclude = []
    facet = dict(field=field, size=size)
    # BUG FIX: the original indexed self[facet_name][terms] with the bare
    # *name* `terms` (not the string 'terms'), so every optional parameter
    # crashed at runtime.
    if order:
        facet['order'] = order
    if all_terms:
        facet['all_terms'] = True
    if exclude:
        facet['exclude'] = exclude
    if regex:
        facet['regex'] = regex
    if regex_flags:
        facet['regex_flags'] = regex_flags
    self[facet_name] = dict(terms=facet)
    return self
Allow to specify field facets that return the N most frequent terms .
55,138
def backup_file(filename):
    """Copy *filename* to "<filename>.sprinter.bak"; no-op when it is absent."""
    if os.path.exists(filename):
        shutil.copyfile(filename, filename + ".sprinter.bak")
create a backup of the file desired
55,139
def inject(self, filename, content):
    """Queue *content* (normalized to one trailing newline) for later
    injection into *filename*."""
    normalized = _unicode(content).rstrip() + "\n"
    previous = self.inject_dict.get(filename, "")
    self.inject_dict[filename] = previous + normalized
add the injection content to the dictionary
55,140
def commit(self):
    """Apply every queued injection and every queued clear, overwriting any
    previous injections in the target files."""
    log = self.logger
    log.debug("Starting injections...")
    log.debug("Injections dict is:")
    log.debug(self.inject_dict)
    log.debug("Clear list is:")
    log.debug(self.clear_set)
    for filename, content in self.inject_dict.items():
        text = _unicode(content)
        log.debug("Injecting values into %s..." % filename)
        self.destructive_inject(filename, text)
    for filename in self.clear_set:
        log.debug("Clearing injection from %s..." % filename)
        self.destructive_clear(filename)
commit the injections desired overwriting any previous injections in the file .
55,141
def injected(self, filename):
    """Return True when *filename* already contains an injection wrapper."""
    full_path = os.path.expanduser(filename)
    if not os.path.exists(full_path):
        return False
    with codecs.open(full_path, 'r+', encoding="utf-8") as fh:
        return self.wrapper_match.search(fh.read()) is not None
Return true if the file has already been injected before .
55,142
def destructive_inject(self, filename, content):
    """Inject *content* into *filename* immediately (after backing it up).

    Intended for the commit phase, when no further injections will follow.
    """
    text = _unicode(content)
    backup_file(filename)
    full_path = self.__generate_file(filename)
    with codecs.open(full_path, 'r', encoding="utf-8") as source:
        merged = self.inject_content(source.read(), text)
    with codecs.open(full_path, 'w+', encoding="utf-8") as target:
        target.write(merged)
Injects the injections desired immediately . This should generally be run only during the commit phase when no future injections will be done .
55,143
def __generate_file(self, file_path):
    """Ensure *file_path* exists (creating parent directories as needed);
    return its user-expanded absolute-ish path."""
    file_path = os.path.expanduser(file_path)
    parent = os.path.dirname(file_path)
    if not os.path.exists(parent):
        self.logger.debug("Directories missing! Creating directories for %s..." % file_path)
        os.makedirs(parent)
    if not os.path.exists(file_path):
        open(file_path, "w+").close()
    return file_path
Generate the file at the file_path desired . Creates any needed directories on the way . returns the absolute path of the file .
55,144
def in_noninjected_file(self, file_path, content):
    """Return True when *content* occurs in *file_path* outside any injected
    wrapper section; a missing file counts as not containing it."""
    if os.path.exists(file_path):
        # BUG FIX: read via a context manager — the original left the
        # codecs file object open (resource leak).
        with codecs.open(file_path, encoding="utf-8") as fh:
            file_content = fh.read()
        file_content = self.wrapper_match.sub(u"", file_content)
    else:
        file_content = ""
    return file_content.find(content) != -1
Checks if a string exists in the file , excluding the injected content .
55,145
def clear_content(self, content):
    """Return *content* with every injected wrapper section removed."""
    return self.wrapper_match.sub("", _unicode(content))
Clear the injected content from the content buffer and return the results
55,146
def add_order(self, order):
    """File a MarketOrder under its region/type bucket, creating the empty
    bucket on demand, so the order list is ready for serialization."""
    key = '%s_%s' % (order.region_id, order.type_id)
    # BUG FIX: dict.has_key() was removed in Python 3; use `in` instead.
    if key not in self._orders:
        self.set_empty_region(order.region_id, order.type_id, order.generated_at)
    self._orders[key].add_order(order)
Adds a MarketOrder instance to the list of market orders contained within this order list . Does some behind - the - scenes magic to get it all ready for serialization .
55,147
def add_entry(self, entry):
    """File a MarketHistoryEntry under its region/type bucket, creating the
    empty bucket on demand, so the history is ready for serialization."""
    key = '%s_%s' % (entry.region_id, entry.type_id)
    # BUG FIX: dict.has_key() was removed in Python 3; use `in` instead.
    if key not in self._history:
        self.set_empty_region(entry.region_id, entry.type_id, entry.generated_at)
    self._history[key].add_entry(entry)
Adds a MarketHistoryEntry instance to the list of market history entries contained within this instance . Does some behind - the - scenes magic to get it all ready for serialization .
55,148
def _find_file(self, file_name: str, lookup_dir: Path) -> Path or None:
    """Recursively search *lookup_dir* for a file named *file_name*.

    Returns the first match; raises FileNotFoundError when none is found.
    """
    # BUG FIX: both debug calls were plain strings missing the f prefix and
    # logged the literal placeholders; every sibling method uses f-strings.
    self.logger.debug(f'Trying to find the file {file_name} inside the directory {lookup_dir}')
    result = None
    for item in lookup_dir.rglob('*'):
        if item.name == file_name:
            result = item
            break
    else:
        # for/else: runs only when the loop finished without a break.
        raise FileNotFoundError(file_name)
    self.logger.debug(f'File found: {result}')
    return result
Find a file in a directory by name . Check subdirectories recursively .
55,149
def _sync_repo(self, repo_url: str, revision: str or None = None) -> Path:
    """Clone a Git repo into the cache dir (or pull when already cloned),
    optionally check out *revision*, and return the local repo path."""
    # Repo directory name: last URL component minus a trailing ".git".
    repo_name = repo_url.split('/')[-1].rsplit('.', maxsplit=1)[0]
    repo_path = (self._cache_path / repo_name).resolve()
    self.logger.debug(f'Synchronizing with repo; URL: {repo_url}, revision: {revision}')
    try:
        self.logger.debug(f'Cloning repo {repo_url} to {repo_path}')
        run(f'git clone {repo_url} {repo_path}', shell=True, check=True, stdout=PIPE, stderr=STDOUT)
    except CalledProcessError as exception:
        if repo_path.exists():
            # Clone failed because the directory already exists: refresh it.
            self.logger.debug('Repo already cloned; pulling from remote')
            try:
                run('git pull', cwd=repo_path, shell=True, check=True, stdout=PIPE, stderr=STDOUT)
            except CalledProcessError as exception:
                # Pull failures are tolerated (e.g. offline): keep the cache.
                self.logger.warning(str(exception))
        else:
            # NOTE(review): a failed first clone is only logged here; the
            # checkout below would then run against a missing directory —
            # confirm whether this should re-raise instead.
            self.logger.error(str(exception))
    if revision:
        run(f'git checkout {revision}', cwd=repo_path, shell=True, check=True, stdout=PIPE, stderr=STDOUT)
    return repo_path
Clone a Git repository to the cache dir . If it has been cloned before update it .
55,150
def _shift_headings(self, content: str, shift: int) -> str:
    """Shift every Markdown heading in *content* by *shift* levels
    (positive or negative); levels beyond 6 become bold paragraphs."""
    def _replace(heading):
        new_heading_level = len(heading.group('hashes')) + shift
        self.logger.debug(f'Shift heading level to {new_heading_level}, heading title: {heading.group("title")}')
        if new_heading_level > 6:
            # Deeper than H6 does not exist in Markdown.
            self.logger.debug('New heading level is out of range, using bold paragraph text instead of heading')
            return f'**{heading.group("title")}**{heading.group("tail")}'
        return f'{"#" * new_heading_level} {heading.group("title")}{heading.group("tail")}'
    return self._heading_pattern.sub(_replace, content)
Shift Markdown headings in a string by a given value . The shift can be positive or negative .
55,151
def _cut_from_heading_to_heading(self, content: str, from_heading: str, to_heading: str or None = None, options={}) -> str:
    """Cut the Markdown between *from_heading* and *to_heading*, optionally
    dropping the top heading (``nohead``) and re-leveling (``sethead``).

    NOTE(review): `options={}` is a mutable default; safe only while the
    dict is never mutated here.
    """
    self.logger.debug(f'Cutting from heading: {from_heading}, to heading: {to_heading}, options: {options}')
    from_heading_pattern = re.compile(r'^\#{1,6}\s+' + rf'{from_heading}\s*$', flags=re.MULTILINE)
    if not from_heading_pattern.findall(content):
        # Start heading absent: nothing to cut.
        return ''
    from_heading_line = from_heading_pattern.findall(content)[0]
    from_heading_level = len(self._heading_pattern.match(from_heading_line).group('hashes'))
    self.logger.debug(f'From heading level: {from_heading_level}')
    # Everything after the start heading.
    result = from_heading_pattern.split(content)[1]
    if to_heading:
        to_heading_pattern = re.compile(r'^\#{1,6}\s+' + rf'{to_heading}\s*$', flags=re.MULTILINE)
    else:
        # No explicit end: stop at the next heading of the same or higher level.
        to_heading_pattern = re.compile(rf'^\#{{1,{from_heading_level}}}[^\#]+?$', flags=re.MULTILINE)
    result = to_heading_pattern.split(result)[0]
    if not options.get('nohead'):
        result = from_heading_line + result
    if options.get('sethead'):
        if options['sethead'] > 0:
            result = self._shift_headings(result, options['sethead'] - from_heading_level)
    return result
Cut part of Markdown string between two headings set internal heading level and remove top heading .
55,152
def _cut_to_heading(self, content: str, to_heading: str or None = None, options={}) -> str:
    """Cut a Markdown string from its start to *to_heading*, optionally
    dropping the top heading (``nohead``) and re-leveling (``sethead``).

    NOTE(review): `options={}` is a mutable default; safe only while the
    dict is never mutated here.
    """
    self.logger.debug(f'Cutting to heading: {to_heading}, options: {options}')
    content_buffer = StringIO(content)
    first_line = content_buffer.readline()
    if self._heading_pattern.fullmatch(first_line):
        # Document starts with a heading: use it as the top heading.
        from_heading_line = first_line
        from_heading_level = len(self._heading_pattern.match(from_heading_line).group('hashes'))
        result = content_buffer.read()
    else:
        from_heading_line = ''
        from_heading_level = self._find_top_heading_level(content)
        result = content
    self.logger.debug(f'From heading level: {from_heading_level}')
    if to_heading:
        to_heading_pattern = re.compile(r'^\#{1,6}\s+' + rf'{to_heading}\s*$', flags=re.MULTILINE)
        result = to_heading_pattern.split(result)[0]
    if not options.get('nohead'):
        result = from_heading_line + result
    if options.get('sethead'):
        if options['sethead'] > 0:
            result = self._shift_headings(result, options['sethead'] - from_heading_level)
    return result
Cut part of Markdown string from the start to a certain heading set internal heading level and remove top heading .
55,153
def _adjust_image_paths(self, content: str, md_file_path: Path) -> str:
    """Rewrite image references in *content* so their paths are absolute
    POSIX paths resolved against the directory of *md_file_path*."""
    def _replace(image):
        image_caption = image.group('caption')
        image_path = md_file_path.parent / Path(image.group('path'))
        self.logger.debug(
            f'Updating image reference; user specified path: {image.group("path")}, ' +
            f'absolute path: {image_path}, caption: {image_caption}'
        )
        return f'![{image_caption}]({image_path.absolute().as_posix()})'
    return self._image_pattern.sub(_replace, content)
Locate images referenced in a Markdown string and replace their paths with the absolute ones .
55,154
def _get_src_file_path(self, markdown_file_path: Path) -> Path:
    """Map a Markdown path inside the temporary working dir onto the
    corresponding path inside the Foliant project's source dir."""
    relative = markdown_file_path.relative_to(self.working_dir.resolve())
    self.logger.debug(
        'Currently processed Markdown file path relative to working dir: ' +
        f'{relative}'
    )
    mapped = self.project_path.resolve() / self.config['src_dir'] / relative
    self.logger.debug(
        'Currently processed Markdown file path mapped to source dir: ' +
        f'{mapped}'
    )
    return mapped
Translate the path of Markdown file that is located inside the temporary working directory into the path of the corresponding Markdown file that is located inside the source directory of Foliant project .
55,155
def _get_included_file_path(self, user_specified_path: str, current_processed_file_path: Path) -> Path:
    """Resolve a user-specified include path against the currently processed
    file, remapping to the project source dir when the target falls outside
    the working dir."""
    self.logger.debug(f'Currently processed Markdown file: {current_processed_file_path}')
    included_file_path = (current_processed_file_path.parent / user_specified_path).resolve()
    self.logger.debug(f'User-specified included file path: {included_file_path}')
    # Processed file inside the working dir but target outside it: re-resolve
    # against the matching file in the source dir instead.
    if (self.working_dir.resolve() in current_processed_file_path.parents
            and self.working_dir.resolve() not in included_file_path.parents):
        self.logger.debug(
            'Currently processed file is located inside the working dir, ' +
            'but included file is located outside the working dir. ' +
            'So currently processed file path should be rewritten with the path of corresponding file ' +
            'that is located inside the source dir'
        )
        included_file_path = (
            self._get_src_file_path(current_processed_file_path).parent / user_specified_path
        ).resolve()
    else:
        self.logger.debug('Using these paths without changes')
    self.logger.debug(f'Finally, included file path: {included_file_path}')
    return included_file_path
Resolve user specified path to the local included file .
55,156
def process_includes(self, markdown_file_path: Path, content: str) -> str:
    """Replace every include statement in *content* with the (processed)
    contents of the referenced file; recurses when configured.

    NOTE(review): SOURCE is whitespace-flattened; the nesting below is
    reconstructed from the token stream — confirm against the original.
    """
    markdown_file_path = markdown_file_path.resolve()
    self.logger.debug(f'Processing Markdown file: {markdown_file_path}')
    processed_content = ''
    # Split the document on <tag ...>...</tag> include statements, keeping
    # the statements themselves as list items.
    include_statement_pattern = re.compile(
        rf'((?<!\<)\<{"|".join(self.tags)}(?:\s[^\<\>]*)?\>.*?\<\/{"|".join(self.tags)}\>)',
        flags=re.DOTALL
    )
    content_parts = include_statement_pattern.split(content)
    for content_part in content_parts:
        include_statement = self.pattern.fullmatch(content_part)
        if include_statement:
            body = self._tag_body_pattern.match(include_statement.group('body').strip())
            options = self.get_options(include_statement.group('options'))
            self.logger.debug(f'Processing include statement; body: {body}, options: {options}')
            if body.group('repo'):
                # Remote include: resolve alias (optionally "url#revision"),
                # sync the repo, and include from the local clone.
                repo = body.group('repo')
                repo_from_alias = self.options['aliases'].get(repo)
                revision = None
                if repo_from_alias:
                    self.logger.debug(f'Alias found: {repo}, resolved as: {repo_from_alias}')
                    if '#' in repo_from_alias:
                        repo_url, revision = repo_from_alias.split('#', maxsplit=1)
                    else:
                        repo_url = repo_from_alias
                else:
                    repo_url = repo
                # A revision in the include statement beats the alias revision.
                if body.group('revision'):
                    revision = body.group('revision')
                    self.logger.debug(f'Highest priority revision specified in the include statement: {revision}')
                self.logger.debug(f'File in Git repository referenced; URL: {repo_url}, revision: {revision}')
                repo_path = self._sync_repo(repo_url, revision)
                self.logger.debug(f'Local path of the repo: {repo_path}')
                included_file_path = repo_path / body.group('path')
                self.logger.debug(f'Resolved path to the included file: {included_file_path}')
                processed_content_part = self._process_include(
                    included_file_path, body.group('from_heading'), body.group('to_heading'), options)
            else:
                # Local include: resolve relative to the current file.
                self.logger.debug('Local file referenced')
                included_file_path = self._get_included_file_path(body.group('path'), markdown_file_path)
                self.logger.debug(f'Resolved path to the included file: {included_file_path}')
                processed_content_part = self._process_include(
                    included_file_path, body.group('from_heading'), body.group('to_heading'), options)
            if self.options['recursive'] and self.pattern.search(processed_content_part):
                self.logger.debug('Recursive call of include statements processing')
                processed_content_part = self.process_includes(included_file_path, processed_content_part)
            if options.get('inline'):
                # Collapse all whitespace so the include fits inline.
                self.logger.debug('Processing included content part as inline')
                processed_content_part = re.sub(r'\s+', ' ', processed_content_part).strip()
        else:
            processed_content_part = content_part
        processed_content += processed_content_part
    return processed_content
Replace all include statements with the respective file contents .
55,157
def get_logger(name, CFG=None):
    """Set up logging for a service using the Python 2.7 dictConfig.

    NOTE(review): Python 2 only — uses dict.itervalues() and the
    ``print >> sys.stderr`` statement; invalid syntax under Python 3.
    """
    logger = logging.getLogger(name)
    if CFG:
        # Ensure the directory of every file handler exists before
        # dictConfig tries to open the log files.
        for handler in CFG.get('handlers', {}).itervalues():
            if 'filename' in handler:
                log_dir = os.path.dirname(handler['filename'])
                if not os.path.exists(log_dir):
                    os.makedirs(log_dir)
        try:
            logging.config.dictConfig(CFG)
        except AttributeError:
            # dictConfig only exists from Python 2.7 on.
            print >> sys.stderr, '"logging.config.dictConfig" doesn\'t seem to be supported in your python'
            raise
    return logger
set up logging for a service using the py 2 . 7 dictConfig
55,158
def t_trailingwhitespace(self, token):
    ur'.+? \n'
    # ^ PLY token rule: the docstring above IS this token's regex (a line
    #   ending in a space before the newline) — do not change or remove it.
    # NOTE(review): Python 2 only — the ur'' literal and the print statement
    # below are invalid syntax in Python 3.
    print "Error: trailing whitespace at line %s in text '%s'" % (token.lexer.lineno + 1, token.value[:-1])
    token.lexer.lexerror = True
    token.lexer.skip(1)
ur . + ? \ n
55,159
def exec_before_request_actions(actions, **kwargs):
    """Run the actions in the "before" and "before_<METHOD>" groups."""
    method_group = "before_" + flask.request.method.lower()
    return execute_actions(actions, limit_groups=("before", method_group), **kwargs)
Execute actions in the before and before_METHOD groups
55,160
def exec_after_request_actions(actions, response, **kwargs):
    """Run "after_<METHOD>" then "after" actions with *response* in context.

    An action may short-circuit by returning a truthy value or raising
    ReturnValueException; otherwise the original response is returned.
    """
    current_context["response"] = response
    groups = ("after_" + flask.request.method.lower(), "after")
    try:
        rv = execute_actions(actions, limit_groups=groups, **kwargs)
    except ReturnValueException as exc:
        rv = exc.value
    return rv if rv else response
Executes actions of the after and after_METHOD groups . A response var will be injected in the current context .
55,161
def as_view(url=None, methods=None, view_class=ActionsView, name=None, url_rules=None, **kwargs):
    """Decorator that turns a function into a view class.

    Be warned: the decorated function is *replaced* by the generated class.
    """
    def decorator(f):
        if url is not None:
            f = expose(url, methods=methods)(f)
        clsdict = {
            "name": name or f.__name__,
            "actions": getattr(f, "actions", None),
            "url_rules": url_rules or getattr(f, "urls", None)
        }
        # Unwrap a WithActionsDecorator down to the raw function.
        if isinstance(f, WithActionsDecorator):
            f = f.func
        clsdict['func'] = f
        # Constructor merges decorator kwargs with per-instance kwargs;
        # per-instance values win unless they are None.
        def constructor(self, **ctorkwargs):
            for k, v in kwargs.items():
                if k not in ctorkwargs or ctorkwargs[k] is None:
                    ctorkwargs[k] = v
            view_class.__init__(self, func=f, **ctorkwargs)
        clsdict["__init__"] = constructor
        # Build the view class dynamically, subclassing view_class.
        return type(f.__name__, (view_class,), clsdict)
    return decorator
Decorator to transform a function into a view class . Be warned that this will replace the function with the view class .
55,162
def register(self, target):
    """Attach every stored URL rule to *target* (an app or blueprint).

    Each rule is registered under this view's name and dispatches to
    ``self.dispatch_request``.
    """
    dispatch = self.dispatch_request
    for url_rule, rule_options in self.url_rules:
        target.add_url_rule(url_rule, self.name, dispatch, **rule_options)
Registers url_rules on the blueprint
55,163
def view(self, *args, **kwargs):
    """Decorator: wrap a function with :func:`as_view` and register it.

    ``view_class`` defaults to this object's ``view_class`` unless the
    caller supplied one explicitly.
    """
    def decorator(func):
        if "view_class" not in kwargs:
            kwargs["view_class"] = self.view_class
        view_cls = as_view(*args, **kwargs)(func)
        return self.add_view(view_cls)
    return decorator
Decorator to automatically apply as_view decorator and register it .
55,164
def add_action_view(self, name, url, actions, **kwargs):
    """Create an :class:`ActionsView` named *name* at *url* and register it.

    :param actions: either an iterable of actions, or a mapping of
        group name -> actions (a falsy group name means "no group").
    :returns: the registered view instance.
    """
    view = ActionsView(name, url=url, self_var=self, **kwargs)
    if isinstance(actions, dict):
        # NOTE: the original used py2-only iteritems() and rebound the
        # ``actions`` parameter as the loop variable; both fixed here.
        for group, group_actions in actions.items():
            view.actions.extend(load_actions(group_actions, group=group or None))
    else:
        view.actions.extend(load_actions(actions))
    self.add_view(view)
    return view
Creates an ActionsView instance and registers it .
55,165
def process(exam_num: int, time: str, date: str) -> None:
    """Convert every notebook for one exam sitting into zips and a PDF.

    Looks in ``exams/exam-<num>`` for ``exam-<num>-<time>-<digit>.ipynb``
    problem notebooks, writes an assignment zip and a solutions zip, and
    combines all solution PDFs into a single ``...-soln`` PDF.

    :param exam_num: exam number (selects the folder and file names)
    :param time: exam time formatted ``%H%M`` (also part of the file names)
    :param date: exam date formatted ``%d-%b-%Y``
    """
    prefix = Path(f"exams/exam-{exam_num}")
    # one notebook per problem, suffixed with its single-digit number
    problems = list(prefix.glob(f"exam-{exam_num}-{time}-[0-9].ipynb"))
    problems = sorted(problems, key=lambda k: k.stem[-1])  # sort by problem digit
    output_directory = (prefix / "output").resolve()
    fw = FilesWriter(build_directory=str(output_directory))
    assignment_zip_name = output_directory / f"exam-{exam_num}-{time}.zip"
    solution_zip_name = output_directory / f"exam-{exam_num}-{time}-soln.zip"
    solution_pdfs: List[BytesIO] = []
    exam_date_time = datetime.strptime(time + date, "%H%M%d-%b-%Y")
    # template variables shared by all exporters
    res: Dict[str, Union[str, int]] = {
        "exam_num": exam_num,
        "time": exam_date_time.strftime("%I:%M %p"),
        "date": exam_date_time.strftime("%b. %d, %Y"),
        "delete_pymarkdown": True,
    }
    for problem in problems:
        res["unique_key"] = problem.stem
        problem_fname = str(problem.resolve())
        # problem 1 appears to use the short-answer exporter while the rest
        # use the regular problem exporter -- TODO confirm naming convention
        if problem.stem.endswith("1"):
            assignment_nb, _ = sa_nb_exp.from_filename(problem_fname, resources=res)
            with ZipFile(assignment_zip_name, mode="a") as zip_file:
                zip_file.writestr(problem.name, assignment_nb)
        else:
            assignment_nb, _ = prob_nb_exp.from_filename(problem_fname, resources=res)
            with ZipFile(assignment_zip_name, mode="a") as zip_file:
                zip_file.writestr(problem.name, assignment_nb)
        # every problem contributes a solution PDF and a solution notebook
        solution_pdf, _ = solution_pdf_exp.from_filename(problem_fname, resources=res)
        solution_pdfs.append(BytesIO(solution_pdf))
        solution_nb, _ = solution_nb_exp.from_filename(problem_fname, resources=res)
        with ZipFile(solution_zip_name, mode="a") as zip_file:
            zip_file.writestr(problem.name, solution_nb)
    resources: Dict[str, Any] = {
        "metadata": {
            "name": f"exam-{exam_num}-{time}-soln",
            "path": str(prefix),
            "modified_date": datetime.today().strftime("%B %d, %Y"),
        },
        "output_extension": ".pdf",
    }
    fw.write(combine_pdf_as_bytes(solution_pdfs), resources, f"exam-{exam_num}-{time}-soln")
Process the exams in the exam_num folder for the time .
55,166
def main(argv: Optional[Sequence[str]] = None) -> None:
    """CLI entry point: parse exam number, time, and date, then convert."""
    parser = ArgumentParser(description="Convert Jupyter Notebook exams to PDFs")
    parser.add_argument("--exam", type=int, required=True, dest="exam_num",
                        help="Exam number to convert")
    parser.add_argument("--time", type=str, required=True,
                        help="Time of exam to convert")
    parser.add_argument("--date", type=str, required=True,
                        help="The date the exam will take place")
    parsed = parser.parse_args(argv)
    process(parsed.exam_num, parsed.time, parsed.date)
Parse arguments and process the exam assignment .
55,167
def extract_meta(self, text):
    """Split a document into ``(content, metadata)``.

    *text* must start with a ``---`` line; everything up to the next
    ``---`` line is parsed as a YAML metadata document, and the rest is
    returned as the content string.

    :raises MetaParseException: if the very first line is not ``---``.
    """
    first_line = True
    metadata_lines = []
    content_lines = []
    metadata_parsed = False
    for line in text.split('\n'):
        if first_line:
            first_line = False
            if line.strip() != '---':
                raise MetaParseException('Invalid metadata')
            continue
        if line.strip() == '' and not metadata_parsed:
            # skip blank lines while still inside the metadata header
            continue
        if line.strip() == '---' and not metadata_parsed:
            metadata_parsed = True
        elif not metadata_parsed:
            metadata_lines.append(line)
        else:
            content_lines.append(line)
    content = '\n'.join(content_lines)
    # FIX: the original wrapped this call in a no-op ``try/except: raise``
    # with unreachable statements after the ``raise`` -- removed.
    # NOTE(review): yaml.load can execute arbitrary constructors on
    # untrusted input; consider yaml.safe_load if the input is not trusted.
    metadata = yaml.load('\n'.join(metadata_lines))
    return content, metadata
Takes the entire file as input. Reads the first YAML document as metadata, and the rest of the document as text.
55,168
def set_defaults(self):
    """Seed an UPPER-CASE attribute for every spec entry from its default.

    Entries without a ``default`` key become ``None``.
    """
    for entry_name, entry in self.spec.items():
        default = entry.get("default", None)
        setattr(self, entry_name.upper(), default)
Add each model entry with its default.
55,169
def load_env(self):
    """Overlay values from environment variables onto the model.

    For every spec entry the variable ``<ENV_PREFIX>_<KEY>`` is read;
    dict/list entries are parsed as JSON, everything else is cast to the
    entry's declared type.  The current attribute value (or the spec
    default) serves as the fallback.

    :raises: re-raises ConfigurationError after logging when a JSON-typed
        variable fails to parse.
    """
    for key, value in self.spec.items():
        if value['type'] in (dict, list):
            envar = (self.env_prefix + "_" + key).upper()
            try:
                # env.json parses the variable's value as a JSON document
                envvar = env.json(envar, default=getattr(self, key.upper(), value.get('default')))
            except ConfigurationError as _err:
                print(_err)
                self.log.critical(f"Error parsing json from env var. {os.environ.get(envar)}")
                print(envar)
                raise
        else:
            # scalar types are cast directly by the env helper
            envvar = env((self.env_prefix + "_" + key).upper(), default=getattr(self, key.upper(), value.get('default')), cast=value['type'])
        setattr(self, key.upper(), envvar)
Load the model from environment variables.
55,170
def parse_args(self):
    """Build an ArgumentParser from the spec and parse known CLI args.

    Adds ``--generate`` / ``--settings`` plus one ``--<key>`` option per
    spec entry, typed according to the entry's declared type.  Unknown
    arguments are ignored (``parse_known_args``).

    :returns: the parsed argparse namespace.
    """
    parser = ArgumentParser(description='', formatter_class=RawTextHelpFormatter)
    parser.add_argument("--generate", action="store", dest='generate',
                        choices=['command', 'docker-run', 'docker-compose', 'ini', 'env',
                                 'kubernetes', 'readme', 'drone-plugin'],
                        help="Generate a template ")
    parser.add_argument("--settings", action="store", dest='settings',
                        help="Specify a settings file. (ie settings.dev)")
    for key, value in self.spec.items():
        if value['type'] in [str, int, float]:
            # simple scalars: let argparse cast directly
            parser.add_argument(f"--{key.lower()}", action="store", dest=key,
                                type=value['type'], choices=value.get("choices"),
                                help=self.help(value))
        elif value['type'] == bool:
            # accept yes/no/true/false/1/0 via strtobool
            parser.add_argument(f"--{key.lower()}", action="store", dest=key,
                                type=lambda x: bool(strtobool(x)),
                                choices=value.get("choices"),
                                help=self.help(value))
        elif value['type'] == list:
            # space-separated values collected into a list
            parser.add_argument(f"--{key.lower()}", action="store", dest=key,
                                nargs='+', choices=value.get("choices"),
                                help=self.help(value))
        elif value['type'] == dict:
            # dict options are passed on the command line as a JSON string
            parser.add_argument(f"--{key.lower()}", action="store", dest=key,
                                type=json.loads, choices=value.get("choices"),
                                help=self.help(value))
    args, _unknown = parser.parse_known_args()
    return args
Parse the cli args
55,171
def add_args(self, args):
    """Copy every non-None attribute of *args* onto self, upper-cased."""
    for attr_name, attr_value in vars(args).items():
        if attr_value is None:
            continue
        setattr(self, attr_name.upper(), attr_value)
Add the args
55,172
def load_ini(self, ini_file):
    """Load settings from an ini file into UPPER-CASE attributes.

    If *ini_file* is given but missing, exit(1).  If it is not given, fall
    back to ``<cwd>/settings.ini`` (silently skipped when absent).  Values
    are read from the ``[settings]`` section with type-appropriate
    getters; list/dict values are parsed as JSON.
    """
    if ini_file and not os.path.exists(ini_file):
        self.log.critical(f"Settings file specified but not found. {ini_file}")
        sys.exit(1)
    if not ini_file:
        ini_file = f"{self.cwd}/settings.ini"
    if os.path.exists(ini_file):
        config = configparser.RawConfigParser(allow_no_value=True)
        config.read(ini_file)
        for key, value in self.spec.items():
            entry = None
            if value['type'] == str:
                entry = config.get("settings", option=key.lower(), fallback=None)
            elif value['type'] == bool:
                entry = config.getboolean("settings", option=key.lower(), fallback=None)
            elif value['type'] == int:
                entry = config.getint("settings", option=key.lower(), fallback=None)
            elif value['type'] == float:
                entry = config.getfloat("settings", option=key.lower(), fallback=None)
            elif value['type'] in [list, dict]:
                # complex values are stored in the ini as JSON strings
                entries = config.get("settings", option=key.lower(), fallback=None)
                if entries:
                    try:
                        entry = json.loads(entries)
                    except json.decoder.JSONDecodeError as _err:
                        self.log.critical(f"Error parsing json from ini file. {entries}")
                        sys.exit(1)
            # only override the attribute when the ini actually had a value
            if entry is not None:
                setattr(self, key.upper(), entry)
Load the contents from the ini file
55,173
def check_required(self):
    """Print a notice for every unset required setting; exit(1) if any."""
    missing_any = False
    for key, value in self.spec.items():
        if not getattr(self, key.upper()) and value['required']:
            print(f"{key} is a required setting. "
                  "Set via command-line params, env or file. "
                  "For examples, try '--generate' or '--help'.")
            missing_any = True
    if missing_any:
        sys.exit(1)
Check all required settings have been provided
55,174
def generate(self):
    """Print sample settings in the format named by ``self.GENERATE``.

    Does nothing when GENERATE is unset/falsy; otherwise runs the matching
    generator (if any) and always exits the process with status 0.
    """
    otype = getattr(self, 'GENERATE')
    if not otype:
        return
    method_names = {
        'env': 'generate_env',
        'command': 'generate_command',
        'docker-run': 'generate_docker_run',
        'docker-compose': 'generate_docker_compose',
        'kubernetes': 'generate_kubernetes',
        'ini': 'generate_ini',
        'readme': 'generate_readme',
        'drone-plugin': 'generate_drone_plugin',
    }
    method_name = method_names.get(otype)
    if method_name is not None:
        getattr(self, method_name)()
    # mirror the original: exit whenever a generate type was requested
    sys.exit(0)
Generate sample settings
55,175
def generate_env(self):
    """Print ``export``-style environment variable examples, sorted by key."""
    for key in sorted(self.spec):
        entry = self.spec[key]
        example = entry.get('example', '')
        if entry['type'] in (dict, list):
            # complex values are JSON-encoded and single-quoted for the shell
            value = "'" + json.dumps(example) + "'"
        else:
            value = f"{example}"
        print(f"export {self.env_prefix}_{key.upper()}={value}")
Generate sample environment variables
55,176
def generate_command(self):
    """Print a sample invocation of this script with every option filled in."""
    parts = [f"{sys.argv[0]}"]
    for key in sorted(self.spec):
        entry = self.spec[key]
        example = entry.get('example', '')
        if entry['type'] == list:
            value = " ".join(example)
        elif entry['type'] == dict:
            value = f"'{json.dumps(example)}'"
        else:
            value = example
        parts.append(f" --{key.lower()} {value}")
    print(" \\\n".join(parts))
Generate a sample command
55,177
def generate_docker_run(self):
    """Print a sample ``docker run`` command with every setting as an env var."""
    lines = ["docker run -it"]
    for key in sorted(self.spec):
        entry = self.spec[key]
        example = entry.get('example', '')
        if entry['type'] in (dict, list):
            value = f"'{json.dumps(example)}'"
        else:
            value = f"{example}"
        lines.append(f" -e {self.env_prefix}_{key.upper()}={value}")
    lines.append(" <container-name>")
    print(" \\\n".join(lines))
Generate a sample docker run
55,178
def generate_docker_compose(self):
    """Print a sample docker-compose service with all settings as env vars."""
    environment = []
    for key in sorted(self.spec):
        entry = self.spec[key]
        example = entry.get('example', '')
        if entry['type'] in (dict, list):
            value = f"'{json.dumps(example)}'"
        else:
            value = f"{example}"
        environment.append(f"{self.env_prefix}_{key.upper()}={value}")
    print(yaml.dump({'app': {'environment': environment}}, default_flow_style=False))
Generate a sample docker compose
55,179
def generate_ini(self):
    """Print a sample ``settings.ini`` with an example value for every key."""
    lines = ["[settings]"]
    for key in sorted(self.spec):
        entry = self.spec[key]
        example = entry.get('example', '')
        if entry['type'] in [list, dict]:
            value = json.dumps(example)
        else:
            value = example
        lines.append(f"{key.lower()}={value}")
    print("\n".join(lines))
Generate a sample ini
55,180
def generate_kubernetes(self):
    """Print a sample kubernetes pod spec with all settings as env entries.

    dict/list examples are JSON-encoded and single-quoted; everything else
    is stringified.
    """
    example = {}
    example['spec'] = {}
    example['spec']['containers'] = []
    # single placeholder container; the user fills in name/image
    example['spec']['containers'].append({"name": '', "image": '', "env": []})
    for key, value in self.spec.items():
        if value['type'] in (dict, list):
            kvalue = f"\'{json.dumps(value.get('example', ''))}\'"
        else:
            kvalue = f"{value.get('example', '')}"
        entry = {"name": f"{self.env_prefix}_{key.upper()}", "value": kvalue}
        example['spec']['containers'][0]['env'].append(entry)
    print(yaml.dump(example, default_flow_style=False))
Generate a sample kubernetes
55,181
def generate_drone_plugin(self):
    """Print a sample drone CI plugin pipeline block with example values."""
    example = {}
    example['pipeline'] = {}
    example['pipeline']['appname'] = {}
    example['pipeline']['appname']['image'] = ""
    example['pipeline']['appname']['secrets'] = ""
    for key, value in self.spec.items():
        # dict/list examples are JSON-encoded and single-quoted
        if value['type'] in (dict, list):
            kvalue = f"\'{json.dumps(value.get('example', ''))}\'"
        else:
            kvalue = f"{value.get('example', '')}"
        example['pipeline']['appname'][key.lower()] = kvalue
    print(yaml.dump(example, default_flow_style=False))
Generate a sample drone plugin configuration
55,182
def generate_readme(self):
    """Print a README section demonstrating every generator's output."""
    print("## Examples of settings runtime params")
    sections = [
        ("### Command-line parameters", self.generate_command),
        ("### Environment variables", self.generate_env),
        ("### ini file", self.generate_ini),
        ("### docker run", self.generate_docker_run),
        ("### docker compose", self.generate_docker_compose),
        ("### kubernetes", self.generate_kubernetes),
        ("### drone plugin", self.generate_drone_plugin),
    ]
    for title, generator in sections:
        print(title)
        print("```")
        generator()
        print("```")
Generate a readme with all the generators
55,183
def file_exists(self, subdir, prefix, suffix):
    """Return True if ``<STATIC_DIR>/<DIR>/<subdir>/<prefix><suffix>`` exists."""
    candidate = os.path.join(self.STATIC_DIR, self.DIR, subdir, prefix + suffix)
    return os.path.exists(candidate)
Returns true if the resource file exists else False .
55,184
def add_css(self, subdir, file_name_prefix):
    """Register a css resource for this object, preferring the minified file.

    When ``self.minify`` is set and a ``.min.css`` variant exists it is
    used; otherwise the plain ``.css`` file is used.

    :raises IOError: if neither variant is available.
    """
    plain = '.css'
    minified = '.min.css'
    if self.minify and self.file_exists(subdir, file_name_prefix, minified):
        chosen = minified
    elif self.file_exists(subdir, file_name_prefix, plain):
        chosen = plain
    else:
        missing = os.path.join(self.STATIC_DIR, self.DIR, subdir, file_name_prefix + plain)
        raise IOError('Resource file not found: {0}'.format(missing))
    self.resources_css.append(posixpath.join(self.DIR, subdir, file_name_prefix + chosen))
Add a css file for this resource .
55,185
def read_dataframe_from_xls(desired_type: Type[T], file_path: str, encoding: str, logger: Logger, **kwargs) -> pd.DataFrame:
    """Read an Excel file into a DataFrame via ``pd.read_excel``.

    The ``encoding`` argument is intentionally ignored: pandas detects the
    encoding of Excel files by itself, which is why this reader is the one
    registered for xls files.
    """
    return pd.read_excel(file_path, **kwargs)
We register this method rather than the other because pandas guesses the encoding by itself .
55,186
def read_df_or_series_from_csv(desired_type: Type[pd.DataFrame], file_path: str, encoding: str, logger: Logger, **kwargs) -> pd.DataFrame:
    """Read a csv into a DataFrame, or into a Series when requested.

    For ``pd.Series`` the first column is used as the index (unless the
    caller provided ``index_col``) and the remaining single column becomes
    the series values.

    :raises Exception: if a Series is requested but the csv has more than
        one value column.
    """
    if desired_type is not pd.Series:
        return pd.read_csv(file_path, encoding=encoding, **kwargs)
    if 'index_col' in kwargs:
        one_col_df = pd.read_csv(file_path, encoding=encoding, **kwargs)
    else:
        # default: treat the first column as the index
        one_col_df = pd.read_csv(file_path, encoding=encoding, index_col=0, **kwargs)
    if one_col_df.shape[1] != 1:
        raise Exception('Cannot build a series from this csv: it has more than two columns (one index + one value).'
                        ' Probably the parsing chain $read_df_or_series_from_csv => single_row_or_col_df_to_series$'
                        'will work, though.')
    return one_col_df[one_col_df.columns[0]]
Helper method to read a dataframe from a csv file . By default this is well suited for a dataframe with headers in the first row for example a parameter dataframe .
55,187
def dict_to_df(desired_type: Type[T], dict_obj: Dict, logger: Logger, orient: str = None, **kwargs) -> pd.DataFrame:
    """Convert a dictionary into a DataFrame.

    Dict/list values mark a "table" dict, handed to
    ``pd.DataFrame.from_dict`` (default ``orient='index'``).  Scalar values
    mark a key/value parameters dict: with ``orient='columns'`` (default)
    a single-row frame is built; with ``orient='index'`` a key-indexed
    single ``value`` column is built.
    """
    if len(dict_obj) > 0:
        first_val = dict_obj[next(iter(dict_obj))]
        if isinstance(first_val, dict) or isinstance(first_val, list):
            # table-like dict
            orient = orient or 'index'
            return pd.DataFrame.from_dict(dict_obj, orient=orient)
        else:
            # scalar key/value dict
            orient = orient or 'columns'
            # BUG FIX: the original compared with ``orient is 'columns'``
            # (identity against a str literal); use equality instead.
            if orient == 'columns':
                return pd.DataFrame(dict_obj, index=[0])
            else:
                res = pd.DataFrame.from_dict(dict_obj, orient=orient)
                res.index.name = 'key'
                return res.rename(columns={0: 'value'})
    else:
        return pd.DataFrame.from_dict(dict_obj)
Helper method to convert a dictionary into a dataframe . It supports both simple key - value dicts as well as true table dicts . For this it uses pd . DataFrame constructor or pd . DataFrame . from_dict intelligently depending on the case .
55,188
def single_row_or_col_df_to_series(desired_type: Type[T], single_rowcol_df: pd.DataFrame, logger: Logger, **kwargs) -> pd.Series:
    """Convert a one-row or one/two-column DataFrame into a Series.

    * one row: the row becomes the series (columns -> index);
    * two columns with a default RangeIndex: first column -> index,
      second column -> values;
    * one column: that column is returned as-is.

    :raises ValueError: for any other shape.
    """
    n_rows, n_cols = single_rowcol_df.shape
    if n_rows == 1:
        return single_rowcol_df.transpose()[0]
    if n_cols == 2 and isinstance(single_rowcol_df.index, pd.RangeIndex):
        indexed = single_rowcol_df.set_index(single_rowcol_df.columns[0])
        return indexed[indexed.columns[0]]
    if n_cols == 1:
        return single_rowcol_df[single_rowcol_df.columns[0]]
    raise ValueError('Unable to convert provided dataframe to a series : '
                     'expected exactly 1 row or 1 column, found : ' + str(single_rowcol_df.shape) + '')
Helper method to convert a dataframe with one row or one or two columns into a Series
55,189
def single_row_or_col_df_to_dict(desired_type: Type[T], single_rowcol_df: pd.DataFrame, logger: Logger, **kwargs) -> Dict[str, str]:
    """Convert a one-row or one/two-column DataFrame into a plain dict.

    Same shape rules as single_row_or_col_df_to_series, with the resulting
    series converted via ``.to_dict()``.

    :raises ValueError: for any other shape.
    """
    n_rows, n_cols = single_rowcol_df.shape
    if n_rows == 1:
        return single_rowcol_df.transpose()[0].to_dict()
    if n_cols == 2 and isinstance(single_rowcol_df.index, pd.RangeIndex):
        indexed = single_rowcol_df.set_index(single_rowcol_df.columns[0])
        return indexed[indexed.columns[0]].to_dict()
    if n_cols == 1:
        return single_rowcol_df[single_rowcol_df.columns[0]].to_dict()
    raise ValueError('Unable to convert provided dataframe to a parameters dictionary : '
                     'expected exactly 1 row or 1 column, found : ' + str(single_rowcol_df.shape) + '')
Helper method to convert a dataframe with one row or one or two columns into a dictionary
55,190
def full_subgraph(self, vertices):
    """Return the induced subgraph on the given vertices.

    The result contains exactly the given vertices and every edge of this
    graph whose tail *and* head both lie in that set.
    """
    # idiom fix: set(...) instead of the original's no-op comprehension copy
    subgraph_vertices = set(vertices)
    subgraph_edges = {
        edge
        for v in subgraph_vertices
        for edge in self._out_edges[v]
        if self._heads[edge] in subgraph_vertices
    }
    subgraph_heads = {edge: self._heads[edge] for edge in subgraph_edges}
    subgraph_tails = {edge: self._tails[edge] for edge in subgraph_edges}
    return DirectedGraph._raw(
        vertices=subgraph_vertices,
        edges=subgraph_edges,
        heads=subgraph_heads,
        tails=subgraph_tails,
    )
Return the subgraph of this graph whose vertices are the given ones and whose edges are all the edges of the original graph between those vertices .
55,191
def _raw(cls, vertices, edges, heads, tails):
    """Private constructor: build an instance directly from its parts.

    No validation or copying is performed; callers must hand over
    consistent, already-owned collections.  The per-vertex ``_out_edges``
    and ``_in_edges`` indexes are derived here from ``heads``/``tails``.
    """
    # bypass __init__ entirely and populate the instance by hand
    self = object.__new__(cls)
    self._vertices = vertices
    self._edges = edges
    self._heads = heads    # edge -> head (destination) vertex
    self._tails = tails    # edge -> tail (source) vertex
    self._out_edges = collections.defaultdict(set)
    self._in_edges = collections.defaultdict(set)
    for edge in self._edges:
        self._out_edges[self._tails[edge]].add(edge)
        self._in_edges[self._heads[edge]].add(edge)
    return self
Private constructor for direct construction of a DirectedGraph from its consituents .
55,192
def from_out_edges(cls, vertices, edge_mapper):
    """Build a graph from vertices plus an out-adjacency mapping.

    ``edge_mapper[v]`` lists the vertices that ``v`` has an edge to; each
    such link is assigned a fresh integer edge id.
    """
    vertex_set = set(vertices)
    edges = set()
    heads = {}
    tails = {}
    next_id = itertools.count()
    for tail in vertex_set:
        for head in edge_mapper[tail]:
            edge = next(next_id)
            edges.add(edge)
            heads[edge] = head
            tails[edge] = tail
    return cls._raw(vertices=vertex_set, edges=edges, heads=heads, tails=tails)
Create a DirectedGraph from a collection of vertices and a mapping giving the vertices that each vertex is connected to .
55,193
def from_edge_pairs(cls, vertices, edge_pairs):
    """Build a graph from vertices and (tail, head) pairs.

    Each pair receives a fresh integer edge id, assigned in input order.
    """
    vertex_set = set(vertices)
    edges = set()
    heads = {}
    tails = {}
    for edge, (tail, head) in enumerate(edge_pairs):
        edges.add(edge)
        heads[edge] = head
        tails[edge] = tail
    return cls._raw(vertices=vertex_set, edges=edges, heads=heads, tails=tails)
Create a DirectedGraph from a collection of vertices and a collection of pairs giving links between the vertices .
55,194
def annotated(self):
    """Return an AnnotatedGraph mirroring this graph's structure.

    Every vertex/edge is wrapped in an AnnotatedVertex/AnnotatedEdge
    carrying a sequential integer id and the text form of the original
    object (via six.text_type, so py2/py3 safe).
    """
    annotated_vertices = {
        vertex: AnnotatedVertex(
            id=vertex_id,
            annotation=six.text_type(vertex),
        )
        for vertex_id, vertex in zip(itertools.count(), self.vertices)
    }
    annotated_edges = [
        AnnotatedEdge(
            id=edge_id,
            annotation=six.text_type(edge),
            # link the wrapped edge to the wrapped endpoints' ids
            head=annotated_vertices[self.head(edge)].id,
            tail=annotated_vertices[self.tail(edge)].id,
        )
        for edge_id, edge in zip(itertools.count(), self.edges)
    ]
    return AnnotatedGraph(
        vertices=annotated_vertices.values(),
        edges=annotated_edges,
    )
Return an AnnotatedGraph with the same structure as this graph .
55,195
def load(self):
    """Merge the dotfile, CLI options, and defaults into this mapping.

    Reads the dotfile (if present), then syncs values both ways: explicit
    options override dotfile values, and unset options are filled from the
    dotfile, so that both self and self.options end up consistent.
    Finally the absolute input/output/context/defaults paths are resolved.
    """
    if self.exists():
        with open(self.dot_file, 'r') as handle:
            self.update(json.load(handle))
    # context: CLI option wins; otherwise propagate the dotfile value back
    if self.options['context'] is not None:
        self['context'] = self.options['context']
    else:
        self.options['context'] = self['context']
    # defaults: same two-way sync as context
    if self.options['defaults'] is not None:
        self['defaults'] = self.options['defaults']
    else:
        self.options['defaults'] = self['defaults']
    if self.options['output'] is not None:
        self['output'] = self.options['output']
    if self.options.get('inclusive', False):
        self['inclusive'] = True
    if self.options.get('exclude', []):
        self['exclude'].extend(self.options['exclude'])
    # fall back to ./dockerstache-output when no output dir was given
    if self['output'] is None:
        self['output'] = os.path.join(os.getcwd(), 'dockerstache-output')
    # resolve everything to absolute paths for downstream consumers
    self['output_path'] = self.abs_output_dir()
    self['input_path'] = self.abs_input_dir()
    if self['context'] is not None:
        self['context_path'] = absolute_path(self['context'])
    if self['defaults'] is not None:
        self['defaults_path'] = absolute_path(self['defaults'])
Read the dotfile and populate self. Explicit opts override the dotfile settings; make sure everything is synced in both opts and this object.
55,196
def env_dictionary(self):
    """Render this mapping as ``DOCKERSTACHE_*`` environment variables.

    Keys are upper-cased and prefixed; ``None`` (and other falsy) values
    become empty strings.
    """
    def _as_str(value):
        # PEP 8 fix: named inner function instead of the original's
        # lambda assigned to a name (E731); behavior unchanged.
        return str(value) if value else ""
    return {
        "DOCKERSTACHE_{}".format(key.upper()): _as_str(value)
        for key, value in six.iteritems(self)
    }
convert the options to this script into an env var dictionary for pre and post scripts
55,197
def pre_script(self):
    """Run the configured pre-script, if any, in the input directory."""
    script = self['pre_script']
    if script is None:
        return
    LOGGER.info("Executing pre script: {}".format(script))
    execute_command(self.abs_input_dir(), script, self.env_dictionary())
    LOGGER.info("Pre Script completed")
execute the pre script if it is defined
55,198
def say_tmp_filepath(text=None, preference_program="festival"):
    """Speak *text* into a temporary .wav file and return that file's path."""
    tmp_wav = "{}.wav".format(shijian.tmp_filepath())
    say(text=text, preference_program=preference_program, filepath=tmp_wav)
    return tmp_wav
Say specified text to a temporary file and return the filepath .
55,199
def clacks_overhead(fn):
    """Django view decorator adding the ``X-Clacks-Overhead`` header.

    The wrapped view's response gains the header
    ``X-Clacks-Overhead: GNU Terry Pratchett``.
    """
    def _with_header(*args, **kwargs):
        response = fn(*args, **kwargs)
        response['X-Clacks-Overhead'] = 'GNU Terry Pratchett'
        return response
    return wraps(fn)(_with_header)
A Django view decorator that will add the X - Clacks - Overhead header .