idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
1,500
def acknowledge ( self , status ) : logging . info ( 'Acknowledges status update {}' . format ( status ) ) return self . driver . acknowledgeStatusUpdate ( encode ( status ) )
Acknowledges the status update .
1,501
def message ( self , executor_id , slave_id , message ) : logging . info ( 'Sends message `{}` to executor `{}` on slave `{}`' . format ( message , executor_id , slave_id ) ) return self . driver . sendFrameworkMessage ( encode ( executor_id ) , encode ( slave_id ) , message )
Sends a message from the framework to one of its executors .
1,502
def _connect_func ( builder , obj , signal_name , handler_name , connect_object , flags , cls ) : if connect_object is None : extra = ( ) else : extra = ( connect_object , ) template_inst = builder . get_object ( cls . __gtype_name__ ) if template_inst is None : errmsg = "Internal error: cannot find template instance! obj: %s; " "signal: %s; handler: %s; connect_obj: %s; class: %s" % ( obj , signal_name , handler_name , connect_object , cls ) warnings . warn ( errmsg , GtkTemplateWarning ) return handler = getattr ( template_inst , handler_name ) if flags == GObject . ConnectFlags . AFTER : obj . connect_after ( signal_name , handler , * extra ) else : obj . connect ( signal_name , handler , * extra ) template_inst . __connected_template_signals__ . add ( handler_name )
Handles GtkBuilder signal connect events
1,503
def _register_template ( cls , template_bytes ) : if not hasattr ( cls , 'set_template' ) : raise TypeError ( "Requires PyGObject 3.13.2 or greater" ) cls . set_template ( template_bytes ) bound_methods = set ( ) bound_widgets = set ( ) for name in dir ( cls ) : o = getattr ( cls , name , None ) if inspect . ismethod ( o ) : if hasattr ( o , '_gtk_callback' ) : bound_methods . add ( name ) elif isinstance ( o , _Child ) : cls . bind_template_child_full ( name , True , 0 ) bound_widgets . add ( name ) cls . set_connect_func ( _connect_func , cls ) cls . __gtemplate_methods__ = bound_methods cls . __gtemplate_widgets__ = bound_widgets base_init_template = cls . init_template cls . init_template = lambda s : _init_template ( s , cls , base_init_template )
Registers the template for the widget and hooks init_template
1,504
def _init_template ( self , cls , base_init_template ) : if self . __class__ is not cls : raise TypeError ( "Inheritance from classes with @GtkTemplate decorators " "is not allowed at this time" ) connected_signals = set ( ) self . __connected_template_signals__ = connected_signals base_init_template ( self ) for name in self . __gtemplate_widgets__ : widget = self . get_template_child ( cls , name ) self . __dict__ [ name ] = widget if widget is None : raise AttributeError ( "A missing child widget was set using " "GtkTemplate.Child and the entire " "template is now broken (widgets: %s)" % ', ' . join ( self . __gtemplate_widgets__ ) ) for name in self . __gtemplate_methods__ . difference ( connected_signals ) : errmsg = ( "Signal '%s' was declared with @GtkTemplate.Callback " + "but was not present in template" ) % name warnings . warn ( errmsg , GtkTemplateWarning )
This would be better as an override for Gtk . Widget
1,505
def extract_haml ( fileobj , keywords , comment_tags , options ) : import haml from mako import lexer , parsetree from mako . ext . babelplugin import extract_nodes encoding = options . get ( 'input_encoding' , options . get ( 'encoding' , None ) ) template_node = lexer . Lexer ( haml . preprocessor ( fileobj . read ( ) ) , input_encoding = encoding ) . parse ( ) for extracted in extract_nodes ( template_node . get_children ( ) , keywords , comment_tags , options ) : yield extracted
babel translation token extract function for haml files
1,506
def get_single_allele_from_reads ( allele_reads ) : allele_reads = list ( allele_reads ) if len ( allele_reads ) == 0 : raise ValueError ( "Expected non-empty list of AlleleRead objects" ) seq = allele_reads [ 0 ] . allele if any ( read . allele != seq for read in allele_reads ) : raise ValueError ( "Expected all AlleleRead objects to have same allele '%s', got %s" % ( seq , allele_reads ) ) return seq
Given a sequence of AlleleRead objects which are expected to all have the same allele return that allele .
1,507
def iter_all_children ( self ) : if self . inline_child : yield self . inline_child for x in self . children : yield x
Return an iterator that yields every node which is a child of this one .
1,508
def initialize ( self , ** kwargs ) : if not set ( kwargs . keys ( ) ) . issuperset ( self . init_keys ) : raise Exception ( "TransferFn needs to be initialized with %s" % ',' . join ( repr ( el ) for el in self . init_keys ) )
Transfer functions may need additional information before the supplied numpy array can be modified in place . For instance transfer functions may have state which needs to be allocated in memory with a certain size . In other cases the transfer function may need to know about the coordinate system associated with the input data .
1,509
def override_plasticity_state ( self , new_plasticity_state ) : self . _plasticity_setting_stack . append ( self . plastic ) self . plastic = new_plasticity_state
Temporarily disable plasticity of internal state .
1,510
def register_host ( ) : pyblish . api . register_host ( "hython" ) pyblish . api . register_host ( "hpython" ) pyblish . api . register_host ( "houdini" )
Register supported hosts
1,511
def maintained_selection ( ) : previous_selection = hou . selectedNodes ( ) try : yield finally : if previous_selection : for node in previous_selection : node . setSelected ( on = True ) else : for node in previous_selection : node . setSelected ( on = False )
Maintain selection during context
1,512
def execute_transaction ( conn , statements : Iterable ) : with conn : with conn . cursor ( ) as cursor : for statement in statements : cursor . execute ( statement ) conn . commit ( )
Execute several statements in single DB transaction .
1,513
def execute_transactions ( conn , statements : Iterable ) : with conn . cursor ( ) as cursor : for statement in statements : try : cursor . execute ( statement ) conn . commit ( ) except psycopg2 . ProgrammingError : conn . rollback ( )
Execute several statements each as a single DB transaction .
1,514
def execute_closing_transaction ( statements : Iterable ) : with closing ( connect ( ) ) as conn : with conn . cursor ( ) as cursor : for statement in statements : cursor . execute ( statement )
Open a connection commit a transaction and close it .
1,515
def select ( conn , query : str , params = None , name = None , itersize = 5000 ) : with conn . cursor ( name , cursor_factory = NamedTupleCursor ) as cursor : cursor . itersize = itersize cursor . execute ( query , params ) for result in cursor : yield result
Return a select statement s results as a namedtuple .
1,516
def select_dict ( conn , query : str , params = None , name = None , itersize = 5000 ) : with conn . cursor ( name , cursor_factory = RealDictCursor ) as cursor : cursor . itersize = itersize cursor . execute ( query , params ) for result in cursor : yield result
Return a select statement s results as dictionary .
1,517
def select_each ( conn , query : str , parameter_groups , name = None ) : with conn : with conn . cursor ( name = name ) as cursor : for parameters in parameter_groups : cursor . execute ( query , parameters ) yield cursor . fetchone ( )
Run select query for each parameter set in single transaction .
1,518
def query_columns ( conn , query , name = None ) : with conn . cursor ( name ) as cursor : cursor . itersize = 1 cursor . execute ( query ) cursor . fetchmany ( 0 ) column_names = [ column . name for column in cursor . description ] return column_names
Lightweight query to retrieve column list of select query .
1,519
def from_variant_and_transcript ( cls , variant , transcript , context_size ) : if not transcript . contains_start_codon : logger . info ( "Expected transcript %s for variant %s to have start codon" , transcript . name , variant ) return None if not transcript . contains_stop_codon : logger . info ( "Expected transcript %s for variant %s to have stop codon" , transcript . name , variant ) return None if not transcript . protein_sequence : logger . info ( "Expected transript %s for variant %s to have protein sequence" , transcript . name , variant ) return None sequence_key = ReferenceSequenceKey . from_variant_and_transcript ( variant = variant , transcript = transcript , context_size = context_size ) if sequence_key is None : logger . info ( "No sequence key for variant %s on transcript %s" , variant , transcript . name ) return None return cls . from_variant_and_transcript_and_sequence_key ( variant = variant , transcript = transcript , sequence_key = sequence_key )
Extracts the reference sequence around a variant locus on a particular transcript and determines the reading frame at the start of that sequence context .
1,520
def create_readme_with_long_description ( ) : this_dir = os . path . abspath ( os . path . dirname ( __file__ ) ) readme_md = os . path . join ( this_dir , 'README.md' ) readme = os . path . join ( this_dir , 'README' ) if os . path . exists ( readme_md ) : if os . path . exists ( readme ) : os . remove ( readme ) try : import pypandoc long_description = pypandoc . convert ( readme_md , 'rst' , format = 'md' ) except ( ImportError ) : with open ( readme_md , encoding = 'utf-8' ) as in_ : long_description = in_ . read ( ) with open ( readme , 'w' ) as out : out . write ( long_description ) else : with open ( readme , encoding = 'utf-8' ) as in_ : long_description = in_ . read ( ) return long_description
Try to convert content of README . md into rst format using pypandoc write it into README and return it .
1,521
def stat ( package , graph ) : client = requests . Session ( ) for name_or_url in package : package = get_package ( name_or_url , client ) if not package : secho ( u'Invalid name or URL: "{name}"' . format ( name = name_or_url ) , fg = 'red' , file = sys . stderr ) continue try : version_downloads = package . version_downloads except NotFoundError : secho ( u'No versions found for "{0}". ' u'Skipping. . .' . format ( package . name ) , fg = 'red' , file = sys . stderr ) continue echo ( u"Fetching statistics for '{url}'. . ." . format ( url = package . package_url ) ) min_ver , min_downloads = package . min_version max_ver , max_downloads = package . max_version if min_ver is None or max_ver is None : raise click . ClickException ( 'Package has no releases' ) avg_downloads = package . average_downloads total = package . downloads echo ( ) header = u'Download statistics for {name}' . format ( name = package . name ) echo_header ( header ) if graph : echo ( ) echo ( 'Downloads by version' ) echo ( package . chart ( ) ) echo ( ) echo ( "Min downloads: {min_downloads:12,} ({min_ver})" . format ( ** locals ( ) ) ) echo ( "Max downloads: {max_downloads:12,} ({max_ver})" . format ( ** locals ( ) ) ) echo ( "Avg downloads: {avg_downloads:12,}" . format ( ** locals ( ) ) ) echo ( "Total downloads: {total:12,}" . format ( ** locals ( ) ) ) echo ( ) echo_download_summary ( package ) echo ( )
Print download statistics for a package .
1,522
def browse ( package , homepage ) : p = Package ( package ) try : if homepage : secho ( u'Opening homepage for "{0}"...' . format ( package ) , bold = True ) url = p . home_page else : secho ( u'Opening PyPI page for "{0}"...' . format ( package ) , bold = True ) url = p . package_url except NotFoundError : abort_not_found ( package ) click . launch ( url )
Browse to a package s PyPI or project homepage .
1,523
def search ( query , n_results , web ) : if web : secho ( u'Opening search page for "{0}"...' . format ( query ) , bold = True ) url = SEARCH_URL . format ( query = urlquote ( query ) ) click . launch ( url ) else : searcher = Searcher ( ) results = searcher . search ( query , n = n_results ) first_line = style ( u'Search results for "{0}"\n' . format ( query ) , bold = True ) echo_via_pager ( first_line + '\n' . join ( [ format_result ( result ) for result in results ] ) )
Search for a pypi package .
1,524
def info ( package , long_description , classifiers , license ) : client = requests . Session ( ) for name_or_url in package : package = get_package ( name_or_url , client ) if not package : secho ( u'Invalid name or URL: "{name}"' . format ( name = name_or_url ) , fg = 'red' , file = sys . stderr ) continue try : info = package . data [ 'info' ] except NotFoundError : secho ( u'No versions found for "{0}". ' u'Skipping. . .' . format ( package . name ) , fg = 'red' , file = sys . stderr ) continue echo_header ( name_or_url ) if package . summary : echo ( package . summary ) echo ( ) echo ( 'Latest release: {version:12}' . format ( version = info [ 'version' ] ) ) if long_description : echo ( ) echo ( package . description ) echo ( ) echo_download_summary ( package ) echo ( ) author , author_email = package . author , package . author_email if author : echo ( u'Author: {author:12}' . format ( ** locals ( ) ) ) if author_email : echo ( u'Author email: {author_email:12}' . format ( ** locals ( ) ) ) maintainer , maintainer_email = ( package . maintainer , package . maintainer_email ) if maintainer or maintainer_email : echo ( ) if maintainer : echo ( u'Maintainer: {maintainer:12}' . format ( ** locals ( ) ) ) if maintainer_email : echo ( u'Maintainer email: {maintainer_email:12}' . format ( ** locals ( ) ) ) echo ( ) echo ( u'PyPI URL: {pypi_url:12}' . format ( pypi_url = package . package_url ) ) if package . home_page : echo ( u'Home Page: {home_page:12}' . format ( home_page = package . home_page ) ) if package . docs_url : echo ( u'Documentation: {docs_url:12}' . format ( docs_url = package . docs_url ) ) if classifiers : echo ( ) echo ( u'Classifiers: ' ) for each in info . get ( 'classifiers' , [ ] ) : echo ( '\t' + each ) if license and package . license : echo ( ) echo ( u'License: ' , nl = False ) if package . license . find ( '\n' ) >= 0 or len ( package . license ) > 80 : echo ( ) echo ( package . license ) echo ( )
Get info about a package or packages .
1,525
def bargraph ( data , max_key_width = 30 ) : lines = [ ] max_length = min ( max ( len ( key ) for key in data . keys ( ) ) , max_key_width ) max_val = max ( data . values ( ) ) max_val_length = max ( len ( _style_value ( val ) ) for val in data . values ( ) ) term_width = get_terminal_size ( ) [ 0 ] max_bar_width = term_width - MARGIN - ( max_length + 3 + max_val_length + 3 ) template = u"{key:{key_width}} [ {value:{val_width}} ] {bar}" for key , value in data . items ( ) : try : bar = int ( math . ceil ( max_bar_width * value / max_val ) ) * TICK except ZeroDivisionError : bar = '' line = template . format ( key = key [ : max_length ] , value = _style_value ( value ) , bar = bar , key_width = max_length , val_width = max_val_length ) lines . append ( line ) return '\n' . join ( lines )
Return a bar graph as a string given a dictionary of data .
1,526
def max_version ( self ) : data = self . version_downloads if not data : return None , 0 return max ( data . items ( ) , key = lambda item : item [ 1 ] )
Version with the most downloads .
1,527
def min_version ( self ) : data = self . version_downloads if not data : return ( None , 0 ) return min ( data . items ( ) , key = lambda item : item [ 1 ] )
Version with the fewest downloads .
1,528
def ripping_of_cds ( ) : install_package ( 'ripit' ) install_file_legacy ( path = '~/.ripit/config' , username = env . user ) run ( 'mkdir -p ~/bin' ) install_file_legacy ( '~/bin/burnit' ) run ( 'chmod 755 ~/bin/burnit' )
Install the tools ripit and burnit in order to rip and burn audio cds .
1,529
def i3 ( ) : install_package ( 'i3' ) install_file_legacy ( path = '~/.i3/config' , username = env . user , repos_dir = 'repos' ) install_packages ( [ 'make' , 'pkg-config' , 'gcc' , 'libc6-dev' , 'libx11-dev' ] ) checkup_git_repo_legacy ( url = 'https://github.com/aktau/hhpc.git' ) run ( 'cd ~/repos/hhpc && make' )
Install and customize the tiling window manager i3 .
1,530
def solarized ( ) : install_packages ( [ 'rxvt-unicode' , 'tmux' , 'vim' ] ) install_file_legacy ( '~/.Xresources' ) if env . host_string == 'localhost' : run ( 'xrdb ~/.Xresources' ) run ( 'mkdir -p ~/bin' ) install_file_legacy ( '~/bin/term_colors' ) run ( 'chmod 755 ~/bin/term_colors' ) run ( '~/bin/term_colors' )
Set solarized colors in urxvt tmux and vim .
1,531
def vim ( ) : install_package ( 'vim' ) print_msg ( '## install ~/.vimrc\n' ) install_file_legacy ( '~/.vimrc' ) print_msg ( '\n## set up pathogen\n' ) run ( 'mkdir -p ~/.vim/autoload ~/.vim/bundle' ) checkup_git_repo_legacy ( url = 'https://github.com/tpope/vim-pathogen.git' ) run ( 'ln -snf ~/repos/vim-pathogen/autoload/pathogen.vim ' '~/.vim/autoload/pathogen.vim' ) print_msg ( '\n## install vim packages\n' ) install_package ( 'ctags' ) repos = [ { 'name' : 'vim-colors-solarized' , 'url' : 'git://github.com/altercation/vim-colors-solarized.git' , } , { 'name' : 'nerdtree' , 'url' : 'https://github.com/scrooloose/nerdtree.git' , } , { 'name' : 'vim-nerdtree-tabs' , 'url' : 'https://github.com/jistr/vim-nerdtree-tabs.git' , } , { 'name' : 'tagbar' , 'url' : 'https://github.com/majutsushi/tagbar.git' , } , ] checkup_git_repos_legacy ( repos , base_dir = '~/.vim/bundle' )
Customize vim install package manager pathogen and some vim - packages .
1,532
def pyenv ( ) : install_packages ( [ 'make' , 'build-essential' , 'libssl-dev' , 'zlib1g-dev' , 'libbz2-dev' , 'libreadline-dev' , 'libsqlite3-dev' , 'wget' , 'curl' , 'llvm' , 'libncurses5-dev' , 'libncursesw5-dev' , ] ) if exists ( '~/.pyenv' ) : run ( 'cd ~/.pyenv && git pull' ) run ( '~/.pyenv/bin/pyenv update' ) else : run ( 'curl -L https://raw.githubusercontent.com/yyuu/pyenv-installer/' 'master/bin/pyenv-installer | bash' ) bash_snippet = '~/.bashrc_pyenv' install_file_legacy ( path = bash_snippet ) prefix = flo ( 'if [ -f {bash_snippet} ]; ' ) enabler = flo ( 'if [ -f {bash_snippet} ]; then source {bash_snippet}; fi' ) if env . host == 'localhost' : uncomment_or_update_or_append_line ( filename = '~/.bashrc' , prefix = prefix , new_line = enabler ) else : print ( cyan ( '\nappend to ~/.bashrc:\n\n ' ) + enabler )
Install or update the pyenv python environment .
1,533
def virtualbox_host ( ) : if query_yes_no ( question = 'Uninstall virtualbox-dkms?' , default = 'yes' ) : run ( 'sudo apt-get remove virtualbox-dkms' ) install_packages ( [ 'virtualbox' , 'virtualbox-qt' , 'virtualbox-dkms' , 'virtualbox-guest-dkms' , 'virtualbox-guest-additions-iso' , ] ) users = [ env . user ] for username in users : run ( flo ( 'sudo adduser {username} vboxusers' ) )
Install a VirtualBox host system .
1,534
def pencil2 ( ) : repo_name = 'pencil2' repo_dir = flo ( '~/repos/{repo_name}' ) print_msg ( '## fetch latest pencil\n' ) checkup_git_repo_legacy ( url = 'https://github.com/prikhi/pencil.git' , name = repo_name ) print_msg ( '\n## build properties\n' ) update_or_append_line ( flo ( '{repo_dir}/build/properties.sh' ) , prefix = 'export MAX_VERSION=' , new_line = "export MAX_VERSION='100.*'" ) run ( flo ( 'cat {repo_dir}/build/properties.sh' ) ) run ( flo ( 'cd {repo_dir}/build && ./build.sh linux' ) , msg = '\n## build pencil\n' ) install_user_command_legacy ( 'pencil2' , pencil2_repodir = repo_dir ) print_msg ( '\nNow You can start pencil version 2 with this command:\n\n' ' pencil2' )
Install or update latest Pencil version 2 a GUI prototyping tool .
1,535
def pencil3 ( ) : repo_name = 'pencil3' repo_dir = flo ( '~/repos/{repo_name}' ) print_msg ( '## fetch latest pencil\n' ) checkup_git_repo_legacy ( url = 'https://github.com/evolus/pencil.git' , name = repo_name ) run ( flo ( 'cd {repo_dir} && npm install' ) , msg = '\n## install npms\n' ) install_user_command_legacy ( 'pencil3' , pencil3_repodir = repo_dir ) print_msg ( '\nNow You can start pencil version 3 with this command:\n\n' ' pencil3' )
Install or update latest Pencil version 3 a GUI prototyping tool .
1,536
def powerline_shell ( ) : assert env . host == 'localhost' , 'This task cannot run on a remote host' checkup_git_repo_legacy ( 'https://github.com/powerline/fonts.git' , name = 'powerline-fonts' ) run ( 'cd ~/repos/powerline-fonts && ./install.sh' ) prefix = 'URxvt*font: ' from config import fontlist line = prefix + fontlist update_or_append_line ( filename = '~/.Xresources' , prefix = prefix , new_line = line ) if env . host_string == 'localhost' : run ( 'xrdb ~/.Xresources' ) checkup_git_repo_legacy ( 'https://github.com/banga/powerline-shell.git' ) install_file_legacy ( path = '~/repos/powerline-shell/config.py' ) run ( 'cd ~/repos/powerline-shell && ./install.py' ) question = 'Use normal question mark (u003F) for untracked files instead ' 'of fancy "black question mark ornament" (u2753, which may not work)?' if query_yes_no ( question , default = 'yes' ) : filename = '~/repos/powerline-shell/powerline-shell.py' update_or_append_line ( filename , keep_backup = False , prefix = " 'untracked': u'\u2753'," , new_line = " 'untracked': u'\u003F'," ) run ( flo ( 'chmod u+x {filename}' ) ) bash_snippet = '~/.bashrc_powerline_shell' install_file_legacy ( path = bash_snippet ) prefix = flo ( 'if [ -f {bash_snippet} ]; ' ) enabler = flo ( 'if [ -f {bash_snippet} ]; then source {bash_snippet}; fi' ) uncomment_or_update_or_append_line ( filename = '~/.bashrc' , prefix = prefix , new_line = enabler )
Install and set up powerline - shell prompt .
1,537
def _init_boto3_clients ( self , profile , region ) : try : session = None if profile and region : session = boto3 . session . Session ( profile_name = profile , region_name = region ) elif profile : session = boto3 . session . Session ( profile_name = profile ) elif region : session = boto3 . session . Session ( region_name = region ) else : session = boto3 . session . Session ( ) self . _cloud_formation = session . client ( 'cloudformation' ) return True except Exception as wtf : logging . error ( wtf , exc_info = True ) return False
The utililty requires boto3 clients to CloudFormation .
1,538
def determine_drift ( self ) : try : response = self . _cloud_formation . detect_stack_drift ( StackName = self . _stack_name ) drift_request_id = response . get ( 'StackDriftDetectionId' , None ) if drift_request_id : logging . info ( 'drift_request_id: %s - polling' , drift_request_id ) drift_calc_done = False while not drift_calc_done : time . sleep ( self . nap_time ) response = self . _cloud_formation . describe_stack_drift_detection_status ( StackDriftDetectionId = drift_request_id ) current_state = response . get ( 'DetectionStatus' , None ) logging . info ( 'describe_stack_drift_detection_status(): {}' . format ( current_state ) ) drift_calc_done = current_state in CALC_DONE_STATES drift_answer = response . get ( 'StackDriftStatus' , 'UNKNOWN' ) logging . info ( 'drift of {}: {}' . format ( self . _stack_name , drift_answer ) ) if drift_answer == 'DRIFTED' : if self . _verbose : self . _print_drift_report ( ) return False else : return True else : logging . warning ( 'drift_request_id is None' ) return False except Exception as wtf : logging . error ( wtf , exc_info = True ) return False
Determine the drift of the stack .
1,539
def _print_drift_report ( self ) : try : response = self . _cloud_formation . describe_stack_resources ( StackName = self . _stack_name ) rows = [ ] for resource in response . get ( 'StackResources' , [ ] ) : row = [ ] row . append ( resource . get ( 'LogicalResourceId' , 'unknown' ) ) row . append ( resource . get ( 'PhysicalResourceId' , 'unknown' ) ) row . append ( resource . get ( 'ResourceStatus' , 'unknown' ) ) row . append ( resource . get ( 'DriftInformation' , { } ) . get ( 'StackResourceDriftStatus' , 'unknown' ) ) rows . append ( row ) print ( 'Drift Report:' ) print ( tabulate ( rows , headers = [ 'Logical ID' , 'Physical ID' , 'Resource Status' , 'Drift Info' ] ) ) except Exception as wtf : logging . error ( wtf , exc_info = True ) return False return True
Report the drift of the stack .
1,540
def set_data ( self , data ) : "Use this method to set the data for this blob" if data is None : self . data_size = 0 self . data = None return self . data_size = len ( data ) self . data = ctypes . cast ( ctypes . create_string_buffer ( data ) , ctypes . c_void_p )
Use this method to set the data for this blob
1,541
def get_data ( self ) : "Get the data for this blob" array = ctypes . POINTER ( ctypes . c_char * len ( self ) ) return ctypes . cast ( self . data , array ) . contents . raw
Get the data for this blob
1,542
def printMetaDataFor ( archive , location ) : desc = archive . getMetadataForLocation ( location ) if desc . isEmpty ( ) : print ( " no metadata for '{0}'" . format ( location ) ) return None print ( " metadata for '{0}':" . format ( location ) ) print ( " Created : {0}" . format ( desc . getCreated ( ) . getDateAsString ( ) ) ) for i in range ( desc . getNumModified ( ) ) : print ( " Modified : {0}" . format ( desc . getModified ( i ) . getDateAsString ( ) ) ) print ( " # Creators: {0}" . format ( desc . getNumCreators ( ) ) ) for i in range ( desc . getNumCreators ( ) ) : creator = desc . getCreator ( i ) print ( " {0} {1}" . format ( creator . getGivenName ( ) , creator . getFamilyName ( ) ) )
Prints metadata for given location .
1,543
def printArchive ( fileName ) : archive = CombineArchive ( ) if archive . initializeFromArchive ( fileName ) is None : print ( "Invalid Combine Archive" ) return None print ( '*' * 80 ) print ( 'Print archive:' , fileName ) print ( '*' * 80 ) printMetaDataFor ( archive , "." ) print ( "Num Entries: {0}" . format ( archive . getNumEntries ( ) ) ) for i in range ( archive . getNumEntries ( ) ) : entry = archive . getEntry ( i ) print ( " {0}: location: {1} format: {2}" . format ( i , entry . getLocation ( ) , entry . getFormat ( ) ) ) printMetaDataFor ( archive , entry . getLocation ( ) ) for j in range ( entry . getNumCrossRefs ( ) ) : print ( " {0}: crossRef location {1}" . format ( j , entry . getCrossRef ( j ) . getLocation ( ) ) ) archive . cleanUp ( )
Prints content of combine archive
1,544
def mklink ( ) : from optparse import OptionParser parser = OptionParser ( usage = "usage: %prog [options] link target" ) parser . add_option ( '-d' , '--directory' , help = "Target is a directory (only necessary if not present)" , action = "store_true" ) options , args = parser . parse_args ( ) try : link , target = args except ValueError : parser . error ( "incorrect number of arguments" ) symlink ( target , link , options . directory ) sys . stdout . write ( "Symbolic link created: %(link)s % vars ( ) )
Like cmd . exe s mklink except it will infer directory status of the target .
1,545
def is_reparse_point ( path ) : res = api . GetFileAttributes ( path ) return ( res != api . INVALID_FILE_ATTRIBUTES and bool ( res & api . FILE_ATTRIBUTE_REPARSE_POINT ) )
Determine if the given path is a reparse point . Return False if the file does not exist or the file attributes cannot be determined .
1,546
def is_symlink ( path ) : path = _patch_path ( path ) try : return _is_symlink ( next ( find_files ( path ) ) ) except WindowsError as orig_error : tmpl = "Error accessing {path}: {orig_error.message}" raise builtins . WindowsError ( tmpl . format ( ** locals ( ) ) )
Assuming path is a reparse point determine if it s a symlink .
1,547
def get_final_path ( path ) : r desired_access = api . NULL share_mode = ( api . FILE_SHARE_READ | api . FILE_SHARE_WRITE | api . FILE_SHARE_DELETE ) security_attributes = api . LPSECURITY_ATTRIBUTES ( ) hFile = api . CreateFile ( path , desired_access , share_mode , security_attributes , api . OPEN_EXISTING , api . FILE_FLAG_BACKUP_SEMANTICS , api . NULL , ) if hFile == api . INVALID_HANDLE_VALUE : raise WindowsError ( ) buf_size = api . GetFinalPathNameByHandle ( hFile , LPWSTR ( ) , 0 , api . VOLUME_NAME_DOS ) handle_nonzero_success ( buf_size ) buf = create_unicode_buffer ( buf_size ) result_length = api . GetFinalPathNameByHandle ( hFile , buf , len ( buf ) , api . VOLUME_NAME_DOS ) assert result_length < len ( buf ) handle_nonzero_success ( result_length ) handle_nonzero_success ( api . CloseHandle ( hFile ) ) return buf [ : result_length ]
r For a given path determine the ultimate location of that path . Useful for resolving symlink targets . This functions wraps the GetFinalPathNameByHandle from the Windows SDK .
1,548
def join ( * paths ) : r paths_with_drives = map ( os . path . splitdrive , paths ) drives , paths = zip ( * paths_with_drives ) drive = next ( filter ( None , reversed ( drives ) ) , '' ) return os . path . join ( drive , os . path . join ( * paths ) )
r Wrapper around os . path . join that works with Windows drive letters .
1,549
def resolve_path ( target , start = os . path . curdir ) : r return os . path . normpath ( join ( start , target ) )
r Find a path from start to target where target is relative to start .
1,550
def trace_symlink_target ( link ) : if not is_symlink ( link ) : raise ValueError ( "link must point to a symlink on the system" ) while is_symlink ( link ) : orig = os . path . dirname ( link ) link = readlink ( link ) link = resolve_path ( link , orig ) return link
Given a file that is known to be a symlink trace it to its ultimate target .
1,551
def patch_os_module ( ) : if not hasattr ( os , 'symlink' ) : os . symlink = symlink os . path . islink = islink if not hasattr ( os , 'readlink' ) : os . readlink = readlink
jaraco . windows provides the os . symlink and os . readlink functions . Monkey - patch the os module to include them if not present .
1,552
def task ( func , * args , ** kwargs ) : prefix = '\n# ' tail = '\n' return fabric . api . task ( print_full_name ( color = magenta , prefix = prefix , tail = tail ) ( print_doc1 ( func ) ) , * args , ** kwargs )
Composition of decorator functions for inherent self - documentation on task execution .
1,553
def subtask ( * args , ** kwargs ) : depth = kwargs . get ( 'depth' , 2 ) prefix = kwargs . get ( 'prefix' , '\n' + '#' * depth + ' ' ) tail = kwargs . get ( 'tail' , '\n' ) doc1 = kwargs . get ( 'doc1' , False ) color = kwargs . get ( 'color' , cyan ) def real_decorator ( func ) : if doc1 : return print_full_name ( color = color , prefix = prefix , tail = tail ) ( print_doc1 ( func ) ) return print_full_name ( color = color , prefix = prefix , tail = tail ) ( func ) invoked = bool ( not args or kwargs ) if not invoked : return real_decorator ( func = args [ 0 ] ) return real_decorator
Decorator which prints out the name of the decorated function on execution .
1,554
def _is_sudoer ( what_for = '' ) : if env . get ( 'nosudo' , None ) is None : if what_for : print ( yellow ( what_for ) ) with quiet ( ) : output = run ( 'sudo -nv' , capture = True ) env . nosudo = not ( output . startswith ( 'sudo: ' ) or output == '' ) if env . nosudo : print ( 'Cannot execute sudo-commands' ) return not env . nosudo
Return True if current user is a sudoer else False .
1,555
def install_packages ( packages , what_for = 'for a complete setup to work properly' ) : res = True non_installed_packages = _non_installed ( packages ) packages_str = ' ' . join ( non_installed_packages ) if non_installed_packages : with quiet ( ) : dpkg = _has_dpkg ( ) hint = ' (You may have to install them manually)' do_install = False go_on = True if dpkg : if _is_sudoer ( 'Want to install dpkg packages' ) : do_install = True else : do_install is False info = yellow ( ' ' . join ( [ 'This deb packages are missing to be installed' , flo ( "{what_for}: " ) , ', ' . join ( non_installed_packages ) , ] ) ) question = ' Continue anyway?' go_on = query_yes_no ( info + hint + question , default = 'no' ) else : do_install = False info = yellow ( ' ' . join ( [ flo ( 'Required {what_for}: ' ) , ', ' . join ( non_installed_packages ) , ] ) ) go_on = query_yes_no ( info + hint + ' Continue?' , default = 'yes' ) if not go_on : sys . exit ( 'Abort' ) if do_install : command = flo ( 'sudo apt-get install {packages_str}' ) res = run ( command ) . return_code == 0 return res
Try to install . deb packages given by list .
1,556
def checkup_git_repos_legacy ( repos , base_dir = '~/repos' , verbose = False , prefix = '' , postfix = '' ) : run ( flo ( 'mkdir -p {base_dir}' ) ) for repo in repos : cur_base_dir = repo . get ( 'base_dir' , base_dir ) checkup_git_repo_legacy ( url = repo [ 'url' ] , name = repo . get ( 'name' , None ) , base_dir = cur_base_dir , verbose = verbose , prefix = prefix , postfix = postfix )
Checkout or update git repos .
1,557
def checkup_git_repo_legacy ( url , name = None , base_dir = '~/repos' , verbose = False , prefix = '' , postfix = '' ) : if not name : match = re . match ( r'.*/(.+)\.git' , url ) assert match , flo ( "Unable to extract repo name from '{url}'" ) name = match . group ( 1 ) assert name is not None , flo ( 'Cannot extract repo name from repo: {url}' ) assert name != '' , flo ( 'Cannot extract repo name from repo: {url} (empty)' ) if verbose : name_blue = blue ( name ) print_msg ( flo ( '{prefix}Checkout or update {name_blue}{postfix}' ) ) if not exists ( base_dir ) : run ( flo ( 'mkdir -p {base_dir}' ) ) if not exists ( flo ( '{base_dir}/{name}/.git' ) ) : run ( flo ( ' && ' . join ( [ 'cd {base_dir}' , 'git clone {url} {name}' ] ) ) , msg = 'clone repo' ) else : if verbose : print_msg ( 'update: pull from origin' ) run ( flo ( 'cd {base_dir}/{name} && git pull' ) ) return name
Checkout or update a git repo .
1,558
def install_file_legacy ( path , sudo = False , from_path = None , ** substitutions ) : from_path = from_path or path from_tail = join ( 'files' , from_path . lstrip ( os . sep ) ) if from_path . startswith ( '~/' ) : from_tail = join ( 'files' , 'home' , 'USERNAME' , from_path [ 2 : ] ) from_common = join ( FABFILE_DATA_DIR , from_tail ) from_custom = join ( FABSETUP_CUSTOM_DIR , from_tail ) for subst in [ 'SITENAME' , 'USER' , 'ADDON' , 'TASK' ] : sitename = substitutions . get ( subst , False ) if sitename : path = path . replace ( subst , sitename ) to_ = path if sudo : to_ = join ( os . sep , 'tmp' , 'fabsetup_' + os . path . basename ( path ) ) path_dir = dirname ( path ) if isfile ( from_custom ) : run ( flo ( 'mkdir -p {path_dir}' ) ) put ( from_custom , to_ ) elif isfile ( from_custom + '.template' ) : _install_file_from_template_legacy ( from_custom + '.template' , to_ = to_ , ** substitutions ) elif isfile ( from_common ) : run ( flo ( 'mkdir -p {path_dir}' ) ) put ( from_common , to_ ) else : _install_file_from_template_legacy ( from_common + '.template' , to_ = to_ , ** substitutions ) if sudo : run ( flo ( 'sudo mv --force {to_} {path}' ) )
Install file with path on the host target .
1,559
def install_user_command_legacy ( command , ** substitutions ) : path = flo ( '~/bin/{command}' ) install_file_legacy ( path , ** substitutions ) run ( flo ( 'chmod 755 {path}' ) )
Install command executable file into users bin dir .
1,560
def _line_2_pair ( line ) : key , val = line . split ( '=' ) return key . lower ( ) , val . strip ( '"' )
Return bash variable declaration as name - value pair .
1,561
def extract_minors_from_setup_py ( filename_setup_py ) : minors_str = fabric . api . local ( flo ( 'grep --perl-regexp --only-matching ' '"(?<=Programming Language :: Python :: )\\d+\\.\\d+" ' '{filename_setup_py}' ) , capture = True ) minors = minors_str . split ( ) return minors
Extract supported python minor versions from setup . py and return them as a list of str .
1,562
def vim_janus ( uninstall = None ) : if uninstall is not None : uninstall_janus ( ) else : if not exists ( '~/.vim/janus' ) : print_msg ( 'not installed => install' ) install_janus ( ) else : print_msg ( 'already installed => update' ) update_janus ( ) customize_janus ( ) show_files_used_by_vim_and_janus ( )
Install or update Janus a distribution of addons and mappings for vim .
1,563
def scan ( host , port = 80 , url = None , https = False , timeout = 1 , max_size = 65535 ) : starts = OrderedDict ( ) ends = OrderedDict ( ) port = int ( port ) result = dict ( host = host , port = port , state = 'closed' , durations = OrderedDict ( ) ) if url : timeout = 1 result [ 'code' ] = None starts [ 'all' ] = starts [ 'dns' ] = datetime . datetime . now ( ) try : hostip = socket . gethostbyname ( host ) result [ 'ip' ] = hostip ends [ 'dns' ] = datetime . datetime . now ( ) except socket . gaierror : raise ScanFailed ( 'DNS Lookup failed' , result = result ) starts [ 'connect' ] = datetime . datetime . now ( ) network_socket = socket . socket ( socket . AF_INET , socket . SOCK_STREAM ) network_socket . settimeout ( timeout ) result_connection = network_socket . connect_ex ( ( hostip , port ) ) ends [ 'connect' ] = datetime . datetime . now ( ) if https : starts [ 'ssl' ] = datetime . datetime . now ( ) try : network_socket = ssl . wrap_socket ( network_socket ) except socket . timeout : raise ScanFailed ( 'SSL socket timeout' , result = result ) ends [ 'ssl' ] = datetime . datetime . now ( ) if result_connection == 0 and url : starts [ 'request' ] = datetime . datetime . now ( ) network_socket . send ( "GET {0} HTTP/1.0\r\nHost: {1}\r\n\r\n" . format ( url , host ) . encode ( 'ascii' ) ) if max_size : data = network_socket . recv ( max_size ) else : data = network_socket . recv ( ) result [ 'length' ] = len ( data ) data = data . decode ( 'ascii' , errors = 'ignore' ) result [ 'response' ] = ( data ) try : result [ 'code' ] = int ( data . split ( '\n' ) [ 0 ] . split ( ) [ 1 ] ) except IndexError : pass ends [ 'request' ] = datetime . datetime . now ( ) network_socket . close ( ) ends [ 'all' ] = datetime . datetime . now ( ) for duration in starts . keys ( ) : if duration in ends . keys ( ) : result [ 'durations' ] [ duration ] = ends [ duration ] - starts [ duration ] if result_connection == 0 : result [ 'state' ] = 'open' return result
Scan a network port
1,564
def ping ( host , port = 80 , url = None , https = False , timeout = 1 , max_size = 65535 , sequence = 0 ) : try : result = scan ( host = host , port = port , url = url , https = https , timeout = timeout , max_size = max_size ) except ScanFailed as failure : result = failure . result result [ 'error' ] = True result [ 'error_message' ] = str ( failure ) result_obj = PingResponse ( host = host , port = port , ip = result . get ( 'ip' , None ) , sequence = sequence , durations = result . get ( 'durations' , None ) , code = result . get ( 'code' , None ) , state = result . get ( 'state' , 'unknown' ) , length = result . get ( 'length' , 0 ) , response = result . get ( 'response' , None ) , error = result . get ( 'error' , False ) , error_message = result . get ( 'error_message' , None ) , responding = True if result . get ( 'state' , 'unknown' ) in [ 'open' ] else False , start = datetime . datetime . now ( ) , end = datetime . datetime . now ( ) + result [ 'durations' ] . get ( 'all' , datetime . timedelta ( 0 ) ) if result . get ( 'durations' , None ) else None ) return result_obj
Ping a host
1,565
def delete ( stack , region , profile ) : ini_data = { } environment = { } environment [ 'stack_name' ] = stack if region : environment [ 'region' ] = region else : environment [ 'region' ] = find_myself ( ) if profile : environment [ 'profile' ] = profile ini_data [ 'environment' ] = environment if start_smash ( ini_data ) : sys . exit ( 0 ) else : sys . exit ( 1 )
Delete the given CloudFormation stack .
1,566
def list ( region , profile ) : ini_data = { } environment = { } if region : environment [ 'region' ] = region else : environment [ 'region' ] = find_myself ( ) if profile : environment [ 'profile' ] = profile ini_data [ 'environment' ] = environment if start_list ( ini_data ) : sys . exit ( 0 ) else : sys . exit ( 1 )
List all the CloudFormation stacks in the given region .
1,567
def drift ( stack , region , profile ) : logging . debug ( 'finding drift - stack: {}' . format ( stack ) ) logging . debug ( 'region: {}' . format ( region ) ) logging . debug ( 'profile: {}' . format ( profile ) ) tool = DriftTool ( Stack = stack , Region = region , Profile = profile , Verbose = True ) if tool . determine_drift ( ) : sys . exit ( 0 ) else : sys . exit ( 1 )
Produce a CloudFormation drift report for the given stack .
1,568
def start_upsert ( ini_data ) : stack_driver = CloudStackUtility ( ini_data ) poll_stack = not ini_data . get ( 'no_poll' , False ) if stack_driver . upsert ( ) : logging . info ( 'stack create/update was started successfully.' ) if poll_stack : stack_tool = None try : profile = ini_data . get ( 'environment' , { } ) . get ( 'profile' ) if profile : boto3_session = boto3 . session . Session ( profile_name = profile ) else : boto3_session = boto3 . session . Session ( ) region = ini_data [ 'environment' ] [ 'region' ] stack_name = ini_data [ 'environment' ] [ 'stack_name' ] cf_client = stack_driver . get_cloud_formation_client ( ) if not cf_client : cf_client = boto3_session . client ( 'cloudformation' , region_name = region ) stack_tool = stack_tool = StackTool ( stack_name , region , cf_client ) except Exception as wtf : logging . warning ( 'there was a problems creating stack tool: {}' . format ( wtf ) ) if stack_driver . poll_stack ( ) : try : logging . info ( 'stack create/update was finished successfully.' ) stack_tool . print_stack_info ( ) except Exception as wtf : logging . warning ( 'there was a problems printing stack info: {}' . format ( wtf ) ) sys . exit ( 0 ) else : try : logging . error ( 'stack create/update was did not go well.' ) stack_tool . print_stack_events ( ) except Exception as wtf : logging . warning ( 'there was a problems printing stack events: {}' . format ( wtf ) ) sys . exit ( 1 ) else : logging . error ( 'start of stack create/update did not go well.' ) sys . exit ( 1 )
Helper function to facilitate upsert .
1,569
def read_config_info ( ini_file ) : try : config = RawConfigParser ( ) config . optionxform = lambda option : option config . read ( ini_file ) the_stuff = { } for section in config . sections ( ) : the_stuff [ section ] = { } for option in config . options ( section ) : the_stuff [ section ] [ option ] = config . get ( section , option ) return the_stuff except Exception as wtf : logging . error ( 'Exception caught in read_config_info(): {}' . format ( wtf ) ) traceback . print_exc ( file = sys . stdout ) return sys . exit ( 1 )
Read the INI file
1,570
def print_stack_info ( self ) : try : rest_api_id = None deployment_found = False response = self . _cf_client . describe_stack_resources ( StackName = self . _stack_name ) print ( '\nThe following resources were created:' ) rows = [ ] for resource in response [ 'StackResources' ] : if resource [ 'ResourceType' ] == 'AWS::ApiGateway::RestApi' : rest_api_id = resource [ 'PhysicalResourceId' ] elif resource [ 'ResourceType' ] == 'AWS::ApiGateway::Deployment' : deployment_found = True row = [ ] row . append ( resource [ 'ResourceType' ] ) row . append ( resource [ 'LogicalResourceId' ] ) row . append ( resource [ 'PhysicalResourceId' ] ) rows . append ( row ) print ( tabulate ( rows , headers = [ 'Resource Type' , 'Logical ID' , 'Physical ID' ] ) ) if rest_api_id and deployment_found : url = 'https://{}.execute-api.{}.amazonaws.com/{}' . format ( rest_api_id , self . _region , '<stage>' ) print ( '\nThe deployed service can be found at this URL:' ) print ( '\t{}\n' . format ( url ) ) return response except Exception as wtf : print ( wtf ) return None
List resources from the given stack
1,571
def print_stack_events ( self ) : first_token = '7be7981bd6287dd8112305e8f3822a6f' keep_going = True next_token = first_token current_request_token = None rows = [ ] try : while keep_going and next_token : if next_token == first_token : response = self . _cf_client . describe_stack_events ( StackName = self . _stack_name ) else : response = self . _cf_client . describe_stack_events ( StackName = self . _stack_name , NextToken = next_token ) next_token = response . get ( 'NextToken' , None ) for event in response [ 'StackEvents' ] : row = [ ] event_time = event . get ( 'Timestamp' ) request_token = event . get ( 'ClientRequestToken' , 'unknown' ) if current_request_token is None : current_request_token = request_token elif current_request_token != request_token : keep_going = False break row . append ( event_time . strftime ( '%x %X' ) ) row . append ( event . get ( 'LogicalResourceId' ) ) row . append ( event . get ( 'ResourceStatus' ) ) row . append ( event . get ( 'ResourceStatusReason' , '' ) ) rows . append ( row ) if len ( rows ) > 0 : print ( '\nEvents for the current upsert:' ) print ( tabulate ( rows , headers = [ 'Time' , 'Logical ID' , 'Status' , 'Message' ] ) ) return True else : print ( '\nNo stack events found\n' ) except Exception as wtf : print ( wtf ) return False
List events from the given stack
1,572
def trac ( ) : hostname = re . sub ( r'^[^@]+@' , '' , env . host ) sitename = query_input ( question = '\nEnter site-name of Your trac web service' , default = flo ( 'trac.{hostname}' ) ) username = env . user site_dir = flo ( '/home/{username}/sites/{sitename}' ) bin_dir = flo ( '{site_dir}/virtualenv/bin' ) install_or_upgrade_virtualenv_pip_package ( ) create_directory_structure ( site_dir ) update_virtualenv ( site_dir , sitename ) set_up_trac_plugins ( sitename , site_dir , bin_dir ) set_up_gunicorn ( site_dir , sitename ) configure_nginx ( username , sitename , hostname ) if query_yes_no ( '\nRestore trac environment from backup tarball?' , default = 'no' ) : restore_tracenv_from_backup_tarball ( site_dir , bin_dir ) elif not tracenv_exists ( site_dir ) : init_tracenv ( site_dir , bin_dir , username ) upgrade_tracenv ( site_dir , bin_dir ) set_up_upstart_for_gunicorn ( sitename , username , site_dir )
Set up or update a trac project .
1,573
async def query ( self , path , method = 'get' , ** params ) : if method in ( 'get' , 'post' , 'patch' , 'delete' , 'put' ) : full_path = self . host + path if method == 'get' : resp = await self . aio_sess . get ( full_path , params = params ) elif method == 'post' : resp = await self . aio_sess . post ( full_path , data = params ) elif method == 'patch' : resp = await self . aio_sess . patch ( full_path , data = params ) elif method == 'delete' : resp = await self . aio_sess . delete ( full_path , params = params , headers = params ) elif method == 'put' : resp = await self . aio_sess . put ( full_path , data = params ) async with resp : if resp . content_type . startswith ( 'application/pdf' ) or resp . content_type . startswith ( 'application/epub' ) : return await resp . read ( ) return await self . handle_json_response ( resp ) else : raise ValueError ( 'method expected: get, post, patch, delete, put' )
Do a query to the System API
1,574
def revealjs ( basedir = None , title = None , subtitle = None , description = None , github_user = None , github_repo = None ) : basedir = basedir or query_input ( 'Base dir of the presentation?' , default = '~/repos/my_presi' ) revealjs_repo_name = 'reveal.js' revealjs_dir = flo ( '{basedir}/{revealjs_repo_name}' ) _lazy_dict [ 'presi_title' ] = title _lazy_dict [ 'presi_subtitle' ] = subtitle _lazy_dict [ 'presi_description' ] = description _lazy_dict [ 'github_user' ] = github_user _lazy_dict [ 'github_repo' ] = github_repo question = flo ( "Base dir already contains a sub dir '{revealjs_repo_name}'." ' Reset (and re-download) reveal.js codebase?' ) if not exists ( revealjs_dir ) or query_yes_no ( question , default = 'no' ) : run ( flo ( 'mkdir -p {basedir}' ) ) set_up_revealjs_codebase ( basedir , revealjs_repo_name ) install_plugins ( revealjs_dir ) apply_customizations ( repo_dir = revealjs_dir ) if exists ( revealjs_dir ) : install_files_in_basedir ( basedir , repo_dir = revealjs_dir ) init_git_repo ( basedir ) create_github_remote_repo ( basedir ) setup_npm ( revealjs_dir ) else : print ( 'abort' )
Set up or update a reveals . js presentation with slides written in markdown .
1,575
def tweak_css ( repo_dir ) : print_msg ( "* don't capitalize titles (no uppercase headings)" ) files = [ 'beige.css' , 'black.css' , 'blood.css' , 'league.css' , 'moon.css' , 'night.css' , 'serif.css' , 'simple.css' , 'sky.css' , 'solarized.css' , 'white.css' , ] line = ' text-transform: uppercase;' for file_ in files : update_or_append_line ( filename = flo ( '{repo_dir}/css/theme/{file_}' ) , prefix = line , new_line = flo ( '/*{line}*/' ) ) print_msg ( '* images without border' ) data = [ { 'file' : 'beige.css' , 'line' : ' border: 4px solid #333;' } , { 'file' : 'black.css' , 'line' : ' border: 4px solid #fff;' } , { 'file' : 'blood.css' , 'line' : ' border: 4px solid #eee;' } , { 'file' : 'league.css' , 'line' : ' border: 4px solid #eee;' } , { 'file' : 'moon.css' , 'line' : ' border: 4px solid #93a1a1;' } , { 'file' : 'night.css' , 'line' : ' border: 4px solid #eee;' } , { 'file' : 'serif.css' , 'line' : ' border: 4px solid #000;' } , { 'file' : 'simple.css' , 'line' : ' border: 4px solid #000;' } , { 'file' : 'sky.css' , 'line' : ' border: 4px solid #333;' } , { 'file' : 'solarized.css' , 'line' : ' border: 4px solid #657b83;' } , { 'file' : 'white.css' , 'line' : ' border: 4px solid #222;' } , ] for item in data : file_ = item [ 'file' ] lines = [ item [ 'line' ] , ] lines . extend ( [ ' box-shadow: 0 0 10px rgba(0, 0, 0, 0.15); }' , ' box-shadow: 0 0 20px rgba(0, 0, 0, 0.55); }' ] ) for line in lines : update_or_append_line ( filename = flo ( '{repo_dir}/css/theme/{file_}' ) , prefix = line , new_line = flo ( '/*{line}*/' ) )
Comment out some css settings .
1,576
def decktape ( ) : run ( 'mkdir -p ~/bin/decktape' ) if not exists ( '~/bin/decktape/decktape-1.0.0' ) : print_msg ( '\n## download decktape 1.0.0\n' ) run ( 'cd ~/bin/decktape && ' 'curl -L https://github.com/astefanutti/decktape/archive/' 'v1.0.0.tar.gz | tar -xz --exclude phantomjs' ) run ( 'cd ~/bin/decktape/decktape-1.0.0 && ' 'curl -L https://github.com/astefanutti/decktape/releases/' 'download/v1.0.0/phantomjs-linux-x86-64 -o phantomjs' ) run ( 'cd ~/bin/decktape/decktape-1.0.0 && ' 'chmod +x phantomjs' ) run ( 'ln -snf ~/bin/decktape/decktape-1.0.0 ~/bin/decktape/active' , msg = '\n## link installed decktape version as active' ) print_msg ( '\nCreate PDF from reveal.js presentation:\n\n ' '# serve presentation:\n ' 'cd ~/repos/my_presi/reveal.js/ && npm start\n\n ' '# create pdf in another shell:\n ' 'cd ~/bin/decktape/active && \\\n ' './phantomjs decktape.js --size 1280x800 localhost:8000 ' '~/repos/my_presi/my_presi.pdf' )
Install DeckTape .
1,577
def revealjs_template ( ) : from config import basedir , github_user , github_repo run ( flo ( 'rm -f {basedir}/index.html' ) ) run ( flo ( 'rm -f {basedir}/slides.md' ) ) run ( flo ( 'rm -f {basedir}/README.md' ) ) run ( flo ( 'rm -rf {basedir}/img/' ) ) title = 'reveal.js template' subtitle = '[reveal.js][3] presentation written ' 'in [markdown][4] set up with [fabric][5] & [fabsetup][6]' description = execute ( revealjs , basedir , title , subtitle , description , github_user , github_repo ) print_msg ( '\n## Re-add github repo infos into README.md' ) basename = os . path . basename ( basedir ) _insert_repo_infos_into_readme ( basedir , github_user = _lazy ( 'github_user' ) , github_repo = _lazy ( 'github_repo' , default = basename ) ) print_msg ( '\n## Assure symbolic link not tracked by git exists\n' ) run ( flo ( 'ln -snf ../reveal.js {basedir}/reveal.js/reveal.js' ) )
Create or update the template presentation demo using task revealjs .
1,578
def spatialDomainNoGrid ( self ) : self . w = np . zeros ( self . xw . shape ) if self . Debug : print ( "w = " ) print ( self . w . shape ) for i in range ( len ( self . q ) ) : if self . q [ i ] != 0 : dist = np . abs ( self . xw - self . x [ i ] ) self . w -= self . q [ i ] * self . coeff * np . exp ( - dist / self . alpha ) * ( np . cos ( dist / self . alpha ) + np . sin ( dist / self . alpha ) )
Superposition of analytical solutions without a gridded domain
1,579
def build_diagonals ( self ) : self . l2 = np . roll ( self . l2 , - 2 ) self . l1 = np . roll ( self . l1 , - 1 ) self . r1 = np . roll ( self . r1 , 1 ) self . r2 = np . roll ( self . r2 , 2 ) if self . coeff_matrix is not None : pass elif self . BC_E == 'Periodic' and self . BC_W == 'Periodic' : pass else : self . diags = np . vstack ( ( self . l2 , self . l1 , self . c0 , self . r1 , self . r2 ) ) self . offsets = np . array ( [ - 2 , - 1 , 0 , 1 , 2 ] ) self . coeff_matrix = spdiags ( self . diags , self . offsets , self . nx , self . nx , format = 'csr' )
Builds the diagonals for the coefficient array
1,580
def createArchiveExample ( fileName ) : print ( '*' * 80 ) print ( 'Create archive' ) print ( '*' * 80 ) archive = CombineArchive ( ) archive . addFile ( fileName , "./models/model.xml" , KnownFormats . lookupFormat ( "sbml" ) , True ) description = OmexDescription ( ) description . setAbout ( "." ) description . setDescription ( "Simple test archive including one SBML model" ) description . setCreated ( OmexDescription . getCurrentDateAndTime ( ) ) creator = VCard ( ) creator . setFamilyName ( "Bergmann" ) creator . setGivenName ( "Frank" ) creator . setEmail ( "fbergman@caltech.edu" ) creator . setOrganization ( "Caltech" ) description . addCreator ( creator ) archive . addMetadata ( "." , description ) location = "./models/model.xml" description = OmexDescription ( ) description . setAbout ( location ) description . setDescription ( "SBML model" ) description . setCreated ( OmexDescription . getCurrentDateAndTime ( ) ) archive . addMetadata ( location , description ) out_file = "out.omex" archive . writeToFile ( out_file ) print ( 'Archive created:' , out_file )
Creates Combine Archive containing the given file .
1,581
def calc_deviation ( values , average ) : size = len ( values ) if size < 2 : return 0 calc_sum = 0.0 for number in range ( 0 , size ) : calc_sum += math . sqrt ( ( values [ number ] - average ) ** 2 ) return math . sqrt ( ( 1.0 / ( size - 1 ) ) * ( calc_sum / size ) )
Calculate the standard deviation of a list of values
1,582
def append ( self , value ) : self . count += 1 if self . count == 1 : self . old_m = self . new_m = value self . old_s = 0 else : self . new_m = self . old_m + ( value - self . old_m ) / self . count self . new_s = self . old_s + ( value - self . old_m ) * ( value - self . new_m ) self . old_m = self . new_m self . old_s = self . new_s
Append a value to the stats list
1,583
def pipeline ( steps , initial = None ) : def apply ( result , step ) : return step ( result ) return reduce ( apply , steps , initial )
Chain results from a list of functions . Inverted reduce .
1,584
def add ( class_ , name , value , sep = ';' ) : values = class_ . get_values_list ( name , sep ) if value in values : return new_value = sep . join ( values + [ value ] ) winreg . SetValueEx ( class_ . key , name , 0 , winreg . REG_EXPAND_SZ , new_value ) class_ . notify ( )
Add a value to a delimited variable but only when the value isn t already present .
1,585
def current ( class_ ) : "Windows Platform SDK GetTimeZoneInformation" tzi = class_ ( ) kernel32 = ctypes . windll . kernel32 getter = kernel32 . GetTimeZoneInformation getter = getattr ( kernel32 , 'GetDynamicTimeZoneInformation' , getter ) code = getter ( ctypes . byref ( tzi ) ) return code , tzi
Windows Platform SDK GetTimeZoneInformation
1,586
def dynamic_info ( self ) : "Return a map that for a given year will return the correct Info" if self . key_name : dyn_key = self . get_key ( ) . subkey ( 'Dynamic DST' ) del dyn_key [ 'FirstEntry' ] del dyn_key [ 'LastEntry' ] years = map ( int , dyn_key . keys ( ) ) values = map ( Info , dyn_key . values ( ) ) return RangeMap ( zip ( years , values ) , RangeMap . descending , operator . ge ) else : return AnyDict ( self )
Return a map that for a given year will return the correct Info
1,587
def _locate_day ( year , cutoff ) : target_weekday = ( cutoff . day_of_week + 6 ) % 7 week_of_month = cutoff . day day = ( week_of_month - 1 ) * 7 + 1 result = datetime . datetime ( year , cutoff . month , day , cutoff . hour , cutoff . minute , cutoff . second , cutoff . millisecond ) days_to_go = ( target_weekday - result . weekday ( ) ) % 7 result += datetime . timedelta ( days_to_go ) while result . month == cutoff . month + 1 : result -= datetime . timedelta ( weeks = 1 ) return result
Takes a SYSTEMTIME object such as retrieved from a TIME_ZONE_INFORMATION structure or call to GetTimeZoneInformation and interprets it based on the given year to identify the actual day .
1,588
def redirect ( pattern , to , permanent = True , locale_prefix = True , anchor = None , name = None , query = None , vary = None , cache_timeout = 12 , decorators = None , re_flags = None , to_args = None , to_kwargs = None , prepend_locale = True , merge_query = False ) : if permanent : redirect_class = HttpResponsePermanentRedirect else : redirect_class = HttpResponseRedirect if locale_prefix : pattern = pattern . lstrip ( '^/' ) pattern = LOCALE_RE + pattern if re_flags : pattern = '(?{})' . format ( re_flags ) + pattern view_decorators = [ ] if cache_timeout is not None : view_decorators . append ( cache_control_expires ( cache_timeout ) ) if vary : if isinstance ( vary , basestring ) : vary = [ vary ] view_decorators . append ( vary_on_headers ( * vary ) ) if decorators : if callable ( decorators ) : view_decorators . append ( decorators ) else : view_decorators . extend ( decorators ) def _view ( request , * args , ** kwargs ) : kwargs = { k : v or '' for k , v in kwargs . items ( ) } args = [ x or '' for x in args ] if callable ( to ) : to_value = to ( request , * args , ** kwargs ) else : to_value = to if to_value . startswith ( '/' ) or HTTP_RE . match ( to_value ) : redirect_url = to_value else : try : redirect_url = reverse ( to_value , args = to_args , kwargs = to_kwargs ) except NoReverseMatch : redirect_url = to_value if prepend_locale and redirect_url . startswith ( '/' ) and kwargs . get ( 'locale' ) : redirect_url = '/{locale}' + redirect_url . lstrip ( '/' ) if args or kwargs : redirect_url = strip_tags ( force_text ( redirect_url ) . format ( * args , ** kwargs ) ) if query : if merge_query : req_query = parse_qs ( request . META . get ( 'QUERY_STRING' ) ) req_query . update ( query ) querystring = urlencode ( req_query , doseq = True ) else : querystring = urlencode ( query , doseq = True ) elif query is None : querystring = request . META . get ( 'QUERY_STRING' ) else : querystring = '' if querystring : redirect_url = '?' . join ( [ redirect_url , querystring ] ) if anchor : redirect_url = '#' . join ( [ redirect_url , anchor ] ) if PROTOCOL_RELATIVE_RE . match ( redirect_url ) : redirect_url = '/' + redirect_url . lstrip ( '/' ) return redirect_class ( redirect_url ) try : for decorator in reversed ( view_decorators ) : _view = decorator ( _view ) except TypeError : log . exception ( 'decorators not iterable or does not contain ' 'callable items' ) return url ( pattern , _view , name = name )
Return a URL matcher suited for urlpatterns .
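A hedged usage sketch for the redirect helper: in a Django urls.py it would typically be called once per legacy path, yielding entries for urlpatterns. All paths and view names below are illustrative, not from the source:

# urls.py -- illustrative only; 'redirect' is the helper defined above.
redirectpatterns = [
    # Literal destination; query=None (the default) preserves the
    # incoming query string.
    redirect(r'^about/old-page/$', '/about/new-page/'),

    # Named view resolved via reverse(); query='' drops the query string.
    redirect(r'^contact/$', 'contact-view', query=''),

    # Temporary (302) redirect that varies on Accept-Language.
    redirect(r'^download/$', '/thanks/', permanent=False,
             vary='Accept-Language'),
]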
1,589
def __get_table_size ( self ) : length = ctypes . wintypes . DWORD ( ) res = self . method ( None , length , False ) if res != errors . ERROR_INSUFFICIENT_BUFFER : raise RuntimeError ( "Error getting table length (%d)" % res ) return length . value
Retrieve the size of the buffer needed by calling the method with a null pointer and length of zero . This should trigger an insufficient buffer error and return the size needed for the buffer .
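The probe-then-fetch idiom above is the standard Win32 two-call pattern: call once with a NULL buffer to learn the required size, then call again with a buffer of that size. A standalone sketch against GetIfTable from iphlpapi, an API of exactly this shape (Windows-only; ERROR_INSUFFICIENT_BUFFER is 122). The original can pass the DWORD directly because the bound method presumably declares argtypes; without them, byref is required:

import ctypes
import ctypes.wintypes

ERROR_INSUFFICIENT_BUFFER = 122
iphlpapi = ctypes.windll.iphlpapi  # Windows only

def if_table_size():
    # First call with a NULL table pointer: the API fails with
    # ERROR_INSUFFICIENT_BUFFER and writes the needed size into length.
    length = ctypes.wintypes.DWORD()
    res = iphlpapi.GetIfTable(None, ctypes.byref(length), False)
    if res != ERROR_INSUFFICIENT_BUFFER:
        raise RuntimeError("Error getting table length (%d)" % res)
    return length.value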
1,590
def get_table ( self ) : buffer_length = self . __get_table_size ( ) returned_buffer_length = ctypes . wintypes . DWORD ( buffer_length ) buffer = ctypes . create_string_buffer ( buffer_length ) pointer_type = ctypes . POINTER ( self . structure ) table_p = ctypes . cast ( buffer , pointer_type ) res = self . method ( table_p , returned_buffer_length , False ) if res != errors . NO_ERROR : raise RuntimeError ( "Error retrieving table (%d)" % res ) return table_p . contents
Get the full table of entries by allocating a buffer of the size reported by __get_table_size and retrieving the table into it .
1,591
def entries ( self ) : table = self . get_table ( ) entries_array = self . row_structure * table . num_entries pointer_type = ctypes . POINTER ( entries_array ) return ctypes . cast ( table . entries , pointer_type ) . contents
Using the table structure return the array of entries based on the table size .
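A sketch of the variable-length-array cast performed in entries: Win32 table structs declare a one-element trailing array, and the true bound is recovered at runtime by re-casting. The structure layout below is illustrative, not the project's actual row/table definitions:

import ctypes

class ROW(ctypes.Structure):
    _fields_ = [('value', ctypes.c_uint32)]

class TABLE(ctypes.Structure):
    # One-element placeholder; the allocated buffer really holds
    # num_entries rows immediately after the count.
    _fields_ = [('num_entries', ctypes.c_uint32), ('entries', ROW * 1)]

def rows(table):
    # Re-cast the trailing array to its true runtime length.
    array_type = ROW * table.num_entries
    return ctypes.cast(table.entries, ctypes.POINTER(array_type)).contents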
1,592
def owncloud ( ) : hostname = re . sub ( r'^[^@]+@' , '' , env . host ) sitename = query_input ( question = '\nEnter site-name of Your Owncloud web service' , default = flo ( 'owncloud.{hostname}' ) , color = cyan ) username = env . user fabfile_data_dir = FABFILE_DATA_DIR print ( magenta ( ' install owncloud' ) ) repository = '' . join ( [ 'http://download.opensuse.org/repositories/' , 'isv:/ownCloud:/community/Debian_7.0/' , ] ) with hide ( 'output' ) : sudo ( flo ( 'wget -O - {repository}Release.key | apt-key add -' ) ) filename = '/etc/apt/sources.list.d/owncloud.list' sudo ( flo ( "echo 'deb {repository} /' > {filename}" ) ) sudo ( 'apt-get update' ) install_packages ( [ 'owncloud' , 'php5-fpm' , 'php-apc' , 'memcached' , 'php5-memcache' , ] ) print ( magenta ( ' disable apache' ) ) with hide ( 'output' ) : sudo ( 'service apache2 stop' ) sudo ( 'update-rc.d apache2 disable' ) print ( magenta ( ' nginx setup for owncloud' ) ) filename = 'owncloud_site_config.template' path = flo ( '{fabfile_data_dir}/files/etc/nginx/sites-available/{filename}' ) from_str = filled_out_template ( path , username = username , sitename = sitename , hostname = hostname ) with tempfile . NamedTemporaryFile ( prefix = filename ) as tmp_file : with open ( tmp_file . name , 'w' ) as fp : fp . write ( from_str ) put ( tmp_file . name , flo ( '/tmp/{filename}' ) ) to = flo ( '/etc/nginx/sites-available/{sitename}' ) sudo ( flo ( 'mv /tmp/{filename} {to}' ) ) sudo ( flo ( 'chown root.root {to}' ) ) sudo ( flo ( 'chmod 644 {to}' ) ) sudo ( flo ( ' ' . join ( [ 'ln -snf ../sites-available/{sitename}' , '/etc/nginx/sites-enabled/{sitename}' , ] ) ) ) template = 'www.conf' to = flo ( '/etc/php5/fpm/pool.d/{template}' ) from_ = flo ( '{fabfile_data_dir}/files{to}' ) put ( from_ , '/tmp/' ) sudo ( flo ( 'mv /tmp/{template} {to}' ) ) sudo ( flo ( 'chown root.root {to}' ) ) sudo ( flo ( 'chmod 644 {to}' ) ) template = 'php.ini' to = flo ( '/etc/php5/fpm/{template}' ) from_ = flo ( '{fabfile_data_dir}/files{to}' ) put ( from_ , '/tmp/' ) sudo ( flo ( 'mv /tmp/{template} {to}' ) ) sudo ( flo ( 'chown root.root {to}' ) ) sudo ( flo ( 'chmod 644 {to}' ) ) sudo ( 'service php5-fpm restart' ) sudo ( 'service nginx reload' )
Set up owncloud .
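The flo() helper used throughout the task above formats a string with the caller's local variables. A minimal sketch of that behaviour, assuming the project's actual helper works the same way:

import inspect

def flo(template):
    # Format the template using the *caller's* locals, so that
    # flo('owncloud.{hostname}') picks up `hostname` at the call site.
    caller_locals = inspect.currentframe().f_back.f_locals
    return template.format(**caller_locals)

def demo():
    hostname = 'example.org'
    return flo('owncloud.{hostname}')  # -> 'owncloud.example.org'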
1,593
def doctree_read_handler ( app , doctree ) : docname = sys . _getframe ( 2 ) . f_locals [ 'docname' ] if docname . startswith ( '_partial' ) : app . env . metadata [ docname ] [ 'orphan' ] = True
Mark partial documents ( those whose docname starts with _partial ) as orphans in the environment metadata .
1,594
def autodoc_skip_member_handler ( app , what , name , obj , skip , options ) : if 'YAMLTokens' in name : return True return False
Skip unparseable functions .
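Both doctree_read_handler and autodoc_skip_member_handler are Sphinx event callbacks; a minimal sketch of the extension setup() that would register them ('doctree-read' and 'autodoc-skip-member' are standard Sphinx event names; the returned dict is optional metadata):

def setup(app):
    # 'doctree-read' fires as (app, doctree) after each source is parsed;
    # 'autodoc-skip-member' decides per member whether autodoc skips it.
    app.connect('doctree-read', doctree_read_handler)
    app.connect('autodoc-skip-member', autodoc_skip_member_handler)
    return {'parallel_read_safe': True}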
1,595
def surfplot ( self , z , titletext ) : if self . latlon : plt . imshow ( z , extent = ( 0 , self . dx * z . shape [ 0 ] , self . dy * z . shape [ 1 ] , 0 ) ) plt . xlabel ( 'longitude [deg E]' , fontsize = 12 , fontweight = 'bold' ) plt . ylabel ( 'latitude [deg N]' , fontsize = 12 , fontweight = 'bold' ) else : plt . imshow ( z , extent = ( 0 , self . dx / 1000. * z . shape [ 0 ] , self . dy / 1000. * z . shape [ 1 ] , 0 ) ) plt . xlabel ( 'x [km]' , fontsize = 12 , fontweight = 'bold' ) plt . ylabel ( 'y [km]' , fontsize = 12 , fontweight = 'bold' ) plt . colorbar ( ) plt . title ( titletext , fontsize = 16 )
Plot a single figure of the given array - for troubleshooting .
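A self-contained sketch of the imshow-with-extent pattern used by surfplot, with made-up data. Note that imshow's x axis spans the array's columns ( shape[1] ) and its y axis the rows ( shape[0] ):

import numpy as np
import matplotlib.pyplot as plt

z = np.random.rand(40, 50)   # 40 rows (y) by 50 columns (x); made-up field
dx = dy = 1000.0             # grid spacing in metres

# extent = (left, right, bottom, top); top = 0 keeps row 0 at the top,
# matching imshow's default origin='upper' orientation.
plt.imshow(z, extent=(0, dx / 1000. * z.shape[1],
                      dy / 1000. * z.shape[0], 0))
plt.xlabel('x [km]')
plt.ylabel('y [km]')
plt.colorbar()
plt.title('example field')
plt.show()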
1,596
def twoSurfplots ( self ) : plt . subplot ( 211 ) plt . title ( 'Load thickness, mantle equivalent [m]' , fontsize = 16 ) if self . latlon : plt . imshow ( self . qs / ( self . rho_m * self . g ) , extent = ( 0 , self . dx * self . qs . shape [ 0 ] , self . dy * self . qs . shape [ 1 ] , 0 ) ) plt . xlabel ( 'longitude [deg E]' , fontsize = 12 , fontweight = 'bold' ) plt . ylabel ( 'latitude [deg N]' , fontsize = 12 , fontweight = 'bold' ) else : plt . imshow ( self . qs / ( self . rho_m * self . g ) , extent = ( 0 , self . dx / 1000. * self . qs . shape [ 0 ] , self . dy / 1000. * self . qs . shape [ 1 ] , 0 ) ) plt . xlabel ( 'x [km]' , fontsize = 12 , fontweight = 'bold' ) plt . ylabel ( 'y [km]' , fontsize = 12 , fontweight = 'bold' ) plt . colorbar ( ) plt . subplot ( 212 ) plt . title ( 'Deflection [m]' ) if self . latlon : plt . imshow ( self . w , extent = ( 0 , self . dx * self . w . shape [ 0 ] , self . dy * self . w . shape [ 1 ] , 0 ) ) plt . xlabel ( 'longitude [deg E]' , fontsize = 12 , fontweight = 'bold' ) plt . ylabel ( 'latitude [deg N]' , fontsize = 12 , fontweight = 'bold' ) else : plt . imshow ( self . w , extent = ( 0 , self . dx / 1000. * self . w . shape [ 0 ] , self . dy / 1000. * self . w . shape [ 1 ] , 0 ) ) plt . xlabel ( 'x [km]' , fontsize = 12 , fontweight = 'bold' ) plt . ylabel ( 'y [km]' , fontsize = 12 , fontweight = 'bold' ) plt . colorbar ( )
Plot a figure with two subplots ( load thickness and deflection ) for 2D arrays .
1,597
def outputDeflections ( self ) : try : self . wOutFile if self . Verbose : print ( "Output filename provided." ) except : try : self . wOutFile = self . configGet ( "string" , "output" , "DeflectionOut" , optional = True ) except : if self . Debug : print ( "No output filename provided:" ) print ( " not writing any deflection output to file" ) if self . wOutFile : if self . wOutFile [ - 4 : ] == '.npy' : from numpy import save save ( self . wOutFile , self . w ) else : from numpy import savetxt savetxt ( self . wOutFile , self . w , fmt = '%.3f' ) if self . Verbose : print ( "Saving deflections to " + self . wOutFile )
Outputs a grid of deflections if an output filename is defined in the configuration file . If the filename given in the configuration file ends in . npy then a binary numpy grid will be exported . Otherwise an ASCII grid will be exported .
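A compact sketch of the extension-based dispatch described above: '.npy' selects NumPy's binary format, anything else an ASCII grid (the function name is illustrative):

import numpy as np

def write_grid(path, grid):
    # Binary .npy round-trips exactly; ASCII text is human-readable.
    if path.endswith('.npy'):
        np.save(path, grid)
    else:
        np.savetxt(path, grid, fmt='%.3f')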
1,598
def TeArraySizeCheck ( self ) : if type ( self . Te ) == np . ndarray and type ( self . qs ) == np . ndarray : if type ( self . Te ) is np . ndarray : if ( np . array ( self . Te . shape ) != np . array ( self . qs . shape ) ) . any ( ) : sys . exit ( "q0 and Te arrays have incompatible shapes. Exiting." ) else : if self . Debug : print ( "Te and qs array sizes pass consistency check" )
Checks that the Te and q0 array sizes are compatible , for the finite difference solution .
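The compatibility test reduces to comparing the two arrays' shapes; a compact sketch in which a plain tuple comparison replaces the original's elementwise np.array(...).any() check:

import numpy as np

def check_shapes(te, qs):
    # Finite-difference solvers need Te and q0 on the same grid.
    if isinstance(te, np.ndarray) and te.shape != np.shape(qs):
        raise ValueError("q0 and Te arrays have incompatible shapes.")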
1,599
def FD ( self ) : if self . Verbose : print ( "Finite Difference Solution Technique" ) try : self . qs except : self . qs = self . q0 . copy ( ) del self . q0 self . x = np . arange ( self . dx / 2. , self . dx * self . qs . shape [ 0 ] , self . dx ) if self . dimension == 2 : self . y = np . arange ( self . dy / 2. , self . dy * self . qs . shape [ 1 ] , self . dy ) try : self . Solver except : if self . filename : self . Solver = self . configGet ( "string" , "numerical" , "Solver" ) else : sys . exit ( "No solver defined!" ) if self . filename : self . iterative_ConvergenceTolerance = self . configGet ( "float" , "numerical" , "ConvergenceTolerance" ) try : self . Te = self . configGet ( "float" , "input" , "ElasticThickness" , optional = False ) if self . Te is None : Tepath = self . configGet ( "string" , "input" , "ElasticThickness" , optional = False ) self . Te = Tepath else : Tepath = None except : Tepath = self . configGet ( "string" , "input" , "ElasticThickness" , optional = False ) self . Te = Tepath if self . Te is None : if self . coeff_matrix is not None : pass else : sys . exit ( "No input elastic thickness or coefficient matrix supplied." ) if type ( self . Te ) == str : Tepath = self . Te else : Tepath = None if Tepath : self . Te = self . loadFile ( self . Te , close_on_fail = False ) if self . Te is None : print ( "Requested Te file is provided but cannot be located." ) print ( "No scalar elastic thickness is provided in configuration file" ) print ( "(Typo in path to input Te grid?)" ) if self . coeff_matrix is not None : print ( "But a coefficient matrix has been found." ) print ( "Calculations will be carried forward using it." ) else : print ( "Exiting." ) sys . exit ( ) if self . Te . any ( ) : self . TeArraySizeCheck ( )
Set - up for the finite difference solution method
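The coordinate arrays built in FD ( ) are cell-centered: each point sits half a grid spacing in from the domain edge, one coordinate per cell. A small worked sketch:

import numpy as np

dx, nx = 2.0, 5
# Cell centers at dx/2, 3*dx/2, ..., (2*nx - 1)*dx/2.
x = np.arange(dx / 2., dx * nx, dx)
print(x)  # [1. 3. 5. 7. 9.]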