idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
52,100 | def linkify ( props ) : match = props [ 'match' ] protocol = match . group ( 1 ) url = match . group ( 2 ) href = protocol + url if props [ 'block' ] [ 'type' ] == BLOCK_TYPES . CODE : return href link_props = { 'href' : href , } if href . startswith ( 'www' ) : link_props [ 'href' ] = 'http://' + href return DOM . create_element ( 'a' , link_props , href ) | Wrap plain URLs with link tags . |
52,101 | def for_kind ( kind_map , type_ , fallback_key ) : if type_ not in kind_map : if fallback_key not in kind_map : raise ConfigException ( '"%s" is not in the config and has no fallback' % type_ ) config = kind_map [ fallback_key ] else : config = kind_map [ type_ ] if isinstance ( config , dict ) : if 'element' not in config : raise ConfigException ( '"%s" does not define an element' % type_ ) opts = Options ( type_ , ** config ) else : opts = Options ( type_ , config ) return opts | Create an Options object from any mapping . |
52,102 | def render ( self , content_state = None ) : if content_state is None : content_state = { } blocks = content_state . get ( 'blocks' , [ ] ) wrapper_state = WrapperState ( self . block_map , blocks ) document = DOM . create_element ( ) entity_map = content_state . get ( 'entityMap' , { } ) min_depth = 0 for block in blocks : depth = block [ 'depth' ] elt = self . render_block ( block , entity_map , wrapper_state ) if depth > min_depth : min_depth = depth if depth == 0 : DOM . append_child ( document , elt ) if min_depth > 0 and wrapper_state . stack . length ( ) != 0 : DOM . append_child ( document , wrapper_state . stack . tail ( ) . elt ) return DOM . render ( document ) | Starts the export process on a given piece of content state . |
52,103 | def build_command_groups ( self , block ) : text = block [ 'text' ] commands = sorted ( self . build_commands ( block ) ) grouped = groupby ( commands , Command . key ) listed = list ( groupby ( commands , Command . key ) ) sliced = [ ] i = 0 for start_index , commands in grouped : if i < len ( listed ) - 1 : stop_index = listed [ i + 1 ] [ 0 ] sliced . append ( ( text [ start_index : stop_index ] , list ( commands ) ) ) else : sliced . append ( ( text [ start_index : start_index ] , list ( commands ) ) ) i += 1 return sliced | Creates block modification commands grouped by start index with the text to apply them on . |
52,104 | def build_commands ( self , block ) : text_commands = Command . start_stop ( 'text' , 0 , len ( block [ 'text' ] ) ) style_commands = self . build_style_commands ( block ) entity_commands = self . build_entity_commands ( block ) return text_commands + style_commands + entity_commands | Build all of the manipulation commands for a given block . - One pair to set the text . - Multiple pairs for styles . - Multiple pairs for entities . |
52,105 | def run ( commands , shell = None , prompt_template = "default" , speed = 1 , quiet = False , test_mode = False , commentecho = False , ) : if not quiet : secho ( "We'll do it live!" , fg = "red" , bold = True ) secho ( "STARTING SESSION: Press Ctrl-C at any time to exit." , fg = "yellow" , bold = True , ) click . pause ( ) click . clear ( ) state = SessionState ( shell = shell , prompt_template = prompt_template , speed = speed , test_mode = test_mode , commentecho = commentecho , ) i = 0 while i < len ( commands ) : command = commands [ i ] . strip ( ) i += 1 if not command : continue is_comment = command . startswith ( "#" ) if not is_comment : command_as_list = shlex . split ( ensure_utf8 ( command ) ) else : command_as_list = None shell_match = SHELL_RE . match ( command ) if is_comment : match = OPTION_RE . match ( command ) if match : option , arg = match . group ( "option" ) , match . group ( "arg" ) func = OPTION_MAP [ option ] func ( state , arg ) elif state . commentecho ( ) : comment = command . lstrip ( "#" ) secho ( comment , fg = "yellow" , bold = True ) continue elif command_as_list and command_as_list [ 0 ] in [ "alias" , "export" ] : magictype ( command , prompt_template = state [ "prompt_template" ] , speed = state [ "speed" ] ) state . add_command ( command ) elif shell_match : shell_name = shell_match . groups ( ) [ 0 ] . strip ( ) py_commands = [ ] more = True while more : try : py_command = commands [ i ] . rstrip ( ) except IndexError : raise SessionError ( "Unmatched {0} code block in " "session file." . format ( shell_name ) ) i += 1 if py_command . startswith ( "```" ) : i += 1 more = False else : py_commands . append ( py_command ) magictype ( shell_name , prompt_template = state [ "prompt_template" ] , speed = state [ "speed" ] , ) if shell_name == "ipython" : try : from doitlive . 
ipython_consoles import start_ipython_player except ImportError : raise RuntimeError ( "```ipython blocks require IPython to be installed" ) ipy_commands = [ textwrap . dedent ( cmd ) for cmd in py_commands ] start_ipython_player ( ipy_commands , speed = state [ "speed" ] ) else : start_python_player ( py_commands , speed = state [ "speed" ] ) else : goto_stealthmode = magicrun ( command , ** state ) i -= stealthmode ( state , goto_stealthmode ) echo_prompt ( state [ "prompt_template" ] ) wait_for ( RETURNS ) if not quiet : secho ( "FINISHED SESSION" , fg = "yellow" , bold = True ) | Main function for magic - running a list of commands . |
52,106 | def play ( quiet , session_file , shell , speed , prompt , commentecho ) : run ( session_file . readlines ( ) , shell = shell , speed = speed , quiet = quiet , test_mode = TESTING , prompt_template = prompt , commentecho = commentecho , ) | Play a session file . |
52,107 | def demo ( quiet , shell , speed , prompt , commentecho ) : run ( DEMO , shell = shell , speed = speed , test_mode = TESTING , prompt_template = prompt , quiet = quiet , commentecho = commentecho , ) | Run a demo doitlive session . |
52,108 | def echo ( message = None , file = None , nl = True , err = False , color = None , carriage_return = False ) : message = message or "" if carriage_return and nl : click_echo ( message + "\r\n" , file , False , err , color ) elif carriage_return and not nl : click_echo ( message + "\r" , file , False , err , color ) else : click_echo ( message , file , nl , err , color ) | Patched click echo function . |
52,109 | def magictype ( text , prompt_template = "default" , speed = 1 ) : echo_prompt ( prompt_template ) cursor_position = 0 return_to_regular_type = False with raw_mode ( ) : while True : char = text [ cursor_position : cursor_position + speed ] in_char = getchar ( ) if in_char in { ESC , CTRLC } : echo ( carriage_return = True ) raise click . Abort ( ) elif in_char == TAB : return_to_regular_type = True break elif in_char == BACKSPACE : if cursor_position > 0 : echo ( "\b \b" , nl = False ) cursor_position -= 1 elif in_char in RETURNS : if cursor_position >= len ( text ) : echo ( "\r" , nl = True ) break elif in_char == CTRLZ and hasattr ( signal , "SIGTSTP" ) : os . kill ( 0 , signal . SIGTSTP ) click . clear ( ) echo_prompt ( prompt_template ) echo ( text [ : cursor_position ] , nl = False ) else : if cursor_position < len ( text ) : echo ( char , nl = False ) increment = min ( [ speed , len ( text ) - cursor_position ] ) cursor_position += increment return return_to_regular_type | Echo each character in text as keyboard characters are pressed . Characters are echo d speed characters at a time . |
52,110 | def regulartype ( prompt_template = "default" ) : echo_prompt ( prompt_template ) command_string = "" cursor_position = 0 with raw_mode ( ) : while True : in_char = getchar ( ) if in_char in { ESC , CTRLC } : echo ( carriage_return = True ) raise click . Abort ( ) elif in_char == TAB : echo ( "\r" , nl = True ) return in_char elif in_char == BACKSPACE : if cursor_position > 0 : echo ( "\b \b" , nl = False ) command_string = command_string [ : - 1 ] cursor_position -= 1 elif in_char in RETURNS : echo ( "\r" , nl = True ) return command_string elif in_char == CTRLZ and hasattr ( signal , "SIGTSTP" ) : os . kill ( 0 , signal . SIGTSTP ) click . clear ( ) echo_prompt ( prompt_template ) else : echo ( in_char , nl = False ) command_string += in_char cursor_position += 1 | Echo each character typed . Unlike magictype this echos the characters the user is pressing . |
52,111 | def regularrun ( shell , prompt_template = "default" , aliases = None , envvars = None , extra_commands = None , speed = 1 , test_mode = False , commentecho = False , ) : loop_again = True command_string = regulartype ( prompt_template ) if command_string == TAB : loop_again = False return loop_again run_command ( command_string , shell , aliases = aliases , envvars = envvars , extra_commands = extra_commands , test_mode = test_mode , ) return loop_again | Allow user to run their own live commands until CTRL - Z is pressed again . |
52,112 | def magicrun ( text , shell , prompt_template = "default" , aliases = None , envvars = None , extra_commands = None , speed = 1 , test_mode = False , commentecho = False , ) : goto_regulartype = magictype ( text , prompt_template , speed ) if goto_regulartype : return goto_regulartype run_command ( text , shell , aliases = aliases , envvars = envvars , extra_commands = extra_commands , test_mode = test_mode , ) return goto_regulartype | Echo out each character in text as keyboard characters are pressed wait for a RETURN keypress then run the text in a shell context . |
52,113 | def run_commands ( self ) : more = 0 prompt = sys . ps1 for command in self . commands : try : prompt = sys . ps2 if more else sys . ps1 try : magictype ( command , prompt_template = prompt , speed = self . speed ) except EOFError : self . write ( "\n" ) break else : if command . strip ( ) == "exit()" : return more = self . push ( command ) except KeyboardInterrupt : self . write ( "\nKeyboardInterrupt\n" ) self . resetbuffer ( ) more = 0 sys . exit ( 1 ) echo_prompt ( prompt ) wait_for ( RETURNS ) | Automatically type and execute all commands . |
52,114 | def interact ( self , banner = None ) : try : sys . ps1 except AttributeError : sys . ps1 = ">>>" try : sys . ps2 except AttributeError : sys . ps2 = "... " cprt = ( 'Type "help", "copyright", "credits" or "license" for ' "more information." ) if banner is None : self . write ( "Python %s on %s\n%s\n" % ( sys . version , sys . platform , cprt ) ) else : self . write ( "%s\n" % str ( banner ) ) self . run_commands ( ) | Run an interactive session . |
52,115 | def start_ipython_player ( commands , speed = 1 ) : PlayerTerminalIPythonApp . commands = commands PlayerTerminalIPythonApp . speed = speed PlayerTerminalIPythonApp . launch_instance ( ) | Starts a new magic IPython shell . |
52,116 | def on_feed_key ( self , key_press ) : if key_press . key in { Keys . Escape , Keys . ControlC } : echo ( carriage_return = True ) raise Abort ( ) if key_press . key == Keys . Backspace : if self . current_command_pos > 0 : self . current_command_pos -= 1 return key_press ret = None if key_press . key != Keys . CPRResponse : if self . current_command_pos < len ( self . current_command ) : current_key = self . current_command_key ret = KeyPress ( current_key ) increment = min ( [ self . speed , len ( self . current_command ) - self . current_command_pos ] ) self . current_command_pos += increment else : if key_press . key != Keys . Enter : return None self . current_command_index += 1 self . current_command_pos = 0 ret = key_press return ret | Handles the magictyping when a key is pressed |
52,117 | def init_shell ( self ) : self . shell = PlayerTerminalInteractiveShell . instance ( commands = self . commands , speed = self . speed , parent = self , display_banner = False , profile_dir = self . profile_dir , ipython_dir = self . ipython_dir , user_ns = self . user_ns , ) self . shell . configurables . append ( self ) | initialize the InteractiveShell instance |
52,118 | def raw_mode ( ) : if WIN : yield else : import tty import termios if not isatty ( sys . stdin ) : f = open ( "/dev/tty" ) fd = f . fileno ( ) else : fd = sys . stdin . fileno ( ) f = None try : old_settings = termios . tcgetattr ( fd ) tty . setraw ( fd ) except termios . error : pass try : yield finally : try : termios . tcsetattr ( fd , termios . TCSADRAIN , old_settings ) if f is not None : f . close ( ) except termios . error : pass | Enables terminal raw mode during the context . |
52,119 | def int_to_string ( number , alphabet , padding = None ) : output = "" alpha_len = len ( alphabet ) while number : number , digit = divmod ( number , alpha_len ) output += alphabet [ digit ] if padding : remainder = max ( padding - len ( output ) , 0 ) output = output + alphabet [ 0 ] * remainder return output [ : : - 1 ] | Convert a number to a string using the given alphabet . The output has the most significant digit first . |
52,120 | def string_to_int ( string , alphabet ) : number = 0 alpha_len = len ( alphabet ) for char in string : number = number * alpha_len + alphabet . index ( char ) return number | Convert a string to a number using the given alphabet . The input is assumed to have the most significant digit first . |
52,121 | def decode ( self , string , legacy = False ) : if legacy : string = string [ : : - 1 ] return _uu . UUID ( int = string_to_int ( string , self . _alphabet ) ) | Decode a string according to the current alphabet into a UUID Raises ValueError when encountering illegal characters or a too - long string . |
52,122 | def set_alphabet ( self , alphabet ) : new_alphabet = list ( sorted ( set ( alphabet ) ) ) if len ( new_alphabet ) > 1 : self . _alphabet = new_alphabet self . _alpha_len = len ( self . _alphabet ) else : raise ValueError ( "Alphabet with more than " "one unique symbols required." ) | Set the alphabet to be used for new UUIDs . |
52,123 | def encoded_length ( self , num_bytes = 16 ) : factor = math . log ( 256 ) / math . log ( self . _alpha_len ) return int ( math . ceil ( factor * num_bytes ) ) | Returns the string length of the shortened UUID . |
52,124 | def asm_module ( exprs , dst_reg , sym_to_reg , triple_or_target = None ) : if not llvmlite_available : raise RuntimeError ( "llvmlite module unavailable! can't assemble..." ) target = llvm_get_target ( triple_or_target ) M = ll . Module ( ) fntype = ll . FunctionType ( ll . VoidType ( ) , [ ] ) func = ll . Function ( M , fntype , name = '__arybo' ) func . attributes . add ( "naked" ) func . attributes . add ( "nounwind" ) BB = func . append_basic_block ( ) IRB = ll . IRBuilder ( ) IRB . position_at_end ( BB ) sym_to_value = { sym : IRB . load_reg ( IntType ( reg [ 1 ] ) , reg [ 0 ] , reg [ 0 ] ) for sym , reg in six . iteritems ( sym_to_reg ) } ret = to_llvm_ir ( exprs , sym_to_value , IRB ) IRB . store_reg ( ret , IntType ( dst_reg [ 1 ] ) , dst_reg [ 0 ] ) IRB . unreachable ( ) return M | Generate an LLVM module for a list of expressions |
52,125 | def asm_binary ( exprs , dst_reg , sym_to_reg , triple_or_target = None ) : if not llvmlite_available : raise RuntimeError ( "llvmlite module unavailable! can't assemble..." ) target = llvm_get_target ( triple_or_target ) M = asm_module ( exprs , dst_reg , sym_to_reg , target ) M = llvm . parse_assembly ( str ( M ) ) M . verify ( ) target_machine = target . create_target_machine ( ) obj_bin = target_machine . emit_object ( M ) obj = llvm . ObjectFileRef . from_data ( obj_bin ) for s in obj . sections ( ) : if s . is_text ( ) : return s . data ( ) raise RuntimeError ( "unable to get the assembled binary!" ) | Compile and assemble an expression for a given architecture . |
52,126 | def expr_contains ( e , o ) : if o == e : return True if e . has_args ( ) : for a in e . args ( ) : if expr_contains ( a , o ) : return True return False | Returns true if o is in e |
52,127 | def zext ( self , n ) : if n <= self . nbits : raise ValueError ( "n must be > %d bits" % self . nbits ) mba_ret = self . __new_mba ( n ) ret = mba_ret . from_cst ( 0 ) for i in range ( self . nbits ) : ret . vec [ i ] = self . vec [ i ] return mba_ret . from_vec ( ret ) | Zero - extend the variable to n bits . n bits must be stricly larger than the actual number of bits or a ValueError is thrown |
52,128 | def sext ( self , n ) : if n <= self . nbits : raise ValueError ( "n must be > %d bits" % self . nbits ) mba_ret = self . __new_mba ( n ) ret = mba_ret . from_cst ( 0 ) for i in range ( self . nbits ) : ret . vec [ i ] = self . vec [ i ] last_bit = self . vec [ self . nbits - 1 ] for i in range ( self . nbits , n ) : ret . vec [ i ] = last_bit return mba_ret . from_vec ( ret ) | Sign - extend the variable to n bits . n bits must be stricly larger than the actual number of bits or a ValueError is thrown |
52,129 | def evaluate ( self , values ) : ret = self . mba . evaluate ( self . vec , values ) if isinstance ( ret , six . integer_types ) : return ret return self . from_vec ( self . mba , ret ) | Evaluates the expression to an integer |
52,130 | def vectorial_decomp ( self , symbols ) : try : symbols = [ s . vec for s in symbols ] N = sum ( map ( lambda s : len ( s ) , symbols ) ) symbols_ = Vector ( N ) i = 0 for v in symbols : for s in v : symbols_ [ i ] = s i += 1 symbols = symbols_ except TypeError : pass return self . mba . vectorial_decomp ( symbols , self . vec ) | Compute the vectorial decomposition of the expression according to the given symbols . |
52,131 | def var ( self , name ) : ret = self . from_vec ( self . var_symbols ( name ) ) ret . name = name return ret | Get an n - bit named symbolic variable |
52,132 | def permut2expr ( self , P ) : if len ( P ) > ( 1 << self . nbits ) : raise ValueError ( "P must not contain more than %d elements" % ( 1 << self . nbits ) ) X = self . var ( 'X' ) ret = super ( MBA , self ) . permut2expr ( P , X . vec ) return self . from_vec ( ret ) , X | Convert a substitution table into an arybo application |
52,133 | def response_hook ( self , r , ** kwargs ) : if r . status_code == 401 : www_authenticate = r . headers . get ( 'www-authenticate' , '' ) . lower ( ) auth_type = _auth_type_from_header ( www_authenticate ) if auth_type is not None : return self . retry_using_http_NTLM_auth ( 'www-authenticate' , 'Authorization' , r , auth_type , kwargs ) elif r . status_code == 407 : proxy_authenticate = r . headers . get ( 'proxy-authenticate' , '' ) . lower ( ) auth_type = _auth_type_from_header ( proxy_authenticate ) if auth_type is not None : return self . retry_using_http_NTLM_auth ( 'proxy-authenticate' , 'Proxy-authorization' , r , auth_type , kwargs ) return r | The actual hook handler . |
52,134 | def dummy ( DF , cols = None ) : dummies = ( get_dummies ( DF [ col ] ) for col in ( DF . columns if cols is None else cols ) ) return concat ( dummies , axis = 1 , keys = DF . columns ) | Dummy code select columns of a DataFrame . |
52,135 | def cos_r ( self , N = None ) : if not hasattr ( self , 'F' ) or self . F . shape [ 1 ] < self . rank : self . fs_r ( N = self . rank ) self . dr = norm ( self . F , axis = 1 ) ** 2 return apply_along_axis ( lambda _ : _ / self . dr , 0 , self . F [ : , : N ] ** 2 ) | Return the squared cosines for each row . |
52,136 | def cos_c ( self , N = None ) : if not hasattr ( self , 'G' ) or self . G . shape [ 1 ] < self . rank : self . fs_c ( N = self . rank ) self . dc = norm ( self . G , axis = 1 ) ** 2 return apply_along_axis ( lambda _ : _ / self . dc , 0 , self . G [ : , : N ] ** 2 ) | Return the squared cosines for each column . |
52,137 | def cont_r ( self , percent = 0.9 , N = None ) : if not hasattr ( self , 'F' ) : self . fs_r ( N = self . rank ) return apply_along_axis ( lambda _ : _ / self . L [ : N ] , 1 , apply_along_axis ( lambda _ : _ * self . r , 0 , self . F [ : , : N ] ** 2 ) ) | Return the contribution of each row . |
52,138 | def cont_c ( self , percent = 0.9 , N = None ) : if not hasattr ( self , 'G' ) : self . fs_c ( N = self . rank ) return apply_along_axis ( lambda _ : _ / self . L [ : N ] , 1 , apply_along_axis ( lambda _ : _ * self . c , 0 , self . G [ : , : N ] ** 2 ) ) | Return the contribution of each column . |
52,139 | def fs_r_sup ( self , DF , N = None ) : if not hasattr ( self , 'G' ) : self . fs_c ( N = self . rank ) if N and ( not isinstance ( N , int ) or N <= 0 ) : raise ValueError ( "ncols should be a positive integer." ) s = - sqrt ( self . E ) if self . cor else self . s N = min ( N , self . rank ) if N else self . rank S_inv = diagsvd ( - 1 / s [ : N ] , len ( self . G . T ) , N ) return _mul ( DF . div ( DF . sum ( axis = 1 ) , axis = 0 ) , self . G , S_inv ) [ : , : N ] | Find the supplementary row factor scores . |
52,140 | def fs_c_sup ( self , DF , N = None ) : if not hasattr ( self , 'F' ) : self . fs_r ( N = self . rank ) if N and ( not isinstance ( N , int ) or N <= 0 ) : raise ValueError ( "ncols should be a positive integer." ) s = - sqrt ( self . E ) if self . cor else self . s N = min ( N , self . rank ) if N else self . rank S_inv = diagsvd ( - 1 / s [ : N ] , len ( self . F . T ) , N ) return _mul ( ( DF / DF . sum ( ) ) . T , self . F , S_inv ) [ : , : N ] | Find the supplementary column factor scores . |
52,141 | def data_recognise ( self , data = None ) : data = data or self . data data_lower = data . lower ( ) if data_lower . startswith ( u"http://" ) or data_lower . startswith ( u"https://" ) : return u'url' elif data_lower . startswith ( u"mailto:" ) : return u'email' elif data_lower . startswith ( u"matmsg:to:" ) : return u'emailmessage' elif data_lower . startswith ( u"tel:" ) : return u'telephone' elif data_lower . startswith ( u"smsto:" ) : return u'sms' elif data_lower . startswith ( u"mmsto:" ) : return u'mms' elif data_lower . startswith ( u"geo:" ) : return u'geo' elif data_lower . startswith ( u"mebkm:title:" ) : return u'bookmark' elif data_lower . startswith ( u"mecard:" ) : return u'phonebook' else : return u'text' | Returns an unicode string indicating the data type of the data paramater |
52,142 | def data_to_string ( self ) : if self . data_type == 'text' : return BOM_UTF8 + self . __class__ . data_encode [ self . data_type ] ( self . data ) . encode ( 'utf-8' ) else : return self . __class__ . data_encode [ self . data_type ] ( self . data ) . encode ( 'utf-8' ) | Returns a UTF8 string with the QR Code s data |
52,143 | def split_six ( series = None ) : if pd is None : raise ImportError ( 'The Pandas package is required' ' for this functionality' ) if np is None : raise ImportError ( 'The NumPy package is required' ' for this functionality' ) def base ( x ) : if x > 0 : base = pow ( 10 , math . floor ( math . log10 ( x ) ) ) return round ( x / base ) * base else : return 0 quants = [ 0 , 50 , 75 , 85 , 90 ] arr = series . values return [ base ( np . percentile ( arr , x ) ) for x in quants ] | Given a Pandas Series get a domain of values from zero to the 90% quantile rounded to the nearest order - of - magnitude integer . For example 2100 is rounded to 2000 2790 to 3000 . |
52,144 | def to_linear ( self , index = None ) : if index is None : n = len ( self . index ) - 1 index = [ self . index [ i ] * ( 1. - i / ( n - 1. ) ) + self . index [ i + 1 ] * i / ( n - 1. ) for i in range ( n ) ] colors = [ self . rgba_floats_tuple ( x ) for x in index ] return LinearColormap ( colors , index = index , vmin = self . vmin , vmax = self . vmax ) | Transforms the StepColormap into a LinearColormap . |
52,145 | def add_to ( self , parent , name = None , index = None ) : parent . add_child ( self , name = name , index = index ) return self | Add element to a parent . |
52,146 | def to_json ( self , depth = - 1 , ** kwargs ) : return json . dumps ( self . to_dict ( depth = depth , ordered = True ) , ** kwargs ) | Returns a JSON representation of the object . |
52,147 | def save ( self , outfile , close_file = True , ** kwargs ) : if isinstance ( outfile , text_type ) or isinstance ( outfile , binary_type ) : fid = open ( outfile , 'wb' ) else : fid = outfile root = self . get_root ( ) html = root . render ( ** kwargs ) fid . write ( html . encode ( 'utf8' ) ) if close_file : fid . close ( ) | Saves an Element into a file . |
52,148 | def get_code ( self ) : if self . code is None : self . code = urlopen ( self . url ) . read ( ) return self . code | Opens the link and returns the response s content . |
52,149 | def _repr_html_ ( self , ** kwargs ) : html = self . render ( ** kwargs ) html = "data:text/html;charset=utf-8;base64," + base64 . b64encode ( html . encode ( 'utf8' ) ) . decode ( 'utf8' ) if self . height is None : iframe = ( '<div style="width:{width};">' '<div style="position:relative;width:100%;height:0;padding-bottom:{ratio};">' '<iframe src="{html}" style="position:absolute;width:100%;height:100%;left:0;top:0;' 'border:none !important;" ' 'allowfullscreen webkitallowfullscreen mozallowfullscreen>' '</iframe>' '</div></div>' ) . format iframe = iframe ( html = html , width = self . width , ratio = self . ratio ) else : iframe = ( '<iframe src="{html}" width="{width}" height="{height}"' 'style="border:none !important;" ' '"allowfullscreen" "webkitallowfullscreen" "mozallowfullscreen">' '</iframe>' ) . format iframe = iframe ( html = html , width = self . width , height = self . height ) return iframe | Displays the Figure in a Jupyter notebook . |
52,150 | def add_subplot ( self , x , y , n , margin = 0.05 ) : width = 1. / y height = 1. / x left = ( ( n - 1 ) % y ) * width top = ( ( n - 1 ) // y ) * height left = left + width * margin top = top + height * margin width = width * ( 1 - 2. * margin ) height = height * ( 1 - 2. * margin ) div = Div ( position = 'absolute' , width = '{}%' . format ( 100. * width ) , height = '{}%' . format ( 100. * height ) , left = '{}%' . format ( 100. * left ) , top = '{}%' . format ( 100. * top ) , ) self . add_child ( div ) return div | Creates a div child subplot in a matplotlib . figure . add_subplot style . |
52,151 | def _elapsed ( self ) : self . last_time = time . time ( ) return self . last_time - self . start | Returns elapsed time at update . |
52,152 | def _calc_eta ( self ) : elapsed = self . _elapsed ( ) if self . cnt == 0 or elapsed < 0.001 : return None rate = float ( self . cnt ) / elapsed self . eta = ( float ( self . max_iter ) - float ( self . cnt ) ) / rate | Calculates estimated time left until completion . |
52,153 | def _print_title ( self ) : if self . title : self . _stream_out ( '{}\n' . format ( self . title ) ) self . _stream_flush ( ) | Prints tracking title at initialization . |
52,154 | def _cache_eta ( self ) : self . _calc_eta ( ) self . _cached_output += ' | ETA: ' + self . _get_time ( self . eta ) | Prints the estimated time left . |
52,155 | def _adjust_width ( self ) : if self . bar_width > self . max_iter : self . bar_width = int ( self . max_iter ) | Shrinks bar if number of iterations is less than the bar width |
52,156 | def _print ( self , force_flush = False ) : self . _stream_flush ( ) next_perc = self . _calc_percent ( ) if self . update_interval : do_update = time . time ( ) - self . last_time >= self . update_interval elif force_flush : do_update = True else : do_update = next_perc > self . last_progress if do_update and self . active : self . last_progress = next_perc self . _cache_percent_indicator ( self . last_progress ) if self . track : self . _cached_output += ' Time elapsed: ' + self . _get_time ( self . _elapsed ( ) ) self . _cache_eta ( ) if self . item_id : self . _cache_item_id ( ) self . _stream_out ( '\r%s' % self . _cached_output ) self . _stream_flush ( ) self . _cached_output = '' | Prints formatted percentage and tracked time to the screen . |
52,157 | def next ( self ) : try : line = self . _get_next_line ( ) except StopIteration : if self . _is_new_file ( ) : self . _rotated_logfile = None self . _fh . close ( ) self . _offset = 0 try : line = self . _get_next_line ( ) except StopIteration : self . _update_offset_file ( ) raise else : self . _update_offset_file ( ) raise if self . paranoid : self . _update_offset_file ( ) elif self . every_n and self . every_n <= self . _since_update : self . _update_offset_file ( ) return line | Return the next line in the file updating the offset . |
52,158 | def read ( self ) : lines = self . readlines ( ) if lines : try : return '' . join ( lines ) except TypeError : return '' . join ( force_text ( line ) for line in lines ) else : return None | Read in all unread lines and return them as a single string . |
52,159 | def _filehandle ( self ) : if not self . _fh or self . _is_closed ( ) : filename = self . _rotated_logfile or self . filename if filename . endswith ( '.gz' ) : self . _fh = gzip . open ( filename , 'r' ) else : self . _fh = open ( filename , "r" , 1 ) if self . read_from_end and not exists ( self . _offset_file ) : self . _fh . seek ( 0 , os . SEEK_END ) else : self . _fh . seek ( self . _offset ) return self . _fh | Return a filehandle to the file being tailed with the position set to the current offset . |
52,160 | def _update_offset_file ( self ) : if self . on_update : self . on_update ( ) offset = self . _filehandle ( ) . tell ( ) inode = stat ( self . filename ) . st_ino fh = open ( self . _offset_file , "w" ) fh . write ( "%s\n%s\n" % ( inode , offset ) ) fh . close ( ) self . _since_update = 0 | Update the offset file with the current inode and offset . |
52,161 | def _determine_rotated_logfile ( self ) : rotated_filename = self . _check_rotated_filename_candidates ( ) if rotated_filename and exists ( rotated_filename ) : if stat ( rotated_filename ) . st_ino == self . _offset_file_inode : return rotated_filename if stat ( self . filename ) . st_ino == self . _offset_file_inode : if self . copytruncate : return rotated_filename else : sys . stderr . write ( "[pygtail] [WARN] file size of %s shrank, and copytruncate support is " "disabled (expected at least %d bytes, was %d bytes).\n" % ( self . filename , self . _offset , stat ( self . filename ) . st_size ) ) return None | We suspect the logfile has been rotated so try to guess what the rotated filename is and return it . |
52,162 | def _check_rotated_filename_candidates ( self ) : candidate = "%s.0" % self . filename if ( exists ( candidate ) and exists ( "%s.1.gz" % self . filename ) and ( stat ( candidate ) . st_mtime > stat ( "%s.1.gz" % self . filename ) . st_mtime ) ) : return candidate candidate = "%s.1" % self . filename if exists ( candidate ) : return candidate candidate = "%s.1.gz" % self . filename if exists ( candidate ) : return candidate rotated_filename_patterns = [ "%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]" , "%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9].gz" , "%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]" , "%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9].gz" , "%s.[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]" , ] if self . log_patterns : rotated_filename_patterns . extend ( self . log_patterns ) file_dir , rel_filename = os . path . split ( self . filename ) for rotated_filename_pattern in rotated_filename_patterns : candidates = glob . glob ( os . path . join ( file_dir , rotated_filename_pattern % rel_filename ) ) if candidates : candidates . sort ( ) return candidates [ - 1 ] return None | Check for various rotated logfile filename patterns and return the first match we find . |
52,163 | def create_s3_session ( ) : sess = requests . Session ( ) retries = Retry ( total = 3 , backoff_factor = .5 , status_forcelist = [ 500 , 502 , 503 , 504 ] ) sess . mount ( 'https://' , HTTPAdapter ( max_retries = retries ) ) return sess | Creates a session with automatic retries on 5xx errors . |
52,164 | def load_module ( self , fullname ) : mod = sys . modules . setdefault ( fullname , imp . new_module ( fullname ) ) mod . __file__ = self . _path mod . __loader__ = self mod . __path__ = [ ] mod . __package__ = fullname return mod | Returns an empty module . |
52,165 | def load_module ( self , fullname ) : mod = sys . modules . get ( fullname ) if mod is not None : return mod mod = _from_core_node ( self . _store , self . _root ) sys . modules [ fullname ] = mod return mod | Returns an object that lazily looks up tables and groups . |
52,166 | def find_module ( self , fullname , path = None ) : if not fullname . startswith ( self . _module_name + '.' ) : return None submodule = fullname [ len ( self . _module_name ) + 1 : ] parts = submodule . split ( '.' ) if self . _teams : team = parts . pop ( 0 ) else : team = None if len ( parts ) == 2 : store , pkg = PackageStore . find_package ( team , parts [ 0 ] , parts [ 1 ] ) if pkg is not None : return PackageLoader ( store , pkg ) else : return None for store_dir in PackageStore . find_store_dirs ( ) : store = PackageStore ( store_dir ) if len ( parts ) == 0 : assert self . _teams path = store . team_path ( team ) elif len ( parts ) == 1 : path = store . user_path ( team , parts [ 0 ] ) if os . path . isdir ( path ) : return FakeLoader ( path ) return None | Looks up the table based on the module path . |
52,167 | def _have_pyspark ( ) : if _have_pyspark . flag is None : try : if PackageStore . get_parquet_lib ( ) is ParquetLib . SPARK : import pyspark _have_pyspark . flag = True else : _have_pyspark . flag = False except ImportError : _have_pyspark . flag = False return _have_pyspark . flag | Check if we're running PySpark
52,168 | def _path_hash ( path , transform , kwargs ) : sortedargs = [ "%s:%r:%s" % ( key , value , type ( value ) ) for key , value in sorted ( iteritems ( kwargs ) ) ] srcinfo = "{path}:{transform}:{{{kwargs}}}" . format ( path = os . path . abspath ( path ) , transform = transform , kwargs = "," . join ( sortedargs ) ) return digest_string ( srcinfo ) | Generate a hash of source file path + transform + args |
52,169 | def _gen_glob_data ( dir , pattern , child_table ) : dir = pathlib . Path ( dir ) matched = False used_names = set ( ) for filepath in sorted ( dir . glob ( pattern ) ) : if filepath . is_dir ( ) : continue else : matched = True node_table = { } if child_table is None else child_table . copy ( ) filepath = filepath . relative_to ( dir ) node_table [ RESERVED [ 'file' ] ] = str ( filepath ) node_name = to_nodename ( filepath . stem , invalid = used_names ) used_names . add ( node_name ) print ( "Matched with {!r}: {!r} from {!r}" . format ( pattern , node_name , str ( filepath ) ) ) yield node_name , node_table if not matched : print ( "Warning: {!r} matched no files." . format ( pattern ) ) return | Generates node data by globbing a directory for a pattern |
52,170 | def _remove_keywords ( d ) : return { k : v for k , v in iteritems ( d ) if k not in RESERVED } | Copy the dict, filtering out reserved keywords.
52,171 | def build_package ( team , username , package , subpath , yaml_path , checks_path = None , dry_run = False , env = 'default' ) : def find ( key , value ) : if isinstance ( value , Iterable ) and not isinstance ( value , string_types ) : for k , v in iteritems ( value ) : if k == key : yield v elif isinstance ( v , dict ) : for result in find ( key , v ) : yield result elif isinstance ( v , list ) : for item in v : for result in find ( key , item ) : yield result build_data = load_yaml ( yaml_path ) if ( checks_path is None and list ( find ( 'checks' , build_data [ 'contents' ] ) ) and 'checks' not in build_data ) : checks_path = 'checks.yml' checks_contents = load_yaml ( checks_path , optional = True ) elif checks_path is not None : checks_contents = load_yaml ( checks_path ) else : checks_contents = None build_package_from_contents ( team , username , package , subpath , os . path . dirname ( yaml_path ) , build_data , checks_contents = checks_contents , dry_run = dry_run , env = env ) | Builds a package from a given Yaml file and installs it locally . |
52,172 | def send_comment_email ( email , package_owner , package_name , commenter ) : link = '{CATALOG_URL}/package/{owner}/{pkg}/comments' . format ( CATALOG_URL = CATALOG_URL , owner = package_owner , pkg = package_name ) subject = "New comment on {package_owner}/{package_name}" . format ( package_owner = package_owner , package_name = package_name ) html = render_template ( 'comment_email.html' , commenter = commenter , link = link ) body = render_template ( 'comment_email.txt' , commenter = commenter , link = link ) send_email ( recipients = [ email ] , sender = DEFAULT_SENDER , subject = subject , html = html , body = body ) | Send email to owner of package regarding new comment |
52,173 | def hash_contents ( contents ) : assert isinstance ( contents , GroupNode ) result = hashlib . sha256 ( ) def _hash_int ( value ) : result . update ( struct . pack ( ">L" , value ) ) def _hash_str ( string ) : assert isinstance ( string , string_types ) _hash_int ( len ( string ) ) result . update ( string . encode ( ) ) def _hash_object ( obj ) : _hash_str ( obj . json_type ) if isinstance ( obj , ( TableNode , FileNode ) ) : hashes = obj . hashes _hash_int ( len ( hashes ) ) for hval in hashes : _hash_str ( hval ) elif isinstance ( obj , GroupNode ) : children = obj . children _hash_int ( len ( children ) ) for key , child in sorted ( iteritems ( children ) ) : _hash_str ( key ) _hash_object ( child ) else : assert False , "Unexpected object: %r" % obj if obj . metadata_hash is not None : _hash_str ( obj . metadata_hash ) _hash_object ( contents ) return result . hexdigest ( ) | Creates a hash of key names and hashes in a package dictionary . |
52,174 | def find_object_hashes ( root , meta_only = False ) : stack = [ root ] while stack : obj = stack . pop ( ) if not meta_only and isinstance ( obj , ( TableNode , FileNode ) ) : for objhash in obj . hashes : yield objhash stack . extend ( itervalues ( obj . get_children ( ) ) ) if obj . metadata_hash is not None : yield obj . metadata_hash | Iterator that returns hashes of all of the file and table nodes . |
52,175 | def _send_event_task ( args ) : endpoint = args [ 'endpoint' ] json_message = args [ 'json_message' ] _consumer_impl . send ( endpoint , json_message ) | Actually sends the MixPanel event . Runs in a uwsgi worker process . |
52,176 | def send ( self , endpoint , json_message ) : _send_event_task . spool ( endpoint = endpoint , json_message = json_message ) | Queues the message to be sent . |
52,177 | def main ( args = None ) : parser = argument_parser ( ) args = parser . parse_args ( args ) if not hasattr ( args , 'func' ) : args = parser . parse_args ( [ 'help' ] ) kwargs = vars ( args ) if kwargs . pop ( 'dev' ) or os . environ . get ( 'QUILT_DEV_MODE' , '' ) . strip ( ) . lower ( ) == 'true' : quilt . _DEV_MODE = True else : quilt . _DEV_MODE = False func = kwargs . pop ( 'func' ) try : func ( ** kwargs ) return 0 except QuiltException as ex : print ( ex . message , file = sys . stderr ) return 1 except requests . exceptions . ConnectionError as ex : print ( "Failed to connect: %s" % ex , file = sys . stderr ) return 1 | Build and run parser |
52,178 | def is_identifier ( string ) : matched = PYTHON_IDENTIFIER_RE . match ( string ) return bool ( matched ) and not keyword . iskeyword ( string ) | Check if string could be a valid python identifier |
52,179 | def fs_link ( path , linkpath , linktype = 'soft' ) : global WIN_SOFTLINK global WIN_HARDLINK WIN_NO_ERROR = 22 assert linktype in ( 'soft' , 'hard' ) path , linkpath = pathlib . Path ( path ) , pathlib . Path ( linkpath ) if not path . exists ( ) : raise QuiltException ( "Path to link to does not exist: {}" . format ( path ) ) if linkpath . exists ( ) : raise QuiltException ( "Link path already exists: {}" . format ( linkpath ) ) if os . name == 'nt' : ctypes . WinError ( ) if not sys . getwindowsversion ( ) [ 0 ] >= 6 : raise QuiltException ( "Unsupported operation: This version of Windows does not support linking." ) if linktype == 'soft' : if WIN_SOFTLINK is None : WIN_SOFTLINK = ctypes . windll . kernel32 . CreateSymbolicLinkW WIN_SOFTLINK . restype = ctypes . c_bool create_link = lambda l , p : WIN_SOFTLINK ( str ( l ) , str ( p ) , p . is_dir ( ) ) elif linktype == 'hard' : if WIN_HARDLINK is None : WIN_HARDLINK = ctypes . windll . kernel32 . CreateHardLinkW WIN_HARDLINK . restype = ctypes . c_bool create_link = WIN_HARDLINK create_link ( linkpath , path ) error = ctypes . WinError ( ) if error . winerror : raise QuiltException ( "Linking failed: " + str ( error ) , original_error = error ) if not linkpath . exists ( ) and linkpath . is_symlink ( ) : raise QuiltException ( "Linking failed: Expected symlink at: {}" . format ( linkpath ) ) else : try : if linktype == 'soft' : linkpath . symlink_to ( path ) elif linktype == 'hard' : os . link ( str ( path ) , str ( linkpath ) ) except OSError as error : raise QuiltException ( "Linking failed: " + str ( error ) , original_error = error ) | Create a hard or soft link of path at linkpath |
52,180 | def read ( self , size = - 1 ) : buf = self . _fd . read ( size ) self . _progress_cb ( len ( buf ) ) return buf | Read bytes and update the progress bar . |
52,181 | def create_dirs ( self ) : if not os . path . isdir ( self . _path ) : os . makedirs ( self . _path ) for dir_name in [ self . OBJ_DIR , self . TMP_OBJ_DIR , self . PKG_DIR , self . CACHE_DIR ] : path = os . path . join ( self . _path , dir_name ) if not os . path . isdir ( path ) : os . mkdir ( path ) if not os . path . exists ( self . _version_path ( ) ) : self . _write_format_version ( ) | Creates the store directory and its subdirectories . |
52,182 | def find_store_dirs ( cls ) : store_dirs = [ default_store_location ( ) ] extra_dirs_str = os . getenv ( 'QUILT_PACKAGE_DIRS' ) if extra_dirs_str : store_dirs . extend ( extra_dirs_str . split ( ':' ) ) return store_dirs | Returns the primary package directory and any additional ones from QUILT_PACKAGE_DIRS . |
52,183 | def find_package ( cls , team , user , package , pkghash = None , store_dir = None ) : cls . check_name ( team , user , package ) dirs = cls . find_store_dirs ( ) for store_dir in dirs : store = PackageStore ( store_dir ) pkg = store . get_package ( team , user , package , pkghash = pkghash ) if pkg is not None : return store , pkg return None , None | Finds an existing package in one of the package directories . |
52,184 | def get_package ( self , team , user , package , pkghash = None ) : self . check_name ( team , user , package ) path = self . package_path ( team , user , package ) if not os . path . isdir ( path ) : return None if pkghash is None : latest_tag = os . path . join ( path , self . TAGS_DIR , self . LATEST ) if not os . path . exists ( latest_tag ) : return None with open ( latest_tag , 'r' ) as tagfile : pkghash = tagfile . read ( ) assert pkghash is not None contents_path = os . path . join ( path , self . CONTENTS_DIR , pkghash ) if not os . path . isfile ( contents_path ) : return None with open ( contents_path , 'r' ) as contents_file : try : return json . load ( contents_file , object_hook = decode_node ) except AssertionError as err : if str ( err ) . startswith ( "Bad package format" ) : name = "{}{}/{}, {}" . format ( team + ':' if team else '' , user , package , pkghash ) raise StoreException ( "Error in {}: {}" . format ( name , str ( err ) ) ) else : raise | Gets a package from this store . |
52,185 | def install_package ( self , team , user , package , contents ) : self . check_name ( team , user , package ) assert contents is not None self . create_dirs ( ) path = self . package_path ( team , user , package ) try : os . remove ( path ) except OSError : pass | Creates a new package in the default package store and allocates a per - user directory if needed . |
52,186 | def create_package_node ( self , team , user , package , dry_run = False ) : contents = RootNode ( dict ( ) ) if dry_run : return contents self . check_name ( team , user , package ) assert contents is not None self . create_dirs ( ) path = self . package_path ( team , user , package ) try : os . remove ( path ) except OSError : pass return contents | Creates a new package and initializes its contents . See install_package . |
52,187 | def iterpackages ( self ) : pkgdir = os . path . join ( self . _path , self . PKG_DIR ) if not os . path . isdir ( pkgdir ) : return for team in sub_dirs ( pkgdir ) : for user in sub_dirs ( self . team_path ( team ) ) : for pkg in sub_dirs ( self . user_path ( team , user ) ) : pkgpath = self . package_path ( team , user , pkg ) for hsh in sub_files ( os . path . join ( pkgpath , PackageStore . CONTENTS_DIR ) ) : yield self . get_package ( team , user , pkg , pkghash = hsh ) | Return an iterator over all the packages in the PackageStore . |
52,188 | def ls_packages ( self ) : packages = [ ] pkgdir = os . path . join ( self . _path , self . PKG_DIR ) if not os . path . isdir ( pkgdir ) : return [ ] for team in sub_dirs ( pkgdir ) : for user in sub_dirs ( self . team_path ( team ) ) : for pkg in sub_dirs ( self . user_path ( team , user ) ) : pkgpath = self . package_path ( team , user , pkg ) pkgmap = { h : [ ] for h in sub_files ( os . path . join ( pkgpath , PackageStore . CONTENTS_DIR ) ) } for tag in sub_files ( os . path . join ( pkgpath , PackageStore . TAGS_DIR ) ) : with open ( os . path . join ( pkgpath , PackageStore . TAGS_DIR , tag ) , 'r' ) as tagfile : pkghash = tagfile . read ( ) pkgmap [ pkghash ] . append ( tag ) for pkghash , tags in pkgmap . items ( ) : team_token = '' if team in ( DEFAULT_TEAM , ) else team + ':' fullpkg = "{team}{owner}/{pkg}" . format ( team = team_token , owner = user , pkg = pkg ) displaytags = tags if tags else [ "" ] for tag in displaytags : packages . append ( ( fullpkg , str ( tag ) , pkghash ) ) return packages | List packages in this store . |
52,189 | def team_path ( self , team = None ) : if team is None : team = DEFAULT_TEAM return os . path . join ( self . _path , self . PKG_DIR , team ) | Returns the path to directory with the team s users package repositories . |
52,190 | def user_path ( self , team , user ) : return os . path . join ( self . team_path ( team ) , user ) | Returns the path to directory with the user s package repositories . |
52,191 | def package_path ( self , team , user , package ) : return os . path . join ( self . user_path ( team , user ) , package ) | Returns the path to a package repository . |
52,192 | def object_path ( self , objhash ) : return os . path . join ( self . _path , self . OBJ_DIR , objhash ) | Returns the path to an object file based on its hash . |
52,193 | def prune ( self , objs = None ) : if objs is None : objdir = os . path . join ( self . _path , self . OBJ_DIR ) objs = os . listdir ( objdir ) remove_objs = set ( objs ) for pkg in self . iterpackages ( ) : remove_objs . difference_update ( find_object_hashes ( pkg ) ) for obj in remove_objs : path = self . object_path ( obj ) if os . path . exists ( path ) : os . chmod ( path , S_IWUSR ) os . remove ( path ) return remove_objs | Clean up objects not referenced by any packages . Try to prune all objects by default . |
52,194 | def save_dataframe ( self , dataframe ) : storepath = self . temporary_object_path ( str ( uuid . uuid4 ( ) ) ) parqlib = self . get_parquet_lib ( ) if isinstance ( dataframe , pd . DataFrame ) : import pyarrow as pa from pyarrow import parquet table = pa . Table . from_pandas ( dataframe ) parquet . write_table ( table , storepath ) elif parqlib is ParquetLib . SPARK : from pyspark import sql as sparksql assert isinstance ( dataframe , sparksql . DataFrame ) dataframe . write . parquet ( storepath ) else : assert False , "Unimplemented ParquetLib %s" % parqlib if os . path . isdir ( storepath ) : hashes = [ ] files = [ ofile for ofile in os . listdir ( storepath ) if ofile . endswith ( ".parquet" ) ] for obj in files : path = os . path . join ( storepath , obj ) objhash = digest_file ( path ) self . _move_to_store ( path , objhash ) hashes . append ( objhash ) rmtree ( storepath ) else : filehash = digest_file ( storepath ) self . _move_to_store ( storepath , filehash ) hashes = [ filehash ] return hashes | Save a DataFrame to the store . |
52,195 | def load_numpy ( self , hash_list ) : assert len ( hash_list ) == 1 self . _check_hashes ( hash_list ) with open ( self . object_path ( hash_list [ 0 ] ) , 'rb' ) as fd : return np . load ( fd , allow_pickle = False ) | Loads a numpy array . |
52,196 | def get_file ( self , hash_list ) : assert len ( hash_list ) == 1 self . _check_hashes ( hash_list ) return self . object_path ( hash_list [ 0 ] ) | Returns the path of the file - but verifies that the hash is actually present . |
52,197 | def save_metadata ( self , metadata ) : if metadata in ( None , { } ) : return None if SYSTEM_METADATA in metadata : raise StoreException ( "Not allowed to store %r in metadata" % SYSTEM_METADATA ) path = self . temporary_object_path ( str ( uuid . uuid4 ( ) ) ) with open ( path , 'w' ) as fd : try : json . dump ( metadata , fd , sort_keys = True , separators = ( ',' , ':' ) ) except ( TypeError , ValueError ) : raise StoreException ( "Metadata is not serializable" ) metahash = digest_file ( path ) self . _move_to_store ( path , metahash ) return metahash | Save metadata to the store . |
52,198 | def save_package_contents ( self , root , team , owner , pkgname ) : assert isinstance ( root , RootNode ) instance_hash = hash_contents ( root ) pkg_path = self . package_path ( team , owner , pkgname ) if not os . path . isdir ( pkg_path ) : os . makedirs ( pkg_path ) os . mkdir ( os . path . join ( pkg_path , self . CONTENTS_DIR ) ) os . mkdir ( os . path . join ( pkg_path , self . TAGS_DIR ) ) os . mkdir ( os . path . join ( pkg_path , self . VERSIONS_DIR ) ) dest = os . path . join ( pkg_path , self . CONTENTS_DIR , instance_hash ) with open ( dest , 'w' ) as contents_file : json . dump ( root , contents_file , default = encode_node , indent = 2 , sort_keys = True ) tag_dir = os . path . join ( pkg_path , self . TAGS_DIR ) if not os . path . isdir ( tag_dir ) : os . mkdir ( tag_dir ) latest_tag = os . path . join ( pkg_path , self . TAGS_DIR , self . LATEST ) with open ( latest_tag , 'w' ) as tagfile : tagfile . write ( "{hsh}" . format ( hsh = instance_hash ) ) | Saves the in - memory contents to a file in the local package repository . |
52,199 | def _move_to_store ( self , srcpath , objhash ) : destpath = self . object_path ( objhash ) if os . path . exists ( destpath ) : os . chmod ( destpath , S_IWUSR ) os . remove ( destpath ) os . chmod ( srcpath , S_IRUSR | S_IRGRP | S_IROTH ) move ( srcpath , destpath ) | Make the object read - only and move it to the store . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.