| idx (int64, 0-63k) | question (stringlengths 61-4.03k) | target (stringlengths 6-1.23k) |
|---|---|---|
6,000
|
def build_graph(path, term_depth=1000, skim_depth=10, d_weights=False, **kwargs):
    click.echo('\nTokenizing text...')
    t = Text.from_file(path)
    click.echo('Extracted %d tokens' % len(t.tokens))
    m = Matrix()
    click.echo('\nIndexing terms:')
    m.index(t, t.most_frequent_terms(term_depth), **kwargs)
    g = Skimmer()
    click.echo('\nGenerating graph:')
    g.build(t, m, skim_depth, d_weights)
    return g
|
Tokenize a text, index a term matrix, and build out a graph.
|
6,001
|
def draw_spring(self, **kwargs):
    nx.draw_spring(self.graph, with_labels=True, font_size=10,
                   edge_color='#dddddd', node_size=0, **kwargs)
    plt.show()
|
Render a spring layout.
|
6,002
|
def build(self, text, matrix, skim_depth=10, d_weights=False):
    for anchor in bar(matrix.keys):
        n1 = text.unstem(anchor)
        pairs = matrix.anchored_pairs(anchor).items()
        for term, weight in list(pairs)[:skim_depth]:
            if d_weights:
                weight = 1 - weight
            n2 = text.unstem(term)
            self.graph.add_edge(n1, n2, weight=float(weight))
|
1. For each term in the passed matrix, score its KDE similarity with all other indexed terms.
|
6,003
|
def get_settings(self, section=None, defaults=None):
    section = self._maybe_get_default_name(section)
    if self.filepath is None:
        return {}
    parser = self._get_parser(defaults)
    defaults = parser.defaults()
    try:
        raw_items = parser.items(section)
    except NoSectionError:
        return {}
    local_conf = OrderedDict()
    get_from_globals = {}
    for option, value in raw_items:
        if option.startswith("set "):
            name = option[4:].strip()
            defaults[name] = value
        elif option.startswith("get "):
            name = option[4:].strip()
            get_from_globals[name] = value
            local_conf[name] = None
        else:
            if option in defaults:
                continue
            local_conf[option] = value
    for option, global_option in get_from_globals.items():
        local_conf[option] = defaults[global_option]
    return ConfigDict(local_conf, defaults, self)
|
Gets a named section from the configuration source.
|
6,004
|
def get_wsgi_app(self, name=None, defaults=None):
    name = self._maybe_get_default_name(name)
    defaults = self._get_defaults(defaults)
    return loadapp(self.pastedeploy_spec,
                   name=name,
                   relative_to=self.relative_to,
                   global_conf=defaults)
|
Reads the configuration source and finds and loads a WSGI application defined by the entry with the given name, per the PasteDeploy configuration format and loading mechanism.
|
6,005
|
def get_wsgi_server(self, name=None, defaults=None):
    name = self._maybe_get_default_name(name)
    defaults = self._get_defaults(defaults)
    return loadserver(self.pastedeploy_spec,
                      name=name,
                      relative_to=self.relative_to,
                      global_conf=defaults)
|
Reads the configuration source and finds and loads a WSGI server defined by the server entry with the given name, per the PasteDeploy configuration format and loading mechanism.
|
6,006
|
def get_wsgi_filter(self, name=None, defaults=None):
    name = self._maybe_get_default_name(name)
    defaults = self._get_defaults(defaults)
    return loadfilter(self.pastedeploy_spec,
                      name=name,
                      relative_to=self.relative_to,
                      global_conf=defaults)
|
Reads the configuration source and finds and loads a WSGI filter defined by the filter entry with the given name, per the PasteDeploy configuration format and loading mechanism.
|
6,007
|
def _maybe_get_default_name(self, name):
    if name is None and self.uri.fragment:
        name = self.uri.fragment
    return name
|
Checks a name and determines whether to use the default name.
|
6,008
|
def set(self, hue):
    x = hue / 360. * self.winfo_width()
    self.coords('cursor', x, 0, x, self.winfo_height())
    self._variable.set(hue)
|
Set the cursor position on the color corresponding to the hue value.
|
6,009
|
def getPageType(name, number=False):
    if not name in pageNames():
        return None
    pageType = PyOrigin.Pages(name).GetType()
    if number:
        return str(pageType)
    if pageType == 1:
        return "matrix"
    if pageType == 2:
        return "book"
    if pageType == 3:
        return "graph"
    if pageType == 4:
        return "layout"
    if pageType == 5:
        return "notes"
|
Returns the type of the page with that name. If that name doesn't exist, None is returned.
|
6,010
|
def listEverything(matching=False):
    pages = pageNames()
    if matching:
        pages = [x for x in pages if matching in x]
    for i, page in enumerate(pages):
        pages[i] = "%s%s (%s)" % (pageFolder(page), page, getPageType(page))
    print("\n".join(sorted(pages)))
|
Prints every page in the project to the console.
|
6,011
|
def sheetNames(book=None):
    if book:
        if not book.lower() in [x.lower() for x in bookNames()]:
            return False
    else:
        book = activeBook()
    if not book:
        return False
    poBook = PyOrigin.WorksheetPages(book)
    if not len(poBook):
        return None
    return [x.GetName() for x in poBook.Layers()]
|
Return the sheet names of a book.
|
6,012
|
def getSheet(book=None, sheet=None):
    if book and not book.lower() in [x.lower() for x in bookNames()]:
        print("book %s doesn't exist" % book)
        return
    if book is None:
        book = activeBook().lower()
    if book is None:
        print("no book given or selected")
        return
    if sheet and not sheet.lower() in [x.lower() for x in sheetNames(book)]:
        print("sheet %s doesn't exist" % sheet)
        return
    if sheet is None:
        sheet = activeSheet().lower()
    if sheet is None:
        return ("no sheet given or selected")
    print  # stray bare print statement preserved from the original (Python 2) source
    for poSheet in PyOrigin.WorksheetPages(book).Layers():
        if poSheet.GetName().lower() == sheet.lower():
            return poSheet
    return False
|
Returns the PyOrigin object for a sheet.
|
6,013
|
def sheetDelete(book=None, sheet=None):
    if book is None:
        book = activeBook()
    if sheet in sheetNames():
        PyOrigin.WorksheetPages(book).Layers(sheetNames().index(sheet)).Destroy()
|
Delete a sheet from a book. If either isn't given, use the active one.
|
6,014
|
def sheetDeleteEmpty(bookName=None):
    if bookName is None:
        bookName = activeBook()
    if not bookName.lower() in [x.lower() for x in bookNames()]:
        print("can't clean up a book that doesn't exist:", bookName)
        return
    poBook = PyOrigin.WorksheetPages(bookName)
    namesToKill = []
    for i, poSheet in enumerate([poSheet for poSheet in poBook.Layers()]):
        poFirstCol = poSheet.Columns(0)
        if poFirstCol.GetLongName() == "" and poFirstCol.GetData() == []:
            namesToKill.append(poSheet.GetName())
    for sheetName in namesToKill:
        print("deleting empty sheet", sheetName)
        sheetDelete(bookName, sheetName)
|
Delete all sheets which contain no data.
|
6,015
|
def pickle_load(fname):
    assert type(fname) is str and os.path.exists(fname)
    print("loaded", fname)
    return pickle.load(open(fname, "rb"))
|
Return the contents of a pickle file.
|
6,016
|
def pickle_save(thing, fname=None):
    if fname is None:
        fname = os.path.expanduser("~") + "/%d.pkl" % time.time()
    assert type(fname) is str and os.path.isdir(os.path.dirname(fname))
    pickle.dump(thing, open(fname, "wb"), pickle.HIGHEST_PROTOCOL)
    print("saved", fname)
|
Save something to a pickle file.
|
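The two helpers above are thin wrappers around a plain pickle round trip. A minimal standard-library sketch of the same idea (the file path is illustrative), using context managers so the file handles are closed promptly:

```python
import os
import pickle
import tempfile

# Round-trip a small object through a pickle file.
thing = {'a': 1, 'b': [2, 3]}
path = os.path.join(tempfile.gettempdir(), 'demo.pkl')  # illustrative path

with open(path, 'wb') as f:
    pickle.dump(thing, f, pickle.HIGHEST_PROTOCOL)

with open(path, 'rb') as f:
    assert pickle.load(f) == thing
```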
6,017
|
def getCodeBlocks():
    raw = open("examples.py").read()
    d = {}
    for block in raw.split("if __name__")[0].split("\ndef "):
        title = block.split("\n")[0].split("(")[0]
        if not title.startswith("demo_"):
            continue
        code = [x[4:] for x in block.split("\n")[1:] if x.startswith("    ")]
        d[title] = "\n".join(code).strip()
    return d
|
Return a dict with the code for each function.
|
6,018
|
def getOutputBlocks():
    raw = open("output.txt").read()
    d = {}
    for block in raw.split("\n####### ")[1:]:
        title = block.split("\n")[0].split("(")[0]
        block = block.split("\n", 1)[1].strip()
        d[title] = block.split("\nfinished in ")[0]
    return d
|
Return a dict with the output of each function.
|
6,019
|
def decodeCommandLine(self, cmdline):
    codec = getattr(sys.stdin, 'encoding', None) or sys.getdefaultencoding()
    return unicode(cmdline, codec)
|
Turn a byte string from the command line into a unicode string.
|
6,020
|
def tokenize(text):
    stem = PorterStemmer().stem
    tokens = re.finditer('[a-z]+', text.lower())
    for offset, match in enumerate(tokens):
        unstemmed = match.group(0)
        yield {
            'stemmed': stem(unstemmed),
            'unstemmed': unstemmed,
            'offset': offset,
        }
|
Yield tokens.
|
6,021
|
def sort_dict(d, desc=True):
    sort = sorted(d.items(), key=lambda x: x[1], reverse=desc)
    return OrderedDict(sort)
|
Sort a dictionary by value, descending by default; returns an OrderedDict.
|
6,022
|
def window(seq, n=2):
    it = iter(seq)
    result = tuple(islice(it, n))
    if len(result) == n:
        yield result
    for token in it:
        result = result[1:] + (token,)
        yield result
|
Yield a sliding window over an iterable.
|
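A quick usage sketch of the sliding-window generator above (self-contained copy; the inputs are made up):

```python
from itertools import islice

def window(seq, n=2):
    it = iter(seq)
    result = tuple(islice(it, n))
    if len(result) == n:
        yield result
    for token in it:
        # Drop the oldest element and append the next one.
        result = result[1:] + (token,)
        yield result

print(list(window('abcde', 2)))
# [('a', 'b'), ('b', 'c'), ('c', 'd'), ('d', 'e')]
print(list(window([1, 2, 3, 4], 3)))
# [(1, 2, 3), (2, 3, 4)]
```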
6,023
|
def insertUserStore(siteStore, userStorePath):
    ls = siteStore.findUnique(LoginSystem)
    unattachedSubStore = Store(userStorePath)
    for lm in unattachedSubStore.query(
            LoginMethod,
            LoginMethod.account == unattachedSubStore.findUnique(LoginAccount),
            sort=LoginMethod.internal.descending):
        if ls.accountByAddress(lm.localpart, lm.domain) is None:
            localpart, domain = lm.localpart, lm.domain
            break
    else:
        raise AllNamesConflict()
    unattachedSubStore.close()
    insertLocation = siteStore.newFilePath('account', domain, localpart + '.axiom')
    insertParentLoc = insertLocation.parent()
    if not insertParentLoc.exists():
        insertParentLoc.makedirs()
    if insertLocation.exists():
        raise DatabaseDirectoryConflict()
    userStorePath.moveTo(insertLocation)
    ss = SubStore(store=siteStore, storepath=insertLocation)
    attachedStore = ss.open()
    attachedStore.findUnique(LoginAccount).migrateUp()
|
Move the SubStore at the indicated location into the given site store's directory, and then hook it up to the site store's authentication database.
|
6,024
|
def extractUserStore(userAccount, extractionDestination, legacySiteAuthoritative=True):
    if legacySiteAuthoritative:
        userAccount.migrateDown()
    av = userAccount.avatars
    av.open().close()

    def _():
        av.deleteFromStore()
        userAccount.deleteLoginMethods()
        userAccount.deleteFromStore()
        av.storepath.moveTo(extractionDestination)

    userAccount.store.transact(_)
|
Move the SubStore for the given user account out of the given site store completely. Place the user store's database directory into the given destination directory.
|
6,025
|
def getAccountNames(store, protocol=None):
    return ((meth.localpart, meth.domain)
            for meth in getLoginMethods(store, protocol))
|
Retrieve account name information about the given database.
|
6,026
|
def getDomainNames(store):
    domains = set()
    domains.update(store.query(
        LoginMethod,
        AND(LoginMethod.internal == True,
            LoginMethod.domain != None)).getColumn("domain").distinct())
    return sorted(domains)
|
Retrieve a list of all local domain names represented in the given store.
|
6,027
|
def cloneInto(self, newStore, avatars):
    la = LoginAccount(store=newStore,
                      password=self.password,
                      avatars=avatars,
                      disabled=self.disabled)
    for siteMethod in self.store.query(LoginMethod,
                                       LoginMethod.account == self):
        LoginMethod(store=newStore,
                    localpart=siteMethod.localpart,
                    domain=siteMethod.domain,
                    internal=siteMethod.internal,
                    protocol=siteMethod.protocol,
                    verified=siteMethod.verified,
                    account=la)
    return la
|
Create a copy of this LoginAccount and all associated LoginMethods in a different Store.
|
6,028
|
def addLoginMethod(self, localpart, domain, protocol=ANY_PROTOCOL,
                   verified=False, internal=False):
    if self.store.parent is None:
        otherStore = self.avatars.open()
        peer = otherStore.findUnique(LoginAccount)
    else:
        otherStore = self.store.parent
        subStoreItem = self.store.parent.getItemByID(self.store.idInParent)
        peer = otherStore.findUnique(LoginAccount,
                                     LoginAccount.avatars == subStoreItem)
    for store, account in [(otherStore, peer), (self.store, self)]:
        store.findOrCreate(LoginMethod,
                           account=account,
                           localpart=localpart,
                           domain=domain,
                           protocol=protocol,
                           verified=verified,
                           internal=internal)
|
Add a login method to this account, propagating up or down (to the site store or user store) as necessary to maintain consistency.
|
6,029
|
def replacePassword(self, currentPassword, newPassword):
    if unicode(currentPassword) != self.password:
        return fail(BadCredentials())
    return self.setPassword(newPassword)
|
Set this account's password if the current password matches.
|
6,030
|
def addAccount(self, username, domain, password, avatars=None,
               protocol=u'email', disabled=0, internal=False, verified=True):
    if username is not None:
        username = unicode(username)
    if domain is not None:
        domain = unicode(domain)
    if password is not None:
        password = unicode(password)
    if self.accountByAddress(username, domain) is not None:
        raise DuplicateUser(username, domain)
    if avatars is None:
        avatars = self.makeAvatars(domain, username)
    subStore = avatars.open()
    la = LoginAccount(store=self.store,
                      password=password,
                      avatars=avatars,
                      disabled=disabled)

    def createSubStoreAccountObjects():
        LoginAccount(store=subStore,
                     password=password,
                     disabled=disabled,
                     avatars=subStore)
        la.addLoginMethod(localpart=username,
                          domain=domain,
                          protocol=protocol,
                          internal=internal,
                          verified=verified)

    subStore.transact(createSubStoreAccountObjects)
    return la
|
Create a user account, add it to this LoginBase, and return it.
|
6,031
|
def identifySQLError(self, sql, args, e):
    message = e.args[0]
    if message.startswith("table") and message.endswith("already exists"):
        return errors.TableAlreadyExists(sql, args, e)
    return errors.SQLError(sql, args, e)
|
Identify an appropriate SQL error object for the given message, for the supported versions of SQLite.
|
6,032
|
def createCacheRemoveCallback(cacheRef, key, finalizer):
    def remove(reference):
        try:
            finalizer()
        except:
            logErrorNoMatterWhat()
        try:
            cache = cacheRef()
            if cache is not None:
                if key in cache.data:
                    if cache.data[key] is reference:
                        del cache.data[key]
        except:
            logErrorNoMatterWhat()
    return remove
|
Construct a callable to be used as a weakref callback for cache entries.
|
6,033
|
def cache(self, key, value):
    fin = value.__finalizer__()
    try:
        if self.data[key]() is not None:
            raise CacheInconsistency(
                "Duplicate cache key: %r %r %r" % (key, value, self.data[key]))
    except KeyError:
        pass
    callback = createCacheRemoveCallback(self._ref(self), key, fin)
    self.data[key] = self._ref(value, callback)
    return value
|
Add an entry to the cache.
|
6,034
|
def get(self, key):
    o = self.data[key]()
    if o is None:
        del self.data[key]
        raise CacheFault(
            "FinalizingCache has %r but its value is no more." % (key,))
    log.msg(interface=iaxiom.IStatEvent, stat_cache_hits=1, key=key)
    return o
|
Get an entry from the cache by key.
|
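Rows 6,032-6,034 together implement a finalizing weakref cache: each entry is a weak reference whose callback evicts the entry once the value is collected. A minimal, self-contained sketch of the same pattern using only the standard library (the MiniCache name and the simplified error handling are illustrative, not from the source):

```python
import weakref

class MiniCache:
    """Entries evict themselves when their values are garbage collected."""

    def __init__(self):
        self.data = {}

    def cache(self, key, value):
        self_ref = weakref.ref(self)

        def remove(reference):
            cache = self_ref()
            # Only evict if the dead reference is still the current entry.
            if cache is not None and cache.data.get(key) is reference:
                del cache.data[key]

        self.data[key] = weakref.ref(value, remove)
        return value

    def get(self, key):
        obj = self.data[key]()
        if obj is None:
            del self.data[key]
            raise KeyError('value for %r was garbage collected' % (key,))
        return obj
```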
6,035
|
def parse_from_xml(root):
    if root.tag != 'ubcpi':
        raise UpdateFromXmlError(_('Every peer instruction tool must contain an "ubcpi" element.'))

    display_name_el = root.find('display_name')
    if display_name_el is None:
        raise UpdateFromXmlError(_('Every peer instruction tool must contain a "display_name" element.'))
    else:
        display_name = _safe_get_text(display_name_el)

    rationale_size_min = int(root.attrib['rationale_size_min']) if 'rationale_size_min' in root.attrib else None
    rationale_size_max = int(root.attrib['rationale_size_max']) if 'rationale_size_max' in root.attrib else None

    question_el = root.find('question')
    if question_el is None:
        raise UpdateFromXmlError(_('Every peer instruction must tool contain a "question" element.'))
    else:
        question = parse_question_xml(question_el)

    options_el = root.find('options')
    if options_el is None:
        raise UpdateFromXmlError(_('Every peer instruction must tool contain a "options" element.'))
    else:
        options, correct_answer, correct_rationale = parse_options_xml(options_el)

    seeds_el = root.find('seeds')
    if seeds_el is None:
        raise UpdateFromXmlError(_('Every peer instruction must tool contain a "seeds" element.'))
    else:
        seeds = parse_seeds_xml(seeds_el)

    algo = unicode(root.attrib['algorithm']) if 'algorithm' in root.attrib else None
    num_responses = unicode(root.attrib['num_responses']) if 'num_responses' in root.attrib else None

    return {
        'display_name': display_name,
        'question_text': question,
        'options': options,
        'rationale_size': {'min': rationale_size_min, 'max': rationale_size_max},
        'correct_answer': correct_answer,
        'correct_rationale': correct_rationale,
        'seeds': seeds,
        'algo': {"name": algo, 'num_responses': num_responses},
    }
|
Update the UBCPI XBlock's content from an XML definition.
|
6,036
|
def serialize_options(options, block):
    for index, option_dict in enumerate(block.options):
        option = etree.SubElement(options, 'option')
        if index == block.correct_answer:
            option.set('correct', u'True')
            if hasattr(block, 'correct_rationale'):
                rationale = etree.SubElement(option, 'rationale')
                rationale.text = block.correct_rationale['text']
        text = etree.SubElement(option, 'text')
        text.text = option_dict.get('text', '')
        serialize_image(option_dict, option)
|
Serialize the options in the peer instruction XBlock to XML.
|
6,037
|
def serialize_seeds(seeds, block):
    for seed_dict in block.seeds:
        seed = etree.SubElement(seeds, 'seed')
        seed.set('option', unicode(seed_dict.get('answer', 0) + 1))
        seed.text = seed_dict.get('rationale', '')
|
Serialize the seeds in the peer instruction XBlock to XML.
|
6,038
|
def serialize_to_xml(root, block):
    root.tag = 'ubcpi'
    if block.rationale_size is not None:
        if block.rationale_size.get('min'):
            root.set('rationale_size_min', unicode(block.rationale_size.get('min')))
        if block.rationale_size.get('max'):
            root.set('rationale_size_max', unicode(block.rationale_size['max']))
    if block.algo:
        if block.algo.get('name'):
            root.set('algorithm', block.algo.get('name'))
        if block.algo.get('num_responses'):
            root.set('num_responses', unicode(block.algo.get('num_responses')))
    display_name = etree.SubElement(root, 'display_name')
    display_name.text = block.display_name
    question = etree.SubElement(root, 'question')
    question_text = etree.SubElement(question, 'text')
    question_text.text = block.question_text['text']
    serialize_image(block.question_text, question)
    options = etree.SubElement(root, 'options')
    serialize_options(options, block)
    seeds = etree.SubElement(root, 'seeds')
    serialize_seeds(seeds, block)
|
Serialize the Peer Instruction XBlock's content to XML.
|
6,039
|
def open(self):
    if not self.handle:
        self.lvm.open()
        self.__vgh = lvm_vg_open(self.lvm.handle, self.name, self.mode)
        if not bool(self.__vgh):
            raise HandleError("Failed to initialize VG Handle.")
|
Obtains the lvm and vg_t handle. Usually you would never need to use this method unless you are doing operations using the ctypes function wrappers in conversion.py.
|
6,040
|
def close(self):
    if self.handle:
        cl = lvm_vg_close(self.handle)
        if cl != 0:
            raise HandleError("Failed to close VG handle after init check.")
        self.__vgh = None
        self.lvm.close()
|
Closes the lvm and vg_t handle. Usually you would never need to use this method unless you are doing operations using the ctypes function wrappers in conversion.py.
|
6,041
|
def uuid(self):
    self.open()
    uuid = lvm_vg_get_uuid(self.handle)
    self.close()
    return uuid
|
Returns the volume group uuid.
|
6,042
|
def extent_count(self):
    self.open()
    count = lvm_vg_get_extent_count(self.handle)
    self.close()
    return count
|
Returns the volume group extent count.
|
6,043
|
def free_extent_count(self):
    self.open()
    count = lvm_vg_get_free_extent_count(self.handle)
    self.close()
    return count
|
Returns the volume group free extent count.
|
6,044
|
def pv_count(self):
    self.open()
    count = lvm_vg_get_pv_count(self.handle)
    self.close()
    return count
|
Returns the physical volume count.
|
6,045
|
def max_pv_count(self):
    self.open()
    count = lvm_vg_get_max_pv(self.handle)
    self.close()
    return count
|
Returns the maximum allowed physical volume count.
|
6,046
|
def max_lv_count(self):
    self.open()
    count = lvm_vg_get_max_lv(self.handle)
    self.close()
    return count
|
Returns the maximum allowed logical volume count.
|
6,047
|
def is_clustered(self):
    self.open()
    clust = lvm_vg_is_clustered(self.handle)
    self.close()
    return bool(clust)
|
Returns True if the VG is clustered, False otherwise.
|
6,048
|
def is_exported(self):
    self.open()
    exp = lvm_vg_is_exported(self.handle)
    self.close()
    return bool(exp)
|
Returns True if the VG is exported, False otherwise.
|
6,049
|
def is_partial(self):
    self.open()
    part = lvm_vg_is_partial(self.handle)
    self.close()
    return bool(part)
|
Returns True if the VG is partial, False otherwise.
|
6,050
|
def sequence(self):
    self.open()
    seq = lvm_vg_get_seqno(self.handle)
    self.close()
    return seq
|
Returns the volume group sequence number. This number increases every time the volume group is modified.
|
6,051
|
def size(self, units="MiB"):
    self.open()
    size = lvm_vg_get_size(self.handle)
    self.close()
    return size_convert(size, units)
|
Returns the volume group size in the given units. Default units are MiB.
|
6,052
|
def free_size(self, units="MiB"):
    self.open()
    size = lvm_vg_get_free_size(self.handle)
    self.close()
    return size_convert(size, units)
|
Returns the volume group free size in the given units. Default units are MiB.
|
6,053
|
def extent_size(self, units="MiB"):
    self.open()
    size = lvm_vg_get_extent_size(self.handle)
    self.close()
    return size_convert(size, units)
|
Returns the volume group extent size in the given units. Default units are MiB.
|
6,054
|
def remove_all_lvs(self):
    lvs = self.lvscan()
    for lv in lvs:
        self.remove_lv(lv)
|
Removes all logical volumes from the volume group.
|
6,055
|
def set_pair(self, term1, term2, value, **kwargs):
    key = self.key(term1, term2)
    self.keys.update([term1, term2])
    self.pairs[key] = value
|
Set the value for a pair of terms.
|
6,056
|
def get_pair(self, term1, term2):
    key = self.key(term1, term2)
    return self.pairs.get(key, None)
|
Get the value for a pair of terms.
|
6,057
|
def index(self, text, terms=None, **kwargs):
    self.clear()
    terms = terms or text.terms.keys()
    pairs = combinations(terms, 2)
    count = comb(len(terms), 2)
    for t1, t2 in bar(pairs, expected_size=count, every=1000):
        score = text.score_braycurtis(t1, t2, **kwargs)
        self.set_pair(t1, t2, score)
|
Index all term-pair distances.
|
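The indexing pass above scores every unordered pair of terms, so the work grows as C(n, 2); a toy illustration of the pair enumeration (the term list is made up):

```python
from itertools import combinations
from math import comb

terms = ['whale', 'sea', 'ship', 'captain']
print(comb(len(terms), 2))  # 6 unordered pairs
for t1, t2 in combinations(terms, 2):
    print(t1, t2)
```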
6,058
|
def anchored_pairs(self, anchor):
    pairs = OrderedDict()
    for term in self.keys:
        score = self.get_pair(anchor, term)
        if score:
            pairs[term] = score
    return utils.sort_dict(pairs)
|
Get distances between an anchor term and all other terms.
|
6,059
|
def from_rgb(r, g=None, b=None):
    c = r if isinstance(r, list) else [r, g, b]
    best = {}
    for index, item in enumerate(colors):
        d = __distance(item, c)
        if (not best or d <= best['distance']):
            best = {'distance': d, 'index': index}
    if 'index' in best:
        return best['index']
    else:
        return 1
|
Return the nearest xterm-256 color code for the given RGB input.
|
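A self-contained sketch of the same nearest-color search; the three-entry palette and the squared-Euclidean distance here are illustrative stand-ins for the module's full xterm-256 table and its __distance helper:

```python
def rgb_distance(c1, c2):
    # Squared Euclidean distance between two RGB triples.
    return sum((a - b) ** 2 for a, b in zip(c1, c2))

# Tiny illustrative palette: index -> RGB.
palette = [(0, 0, 0), (128, 128, 128), (255, 255, 255)]

def nearest(rgb):
    return min(range(len(palette)), key=lambda i: rgb_distance(palette[i], rgb))

print(nearest((200, 200, 200)))  # 2, the index closest to white
```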
6,060
|
def entry():
    parser = argparse.ArgumentParser()
    parser.add_argument('action', help='Action to take',
                        choices=['from_hex', 'to_rgb', 'to_hex'])
    parser.add_argument('value', help='Value for the action')
    parsed = parser.parse_args()
    if parsed.action != "from_hex":
        try:
            parsed.value = int(parsed.value)
        except ValueError:
            raise argparse.ArgumentError(
                "Value for this action should be an integer")
    print(globals()[parsed.action](parsed.value))
|
Parse command-line arguments and run utilities.
|
6,061
|
def makeSoftwareVersion(store, version, systemVersion):
    return store.findOrCreate(SoftwareVersion,
                              systemVersion=systemVersion,
                              package=unicode(version.package),
                              version=unicode(version.short()),
                              major=version.major,
                              minor=version.minor,
                              micro=version.micro)
|
Return the SoftwareVersion object from the store corresponding to the version object, creating it if it doesn't already exist.
|
6,062
|
def listVersionHistory(store):
    q = store.query(SystemVersion, sort=SystemVersion.creation.descending)
    return [sv.longWindedRepr() for sv in q]
|
List the software package version history of the store.
|
6,063
|
def checkSystemVersion(s, versions=None):
    if versions is None:
        versions = getSystemVersions()
    currentVersionMap = dict([(v.package, v) for v in versions])
    mostRecentSystemVersion = s.findFirst(
        SystemVersion, sort=SystemVersion.creation.descending)
    mostRecentVersionMap = dict([
        (v.package, v.asVersion())
        for v in s.query(SoftwareVersion,
                         (SoftwareVersion.systemVersion == mostRecentSystemVersion))
    ])
    if mostRecentVersionMap != currentVersionMap:
        currentSystemVersion = SystemVersion(store=s, creation=Time())
        for v in currentVersionMap.itervalues():
            makeSoftwareVersion(s, v, currentSystemVersion)
|
Check if the current version is different from the previously recorded version. If it is, or if there is no previously recorded version, create a version matching the current config.
|
6,064
|
def reset(self):
    self.colNames, self.colDesc, self.colUnits, self.colComments, \
        self.colTypes, self.colData = [], [], [], [], [], []
|
Clears all columns.
|
6,065
|
def colDelete(self, colI=-1):
    self.colNames.pop(colI)
    self.colDesc.pop(colI)
    self.colUnits.pop(colI)
    self.colComments.pop(colI)
    self.colTypes.pop(colI)
    self.colData.pop(colI)
    return
|
Delete a column at a single index. Negative numbers count from the end.
|
6,066
|
def onex(self):
    xCols = [i for i in range(self.nCols) if self.colTypes[i] == 3]
    if len(xCols) > 1:
        for colI in xCols[1:][::-1]:
            self.colDelete(colI)
|
Delete all X columns except the first one.
|
6,067
|
def wiggle(self, noiseLevel=.1):
    noise = (np.random.rand(*self.data.shape)) - .5
    self.data = self.data + noise * noiseLevel
|
Slightly changes the value of every cell in the worksheet. Used for testing.
|
6,068
|
def nRows(self):
    if self.nCols:
        return max([len(x) for x in self.colData])
    else:
        return 0
|
Returns the maximum number of rows, based on the longest colData.
|
6,069
|
def data(self):
    data = np.empty((self.nRows, self.nCols), dtype=np.float)
    data[:] = np.nan
    for colNum, colData in enumerate(self.colData):
        validIs = np.where([np.isreal(v) for v in colData])[0]
        validData = np.ones(len(colData)) * np.nan
        validData[validIs] = np.array(colData)[validIs]
        data[:len(colData), colNum] = validData
    return data
|
Return all of colData as a 2D numpy array.
|
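The getter above pads ragged columns with NaN to produce a rectangular array; a compact sketch of that padding with made-up columns:

```python
import numpy as np

colData = [[1.0, 2.0, 3.0], [4.0, 5.0]]  # ragged columns
nRows = max(len(c) for c in colData)

data = np.full((nRows, len(colData)), np.nan)
for colNum, col in enumerate(colData):
    data[:len(col), colNum] = col

print(data)
# [[ 1.  4.]
#  [ 2.  5.]
#  [ 3. nan]]
```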
6,070
|
def data(self, data):
    assert type(data) is np.ndarray
    assert data.shape[1] == self.nCols
    for i in range(self.nCols):
        self.colData[i] = data[:, i].tolist()
|
Given a 2D numpy array, fill colData with it.
|
6,071
|
def open(self):
    if not self.handle:
        try:
            path = self.system_dir
        except AttributeError:
            path = ''
        self.__handle = lvm_init(path)
        if not bool(self.__handle):
            raise HandleError("Failed to initialize LVM handle.")
|
Obtains the lvm handle. Usually you would never need to use this method unless you are trying to do operations using the ctypes function wrappers in conversion.py.
|
6,072
|
def close(self):
    if self.handle:
        q = lvm_quit(self.handle)
        if q != 0:
            raise HandleError("Failed to close LVM handle.")
        self.__handle = None
|
Closes the lvm handle. Usually you would never need to use this method unless you are trying to do operations using the ctypes function wrappers in conversion.py.
|
6,073
|
def from_file(cls, path):
    with open(path, 'r', errors='replace') as f:
        return cls(f.read())
|
Create a text from a file.
|
6,074
|
def load_stopwords(self, path):
    if path:
        with open(path) as f:
            self.stopwords = set(f.read().splitlines())
    else:
        self.stopwords = set(
            pkgutil.get_data('textplot', 'data/stopwords.txt')
            .decode('utf8')
            .splitlines()
        )
|
Load a set of stopwords.
|
6,075
|
def tokenize(self):
    self.tokens = []
    self.terms = OrderedDict()
    for token in utils.tokenize(self.text):
        if token['unstemmed'] in self.stopwords:
            self.tokens.append(None)
        else:
            self.tokens.append(token)
            offsets = self.terms.setdefault(token['stemmed'], [])
            offsets.append(token['offset'])
|
Tokenize the text.
|
6,076
|
def most_frequent_terms(self, depth):
    counts = self.term_counts()
    top_terms = set(list(counts.keys())[:depth])
    end_count = list(counts.values())[:depth][-1]
    bucket = self.term_count_buckets()[end_count]
    return top_terms.union(set(bucket))
|
Get the X most frequent terms in the text, and then probe down to get any other terms that have the same count as the last term.
|
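The reason for the "probe down" step above is that a hard depth cutoff can split a group of terms with identical counts; a toy illustration with made-up counts:

```python
from collections import Counter

counts = Counter({'whale': 90, 'sea': 50, 'ship': 30, 'boat': 30, 'mast': 10})
depth = 3

top = [term for term, _ in counts.most_common(depth)]
end_count = counts[top[-1]]  # count of the last term that made the cut

# Pull in every term tied with the last one, so the cutoff is not arbitrary.
top_with_ties = {term for term, count in counts.items() if count >= end_count}
print(sorted(top_with_ties))  # ['boat', 'sea', 'ship', 'whale']
```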
6,077
|
def unstem(self, term):
    originals = []
    for i in self.terms[term]:
        originals.append(self.tokens[i]['unstemmed'])
    mode = Counter(originals).most_common(1)
    return mode[0][0]
|
Given a stemmed term, get the most common unstemmed variant.
|
6,078
|
def kde(self, term, bandwidth=2000, samples=1000, kernel='gaussian'):
    terms = np.array(self.terms[term])[:, np.newaxis]
    kde = KernelDensity(kernel=kernel, bandwidth=bandwidth).fit(terms)
    x_axis = np.linspace(0, len(self.tokens), samples)[:, np.newaxis]
    scores = kde.score_samples(x_axis)
    return np.exp(scores) * (len(self.tokens) / samples)
|
Estimate the kernel density of the instances of term in the text.
|
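A standalone sketch of the density estimate above, using scikit-learn directly; the offsets, text length, and bandwidth are made up for illustration:

```python
import numpy as np
from sklearn.neighbors import KernelDensity

# Hypothetical token offsets at which a term appears in a 10,000-token text.
offsets = np.array([120, 150, 400, 405, 9000])[:, np.newaxis]

kde = KernelDensity(kernel='gaussian', bandwidth=500).fit(offsets)
x_axis = np.linspace(0, 10000, 1000)[:, np.newaxis]

# score_samples returns log densities; exponentiate to recover the curve.
density = np.exp(kde.score_samples(x_axis))

# Scaling by (text_length / samples) makes the sampled curve integrate to ~1.
print(np.trapz(density * (10000 / 1000)))  # ~1.0
```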
6,079
|
def score_intersect(self, term1, term2, **kwargs):
    t1_kde = self.kde(term1, **kwargs)
    t2_kde = self.kde(term2, **kwargs)
    overlap = np.minimum(t1_kde, t2_kde)
    return np.trapz(overlap)
|
Compute the geometric area of the overlap between the kernel density estimates of two terms.
|
6,080
|
def score_cosine(self, term1, term2, **kwargs):
    t1_kde = self.kde(term1, **kwargs)
    t2_kde = self.kde(term2, **kwargs)
    return 1 - distance.cosine(t1_kde, t2_kde)
|
Compute a weighting score based on the cosine distance between the kernel density estimates of two terms.
|
6,081
|
def score_braycurtis(self, term1, term2, **kwargs):
    t1_kde = self.kde(term1, **kwargs)
    t2_kde = self.kde(term2, **kwargs)
    return 1 - distance.braycurtis(t1_kde, t2_kde)
|
Compute a weighting score based on the Bray-Curtis distance between the kernel density estimates of two terms.
|
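The three scoring methods in rows 6,079-6,081 differ only in the distance applied to the same pair of density curves; a toy side-by-side with made-up densities:

```python
import numpy as np
from scipy.spatial import distance

# Two made-up density curves sampled on the same grid.
a = np.array([0.0, 0.2, 0.5, 0.2, 0.1])
b = np.array([0.1, 0.3, 0.4, 0.2, 0.0])

print(np.trapz(np.minimum(a, b)))     # overlap area, as in score_intersect
print(1 - distance.cosine(a, b))      # cosine similarity, as in score_cosine
print(1 - distance.braycurtis(a, b))  # Bray-Curtis similarity, as in score_braycurtis
```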
6,082
|
def plot_term_kdes(self, words, **kwargs):
    stem = PorterStemmer().stem
    for word in words:
        kde = self.kde(stem(word), **kwargs)
        plt.plot(kde)
    plt.show()
|
Plot kernel density estimates for multiple words.
|
6,083
|
def _tupleCompare(tuple1, ineq, tuple2,
                  eq=lambda a, b: (a == b),
                  ander=AND,
                  orer=OR):
    orholder = []
    for limit in range(len(tuple1)):
        # Python 2: zip returns a list, so slicing works directly.
        eqconstraint = [
            eq(elem1, elem2) for elem1, elem2 in zip(tuple1, tuple2)[:limit]]
        ineqconstraint = ineq(tuple1[limit], tuple2[limit])
        orholder.append(ander(*(eqconstraint + [ineqconstraint])))
    return orer(*orholder)
|
Compare two in-database tuples. Useful when sorting by a compound key and slicing into the middle of that query.
|
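The expansion built above is the standard lexicographic comparison: (a1, a2) < (b1, b2) becomes a1 < b1 OR (a1 == b1 AND a2 < b2). A plain-Python sketch of the same expansion on concrete tuples (the string clause representation is illustrative; the real code emits Axiom AND/OR query objects):

```python
def tuple_compare(t1, ineq, t2):
    """Expand a lexicographic comparison into OR-of-AND clauses."""
    clauses = []
    for limit in range(len(t1)):
        # All earlier positions equal, current position strictly ordered.
        eqs = ['%s == %s' % (t1[i], t2[i]) for i in range(limit)]
        clauses.append(eqs + ['%s %s %s' % (t1[limit], ineq, t2[limit])])
    return clauses  # OR over clauses; AND within each clause

for clause in tuple_compare(('a1', 'a2'), '<', ('b1', 'b2')):
    print(' AND '.join(clause))
# a1 < b1
# a1 == b1 AND a2 < b2
```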
6,084
|
def truncate_rationale(rationale, max_length=MAX_RATIONALE_SIZE_IN_EVENT):
    if isinstance(rationale, basestring) and max_length is not None \
            and len(rationale) > max_length:
        return rationale[0:max_length], True
    else:
        return rationale, False
|
Truncates the rationale for analytics event emission, if necessary.
|
6,085
|
def validate_options(options):
    errors = []
    if int(options['rationale_size']['min']) < 1:
        errors.append(_('Minimum Characters'))
    if int(options['rationale_size']['max']) < 0 \
            or int(options['rationale_size']['max']) > MAX_RATIONALE_SIZE:
        errors.append(_('Maximum Characters'))
    if not any(error in [_('Minimum Characters'), _('Maximum Characters')]
               for error in errors) \
            and int(options['rationale_size']['max']) <= int(options['rationale_size']['min']):
        errors += [_('Minimum Characters'), _('Maximum Characters')]
    try:
        if options['algo']['num_responses'] != '#' \
                and int(options['algo']['num_responses']) < 0:
            errors.append(_('Number of Responses'))
    except ValueError:
        errors.append(_('Not an Integer'))
    if not errors:
        return None
    else:
        return {'options_error': _('Invalid Option(s): ') + ', '.join(errors)}
|
Validate the options that the course author set up, and return any errors in a dict.
|
6,086
|
def get_student_item_dict(self, anonymous_user_id=None):
    item_id = self._serialize_opaque_key(self.scope_ids.usage_id)
    if hasattr(self, "xmodule_runtime"):
        course_id = self.get_course_id()
        if anonymous_user_id:
            student_id = anonymous_user_id
        else:
            student_id = self.xmodule_runtime.anonymous_student_id
    else:
        course_id = "edX/Enchantment_101/April_1"
        if self.scope_ids.user_id is None:
            student_id = ''
        else:
            student_id = unicode(self.scope_ids.user_id)
    student_item_dict = dict(student_id=student_id,
                             item_id=item_id,
                             course_id=course_id,
                             item_type='ubcpi')
    return student_item_dict
|
Create a student_item_dict from our surrounding context.
|
6,087
|
def get_answers_for_student(student_item):
    submissions = sub_api.get_submissions(student_item)
    if not submissions:
        return Answers()
    latest_submission = submissions[0]
    latest_answer_item = latest_submission.get('answer', {})
    return Answers(latest_answer_item.get(ANSWER_LIST_KEY, []))
|
Retrieve answers from the backend for a student and question.
|
6,088
|
def add_answer_for_student(student_item, vote, rationale):
    answers = get_answers_for_student(student_item)
    answers.add_answer(vote, rationale)
    sub_api.create_submission(student_item, {
        ANSWER_LIST_KEY: answers.get_answers_as_list(),
    })
|
Add an answer for a student to the backend.
|
6,089
|
def add_answer(self, vote, rationale):
    self.raw_answers.append({
        VOTE_KEY: vote,
        RATIONALE_KEY: rationale,
    })
|
Add an answer.
|
6,090
|
def createNew(cls, store, pathSegments):
    if isinstance(pathSegments, basestring):
        raise ValueError('Received %r instead of a sequence' % (pathSegments,))
    if store.dbdir is None:
        self = cls(store=store, storepath=None)
    else:
        storepath = store.newDirectory(*pathSegments)
        self = cls(store=store, storepath=storepath)
    self.open()
    self.close()
    return self
|
Create a new SubStore, allocating a new file space for it.
|
6,091
|
def createStore(self, debug, journalMode=None):
    if self.storepath is None:
        self.store._memorySubstores.append(self)
        if self.store.filesdir is None:
            filesdir = None
        else:
            filesdir = (self.store.filesdir.child("_substore_files")
                        .child(str(self.storeID))
                        .path)
        return Store(parent=self.store,
                     filesdir=filesdir,
                     idInParent=self.storeID,
                     debug=debug,
                     journalMode=journalMode)
    else:
        return Store(self.storepath.path,
                     parent=self.store,
                     idInParent=self.storeID,
                     debug=debug,
                     journalMode=journalMode)
|
Create the actual Store this SubStore represents.
|
6,092
|
def upgradeCatalog1to2(oldCatalog):
    newCatalog = oldCatalog.upgradeVersion('tag_catalog', 1, 2,
                                           tagCount=oldCatalog.tagCount)
    tags = newCatalog.store.query(Tag, Tag.catalog == newCatalog)
    tagNames = tags.getColumn("name").distinct()
    for t in tagNames:
        _TagName(store=newCatalog.store, catalog=newCatalog, name=t)
    return newCatalog
|
Create the _TagName instances which version 2 of Catalog automatically creates (for use in determining the tagNames result), but which version 1 of Catalog did not create.
|
6,093
|
def tagNames(self):
    return self.store.query(
        _TagName, _TagName.catalog == self).getColumn("name")
|
Return an iterator of unicode strings: the unique tag names which have been applied to objects in this catalog.
|
6,094
|
def tagsOf(self, obj):
    return self.store.query(
        Tag, AND(Tag.catalog == self, Tag.object == obj)).getColumn("name")
|
Return an iterator of unicode strings: the tag names which apply to the given object.
|
6,095
|
def loaded(self, oself, dbval):
    setattr(oself, self.dbunderlying, dbval)
    delattr(oself, self.underlying)
|
This method is invoked when the item is loaded from the database, and when a transaction is reverted which restores this attribute's value.
|
6,096
|
def _convertPyval(self, oself, pyval):
    if pyval is None and not self.allowNone:
        raise TypeError("attribute [%s.%s = %s()] must not be None"
                        % (self.classname, self.attrname, self.__class__.__name__))
    return self.infilter(pyval, oself, oself.store)
|
Convert a Python value to a value suitable for inserting into the database.
|
6,097
|
def _queryContainer(self, store):
    if self._subselectSQL is None:
        sql, args = self.container._sqlAndArgs('SELECT',
                                               self.container._queryTarget)
        self._subselectSQL, self._subselectArgs = sql, args
    return self._subselectSQL
|
Generate and cache the subselect SQL and its arguments. Return the subselect SQL.
|
6,098
|
def _sequenceContainer(self, store):
    if self._sequence is None:
        self._sequence = list(self.container)
        self._clause = ', '.join(['?'] * len(self._sequence))
    return self._clause
|
Smash whatever we got into a list and save the result, in case we are executed multiple times. This keeps us from tripping up over generators and the like.
|
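The clause built above is the usual one-placeholder-per-element trick for SQL IN queries; a self-contained sqlite3 sketch (the table and values are made up):

```python
import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE t (id INTEGER)')
conn.executemany('INSERT INTO t VALUES (?)', [(1,), (2,), (3,), (4,)])

values = [2, 4]
clause = ', '.join(['?'] * len(values))  # one '?' per element
rows = conn.execute('SELECT id FROM t WHERE id IN (%s)' % clause, values).fetchall()
print(rows)  # [(2,), (4,)]
```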
6,099
|
def _sequenceArgs(self, store):
    self._sequenceContainer(store)
    return [self.attribute.infilter(pyval, None, store)
            for pyval in self._sequence]
|
Filter each element of the data using the attribute type being tested for containment, and hand back the resulting list.
|