idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
56,200
def to_json(data):
    """Return *data* serialized as an indented, key-sorted JSON string.

    Objects that json cannot handle natively are encoded from their
    ``__dict__`` attribute.
    """
    def fallback(obj):
        # Used only for objects the json encoder does not know about.
        return obj.__dict__
    return json.dumps(data, default=fallback, sort_keys=True, indent=4)
Return data as a JSON string .
56,201
def convert_string(string, chars=None):
    """Replace certain characters in *string* with spaces.

    Parameters:
        string: the input text.
        chars: list of substrings to replace; defaults to
            [',', '.', '-', '/', ':', ' '].

    Returns:
        The string with every listed substring replaced by ' '.
    """
    if chars is None:
        chars = [',', '.', '-', '/', ':', ' ']
    for ch in chars:
        # str.replace is a no-op when the substring is absent, so the
        # original `if ch in string` pre-check only added an extra scan.
        string = string.replace(ch, ' ')
    return string
Remove certain characters from a string .
56,202
def convert_time ( time ) : split_time = time . split ( ) try : am_pm = split_time [ 1 ] . replace ( '.' , '' ) time_str = '{0} {1}' . format ( split_time [ 0 ] , am_pm ) except IndexError : return time try : time_obj = datetime . strptime ( time_str , '%I:%M %p' ) except ValueError : time_obj = datetime . strptime ( t...
Convert a time string into 24 - hour time .
56,203
def convert_month ( date , shorten = True , cable = True ) : month = date . split ( ) [ 0 ] . lower ( ) if 'sept' in month : shorten = False if cable else True try : if shorten : month = SHORT_MONTHS [ MONTHS . index ( month ) ] else : month = MONTHS [ SHORT_MONTHS . index ( month ) ] except ValueError : month = month ...
Replace month by shortening or lengthening it .
56,204
def convert_date(date):
    """Convert a date string into a ``datetime`` object."""
    lengthened = convert_month(date, shorten=False)
    normalized = convert_string(lengthened)
    # NOTE(review): the format has its dashes stripped ('' not ' ') while the
    # string has separators replaced by spaces — presumably DATE_FMT already
    # contains spaces; verify against DATE_FMT's definition.
    fmt = DATE_FMT.replace('-', '')
    return datetime.strptime(normalized, fmt)
Convert string to datetime object .
56,205
def date_in_range(date1, date2, range):
    """Return True if the two date strings are at most *range* days apart."""
    delta = convert_date(date2) - convert_date(date1)
    return delta.days <= range
Check if two date objects are within a specific range
56,206
def inc_date(date_obj, num, date_fmt):
    """Advance *date_obj* by *num* days and return it formatted as a string."""
    shifted = date_obj + timedelta(days=num)
    return shifted.strftime(date_fmt)
Increment the date by a certain number and return date object . as the specific string format .
56,207
def get_soup(url):
    """Fetch *url* and return a BeautifulSoup of the page, or None on 404."""
    response = requests.get(url, stream=True, headers=HEADERS)
    if response.status_code == 404:
        return None
    return BeautifulSoup(response.content, 'html.parser')
Request the page and return the soup .
56,208
def match_list ( query_list , string ) : match = False index = 0 string = ' ' . join ( filter_stopwords ( string ) ) if not isinstance ( query_list , list ) : query_list = [ query_list ] while index < len ( query_list ) : query = query_list [ index ] words_query = filter_stopwords ( query ) match = all ( word in string...
Return True if all words in a word list are in the string .
56,209
def filter_stopwords(phrase):
    """Lower-case the words of *phrase* and drop common stop words.

    Parameters:
        phrase: a string (split on whitespace) or a list of words.

    Returns:
        List of lower-cased words with stop words removed.
    """
    if not isinstance(phrase, list):
        phrase = phrase.split()
    # Set membership is O(1); also lower-case each word once instead of
    # twice as the original did.
    stopwords = {'the', 'a', 'in', 'to'}
    lowered = (word.lower() for word in phrase)
    return [word for word in lowered if word not in stopwords]
Filter out stop words and return as a list of words
56,210
def safe_unicode(string):
    """On Python 2, normalize the right single quote and return UTF-8 bytes.

    On Python 3 the string is returned unchanged.
    """
    if PY3:
        return string
    # Python 2: swap the typographic apostrophe for ASCII before encoding.
    return string.replace(u'\u2019', "'").encode('utf-8')
If Python 2 replace non - ascii characters and return encoded string .
56,211
def get_strings(soup, tag):
    """Collect the non-empty string children of every *tag* element in *soup*."""
    return [element.string for element in soup.find_all(tag) if element.string]
Get all the string children from an html tag .
56,212
def cli ( ctx , given_name , demo ) : path = None if path is None : path = ctx . home bubble_file_name = path + '/.bubble' config_file = path + '/config/config.yaml' if os . path . exists ( bubble_file_name ) and os . path . isfile ( bubble_file_name ) : ctx . say_yellow ( 'There is already a bubble present, will not i...
Initializes a bubble .
56,213
def _bld_op(self, op, num, **kwargs):
    """Record a pandas operator call on *self* under attribute name *op*."""
    kwargs['other'] = num
    spec = {'mtype': pab, 'kwargs': kwargs}
    setattr(self, op, spec)
Implements a pandas operator .
56,214
def _bld_pab_generic(self, funcname, **kwargs):
    """Record a generic attribute-based pandas function call on *self*."""
    setattr(self, funcname, {'mtype': pab, 'kwargs': kwargs})
implements a generic version of an attribute based pandas function
56,215
def _bld_pnab_generic(self, funcname, **kwargs):
    """Record a generic non-attribute-based pandas function call on *self*."""
    setattr(self, funcname, {'mtype': pnab, 'kwargs': kwargs})
Implements a generic version of a non - attribute based pandas function .
56,216
def get(self, request, *args, **kwargs):
    """Return the current shopping cart's products as a JSON response."""
    only_public = request.GET.get('onlypublic', True)
    cart = ShoppingCartProxy(request)
    return JsonResponse(cart.get_products(onlypublic=only_public))
List all products in the shopping cart
56,217
def post ( self , request , * args , ** kwargs ) : POST = json . loads ( request . body . decode ( 'utf-8' ) ) if 'product_pk' in POST and 'quantity' in POST : cart = ShoppingCartProxy ( request ) cart . add ( product_pk = int ( POST [ 'product_pk' ] ) , quantity = int ( POST [ 'quantity' ] ) ) return JsonResponse ( ca...
Adds new product to the current shopping cart
56,218
def register_signal(alias: str, signal: pyqtSignal):
    """Register *signal* at the dispatcher under *alias*.

    Raises SignalDispatcherError when the alias is already taken.
    """
    already_taken = SignalDispatcher.signal_alias_exists(alias)
    if already_taken:
        raise SignalDispatcherError('Alias "' + alias + '" for signal already exists!')
    SignalDispatcher.signals[alias] = signal
Used to register signal at the dispatcher . Note that you can not use alias that already exists .
56,219
def register_handler(alias: str, handler: callable):
    """Register *handler* at the dispatcher under *alias*.

    Appends to the existing handler list, creating it on first use.
    """
    # setdefault replaces the original get/None-check/append dance with a
    # single dict lookup.
    SignalDispatcher.handlers.setdefault(alias, []).append(handler)
Used to register handler at the dispatcher .
56,220
def dispatch():
    """Connect every registered signal to all handlers sharing its alias."""
    for alias, signal in SignalDispatcher.signals.items():
        handlers = SignalDispatcher.handlers.get(alias)
        # Guard against a missing handler list: the original called
        # handlers.__len__(), which raises AttributeError when no handler
        # was ever registered for this alias (get() returns None).
        if signal is None or not handlers:
            continue
        for handler in handlers:
            signal.connect(handler)
This methods runs the wheel . It is used to connect signal with their handlers based on the aliases .
56,221
def _get_rev ( self , fpath ) : rev = None try : cmd = [ "git" , "log" , "-n1" , "--pretty=format:\"%h\"" , fpath ] rev = Popen ( cmd , stdout = PIPE , stderr = PIPE ) . communicate ( ) [ 0 ] except : pass if not rev : try : cmd = [ "svn" , "info" , fpath ] svninfo = Popen ( cmd , stdout = PIPE , stderr = PIPE ) . stdo...
Get an SCM version number . Try svn and git .
56,222
def execute_migrations ( self , show_traceback = True ) : all_migrations = get_pending_migrations ( self . path , self . databases ) if not len ( all_migrations ) : sys . stdout . write ( "There are no migrations to apply.\n" ) for db , migrations in all_migrations . iteritems ( ) : connection = connections [ db ] curs...
Executes all pending migrations across all capable databases
56,223
def handle ( self , * args , ** options ) : self . do_list = options . get ( "do_list" ) self . do_execute = options . get ( "do_execute" ) self . do_create = options . get ( "do_create" ) self . do_create_all = options . get ( "do_create_all" ) self . do_seed = options . get ( "do_seed" ) self . load_initial_data = op...
Upgrades the database . Executes SQL scripts that haven't already been applied to the database .
56,224
def plantuml ( desc ) : classes , relations , inherits = desc result = [ '@startuml' , 'skinparam defaultFontName Courier' , ] for cls in classes : class_desc = [ ] class_desc += [ ( i [ 1 ] , i [ 0 ] ) for i in cls [ 'cols' ] ] class_desc += [ ( '+' , i ) for i in cls [ 'props' ] ] class_desc += [ ( '%s()' % i , '' ) ...
Generate plantuml class diagram
56,225
def is_reference_target(resource, rtype, label):
    """Return True if *resource* has reference type *rtype* carrying *label*.

    The original implicitly returned None when the rtype was absent; this
    version always returns a proper bool.
    """
    prop = resource.props.references.get(rtype, False)
    if not prop:
        return False
    return label in prop
Return true if the resource has this rtype with this label
56,226
def get_sources(self, resources):
    """Return the resources that reference this target's rtype and label."""
    rtype = self.rtype
    label = self.props.label
    matches = []
    for resource in resources.values():
        if is_reference_target(resource, rtype, label):
            matches.append(resource)
    return matches
Filter resources based on which have this reference
56,227
def setup ( app : Sphinx ) : importscan . scan ( plugins ) dectate . commit ( kb ) app . add_config_value ( 'kaybee_settings' , KaybeeSettings ( ) , 'html' ) bridge = 'kaybee.plugins.postrenderer.config.KaybeeBridge' app . config . template_bridge = bridge app . connect ( 'env-updated' , flush_everything ) app . connec...
Initialize Kaybee as a Sphinx extension
56,228
def loadInstance ( self ) : if self . _loaded : return self . _loaded = True module_path = self . modulePath ( ) package = projex . packageFromPath ( module_path ) path = os . path . normpath ( projex . packageRootPath ( module_path ) ) if path in sys . path : sys . path . remove ( path ) sys . path . insert ( 0 , path...
Loads the plugin from the proxy information that was created from the registry file .
56,229
def clean_resource_json ( resource_json ) : for a in ( 'parent_docname' , 'parent' , 'template' , 'repr' , 'series' ) : if a in resource_json : del resource_json [ a ] props = resource_json [ 'props' ] for prop in ( 'acquireds' , 'style' , 'in_nav' , 'nav_title' , 'weight' , 'auto_excerpt' ) : if prop in props : del pr...
The catalog wants to be smaller , so let's drop some stuff .
56,230
def get ( self , url , params = None , cache_cb = None , ** kwargs ) : if self . use_random_user_agent : headers = kwargs . get ( "headers" , dict ( ) ) headers . update ( { Headers . UserAgent . KEY : Headers . UserAgent . random ( ) } ) kwargs [ "headers" ] = headers url = add_params ( url , params ) cache_consumed ,...
Make http get request .
56,231
def download ( self , url , dst , params = None , cache_cb = None , overwrite = False , stream = False , minimal_size = - 1 , maximum_size = 1024 ** 6 , ** kwargs ) : response = self . get ( url , params = params , cache_cb = cache_cb , stream = stream , ** kwargs ) if not overwrite : if os . path . exists ( dst ) : ra...
Download binary content to destination .
56,232
def option ( * args , ** kwargs ) : def decorate_sub_command ( method ) : if not hasattr ( method , "optparser" ) : method . optparser = SubCmdOptionParser ( ) method . optparser . add_option ( * args , ** kwargs ) return method def decorate_class ( klass ) : assert _forgiving_issubclass ( klass , Cmdln ) _inherit_attr...
Decorator to add an option to the optparser argument of a Cmdln subcommand
56,233
def _inherit_attr ( klass , attr , default , cp ) : if attr not in klass . __dict__ : if hasattr ( klass , attr ) : value = cp ( getattr ( klass , attr ) ) else : value = default setattr ( klass , attr , value )
Inherit the attribute from the base class
56,234
def _forgiving_issubclass(derived_class, base_class):
    """issubclass() variant that returns False instead of raising on non-classes."""
    if type(derived_class) is not ClassType:
        return False
    if type(base_class) is not ClassType:
        return False
    return issubclass(derived_class, base_class)
Forgiving version of issubclass
56,235
def timecalMs1DataMedian ( msrunContainer , specfile , calibrationData , minDataPoints = 50 , deviationKey = 'relDev' ) : corrData = dict ( ) _posDict = dict ( ) pos = 0 for si in msrunContainer . getItems ( specfiles = specfile , sort = 'rt' , selector = lambda si : si . msLevel == 1 ) : corrData [ si . id ] = { 'cali...
Generates a calibration value for each MS1 scan by calculating the median deviation
56,236
def get_genericpage(cls, kb_app):
    """Return the configured genericpage class, or the default Genericpage."""
    query = dectate.Query('genericpage')
    registered = sorted(query(kb_app), key=lambda entry: entry[0].order)
    if registered:
        # Lowest-order registration wins.
        return registered[0][1]
    return Genericpage
Return the one class if configured otherwise default
56,237
def cli(ctx):
    """Show the man page packed inside the bubble tool."""
    manfile = bubble_lib_dir + os.sep + 'extras' + os.sep + 'Bubble.1.gz'
    try:
        return subprocess.call(["/usr/bin/man", manfile])
    except Exception:
        print('cannot run man with bubble man page')
        print('you can always have a look at: ' + manfile)
Shows the man page packed inside the bubble tool
56,238
def _fetch_dimensions(self, dataset):
    """Yield the parent's dimensions, overriding datatype/dialect for regions."""
    for dim in super(SCB, self)._fetch_dimensions(dataset):
        if dim.id != "Region":
            yield dim
        else:
            # Regions get the skatteverket dialect and the region datatype.
            yield Dimension(dim.id,
                            datatype="region",
                            dialect="skatteverket",
                            label=dim.label)
We override this method just to set the correct datatype and dialect for regions .
56,239
def call(self, func, key, timeout=None):
    """Return the cached value for *key*, computing and storing it on a miss.

    The NONE_RESULT sentinel distinguishes a cached None from a cache miss.
    """
    cached = self.get(key)
    if cached == NONE_RESULT:
        return None
    if cached is not None:
        return cached
    value = func()
    stored = NONE_RESULT if value is None else value
    self.set(key, stored, timeout)
    return value
Wraps a function call with cache .
56,240
def map ( self , key_pattern , func , all_args , timeout = None ) : results = [ ] keys = [ make_key ( key_pattern , func , args , { } ) for args in all_args ] cached = dict ( zip ( keys , self . get_many ( keys ) ) ) cache_to_add = { } for key , args in zip ( keys , all_args ) : val = cached [ key ] if val is None : va...
Cache return value of multiple calls .
56,241
async def _window_open(self, stream_id: int):
    """Block until the flow-control window of *stream_id* is open."""
    return await self._get_stream(stream_id).window_open.wait()
Wait until the identified stream s flow control window is open .
56,242
async def send_data ( self , stream_id : int , data : bytes , end_stream : bool = False , ) : if self . closed : raise ConnectionClosedError stream = self . _get_stream ( stream_id ) if stream . closed : raise StreamClosedError ( stream_id ) remaining = data while len ( remaining ) > 0 : await asyncio . gather ( self ....
Send data respecting the receiver s flow control instructions . If the provided data is larger than the connection s maximum outbound frame size it will be broken into several frames as appropriate .
56,243
async def read_data(self, stream_id: int) -> bytes:
    """Concatenate every data frame of *stream_id* until the peer ends it.

    Never returns if the remote peer never closes the stream.
    """
    chunks = []
    async for frame in self.stream_frames(stream_id):
        chunks.append(frame)
    return b''.join(chunks)
Read data from the specified stream until it is closed by the remote peer . If the stream is never ended this never returns .
56,244
async def read_frame(self, stream_id: int) -> bytes:
    """Read one data frame from *stream_id*, waiting if none is buffered."""
    stream = self._get_stream(stream_id)
    frame = await stream.read_frame()
    flow_len = frame.flow_controlled_length
    if flow_len > 0:
        # Acknowledge consumption so the peer's flow-control window reopens.
        self._acknowledge_data(flow_len, stream_id)
    return frame.data
Read a single frame of data from the specified stream waiting until frames are available if none are present in the local buffer . If the stream is closed and all buffered frames have been consumed raises a StreamConsumedError .
56,245
async def get_pushed_stream_ids ( self , parent_stream_id : int ) -> List [ int ] : if parent_stream_id not in self . _streams : logger . error ( f'Parent stream {parent_stream_id} unknown to this connection' ) raise NoSuchStreamError ( parent_stream_id ) parent = self . _get_stream ( parent_stream_id ) await parent . ...
Return a list of all streams pushed by the remote peer that are children of the specified stream . If no streams have been pushed when this method is called waits until at least one stream has been pushed .
56,246
def convertMzml(mzmlPath, outputDirectory=None):
    """Import an mzML file and save it as a MsrunContainer file.

    When *outputDirectory* is None, the mzML file's own directory is used.
    """
    if outputDirectory is None:
        outputDirectory = os.path.dirname(mzmlPath)
    container = importMzml(mzmlPath)
    container.setPath(outputDirectory)
    container.save()
Imports an mzml file and converts it to a MsrunContainer file
56,247
def prepareSiiImport ( siiContainer , specfile , path , qcAttr , qcLargerBetter , qcCutoff , rankAttr , rankLargerBetter ) : if specfile not in siiContainer . info : siiContainer . addSpecfile ( specfile , path ) else : raise Exception ( '...' ) siiContainer . info [ specfile ] [ 'qcAttr' ] = qcAttr siiContainer . info...
Prepares the siiContainer for the import of peptide spectrum matching results . Adds entries to siiContainer . container and to siiContainer . info .
56,248
def importPeptideFeatures ( fiContainer , filelocation , specfile ) : if not os . path . isfile ( filelocation ) : warnings . warn ( 'The specified file does not exist %s' % ( filelocation , ) ) return None elif ( not filelocation . lower ( ) . endswith ( '.featurexml' ) and not filelocation . lower ( ) . endswith ( '....
Import peptide features from a featureXml file as generated for example by the OpenMS node featureFinderCentroided or a features . tsv file by the Dinosaur command line tool .
56,249
def _importDinosaurTsv ( filelocation ) : with io . open ( filelocation , 'r' , encoding = 'utf-8' ) as openFile : lines = openFile . readlines ( ) headerDict = dict ( [ [ y , x ] for ( x , y ) in enumerate ( lines [ 0 ] . strip ( ) . split ( '\t' ) ) ] ) featureDict = dict ( ) for linePos , line in enumerate ( lines [...
Reads a Dinosaur tsv file .
56,250
def rst_to_html(input_string: str) -> str:
    """Render a reStructuredText string to an HTML body via docutils."""
    settings = {
        'input_encoding': 'unicode',
        'doctitle_xform': True,
        'initial_header_level': 1,
    }
    parts = publish_parts(writer_name='html',
                          source=input_string,
                          settings_overrides=settings)
    return parts['html_body']
Given a string of RST use docutils to generate html
56,251
def get_rst_title(rst_doc: Node) -> Optional[Any]:
    """Return the text of the first docutils title in *rst_doc*, or None."""
    return next((t.astext() for t in rst_doc.traverse(nodes.title)), None)
Given some RST extract what docutils thinks is the title
56,252
def get_rst_excerpt(rst_doc: document, paragraphs: int = 1) -> str:
    """Return the first *paragraphs* paragraphs of *rst_doc*, space-joined."""
    texts = []
    for count, node in enumerate(rst_doc.traverse(paragraph), start=1):
        texts.append(node.astext())
        if count == paragraphs:
            break
    return ' '.join(texts)
Given rst parse and return a portion
56,253
def requires_password_auth(fn):
    """Decorator for HAPI methods requiring password authentication.

    Switches the instance's auth context to password mode before delegating
    to the wrapped method.
    """
    import functools

    # functools.wraps preserves the wrapped method's name and docstring,
    # which the original wrapper silently discarded.
    @functools.wraps(fn)
    def wrapper(self, *args, **kwargs):
        self.auth_context = HAPI.auth_context_password
        return fn(self, *args, **kwargs)
    return wrapper
Decorator for HAPI methods that requires the instance to be authenticated with a password
56,254
def requires_api_auth(fn):
    """Decorator for HAPI methods requiring HAPI-token authentication.

    Switches the instance's auth context to HAPI-token mode before delegating
    to the wrapped method.
    """
    import functools

    # functools.wraps preserves the wrapped method's name and docstring,
    # which the original wrapper silently discarded.
    @functools.wraps(fn)
    def wrapper(self, *args, **kwargs):
        self.auth_context = HAPI.auth_context_hapi
        return fn(self, *args, **kwargs)
    return wrapper
Decorator for HAPI methods that requires the instance to be authenticated with a HAPI token
56,255
def parse ( response ) : tokens = { r [ 0 ] : r [ 1 ] for r in [ r . split ( '=' ) for r in response . split ( "&" ) ] } if 'dummy' in tokens : del tokens [ 'dummy' ] if re . match ( '\D\d+$' , tokens . keys ( ) [ 0 ] ) : set_tokens = [ ] for key , value in tokens : key = re . match ( '^(.+\D)(\d+)$' , key ) if key is ...
Parse a postdata - style response format from the API into usable data
56,256
def init_chain ( self ) : if not self . _hasinit : self . _hasinit = True self . _devices = [ ] self . jtag_enable ( ) while True : idcode = self . rw_dr ( bitcount = 32 , read = True , lastbit = False ) ( ) if idcode in NULL_ID_CODES : break dev = self . initialize_device_from_id ( self , idcode ) if self . _debug : p...
Autodetect the devices attached to the Controller and initialize a JTAGDevice for each .
56,257
def _UserUpdateConfigValue ( self , configKey , strDescriptor , isDir = True , dbConfigValue = None ) : newConfigValue = None if dbConfigValue is None : prompt = "Enter new {0} or 'x' to exit: " . format ( strDescriptor ) else : prompt = "Enter 'y' to use existing {0}, enter a new {0} or 'x' to exit: " . format ( strDe...
Allow user to set or update config values in the database table . This is always called if no valid entry exists in the table already .
56,258
def _GetConfigValue ( self , configKey , strDescriptor , isDir = True ) : goodlogging . Log . Info ( "CLEAR" , "Loading {0} from database:" . format ( strDescriptor ) ) goodlogging . Log . IncreaseIndent ( ) configValue = self . _db . GetConfigValue ( configKey ) if configValue is None : goodlogging . Log . Info ( "CLE...
Get configuration value from database table . If no value found user will be prompted to enter one .
56,259
def _UserUpdateSupportedFormats ( self , origFormatList = [ ] ) : formatList = list ( origFormatList ) inputDone = None while inputDone is None : prompt = "Enter new format (e.g. .mp4, .avi), " "'r' to reset format list, " "'f' to finish or " "'x' to exit: " response = goodlogging . Log . Input ( "CLEAR" , prompt ) if ...
Add supported formats to database table . Always called if the database table is empty .
56,260
def _GetSupportedFormats ( self ) : goodlogging . Log . Info ( "CLEAR" , "Loading supported formats from database:" ) goodlogging . Log . IncreaseIndent ( ) formatList = self . _db . GetSupportedFormats ( ) if formatList is None : goodlogging . Log . Info ( "CLEAR" , "No supported formats exist in database" ) formatLis...
Get supported format values from database table . If no values found user will be prompted to enter values for this table .
56,261
def _UserUpdateIgnoredDirs ( self , origIgnoredDirs = [ ] ) : ignoredDirs = list ( origIgnoredDirs ) inputDone = None while inputDone is None : prompt = "Enter new directory to ignore (e.g. DONE), " "'r' to reset directory list, " "'f' to finish or " "'x' to exit: " response = goodlogging . Log . Input ( "CLEAR" , prom...
Add ignored directories to database table . Always called if the database table is empty .
56,262
def _GetIgnoredDirs ( self ) : goodlogging . Log . Info ( "CLEAR" , "Loading ignored directories from database:" ) goodlogging . Log . IncreaseIndent ( ) ignoredDirs = self . _db . GetIgnoredDirs ( ) if ignoredDirs is None : goodlogging . Log . Info ( "CLEAR" , "No ignored directories exist in database" ) ignoredDirs =...
Get ignored directories values from database table . If no values found user will be prompted to enter values for this table .
56,263
def _GetDatabaseConfig ( self ) : goodlogging . Log . Seperator ( ) goodlogging . Log . Info ( "CLEAR" , "Getting configuration variables..." ) goodlogging . Log . IncreaseIndent ( ) if self . _sourceDir is None : self . _sourceDir = self . _GetConfigValue ( 'SourceDir' , 'source directory' ) if self . _inPlaceRename i...
Get all configuration from database .
56,264
def _GetSupportedFilesInDir ( self , fileDir , fileList , supportedFormatList , ignoreDirList ) : goodlogging . Log . Info ( "CLEAR" , "Parsing file directory: {0}" . format ( fileDir ) ) if os . path . isdir ( fileDir ) is True : for globPath in glob . glob ( os . path . join ( fileDir , '*' ) ) : if util . FileExtens...
Recursively get all supported files given a root search directory .
56,265
def Run ( self ) : self . _GetArgs ( ) goodlogging . Log . Info ( "CLEAR" , "Using database: {0}" . format ( self . _databasePath ) ) self . _db = database . RenamerDB ( self . _databasePath ) if self . _dbPrint or self . _dbUpdate : goodlogging . Log . Seperator ( ) self . _db . PrintAllTables ( ) if self . _dbUpdate ...
Main entry point for ClearManager class .
56,266
def flush ( self ) : self . stages = [ ] self . stagenames = [ ] if not self . queue : return if self . print_statistics : print ( "LEN OF QUENE" , len ( self ) ) t = time ( ) if self . _chain . _collect_compiler_artifacts : self . _compile ( debug = True , stages = self . stages , stagenames = self . stagenames ) else...
Force the queue of Primitives to compile , execute on the Controller , and fulfill promises with the data returned .
56,267
def step_impl(context):
    """Assert that the step's expected text matches the captured log output."""
    expected_lines = context.text.split('\n')
    actual_lines = context.output
    assert len(expected_lines) == len(actual_lines)
    for expected, actual in zip(expected_lines, actual_lines):
        print('--\n\texpected: {}\n\tactual: {}'.format(expected, actual))
        assert expected == actual
Compares text as written to the log output
56,268
def _ParseShowList ( self , checkOnly = False ) : showTitleList = [ ] showIDList = [ ] csvReader = csv . reader ( self . _allShowList . splitlines ( ) ) for rowCnt , row in enumerate ( csvReader ) : if rowCnt == 0 : for colCnt , column in enumerate ( row ) : if column == 'title' : titleIndex = colCnt if column == self ...
Read self . _allShowList as csv file and make list of titles and IDs .
56,269
def _GetAllShowList ( self ) : today = datetime . date . today ( ) . strftime ( "%Y%m%d" ) saveFile = '_epguides_' + today + '.csv' saveFilePath = os . path . join ( self . _saveDir , saveFile ) if os . path . exists ( saveFilePath ) : with open ( saveFilePath , 'r' ) as allShowsFile : self . _allShowList = allShowsFil...
Populates self . _allShowList with the epguides all show info .
56,270
def _GetShowID ( self , showName ) : self . _GetTitleList ( ) self . _GetIDList ( ) for index , showTitle in enumerate ( self . _showTitleList ) : if showName == showTitle : return self . _showIDList [ index ] return None
Get epguides show id for a given show name .
56,271
def _ExtractDataFromShowHtml ( self , html ) : htmlLines = html . splitlines ( ) for count , line in enumerate ( htmlLines ) : if line . strip ( ) == r'<pre>' : startLine = count + 1 if line . strip ( ) == r'</pre>' : endLine = count try : dataList = htmlLines [ startLine : endLine ] dataString = '\n' . join ( dataList...
Extracts csv show data from epguides html source .
56,272
def _GetEpisodeName ( self , showID , season , episode ) : showInfo = csv . reader ( self . _showInfoDict [ showID ] . splitlines ( ) ) for rowCnt , row in enumerate ( showInfo ) : if rowCnt == 0 : for colCnt , column in enumerate ( row ) : if column == 'season' : seasonIndex = colCnt if column == 'episode' : episodeIn...
Get episode name from epguides show info .
56,273
def ShowNameLookUp(self, string):
    """Return the best-matching epguides show title for *string*.

    Generates the title list first if it does not exist yet.
    """
    message = "Looking up show name match for string '{0}' in guide".format(string)
    goodlogging.Log.Info("EPGUIDES", message, verbosity=self.logVerbosity)
    self._GetTitleList()
    return util.GetBestMatch(string, self._showTitleList)
Attempts to find the best match for the given string in the list of epguides show titles . If this list has not previously been generated it will be generated first .
56,274
def EpisodeNameLookUp ( self , showName , season , episode ) : goodlogging . Log . Info ( "EPGUIDE" , "Looking up episode name for {0} S{1}E{2}" . format ( showName , season , episode ) , verbosity = self . logVerbosity ) goodlogging . Log . IncreaseIndent ( ) showID = self . _GetShowID ( showName ) if showID is not No...
Get the episode name corresponding to the given show name , season number and episode number .
56,275
def private_path(self):
    """Return (creating it if needed) a directory for arbitrary private data."""
    path = os.path.join(self.path, '.hg', '.private')
    try:
        os.mkdir(path)
    except OSError as err:
        # An already-existing directory is fine; anything else propagates.
        if err.errno != errno.EEXIST:
            raise
    return path
Get the path to a directory which can be used to store arbitrary data
56,276
def bookmarks ( self ) : cmd = [ HG , 'bookmarks' ] output = self . _command ( cmd ) . decode ( self . encoding , 'replace' ) if output . startswith ( 'no bookmarks set' ) : return [ ] results = [ ] for line in output . splitlines ( ) : m = bookmarks_rx . match ( line ) assert m , 'unexpected output: ' + line results ....
Get list of bookmarks
56,277
def content(self):
    """Lazily read and cache the file contents."""
    if self._content:
        return self._content
    self._content = self._read()
    return self._content
Get the file contents .
56,278
def config ( self ) : conf = config . Configuration ( ) for namespace in self . namespaces : if not hasattr ( conf , namespace ) : if not self . _strict : continue raise exc . NamespaceNotRegistered ( "The namespace {0} is not registered." . format ( namespace ) ) name = getattr ( conf , namespace ) for item , value in...
Get a Configuration object from the file contents .
56,279
def _read(self):
    """Open the file at self.path and return its contents as unicode."""
    with open(self.path, 'r') as handle:
        return compat.unicode(handle.read())
Open the file and return its contents .
56,280
async def ask(self, body, quick_replies=None, options=None, user=None):
    """Ask the user a question with optional predefined quick replies."""
    await self.send_text_message_to_all_interfaces(
        recipient=user,
        text=body,
        quick_replies=quick_replies,
        options=options,
    )
    return any.Any()
simple ask with predefined quick replies
56,281
async def say(self, body, user, options):
    """Send a plain text message to *user* over every interface."""
    result = await self.send_text_message_to_all_interfaces(
        recipient=user, text=body, options=options)
    return result
say something to user
56,282
def connect(self, protocolFactory):
    """Start the process and connect a protocol from *protocolFactory* to it."""
    d = self._startProcess()
    d.addCallback(self._connectRelay, protocolFactory)
    d.addCallback(self._startRelay)
    return d
Starts a process and connect a protocol to it .
56,283
def _startProcess(self):
    """Launch the relayed process via the inductor.

    Returns a Deferred that fires once the process is connected.
    """
    connected = defer.Deferred()
    protocol = RelayProcessProtocol(connected)
    self.inductor.execute(protocol, *self.inductorArgs)
    return connected
Use the inductor to start the process we want to relay data from .
56,284
def _connectRelay(self, process, protocolFactory):
    """Set up and connect the protocol we want to relay to *process*.

    Called automatically once the process has started; returns a Deferred
    firing with the connected protocol.
    """
    try:
        wf = _WrappingFactory(protocolFactory)
        connector = RelayConnector(process, wf, self.timeout,
                                   self.inductor.reactor)
        connector.connect()
    except Exception:
        # Narrowed from a bare `except:`, which also swallowed SystemExit
        # and KeyboardInterrupt; defer.fail() still captures the active
        # exception for the errback chain.
        return defer.fail()
    return wf._onConnection
Set up and connect the protocol we want to relay to the process . This method is automatically called when the process is started and we are ready to relay through it .
56,285
def _startRelay ( self , client ) : process = client . transport . connector . process for _ , data in process . data : client . dataReceived ( data ) process . protocol = client @ process . _endedDeferred . addBoth def stopRelay ( reason ) : relay = client . transport relay . loseConnection ( reason ) connector = rela...
Start relaying data between the process and the protocol . This method is called when the protocol is connected .
56,286
def connectRelay(self):
    """Build the target protocol and attach it to this relay transport."""
    protocol = self.connector.buildProtocol(None)
    self.protocol = protocol
    self.connected = True
    protocol.makeConnection(self)
Builds the target protocol and connects it to the relay transport .
56,287
def childDataReceived(self, childFD, data):
    """Forward child output to the protocol, buffering until one attaches."""
    protocol = getattr(self, 'protocol', None)
    if protocol:
        protocol.dataReceived(data)
        return
    # No protocol yet: remember the data (with its fd) for later replay.
    self.data.append((childFD, data))
Relay data received on any file descriptor to the process
56,288
def publish(self, user, provider, obj, comment, **kwargs):
    """Publish *obj* with *comment* through the user's *provider* backend."""
    social_user = self._get_social_user(user, provider)
    backend = self.get_backend(social_user, provider, context=kwargs)
    return backend.publish(obj, comment)
user - django User or UserSocialAuth instance provider - name of publisher provider obj - sharing object comment - string
56,289
def check(self, user, provider, permission, **kwargs):
    """Return whether the user's *provider* backend grants *permission*."""
    try:
        social_user = self._get_social_user(user, provider)
        if not social_user:
            return False
    except SocialUserDoesNotExist:
        return False
    backend = self.get_backend(social_user, provider, context=kwargs)
    return backend.check(permission)
user - django User or UserSocialAuth instance provider - name of publisher provider permission - if backend maintains check permissions vk - binary mask in int format facebook - scope string
56,290
def recognize_byte ( self , image , timeout = 10 ) : result = [ ] alpr = subprocess . Popen ( self . _cmd , stdin = subprocess . PIPE , stdout = subprocess . PIPE , stderr = subprocess . DEVNULL ) try : stdout , stderr = alpr . communicate ( input = image , timeout = 10 ) stdout = io . StringIO ( str ( stdout , 'utf-8'...
Process a byte image buffer .
56,291
def finished(finished_status, update_interval, table, status_column, edit_at_column):
    """Build a sqlalchemy select for all finished, recently-edited tasks."""
    conditions = [
        status_column >= finished_status,
        edit_at_column >= x_seconds_before_now(update_interval),
    ]
    return select([table]).where(and_(*conditions))
Create text sql statement query for sqlalchemy that getting all finished task .
56,292
def unfinished(finished_status, update_interval, table, status_column, edit_at_column):
    """Build a sqlalchemy select for all unfinished or stale tasks."""
    conditions = [
        status_column < finished_status,
        edit_at_column < x_seconds_before_now(update_interval),
    ]
    return select([table]).where(or_(*conditions))
Create text sql statement query for sqlalchemy that getting all unfinished task .
56,293
def find_nearest ( x , x0 ) -> Tuple [ int , Any ] : x = np . asanyarray ( x ) x0 = np . atleast_1d ( x0 ) if x . size == 0 or x0 . size == 0 : raise ValueError ( 'empty input(s)' ) if x0 . ndim not in ( 0 , 1 ) : raise ValueError ( '2-D x0 not handled yet' ) ind = np . empty_like ( x0 , dtype = int ) for i , xi in enu...
This find_nearest function does NOT assume sorted input
56,294
def ensure_context_attribute_exists(context, name, default_value=None):
    """Create attribute *name* on the behave context if it is missing."""
    if hasattr(context, name):
        return
    setattr(context, name, default_value)
Ensure a behave resource exists as attribute in the behave context . If this is not the case the attribute is created by using the default_value .
56,295
def ensure_workdir_exists(context):
    """Ensure the work directory exists and record its path on the context."""
    ensure_context_attribute_exists(context, "workdir", None)
    # Fall back to the default WORKDIR location when none is set.
    context.workdir = context.workdir or os.path.abspath(WORKDIR)
    pathutil.ensure_directory_exists(context.workdir)
Ensures that the work directory exists . In addition the location of the workdir is stored as attribute in the context object .
56,296
def del_by_idx(tree, idxs):
    """Delete the entry reached by numeric indexes *idxs* through the subtree lists.

    Empty subtrees left behind along the path are pruned.
    """
    if not idxs:
        # Reached the target node: clear it.
        tree['item'] = None
        tree['subtrees'] = []
        return
    head, rest = idxs[0], idxs[1:]
    child = tree['subtrees'][head][1]
    del_by_idx(child, rest)
    if not child['subtrees']:
        # Prune the now-empty branch.
        del tree['subtrees'][head]
Delete a key entry based on numerical indexes into subtree lists .
56,297
def find_in_tree ( tree , key , perfect = False ) : if len ( key ) == 0 : if tree [ 'item' ] is not None : return tree [ 'item' ] , ( ) else : for i in range ( len ( tree [ 'subtrees' ] ) ) : if not perfect and tree [ 'subtrees' ] [ i ] [ 0 ] == '*' : item , trace = find_in_tree ( tree [ 'subtrees' ] [ i ] [ 1 ] , ( ) ...
Helper to perform find in dictionary tree .
56,298
def find(self, key, perfect=False):
    """Find *key* in the tree, matching wildcards unless *perfect* is set.

    Returns the stored value along with the index path to it; raises
    KeyError when the key path is absent.
    """
    return find_in_tree(self.root, key, perfect=perfect)
Find a key path in the tree matching wildcards . Return value for key along with index path through subtree lists to the result . Throw KeyError if the key path doesn t exist in the tree .
56,299
def _purge_unreachable(self, key):
    """Remove every stored path dominated by *key* before inserting it."""
    doomed = [path for path in self if dominates(key, path)]
    for path in doomed:
        _, idxs = find_in_tree(self.root, path, perfect=True)
        del_by_idx(self.root, idxs)
Purge unreachable dominated key paths before inserting a new key path .