idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
58,700
def set_day(self, day: int) -> datetime:
    """Replace the day component of the current value and return it."""
    updated = self.value.replace(day=day)
    self.value = updated
    return updated
Sets the day value
58,701
def set_value(self, value: datetime) -> None:
    """Set the current value.

    Raises:
        TypeError: if *value* is not a datetime instance.
    """
    # The original used `assert isinstance(...)`, which is stripped under
    # `python -O`; validate explicitly instead.
    if not isinstance(value, datetime):
        raise TypeError('value must be a datetime instance, got %r' % type(value))
    self.value = value
Sets the current value
58,702
def start_of_day(self) -> datetime:
    """Truncate the current value to midnight and return it."""
    current = self.value
    self.value = datetime(current.year, current.month, current.day)
    return self.value
Returns start of day
58,703
def subtract_days(self, days: int) -> datetime:
    """Shift the current value back by *days* days and return it."""
    delta = relativedelta(days=days)
    self.value -= delta
    return self.value
Subtracts a number of days from the current value
58,704
def subtract_weeks(self, weeks: int) -> datetime:
    """Shift the current value back by *weeks* weeks and return it."""
    delta = timedelta(weeks=weeks)
    self.value -= delta
    return self.value
Subtracts number of weeks from the current value
58,705
def subtract_months(self, months: int) -> datetime:
    """Shift the current value back by *months* months and return it."""
    delta = relativedelta(months=months)
    self.value -= delta
    return self.value
Subtracts a number of months from the current value
58,706
def yesterday(self) -> datetime:
    """Set the current value to this time yesterday and return it."""
    one_day = timedelta(days=1)
    self.value = datetime.today() - one_day
    return self.value
Set the value to yesterday
58,707
def get_uuid_string(low=None, high=None, **_ignored):
    """Parse a UUID protobuf message's component ``low`` and ``high`` longs
    into a standard hyphenated UUID string.

    Returns None when either component is missing.  Extra keyword arguments
    are accepted (and ignored) for call-site compatibility.
    """
    if low is None or high is None:
        return None
    # FIX: the original named the kwargs catch-all `**x` and immediately
    # shadowed it with the joined hex string, which obscured intent.
    # parse_part() renders each long as hex; the concatenation is split
    # into the canonical 8-4-4-4-12 UUID groups.
    hex_digits = ''.join([parse_part(low), parse_part(high)])
    return '-'.join([hex_digits[:8], hex_digits[8:12], hex_digits[12:16],
                     hex_digits[16:20], hex_digits[20:32]])
This method parses a UUID protobuf message type from its component high and low longs into a standard formatted UUID string
58,708
def search(self, search):
    """Search Zenodo records for the given query string."""
    # Slashes confuse the Zenodo query parser; flatten them to spaces.
    query = search.replace('/', ' ')
    return self._get_records({'q': query})
search Zenodo record for string search
58,709
def qdict_get_list(qdict, k):
    """Return qdict.getlist(k) with blank entries removed."""
    return [item for item in qdict.getlist(k) if item]
Get a list from the QueryDict and remove blank values from the list.
58,710
def request_get_next(request, default_next):
    """Return the 'next' URL from POST, GET, the referrer, or the default."""
    for candidate in (request.POST.get('next'),
                      request.GET.get('next'),
                      request.META.get('HTTP_REFERER')):
        if candidate:
            return candidate
    return default_next
Get the next URL from the request.
58,711
def upload_progress(request):
    """AJAX view adapted from django-progressbarupload.

    Returns the cached upload progress for the client identified by
    X-Progress-ID, falling back to a log-file-name based cache key.
    """
    # BUG FIX: the original assigned progress_id/logfilename only inside
    # if/elif branches, raising UnboundLocalError whenever the key was
    # present in neither GET nor META.
    progress_id = request.GET.get('X-Progress-ID') or request.META.get('X-Progress-ID')
    logfilename = request.GET.get('logfilename') or request.META.get('logfilename')
    cache_key = "%s_%s" % (request.META['REMOTE_ADDR'], progress_id)
    data = cache.get(cache_key)
    if not data and logfilename:
        # Spaces in the log file name are normalized when it is cached.
        data = cache.get(logfilename.replace(' ', '_'))
    return HttpResponse(json.dumps(data))
AJAX view adapted from django - progressbarupload
58,712
def set_color(self, fg=None, bg=None, intensify=False, target=sys.stdout):
    """Set foreground and background colors and intensity.

    Abstract: concrete terminal writers must override this.
    """
    raise NotImplementedError
Set foreground - and background colors and intensity .
58,713
def add ( self , entity ) : do_append = self . __check_new ( entity ) if do_append : self . __entities . append ( entity )
Adds the given entity to this cache .
58,714
def remove ( self , entity ) : self . __id_map . pop ( entity . id , None ) self . __slug_map . pop ( entity . slug , None ) self . __entities . remove ( entity )
Removes the given entity from this cache .
58,715
def retrieve ( self , filter_expression = None , order_expression = None , slice_key = None ) : ents = iter ( self . __entities ) if not filter_expression is None : ents = filter_expression ( ents ) if not order_expression is None : ents = iter ( order_expression ( ents ) ) if not slice_key is None : ents = islice ( ents , slice_key . start , slice_key . stop ) return ents
Retrieve entities from this cache possibly after filtering ordering and slicing .
58,716
def extract_user_keywords_generator(twitter_lists_gen, lemmatizing="wordnet"):
    """Annotate users based on the Twitter lists downloaded for them.

    Yields (user_twitter_id, user_annotation) pairs where user_annotation
    maps "bag_of_lemmas" to the lemma bag and "lemma_to_keywordbag" to
    per-lemma keyword counts.

    NOTE(review): indentation reconstructed from a flattened source —
    verify branch nesting against the original file.
    """
    for user_twitter_id, twitter_lists_list in twitter_lists_gen:
        if twitter_lists_list is not None:
            # Unwrap the raw API payload when it carries a "lists" envelope.
            if "lists" in twitter_lists_list.keys():
                twitter_lists_list = twitter_lists_list["lists"]
            bag_of_lemmas, lemma_to_keywordbag = user_twitter_list_bag_of_words(twitter_lists_list, lemmatizing)
            # Convert counter-like bags into plain dicts before yielding.
            for lemma, keywordbag in lemma_to_keywordbag.items():
                lemma_to_keywordbag[lemma] = dict(keywordbag)
            lemma_to_keywordbag = dict(lemma_to_keywordbag)
            user_annotation = dict()
            user_annotation["bag_of_lemmas"] = bag_of_lemmas
            user_annotation["lemma_to_keywordbag"] = lemma_to_keywordbag
            yield user_twitter_id, user_annotation
Based on the user - related lists I have downloaded annotate the users .
58,717
def form_user_label_matrix(user_twitter_list_keywords_gen, id_to_node, max_number_of_labels):
    """Form the user-label matrix used in multi-label classification.

    Builds the full user-term matrix, filters it down to at most
    *max_number_of_labels* labels, then derives the lemma-to-keyword map
    for the surviving annotated nodes.
    """
    # Full user-term matrix (no lemma filtering at this stage).
    user_label_matrix, annotated_nodes, label_to_lemma, node_to_lemma_tokeywordbag = form_user_term_matrix(user_twitter_list_keywords_gen, id_to_node, None)
    # Keep only the most useful labels, up to the requested maximum.
    user_label_matrix, annotated_nodes, label_to_lemma = filter_user_term_matrix(user_label_matrix, annotated_nodes, label_to_lemma, max_number_of_labels)
    lemma_to_keyword = form_lemma_tokeyword_map(annotated_nodes, node_to_lemma_tokeywordbag)
    return user_label_matrix, annotated_nodes, label_to_lemma, lemma_to_keyword
Forms the user - label matrix to be used in multi - label classification .
58,718
def form_user_term_matrix(user_twitter_list_keywords_gen, id_to_node, lemma_set=None, keyword_to_topic_manual=None):
    """Form a sparse user-term matrix from per-user keyword annotations.

    Returns (user_term_matrix, annotated_nodes, label_to_topic,
    node_to_lemma_tokeywordbag) where the matrix is a scipy COO matrix of
    shape (len(id_to_node), vocabulary size).

    NOTE(review): indentation reconstructed from a flattened source —
    verify branch nesting (especially the try/except under the manual
    keyword mapping) against the original file.
    """
    term_to_attribute = dict()
    user_term_matrix_row = list()
    user_term_matrix_col = list()
    user_term_matrix_data = list()
    # Bound methods hoisted for the hot loop.
    append_user_term_matrix_row = user_term_matrix_row.append
    append_user_term_matrix_col = user_term_matrix_col.append
    append_user_term_matrix_data = user_term_matrix_data.append
    annotated_nodes = list()
    append_node = annotated_nodes.append
    node_to_lemma_tokeywordbag = dict()
    invalid_terms = list()  # NOTE(review): never populated in the visible code
    counter = 0
    if keyword_to_topic_manual is not None:
        manual_keyword_list = list(keyword_to_topic_manual.keys())
    for user_twitter_id, user_annotation in user_twitter_list_keywords_gen:
        counter += 1
        bag_of_words = user_annotation["bag_of_lemmas"]
        lemma_to_keywordbag = user_annotation["lemma_to_keywordbag"]
        if lemma_set is not None:
            # Restrict both maps to the allowed lemma vocabulary.
            bag_of_words = {lemma: multiplicity for lemma, multiplicity in bag_of_words.items() if lemma in lemma_set}
            lemma_to_keywordbag = {lemma: keywordbag for lemma, keywordbag in lemma_to_keywordbag.items() if lemma in lemma_set}
        node = id_to_node[user_twitter_id]
        append_node(node)
        node_to_lemma_tokeywordbag[node] = lemma_to_keywordbag
        for term, multiplicity in bag_of_words.items():
            if term == "news":
                # "news" is deliberately excluded as an uninformative term.
                continue
            if keyword_to_topic_manual is not None:
                # Replace the lemma with its most frequent raw keyword, then
                # map it to a manually curated topic when a close match
                # (edit distance 1) exists.
                keyword_bag = lemma_to_keywordbag[term]
                term = max(keyword_bag.keys(), key=(lambda key: keyword_bag[key]))
                found_list_of_words = simple_word_query(term, manual_keyword_list, edit_distance=1)
                if len(found_list_of_words) > 0:
                    term = found_list_of_words[0]
                    try:
                        term = keyword_to_topic_manual[term]
                    except KeyError:
                        print(term)
            vocabulary_size = len(term_to_attribute)
            # Assign the next free column index to unseen terms.
            attribute = term_to_attribute.setdefault(term, vocabulary_size)
            append_user_term_matrix_row(node)
            append_user_term_matrix_col(attribute)
            append_user_term_matrix_data(multiplicity)
    annotated_nodes = np.array(list(set(annotated_nodes)), dtype=np.int64)
    user_term_matrix_row = np.array(user_term_matrix_row, dtype=np.int64)
    user_term_matrix_col = np.array(user_term_matrix_col, dtype=np.int64)
    user_term_matrix_data = np.array(user_term_matrix_data, dtype=np.float64)
    user_term_matrix = sparse.coo_matrix((user_term_matrix_data, (user_term_matrix_row, user_term_matrix_col)), shape=(len(id_to_node), len(term_to_attribute)))
    # Invert the term->column map to get column->term.
    label_to_topic = dict(zip(term_to_attribute.values(), term_to_attribute.keys()))
    return user_term_matrix, annotated_nodes, label_to_topic, node_to_lemma_tokeywordbag
Forms a user - term matrix .
58,719
def fetch_twitter_lists_for_user_ids_generator(twitter_app_key, twitter_app_secret, user_id_list):
    """Collect at most 500 Twitter lists for each user id in *user_id_list*.

    Yields (user_twitter_id, twitter_lists_list) pairs; yields None for
    the lists when the API call fails.
    """
    twitter = login(twitter_app_key, twitter_app_secret)
    # Rate-limit bookkeeping for safe_twitter_request_handler.
    get_list_memberships_counter = 0
    get_list_memberships_time_window_start = time.perf_counter()
    for user_twitter_id in user_id_list:
        try:
            twitter_lists_list, get_list_memberships_counter, get_list_memberships_time_window_start = safe_twitter_request_handler(
                twitter_api_func=twitter.get_list_memberships,
                call_rate_limit=15,  # API window: 15 calls
                call_counter=get_list_memberships_counter,
                time_window_start=get_list_memberships_time_window_start,
                max_retries=5,
                wait_period=2,
                user_id=user_twitter_id,
                count=500,
                cursor=-1)
            yield user_twitter_id, twitter_lists_list
        except twython.TwythonError:
            yield user_twitter_id, None
        except URLError:
            yield user_twitter_id, None
        except BadStatusLine:
            yield user_twitter_id, None
Collects at most 500 Twitter lists for each user from an input list of Twitter user ids .
58,720
def decide_which_users_to_annotate(centrality_vector, number_to_annotate, already_annotated, node_to_id):
    """Sort a centrality vector and return the Twitter user ids to annotate.

    Returns up to *number_to_annotate* user ids, most central first,
    skipping ids already in *already_annotated*.
    """
    centrality_vector = np.asarray(centrality_vector)
    # BUG FIX: the original single-element branch did
    # `reversed_ind = reversed_ind.append(ind)`; list.append returns None,
    # so iterating reversed_ind raised a TypeError.  atleast_1d also guards
    # against the 0-d array that squeeze() produces for one element.
    flat = np.atleast_1d(np.squeeze(centrality_vector))
    reversed_ind = np.argsort(flat)[::-1]  # highest centrality first
    user_id_list = []
    counter = 0
    for node in reversed_ind:
        user_twitter_id = node_to_id[node]
        if user_twitter_id not in already_annotated:
            user_id_list.append(user_twitter_id)
            counter += 1
            if counter >= number_to_annotate:
                break
    return user_id_list
Sorts a centrality vector and returns the Twitter user ids that are to be annotated .
58,721
def on_demand_annotation ( twitter_app_key , twitter_app_secret , user_twitter_id ) : twitter = login ( twitter_app_key , twitter_app_secret ) twitter_lists_list = twitter . get_list_memberships ( user_id = user_twitter_id , count = 1000 ) for twitter_list in twitter_lists_list : print ( twitter_list ) return twitter_lists_list
A service that leverages twitter lists for on - demand annotation of popular users .
58,722
def get_member_class ( resource ) : reg = get_current_registry ( ) if IInterface in provided_by ( resource ) : member_class = reg . getUtility ( resource , name = 'member-class' ) else : member_class = reg . getAdapter ( resource , IMemberResource , name = 'member-class' ) return member_class
Returns the registered member class for the given resource .
58,723
def get_collection_class ( resource ) : reg = get_current_registry ( ) if IInterface in provided_by ( resource ) : coll_class = reg . getUtility ( resource , name = 'collection-class' ) else : coll_class = reg . getAdapter ( resource , ICollectionResource , name = 'collection-class' ) return coll_class
Returns the registered collection resource class for the given marker interface or member resource class or instance .
58,724
def as_member(entity, parent=None):
    """Adapt *entity* to a location-aware member resource."""
    reg = get_current_registry()
    member = reg.getAdapter(entity, IMemberResource)
    if parent is not None:
        # Wire up the lineage to make the resource location-aware.
        member.__parent__ = parent
    return member
Adapts an object to a location aware member resource .
58,725
def get_resource_url(resource):
    """Return the URL for the given resource with the netloc stripped."""
    parsed = list(urlparse.urlparse(model_path(resource)))
    parsed[1] = ""  # drop the network location component
    return urlparse.urlunparse(parsed)
Returns the URL for the given resource .
58,726
def get_registered_collection_resources ( ) : reg = get_current_registry ( ) return [ util . component for util in reg . registeredUtilities ( ) if util . name == 'collection-class' ]
Returns a list of all registered collection resource classes .
58,727
def resource_to_url ( resource , request = None , quote = False ) : if request is None : request = get_current_request ( ) reg = get_current_registry ( ) cnv = reg . getAdapter ( request , IResourceUrlConverter ) return cnv . resource_to_url ( resource , quote = quote )
Converts the given resource to a URL .
58,728
def url_to_resource ( url , request = None ) : if request is None : request = get_current_request ( ) reg = get_current_registry ( ) cnv = reg . getAdapter ( request , IResourceUrlConverter ) return cnv . url_to_resource ( url )
Converts the given URL to a resource .
58,729
def get_entity_class ( resource ) : reg = get_current_registry ( ) if IInterface in provided_by ( resource ) : ent_cls = reg . getUtility ( resource , name = 'entity-class' ) else : ent_cls = reg . getAdapter ( resource , IEntity , name = 'entity-class' ) return ent_cls
Returns the entity class registered for the given registered resource .
58,730
def install_board_with_programmer ( mcu , programmer , f_cpu = 16000000 , core = 'arduino' , replace_existing = False , ) : bunch = AutoBunch ( ) board_id = '{mcu}_{f_cpu}_{programmer}' . format ( f_cpu = f_cpu , mcu = mcu , programmer = programmer , ) bunch . name = '{mcu}@{f} Prog:{programmer}' . format ( f = strfreq ( f_cpu ) , mcu = mcu , programmer = programmer , ) bunch . upload . using = programmer bunch . build . mcu = mcu bunch . build . f_cpu = str ( f_cpu ) + 'L' bunch . build . core = core install_board ( board_id , bunch , replace_existing = replace_existing )
install board with programmer .
58,731
def logMsg(self, msg, printMsg=True):
    """Append *msg* to the in-memory log with a timestamp; optionally print it
    and forward it to arcpy's message stream."""
    # Renamed from `time` so the stdlib module name is not shadowed.
    timestamp = datetime.datetime.now().strftime('%I:%M %p')
    self.log = '{0}\n{1} | {2}'.format(self.log, timestamp, msg)
    if printMsg:
        # PY3 FIX: the original used the Python 2 `print msg` statement.
        print(msg)
    if self.addLogsToArcpyMessages:
        from arcpy import AddMessage
        AddMessage(msg)
logs a message and prints it to the screen
58,732
def logGPMsg(self, printMsg=True):
    """Log the current arcpy geoprocessing messages, printing them too."""
    from arcpy import GetMessages
    msgs = GetMessages()
    try:
        self.logMsg(msgs, printMsg)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate.
        self.logMsg('error getting arcpy message', printMsg)
logs the arcpy messages and prints them to the screen
58,733
def writeLogToFile(self):
    """Append the in-memory log to the log file, creating the folder first."""
    log_folder = self.logFolder
    if not os.path.exists(log_folder):
        os.mkdir(log_folder)
    with open(self.logFile, 'a') as handle:
        handle.write('\n\n' + self.log)
writes the log to a file
58,734
def logError(self):
    """Log the current traceback (prefixed by an ERROR marker) and return it."""
    import traceback
    self.logMsg('ERROR!!!')
    trace = traceback.format_exc()
    self.logMsg(trace)
    return trace
gets traceback info and logs it
58,735
def get_random_giphy ( phrase ) : with warnings . catch_warnings ( ) : warnings . simplefilter ( 'ignore' ) giphy = giphypop . Giphy ( ) results = giphy . search_list ( phrase = phrase , limit = 100 ) if not results : raise ValueError ( 'There were no results for that phrase' ) return random . choice ( results ) . media_url
Return the URL of a random GIF related to the phrase if possible
58,736
def handle_command_line ( ) : phrase = ' ' . join ( sys . argv [ 1 : ] ) or 'random' try : giphy = get_random_giphy ( phrase ) except ValueError : sys . stderr . write ( 'Unable to find any GIFs for {!r}\n' . format ( phrase ) ) sys . exit ( 1 ) display ( fetch_image ( giphy ) )
Display an image for the phrase in sys . argv if possible
58,737
def make_required_folders(self):
    """Create every folder declared in the config that does not exist yet."""
    required = (
        self.pending_folder,
        self.usb_incoming_folder,
        self.outgoing_folder,
        self.incoming_folder,
        self.archive_folder,
        self.tmp_folder,
        self.log_folder,
    )
    for folder in required:
        if not os.path.exists(folder):
            os.makedirs(folder)
Makes all folders declared in the config if they do not exist .
58,738
def load ( self , filename , offset ) : try : self . offset = offset self . fd = open ( filename , 'rb' ) self . fd . seek ( self . offset + VOLUME_HEADER_OFFSET ) data = self . fd . read ( 1024 ) self . vol_header = VolumeHeader ( data ) self . fd . close ( ) except IOError as e : print ( e )
Loads HFS + volume information
58,739
def get_interfaces ( self ) : interfaces = self . xml . find ( 'devices' ) . iter ( 'interface' ) iobjs = [ ] for interface in interfaces : _type = interface . attrib [ 'type' ] mac = interface . find ( 'mac' ) . attrib [ 'address' ] source = interface . find ( 'source' ) . attrib [ _type ] model = interface . find ( 'model' ) . attrib [ 'type' ] iobjs . append ( NetworkInterface ( _type , mac , source , model ) ) return iobjs
Return a list of sham . network . interfaces . NetworkInterface describing all the interfaces this VM has
58,740
def get_disks ( self ) : disks = [ disk for disk in self . xml . iter ( 'disk' ) ] disk_objs = [ ] for disk in disks : source = disk . find ( 'source' ) if source is None : continue path = source . attrib [ 'file' ] diskobj = self . domain . connect ( ) . storageVolLookupByPath ( path ) disk_objs . append ( diskobj ) return [ Volume ( d , StoragePool ( d . storagePoolLookupByVolume ( ) ) ) for d in disk_objs ]
Return a list of all the Disks attached to this VM The disks are returned in a sham . storage . volumes . Volume object
58,741
def delete ( self ) : disks = self . get_disks ( ) self . domain . undefine ( ) for disk in disks : disk . wipe ( ) disk . delete ( )
Delete this VM and remove all its disks
58,742
def to_dict(self):
    """Return the values contained in this object as a plain dict."""
    return dict(
        domain_type=self.domain_type,
        max_memory=self.max_memory,
        current_memory=self.current_memory,
        num_cpus=self.num_cpus,
        running=self.is_running(),
        name=self.name,
    )
Return the values contained in this object as a dict
58,743
def guess_url_vcs(url):
    """Given a URL, try to guess what kind of VCS it is for.

    Returns 'git', 'svn', or 'hg'; None when no good guess is possible.
    """
    parsed = urllib.parse.urlsplit(url)
    if parsed.scheme in ('git', 'svn'):
        return parsed.scheme
    elif parsed.path.endswith('.git'):
        return 'git'
    elif parsed.hostname == 'github.com':
        return 'git'
    if parsed.scheme in ('http', 'https'):
        resp = requests.get(url)
        # BUG FIX: the Server header may be absent; the original called
        # .lower() on None and raised AttributeError.
        server = resp.headers.get('server') or ''
        if re.match('basehttp.*python.*', server.lower()):
            # `hg serve` answers with Python's BaseHTTPServer banner.
            return 'hg'
    return None
Given a url try to guess what kind of VCS it s for . Return None if we can t make a good guess .
58,744
def guess_folder_vcs(folder):
    """Return 'git', 'hg', or 'svn' for a local checkout; None otherwise."""
    try:
        contents = set(os.listdir(folder))
    except OSError:
        return None
    for marker in ('.git', '.hg', '.svn'):
        if marker in contents:
            return marker[1:]  # strip the leading dot
    return None
Given a path for a folder on the local filesystem see what kind of vcs repo it is if any .
58,745
def basename(url):
    """Name of the folder you'd get by cloning *url* into the cwd."""
    url = url.strip()
    url, _, _ = url.partition('#')  # drop any fragment
    if url.endswith('/'):
        url = url[:-1]
    tail = url.split('/')[-1]
    return re.sub(r'\.git$', '', tail)
Return the name of the folder that you d get if you cloned url into the current working directory .
58,746
def get_url ( self ) : cmd = { 'hg' : 'hg paths default' , 'git' : 'git config --local --get remote.origin.url' , } [ self . vcs_type ] with chdir ( self . folder ) : r = self . run ( cmd ) return r . output . replace ( '\n' , '' )
Assuming that the repo has been cloned locally get its default upstream URL .
58,747
def fburl(parser, token):
    """Return a URLNode producing an absolute URL matching the given view
    with its parameters.

    Syntax: {% fburl view arg1,kw=val [as var] %}.
    """
    bits = token.contents.split(' ')
    if len(bits) < 2:
        raise template.TemplateSyntaxError("'%s' takes at least one argument"
                                           " (path to a view)" % bits[0])
    viewname = bits[1]
    args = []
    kwargs = {}
    asvar = None
    if len(bits) > 2:
        bits = iter(bits[2:])
        for bit in bits:
            if bit == 'as':
                # PY3 FIX: iterators have no .next() method; use next().
                asvar = next(bits)
                break
            else:
                for arg in bit.split(","):
                    if '=' in arg:
                        k, v = arg.split('=', 1)
                        k = k.strip()
                        kwargs[k] = parser.compile_filter(v)
                    elif arg:
                        args.append(parser.compile_filter(arg))
    return URLNode(viewname, args, kwargs, asvar)
Returns an absolute URL matching given view with its parameters .
58,748
def chdir(method):
    """Decorator executing *method* inside directory *dir* (its first arg)."""
    @functools.wraps(method)
    def wrapper(self, dir, *args, **kw):
        dirstack = ChdirStack()
        dirstack.push(dir)
        try:
            return method(self, dir, *args, **kw)
        finally:
            # Always restore the previous working directory.
            dirstack.pop()
    return wrapper
Decorator executing method in directory dir .
58,749
def push(self, dir):
    """Remember the cwd on the stack, then change to *dir* (or stay put)."""
    here = os.getcwd()
    self.stack.append(here)
    os.chdir(dir or here)
Push cwd on stack and change to dir .
58,750
def pop(self):
    """Change back to the directory on top of the stack, if any."""
    if self.stack:
        os.chdir(self.stack.pop())
Pop dir off stack and change to it .
58,751
def get_matching ( self , source_id ) : value = self . _accessor . get_by_id ( source_id ) if not value is None : reg = get_current_registry ( ) prx_fac = reg . getUtility ( IDataTraversalProxyFactory ) prx = prx_fac . make_proxy ( value , self . _accessor , self . relationship_direction , self . relation_operation ) else : prx = None return prx
Returns a matching target object for the given source ID .
58,752
def update_attribute_value_items ( self ) : for attr in self . _attribute_iterator ( ) : if attr . kind != RESOURCE_ATTRIBUTE_KINDS . COLLECTION : try : attr_val = self . _get_proxied_attribute_value ( attr ) except AttributeError : continue else : yield ( attr , attr_val )
Returns an iterator of items for an attribute value map to use for an UPDATE operation .
58,753
def get_entity ( self ) : if self . _accessor is None : if self . __converted_entity is None : self . __converted_entity = self . _convert_to_entity ( ) else : self . __converted_entity = self . get_matching ( self . get_id ( ) ) . get_entity ( ) return self . __converted_entity
Returns the entity converted from the proxied data .
58,754
def make_traverser(cls, source_data, target_data, relation_operation, accessor=None, manage_back_references=True):
    """Factory method to create a tree traverser for the given source and
    target data combination.

    ADD requires source data only; REMOVE requires target data only;
    UPDATE requires an accessor and resolves the target from the source
    IDs when no explicit target data is given.

    NOTE(review): indentation reconstructed from a flattened source —
    verify branch nesting against the original file.
    """
    reg = get_current_registry()
    prx_fac = reg.getUtility(IDataTraversalProxyFactory)
    if relation_operation == RELATION_OPERATIONS.ADD or relation_operation == RELATION_OPERATIONS.UPDATE:
        if relation_operation == RELATION_OPERATIONS.ADD and not target_data is None:
            raise ValueError('Must not provide target data with '
                             'relation operation ADD.')
        source_proxy = prx_fac.make_proxy(source_data, None, RELATIONSHIP_DIRECTIONS.NONE, relation_operation)
        source_is_sequence = source_proxy.proxy_for == RESOURCE_KINDS.COLLECTION
        if not source_is_sequence:
            source_id = source_proxy.get_id()
    else:
        source_proxy = None
        source_is_sequence = False
    if relation_operation == RELATION_OPERATIONS.REMOVE or relation_operation == RELATION_OPERATIONS.UPDATE:
        rel_dir = RELATIONSHIP_DIRECTIONS.BIDIRECTIONAL
        if not manage_back_references:
            # Suppress updates of the reverse end of relationships.
            rel_dir &= ~RELATIONSHIP_DIRECTIONS.REVERSE
        if relation_operation == RELATION_OPERATIONS.REMOVE:
            if not source_data is None:
                raise ValueError('Must not provide source data with '
                                 'relation operation REMOVE.')
            target_proxy = prx_fac.make_proxy(target_data, accessor, rel_dir, relation_operation)
        else:
            if accessor is None:
                raise ValueError('Need to provide an accessor when '
                                 'performing UPDATE operations.')
            if not target_data is None:
                target_root = target_data
            elif not source_is_sequence:
                # Resolve the single update target from the source's ID.
                target_root = accessor.get_by_id(source_id)
                if target_root is None:
                    raise ValueError('Entity with ID %s to update not '
                                     'found.' % source_id)
            else:
                # Resolve each member of the source sequence; silently skip
                # members without an ID or without a matching entity.
                target_root = []
                for src_prx in source_proxy:
                    tgt_ent_id = src_prx.get_id()
                    if tgt_ent_id is None:
                        continue
                    tgt_ent = accessor.get_by_id(tgt_ent_id)
                    if tgt_ent is None:
                        continue
                    target_root.append(tgt_ent)
            target_proxy = prx_fac.make_proxy(target_root, accessor, rel_dir, relation_operation)
        target_is_sequence = target_proxy.proxy_for == RESOURCE_KINDS.COLLECTION
    else:
        target_proxy = None
        target_is_sequence = False
    if not source_proxy is None and not target_proxy is None:
        # Source and target must agree on whether they are sequences.
        if not ((source_is_sequence and target_is_sequence)
                or (not source_is_sequence and not target_is_sequence)):
            raise ValueError('When both source and target root nodes are '
                             'given, they can either both be sequences '
                             'or both not be sequences.')
    return cls(source_proxy, target_proxy)
Factory method to create a tree traverser depending on the input source and target data combination .
58,755
async def info ( self , obj_id = None ) : if obj_id : try : return await self . process ( obj_id ) except JSONDecodeError : raise LookupError ( 'Error object with that id does not exist' , obj_id ) else : return await self . connector . getJson ( '/system/info/public' , remote = False )
Get info about object id
58,756
async def search(self, query, sort_map=None, strict_sort=False):
    """Send a search request to Emby and return results sorted by type.

    *sort_map* maps item types to sort ranks (unknown types sort last);
    with *strict_sort* only the mapped types are requested.
    """
    if sort_map is None:
        # FIX: avoid a mutable default argument; this dict is the
        # historical default ranking.
        sort_map = {'BoxSet': 0, 'Series': 1, 'Movie': 2, 'Audio': 3, 'Person': 4}
    search_params = {'remote': False, 'searchTerm': query}
    if strict_sort:
        search_params['IncludeItemTypes'] = ','.join(sort_map.keys())
    json = await self.connector.getJson('/Search/Hints/', **search_params)
    items = await self.process(json["SearchHints"])
    m_size = len(sort_map)
    items = sorted(items, key=lambda x: sort_map.get(x.type, m_size))
    return items
Sends a search request to emby returns results
58,757
async def nextUp ( self , userId = None ) : json = await self . connector . getJson ( '/Shows/NextUp' , pass_uid = True , remote = False , userId = userId ) return await self . process ( json )
returns list of items marked as next up
58,758
async def update(self):
    """Reload all cached information by re-invoking each cached accessor,
    best-effort (individual failures are ignored)."""
    keys = list(self.extras.keys())
    # Clear the cache first; successful accessors repopulate it.
    self.extras = {}
    for key in keys:
        try:
            func = getattr(self, key, None)
            if callable(func):
                func()
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # still propagate; refresh stays best-effort.
            pass
reload all cached information
58,759
async def create_playlist(self, name, *songs):
    """Create a new playlist named *name* containing *songs*."""
    data = {'Name': name}
    ids = [item.id for item in (await self.process(songs))]
    if ids:
        data['Ids'] = ','.join(ids)
    return await self.connector.post('/Playlists', data=data,
                                     pass_uid=True, remote=False)
create a new playlist
58,760
def load_all ( self , group ) : for ep in iter_entry_points ( group = group ) : plugin = ep . load ( ) plugin ( self . __config )
Loads all plugins advertising entry points with the given group name . The specified plugin needs to be a callable that accepts the everest configurator as single argument .
58,761
def gui ( ) : sel = psidialogs . multi_choice ( libraries ( ) , 'select libraries to remove from %s!' % libraries_dir ( ) , title = 'remove boards' ) print ( '%s selected' % sel ) if sel : if psidialogs . ask_yes_no ( 'Do you really want to remove selected libraries?\n' + '\n' . join ( sel ) ) : for x in sel : remove_lib ( x ) print ( '%s was removed' % x )
remove libraries by GUI .
58,762
def src2ast(src: str) -> Expression:
    """Return the ast.Expression parsed from the source code in *src*.

    Raises:
        ValueError: if *src* is not a valid expression.
    """
    try:
        tree = ast.parse(src, mode='eval')
    except SyntaxError:
        # Hide the SyntaxError context; callers only need the ValueError.
        raise ValueError("Not a valid expression.") from None
    return tree
Return ast . Expression created from source code given in src .
58,763
def names(expr: AST) -> Set[str]:
    """Return the names of globals referenced in *expr*.

    A name that is both stored and loaded (e.g. a comprehension variable)
    is considered local and excluded.
    """
    loaded = set()
    stored = set()
    for node in ast.walk(expr):
        if isinstance(node, ast.Name):
            if isinstance(node.ctx, ast.Load):
                loaded.add(node.id)
            elif isinstance(node.ctx, ast.Store):
                stored.add(node.id)
    return loaded - stored
Names of globals in expr .
58,764
def replace_name ( expr : AST , old_name : str , new_name : str ) -> AST : return _NameReplacer ( old_name , new_name ) . visit ( deepcopy ( expr ) )
Replace all Name nodes named old_name with nodes named new_name .
58,765
def Negation ( expr : Expression ) -> Expression : expr = Expression ( _negate ( expr . body ) ) return ast . fix_missing_locations ( expr )
Return expression which is the negation of expr .
58,766
def Conjunction(expr1: Expression, expr2: Expression) -> Expression:
    """Return the expression `expr1 and expr2`."""
    combined = ast.BoolOp(ast.And(), [expr1.body, expr2.body])
    return ast.fix_missing_locations(Expression(combined))
Return expression which is the conjunction of expr1 and expr2 .
58,767
def Disjunction(expr1: Expression, expr2: Expression) -> Expression:
    """Return the expression `expr1 or expr2`."""
    combined = ast.BoolOp(ast.Or(), [expr1.body, expr2.body])
    return ast.fix_missing_locations(Expression(combined))
Return expression which is the disjunction of expr1 and expr2 .
58,768
def Contradiction(expr1: Expression, expr2: Expression) -> Expression:
    """Return `(expr1 and not expr2) or (not expr1 and expr2)` — true
    exactly when the two expressions disagree."""
    xor_like = Disjunction(Conjunction(expr1, Negation(expr2)),
                           Conjunction(Negation(expr1), expr2))
    return ast.fix_missing_locations(xor_like)
Return expression which is the contradiction of expr1 and expr2 .
58,769
def diff_binding ( self ) -> int : try : prev_op , prev_op_binding = self . nested_ops [ - 2 ] except IndexError : prev_op , prev_op_binding = None , 0 try : curr_op , curr_op_binding = self . nested_ops [ - 1 ] except IndexError : curr_op , curr_op_binding = None , 0 if prev_op is ast . Pow and isinstance ( curr_op , ( ast . Invert , ast . USub ) ) : return 1 return curr_op_binding - prev_op_binding
Return the difference betweens the binding levels of the current and the previous operator .
58,770
def wrap_expr(self, src: str, dfltChaining: bool) -> str:
    """Wrap *src* in parentheses if necessary.

    Parentheses are needed when the current operator binds weaker than
    the previous one, or equally strong without default chaining.
    """
    diff = self.op_man.diff_binding()
    needs_parens = diff < 0 or (diff == 0 and not dfltChaining)
    return self.parenthesize(src) if needs_parens else src
Wrap src in parentheses if neccessary .
58,771
def visit ( self , node : AST , dfltChaining : bool = True ) -> str : if node is None : return '' if isinstance ( node , ast . Expression ) : return self . visit ( node . body ) method = 'visit_' + node . __class__ . __name__ visitor = getattr ( self , method , self . generic_visit ) return visitor ( node , dfltChaining )
Process node by dispatching to a handler .
58,772
def generic_visit ( self , node : AST , dfltChaining : bool = True ) -> str : for field , value in ast . iter_fields ( node ) : if isinstance ( value , list ) : for item in value : if isinstance ( item , AST ) : self . visit ( item ) elif isinstance ( value , AST ) : self . visit ( value )
Default handler called if no explicit visitor function exists for a node .
58,773
def visit_NameConstant ( self , node : AST , dfltChaining : bool = True ) -> str : return str ( node . value )
Return node s name as string .
58,774
def visit_Num ( self , node : AST , dfltChaining : bool = True ) -> str : return str ( node . n )
Return node s number as string .
58,775
def visit_Str ( self , node : AST , dfltChaining : bool = True ) -> str : return repr ( node . s )
Return node s string representation .
58,776
def visit_FormattedValue(self, node: AST, dfltChaining: bool = True) -> str:
    """Return node's value formatted according to its format spec.

    Reassembles an f-string replacement field: `{value!conv:spec}` where the
    conversion marker comes from self.CONV_MAP and the format spec (itself a
    nested JoinedStr) is rendered via self._nested_str.
    """
    format_spec = node.format_spec
    # Three implicitly concatenated f-string parts: the opening brace plus
    # value, the optional conversion, and the optional ':<spec>' with the
    # closing brace.
    return f"{{{self.visit(node.value)}" f"{self.CONV_MAP.get(node.conversion, '')}" f"{':'+self._nested_str(format_spec) if format_spec else ''}}}"
Return node s value formatted according to its format spec .
58,777
def visit_Tuple(self, node: AST, dfltChaining: bool = True) -> str:
    """Return tuple representation of node's elements."""
    rendered = [self.visit(elt) for elt in node.elts]
    if len(node.elts) == 1:
        # Single-element tuples need the trailing comma.
        return f"({rendered[0]},)"
    return f"({', '.join(rendered)})"
Return tuple representation of node s elements .
58,778
def visit_Set ( self , node : AST , dfltChaining : bool = True ) -> str : return '{' + ', ' . join ( [ self . visit ( elt ) for elt in node . elts ] ) + '}'
Return set representation of node s elements .
58,779
def visit_Dict(self, node: AST, dfltChaining: bool = True) -> str:
    """Return dict representation of node's elements."""
    pairs = (f"{self.visit(key)}: {self.visit(value)}"
             for key, value in zip(node.keys, node.values))
    return '{' + ', '.join(pairs) + '}'
Return dict representation of node s elements .
58,780
def visit_Name(self, node: AST, dfltChaining: bool = True) -> str:
    """Return the identifier stored on the Name node."""
    return node.id
Return node s id .
58,781
def visit_Starred ( self , node : AST , dfltChaining : bool = True ) -> str : with self . op_man ( node ) : return f"*{self.visit(node.value)}"
Return representation of starred expression.
58,782
def visit_Expr ( self , node : AST , dfltChaining : bool = True ) -> str : return self . visit ( node . value )
Return representation of nested expression .
58,783
def visit_UnaryOp ( self , node : AST , dfltChaining : bool = True ) -> str : op = node . op with self . op_man ( op ) : return self . visit ( op ) + self . visit ( node . operand )
Return representation of node s operator and operand .
58,784
def visit_Div(self, node: AST, dfltChaining: bool = True) -> str:
    """Return the division operator, space-padded unless in compact mode."""
    if self.compact:
        return '/'
    return ' / '
Return division sign .
58,785
def visit_Compare(self, node: AST, dfltChaining: bool = True) -> str:
    """Return the comparison chain as an inlined expression.

    Joins the left operand with each operator/comparator pair;
    ``wrap_expr`` decides whether the result needs parentheses.
    """
    lead_op = node.ops[0]
    with self.op_man(lead_op):
        parts = [self.visit(node.left)]
        for op, comparator in zip(node.ops, node.comparators):
            parts.append(self.visit(op))
            parts.append(self.visit(comparator, dfltChaining=False))
        return self.wrap_expr(' '.join(parts), dfltChaining)
Return node's operators and operands as inlined expression.
58,786
def visit_keyword(self, node: AST, dfltChaining: bool = True) -> str:
    """Return the node as a keyword argument (``name=value`` or ``**value``)."""
    value_src = self.visit(node.value)
    if node.arg is None:
        # ``**`` double-star unpacking has no argument name
        return f"**{value_src}"
    return f"{node.arg}={value_src}"
Return representation of node as keyword arg .
58,787
def visit_Call(self, node: AST, dfltChaining: bool = True) -> str:
    """Return the node as a function call.

    Positional args come first, then keyword args; ``compact`` mode is
    switched on while rendering so nested operators get no padding.
    """
    try:
        keywords = node.keywords
    except AttributeError:  # AST variant without keyword args
        keywords = []
    self.compact = True
    pieces = [self.visit(arg) for arg in node.args]
    pieces.extend(self.visit(kwd) for kwd in keywords)
    param_src = ', '.join(pieces)
    src = f"{self.visit(node.func)}({param_src})"
    self.compact = False
    return src
Return node's representation as function call.
58,788
def visit_arguments(self, node: AST, dfltChaining: bool = True) -> str:
    """Return the node's representation as argument list.

    Renders positional args (with defaults), ``*args`` (or a bare ``*``
    when keyword-only args follow without a vararg), keyword-only args
    and ``**kwargs``.  ``compact`` mode is enabled while rendering so
    nested default-value expressions are emitted without padding.
    """
    args = node.args
    dflts = node.defaults
    vararg = node.vararg
    kwonlyargs = node.kwonlyargs
    kw_dflts = node.kw_defaults
    kwarg = node.kwarg
    self.compact = True
    # defaults align with the *last* len(dflts) positional args
    n_args_without_dflt = len(args) - len(dflts)
    args_src = (arg.arg for arg in args[:n_args_without_dflt])
    dflts_src = (f"{arg.arg}={self.visit(dflt)}"
                 for arg, dflt in zip(args[n_args_without_dflt:], dflts))
    if vararg:
        vararg_src = (f"*{vararg.arg}",)
    elif kwonlyargs:
        # BUGFIX: keyword-only args without a vararg require a bare '*'
        # separator (e.g. ``lambda a, *, b: ...``); previously omitted.
        vararg_src = ('*',)
    else:
        vararg_src = ()
    kwonly_src = ((f"{kw.arg}={self.visit(dflt)}"
                   if dflt is not None else f"{kw.arg}")
                  for kw, dflt in zip(kwonlyargs, kw_dflts))
    kwarg_src = (f"**{kwarg.arg}",) if kwarg else ()
    src = ', '.join(chain(args_src, dflts_src, vararg_src,
                          kwonly_src, kwarg_src))
    self.compact = False
    return src
Return node's representation as argument list.
58,789
def visit_Lambda(self, node: AST, dfltChaining: bool = True) -> str:
    """Return the node as a lambda expression."""
    with self.op_man(node):
        args_src = self.visit(node.args)
        body_src = self.visit(node.body)
        return self.wrap_expr(f"lambda {args_src}: {body_src}", dfltChaining)
Return node's representation as lambda expression.
58,790
def visit_IfExp(self, node: AST, dfltChaining: bool = True) -> str:
    """Return the node as a conditional (``... if ... else ...``) expression."""
    with self.op_man(node):
        # visit in the same order as the source reads: body, test, orelse
        body_src = self.visit(node.body, dfltChaining=False)
        test_src = self.visit(node.test)
        orelse_src = self.visit(node.orelse)
        src = f"{body_src} if {test_src} else {orelse_src}"
        return self.wrap_expr(src, dfltChaining)
Return node's representation as ``... if ... else ...`` expression.
58,791
def visit_Attribute(self, node: AST, dfltChaining: bool = True) -> str:
    """Return the node as an attribute access (``value.attr``)."""
    return f"{self.visit(node.value)}.{node.attr}"
Return node's representation as attribute access.
58,792
def visit_Slice(self, node: AST, dfltChaining: bool = True) -> str:
    """Return the node as a slice (``lower:upper`` or ``lower:upper:step``)."""
    # lower and upper are always rendered (self.visit is expected to
    # handle absent bounds); step only when present
    parts = [self.visit(node.lower), self.visit(node.upper)]
    if node.step is not None:
        parts.append(self.visit(node.step))
    return ':'.join(parts)
Return node's representation as slice.
58,793
def visit_ExtSlice(self, node: AST, dfltChaining: bool = True) -> str:
    """Return the node as an extended slice (comma-separated dimensions)."""
    return ', '.join(self.visit(dim) for dim in node.dims)
Return node's representation as extended slice.
58,794
def visit_comprehension(self, node: AST, dfltChaining: bool = True) -> str:
    """Return the node as a ``for ... in ... [if ...]`` comprehension clause."""
    target = node.target
    try:
        elts = target.elts
    except AttributeError:
        # single target name
        names = self.visit(target)
    else:
        # tuple target, e.g. ``for k, v in ...``
        names = ', '.join(self.visit(elt) for elt in elts)
    clause = f"for {names} in {self.visit(node.iter)}"
    if node.ifs:
        conds = ' '.join('if ' + self.visit(cond) for cond in node.ifs)
        clause += f" {conds}"
    return clause
Return node's representation as comprehension.
58,795
def visit_ListComp(self, node: AST, dfltChaining: bool = True) -> str:
    """Return the node as a list comprehension."""
    elt_src = self.visit(node.elt)
    gens_src = ' '.join(self.visit(gen) for gen in node.generators)
    return f"[{elt_src} {gens_src}]"
Return node's representation as list comprehension.
58,796
def visit_SetComp(self, node: AST, dfltChaining: bool = True) -> str:
    """Return the node as a set comprehension."""
    elt_src = self.visit(node.elt)
    gens_src = ' '.join(self.visit(gen) for gen in node.generators)
    return f"{{{elt_src} {gens_src}}}"
Return node's representation as set comprehension.
58,797
def visit_DictComp(self, node: AST, dfltChaining: bool = True) -> str:
    """Return the node as a dict comprehension."""
    key_src = self.visit(node.key)
    value_src = self.visit(node.value)
    gens_src = ' '.join(self.visit(gen) for gen in node.generators)
    return f"{{{key_src}: {value_src} {gens_src}}}"
Return node's representation as dict comprehension.
58,798
def visit_GeneratorExp(self, node: AST, dfltChaining: bool = True) -> str:
    """Return the node as a generator expression."""
    elt_src = self.visit(node.elt)
    gens_src = ' '.join(self.visit(gen) for gen in node.generators)
    return f"({elt_src} {gens_src})"
Return node's representation as generator expression.
58,799
def visible_line_width(self, position=Point):
    """Return the visible width of the text in line_buffer up to position.

    Tabs count as 8 columns (1 + 7 extra) and characters in the range
    U+2013..U+FFFD contribute one extra column each (assumed to render
    double-width).
    """
    # NOTE(review): `Point` is a default defined outside this view --
    # presumably a sentinel meaning "end of the line buffer"; confirm
    # against the enclosing class.
    # One extra column per character assumed double-width -- TODO confirm
    # the U+2013..U+FFFD range matches the terminal's rendering.
    extra_char_width = len([None for c in self[:position].line_buffer if 0x2013 <= ord(c) <= 0xFFFD])
    # quoted_text() length + 7 extra columns per tab + wide-char extras
    return len(self[:position].quoted_text()) + self[:position].line_buffer.count(u"\t") * 7 + extra_char_width
Return the visible width of the text in line buffer up to position .