idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
6,900
def get_series_episodes ( self , series_id , episode_number = None , aired_season = None , aired_episode = None , dvd_season = None , dvd_episode = None , imdb_id = None , page = 1 ) : arguments = locals ( ) optional_parameters = { 'episode_number' : 'absoluteNumber' , 'aired_season' : 'airedSeason' , 'aired_episode' : 'airedEpisode' , 'dvd_season' : 'dvdSeason' , 'dvd_episode' : 'dvdEpisode' , 'imdb_id' : 'imdbId' , 'page' : 'page' } query_string = utils . query_param_string_from_option_args ( optional_parameters , arguments ) raw_response = requests_util . run_request ( 'get' , self . API_BASE_URL + '/series/%d/episodes/query?%s' % ( series_id , query_string ) , headers = self . __get_header_with_auth ( ) ) return self . parse_raw_response ( raw_response )
Retrieves all episodes for a particular series given its TheTVDB and filtered by additional optional details .
6,901
def get_updated ( self , from_time , to_time = None ) : arguments = locals ( ) optional_parameters = { 'to_time' : 'toTime' } query_string = 'fromTime=%s&%s' % ( from_time , utils . query_param_string_from_option_args ( optional_parameters , arguments ) ) raw_response = requests_util . run_request ( 'get' , self . API_BASE_URL + '/uodated/query?%s' % query_string , headers = self . __get_header_with_auth ( ) ) return self . parse_raw_response ( raw_response )
Retrives a list of series that have changed on TheTVDB since a provided from time parameter and optionally to an specified to time .
6,902
def get_user ( self ) : return self . parse_raw_response ( requests_util . run_request ( 'get' , self . API_BASE_URL + '/user' , headers = self . __get_header_with_auth ( ) ) )
Retrieves information about the user currently using the api .
6,903
def get_user_favorites ( self ) : return self . parse_raw_response ( requests_util . run_request ( 'get' , self . API_BASE_URL + '/user/favorites' , headers = self . __get_header_with_auth ( ) ) )
Retrieves the list of tv series the current user has flagged as favorite .
6,904
def delete_user_favorite ( self , series_id ) : return self . parse_raw_response ( requests_util . run_request ( 'delete' , self . API_BASE_URL + '/user/favorites/%d' % series_id , headers = self . __get_header_with_auth ( ) ) )
Deletes the series of the provided id from the favorites list of the current user .
6,905
def __get_user_ratings ( self ) : return self . parse_raw_response ( requests_util . run_request ( 'get' , self . API_BASE_URL + '/user/ratings' , headers = self . __get_header_with_auth ( ) ) )
Returns a list of the ratings provided by the current user .
6,906
def get_user_ratings ( self , item_type = None ) : if item_type : query_string = 'itemType=%s' % item_type return self . parse_raw_response ( requests_util . run_request ( 'get' , self . API_BASE_URL + '/user/ratings/qeury?%s' % query_string , headers = self . __get_header_with_auth ( ) ) ) else : return self . __get_user_ratings ( )
Returns a list of the ratings for the type of item provided for the current user .
6,907
def add_user_rating ( self , item_type , item_id , item_rating ) : raw_response = requests_util . run_request ( 'put' , self . API_BASE_URL + '/user/ratings/%s/%d/%d' % ( item_type , item_id , item_rating ) , headers = self . __get_header_with_auth ( ) ) return self . parse_raw_response ( raw_response )
Adds the rating for the item indicated for the current user .
6,908
def delete_user_rating ( self , item_type , item_id ) : raw_response = requests_util . run_request ( 'delete' , self . API_BASE_URL + '/user/ratings/%s/%d' % ( item_type , item_id ) , headers = self . __get_header_with_auth ( ) ) return self . parse_raw_response ( raw_response )
Deletes from the list of rating of the current user the rating provided for the specified element type .
6,909
def get_episode ( self , episode_id ) : raw_response = requests_util . run_request ( 'get' , self . API_BASE_URL + '/episodes/%d' % episode_id , headers = self . __get_header_with_auth ( ) ) return self . parse_raw_response ( raw_response )
Returns the full information of the episode belonging to the Id provided .
6,910
def get_languages ( self ) : raw_response = requests_util . run_request ( 'get' , self . API_BASE_URL + '/languages' , headers = self . __get_header_with_auth ( ) ) return self . parse_raw_response ( raw_response )
Returns a list of all language options available in TheTVDB .
6,911
def get_language ( self , language_id ) : raw_response = requests_util . run_request ( 'get' , self . API_BASE_URL + '/languages/%d' % language_id , headers = self . __get_header_with_auth ( ) ) return self . parse_raw_response ( raw_response )
Retrieves information about the language of the given id .
6,912
def SetCredentials ( api_key , api_passwd ) : global V1_API_KEY global V1_API_PASSWD global _V1_ENABLED _V1_ENABLED = True V1_API_KEY = api_key V1_API_PASSWD = api_passwd
Establish API key and password associated with APIv1 commands .
6,913
def normalize ( A , axis = None , inplace = False ) : if not inplace : A = A . copy ( ) A += np . finfo ( float ) . eps Asum = A . sum ( axis ) if axis and A . ndim > 1 : Asum [ Asum == 0 ] = 1 shape = list ( A . shape ) shape [ axis ] = 1 Asum . shape = shape A /= Asum return A
Normalize the input array so that it sums to 1 .
6,914
def ph2full ( ptrans , htrans ) : n_pstates = len ( ptrans ) n_hstates = len ( htrans [ 0 , 0 ] ) N = n_pstates * n_hstates trans = np . zeros ( ( N , N ) ) for pidx in range ( n_pstates ) : for hidx in range ( n_hstates ) : trans [ pidx * n_hstates + hidx ] = ( ptrans [ pidx , : , np . newaxis ] * htrans [ pidx , : , hidx ] ) . flatten ( ) return trans
Convert a p - state transition matrix and h - state matrices to the full transation matrix
6,915
def full2ph ( trans , n_pstates ) : n_hstates = len ( trans ) / n_pstates htrans = np . zeros ( ( n_pstates , n_pstates , n_hstates , n_hstates ) ) for pidx1 , pidx2 in product ( range ( n_pstates ) , range ( n_pstates ) ) : idx1 = pidx1 * n_hstates idx2 = pidx2 * n_hstates htrans [ pidx1 , pidx2 ] = trans [ idx1 : idx1 + n_hstates , idx2 : idx2 + n_hstates ] ptrans = normalize ( htrans . sum ( axis = - 1 ) . sum ( axis = - 1 ) , axis = 1 ) htrans = normalize ( htrans , axis = 3 ) return ptrans , htrans
Convert a full transmat to the respective p - state and h - state transmats
6,916
def gen_stochastic_matrix ( size , random_state = None ) : if not type ( size ) is tuple : size = ( 1 , size ) assert len ( size ) == 2 n = random_state . uniform ( size = ( size [ 0 ] , size [ 1 ] - 1 ) ) n = np . concatenate ( [ np . zeros ( ( size [ 0 ] , 1 ) ) , n , np . ones ( ( size [ 0 ] , 1 ) ) ] , axis = 1 ) A = np . diff ( np . sort ( n ) ) return A . squeeze ( )
Generate a unfiformly - random stochastic array or matrix
6,917
def steadystate ( A , max_iter = 100 ) : P = np . linalg . matrix_power ( A , max_iter ) v = [ ] for i in range ( len ( P ) ) : if not np . any ( [ np . allclose ( P [ i ] , vi , ) for vi in v ] ) : v . append ( P [ i ] ) return normalize ( np . sum ( v , axis = 0 ) )
Empirically determine the steady state probabilities from a stochastic matrix
6,918
def pipe ( ) : r , w = os . pipe ( ) return File . fromfd ( r , 'rb' ) , File . fromfd ( w , 'wb' )
create an inter - process communication pipe
6,919
def get_id_head ( self ) : id_head = None for target_node in self : if target_node . is_head ( ) : id_head = target_node . get_id ( ) break return id_head
Returns the id of the target that is set as head
6,920
def add_target_id ( self , this_id ) : new_target = Ctarget ( ) new_target . set_id ( this_id ) self . node . append ( new_target . get_node ( ) )
Adds a new target to the span with the specified id
6,921
def create_from_ids ( self , list_ids ) : for this_id in list_ids : new_target = Ctarget ( ) new_target . set_id ( this_id ) self . node . append ( new_target . get_node ( ) )
Adds new targets to the span with the specified ids
6,922
def create_from_targets ( self , list_targs ) : for this_target in list_targs : self . node . append ( this_target . get_node ( ) )
Adds new targets to the span that are defined in a list
6,923
def get_statement ( self , statement_id ) : if statement_id in self . idx : return Cstatement ( self . idx [ statement_id ] , self . type ) else : return None
Returns the statement object for the supplied identifier
6,924
def add_statement ( self , statement_obj ) : if statement_obj . get_id ( ) in self . idx : raise ValueError ( "Statement with id {} already exists!" . format ( statement_obj . get_id ( ) ) ) self . node . append ( statement_obj . get_node ( ) ) self . idx [ statement_obj . get_id ( ) ] = statement_obj
Adds a statement object to the layer
6,925
def RootGroup ( self ) : return ( clc . v2 . Group ( id = self . root_group_id , alias = self . alias , session = self . session ) )
Returns group object for datacenter root group .
6,926
def LL ( n ) : if ( n <= 0 ) : return Context ( '0' ) else : LL1 = LL ( n - 1 ) r1 = C1 ( 3 ** ( n - 1 ) , 2 ** ( n - 1 ) ) - LL1 - LL1 r2 = LL1 - LL1 - LL1 return r1 + r2
constructs the LL context
6,927
def HH ( n ) : if ( n <= 0 ) : return Context ( '1' ) else : LL1 = LL ( n - 1 ) HH1 = HH ( n - 1 ) r1 = C1 ( 3 ** ( n - 1 ) , 2 ** ( n - 1 ) ) - LL1 - HH1 r2 = HH1 - HH1 - HH1 return r1 + r2
constructs the HH context
6,928
def AA ( n ) : if ( n <= 1 ) : return Context ( '10\n00' ) else : AA1 = AA ( n - 1 ) r1 = C1 ( 2 ** ( n - 1 ) , 2 ** ( n - 1 ) ) - AA1 r2 = AA1 - AA1 return r1 + r2
constructs the AA context
6,929
def BB ( n ) : if ( n <= 1 ) : return Context ( '0\n1' ) else : BB1 = BB ( n - 1 ) AA1 = AA ( n - 1 ) r1 = C1 ( ( n - 1 ) * 2 ** ( n - 2 ) , 2 ** ( n - 1 ) ) - AA1 - BB1 r2 = BB1 - C1 ( 2 ** ( n - 1 ) , 2 ** ( n - 1 ) ) - BB1 return r1 + r2
constructs the BB context
6,930
def processAndSetDefaults ( self ) : if not self . input : raise ValueError ( NO_INPUT_FILE ) if not self . output : if not self . build_directory : File ( ) pass else : pass if not self . build_directory : pass for dependency in self . given_dependencies : pass if self . output_format != self . output . getType ( ) : raise ValueError ( "" ) return
The heart of the Instruction object . This method will make sure that all fields not entered will be defaulted to a correct value . Also checks for incongruities in the data entered if it was by the user .
6,931
def greenlet ( func , args = ( ) , kwargs = None ) : if args or kwargs : def target ( ) : return func ( * args , ** ( kwargs or { } ) ) else : target = func return compat . greenlet ( target , state . mainloop )
create a new greenlet from a function and arguments
6,932
def schedule ( target = None , args = ( ) , kwargs = None ) : if target is None : def decorator ( target ) : return schedule ( target , args = args , kwargs = kwargs ) return decorator if isinstance ( target , compat . greenlet ) or target is compat . main_greenlet : glet = target else : glet = greenlet ( target , args , kwargs ) state . paused . append ( glet ) return target
insert a greenlet into the scheduler
6,933
def schedule_at ( unixtime , target = None , args = ( ) , kwargs = None ) : if target is None : def decorator ( target ) : return schedule_at ( unixtime , target , args = args , kwargs = kwargs ) return decorator if isinstance ( target , compat . greenlet ) or target is compat . main_greenlet : glet = target else : glet = greenlet ( target , args , kwargs ) state . timed_paused . insert ( unixtime , glet ) return target
insert a greenlet into the scheduler to be run at a set time
6,934
def schedule_in ( secs , target = None , args = ( ) , kwargs = None ) : return schedule_at ( time . time ( ) + secs , target , args , kwargs )
insert a greenlet into the scheduler to run after a set time
6,935
def schedule_recurring ( interval , target = None , maxtimes = 0 , starting_at = 0 , args = ( ) , kwargs = None ) : starting_at = starting_at or time . time ( ) if target is None : def decorator ( target ) : return schedule_recurring ( interval , target , maxtimes , starting_at , args , kwargs ) return decorator func = target if isinstance ( target , compat . greenlet ) or target is compat . main_greenlet : if target . dead : raise TypeError ( "can't schedule a dead greenlet" ) func = target . run def run_and_schedule_one ( tstamp , count ) : if not maxtimes or count < maxtimes : tstamp += interval func ( * args , ** ( kwargs or { } ) ) schedule_at ( tstamp , run_and_schedule_one , args = ( tstamp , count + 1 ) ) firstrun = starting_at + interval schedule_at ( firstrun , run_and_schedule_one , args = ( firstrun , 0 ) ) return target
insert a greenlet into the scheduler to run regularly at an interval
6,936
def schedule_exception ( exception , target ) : if not isinstance ( target , compat . greenlet ) : raise TypeError ( "can only schedule exceptions for greenlets" ) if target . dead : raise ValueError ( "can't send exceptions to a dead greenlet" ) schedule ( target ) state . to_raise [ target ] = exception
schedule a greenlet to have an exception raised in it immediately
6,937
def schedule_exception_at ( unixtime , exception , target ) : if not isinstance ( target , compat . greenlet ) : raise TypeError ( "can only schedule exceptions for greenlets" ) if target . dead : raise ValueError ( "can't send exceptions to a dead greenlet" ) schedule_at ( unixtime , target ) state . to_raise [ target ] = exception
schedule a greenlet to have an exception raised at a unix timestamp
6,938
def schedule_exception_in ( secs , exception , target ) : schedule_exception_at ( time . time ( ) + secs , exception , target )
schedule a greenlet receive an exception after a number of seconds
6,939
def end ( target ) : if not isinstance ( target , compat . greenlet ) : raise TypeError ( "argument must be a greenlet" ) if not target . dead : schedule ( target ) state . to_raise [ target ] = compat . GreenletExit ( )
schedule a greenlet to be stopped immediately
6,940
def handle_exception ( klass , exc , tb , coro = None ) : if coro is None : coro = compat . getcurrent ( ) replacement = [ ] for weak in state . local_exception_handlers . get ( coro , ( ) ) : func = weak ( ) if func is None : continue try : func ( klass , exc , tb ) except Exception : continue replacement . append ( weak ) if replacement : state . local_exception_handlers [ coro ] [ : ] = replacement replacement = [ ] for weak in state . global_exception_handlers : func = weak ( ) if func is None : continue try : func ( klass , exc , tb ) except Exception : continue replacement . append ( weak ) state . global_exception_handlers [ : ] = replacement
run all the registered exception handlers
6,941
def global_exception_handler ( handler ) : if not hasattr ( handler , "__call__" ) : raise TypeError ( "exception handlers must be callable" ) log . info ( "setting a new global exception handler" ) state . global_exception_handlers . append ( weakref . ref ( handler ) ) return handler
add a callback for when an exception goes uncaught in any greenlet
6,942
def remove_global_exception_handler ( handler ) : for i , cb in enumerate ( state . global_exception_handlers ) : cb = cb ( ) if cb is not None and cb is handler : state . global_exception_handlers . pop ( i ) log . info ( "removing a global exception handler" ) return True return False
remove a callback from the list of global exception handlers
6,943
def local_exception_handler ( handler = None , coro = None ) : if handler is None : return lambda h : local_exception_handler ( h , coro ) if not hasattr ( handler , "__call__" ) : raise TypeError ( "exception handlers must be callable" ) if coro is None : coro = compat . getcurrent ( ) log . info ( "setting a new coroutine local exception handler" ) state . local_exception_handlers . setdefault ( coro , [ ] ) . append ( weakref . ref ( handler ) ) return handler
add a callback for when an exception occurs in a particular greenlet
6,944
def remove_local_exception_handler ( handler , coro = None ) : if coro is None : coro = compat . getcurrent ( ) for i , cb in enumerate ( state . local_exception_handlers . get ( coro , [ ] ) ) : cb = cb ( ) if cb is not None and cb is handler : state . local_exception_handlers [ coro ] . pop ( i ) log . info ( "removing a coroutine local exception handler" ) return True return False
remove a callback from the list of exception handlers for a coroutine
6,945
def global_hook ( handler ) : if not hasattr ( handler , "__call__" ) : raise TypeError ( "trace hooks must be callable" ) log . info ( "setting a new global hook callback" ) state . global_hooks . append ( weakref . ref ( handler ) ) return handler
add a callback to run in every switch between coroutines
6,946
def remove_global_hook ( handler ) : for i , cb in enumerate ( state . global_hooks ) : cb = cb ( ) if cb is not None and cb is handler : state . global_hooks . pop ( i ) log . info ( "removing a global hook callback" ) return True return False
remove a callback from the list of global hooks
6,947
def local_incoming_hook ( handler = None , coro = None ) : if handler is None : return lambda h : local_incoming_hook ( h , coro ) if not hasattr ( handler , "__call__" ) : raise TypeError ( "trace hooks must be callable" ) if coro is None : coro = compat . getcurrent ( ) log . info ( "setting a coroutine incoming local hook callback" ) state . local_to_hooks . setdefault ( coro , [ ] ) . append ( weakref . ref ( handler ) ) return handler
add a callback to run every time a greenlet is about to be switched to
6,948
def remove_local_incoming_hook ( handler , coro = None ) : if coro is None : coro = compat . getcurrent ( ) for i , cb in enumerate ( state . local_to_hooks . get ( coro , [ ] ) ) : cb = cb ( ) if cb is not None and cb is handler : log . info ( "removing a coroutine incoming local hook callback" ) state . local_to_hooks [ coro ] . pop ( i ) return True return False
remove a callback from the incoming hooks for a particular coro
6,949
def local_outgoing_hook ( handler = None , coro = None ) : if handler is None : return lambda h : local_outgoing_hook ( h , coro ) if not hasattr ( handler , "__call__" ) : raise TypeError ( "trace hooks must be callable" ) if coro is None : coro = compat . getcurrent ( ) log . info ( "setting a coroutine local outgoing hook callback" ) state . local_from_hooks . setdefault ( coro , [ ] ) . append ( weakref . ref ( handler ) ) return handler
add a callback to run every time a greenlet is switched away from
6,950
def remove_local_outgoing_hook ( handler , coro = None ) : if coro is None : coro = compat . getcurrent ( ) for i , cb in enumerate ( state . local_from_hooks . get ( coro , [ ] ) ) : cb = cb ( ) if cb is not None and cb is handler : log . info ( "removing a coroutine outgoing local hook callback" ) state . local_from_hooks [ coro ] . pop ( i ) return True return False
remove a callback from the outgoing hooks for a particular coro
6,951
def set_ignore_interrupts ( flag = True ) : log . info ( "setting ignore_interrupts to %r" % flag ) state . ignore_interrupts = bool ( flag )
turn off EINTR - raising from emulated syscalls on interruption by signals
6,952
def reset_poller ( poll = None ) : state . poller = poll or poller . best ( ) log . info ( "resetting fd poller, using %s" % type ( state . poller ) . __name__ )
replace the scheduler s poller throwing away any pre - existing state
6,953
def find_resource ( r , * , pkg = 'cyther' ) : file_path = pkg_resources . resource_filename ( pkg , os . path . join ( 'test' , r ) ) if not os . path . isfile ( file_path ) : msg = "Resource '{}' does not exist" raise FileNotFoundError ( msg . format ( file_path ) ) return file_path
Finds a given cyther resource in the test subdirectory in cyther package
6,954
def assert_output ( output , assert_equal ) : sorted_output = sorted ( output ) sorted_assert = sorted ( assert_equal ) if sorted_output != sorted_assert : raise ValueError ( ASSERT_ERROR . format ( sorted_output , sorted_assert ) )
Check that two outputs have the same contents as one another even if they aren t sorted yet
6,955
def write_dict_to_file ( file_path , obj ) : lines = [ ] for key , value in obj . items ( ) : lines . append ( key + ':' + repr ( value ) + '\n' ) with open ( file_path , 'w+' ) as file : file . writelines ( lines ) return None
Write a dictionary of string keys to a file
6,956
def read_dict_from_file ( file_path ) : with open ( file_path ) as file : lines = file . read ( ) . splitlines ( ) obj = { } for line in lines : key , value = line . split ( ':' , maxsplit = 1 ) obj [ key ] = eval ( value ) return obj
Read a dictionary of strings from a file
6,957
def get_input ( prompt , check , * , redo_prompt = None , repeat_prompt = False ) : if isinstance ( check , str ) : check = ( check , ) to_join = [ ] for item in check : if item : to_join . append ( str ( item ) ) else : to_join . append ( "''" ) prompt += " [{}]: " . format ( '/' . join ( to_join ) ) if repeat_prompt : redo_prompt = prompt elif not redo_prompt : redo_prompt = "Incorrect input, please choose from {}: " "" . format ( str ( check ) ) if callable ( check ) : def _checker ( r ) : return check ( r ) elif isinstance ( check , tuple ) : def _checker ( r ) : return r in check else : raise ValueError ( RESPONSES_ERROR . format ( type ( check ) ) ) response = input ( prompt ) while not _checker ( response ) : print ( response , type ( response ) ) response = input ( redo_prompt if redo_prompt else prompt ) return response
Ask the user to input something on the terminal level check their response and ask again if they didn t answer correctly
6,958
def get_choice ( prompt , choices ) : print ( ) checker = [ ] for offset , choice in enumerate ( choices ) : number = offset + 1 print ( "\t{}): '{}'\n" . format ( number , choice ) ) checker . append ( str ( number ) ) response = get_input ( prompt , tuple ( checker ) + ( '' , ) ) if not response : print ( "Exiting..." ) exit ( ) offset = int ( response ) - 1 selected = choices [ offset ] return selected
Asks for a single choice out of multiple items . Given those items and a prompt to ask the user with
6,959
def generateBatches ( tasks , givens ) : _removeGivensFromTasks ( tasks , givens ) batches = [ ] while tasks : batch = set ( ) for task , dependencies in tasks . items ( ) : if not dependencies : batch . add ( task ) if not batch : _batchErrorProcessing ( tasks ) for task in batch : del tasks [ task ] for task , dependencies in tasks . items ( ) : for item in batch : if item in dependencies : tasks [ task ] . remove ( item ) batches . append ( batch ) return batches
A function to generate a batch of commands to run in a specific order as to meet all the dependencies for each command . For example the commands with no dependencies are run first and the commands with the most deep dependencies are run last
6,960
def Get ( self , key ) : for group in self . groups : if group . id . lower ( ) == key . lower ( ) : return ( group ) elif group . name . lower ( ) == key . lower ( ) : return ( group ) elif group . description . lower ( ) == key . lower ( ) : return ( group ) raise ( clc . CLCException ( "Group not found" ) )
Get group by providing name ID description or other unique key .
6,961
def Search ( self , key ) : results = [ ] for group in self . groups : if group . id . lower ( ) . find ( key . lower ( ) ) != - 1 : results . append ( group ) elif group . name . lower ( ) . find ( key . lower ( ) ) != - 1 : results . append ( group ) elif group . description . lower ( ) . find ( key . lower ( ) ) != - 1 : results . append ( group ) return ( results )
Search group list by providing partial name ID description or other key .
6,962
def GetAll ( root_group_id , alias = None , session = None ) : if not alias : alias = clc . v2 . Account . GetAlias ( session = session ) groups = [ ] for r in clc . v2 . API . Call ( 'GET' , 'groups/%s/%s' % ( alias , root_group_id ) , { } , session = session ) [ 'groups' ] : groups . append ( Group ( id = r [ 'id' ] , alias = alias , group_obj = r , session = session ) ) return ( groups )
Gets a list of groups within a given account .
6,963
def Refresh ( self ) : self . dirty = False self . data = clc . v2 . API . Call ( 'GET' , 'groups/%s/%s' % ( self . alias , self . id ) , session = self . session ) self . data [ 'changeInfo' ] [ 'createdDate' ] = clc . v2 . time_utils . ZuluTSToSeconds ( self . data [ 'changeInfo' ] [ 'createdDate' ] ) self . data [ 'changeInfo' ] [ 'modifiedDate' ] = clc . v2 . time_utils . ZuluTSToSeconds ( self . data [ 'changeInfo' ] [ 'modifiedDate' ] )
Reloads the group object to synchronize with cloud representation .
6,964
def Defaults ( self , key ) : if not hasattr ( self , 'defaults' ) : self . defaults = clc . v2 . API . Call ( 'GET' , 'groups/%s/%s/defaults' % ( self . alias , self . id ) , session = self . session ) try : return ( self . defaults [ key ] [ 'value' ] ) except : return ( None )
Returns default configurations for resources deployed to this group .
6,965
def Subgroups ( self ) : return ( Groups ( alias = self . alias , groups_lst = self . data [ 'groups' ] , session = self . session ) )
Returns a Groups object containing all child groups .
6,966
def Servers ( self ) : return ( clc . v2 . Servers ( alias = self . alias , servers_lst = [ obj [ 'id' ] for obj in self . data [ 'links' ] if obj [ 'rel' ] == 'server' ] , session = self . session ) )
Returns a Servers object containing all servers within the group .
6,967
def List ( type = 'All' ) : r = clc . v1 . API . Call ( 'post' , 'Queue/ListQueueRequests' , { 'ItemStatusType' : Queue . item_status_type_map [ type ] } ) if int ( r [ 'StatusCode' ] ) == 0 : return ( r [ 'Requests' ] )
List of Queued requests and their current status details .
6,968
def _Load ( self , location ) : for network in clc . v2 . API . Call ( 'GET' , '/v2-experimental/networks/%s/%s' % ( self . alias , location ) , { } , session = self . session ) : self . networks . append ( Network ( id = network [ 'id' ] , alias = self . alias , network_obj = network , session = self . session ) )
Load all networks associated with the given location .
6,969
def Get ( self , key ) : for network in self . networks : try : if network . id == key : return ( network ) if network . name == key : return ( network ) if network . cidr == key : return ( network ) except : pass
Get network by providing name ID or other unique key .
6,970
def Create ( alias = None , location = None , session = None ) : if not alias : alias = clc . v2 . Account . GetAlias ( session = session ) if not location : location = clc . v2 . Account . GetLocation ( session = session ) return clc . v2 . Requests ( clc . v2 . API . Call ( 'POST' , '/v2-experimental/networks/%s/%s/claim' % ( alias , location ) , session = session ) , alias = alias , session = session )
Claims a new network within a given account .
6,971
def Delete ( self , location = None ) : if not location : location = clc . v2 . Account . GetLocation ( session = self . session ) return clc . v2 . API . Call ( 'POST' , '/v2-experimental/networks/%s/%s/%s/release' % ( self . alias , location , self . id ) , session = self . session )
Releases the calling network .
6,972
def Update ( self , name , description = None , location = None ) : if not location : location = clc . v2 . Account . GetLocation ( session = self . session ) payload = { 'name' : name } payload [ 'description' ] = description if description else self . description r = clc . v2 . API . Call ( 'PUT' , '/v2-experimental/networks/%s/%s/%s' % ( self . alias , location , self . id ) , payload , session = self . session ) self . name = self . data [ 'name' ] = name if description : self . data [ 'description' ] = description
Updates the attributes of a given Network via PUT .
6,973
def Refresh ( self , location = None ) : if not location : location = clc . v2 . Account . GetLocation ( session = self . session ) new_object = clc . v2 . API . Call ( 'GET' , '/v2-experimental/networks/%s/%s/%s' % ( self . alias , location , self . id ) , session = self . session ) if new_object : self . name = new_object [ 'name' ] self . data = new_object
Reloads the network object to synchronize with cloud representation .
6,974
def install_build_requires ( pkg_targets ) : def pip_install ( pkg_name , pkg_vers = None ) : pkg_name_version = '%s==%s' % ( pkg_name , pkg_vers ) if pkg_vers else pkg_name print '[WARNING] %s not found, attempting to install using a raw "pip install" call!' % pkg_name_version subprocess . Popen ( 'pip install %s' % pkg_name_version , shell = True ) . communicate ( ) def get_pkg_info ( pkg ) : pkg_name , pkg_vers = None , None if '==' in pkg : pkg_name , pkg_vers = pkg . split ( '==' ) else : pkg_name = pkg . replace ( '>' , '' ) . replace ( '<' , '' ) . split ( '=' ) [ 0 ] return pkg_name , pkg_vers for pkg in pkg_targets : pkg_name , pkg_vers = get_pkg_info ( pkg ) try : pkg_name_version = '%s==%s' % ( pkg_name , pkg_vers ) if pkg_vers else pkg_name if pkg_vers : version = getattr ( importlib . import_module ( pkg_name ) , '__version__' ) if version != pkg_vers : pip_install ( pkg_name , pkg_vers ) else : importlib . import_module ( pkg_name ) except ImportError : pip_install ( pkg_name , pkg_vers )
Iterate through build_requires list and pip install if package is not present accounting for version
6,975
def initiateCompilation ( args , file ) : commands = makeCommands ( 0 , file ) if not args [ 'concise' ] and args [ 'print_args' ] : print_commands = bool ( args [ 'watch' ] ) response = multiCall ( * commands , print_commands = print_commands ) return response
Starts the entire compilation procedure
6,976
def run ( path , timer = False , repeat = 3 , number = 10000 , precision = 2 ) : code = extractAtCyther ( path ) if not code : output = "There was no '@cyther' code collected from the " "file '{}'\n" . format ( path ) return { 'returncode' : 0 , 'output' : output } module_directory = os . path . dirname ( path ) module_name = os . path . splitext ( os . path . basename ( path ) ) [ 0 ] setup_string = SETUP_TEMPLATE . format ( module_directory , module_name , '{}' ) if timer : string = TIMER_TEMPLATE . format ( setup_string , code , repeat , number , precision , '{}' ) else : string = setup_string + code script = os . path . join ( os . path . dirname ( __file__ ) , 'script.py' ) with open ( script , 'w+' ) as file : file . write ( string ) response = call ( [ 'python' , script ] ) return response
Extracts and runs the
6,977
def core ( args ) : args = furtherArgsProcessing ( args ) numfiles = len ( args [ 'filenames' ] ) interval = INTERVAL / numfiles files = processFiles ( args ) while True : for file in files : cytherize ( args , file ) if not args [ 'watch' ] : break else : time . sleep ( interval )
The heart of Cyther this function controls the main loop and can be used to perform any Cyther action . You can call if using Cyther from the module level
6,978
def set_timestamp ( self , timestamp = None ) : if timestamp is None : import time timestamp = time . strftime ( '%Y-%m-%dT%H:%M:%S%Z' ) self . node . set ( 'timestamp' , timestamp )
Set the timestamp of the linguistic processor set to None for the current time
6,979
def set_beginTimestamp ( self , btimestamp = None ) : if btimestamp is None : import time btimestamp = time . strftime ( '%Y-%m-%dT%H:%M:%S%Z' ) self . node . set ( 'beginTimestamp' , btimestamp )
Set the begin timestamp of the linguistic processor set to None for the current time
6,980
def set_endTimestamp ( self , etimestamp = None ) : if etimestamp is None : import time etimestamp = time . strftime ( '%Y-%m-%dT%H:%M:%S%Z' ) self . node . set ( 'endTimestamp' , etimestamp )
Set the end timestamp of the linguistic processor set to None for the current time
6,981
def set_publicId ( self , publicId ) : publicObj = self . get_public ( ) if publicObj is not None : publicObj . set_publicid ( publicId ) else : publicObj = Cpublic ( ) publicObj . set_publicid ( publicId ) self . set_public ( publicObj )
Sets the publicId to the public object
6,982
def set_uri ( self , uri ) : publicObj = self . get_public ( ) if publicObj is not None : publicObj . set_uri ( uri ) else : publicObj = Cpublic ( ) publicObj . set_uri ( uri ) self . set_public ( publicObj )
Sets the uri to the public object
6,983
def remove_lp ( self , layer ) : for this_node in self . node . findall ( 'linguisticProcessors' ) : if this_node . get ( 'layer' ) == layer : self . node . remove ( this_node ) break
Removes the linguistic processors for a given layer
6,984
def add_linguistic_processor ( self , layer , my_lp ) : found_lp_obj = None for this_lp in self . node . findall ( 'linguisticProcessors' ) : lp_obj = ClinguisticProcessors ( this_lp ) if lp_obj . get_layer ( ) == layer : found_lp_obj = lp_obj break if found_lp_obj is None : found_lp_obj = ClinguisticProcessors ( ) found_lp_obj . set_layer ( layer ) self . add_linguistic_processors ( found_lp_obj ) found_lp_obj . add_linguistic_processor ( my_lp )
Adds a linguistic processor to a certain layer
6,985
def get_fileDesc ( self ) : node = self . node . find ( 'fileDesc' ) if node is not None : return CfileDesc ( node = node ) else : return None
Returns the fileDesc object or None if there is no such element
6,986
def get_public ( self ) : node = self . node . find ( 'public' ) if node is not None : return Cpublic ( node = node ) else : return None
Returns the public object or None if there is no such element
6,987
def GetPackages ( classification , visibility ) : r = clc . v1 . API . Call ( 'post' , 'Blueprint/GetPackages' , { 'Classification' : Blueprint . classification_stoi [ classification ] , 'Visibility' : Blueprint . visibility_stoi [ visibility ] } ) if int ( r [ 'StatusCode' ] ) == 0 : return ( r [ 'Packages' ] )
Gets a list of Blueprint Packages filtered by classification and visibility .
6,988
def GetAllPackages ( classification ) : packages = [ ] for visibility in Blueprint . visibility_stoi . keys ( ) : try : for r in Blueprint . GetPackages ( classification , visibility ) : packages . append ( dict ( r . items ( ) + { 'Visibility' : visibility } . items ( ) ) ) except : pass if len ( packages ) : return ( packages )
Gets a list of all Blueprint Packages with a given classification .
6,989
def PackageUpload ( package , ftp_url ) : if not os . path . isfile ( package ) : clc . v1 . output . Status ( 'ERROR' , 2 , 'Package file (%s) not found' % ( package ) ) return m = re . search ( "ftp://(?P<user>.+?):(?P<passwd>.+?)@(?P<host>.+)" , ftp_url ) try : ftp = ftplib . FTP ( m . group ( 'host' ) , m . group ( 'user' ) , m . group ( 'passwd' ) ) file = open ( package , 'rb' ) filename = re . sub ( ".*/" , "" , package ) ftp . storbinary ( "STOR %s" % ( filename ) , file ) file . close ( ) ftp . quit ( ) clc . v1 . output . Status ( 'SUCCESS' , 2 , 'Blueprint package %s Uploaded' % ( filename ) ) except Exception as e : clc . v1 . output . Status ( 'ERROR' , 2 , 'FTP error %s: %s' % ( ftp_url , str ( e ) ) ) return ( { } )
Uploads specified zip package to cloud endpoint .
6,990
def PackagePublish ( package , classification , visibility , os ) : r = clc . v1 . API . Call ( 'post' , 'Blueprint/PublishPackage' , { 'Classification' : Blueprint . classification_stoi [ classification ] , 'Name' : package , 'OperatingSystems' : os , 'Visibility' : Blueprint . visibility_stoi [ visibility ] } ) if int ( r [ 'StatusCode' ] ) == 0 : return ( r )
Publishes a Blueprint Package for use within the Blueprint Designer .
6,991
def PackagePublishUI ( package , type , visibility ) : linux_lst = { 'L' : { 'selected' : False , 'Description' : 'All Linux' } } windows_lst = { 'W' : { 'selected' : False , 'Description' : 'All Windows' } } for r in clc . v1 . Server . GetTemplates ( ) : r [ 'selected' ] = False if re . search ( "Windows" , r [ 'Description' ] ) : windows_lst [ str ( r [ 'OperatingSystem' ] ) ] = r elif re . search ( "CentOS|RedHat|Ubuntu" , r [ 'Description' ] ) : linux_lst [ str ( r [ 'OperatingSystem' ] ) ] = r if os . name == 'posix' : scr = curses . initscr ( ) curses . cbreak ( ) while True : if os . name == 'posix' : c = Blueprint . _DrawPublishPackageUIPosix ( scr , linux_lst , windows_lst ) else : c = Blueprint . _DrawPublishPackageUI ( linux_lst , windows_lst ) if c . lower ( ) == 'q' : break elif c . lower ( ) == 'l' : for l in linux_lst : linux_lst [ l ] [ 'selected' ] = not linux_lst [ l ] [ 'selected' ] elif c . lower ( ) == 'w' : for l in windows_lst : windows_lst [ l ] [ 'selected' ] = not windows_lst [ l ] [ 'selected' ] elif c in linux_lst : linux_lst [ c ] [ 'selected' ] = not linux_lst [ c ] [ 'selected' ] elif c in windows_lst : windows_lst [ c ] [ 'selected' ] = not windows_lst [ c ] [ 'selected' ] if os . name == 'posix' : curses . nocbreak ( ) curses . echo ( ) curses . endwin ( ) ids = [ ] for l in dict ( linux_lst . items ( ) + windows_lst . items ( ) ) . values ( ) : if l [ 'selected' ] and 'OperatingSystem' in l : ids . append ( str ( l [ 'OperatingSystem' ] ) ) clc . v1 . output . Status ( 'SUCCESS' , 2 , 'Selected operating system IDs: %s' % ( " " . join ( ids ) ) ) return ( Blueprint . PackagePublish ( package , type , visibility , ids ) )
Publishes a Blueprint Package for use within the Blueprint Designer after interactive OS selection .
6,992
def getmembers ( self ) : if self . _members is None : self . _members = _members = [ ] g = self . data_file magic = g . read ( 2 ) while magic : if magic == b'07' : magic += g . read ( 4 ) member = RPMInfo . _read ( magic , g ) if member . name == 'TRAILER!!!' : break if not member . isdir : _members . append ( member ) magic = g . read ( 2 ) return _members return self . _members
Return the members of the archive as a list of RPMInfo objects . The list has the same order as the members in the archive .
6,993
def getmember ( self , name ) : members = self . getmembers ( ) for m in members [ : : - 1 ] : if m . name == name : return m raise KeyError ( "member %s could not be found" % name )
Return an RPMInfo object for member name . If name can not be found in the archive KeyError is raised . If a member occurs more than once in the archive its last occurrence is assumed to be the most up - to - date version .
6,994
def data_file ( self ) : if self . _data_file is None : fileobj = _SubFile ( self . _fileobj , self . data_offset ) if self . headers [ "archive_compression" ] == b"xz" : if not getattr ( sys . modules [ __name__ ] , 'lzma' , False ) : raise NoLZMAModuleError ( 'lzma module not present' ) self . _data_file = lzma . LZMAFile ( fileobj ) else : self . _data_file = gzip . GzipFile ( fileobj = fileobj ) return self . _data_file
Return the uncompressed raw CPIO data of the RPM archive .
6,995
def load_input ( definition ) : if isinstance ( definition , ( str , io . TextIOWrapper ) ) : try : definition = yaml . safe_load ( definition ) except Exception as exc : raise ParsingInputError ( "Unable to parse input: %s" % str ( exc ) ) return definition
Load and parse input if needed .
6,996
def any2sql ( func , definition_dict = None , ** definition_kwargs ) : if definition_dict and definition_kwargs : raise InputError ( "Cannot process dict and kwargs input at the same time" ) definition = load_input ( definition_dict or definition_kwargs ) if definition . get ( 'returning' , '' ) == '*' : definition [ 'returning' ] = mosql_raw ( '*' ) try : result = func ( ** definition ) except ( TypeError , AttributeError ) as exc : raise ClauseError ( "Clause definition error: %s" % str ( exc ) ) from exc except Exception as exc : import json2sql . errors as json2sql_errors if exc . __class__ . __name__ not in json2sql_errors . __dict__ . keys ( ) : raise json2sql_errors . Json2SqlInternalError ( "Unhandled error: %s" % str ( exc ) ) from exc raise return result
Handle general to SQL conversion .
6,997
def _expand_join ( join_definition ) : join_table_name = join_definition . pop ( 'table' ) join_func = getattr ( mosql_query , join_definition . pop ( 'join_type' , 'join' ) ) return join_func ( join_table_name , ** join_definition )
Expand join definition to join call .
6,998
def _construct_select_query ( ** filter_definition ) : table_name = filter_definition . pop ( 'table' ) distinct = filter_definition . pop ( 'distinct' , False ) select_count = filter_definition . pop ( 'count' , False ) if distinct and select_count : raise UnsupportedDefinitionError ( 'SELECT (DISTINCT ...) is not supported' ) if select_count and 'select' in filter_definition : raise UnsupportedDefinitionError ( 'SELECT COUNT(columns) is not supported' ) if 'joins' in filter_definition : join_definitions = filter_definition . pop ( 'joins' ) if not isinstance ( join_definitions , ( tuple , list ) ) : join_definitions = ( join_definitions , ) filter_definition [ 'joins' ] = [ ] for join_def in join_definitions : filter_definition [ 'joins' ] . append ( _expand_join ( join_def ) ) if 'where' in filter_definition : for key , value in filter_definition [ 'where' ] . items ( ) : if is_filter_query ( value ) : sub_query = value . pop ( DEFAULT_FILTER_KEY ) if value : raise ParsingInputError ( "Unknown keys for sub-query provided: %s" % value ) filter_definition [ 'where' ] [ key ] = mosql_raw ( '( {} )' . format ( _construct_select_query ( ** sub_query ) ) ) elif isinstance ( value , str ) and value . startswith ( '$' ) and QUERY_REFERENCE . fullmatch ( value [ 1 : ] ) : filter_definition [ 'where' ] [ key ] = mosql_raw ( '"{}"' . format ( '"."' . join ( value [ 1 : ] . split ( '.' ) ) ) ) raw_select = select ( table_name , ** filter_definition ) if distinct : raw_select = raw_select . replace ( 'SELECT' , 'SELECT DISTINCT' , 1 ) if select_count : raw_select = raw_select . replace ( 'SELECT *' , 'SELECT COUNT(*)' , 1 ) return raw_select
Return SELECT statement that will be used as a filter .
6,999
def connect ( self , address ) : address = _dns_resolve ( self , address ) with self . _registered ( 'we' ) : while 1 : err = self . _sock . connect_ex ( address ) if not self . _blocking or err not in _BLOCKING_OP : if err not in ( 0 , errno . EISCONN ) : raise socket . error ( err , errno . errorcode [ err ] ) return if self . _writable . wait ( self . gettimeout ( ) ) : raise socket . timeout ( "timed out" ) if scheduler . state . interrupted : raise IOError ( errno . EINTR , "interrupted system call" )
initiate a new connection to a remote socket bound to an address