| idx (int64, 0–63k) | question (string, length 53–5.28k) | target (string, length 5–805) |
|---|---|---|
52,400 | def exclude_sources ( exclude , section = False ) : newlist = sources . copy ( ) if not isinstance ( exclude , list ) : exclude = [ exclude ] for source in exclude : if not section : newlist . remove ( source ) else : pos = newlist . index ( source ) + 1 if pos == len ( sources ) : return [ ] newlist = sources [ pos : ... | Returns a narrower list of sources . |
52,401 | def get_lyrics ( song , l_sources = None ) : if l_sources is None : l_sources = sources if song . lyrics and not CONFIG [ 'overwrite' ] : logger . debug ( '%s already has embedded lyrics' , song ) return None runtimes = { } source = None for l_source in l_sources : start = time . time ( ) try : lyrics = l_source ( song... | Searches for lyrics of a single song and returns a Result object with the various stats collected in the process . |
52,402 | def get_lyrics_threaded ( song , l_sources = None ) : if l_sources is None : l_sources = sources if song . lyrics and not CONFIG [ 'overwrite' ] : logger . debug ( '%s already has embedded lyrics' , song ) return None runtimes = { } queue = Queue ( ) pool = [ LyrThread ( source , song , queue ) for source in l_sources ... | Launches a pool of threads to search for the lyrics of a single song . |
52,403 | def run ( songs ) : if not hasattr ( songs , '__iter__' ) : result = get_lyrics_threaded ( songs ) process_result ( result ) else : start = time . time ( ) stats = run_mp ( songs ) end = time . time ( ) if CONFIG [ 'print_stats' ] : stats . print_stats ( ) total_time = end - start total_time = '%d:%02d:%02d' % ( total_... | Calls get_lyrics_threaded for a song or list of songs . |
52,404 | def run_mp ( songs ) : stats = Stats ( ) if CONFIG [ 'debug' ] : good = open ( 'found' , 'w' ) bad = open ( 'notfound' , 'w' ) logger . debug ( 'Launching a pool of %d processes\n' , CONFIG [ 'jobcount' ] ) chunksize = math . ceil ( len ( songs ) / os . cpu_count ( ) ) try : with Pool ( CONFIG [ 'jobcount' ] ) as pool ... | Concurrently calls get_lyrics to fetch the lyrics of a large list of songs . |
52,405 | def parse_setup ( options : Union [ List , str ] ) -> str : if isinstance ( options , str ) : return options return "\n" . join ( options ) | Convert potentially a list of commands into a single string . |
52,406 | def create_scheduler_file ( scheduler : str , job : Job ) -> str : logger . debug ( "Create Scheduler File Function" ) if job . scheduler_options is None : scheduler_options : Dict [ str , Any ] = { } else : scheduler_options = deepcopy ( job . scheduler_options ) try : setup_string = parse_setup ( scheduler_options [ ... | Substitute values into a template scheduler file . |
52,407 | def range_type_to_dtype ( range_type : str ) -> Optional [ tf . DType ] : range2dtype = { 'real' : tf . float32 , 'int' : tf . int32 , 'bool' : tf . bool } return range2dtype [ range_type ] | Maps RDDL range types to TensorFlow dtypes . |
52,408 | def python_type_to_dtype ( python_type : type ) -> Optional [ tf . DType ] : dtype = None if python_type == float : dtype = tf . float32 elif python_type == int : dtype = tf . int32 elif python_type == bool : dtype = tf . bool return dtype | Maps python types to TensorFlow dtypes . |
52,409 | def get_dependency_type ( _type ) : if _type == DependencyTypes . AFTER : return 'after' elif _type == DependencyTypes . AFTER_ANY : return 'afterany' elif _type == DependencyTypes . AFTER_CORR : return 'aftercorr' elif _type == DependencyTypes . AFTER_NOT_OK : return 'afternotok' elif _type == DependencyTypes . AFTER_... | Get the dependency type string for SlurmPrinter |
52,410 | def generate ( self , job ) : options = job . get_options ( ) . copy ( ) job_name = options . pop ( 'name' , None ) job_account = options . pop ( 'account' , None ) job_walltime = options . pop ( 'walltime' , None ) job_mem_per_cpu = options . pop ( 'mem_per_cpu' , None ) job_memory = options . pop ( 'memory' , None ) ... | Generates a job submission script from a job object |
52,411 | def get_info_mpris2 ( name ) : bus_name = 'org.mpris.MediaPlayer2.' + name path = '/org/mpris/MediaPlayer2' interface = 'org.mpris.MediaPlayer2.Player' address = DBusAddress ( path , bus_name = bus_name , interface = interface ) msg = Properties ( address ) . get ( 'Metadata' ) connection = connect_and_authenticate ( )... | Get the current playing song from an mpris2 compliant player . |
52,412 | def get_current_clementine ( ) : try : return get_info_mpris2 ( 'clementine' ) except DBusErrorResponse : bus_name = 'org.mpris.clementine' path = '/Player' interface = 'org.freedesktop.MediaPlayer' return dbus_get_metadata ( path , bus_name , interface ) | Get the current song from clementine . |
52,413 | def get_current_cmus ( ) : result = subprocess . run ( 'cmus-remote -Q' . split ( ' ' ) , check = True , stdout = subprocess . PIPE , stderr = subprocess . DEVNULL ) info = { } for line in result . stdout . decode ( ) . split ( '\n' ) : line = line . split ( ' ' ) if line [ 0 ] != 'tag' : continue key = line [ 1 ] if k... | Get the current song from cmus . |
52,414 | def from_filename ( cls , filename ) : if not filename : logger . error ( 'No filename specified' ) return None if not os . path . exists ( filename ) : logger . error ( "Err: File '%s' does not exist" , filename ) return None if os . path . isdir ( filename ) : logger . error ( "Err: File '%s' is a directory" , filena... | Class constructor using the path to the corresponding mp3 file . The metadata will be read from this file to create the song object so it must at least contain valid ID3 tags for artist and title . |
52,415 | def fetch_album_name ( self ) : response = get_lastfm ( 'track.getInfo' , artist = self . artist , track = self . title ) if response : try : self . album = response [ 'track' ] [ 'album' ] [ 'title' ] logger . debug ( 'Found album %s from lastfm' , self . album ) except Exception : logger . warning ( 'Could not fetch ... | Get the name of the album from lastfm . |
52,416 | def _get_closest_week ( self , metric_date ) : days_after_monday = metric_date . isoweekday ( ) - 1 return metric_date - datetime . timedelta ( days = days_after_monday ) | Gets the closest monday to the date provided . |
52,417 | def _get_daily_date_range ( self , metric_date , delta ) : dates = [ metric_date ] start_date = metric_date end_date = metric_date + delta while start_date . month < end_date . month or start_date . year < end_date . year : days_in_month = calendar . monthrange ( start_date . year , start_date . month ) [ 1 ] start_dat... | Get the range of months that we need to use as keys to scan redis . |
52,418 | def _get_weekly_date_range ( self , metric_date , delta ) : dates = [ metric_date ] end_date = metric_date + delta spanning_years = end_date . year - metric_date . year for i in range ( spanning_years ) : dates . append ( datetime . date ( year = metric_date . year + ( i + 1 ) , month = 1 , day = 1 ) ) return dates | Gets the range of years that we need to use as keys to get metrics from redis . |
52,419 | def clear_all ( self ) : keys = self . _analytics_backend . keys ( ) for key in itertools . chain ( * keys ) : with self . _analytics_backend . map ( ) as conn : if key . startswith ( self . _prefix ) : conn . delete ( key ) | Deletes all sandsnake related data from redis . |
52,420 | def track_count ( self , unique_identifier , metric , inc_amt = 1 , ** kwargs ) : return self . _analytics_backend . incr ( self . _prefix + ":" + "analy:%s:count:%s" % ( unique_identifier , metric ) , inc_amt ) | Tracks a metric just by count . If you track a metric this way you won t be able to query the metric by day week or month . |
52,421 | def track_metric ( self , unique_identifier , metric , date = None , inc_amt = 1 , ** kwargs ) : metric = [ metric ] if isinstance ( metric , basestring ) else metric unique_identifier = [ unique_identifier ] if not isinstance ( unique_identifier , ( types . ListType , types . TupleType , types . GeneratorType , ) ) el... | Tracks a metric for a specific unique_identifier for a certain date . The redis backend supports lists for both unique_identifier and metric allowing for tracking of multiple metrics for multiple unique_identifiers efficiently . Not all backends may support this . |
52,422 | def get_metric_by_day ( self , unique_identifier , metric , from_date , limit = 30 , ** kwargs ) : conn = kwargs . get ( "connection" , None ) date_generator = ( from_date + datetime . timedelta ( days = i ) for i in itertools . count ( ) ) metric_key_date_range = self . _get_daily_date_range ( from_date , datetime . t... | Returns the metric for unique_identifier segmented by day starting from from_date |
52,423 | def get_metric_by_week ( self , unique_identifier , metric , from_date , limit = 10 , ** kwargs ) : conn = kwargs . get ( "connection" , None ) closest_monday_from_date = self . _get_closest_week ( from_date ) metric_key_date_range = self . _get_weekly_date_range ( closest_monday_from_date , datetime . timedelta ( week... | Returns the metric for unique_identifier segmented by week starting from from_date |
52,424 | def get_metric_by_month ( self , unique_identifier , metric , from_date , limit = 10 , ** kwargs ) : conn = kwargs . get ( "connection" , None ) first_of_month = datetime . date ( year = from_date . year , month = from_date . month , day = 1 ) metric_key_date_range = self . _get_weekly_date_range ( first_of_month , rel... | Returns the metric for unique_identifier segmented by month starting from from_date . It will retrieve metrics data starting from the 1st of the month specified in from_date |
52,425 | def get_count ( self , unique_identifier , metric , start_date = None , end_date = None , ** kwargs ) : result = None if start_date and end_date : start_date , end_date = ( start_date , end_date , ) if start_date < end_date else ( end_date , start_date , ) start_date = start_date if hasattr ( start_date , 'date' ) else... | Gets the count for the metric for unique_identifier . You can specify a start_date and an end_date to only get metrics within that time range . |
52,426 | def set_metric_by_day ( self , unique_identifier , metric , date , count , sync_agg = True , update_counter = True ) : metric = [ metric ] if isinstance ( metric , basestring ) else metric unique_identifier = [ unique_identifier ] if not isinstance ( unique_identifier , ( types . ListType , types . TupleType , types . ... | Sets the count for the metric for unique_identifier . You must specify a date for the count to be set on . Useful for resetting a metric count to 0 or decrementing a metric . |
52,427 | def sync_agg_metric ( self , unique_identifier , metric , start_date , end_date ) : self . sync_week_metric ( unique_identifier , metric , start_date , end_date ) self . sync_month_metric ( unique_identifier , metric , start_date , end_date ) | Uses the count for each day in the date range to recalculate the counters for the associated weeks and months for the metric for unique_identifier . Useful for updating the counters for week and month after using set_metric_by_day . |
52,428 | def sync_week_metric ( self , unique_identifier , metric , start_date , end_date ) : metric = [ metric ] if isinstance ( metric , basestring ) else metric unique_identifier = [ unique_identifier ] if not isinstance ( unique_identifier , ( types . ListType , types . TupleType , types . GeneratorType , ) ) else unique_id... | Uses the count for each day in the date range to recalculate the counters for the weeks for the metric for unique_identifier . Useful for updating the counters for week and month after using set_metric_by_day . |
52,429 | def sync_month_metric ( self , unique_identifier , metric , start_date , end_date ) : metric = [ metric ] if isinstance ( metric , basestring ) else metric unique_identifier = [ unique_identifier ] if not isinstance ( unique_identifier , ( types . ListType , types . TupleType , types . GeneratorType , ) ) else unique_i... | Uses the count for each day in the date range to recalculate the counters for the months for the metric for unique_identifier . Useful for updating the counters for week and month after using set_metric_by_day . |
52,430 | def is_full_mxid ( user_string ) : if not user_string [ 0 ] == "@" : return False parts = user_string [ 1 : ] . split ( ":" ) localpart_chars = ascii_lowercase + digits + "._-=" if not ( len ( parts ) == 2 and all ( [ i in localpart_chars for i in parts [ 0 ] ] ) ) : return False return True | Returns True if a string is a valid mxid . |
52,431 | def intent ( method ) : def wrapper ( self , * args , ** kwargs ) : try : return method ( self , * args , ** kwargs ) except exceptions . MatrixError as e : if isinstance ( e . original_exception , matrix_client . errors . MatrixRequestError ) : self . _handle_request_exception ( e ) return method ( self , * args , ** ... | Helps object methods handle MatrixRequestError . |
52,432 | def get_variables ( self ) -> Set [ str ] : variables = set ( ) for cmd in self . _cmd : for var in self . __formatter . parse ( cmd ) : logger . debug ( "Checking variable: %s" , var ) if var [ 1 ] is not None and var [ 1 ] not in [ "creates" , "requires" ] : variables . add ( var [ 1 ] ) return variables | Find all the variables specified in a format string . |
52,433 | def as_bash_array ( self ) -> str : return_string = "( \\\n" for command in self : return_string += '"' + str ( command ) + '" \\\n' return_string += ")" return return_string | Return a representation as a bash array . |
52,434 | def combine_dictionaries ( dicts : List [ Dict [ str , Any ] ] ) -> Dict [ str , Any ] : return dict ( ChainMap ( * dicts ) ) | Merge a list of dictionaries into a single dictionary . |
52,435 | def iterator_zip ( variables : VarType , parent : str = None ) -> Iterable [ VarMatrix ] : logger . debug ( "Yielding from zip iterator" ) if isinstance ( variables , list ) : for item in variables : yield list ( variable_matrix ( item , parent , "zip" ) ) else : yield list ( variable_matrix ( variables , parent , "zip... | Apply the zip operator to a set of variables . |
52,436 | def iterator_product ( variables : VarType , parent : str = None ) -> Iterable [ VarMatrix ] : logger . debug ( "Yielding from product iterator" ) if isinstance ( variables , list ) : raise ValueError ( f"Product only takes mappings of values, got {variables} of type {type(variables)}" ) yield list ( variable_matrix ( ... | Apply the product operator to a set of variables . |
52,437 | def iterator_chain ( variables : VarType , parent : str = None ) -> Iterable [ VarMatrix ] : logger . debug ( "Yielding from append iterator" ) if not isinstance ( variables , list ) : raise ValueError ( f"Append keyword only takes a list of arguments, got {variables} of type {type(variables)}" ) yield list ( chain . f... | This successively appends each element of an array to a single list of values . |
52,438 | def iterator_cycle ( variables : VarType , parent : str ) -> Iterable [ VarMatrix ] : if isinstance ( variables , dict ) : if variables . get ( "times" ) : times = int ( variables [ "times" ] ) del variables [ "times" ] yield list ( variable_matrix ( variables , parent , "product" ) ) * times else : raise ValueError ( ... | Cycle through a list of values a specified number of times |
52,439 | def variable_matrix ( variables : VarType , parent : str = None , iterator : str = "product" ) -> Iterable [ Dict [ str , YamlValue ] ] : _iters : Dict [ str , Callable ] = { "product" : product , "zip" : zip } _special_keys : Dict [ str , Callable [ [ VarType , Any ] , Iterable [ VarMatrix ] ] ] = { "zip" : iterator_z... | Process the variables into a list of the appropriate combinations . |
52,440 | def uniqueify ( my_list : Any ) -> List [ Any ] : if sys . version_info >= ( 3 , 6 ) : return list ( dict . fromkeys ( my_list ) ) seen = set ( ) return [ x for x in my_list if x not in seen and not seen . add ( x ) ] | Remove duplicate entries in a list retaining order . |
52,441 | def process_command ( command : CommandInput , matrix : VarMatrix ) -> List [ Command ] : assert command is not None if isinstance ( command , str ) : command_list = [ Command ( command , variables = variables ) for variables in matrix ] elif isinstance ( command , list ) : command_list = [ Command ( command , variable... | Generate all combinations of commands given a variable matrix . |
52,442 | def read_file ( filename : PathLike = "experiment.yml" ) -> Dict [ str , Any ] : logger . debug ( "Input file: %s" , filename ) with open ( filename , "r" ) as stream : structure = yaml . safe_load ( stream ) return structure | Read and parse yaml file . |
52,443 | def run_bash_jobs ( jobs : Iterator [ Job ] , directory : PathLike = Path . cwd ( ) , dry_run : bool = False ) -> None : logger . debug ( "Running commands in bash shell" ) for job in jobs : if shutil . which ( job . shell ) is None : raise ProcessLookupError ( f"The shell '{job.shell}' was not found." ) failed = False... | Submit commands to the bash shell . |
52,444 | def run_scheduler_jobs ( scheduler : str , jobs : Iterator [ Job ] , directory : PathLike = Path . cwd ( ) , basename : str = "experi" , dry_run : bool = False , ) -> None : submit_job = True logger . debug ( "Creating commands in %s files." , scheduler ) if scheduler == "pbs" : submit_executable = "qsub" elif schedule... | Submit a series of commands to a batch scheduler . |
52,445 | def determine_scheduler ( scheduler : Optional [ str ] , experiment_definition : Dict [ str , YamlValue ] ) -> str : if scheduler is not None : if scheduler in [ "shell" , "pbs" , "slurm" ] : return scheduler raise ValueError ( "Argument scheduler only supports input values of ['shell', 'pbs', 'slurm']" ) if experiment... | Determine the scheduler to use to run the jobs . |
52,446 | def safe_type ( self , data , tree ) : if not isinstance ( data , list ) : name = self . __class__ . __name__ msg = "did not pass validation against callable: %s" % name reason = 'expected a list but got %s' % safe_repr ( data ) raise Invalid ( self . schema , tree , reason = reason , pair = 'value' , msg = msg ) | Make sure that the incoming data complies with the class type we are expecting it to be . In this case classes that inherit from this base class expect data to be of type list . |
52,447 | def get_context ( context ) : new_context = { 'model' : context [ 'model' ] , 'session' : context [ 'session' ] , 'user' : context . get ( 'user' ) , 'ignore_auth' : context . get ( 'ignore_auth' , False ) , 'use_cache' : context . get ( 'use_cache' , False ) , } if 'validate' in context : new_context [ 'validate' ] = ... | An internal context generator . Accepts a CKAN context . |
52,448 | def re_sort ( data ) : keys = sorted ( data . keys ( ) ) new_data = { } for number , key in enumerate ( keys ) : new_data [ number ] = data [ key ] return new_data | A data with keys that are not enumerated sequentially will be re sorted and sequentially ordered . |
52,449 | def ensure ( assertion , message = None ) : message = message or assertion if not assertion : raise AssertionError ( message ) return True | Checks an assertion argument for truth - ness . Will return True or explicitly raise AssertionError . This is to deal with environments using python - O or PYTHONOPTIMIZE = . |
52,450 | def fluent_shape ( self ) -> Sequence [ int ] : return tuple ( self . _shape . as_list ( ) [ 1 : ] if self . _batch else self . _shape . as_list ( ) [ : ] ) | Returns a copy of the fluent shape ignoring batch size if in batch mode . |
52,451 | def broadcast ( cls , shape1 : 'TensorFluentShape' , shape2 : 'TensorFluentShape' ) -> Tuple [ Reshaping , Reshaping ] : reshape_1 , reshape_2 = None , None if not ( shape1 . _batch or shape2 . _batch ) : return reshape_1 , reshape_2 size_1 , size_2 = shape1 . fluent_size , shape2 . fluent_size size_diff = abs ( size_1... | It broadcasts the fluent shapes if any input is in batch mode . |
52,452 | def run ( bam , chrom , pos1 , pos2 , reffa , chr_reffa , parameters ) : is_chr_query = chrom . startswith ( 'chr' ) if is_chr_query and chr_reffa is None : chr_reffa = reffa bam_header = subprocess . check_output ( "samtools view -H {}" . format ( bam ) , shell = True ) is_chr_bam = bam_header . find ( 'SN:chr' ) != -... | Run mpileup on given chrom and pos |
52,453 | def parse_iso8601_date ( string ) : match = _RE_ISO8601_DATE . search ( string ) if not match : raise ValueError ( 'Expected ISO 8601 date' ) year = int ( match . group ( 'year' ) ) month = int ( match . group ( 'month' ) ) day = int ( match . group ( 'day' ) ) return date ( year , month , day ) | Parse an ISO 8601 date string |
52,454 | def import_submodules ( package , parent_package = None , exclude_submodules = None ) : exclude_submodules_dot = [ x + '.' for x in exclude_submodules ] if exclude_submodules else exclude_submodules package = importlib . import_module ( package , parent_package ) for _ , name , _ in pkgutil . walk_packages ( package . ... | Generator which imports all submodules of a module recursively including subpackages |
52,455 | def compile_initial_state ( self , batch_size : Optional [ int ] = None ) -> Sequence [ tf . Tensor ] : with self . graph . as_default ( ) : with tf . name_scope ( 'initial_state' ) : self . _initialize_initial_state_fluents ( ) if batch_size is None : return self . initial_state_fluents return self . _compile_batch_fl... | Returns a tuple of tensors representing the initial state fluents . |
52,456 | def compile_default_action ( self , batch_size : Optional [ int ] = None ) -> Sequence [ tf . Tensor ] : with self . graph . as_default ( ) : with tf . name_scope ( 'default_action' ) : self . _initialize_default_action_fluents ( ) if batch_size is None : return self . default_action_fluents return self . _compile_batc... | Returns a tuple of tensors representing the default action fluents . |
52,457 | def cpfs ( self , state : Sequence [ tf . Tensor ] , action : Sequence [ tf . Tensor ] , noise : Optional [ Noise ] = None ) -> Tuple [ List [ TensorFluent ] , List [ TensorFluent ] ] : scope = self . transition_scope ( state , action ) batch_size = int ( state [ 0 ] . shape [ 0 ] ) interm_fluents , next_state_fluents ... | Compiles the intermediate and next state fluent CPFs given the current state and action . |
52,458 | def reward ( self , state : Sequence [ tf . Tensor ] , action : Sequence [ tf . Tensor ] , next_state : Sequence [ tf . Tensor ] ) -> tf . Tensor : scope = self . reward_scope ( state , action , next_state ) r = self . compile_reward ( scope ) . tensor with self . graph . as_default ( ) : with tf . name_scope ( 'reward... | Compiles the reward function given the current state action and next_state . |
52,459 | def compile_cpfs ( self , scope : Dict [ str , TensorFluent ] , batch_size : Optional [ int ] = None , noise : Optional [ Noise ] = None ) -> Tuple [ List [ CPFPair ] , List [ CPFPair ] ] : interm_fluents = self . compile_intermediate_cpfs ( scope , batch_size , noise ) scope . update ( dict ( interm_fluents ) ) next_s... | Compiles the intermediate and next state fluent CPFs given the current state and action scope . |
52,460 | def compile_intermediate_cpfs ( self , scope : Dict [ str , TensorFluent ] , batch_size : Optional [ int ] = None , noise : Optional [ Noise ] = None ) -> List [ CPFPair ] : interm_fluents = [ ] with self . graph . as_default ( ) : with tf . name_scope ( 'intermediate_cpfs' ) : for cpf in self . rddl . domain . interme... | Compiles the intermediate fluent CPFs given the current state and action scope . |
52,461 | def compile_state_cpfs ( self , scope : Dict [ str , TensorFluent ] , batch_size : Optional [ int ] = None , noise : Optional [ Noise ] = None ) -> List [ CPFPair ] : next_state_fluents = [ ] with self . graph . as_default ( ) : with tf . name_scope ( 'state_cpfs' ) : for cpf in self . rddl . domain . state_cpfs : cpf_... | Compiles the next state fluent CPFs given the current state and action scope . |
52,462 | def compile_reward ( self , scope : Dict [ str , TensorFluent ] ) -> TensorFluent : reward_expr = self . rddl . domain . reward with self . graph . as_default ( ) : with tf . name_scope ( 'reward' ) : return self . _compile_expression ( reward_expr , scope ) | Compiles the reward function given the fluent scope . |
52,463 | def compile_state_action_constraints ( self , state : Sequence [ tf . Tensor ] , action : Sequence [ tf . Tensor ] ) -> List [ TensorFluent ] : scope = self . transition_scope ( state , action ) constraints = [ ] with self . graph . as_default ( ) : with tf . name_scope ( 'state_action_constraints' ) : for p in self . ... | Compiles the state - action constraints given current state and action fluents . |
52,464 | def compile_action_preconditions ( self , state : Sequence [ tf . Tensor ] , action : Sequence [ tf . Tensor ] ) -> List [ TensorFluent ] : scope = self . action_precondition_scope ( state , action ) preconds = [ ] with self . graph . as_default ( ) : with tf . name_scope ( 'action_preconditions' ) : for p in self . rd... | Compiles the action preconditions given current state and action fluents . |
52,465 | def compile_state_invariants ( self , state : Sequence [ tf . Tensor ] ) -> List [ TensorFluent ] : scope = self . state_invariant_scope ( state ) invariants = [ ] with self . graph . as_default ( ) : with tf . name_scope ( 'state_invariants' ) : for p in self . rddl . domain . invariants : fluent = self . _compile_exp... | Compiles the state invariants given current state fluents . |
52,466 | def compile_action_preconditions_checking ( self , state : Sequence [ tf . Tensor ] , action : Sequence [ tf . Tensor ] ) -> tf . Tensor : with self . graph . as_default ( ) : with tf . name_scope ( 'action_preconditions_checking' ) : preconds = self . compile_action_preconditions ( state , action ) all_preconds = tf .... | Combines the action preconditions into an applicability checking op . |
52,467 | def compile_action_bound_constraints ( self , state : Sequence [ tf . Tensor ] ) -> Dict [ str , Bounds ] : scope = self . action_precondition_scope ( state ) lower_bounds = self . rddl . domain . action_lower_bound_constraints upper_bounds = self . rddl . domain . action_upper_bound_constraints with self . graph . as_... | Compiles all actions bounds for the given state . |
52,468 | def non_fluents_scope ( self ) -> Dict [ str , TensorFluent ] : if self . __dict__ . get ( 'non_fluents' ) is None : self . _initialize_non_fluents ( ) return dict ( self . non_fluents ) | Returns a partial scope with non - fluents . |
52,469 | def state_scope ( self , state_fluents : Sequence [ tf . Tensor ] ) -> Dict [ str , TensorFluent ] : return dict ( zip ( self . rddl . domain . state_fluent_ordering , state_fluents ) ) | Returns a partial scope with current state - fluents . |
52,470 | def action_scope ( self , action_fluents : Sequence [ tf . Tensor ] ) -> Dict [ str , TensorFluent ] : return dict ( zip ( self . rddl . domain . action_fluent_ordering , action_fluents ) ) | Returns a partial scope with current action - fluents . |
52,471 | def next_state_scope ( self , next_state_fluents : Sequence [ tf . Tensor ] ) -> Dict [ str , TensorFluent ] : return dict ( zip ( self . rddl . domain . next_state_fluent_ordering , next_state_fluents ) ) | Returns a partial scope with current next state - fluents . |
52,472 | def transition_scope ( self , state : Sequence [ tf . Tensor ] , action : Sequence [ tf . Tensor ] ) -> Dict [ str , TensorFluent ] : scope = { } scope . update ( self . non_fluents_scope ( ) ) scope . update ( self . state_scope ( state ) ) scope . update ( self . action_scope ( action ) ) return scope | Returns the complete transition fluent scope for the current state and action fluents . |
52,473 | def reward_scope ( self , state : Sequence [ tf . Tensor ] , action : Sequence [ tf . Tensor ] , next_state : Sequence [ tf . Tensor ] ) -> Dict [ str , TensorFluent ] : scope = { } scope . update ( self . non_fluents_scope ( ) ) scope . update ( self . state_scope ( state ) ) scope . update ( self . action_scope ( act... | Returns the complete reward fluent scope for the current state action fluents and next_state fluents . |
52,474 | def state_invariant_scope ( self , state : Sequence [ tf . Tensor ] ) : scope = { } scope . update ( self . non_fluents_scope ( ) ) scope . update ( self . state_scope ( state ) ) return scope | Returns the state invariant fluent scope for the current state . |
52,475 | def _initialize_pvariables ( self , pvariables : Dict [ str , PVariable ] , ordering : List [ str ] , initializer : Optional [ InitializerList ] = None ) -> List [ Tuple [ str , TensorFluent ] ] : if initializer is not None : init = dict ( ) for ( ( name , args ) , value ) in initializer : arity = len ( args ) if args ... | Instantiates pvariables given an initialization list and returns a list of TensorFluents in the given ordering . |
52,476 | def _initialize_non_fluents ( self ) : non_fluents = self . rddl . domain . non_fluents initializer = self . rddl . non_fluents . init_non_fluent self . non_fluents = self . _initialize_pvariables ( non_fluents , self . rddl . domain . non_fluent_ordering , initializer ) return self . non_fluents | Returns the non - fluents instantiated . |
52,477 | def _initialize_initial_state_fluents ( self ) : state_fluents = self . rddl . domain . state_fluents initializer = self . rddl . instance . init_state self . initial_state_fluents = self . _initialize_pvariables ( state_fluents , self . rddl . domain . state_fluent_ordering , initializer ) return self . initial_state_... | Returns the initial state - fluents instantiated . |
52,478 | def _initialize_default_action_fluents ( self ) : action_fluents = self . rddl . domain . action_fluents self . default_action_fluents = self . _initialize_pvariables ( action_fluents , self . rddl . domain . action_fluent_ordering ) return self . default_action_fluents | Returns the default action - fluents instantiated . |
52,479 | def _compile_batch_fluents ( self , fluents : List [ Tuple [ str , TensorFluent ] ] , batch_size : int ) -> Sequence [ tf . Tensor ] : batch_fluents = [ ] with self . graph . as_default ( ) : for name , fluent in fluents : name_scope = utils . identifier ( name ) with tf . name_scope ( name_scope ) : t = tf . stack ( [... | Compiles fluents into tensors with given batch_size . |
52,480 | def _compile_expression ( self , expr : Expression , scope : Dict [ str , TensorFluent ] , batch_size : Optional [ int ] = None , noise : Optional [ List [ tf . Tensor ] ] = None ) -> TensorFluent : etype2compiler = { 'constant' : self . _compile_constant_expression , 'pvar' : self . _compile_pvariable_expression , 'ra... | Compile the expression expr into a TensorFluent in the given scope with optional batch size . |
52,481 | def _compile_constant_expression ( self , expr : Expression , scope : Dict [ str , TensorFluent ] , batch_size : Optional [ int ] = None , noise : Optional [ List [ tf . Tensor ] ] = None ) -> TensorFluent : etype = expr . etype args = expr . args dtype = utils . python_type_to_dtype ( etype [ 1 ] ) fluent = TensorFlue... | Compile a constant expression expr into a TensorFluent in the given scope with optional batch size . |
52,482 | def _compile_pvariable_expression ( self , expr : Expression , scope : Dict [ str , TensorFluent ] , batch_size : Optional [ int ] = None , noise : Optional [ List [ tf . Tensor ] ] = None ) -> TensorFluent : etype = expr . etype args = expr . args name = expr . _pvar_to_name ( args ) if name not in scope : raise Value... | Compile a pvariable expression expr into a TensorFluent in the given scope with optional batch size . |
52,483 | def _compile_random_variable_expression ( self , expr : Expression , scope : Dict [ str , TensorFluent ] , batch_size : Optional [ int ] = None , noise : Optional [ List [ tf . Tensor ] ] = None ) -> TensorFluent : etype = expr . etype args = expr . args if etype [ 1 ] == 'KronDelta' : sample = self . _compile_expressi... | Compile a random variable expression expr into a TensorFluent in the given scope with optional batch size . |
52,484 | def _compile_arithmetic_expression ( self , expr : Expression , scope : Dict [ str , TensorFluent ] , batch_size : Optional [ int ] = None , noise : Optional [ List [ tf . Tensor ] ] = None ) -> TensorFluent : etype = expr . etype args = expr . args if len ( args ) == 1 : etype2op = { '+' : lambda x : x , '-' : lambda ... | Compile an arithmetic expression expr into a TensorFluent in the given scope with optional batch size . |
52,485 | def _compile_function_expression ( self , expr : Expression , scope : Dict [ str , TensorFluent ] , batch_size : Optional [ int ] = None , noise : Optional [ List [ tf . Tensor ] ] = None ) -> TensorFluent : etype = expr . etype args = expr . args if len ( args ) == 1 : etype2func = { 'abs' : TensorFluent . abs , 'exp'... | Compile a function expression expr into a TensorFluent in the given scope with optional batch size . |
52,486 | def _compile_control_flow_expression ( self , expr : Expression , scope : Dict [ str , TensorFluent ] , batch_size : Optional [ int ] = None , noise : Optional [ List [ tf . Tensor ] ] = None ) -> TensorFluent : etype = expr . etype args = expr . args if etype [ 1 ] == 'if' : condition = self . _compile_expression ( ar... | Compile a control flow expression expr into a TensorFluent in the given scope with optional batch size . |
52,487 | def _compile_aggregation_expression ( self , expr : Expression , scope : Dict [ str , TensorFluent ] , batch_size : Optional [ int ] = None , noise : Optional [ List [ tf . Tensor ] ] = None ) -> TensorFluent : etype = expr . etype args = expr . args typed_var_list = args [ : - 1 ] vars_list = [ var for _ , ( var , _ )... | Compile an aggregation expression expr into a TensorFluent in the given scope with optional batch size . |
52,488 | def variable_names ( self ) : if self . _variable_names is None : if self . _operator is None : if self . _operands is None : self . _variable_names = tuple ( ) else : self . _variable_names = self . _get_variable_names ( self . _operands ) elif self . _operator == 'NOT' : self . _variable_names = self . _operands . va... | Get all variable names required for this query |
52,489 | def user ( self ) : try : return self . _user except AttributeError : self . _user = MatrixUser ( self . mxid , self . Api ( identity = self . mxid ) ) return self . _user | Creates a User object when requested . |
52,490 | def room ( self ) : try : return self . _room except AttributeError : room_id = self . json [ "room_id" ] self . _room = MatrixRoom ( room_id , self . Api ) return self . _room | Creates a Room object when requested . |
52,491 | def join ( self , room_str ) : response = self . user_api . join_room ( room_str ) return self . _mkroom ( response [ "room_id" ] ) | Joins room id or alias even if it must first be created . |
52,492 | def optional ( _object ) : if is_callable ( _object ) : validator = _object @ wraps ( validator ) def decorated ( value ) : if value : return validator ( value ) return return decorated else : def optional ( * args ) : return _object optional . is_optional = True optional . _object = _object return optional | This decorator has a double functionality it can wrap validators and make them optional or it can wrap keys and make that entry optional . |
52,493 | def set_walltime ( self , walltime ) : if not isinstance ( walltime , timedelta ) : raise TypeError ( 'walltime must be an instance of datetime.timedelta. %s given' % type ( walltime ) ) self . _options [ 'walltime' ] = walltime return self | Setting a walltime for the job |
52,494 | def get_url ( url , parser = 'html' ) : url = request . quote ( url , safe = ':/?=&' ) logger . debug ( 'URL: %s' , url ) req = request . Request ( url , headers = { 'User-Agent' : 'foobar' } ) try : response = request . urlopen ( req ) except HTTPError : raise except ( ssl . SSLError , URLError ) : context = ssl . SSL... | Requests the specified url and returns a BeautifulSoup object with its contents . |
52,495 | def get_lastfm ( method , lastfm_key = '' , ** kwargs ) : if not lastfm_key : if 'lastfm_key' not in CONFIG or not CONFIG [ 'lastfm_key' ] : logger . warning ( 'No lastfm key configured' ) return '' else : lastfm_key = CONFIG [ 'lastfm_key' ] url = 'http://ws.audioscrobbler.com/2.0/?method={}&api_key={}&format=json' ur... | Request the specified method from the lastfm api . |
52,496 | def metrolyrics ( song ) : translate = { URLESCAPE : '' , ' ' : '-' } title = song . title . lower ( ) title = normalize ( title , translate ) title = re . sub ( r'\-{2,}' , '-' , title ) artist = song . artist . lower ( ) artist = normalize ( artist , translate ) artist = re . sub ( r'\-{2,}' , '-' , artist ) url = 'h... | Returns the lyrics found in metrolyrics for the specified mp3 file or an empty string if not found . |
52,497 | def darklyrics ( song ) : if not hasattr ( song , 'album' ) or not song . album : song . fetch_album_name ( ) if not hasattr ( song , 'album' ) or not song . album : return '' artist = song . artist . lower ( ) artist = normalize ( artist , URLESCAPES , '' ) album = song . album . lower ( ) album = normalize ( album , ... | Returns the lyrics found in darklyrics for the specified mp3 file or an empty string if not found . |
52,498 | def azlyrics ( song ) : artist = song . artist . lower ( ) if artist [ 0 : 2 ] == 'a ' : artist = artist [ 2 : ] artist = normalize ( artist , URLESCAPES , '' ) title = song . title . lower ( ) title = normalize ( title , URLESCAPES , '' ) url = 'https://www.azlyrics.com/lyrics/{}/{}.html' . format ( artist , title ) s... | Returns the lyrics found in azlyrics for the specified mp3 file or an empty string if not found . |
52,499 | def metalarchives ( song ) : artist = normalize ( song . artist ) title = normalize ( song . title ) url = 'https://www.metal-archives.com/search/ajax-advanced/searching/songs' url += f'/?songTitle={title}&bandName={artist}&ExactBandMatch=1' soup = get_url ( url , parser = 'json' ) if not soup : return '' song_id_re = ... | Returns the lyrics found in MetalArchives for the specified mp3 file or an empty string if not found . |
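The rows above pair whitespace-tokenized Python function bodies (`question`) with their reference docstrings (`target`), in the style of a code-summarization corpus. As a minimal sketch of how such rows might be consumed, assuming the table is exported as JSON Lines with the `idx`, `question`, and `target` fields shown in the header (the filename and JSONL format here are assumptions, not part of the dataset):

```python
import json
from typing import Iterator, Tuple


def load_pairs(path: str = "code_docstring_pairs.jsonl") -> Iterator[Tuple[int, str, str]]:
    """Yield (idx, code, docstring) tuples from a hypothetical JSONL export.

    The path and serialization format are assumptions about how the
    table above might be stored; adjust them to the actual export.
    """
    with open(path, encoding="utf-8") as handle:
        for line in handle:
            row = json.loads(line)
            # `question` holds tokenized source code; `target` holds
            # the reference summary/docstring for that function.
            yield row["idx"], row["question"], row["target"]


if __name__ == "__main__":
    for idx, code, doc in load_pairs():
        print(idx, doc[:60])
        break
```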