idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
55,400
def regulation(self, column=None, value=None, **kwargs):
    """Query the RAD_REGULATION table through the shared resolver.

    Args:
        column: optional column name to filter on.
        value: optional value that *column* must match.
        **kwargs: extra query options forwarded to ``_resolve_call``.

    Returns:
        Whatever ``_resolve_call`` produces for the RAD_REGULATION lookup.
    """
    return self._resolve_call('RAD_REGULATION', column, value, **kwargs)
Provides relevant information about applicable regulations .
55,401
def regulatory_program(self, column=None, value=None, **kwargs):
    """Query the RAD_REGULATORY_PROG table through the shared resolver.

    Args:
        column: optional column name to filter on.
        value: optional value that *column* must match.
        **kwargs: extra query options forwarded to ``_resolve_call``.

    Returns:
        Whatever ``_resolve_call`` produces for the RAD_REGULATORY_PROG lookup.
    """
    return self._resolve_call('RAD_REGULATORY_PROG', column, value, **kwargs)
Identifies the regulatory authority governing a facility and by virtue of that identification also identifies the regulatory program of interest and the type of facility .
55,402
def collect_basic_info():
    """Record basic interpreter information via the module-level ``_collect``.

    Logs ``sys.version_info`` (as JSON) and ``sys.version``.

    Returns:
        str: the interpreter's ``sys.version`` string.
    """
    version_info = sys.version_info
    _collect(json.dumps({'sys.version_info': tuple(version_info)}))
    _collect(sys.version)
    return sys.version
Collect basic info about the system: OS and Python version.
55,403
def call(function):
    """Decorator that records every invocation of *function*.

    Each call emits a ``call:<module>.<name>`` marker through the
    module-level ``_collect`` before delegating to the wrapped function.
    """
    call_tag = 'call:%s.%s' % (function.__module__, function.__name__)

    @functools.wraps(function)
    def wrapper(*args, **kwargs):
        _collect(call_tag)
        return function(*args, **kwargs)

    return wrapper
decorator that collect function call count .
55,404
def _parse_ip_addr_show(raw_result):
    """Parse the raw output of ``ip addr list dev <dev>``.

    Returns a dict of parsed fields (numeric strings converted to int),
    or None when the device does not exist.
    """
    # First check for the "does not exist" error message.
    show_re = (r'"(?P<dev>\S+)"\s+does not exist')
    re_result = search(show_re, raw_result)
    result = None
    if not (re_result):
        # Device exists: parse index, name, flags, mtu, state and link info.
        # NOTE(review): the group name 'falgs_str' is a historical typo but is
        # part of the returned dict's keys, so it must not be "fixed" here.
        show_re = (
            r'\s*(?P<os_index>\d+):\s+(?P<dev>\S+):\s+<(?P<falgs_str>.*)?>.*?'
            r'mtu\s+(?P<mtu>\d+).+?state\s+(?P<state>\w+).*'
            r'\s*link/(?P<link_type>\w+)\s+(?P<mac_address>\S+)'
        )
        # NOTE(review): if this regex does not match, .groupdict() on None
        # raises AttributeError — presumably the output always matches here.
        re_result = search(show_re, raw_result, DOTALL)
        result = re_result.groupdict()
        # Optional IPv4 address/mask.
        show_re = (r'((inet )\s*(?P<inet>[^/]+)/(?P<inet_mask>\d{1,2}))')
        re_result = search(show_re, raw_result)
        if (re_result):
            result.update(re_result.groupdict())
        # Optional IPv6 address/mask.
        show_re = (r'((?<=inet6 )(?P<inet6>[^/]+)/(?P<inet6_mask>\d{1,2}))')
        re_result = search(show_re, raw_result)
        if (re_result):
            result.update(re_result.groupdict())
        # Convert purely numeric string fields (os_index, mtu, masks) to int.
        for key, value in result.items():
            if value is not None:
                if value.isdigit():
                    result[key] = int(value)
    return result
Parse the ip addr list dev command raw output .
55,405
def interface(enode, portlbl, addr=None, up=None, shell=None):
    """Configure an interface on *enode*.

    Optionally assigns an IP address and/or brings the link up or down.

    :param enode: engine node to run commands on (callable, with ``ports``).
    :param portlbl: port label to look up in ``enode.ports``.
    :param addr: optional IPv4/IPv6 address with prefix (e.g. '10.0.0.1/24').
    :param up: optional bool; True brings the link up, False down.
    :param shell: shell to use when executing commands.
    """
    assert portlbl
    port = enode.ports[portlbl]

    if addr is not None:
        # ip_interface also validates the address string.
        assert ip_interface(addr)
        response = enode(
            'ip addr add {addr} dev {port}'.format(addr=addr, port=port),
            shell=shell)
        assert not response

    if up is not None:
        state = 'up' if up else 'down'
        response = enode(
            'ip link set dev {port} {state}'.format(port=port, state=state),
            shell=shell)
        assert not response
Configure an interface.
55,406
def remove_ip(enode, portlbl, addr, shell=None):
    """Remove an IP address from an interface on *enode*.

    :param enode: engine node to run commands on (callable, with ``ports``).
    :param portlbl: port label to look up in ``enode.ports``.
    :param addr: address with prefix to delete (validated via ip_interface).
    :param shell: shell to use when executing the command.
    """
    assert portlbl
    assert ip_interface(addr)
    port = enode.ports[portlbl]
    response = enode(
        'ip addr del {addr} dev {port}'.format(addr=addr, port=port),
        shell=shell)
    assert not response
Remove an IP address from an interface .
55,407
def add_route(enode, route, via, shell=None):
    """Add a static route on *enode*.

    Chooses ``-4``/``-6`` based on the address family of the gateway and,
    for non-default routes, of the destination network.

    :param enode: engine node to run the command on (callable).
    :param route: destination network or the string 'default'.
    :param via: gateway address.
    :param shell: shell to use when executing the command.
    """
    via = ip_address(via)
    is_ipv6 = (via.version == 6) or (
        route != 'default' and ip_network(route).version == 6)
    version = '-6' if is_ipv6 else '-4'
    response = enode(
        'ip {version} route add {route} via {via}'.format(
            version=version, route=route, via=via),
        shell=shell)
    assert not response
Add a new static route .
55,408
def add_link_type_vlan(enode, portlbl, name, vlan_id, shell=None):
    """Create a VLAN-typed virtual link on top of an existing port.

    Registers the new link in ``enode.ports`` on success.

    :param enode: engine node to run the command on (callable, with ``ports``).
    :param portlbl: label of the parent port.
    :param name: name for the new virtual link (must not already exist).
    :param vlan_id: VLAN id for the link.
    :param shell: shell to use when executing the command.
    :raises ValueError: if *name* is already registered as a port.
    """
    assert name
    if name in enode.ports:
        raise ValueError('Port {name} already exists'.format(name=name))
    assert portlbl
    assert vlan_id
    parent = enode.ports[portlbl]
    response = enode(
        'ip link add link {dev} name {name} type vlan id {vlan_id}'.format(
            dev=parent, name=name, vlan_id=vlan_id),
        shell=shell)
    assert not response, 'Cannot add virtual link {name}'.format(name=name)
    enode.ports[name] = name
Add a new virtual link with the type set to VLAN .
55,409
def remove_link_type_vlan(enode, name, shell=None):
    """Delete the virtual (VLAN) link *name* and deregister it.

    :param enode: engine node to run the command on (callable, with ``ports``).
    :param name: name of the virtual link to remove.
    :param shell: shell to use when executing the command.
    :raises ValueError: if *name* is not a registered port.
    """
    assert name
    if name not in enode.ports:
        raise ValueError('Port {name} doesn\'t exists'.format(name=name))
    response = enode(
        'ip link del link dev {name}'.format(name=name), shell=shell)
    assert not response, 'Cannot remove virtual link {name}'.format(name=name)
    del enode.ports[name]
Delete a virtual link .
55,410
def show_interface(enode, dev, shell=None):
    """Show the configured parameters and statistics of an interface.

    Combines the parsed output of ``ip addr list`` and ``ip -s link list``.

    :param enode: engine node to run commands on (callable).
    :param dev: device name.
    :param shell: shell to use when executing commands.
    :returns: merged dict of address info and link statistics, or None
        when the address listing yields nothing (e.g. unknown device).
    """
    assert dev
    addr_output = enode(
        'ip addr list dev {ldev}'.format(ldev=dev), shell=shell)
    first_half_dict = _parse_ip_addr_show(addr_output)
    if not first_half_dict:
        return None
    stats_output = enode(
        'ip -s link list dev {ldev}'.format(ldev=dev), shell=shell)
    second_half_dict = _parse_ip_stats_link_show(stats_output)
    merged = dict(first_half_dict)
    merged.update(second_half_dict)
    return merged
Show the configured parameters and stats of an interface .
55,411
def build_mmd(target_folder=DEFAULT_LIBRARY_DIR):
    """Build and install the MultiMarkdown shared library.

    Clones the MultiMarkdown-5 repository into a temporary directory,
    runs cmake + the platform build, then copies the resulting shared
    library into *target_folder*.

    Fix: the original changed the process working directory and only
    restored it (and removed the temporary checkout) at the end of the
    happy path, so any exception during clone/build leaked both. The
    cleanup is now in a ``finally`` block.

    :param target_folder: destination directory for the built library.
    """
    mmd_dir = tempfile.mkdtemp()
    old_pwd = os.getcwd()
    try:
        mmd_repo = pygit2.clone_repository(
            'https://github.com/jasedit/MultiMarkdown-5', mmd_dir,
            checkout_branch='fix_windows')
        mmd_repo.init_submodules()
        mmd_repo.update_submodules()
        build_dir = os.path.join(mmd_dir, 'build')
        os.chdir(build_dir)
        cmake_cmd = ['cmake', '-DCMAKE_BUILD_TYPE=Release',
                     '-DSHAREDBUILD=1', '..']
        if platform.system() == 'Windows':
            # MSVC needs an explicit generator, Win64 variant on 64-bit.
            is_64bit = platform.architecture()[0] == '64bit'
            generator = 'Visual Studio 14 2015{0}'.format(
                ' Win64' if is_64bit else '')
            cmake_cmd.insert(-1, '-G')
            cmake_cmd.insert(-1, '{0}'.format(generator))
        subprocess.call(cmake_cmd)
        # Platform-specific build step (e.g. make / msbuild).
        PLATFORM_BUILDS[platform.system()]()
        lib_file = 'libMultiMarkdown' + SHLIB_EXT[platform.system()]
        if not os.path.exists(target_folder):
            os.mkdir(target_folder)
        src = os.path.join(
            build_dir, SHLIB_PREFIX[platform.system()], lib_file)
        dest = os.path.join(target_folder, lib_file)
        shutil.copyfile(src, dest)
    finally:
        # Always restore the cwd and remove the temporary checkout,
        # even when the clone or build fails partway through.
        os.chdir(old_pwd)
        shutil.rmtree(mmd_dir, ignore_errors=True)
Build and install the MultiMarkdown shared library .
55,412
def generate_requirements_files(self, base_dir='.'):
    """Write one ``<section>.txt`` requirements file per config section.

    The 'metadata' section and sections with no options are skipped.

    :param base_dir: directory under which the requirements dir is created.
    """
    print("Creating requirements files\n")
    shared = self._get_shared_section()
    requirements_dir = self._make_requirements_directory(base_dir)
    for section in self.config.sections():
        if section == 'metadata':
            continue
        requirements = {
            option: self.config.get(section, option)
            for option in self.config.options(section)
        }
        if not requirements:
            continue
        filename = os.path.join(requirements_dir, '%s.txt' % section)
        self._write_requirements_file(shared, section, requirements, filename)
Generate set of requirements files for config
55,413
def _write_default_sections(self):
    """Populate ``self.config`` with the default rc-file layout.

    Creates a 'metadata' section (with ``shared = common``) plus empty
    'common', 'development' and 'production' sections.
    """
    self.config.add_section('metadata')
    self.config.set('metadata', 'shared', 'common')
    for section in ('common', 'development', 'production'):
        self.config.add_section(section)
Starting from scratch so create a default rc file
55,414
def _parse_requirements(self, input):
    """Parse requirement specification lines.

    Lines that ``_parse_line`` recognises (e.g. ``foobar == 1.0``) yield a
    (package, version) pair; all other lines are silently ignored.

    :param input: iterable of requirement lines.
    :returns: tuple of (package, version) pairs.
    """
    parsed = (self._parse_line(line) for line in input)
    return tuple(
        (package, version) for package, version in parsed if package)
Parse a list of requirements specifications. Lines that look like `foobar == 1.0` are parsed; all other lines are silently ignored.
55,415
def create_rc_file(self, packages):
    """Create/update the rc file from a stream of requirement lines.

    Reads requirements from *packages* (a file-like object), asks the user
    which section each new package belongs to, updates versions of already
    configured packages, removes configured packages that are no longer
    listed, and writes the result to ``self.rc_filename``.
    """
    print("Creating rcfile '%s'\n" % self.rc_filename)
    # Start from the default layout when the config is empty.
    if not self.config.sections():
        self._write_default_sections()
    # Build an index -> section map and a prompt string like "1. common / ...".
    sections = {}
    section_text = []
    for i, section in enumerate(self.config.sections()):
        if section == 'metadata':
            continue
        sections[i] = section
        section_text.append('%s. %s' % (i, section))
    section_text = ' / '.join(section_text)
    # Re-attach stdin so the user can be prompted even when piping input.
    self._remap_stdin()
    package_names = set()
    lines = packages.readlines()
    requirements = self._parse_requirements(lines)
    for (package, version) in requirements:
        package_names.add(package)
        section, configured_version = self._get_option(package)
        if section:
            # Already configured: update the pinned version if it changed,
            # then skip the interactive section prompt.
            if configured_version:
                if configured_version != version:
                    print("Updating '%s' version from '%s' to '%s'" %
                          (package, configured_version, version))
                    self.config.set(section, package, version)
            continue
        # New package: ask which section it belongs to.
        section = self._get_section(package, sections, section_text)
        self._set_option(section, package, version)
    # Drop configured packages that are no longer in the input list.
    # NOTE(review): relies on options() returning a fresh list so removal
    # during iteration is safe — true for configparser; verify otherwise.
    for section in self.config.sections():
        if section == 'metadata':
            continue
        for option in self.config.options(section):
            if option not in package_names:
                print("Removing package '%s'" % option)
                self.config.remove_option(section, option)
    rc_file = open(self.rc_filename, 'w+')
    self.config.write(rc_file)
    rc_file.close()
Create a set of requirements files for config
55,416
def upgrade_packages(self, packages):
    """pip-install --upgrade every package named in *packages*.

    :param packages: file-like object of requirement lines.
    """
    print("Upgrading packages\n")
    requirements = self._parse_requirements(packages.readlines())
    package_list = [package for package, version in requirements]
    if not package_list:
        print("No packages to upgrade")
        return
    subprocess.check_call(["pip", "install", "-U"] + package_list)
Upgrade all specified packages to latest version
55,417
def determine_extra_packages(self, packages):
    """Return installed package names missing from *packages*.

    Runs ``pip freeze`` to enumerate what is installed, parses both lists
    with ``_parse_requirements`` and returns the set difference.

    :param packages: file-like object of requirement lines.
    :returns: tuple of package names (order unspecified).
    """
    frozen = subprocess.check_output(
        ["pip", "freeze"], universal_newlines=True)
    installed = {
        package for package, version
        in self._parse_requirements(frozen.strip().split('\n'))
    }
    wanted = {
        package for package, version
        in self._parse_requirements(packages.readlines())
    }
    return tuple(installed - wanted)
Return all packages that are installed but missing from packages . Return value is a tuple of the package names
55,418
def remove_extra_packages(self, packages, dry_run=False):
    """Uninstall all installed packages missing from *packages*.

    :param packages: file-like object of requirement lines.
    :param dry_run: when True, only print what would be removed.
    """
    removal_list = self.determine_extra_packages(packages)
    if not removal_list:
        print("No packages to be removed")
        return
    if dry_run:
        print("The following packages would be removed:\n %s\n" %
              "\n ".join(removal_list))
        return
    print("Removing packages\n")
    subprocess.check_call(["pip", "uninstall", "-y"] + list(removal_list))
Remove all packages missing from list
55,419
def rewrap(self, **kwargs):
    """Rewrap helper: mutate in place or build a fresh ``pluckable``.

    When ``self.inplace`` is true, the given keyword arguments are applied
    directly to ``self`` and ``self`` is returned. Otherwise missing
    constructor arguments are filled in from ``self`` and a new
    ``pluckable`` is returned.
    """
    if self.inplace:
        for attr, val in kwargs.items():
            setattr(self, attr, val)
        return self
    for attr in ('obj', 'default', 'skipmissing', 'inplace', 'empty'):
        kwargs.setdefault(attr, getattr(self, attr))
    return pluckable(**kwargs)
Inplace constructor . Depending on self . inplace rewrap obj or just update internal vars possibly including the obj .
55,420
def _sliced_list(self, selector):
    """Apply a slice *selector* to the wrapped list.

    In skipmissing mode a plain slice is returned; in explicit mode each
    index in the slice range is appended individually so missing positions
    can be expanded with default values by ``_append``.
    """
    if self.skipmissing:
        return self.obj[selector]
    # NOTE(review): Python 2 only (`xrange`, `sys.maxint`). An open-ended
    # stop falls back to sys.maxint; _append is expected to stop/fill
    # appropriately for out-of-range keys.
    keys = xrange(selector.start or 0, selector.stop or sys.maxint,
                  selector.step or 1)
    res = []
    for key in keys:
        self._append(self.obj, key, res, skipmissing=False)
    return res
For slice selectors operating on lists we need to handle them differently depending on skipmissing . In explicit mode we may have to expand the list with default values .
55,421
def forceutc(t: Union[str, datetime.datetime, datetime.date, np.datetime64]) -> Union[datetime.datetime, datetime.date]:
    """Coerce *t* to a timezone-aware UTC datetime.

    Naive datetimes are tagged as UTC; aware ones are converted to UTC.
    Strings are parsed first, numpy datetime64 values are converted, plain
    dates are returned unchanged, and sequences are mapped element-wise
    into a numpy array.

    :raises TypeError: for inputs that are not datetime-like.
    """
    if isinstance(t, str):
        t = parse(t)
    elif isinstance(t, np.datetime64):
        t = t.astype(datetime.datetime)
    elif isinstance(t, datetime.datetime):
        pass
    elif isinstance(t, datetime.date):
        # A bare date has no time zone to normalise.
        return t
    elif isinstance(t, (np.ndarray, list, tuple)):
        return np.asarray([forceutc(T) for T in t])
    else:
        raise TypeError('datetime only input')
    return t.replace(tzinfo=UTC) if t.tzinfo is None else t.astimezone(UTC)
Add UTC to datetime - naive and convert to UTC for datetime aware
55,422
def step_a_new_working_directory(context):
    """Behave step: create a new, empty working directory.

    Ensures ``context.workdir`` exists as an attribute, creates the
    directory if needed, then wipes any leftover contents from a
    previous run.
    """
    command_util.ensure_context_attribute_exists(context, "workdir", None)
    command_util.ensure_workdir_exists(context)
    # Remove leftovers; ignore_errors also covers a not-yet-existing dir.
    shutil.rmtree(context.workdir, ignore_errors=True)
Creates a new empty working directory
55,423
def step_use_curdir_as_working_directory(context):
    """Behave step: use the current directory as the working directory.

    Stores the absolute path of the current directory on the context and
    makes sure it exists.
    """
    context.workdir = os.path.abspath(".")
    command_util.ensure_workdir_exists(context)
Uses the current directory as working directory
55,424
def step_an_empty_file_named_filename(context, filename):
    """Behave step: create an empty file at *filename* inside the workdir.

    *filename* must be relative; it is resolved against ``context.workdir``.
    """
    assert not os.path.isabs(filename)
    command_util.ensure_workdir_exists(context)
    target_path = os.path.join(context.workdir, filename)
    pathutil.create_textfile_with_contents(target_path, "")
Creates an empty file .
55,425
def step_i_run_command(context, command):
    """Behave step: run *command* as a subprocess in the workdir.

    Stores the result (output + returncode) on ``context.command_result``
    and saves any coverage files produced in the workdir.
    """
    command_util.ensure_workdir_exists(context)
    context.command_result = command_shell.run(command, cwd=context.workdir)
    command_util.workdir_save_coverage_files(context.workdir)
    # NOTE(review): debug output deliberately disabled via 'False and';
    # drop the 'False and' to re-enable it.
    if False and DEBUG:
        print(u"run_command: {0}".format(command))
        print(u"run_command.output {0}".format(context.command_result.output))
Run a command as subprocess collect its output and returncode .
55,426
def step_command_output_should_contain_exactly_text(context, text):
    """Behave step: assert the last command's output contains *text* exactly.

    ``{__WORKDIR__}`` and ``{__CWD__}`` placeholders in *text* are replaced
    with the normalised workdir and current directory before comparing.
    """
    expected_text = text
    if "{__WORKDIR__}" in text or "{__CWD__}" in text:
        expected_text = textutil.template_substitute(
            text,
            __WORKDIR__=posixpath_normpath(context.workdir),
            __CWD__=posixpath_normpath(os.getcwd()),
        )
    textutil.assert_text_should_contain_exactly(
        context.command_result.output, expected_text)
Verifies that the command output of the last command contains the expected text .
55,427
def get_file_list(path, max_depth=1, cur_depth=0):
    """Yield all non-hidden files under *path*, up to *max_depth* levels.

    Dotfiles and dot-directories are skipped. A non-existent *path* yields
    nothing. Note: despite the original description, this is a generator,
    not a list.

    :param path: directory to scan.
    :param max_depth: how many directory levels to descend into.
    :param cur_depth: internal recursion depth counter.
    """
    if not os.path.exists(path):
        return
    for name in os.listdir(path):
        if name.startswith('.'):
            continue
        full_path = os.path.join(path, name)
        if not os.path.isdir(full_path):
            yield full_path
            continue
        # Stop descending once the maximum depth is reached.
        if cur_depth == max_depth:
            continue
        for nested in get_file_list(full_path, max_depth, cur_depth + 1):
            yield nested
Recursively returns a list of all files up to max_depth in a directory .
55,428
def get_applied_migrations(databases=None):
    """Return applied migration labels per database alias.

    :param databases: optional iterable of database aliases; when given it
        is filtered down to aliases that are actually migration-capable.
        When falsy, all capable databases are used.
    :returns: dict mapping database alias -> list of migration labels,
        ordered by label.
    """
    if not databases:
        databases = get_capable_databases()
    else:
        # Only keep requested aliases that are migration-capable.
        # NOTE(review): itertools.ifilter is Python 2 only.
        all_databases = list(get_capable_databases())
        databases = list(itertools.ifilter(
            lambda x: x in all_databases, databases))
    results = defaultdict(list)
    for db in databases:
        for x in Migration.objects.using(db).order_by("migration_label"):
            results[db].append(x.migration_label)
    return results
Returns a dictionary containing lists of all applied migrations where the key is the database alias .
55,429
def getContGroupArrays(arrays, groupPositions, arrayKeys=None):
    """Convenience function: slice each array at *groupPositions*.

    :param arrays: dict of name -> indexable array (e.g. numpy arrays).
    :param groupPositions: positions used to index every array.
    :param arrayKeys: optional subset of keys; defaults to all keys.
    :returns: dict mapping each selected key to the sliced array.
    """
    if arrayKeys is None:
        arrayKeys = list(viewkeys(arrays))
    return {key: arrays[key][groupPositions] for key in arrayKeys}
Convenience function to generate a subset of arrays from specified array positions.
55,430
def calcDistMatchArr(matchArr, tKey, mKey):
    """Calculate pairwise euclidean distances between match-array positions.

    Uses the two coordinate arrays ``matchArr[tKey]`` and ``matchArr[mKey]``
    and returns position pairs together with their distances, sorted by
    ascending distance.

    :param matchArr: dict of equally sized arrays.
    :param tKey: key of the first coordinate array.
    :param mKey: key of the second coordinate array.
    :returns: dict with 'posPairs' (numpy array of index pairs) and
        'eucDist' (numpy array of distances), both distance-sorted.
    """
    size = listvalues(matchArr)[0].size
    pairs = [(pos1, pos2)
             for pos1 in range(size - 1)
             for pos2 in range(pos1 + 1, size)]
    coords = numpy.swapaxes(
        numpy.array([matchArr[tKey], matchArr[mKey]]), 0, 1)
    distInfo = {
        'posPairs': numpy.array(pairs),
        'eucDist': scipy.spatial.distance.pdist(coords),
    }
    order = numpy.argsort(distInfo['eucDist'])
    for key in list(viewkeys(distInfo)):
        distInfo[key] = distInfo[key][order]
    return distInfo
Calculate the euclidean distance of all array positions in matchArr .
55,431
def load(self, path, name):
    """Import the ``<name>.fgic`` container file from disk.

    Reads the 'data' and 'info' members of the zip container, rebuilds
    ``self.container`` via the Fgi JSON hook, merges the info dict into
    ``self.info`` and moves '_matrixTemplate' onto its own attribute.
    """
    filename = name + '.fgic'
    filepath = aux.joinpath(path, filename)
    with zipfile.ZipFile(filepath, 'r') as containerZip:
        # Both members are UTF-8 encoded JSON documents.
        jsonString = io.TextIOWrapper(
            containerZip.open('data'), encoding='utf-8').read()
        infoString = io.TextIOWrapper(
            containerZip.open('info'), encoding='utf-8').read()
    self.container = json.loads(jsonString, object_hook=Fgi.jsonHook)
    self.info.update(json.loads(infoString))
    # '_matrixTemplate' is stored inside info but exposed as an attribute.
    self._matrixTemplate = self.info['_matrixTemplate']
    del self.info['_matrixTemplate']
Imports the specified fgic file from the hard disk .
55,432
def create(cls, path, encoding='utf-8'):
    """Create a new bare git repository at *path* and wrap it.

    :param path: filesystem location for the new repository.
    :param encoding: text encoding passed to the constructor.
    :returns: an instance of *cls* bound to the new repository.
    """
    subprocess.check_call([GIT, 'init', '--quiet', '--bare', path])
    return cls(path, encoding)
Create a new bare repository
55,433
def cache(self, dependency: Dependency, value):
    """Store *value* for *dependency* in the appropriate cache.

    Thread-local dependencies go onto ``self._local``, singletons into
    ``self._singleton``. Anything else is silently not cached.
    """
    name = dependency.name
    if dependency.threadlocal:
        setattr(self._local, name, value)
    elif dependency.singleton:
        self._singleton[name] = value
Store an instance of dependency in the cache . Does nothing if dependency is NOT a threadlocal or a singleton .
55,434
def cached(self, dependency):
    """Return the cached instance of *dependency*, if any.

    Thread-local dependencies are read from ``self._local``, singletons
    from ``self._singleton``; otherwise None is returned implicitly.
    """
    name = dependency.name
    if dependency.threadlocal:
        return getattr(self._local, name, None)
    if dependency.singleton:
        return self._singleton.get(name)
Get a cached instance of dependency .
55,435
def _set(self, name, factory, singleton=False, threadlocal=False):
    """Register *factory* in the dependency registry.

    Falls back to the factory's ``__name__`` when *name* is falsy, and
    tags the factory with its registered name.
    """
    registered_name = name or factory.__name__
    factory._giveme_registered_name = registered_name
    self._registry[registered_name] = Dependency(
        registered_name, factory, singleton, threadlocal)
Add a dependency factory to the registry
55,436
def register(self, function=None, *, singleton=False, threadlocal=False, name=None):
    """Add a factory to the injector's registry.

    Works both as a plain decorator (``@injector.register``) and as a
    parameterised one (``@injector.register(name=..., singleton=True)``).
    Returns the function unchanged.
    """
    def decorator(func=None):
        self._set(name, func, singleton, threadlocal)
        return func

    return decorator(function) if function else decorator
Add an object to the injector's registry.
55,437
def inject(self, function=None, **names):
    """Decorator: inject registered dependencies into missing arguments.

    When the wrapped function is called, any parameter not supplied by the
    caller is resolved from the injector — either via an explicit mapping
    given in **names** (param -> dependency name) or by the parameter's
    own name. Unresolvable parameters trigger a warning and are left out.
    Usable plain (``@inject``) or parameterised (``@inject(db='database')``).
    """
    def decorator(function):
        @wraps(function)
        def wrapper(*args, **kwargs):
            sig = signature(function)
            params = sig.parameters
            # Bind what the caller actually passed, filling defaults.
            bound = sig.bind_partial(*args, **kwargs)
            bound.apply_defaults()
            injected_kwargs = {}
            for key, value in params.items():
                if key not in bound.arguments:
                    # Prefer an explicit name mapping over the param name.
                    name = names.get(key)
                    if name:
                        injected_kwargs[key] = self.get(name)
                    else:
                        try:
                            injected_kwargs[key] = self.get(key)
                        except DependencyNotFoundError as e:
                            # Missing param could be a dependency or a plain
                            # missing argument: warn rather than fail here.
                            warnings.warn(
                                ambigious_not_found_msg.format(key),
                                DependencyNotFoundWarning)
            # Caller-supplied kwargs always win over injected values.
            injected_kwargs.update(bound.kwargs)
            return function(*bound.args, **injected_kwargs)
        return wrapper
    if function:
        return decorator(function)
    return decorator
Inject dependencies into the function's arguments when it is called.
55,438
def resolve(self, dependency):
    """Expose *dependency* as a lazily-resolved class attribute.

    *dependency* may be a registered name (str) or a registered factory
    (whose ``_giveme_registered_name`` tag is used).

    :returns: a DeferredProperty that calls ``self.get(name)`` on access.
    """
    name = (dependency if isinstance(dependency, str)
            else dependency._giveme_registered_name)
    return DeferredProperty(partial(self.get, name))
Resolve dependency as instance attribute of given class .
55,439
def _fetch_itemslist(self, current_item):
    """Yield all available APIs (at root) or datasets (below an API).

    At the root level the HTML index page is scraped for API entries;
    below the root, datasets are read from the item's JSON resource list.
    """
    if current_item.is_root:
        html = requests.get(self.base_url).text
        soup = BeautifulSoup(html, 'html.parser')
        for item_html in soup.select(".row .col-md-6"):
            try:
                label = item_html.select_one("h2").text
            except Exception:
                # Tile without an <h2> heading: not an API entry, skip it.
                continue
            yield API(label, blob=item_html)
    else:
        for resource in current_item.json["resource"]:
            label = u"{}, {}".format(resource["title"], resource["summary"])
            yield SMHIDataset(label, blob=resource)
Get all available APIs.
55,440
def _fetch_data(self, dataset, query=None, include_inactive_stations=False):
    """Yield data rows (Result objects) for *dataset*.

    Fetches one CSV per (station, period) combination from the SMHI API
    and yields a Result per row.

    Fix: the original declared ``query={}`` — a mutable default that the
    function then mutated, so filters leaked between calls (and into the
    caller's dict). The query is now copied before normalisation.

    :param dataset: dataset whose 'station' dimension and url are used.
    :param query: optional dict with 'station' and/or 'period' filters.
    :param include_inactive_stations: include inactive stations when no
        explicit station filter is given.
    :raises Exception: for disallowed periods or non-200/404 responses.
    """
    # Work on a private copy: never mutate the caller's dict or a shared
    # default between calls.
    query = dict(query) if query else {}
    parameter = dataset
    station_dim = dataset.dimensions["station"]
    all_stations = station_dim.allowed_values
    if "station" not in query:
        if include_inactive_stations:
            query["station"] = list(all_stations)
        else:
            query["station"] = list(station_dim.active_stations())
    else:
        if not isinstance(query["station"], list):
            query["station"] = [query["station"]]
        # Translate station labels to station objects.
        query["station"] = [all_stations.get_by_label(x)
                            for x in query["station"]]
    if "period" not in query:
        query["period"] = PERIODS
    elif not isinstance(query["period"], list):
        query["period"] = [query["period"]]
    for period in query["period"]:
        if period not in PERIODS:
            msg = u"{} is not an allowed period".format(period)
            raise Exception(msg)
    n_queries = len(query["station"]) * len(query["period"])
    print("Fetching data with {} queries.".format(n_queries))
    for station in query["station"]:
        for period in query["period"]:
            url = dataset.url.replace(
                ".json",
                "/station/{}/period/{}/data.csv".format(station.key, period))
            print("/GET {} ".format(url))
            r = requests.get(url)
            if r.status_code == 200:
                raw_data = DataCsv().from_string(r.content).to_dictlist()
                # The measured value lives in the column named after the
                # first component of the parameter id.
                value_col = parameter.id.split(",")[0]
                for row in raw_data:
                    value = float(row[value_col])
                    row["parameter"] = parameter.id
                    row["station"] = station.label
                    row["station_key"] = station.key
                    row["period"] = period
                    row.pop(value_col, None)
                    yield Result(value, row)
            elif r.status_code == 404:
                print("Warning no data at {}".format(url))
            else:
                raise Exception("Connection error for {}".format(url))
Should yield dataset rows
55,441
def _get_example_csv(self):
    """Download a sample CSV for this dataset (used for dimension parsing).

    Fetches the corrected-archive data of the dataset's first station.

    :returns: a populated DataCsv instance.
    :raises Exception: when the API does not answer with HTTP 200.
    """
    station_key = self.json["station"][0]["key"]
    period = "corrected-archive"
    url = self.url.replace(
        ".json",
        "/station/{}/period/{}/data.csv".format(station_key, period))
    response = requests.get(url)
    if response.status_code != 200:
        raise Exception("Error connecting to api")
    return DataCsv().from_string(response.content)
For dimension parsing
55,442
def plural(formatter, value, name, option, format):
    """Choose a pluralized word form for locale-specific plural rules.

    *format* is a '|'-separated list of word forms; the form is selected
    by the locale's plural tag for *value*. Returns None (i.e. leaves the
    field untouched) for non-numeric values or a bare single-word format
    with no field name.
    """
    words = format.split('|')
    if not name and len(words) == 1:
        return
    try:
        number = decimal.Decimal(value)
    except (ValueError, decimal.InvalidOperation):
        # Not a number: nothing to pluralize.
        return
    locale = Locale.parse(option) if option else formatter.locale
    index = get_plural_tag_index(number, locale)
    return formatter.format(words[index], value)
Chooses different text forms for locale-specific pluralization rules.
55,443
def get_choice(value):
    """Derive a string key used to select a choice from any value.

    None maps to 'null'; objects expose their ``__name__`` or ``name``
    attribute when present; everything else falls back to ``str(value)``.
    """
    if value is None:
        return 'null'
    for attr in ('__name__', 'name'):
        if hasattr(value, attr):
            return getattr(value, attr)
    return str(value)
Gets a key to choose a choice from any value .
55,444
def choose(formatter, value, name, option, format):
    """Select one of several '|'-separated words based on *value*.

    *option* lists the choice keys; *format* lists the corresponding words
    (optionally one extra word as default). Returns None when there is no
    option string or fewer than two words.

    :raises ValueError: on a word/choice count mismatch, or when the value
        matches no choice and no default word was supplied.
    """
    if not option:
        return
    words = format.split('|')
    num_words = len(words)
    if num_words < 2:
        return
    choices = option.split('|')
    num_choices = len(choices)
    if num_words not in (num_choices, num_choices + 1):
        n = num_choices
        raise ValueError('specify %d or %d choices' % (n, n + 1))
    choice = get_choice(value)
    if choice in choices:
        index = choices.index(choice)
    elif num_words == num_choices:
        raise ValueError('no default choice supplied')
    else:
        # Fall back to the trailing default word.
        index = -1
    return formatter.format(words[index], value)
Adds simple logic to format strings .
55,445
def list_(formatter, value, name, option, format):
    """Format every item of a sequence, joined with configurable spacers.

    *format* is split into up to five parts: item format, spacer, final
    spacer, and two-item spacer (later parts default to earlier ones).
    Returns None for an empty format, for non-indexable values, for plain
    strings, or when no spacer part is given.
    """
    if not format:
        return
    if not hasattr(value, '__getitem__') or isinstance(value, string_types):
        return
    words = format.split(u'|', 4)
    num_words = len(words)
    if num_words < 2:
        return
    num_items = len(value)
    item_format = words[0]
    spacer = words[1] if num_words >= 2 else u''
    final_spacer = words[2] if num_words >= 3 else spacer
    two_spacer = words[3] if num_words >= 4 else final_spacer
    buf = io.StringIO()
    for idx, item in enumerate(value):
        if idx == 0:
            pass
        elif idx < num_items - 1:
            buf.write(spacer)
        elif idx == 1:
            # Exactly two items: a dedicated spacer between them.
            buf.write(two_spacer)
        else:
            buf.write(final_spacer)
        buf.write(formatter.format(item_format, item, index=idx))
    return buf.getvalue()
Repeats the items of an array .
55,446
def add_months(datetime_like_object, n, return_date=False):
    """Return the time *n* months after the given time.

    When the source day does not exist in the target month (e.g.
    Jan 31 + 1 month), the last day of the target month is used.

    Fix: the original mapped (year, month) to an ordinal as
    ``year * 12 + month`` and back with ``divmod(..., 12)``. For December
    (month == 12) the remainder is 0, which made ``datetime(year, 0, ...)``
    raise and sent every December result through the day-overflow fallback,
    yielding the wrong day. A zero-based month ordinal round-trips cleanly.

    :param datetime_like_object: anything ``parser.parse_datetime`` accepts.
    :param n: number of months to add (may be negative).
    :param return_date: when True, return a ``date`` instead of a datetime.
    """
    a_datetime = parser.parse_datetime(datetime_like_object)
    # Zero-based month ordinal: year*12 + (month-1); month-1 keeps the
    # divmod remainder in 0..11 so month 12 no longer wraps to 0.
    month_ordinal = a_datetime.year * 12 + (a_datetime.month - 1) + n
    year, month_index = divmod(month_ordinal, 12)
    try:
        a_datetime = datetime(
            year, month_index + 1, a_datetime.day,
            a_datetime.hour, a_datetime.minute, a_datetime.second,
            a_datetime.microsecond, tzinfo=a_datetime.tzinfo,
        )
    except ValueError:
        # Day out of range for the target month: take the first day of the
        # following month and step back one day (= last day of target month).
        year, month_index = divmod(month_ordinal + 1, 12)
        a_datetime = datetime(
            year, month_index + 1, 1,
            a_datetime.hour, a_datetime.minute, a_datetime.second,
            a_datetime.microsecond, tzinfo=a_datetime.tzinfo,
        )
        a_datetime = add_days(a_datetime, -1)
    if return_date:
        return a_datetime.date()
    return a_datetime
Returns a time that n months after a time .
55,447
def _log(self, content):
    """Append *content* to the in-memory log buffer.

    Flushes immediately when auto-flush is enabled.
    """
    self._buffer = self._buffer + content
    if self._auto_flush:
        self.flush()
Write a string to the log
55,448
def reset(self):
    """Erase the log buffer and restart the game timestamp."""
    self._game_start_timestamp = datetime.datetime.now()
    self._chars_flushed = 0
    self._buffer = ''
Erase the log and reset the timestamp
55,449
def logpath(self):
    """Return the logfile path, creating the log directory if needed.

    The filename combines the timestamp and all player names, e.g.
    ``<timestamp>-alice-bob.catan``.
    """
    filename = '{}-{}.catan'.format(
        self.timestamp_str(),
        '-'.join(player.name for player in self._players))
    if not os.path.exists(self._log_dir):
        os.mkdir(self._log_dir)
    return os.path.join(self._log_dir, filename)
Return the logfile path and filename as a string .
55,450
def flush(self):
    """Write all un-flushed log content to stdout or append to the logfile.

    The destination is chosen by ``self._use_stdout``; see the constructor
    for logging options.
    """
    latest = self._latest()
    self._chars_flushed += len(latest)
    if self._use_stdout:
        print(latest, file=sys.stdout, flush=True, end='')
    else:
        with open(self.logpath(), 'a') as logfile:
            print(latest, file=logfile, flush=True, end='')
Append the latest updates to file or optionally to stdout instead . See the constructor for logging options .
55,451
def log_game_start(self, players, terrain, numbers, ports):
    """Begin a game: reset the log and record the full initial state.

    Logs the module version, a timestamp, the players, and the board
    layout (terrain, numbers, ports).
    """
    self.reset()
    self._set_players(players)
    self._logln('{} v{}'.format(__name__, __version__))
    self._logln('timestamp: {0}'.format(self.timestamp_str()))
    self._log_players(players)
    self._log_board_terrain(terrain)
    self._log_board_numbers(numbers)
    self._log_board_ports(ports)
    self._logln('...CATAN!')
Begin a game .
55,452
def _log_board_ports(self, ports):
    """Log the board's ports, sorted by (tile_id, direction).

    A board with no ports is allowed (logs an empty list).
    """
    ordered = sorted(ports, key=lambda port: (port.tile_id, port.direction))
    rendered = ' '.join(
        '{}({} {})'.format(p.type.value, p.tile_id, p.direction)
        for p in ordered)
    self._logln('ports: {0}'.format(rendered))
A board with no ports is allowed .
55,453
def _SetGuide(self, guideName):
    """Select the episode guide implementation matching *guideName*.

    Only the EPGuides lookup is currently supported.

    :raises Exception: for any unknown guide name.
    """
    if guideName != epguides.EPGuidesLookup.GUIDE_NAME:
        raise Exception(
            "[RENAMER] Unknown guide set for TVRenamer selection: Got {}, Expected {}".format(
                guideName, epguides.EPGuidesLookup.GUIDE_NAME))
    self._guide = epguides.EPGuidesLookup()
Select guide corresponding to guideName
55,454
def _GetUniqueFileShowNames(self, tvFileList):
    """Return the set of distinct show names found in *tvFileList*.

    :param tvFileList: iterable of tvfile.TVFile objects.
    :returns: set of show name strings.
    """
    return {tvFile.fileInfo.showName for tvFile in tvFileList}
Return a list containing all unique show names from tvfile . TVFile object list .
55,455
def _GetShowInfo(self, stringSearch):
    """Look up show info for *stringSearch* and post-process the result.

    Returns None when no show (or no show id) could be resolved; fills in
    a missing show name from the TV library database.
    """
    goodlogging.Log.Info("RENAMER", "Looking up show info for: {0}".format(stringSearch))
    goodlogging.Log.IncreaseIndent()
    showInfo = self._GetShowID(stringSearch)
    if showInfo is None:
        goodlogging.Log.DecreaseIndent()
        return None
    elif showInfo.showID is None:
        goodlogging.Log.DecreaseIndent()
        return None
    elif showInfo.showName is None:
        # Id is known but name is missing: recover it from the database.
        showInfo.showName = self._db.SearchTVLibrary(showID=showInfo.showID)[0][1]
        goodlogging.Log.Info("RENAMER", "Found show name: {0}".format(showInfo.showName))
        goodlogging.Log.DecreaseIndent()
        return showInfo
    else:
        goodlogging.Log.DecreaseIndent()
        return showInfo
Calls GetShowID and does post processing checks on result .
55,456
def _CreateNewShowDir(self, showName):
    """Propose a directory name for a show and let the user confirm it.

    The suggestion is *showName* with special characters stripped. The
    user may accept it ('y'), skip the show ('x' -> returns None) or type
    a replacement name. With ``_skipUserInput`` the suggestion is accepted
    automatically.
    """
    stripedDir = util.StripSpecialCharacters(showName)
    goodlogging.Log.Info("RENAMER", "Suggested show directory name is: '{0}'".format(stripedDir))
    if self._skipUserInput is False:
        response = goodlogging.Log.Input(
            'RENAMER',
            "Enter 'y' to accept this directory, 'x' to skip this show or enter a new directory to use: ")
    else:
        response = 'y'
    answer = response.lower()
    if answer == 'x':
        return None
    if answer == 'y':
        return stripedDir
    return response
Create new directory name for show . An autogenerated choice which is the showName input that has been stripped of special characters is proposed which the user can accept or they can enter a new name to use . If the skipUserInput variable is True the autogenerated value is accepted by default .
55,457
def _GenerateLibraryPath(self, tvFile, libraryDir):
    """Create a full TV-library path for *tvFile* and attach it.

    Resolves the show's directory from the database, or interactively
    matches/creates one under *libraryDir*, then appends the season
    directory and generates the file's new path. Returns *tvFile*
    (possibly unchanged when the user skips).
    """
    goodlogging.Log.Info("RENAMER", "Looking up library directory in database for show: {0}".format(tvFile.showInfo.showName))
    goodlogging.Log.IncreaseIndent()
    showID, showName, showDir = self._db.SearchTVLibrary(showName=tvFile.showInfo.showName)[0]
    if showDir is None:
        # No directory stored yet: find the best match in the library dir,
        # asking the user (or auto-selecting) until one is chosen.
        goodlogging.Log.Info("RENAMER", "No directory match found in database - looking for best match in library directory: {0}".format(libraryDir))
        dirList = os.listdir(libraryDir)
        listDir = False
        matchName = tvFile.showInfo.showName
        while showDir is None:
            if len(dirList) == 0:
                goodlogging.Log.Info("RENAMER", "TV Library directory is empty")
                response = None
            else:
                if listDir is True:
                    goodlogging.Log.Info("RENAMER", "TV library directory contains: {0}".format(', '.join(dirList)))
                else:
                    matchDirList = util.GetBestMatch(matchName, dirList)
                listDir = False
                if self._skipUserInput is True:
                    # Unattended mode: accept a unique best match only.
                    if len(matchDirList) == 1:
                        response = matchDirList[0]
                        goodlogging.Log.Info("RENAMER", "Automatic selection of show directory: {0}".format(response))
                    else:
                        response = None
                        goodlogging.Log.Info("RENAMER", "Could not make automatic selection of show directory")
                else:
                    listDirPrompt = "enter 'ls' to list all items in TV library directory"
                    response = util.UserAcceptance(matchDirList, promptComment=listDirPrompt, promptOnly=listDir, xStrOverride="to create new show directory")
            # Interpret the response: None -> create new dir; 'ls' -> list;
            # a match -> use it; anything else -> retry with new search name.
            if response is None:
                showDir = self._CreateNewShowDir(tvFile.showInfo.showName)
                if showDir is None:
                    # User skipped this show entirely.
                    goodlogging.Log.DecreaseIndent()
                    return tvFile
            elif response.lower() == 'ls':
                listDir = True
            elif response in matchDirList:
                showDir = response
            else:
                matchName = response
        self._db.UpdateShowDirInTVLibrary(showID, showDir)
    showDir = os.path.join(libraryDir, showDir)
    goodlogging.Log.DecreaseIndent()
    seasonDir = self._LookUpSeasonDirectory(showID, showDir, tvFile.showInfo.seasonNum)
    if seasonDir is None:
        return tvFile
    else:
        showDir = os.path.join(showDir, seasonDir)
    tvFile.GenerateNewFilePath(showDir)
    return tvFile
Creates a full path for TV file in TV library .
55,458
def catch ( ignore = [ ] , was_doing = "something important" , helpfull_tips = "you should use a debugger" , gbc = None ) : exc_cls , exc , tb = sys . exc_info ( ) if exc_cls in ignore : msg = 'exception in ignorelist' gbc . say ( 'ignoring caught:' + str ( exc_cls ) ) return 'exception in ignorelist' ex_message = traceback . format_exception_only ( exc_cls , exc ) [ - 1 ] ex_message = ex_message . strip ( ) error_frame = tb while error_frame . tb_next is not None : error_frame = error_frame . tb_next file = error_frame . tb_frame . f_code . co_filename line = error_frame . tb_lineno stack = traceback . extract_tb ( tb ) formated_stack = [ ] for summary in stack : formated_stack . append ( { 'file' : summary [ 0 ] , 'line' : summary [ 1 ] , 'func' : summary [ 2 ] , 'text' : summary [ 3 ] } ) event = { 'was_doing' : was_doing , 'message' : ex_message , 'errorLocation' : { 'file' : file , 'line' : line , 'full' : file + ' -> ' + str ( line ) } , 'stack' : formated_stack } try : gbc . cry ( 'caught:' + pformat ( event ) ) print ( 'Bubble3: written error to log' ) print ( 'Bubble3: tips for fixing this:' ) print ( helpfull_tips ) except Exception as e : print ( 'Bubble3: cant log error cause of %s' % e )
Catch, prepare, and log an error
55,459
def from_name ( api_url , name , dry_run = False ) : return DataSet ( '/' . join ( [ api_url , name ] ) . rstrip ( '/' ) , token = None , dry_run = dry_run )
Doesn't require a token config param, as all of our data is currently public
55,460
def secured_clipboard ( item ) : expire_clock = time . time ( ) def set_text ( clipboard , selectiondata , info , data ) : if 15.0 >= time . time ( ) - expire_clock : selectiondata . set_text ( item . get_secret ( ) ) clipboard . clear ( ) def clear ( clipboard , data ) : pass targets = [ ( "STRING" , 0 , 0 ) , ( "TEXT" , 0 , 1 ) , ( "COMPOUND_TEXT" , 0 , 2 ) , ( "UTF8_STRING" , 0 , 3 ) ] cp = gtk . clipboard_get ( ) cp . set_with_data ( targets , set_text , clear )
This clipboard only allows 1 paste
55,461
def get_active_window ( ) : active_win = None default = wnck . screen_get_default ( ) while gtk . events_pending ( ) : gtk . main_iteration ( False ) window_list = default . get_windows ( ) if len ( window_list ) == 0 : print "No Windows Found" for win in window_list : if win . is_active ( ) : active_win = win . get_name ( ) return active_win
Get the currently focused window
55,462
def get ( self ) : attrs = ( "networks" , "security_groups" , "floating_ips" , "routers" , "internet_gateways" ) for attr in attrs : setattr ( self , attr , eval ( "self.get_{}()" . format ( attr ) ) )
Get quota from Cloud Provider .
55,463
def join_css_class ( css_class , * additional_css_classes ) : css_set = set ( chain . from_iterable ( c . split ( ' ' ) for c in [ css_class , * additional_css_classes ] if c ) ) return ' ' . join ( css_set )
Returns the union of one or more CSS classes as a space - separated string . Note that the order will not be preserved .
55,464
def _init_routes_and_middlewares ( self ) : self . _init_middlewares ( ) self . _init_endpoints ( ) self . app = falcon . API ( middleware = self . middleware ) self . app . add_error_handler ( Exception , self . _error_handler ) for version_path , endpoints in self . catalog : for route , resource in endpoints : self . app . add_route ( version_path + route , resource )
Initialize hooks and URI routes to resources .
55,465
def listen ( self ) : msgtmpl = ( u'Serving on host %(host)s:%(port)s' ) host = CONF . wsgi . wsgi_host port = CONF . wsgi . wsgi_port LOG . info ( msgtmpl , { 'host' : host , 'port' : port } ) server_cls = self . _get_server_cls ( host ) httpd = simple_server . make_server ( host , port , self . app , server_cls ) httpd . serve_forever ( )
Self - host using bind and port from the WSGI config group .
55,466
def get_promise ( self ) : if self . _promise is None : promise = [ ] if self . read : promise . append ( TDOPromise ( self . _chain , 0 , self . bitcount ) ) else : promise . append ( None ) if self . read_status : promise . append ( TDOPromise ( self . _chain , 0 , self . dev . _desc . _ir_length ) ) else : promise . append ( None ) self . _promise = promise return self . _promise
Return the special set of promises for run_instruction .
55,467
def _get_dataset ( self , dataset , name , color ) : global palette html = "{" html += '\t"label": "' + name + '",' if color is not None : html += '"backgroundColor": "' + color + '",\n' else : html += '"backgroundColor": ' + palette + ',\n' html += '"data": ' + self . _format_list ( dataset ) + ',\n' html += "}" return html
Encode a dataset
55,468
def get ( self , slug , xdata , ydatasets , label , opts , style , ctype ) : xdataset = self . _format_list ( xdata ) width = "100%" height = "300px" if opts is not None : if "width" in opts : width = str ( opts [ "width" ] ) if "height" in opts : height = str ( opts [ "height" ] ) stylestr = '<style>#container_' + slug + ' { width:' + width + ' !important; height:' + height + ' !important}</style>\n' html = stylestr html += '<div id="container_' + slug + '"><canvas id="canvas_' + slug + '"></canvas></div>\n' html += '<script>\n' html += 'var data = {\n' html += 'labels: ' + xdataset + ',\n' html += 'datasets:[\n' colors = None if "color" in style : colors = style [ "color" ] i = 0 for dataset in ydatasets : name = dataset [ "name" ] data = dataset [ "data" ] html += self . _get_dataset ( data , name , colors ) if i < len ( ydatasets ) - 1 : html += "," i += 1 html += ']\n' html += '}\n' html += 'window.onload = function() {' html += 'var ctx = document.getElementById("canvas_' + slug + '").getContext("2d");' html += 'window.myChart = new Chart(ctx, {' html += 'type: "' + ctype + '",' html += 'data: data,' html += 'options: {' html += 'spanGaps: false,' html += 'responsive: true,' html += 'maintainAspectRatio: false,' if "legend" in opts : html += 'legend: {' html += 'position: "' + opts [ "legend" ] + '",' html += '},' else : html += 'legend: {' html += 'display: false,' html += '},' if "title" in opts : html += 'title: {' html += 'display: true,' html += 'text: "' + opts [ "title" ] + '"' html += '}' html += '}' html += '});' html += '};' html += '</script>\n' return html
Returns html for a chart
55,469
def _format_list ( self , data ) : dataset = "[" i = 0 for el in data : if pd . isnull ( el ) : dataset += "null" else : dtype = type ( data [ i ] ) if dtype == int or dtype == float : dataset += str ( el ) else : dataset += '"' + el + '"' if i < len ( data ) - 1 : dataset += ', ' dataset += "]" return dataset
Format a list to use in javascript
55,470
def status ( self , status , headers = None ) : self . response = _Response ( status , headers ) return self
Respond with given status and no content
55,471
def text ( self , text , status = 200 , headers = None ) : self . response = _Response ( status , headers , text . encode ( 'utf8' ) ) return self
Respond with given status and text content
55,472
def matches ( self , method , path , headers , bytes = None ) : return self . _expectation . matches ( method , path , headers , bytes )
Checks if rule matches given request parameters
55,473
def on ( self , method , path = None , headers = None , text = None , json = None ) : rule = Rule ( method , path , headers , text , json ) return self . _add_rule_to ( rule , self . _rules )
Sends response to matching parameters one time and removes it from list of expectations
55,474
def stop ( self ) : self . _server . shutdown ( ) self . _server . server_close ( ) self . _thread . join ( ) self . running = False
Shuts the server down and waits for server thread to join
55,475
def send ( self , stats ) : "Format stats and send to one or more Graphite hosts" buf = cStringIO . StringIO ( ) now = int ( time . time ( ) ) num_stats = 0 pct = stats . percent timers = stats . timers for key , vals in timers . iteritems ( ) : if not vals : continue num = len ( vals ) vals = sorted ( vals ) vmin = vals [ 0 ] vmax = vals [ - 1 ] mean = vmin max_at_thresh = vmax if num > 1 : idx = round ( ( pct / 100.0 ) * num ) tmp = vals [ : int ( idx ) ] if tmp : max_at_thresh = tmp [ - 1 ] mean = sum ( tmp ) / idx key = 'stats.timers.%s' % key buf . write ( '%s.mean %f %d\n' % ( key , mean , now ) ) buf . write ( '%s.upper %f %d\n' % ( key , vmax , now ) ) buf . write ( '%s.upper_%d %f %d\n' % ( key , pct , max_at_thresh , now ) ) buf . write ( '%s.lower %f %d\n' % ( key , vmin , now ) ) buf . write ( '%s.count %d %d\n' % ( key , num , now ) ) num_stats += 1 counts = stats . counts for key , val in counts . iteritems ( ) : buf . write ( 'stats.%s %f %d\n' % ( key , val / stats . interval , now ) ) buf . write ( 'stats_counts.%s %f %d\n' % ( key , val , now ) ) num_stats += 1 gauges = stats . gauges for key , val in gauges . iteritems ( ) : buf . write ( 'stats.%s %f %d\n' % ( key , val , now ) ) buf . write ( 'stats_counts.%s %f %d\n' % ( key , val , now ) ) num_stats += 1 buf . write ( 'statsd.numStats %d %d\n' % ( num_stats , now ) ) for host in self . _hosts : try : sock = socket . create_connection ( host ) sock . sendall ( buf . getvalue ( ) ) sock . close ( ) except Exception , ex : self . error ( E_SENDFAIL % ( 'graphite' , host , ex ) )
Format stats and send to one or more Graphite hosts
55,476
def get_payload ( self , * args , ** kwargs ) : if not kwargs : kwargs = self . default_params else : kwargs . update ( self . default_params ) for item in args : if isinstance ( item , dict ) : kwargs . update ( item ) if hasattr ( self , 'type_params' ) : kwargs . update ( self . type_params ( * args , ** kwargs ) ) return kwargs
Receive all passed in args kwargs and combine them together with any required params
55,477
async def read_frame ( self ) -> DataFrame : if self . _data_frames . qsize ( ) == 0 and self . closed : raise StreamConsumedError ( self . id ) frame = await self . _data_frames . get ( ) self . _data_frames . task_done ( ) if frame is None : raise StreamConsumedError ( self . id ) return frame
Read a single frame from the local buffer .
55,478
def read_frame_nowait ( self ) -> Optional [ DataFrame ] : try : frame = self . _data_frames . get_nowait ( ) except asyncio . QueueEmpty : if self . closed : raise StreamConsumedError ( self . id ) return None self . _data_frames . task_done ( ) if frame is None : raise StreamConsumedError ( self . id ) return frame
Read a single frame from the local buffer immediately .
55,479
def merge ( a , b , op = None , recurse_list = False , max_depth = None ) : if op is None : op = operator . add if max_depth is not None : if max_depth < 1 : return op ( a , b ) else : max_depth -= 1 if isinstance ( a , dict ) and isinstance ( b , dict ) : result = { } for key in set ( chain ( a . keys ( ) , b . keys ( ) ) ) : if key in a and key in b : result [ key ] = merge ( a [ key ] , b [ key ] , op = op , recurse_list = recurse_list , max_depth = max_depth ) elif key in a : result [ key ] = deepcopy ( a [ key ] ) elif key in b : result [ key ] = deepcopy ( b [ key ] ) return result elif isinstance ( a , list ) and isinstance ( b , list ) : if recurse_list and len ( a ) == len ( b ) : result = [ ] for idx in range ( len ( a ) ) : result . append ( merge ( a [ idx ] , b [ idx ] , op = op , recurse_list = recurse_list , max_depth = max_depth ) ) return result else : return op ( a , b ) return op ( a , b )
Immutable merge a structure with b using binary operator op on leaf nodes . All nodes at or below max_depth are considered to be leaf nodes .
55,480
def _param_deprecation_warning ( schema , deprecated , context ) : for i in deprecated : if i in schema : msg = 'When matching {ctx}, parameter {word} is deprecated, use __{word}__ instead' msg = msg . format ( ctx = context , word = i ) warnings . warn ( msg , Warning )
Raises warning about using the old names for some parameters . The new naming scheme just has two underscores on each end of the word for consistency
55,481
def has_perm ( self , user , perm , obj = None , * args , ** kwargs ) : try : if not self . _obj_ok ( obj ) : if hasattr ( obj , 'get_permissions_object' ) : obj = obj . get_permissions_object ( perm ) else : raise InvalidPermissionObjectException return user . permset_tree . allow ( Action ( perm ) , obj ) except ObjectDoesNotExist : return False
Test user permissions for a single action and object .
55,482
def permitted_actions ( self , user , obj = None ) : try : if not self . _obj_ok ( obj ) : raise InvalidPermissionObjectException return user . permset_tree . permitted_actions ( obj ) except ObjectDoesNotExist : return [ ]
Determine list of permitted actions for an object or object pattern .
55,483
def list ( self , name , platform = '' , genre = '' ) : data_list = self . db . get_data ( self . list_path , name = name , platform = platform , genre = genre ) data_list = data_list . get ( 'Data' ) or { } games = data_list . get ( 'Game' ) or [ ] return [ self . _build_item ( ** i ) for i in games ]
The name argument is required for this method as per the API server specification . This method also provides the platform and genre optional arguments as filters .
55,484
def list ( self ) : data_list = self . db . get_data ( self . list_path ) data_list = data_list . get ( 'Data' ) or { } platforms = ( data_list . get ( 'Platforms' ) or { } ) . get ( 'Platform' ) or [ ] return [ self . _build_item ( ** i ) for i in platforms ]
No argument is required for this method as per the API server specification .
55,485
def remove_none_dict_values ( obj ) : if isinstance ( obj , ( list , tuple , set ) ) : return type ( obj ) ( remove_none_dict_values ( x ) for x in obj ) elif isinstance ( obj , dict ) : return type ( obj ) ( ( k , remove_none_dict_values ( v ) ) for k , v in obj . items ( ) if v is not None ) else : return obj
Remove None values from dict .
55,486
def Client ( version = __version__ , resource = None , provider = None , ** kwargs ) : versions = _CLIENTS . keys ( ) if version not in versions : raise exceptions . UnsupportedVersion ( 'Unknown client version or subject' ) if provider is None : raise exceptions . ProviderNotDefined ( 'Not define Provider for Client' ) support_types = CONF . providers . driver_mapper . keys ( ) if provider . type not in support_types : raise exceptions . ProviderTypeNotFound ( 'Unknow provider.' ) resources = _CLIENTS [ version ] . keys ( ) if not resource : raise exceptions . ResourceNotDefined ( 'Not define Resource, choose one: compute, network,\ object_storage, block_storage.' ) elif resource . lower ( ) not in resources : raise exceptions . ResourceNotFound ( 'Unknow resource: compute, network,\ object_storage, block_storage.' ) LOG . info ( 'Instantiating {} client ({})' . format ( resource , version ) ) return _CLIENTS [ version ] [ resource ] ( provider . type , provider . config , ** kwargs )
Initialize client object based on given version .
55,487
def accession ( self ) : accession = None if self . defline . startswith ( '>gi|' ) : match = re . match ( '>gi\|\d+\|[^\|]+\|([^\|\n ]+)' , self . defline ) if match : accession = match . group ( 1 ) elif self . defline . startswith ( '>gnl|' ) : match = re . match ( '>gnl\|[^\|]+\|([^\|\n ]+)' , self . defline ) if match : accession = match . group ( 1 ) elif self . defline . startswith ( '>lcl|' ) : match = re . match ( '>lcl\|([^\|\n ]+)' , self . defline ) if match : accession = match . group ( 1 ) return accession
Parse accession number from commonly supported formats .
55,488
def format_seq ( self , outstream = None , linewidth = 70 ) : if linewidth == 0 or len ( self . seq ) <= linewidth : if outstream is None : return self . seq else : print ( self . seq , file = outstream ) return i = 0 seq = '' while i < len ( self . seq ) : if outstream is None : seq += self . seq [ i : i + linewidth ] + '\n' else : print ( self . seq [ i : i + linewidth ] , file = outstream ) i += linewidth if outstream is None : return seq
Print a sequence in a readable format .
55,489
def get_validator ( filter_data ) : for matcher_type , m in matchers . items ( ) : if hasattr ( m , 'can_handle' ) and m . can_handle ( filter_data ) : filter_data = m . handle ( filter_data ) return filter_data
ask every matcher whether it can serve such filter data
55,490
def run ( ) : network_client = client . Client ( version = _VERSION , resource = _RESOURCES [ 0 ] , provider = _PROVIDER ) network_client . delete ( "4b983028-0f8c-4b63-b10c-6e8420bb7903" )
Run the examples
55,491
def sort ( self , attr ) : self . entries = Sorter ( self . entries , self . category , attr ) . sort_entries ( ) return self
Sort the ratings based on an attribute
55,492
def get_title ( self ) : if self . category == 'cable' : strings = get_strings ( self . soup , 'strong' ) else : strings = get_strings ( self . soup , 'b' ) if len ( strings ) == 0 : strings = get_strings ( self . soup , 'strong' ) if len ( strings ) >= 1 and self . category == 'cable' : return strings [ 0 ] elif len ( strings ) > 0 and 'Fast' in strings [ - 1 ] : return strings [ 0 ] return '' . join ( strings )
Title is either the chart header for a cable ratings page or above the opening description for a broadcast ratings page .
55,493
def get_json ( self ) : ratings_dict = { 'category' : self . category , 'date' : self . date , 'day' : self . weekday , 'next week' : self . next_week , 'last week' : self . last_week , 'entries' : self . entries , 'url' : self . url } return to_json ( ratings_dict )
Serialize ratings object as JSON - formatted string
55,494
def _get_url_params ( self , shorten = True ) : cable = True if self . category == 'cable' else False url_date = convert_month ( self . date , shorten = shorten , cable = cable ) return [ BASE_URL , self . weekday . lower ( ) , self . category + '-ratings' , url_date . replace ( ' ' , '-' ) ]
Returns a list of each parameter to be used for the url format .
55,495
def _verify_page ( self ) : title_date = self . _get_date_in_title ( ) . lower ( ) split_date = self . date . lower ( ) . split ( ) split_date [ 0 ] = split_date [ 0 ] [ : 3 ] return all ( term in title_date for term in split_date )
Verify the ratings page matches the correct date
55,496
def _get_ratings_page ( self ) : self . _build_url ( ) soup = get_soup ( self . url ) if soup : return soup self . _build_url ( shorten = False ) soup = get_soup ( self . url ) if soup : return soup return SearchDaily ( self . category , date = self . date ) . fetch_result ( )
Do a limited search for the correct url .
55,497
def _build_url ( self , shorten = True ) : self . url = URL_FORMAT . format ( * self . _get_url_params ( shorten = shorten ) )
Build the url for a cable ratings page
55,498
def fetch_entries ( self ) : data = [ ] for row in self . get_rows ( ) : if exceeded_limit ( self . limit , len ( data ) ) : break entry = row . find_all ( 'td' ) entry_dict = { } show = entry [ 0 ] . string net = entry [ 1 ] . string if not self . _match_query ( show , net ) : continue entry_dict [ 'show' ] = show entry_dict [ 'net' ] = net entry_dict [ 'time' ] = entry [ 2 ] . string if ',' in entry [ 3 ] . string : entry_dict [ 'viewers' ] = entry [ 3 ] . string . replace ( ',' , '.' ) else : entry_dict [ 'viewers' ] = '0.' + entry [ 3 ] . string entry_dict [ 'rating' ] = entry [ 4 ] . string data . append ( Entry ( ** entry_dict ) ) return data
Fetch data and parse it to build a list of cable entries .
55,499
def _build_url ( self , shorten = True ) : url_order = self . _get_url_params ( shorten = shorten ) if self . category != 'final' : url_order [ 1 ] , url_order [ 2 ] = url_order [ 2 ] , url_order [ 1 ] self . url = URL_FORMAT . format ( * url_order )
Build the url for a broadcast ratings page