idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
11,900 | def remove_existing_links ( root_dir ) : logger = logging . getLogger ( __name__ ) for name in os . listdir ( root_dir ) : full_name = os . path . join ( root_dir , name ) if os . path . islink ( full_name ) : logger . debug ( 'Deleting existing symlink {0}' . format ( full_name ) ) os . remove ( full_name ) | Delete any symlinks present at the root of a directory . |
11,901 | def render_diagram ( out_base ) : import codecs import subprocess import sadisplay desc = sadisplay . describe ( list ( model_registry . values ( ) ) , show_methods = False , show_properties = True , show_indexes = True , ) with codecs . open ( out_base + '.dot' , 'w' , encoding = 'utf-8' ) as f : f . write ( sadisplay . dot ( desc ) ) if not hasattr ( config , 'DOT_EXECUTABLE' ) : raise RuntimeError ( "Please configure the 'DOT_EXECUTABLE' variable in your 'project_config.py'" ) if not os . path . exists ( config . DOT_EXECUTABLE ) : raise IOError ( "Could not find file pointed to by 'DOT_EXECUTABLE': " + str ( config . DOT_EXECUTABLE ) ) subprocess . check_call ( [ config . DOT_EXECUTABLE , '-T' , 'png' , '-o' , out_base + '.png' , out_base + '.dot' ] ) | Render a data model diagram |
11,902 | def get_max_id ( cls , session ) : id_base = None for c in [ cls ] + list ( cls . __bases__ ) : for base_class in c . __bases__ : if base_class . __name__ == 'Base' : if id_base is None : id_base = c else : raise RuntimeError ( "Multiple base object classes for class " + cls . __name__ ) if id_base is None : raise RuntimeError ( "Error searching for base class of " + cls . __name__ ) max_id = session . query ( func . max ( id_base . id ) ) . scalar ( ) if max_id is None : max_id = 0 return max_id | Get the current max value of the id column . |
11,903 | def truncate_to_field_length ( self , field , value ) : max_len = getattr ( self . __class__ , field ) . prop . columns [ 0 ] . type . length if value and len ( value ) > max_len : return value [ : max_len ] else : return value | Truncate the value of a string field to the field's max length . |
11,904 | def extern ( obj , timeout = 200 ) : global installed if not installed : install_hook ( obj , timeout ) installed = True | Tell Tkinter to process untwisted event loop . It registers the update handle just once . |
11,905 | def intern ( obj , timeout ) : core . gear . timeout = timeout core . gear . pool . append ( obj ) | Tell untwisted to process an extern event loop . |
11,906 | def _make_ticket_node ( ticket_id , config , options = None ) : options = options or { } ref = config . jira_uri_template . format ( ticket = ticket_id ) link = nodes . reference ( text = ticket_id , refuri = ref , ** options ) return link | Construct a reference node for a JIRA ticket . |
11,907 | def _oxford_comma_separator ( i , length ) : if length == 1 : return None elif length < 3 and i == 0 : return ' and ' elif i < length - 2 : return ', ' elif i == length - 2 : return ', and ' else : return None | Make a separator for a prose - like list : ', ' between items , except ', and ' after the second to last item . |
11,908 | def jira_role ( name , rawtext , text , lineno , inliner , options = None , content = None , oxford_comma = True ) : options = options or { } content = content or [ ] config = inliner . document . settings . env . app . config ticket_ids = [ each . strip ( ) for each in utils . unescape ( text ) . split ( ',' ) ] n_tickets = len ( ticket_ids ) if oxford_comma : sep_factory = _oxford_comma_separator else : sep_factory = _comma_separator node_list = [ ] for i , ticket_id in enumerate ( ticket_ids ) : node = _make_ticket_node ( ticket_id , config , options = options ) node_list . append ( node ) sep_text = sep_factory ( i , n_tickets ) if sep_text is not None : sep = nodes . raw ( text = sep_text , format = 'html' ) node_list . append ( sep ) return node_list , [ ] | Sphinx role for referencing a JIRA ticket . |
11,909 | def jira_bracket_role ( name , rawtext , text , lineno , inliner , options = None , content = None , open_symbol = '[' , close_symbol = ']' ) : node_list , _ = jira_role ( name , rawtext , text , lineno , inliner , options = options , content = None , oxford_comma = False ) node_list = nodes . raw ( text = open_symbol , format = 'html' ) + node_list + nodes . raw ( text = close_symbol , format = 'html' ) return node_list , [ ] | Sphinx role for referencing a JIRA ticket with ticket numbers enclosed in braces . Useful for changelogs . |
11,910 | def jira_parens_role ( name , rawtext , text , lineno , inliner , options = None , content = None ) : return jira_bracket_role ( name , rawtext , text , lineno , inliner , options = None , content = None , open_symbol = '(' , close_symbol = ')' ) | Sphinx role for referencing a JIRA ticket with ticket numbers enclosed in parentheses . Useful for changelogs . |
11,911 | def _method_call ( self , method , category , ** kwargs ) : session = requests . Session ( ) try : response = session . get ( "http://" + self . _api_address ) except requests . exceptions . ConnectionError : raise FantasyDataError ( 'Error: Cannot connect to the FantasyData API' ) method = method . format ( format = self . _response_format , ** kwargs ) request_url = "/v3/{game_type}/{category}/{format}/{method}?{get_params}" . format ( game_type = self . game_type , category = category , format = self . _response_format , method = method , get_params = self . _get_params ) response = session . get ( self . _api_schema + self . _api_address + request_url , headers = self . _headers ) result = response . json ( ) if isinstance ( result , dict ) and response . status_code : if response . status_code == 401 : raise FantasyDataError ( 'Error: Invalid API key' ) elif response . status_code == 200 : pass else : raise FantasyDataError ( 'Error: Failed to get response' ) return result | Call API method . Generate request . Parse response . Process errors method str API method url for request . Contains parameters params dict parameters for method url |
11,912 | def get_projected_player_game_stats_by_player ( self , season , week , player_id ) : result = self . _method_call ( "PlayerGameProjectionStatsByPlayerID/{season}/{week}/{player_id}" , "projections" , season = season , week = week , player_id = player_id ) return result | Projected Player Game Stats by Player |
11,913 | def get_projected_player_game_stats_by_team ( self , season , week , team_id ) : result = self . _method_call ( "PlayerGameProjectionStatsByTeam/{season}/{week}/{team_id}" , "projections" , season = season , week = week , team_id = team_id ) return result | Projected Player Game Stats by Team |
11,914 | def get_projected_player_game_stats_by_week ( self , season , week ) : result = self . _method_call ( "PlayerGameProjectionStatsByWeek/{season}/{week}" , "projections" , season = season , week = week ) return result | Projected Player Game Stats by Week |
11,915 | def get_projected_fantasy_defense_game_stats_by_week ( self , season , week ) : result = self . _method_call ( "FantasyDefenseProjectionsByGame/{season}/{week}" , "projections" , season = season , week = week ) return result | Projected Fantasy Defense Game Stats by Week |
11,916 | def get_injuries ( self , season , week ) : result = self . _method_call ( "Injuries/{season}/{week}" , "stats" , season = season , week = week ) return result | Injuries by week |
11,917 | def get_injuries_by_team ( self , season , week , team_id ) : result = self . _method_call ( "Injuries/{season}/{week}/{team_id}" , "stats" , season = season , week = week , team_id = team_id ) return result | Injuries by week and team |
11,918 | def get_box_score_by_team ( self , season , week , team_id ) : result = self . _method_call ( "BoxScoreV3/{season}/{week}/{team_id}" , "stats" , season = season , week = week , team_id = team_id ) return result | Box score by week and team |
11,919 | def authenticate ( self , password ) : user = None try : self . _authenticate_user_dn ( password ) self . _check_requirements ( ) self . _get_or_create_user ( ) user = self . _user except self . AuthenticationFailed as e : logger . debug ( u"Authentication failed for %s: %s" % ( self . _username , e ) ) except ldap . LDAPError as e : results = ldap_error . send ( self . backend . __class__ , context = 'authenticate' , exception = e ) if len ( results ) == 0 : logger . warning ( u"Caught LDAPError while authenticating %s: %s" , self . _username , pprint . pformat ( e ) ) except Exception : logger . exception ( u"Caught Exception while authenticating %s" , self . _username ) raise return user | Authenticates against the LDAP directory and returns the corresponding User object if successful . Returns None on failure . |
11,920 | def get_group_permissions ( self ) : if self . _group_permissions is None : self . _group_permissions = set ( ) if self . settings . FIND_GROUP_PERMS : try : self . _load_group_permissions ( ) except ldap . LDAPError as e : results = ldap_error . send ( self . backend . __class__ , context = 'get_group_permissions' , exception = e ) if len ( results ) == 0 : logger . warning ( "Caught LDAPError loading group permissions: %s" , pprint . pformat ( e ) ) return self . _group_permissions | If allowed by the configuration this returns the set of permissions defined by the user s LDAP group memberships . |
11,921 | def _populate_user ( self ) : self . _populate_user_from_attributes ( ) self . _populate_user_from_group_memberships ( ) self . _populate_user_from_dn_regex ( ) self . _populate_user_from_dn_regex_negation ( ) | Populates our User object with information from the LDAP directory . |
11,922 | def _populate_and_save_user_profile ( self ) : try : app_label , class_name = django . conf . settings . AUTH_PROFILE_MODULE . split ( '.' ) profile_model = apps . get_model ( app_label , class_name ) profile , created = profile_model . objects . get_or_create ( user = self . _user ) save_profile = False logger . debug ( "Populating Django user profile for %s" , get_user_username ( self . _user ) ) save_profile = self . _populate_profile_from_attributes ( profile ) or save_profile save_profile = self . _populate_profile_flags_from_dn_regex ( profile ) or save_profile save_profile = self . _populate_profile_from_group_memberships ( profile ) or save_profile signal_responses = populate_user_profile . send ( self . backend . __class__ , profile = profile , ldap_user = self ) if len ( signal_responses ) > 0 : save_profile = True if save_profile : profile . save ( ) except ObjectDoesNotExist : logger . debug ( "Django user %s does not have a profile to populate" , get_user_username ( self . _user ) ) except LookupError : logger . debug ( 'User Profile model defined in settings.AUTH_PROFILE_MODULE is invalid' ) | Populates a User profile object with fields from the LDAP directory . |
11,923 | def _populate_profile_from_attributes ( self , profile ) : save_profile = False for field , attr in self . settings . PROFILE_ATTR_MAP . items ( ) : try : setattr ( profile , field , self . attrs [ attr ] [ 0 ] ) save_profile = True except Exception : logger . warning ( "%s does not have a value for the attribute %s" , self . dn , attr ) return save_profile | Populate the given profile object from AUTH_LDAP_PROFILE_ATTR_MAP . Returns True if the profile was modified . |
11,924 | def _populate_profile_from_group_memberships ( self , profile ) : save_profile = False for field , group_dns in self . settings . PROFILE_FLAGS_BY_GROUP . items ( ) : if isinstance ( group_dns , six . string_types ) : group_dns = [ group_dns ] value = any ( self . _get_groups ( ) . is_member_of ( dn ) for dn in group_dns ) setattr ( profile , field , value ) save_profile = True return save_profile | Populate the given profile object from AUTH_LDAP_PROFILE_FLAGS_BY_GROUP . Returns True if the profile was modified . |
11,925 | def _load_group_permissions ( self ) : group_names = self . _get_groups ( ) . get_group_names ( ) perms = Permission . objects . filter ( group__name__in = group_names ) perms = perms . values_list ( 'content_type__app_label' , 'codename' ) perms = perms . order_by ( ) self . _group_permissions = set ( [ "%s.%s" % ( ct , name ) for ct , name in perms ] ) | Populates self . _group_permissions based on LDAP group membership and Django group permissions . |
11,926 | def get_task_id ( self ) : task_id = self . json_response . get ( "task_id" , None ) self . logger . info ( "%s\t%s" % ( self . request_method , self . request_url ) ) return task_id | Method to get all department members . |
11,927 | def get_message_id ( self ) : message_id = self . json_response . get ( "messageId" , None ) self . logger . info ( "%s\t%s" % ( self . request_method , self . request_url ) ) return message_id | Method to get messageId of group created . |
11,928 | def change_dir ( directory ) : def cd_decorator ( func ) : @ wraps ( func ) def wrapper ( * args , ** kwargs ) : org_path = os . getcwd ( ) os . chdir ( directory ) func ( * args , ** kwargs ) os . chdir ( org_path ) return wrapper return cd_decorator | Wraps a function to run in a given directory . |
11,929 | def build_css ( minimize = True ) : print ( 'Build CSS' ) args = { } args [ 'style' ] = 'compressed' if minimize else 'nested' cmd = CMD_SASS . format ( ** args ) run ( cmd ) | Builds CSS from SASS . |
11,930 | def profile ( func ) : def _f ( * args , ** kwargs ) : print ( "\n<<<---" ) pr = cProfile . Profile ( ) pr . enable ( ) res = func ( * args , ** kwargs ) p = pstats . Stats ( pr ) p . strip_dirs ( ) . sort_stats ( 'cumtime' ) . print_stats ( 20 ) print ( "\n- ) return res return _f | Decorator Execute cProfile |
11,931 | def total_size ( obj , verbose = False ) : seen = set ( ) def sizeof ( o ) : if id ( o ) in seen : return 0 seen . add ( id ( o ) ) s = sys . getsizeof ( o , default = 0 ) if verbose : print ( s , type ( o ) , repr ( o ) ) if isinstance ( o , ( tuple , list , set , frozenset , deque ) ) : s += sum ( map ( sizeof , iter ( o ) ) ) elif isinstance ( o , dict ) : s += sum ( map ( sizeof , chain . from_iterable ( o . items ( ) ) ) ) elif "__dict__" in dir ( o ) : s += sum ( map ( sizeof , chain . from_iterable ( o . __dict__ . items ( ) ) ) ) return s return sizeof ( obj ) | Returns approximate memory size |
11,932 | def mute ( func ) : def _f ( * args , ** kwargs ) : sys . stdout = open ( os . devnull , 'w' ) res = func ( * args , ** kwargs ) sys . stdout . close ( ) sys . stdout = sys . __stdout__ return res return _f | Decorator Make stdout silent |
11,933 | def _insert_html_configs ( c , * , project_name , short_project_name ) : c [ 'templates_path' ] = [ '_templates' , lsst_sphinx_bootstrap_theme . get_html_templates_path ( ) ] c [ 'html_theme' ] = 'lsst_sphinx_bootstrap_theme' c [ 'html_theme_path' ] = [ lsst_sphinx_bootstrap_theme . get_html_theme_path ( ) ] c [ 'html_theme_options' ] = { 'logotext' : short_project_name } c [ 'html_title' ] = project_name c [ 'html_short_title' ] = short_project_name c [ 'html_logo' ] = None c [ 'html_favicon' ] = None if os . path . isdir ( '_static' ) : c [ 'html_static_path' ] = [ '_static' ] else : c [ 'html_static_path' ] = [ ] c [ 'html_last_updated_fmt' ] = '%b %d, %Y' c [ 'html_use_smartypants' ] = True c [ 'html_domain_indices' ] = False c [ 'html_use_index' ] = False c [ 'html_split_index' ] = False c [ 'html_show_sourcelink' ] = True c [ 'html_show_sphinx' ] = True c [ 'html_show_copyright' ] = True c [ 'html_file_suffix' ] = '.html' c [ 'html_search_language' ] = 'en' return c | Insert HTML theme configurations . |
11,934 | def _insert_common_sphinx_configs ( c , * , project_name ) : c [ 'project' ] = project_name c [ 'source_suffix' ] = '.rst' c [ 'source_encoding' ] = 'utf-8-sig' c [ 'master_doc' ] = 'index' c [ 'numfig' ] = True c [ 'numfig_format' ] = { 'figure' : 'Figure %s' , 'table' : 'Table %s' , 'code-block' : 'Listing %s' } c [ 'default_role' ] = 'obj' c [ 'rst_epilog' ] = c [ 'suppress_warnings' ] = [ 'app.add_directive' , ] return c | Add common core Sphinx configurations to the state . |
11,935 | def _insert_breathe_configs ( c , * , project_name , doxygen_xml_dirname ) : if doxygen_xml_dirname is not None : c [ 'breathe_projects' ] = { project_name : doxygen_xml_dirname } c [ 'breathe_default_project' ] = project_name return c | Add breathe extension configurations to the state . |
11,936 | def _insert_automodapi_configs ( c ) : c [ 'numpydoc_show_class_members' ] = False c [ 'autosummary_generate' ] = True c [ 'automodapi_toctreedirnm' ] = 'py-api' c [ 'automodsumm_inherited_members' ] = True c [ 'autodoc_inherit_docstrings' ] = True c [ 'autoclass_content' ] = "class" c [ 'autodoc_default_flags' ] = [ 'show-inheritance' , 'special-members' ] return c | Add configurations related to automodapi autodoc and numpydoc to the state . |
11,937 | def _insert_matplotlib_configs ( c ) : if 'extensions' not in c : c [ 'extensions' ] = [ ] try : import matplotlib . sphinxext . plot_directive c [ 'extensions' ] += [ matplotlib . sphinxext . plot_directive . __name__ ] except ( ImportError , AttributeError ) : warnings . warn ( "matplotlib's plot_directive could not be imported. " "Inline plots will not be included in the output." ) return c | Add configurations related to matplotlib s plot directive to the state . |
11,938 | def _insert_single_package_eups_version ( c , eups_version ) : c [ 'release_eups_tag' ] = 'current' c [ 'release_git_ref' ] = 'master' c [ 'version' ] = eups_version c [ 'release' ] = eups_version c [ 'scipipe_conda_ref' ] = 'master' c [ 'pipelines_demo_ref' ] = 'master' c [ 'newinstall_ref' ] = 'master' return c | Insert version information into the configuration namespace . |
11,939 | def _insert_eups_version ( c ) : eups_tag = os . getenv ( 'EUPS_TAG' ) if eups_tag is None : eups_tag = 'd_latest' if eups_tag in ( 'd_latest' , 'w_latest' , 'current' ) : git_ref = 'master' elif eups_tag . startswith ( 'd_' ) : git_ref = 'master' elif eups_tag . startswith ( 'v' ) : git_ref = eups_tag . lstrip ( 'v' ) . replace ( '_' , '.' ) elif eups_tag . startswith ( 'w_' ) : git_ref = eups_tag . replace ( '_' , '.' ) else : git_ref = 'master' c [ 'release_eups_tag' ] = eups_tag c [ 'release_git_ref' ] = git_ref c [ 'version' ] = eups_tag c [ 'release' ] = eups_tag c [ 'scipipe_conda_ref' ] = git_ref c [ 'pipelines_demo_ref' ] = git_ref c [ 'newinstall_ref' ] = git_ref return c | Insert information about the current EUPS tag into the configuration namespace . |
11,940 | def build_pipelines_lsst_io_configs ( * , project_name , copyright = None ) : sys . setrecursionlimit ( 2000 ) c = { } c = _insert_common_sphinx_configs ( c , project_name = project_name ) c = _insert_html_configs ( c , project_name = project_name , short_project_name = project_name ) c = _insert_extensions ( c ) c = _insert_intersphinx_mapping ( c ) c = _insert_automodapi_configs ( c ) c = _insert_matplotlib_configs ( c ) c = _insert_graphviz_configs ( c ) c = _insert_eups_version ( c ) date = datetime . datetime . now ( ) c [ 'today' ] = date . strftime ( '%Y-%m-%d' ) c [ 'copyright' ] = '2015-{year} LSST contributors' . format ( year = date . year ) c [ 'todo_include_todos' ] = False c [ 'exclude_patterns' ] = [ 'README.rst' , '_build' , 'releases/note-source/*.rst' , 'releases/tickets-source/*.rst' , 'ups' , '.pyvenv' , '.github' , 'home' , ] c = _insert_rst_epilog ( c ) c = _insert_jinja_configuration ( c ) return c | Build a dict of Sphinx configurations that populate the conf . py of the main pipelines_lsst_io Sphinx project for LSST Science Pipelines documentation . |
11,941 | def setup ( self , app ) : super ( ) . setup ( app ) self . cfg . port = int ( self . cfg . port ) self . cfg . db = int ( self . cfg . db ) self . cfg . poolsize = int ( self . cfg . poolsize ) | Setup the plugin . |
11,942 | async def startup ( self , app ) : if self . cfg . fake : if not FakeConnection : raise PluginException ( 'Install fakeredis for fake connections.' ) self . conn = await FakeConnection . create ( ) if self . cfg . pubsub : self . pubsub_conn = self . conn else : try : if self . cfg . poolsize <= 1 : self . conn = await asyncio . wait_for ( asyncio_redis . Connection . create ( host = self . cfg . host , port = self . cfg . port , password = self . cfg . password , db = self . cfg . db , ) , self . cfg . timeout ) else : self . conn = await asyncio . wait_for ( asyncio_redis . Pool . create ( host = self . cfg . host , port = self . cfg . port , password = self . cfg . password , db = self . cfg . db , poolsize = self . cfg . poolsize , ) , self . cfg . timeout ) if self . cfg . pubsub : self . pubsub_conn = await asyncio . wait_for ( asyncio_redis . Connection . create ( host = self . cfg . host , port = self . cfg . port , password = self . cfg . password , db = self . cfg . db , ) , self . cfg . timeout ) except asyncio . TimeoutError : raise PluginException ( 'Muffin-redis connection timeout.' ) if self . cfg . pubsub : self . pubsub_subscription = await self . pubsub_conn . start_subscribe ( ) self . pubsub_reader = ensure_future ( self . _pubsub_reader_proc ( ) , loop = self . app . loop ) | Connect to Redis . |
11,943 | async def cleanup ( self , app ) : self . conn . close ( ) if self . pubsub_conn : self . pubsub_reader . cancel ( ) self . pubsub_conn . close ( ) await asyncio . sleep ( 0 ) | Close self connections . |
11,944 | def set ( self , key , value , * args , ** kwargs ) : if self . cfg . jsonpickle : value = jsonpickle . encode ( value ) return self . conn . set ( key , value , * args , ** kwargs ) | Store the given value into Redis . |
11,945 | async def get ( self , key ) : value = await self . conn . get ( key ) if self . cfg . jsonpickle : if isinstance ( value , bytes ) : return jsonpickle . decode ( value . decode ( 'utf-8' ) ) if isinstance ( value , str ) : return jsonpickle . decode ( value ) return value | Decode the value . |
11,946 | def publish ( self , channel , message ) : if self . cfg . jsonpickle : message = jsonpickle . encode ( message ) return self . conn . publish ( channel , message ) | Publish message to channel . |
11,947 | def start_subscribe ( self ) : if not self . conn : raise ValueError ( 'Not connected' ) elif not self . pubsub_conn : raise ValueError ( 'PubSub not enabled' ) return Subscription ( self ) | Create a new Subscription context manager . |
11,948 | async def _subscribe ( self , channels , is_mask ) : news = [ ] for channel in channels : key = channel , is_mask self . _channels . append ( key ) if key in self . _plugin . _subscriptions : self . _plugin . _subscriptions [ key ] . append ( self . _queue ) else : self . _plugin . _subscriptions [ key ] = [ self . _queue ] news . append ( channel ) if news : await getattr ( self . _sub , 'psubscribe' if is_mask else 'subscribe' ) ( news ) | Subscribe to given channel . |
11,949 | async def _unsubscribe ( self , channels , is_mask ) : vanished = [ ] if channels : for channel in channels : key = channel , is_mask self . _channels . remove ( key ) self . _plugin . _subscriptions [ key ] . remove ( self . _queue ) if not self . _plugin . _subscriptions [ key ] : vanished . append ( channel ) del self . _plugin . _subscriptions [ key ] else : while self . _channels : channel , is_mask = key = self . _channels . pop ( ) self . _plugin . _subscriptions [ key ] . remove ( self . _queue ) if not self . _plugin . _subscriptions [ key ] : vanished . append ( channel ) del self . _plugin . _subscriptions [ key ] if vanished : await getattr ( self . _sub , 'punsubscribe' if is_mask else 'unsubscribe' ) ( vanished ) | Unsubscribe from given channel . |
11,950 | def xor ( a , b ) : return bytearray ( i ^ j for i , j in zip ( a , b ) ) | Bitwise xor on equal length bytearrays . |
11,951 | def value ( self ) : try : if isinstance ( self . __value , Expression ) : return self . __value . value return self . __value except AttributeError : return 0 | Set a calculated value for this Expression . Used when writing formulas using XlsxWriter to give cells an initial value when the sheet is loaded without being calculated . |
11,952 | def has_value ( self ) : try : if isinstance ( self . __value , Expression ) : return self . __value . has_value return True except AttributeError : return False | return True if value has been set |
11,953 | def copy ( source , destination , ignore = None , adapter = None , fatal = True , logger = LOG . debug ) : return _file_op ( source , destination , _copy , adapter , fatal , logger , ignore = ignore ) | Copy source - > destination |
11,954 | def move ( source , destination , adapter = None , fatal = True , logger = LOG . debug ) : return _file_op ( source , destination , _move , adapter , fatal , logger ) | Move source - > destination |
11,955 | def symlink ( source , destination , adapter = None , must_exist = True , fatal = True , logger = LOG . debug ) : return _file_op ( source , destination , _symlink , adapter , fatal , logger , must_exist = must_exist ) | Symlink source < - destination |
11,956 | def soap_attribute ( self , name , value ) : setattr ( self , name , value ) self . _attributes . add ( name ) | Marks an attribute as being a part of the data defined by the soap datatype |
11,957 | def get_soap_object ( self , client ) : def to_soap_attribute ( attr ) : words = attr . split ( '_' ) words = words [ : 1 ] + [ word . capitalize ( ) for word in words [ 1 : ] ] return '' . join ( words ) soap_object = client . factory . create ( self . soap_name ) for attr in self . _attributes : value = getattr ( self , attr ) setattr ( soap_object , to_soap_attribute ( attr ) , value ) return soap_object | Create and return a soap service type defined for this instance |
11,958 | def get_soap_object ( self , client ) : record_data = super ( ) . get_soap_object ( client ) record_data . records = [ Record ( r ) . get_soap_object ( client ) for r in record_data . records ] return record_data | Override default get_soap_object behavior to account for child Record types |
11,959 | def handle_message_registered ( self , msg_data , host ) : response = None if msg_data [ "method" ] == "EVENT" : logger . debug ( "<%s> <euuid:%s> Event message " "received" % ( msg_data [ "cuuid" ] , msg_data [ "euuid" ] ) ) response = self . event ( msg_data [ "cuuid" ] , host , msg_data [ "euuid" ] , msg_data [ "event_data" ] , msg_data [ "timestamp" ] , msg_data [ "priority" ] ) elif msg_data [ "method" ] == "OK EVENT" : logger . debug ( "<%s> <euuid:%s> Event confirmation message " "received" % ( msg_data [ "cuuid" ] , msg_data [ "euuid" ] ) ) try : del self . event_uuids [ msg_data [ "euuid" ] ] except KeyError : logger . warning ( "<%s> <euuid:%s> Euuid does not exist in event " "buffer. Key was removed before we could process " "it." % ( msg_data [ "cuuid" ] , msg_data [ "euuid" ] ) ) elif msg_data [ "method" ] == "OK NOTIFY" : logger . debug ( "<%s> <euuid:%s> Ok notify " "received" % ( msg_data [ "cuuid" ] , msg_data [ "euuid" ] ) ) try : del self . event_uuids [ msg_data [ "euuid" ] ] except KeyError : logger . warning ( "<%s> <euuid:%s> Euuid does not exist in event " "buffer. Key was removed before we could process " "it." % ( msg_data [ "cuuid" ] , msg_data [ "euuid" ] ) ) return response | Processes messages that have been delivered by a registered client . |
11,960 | def autodiscover ( self , message ) : if message [ "version" ] in self . allowed_versions : logger . debug ( "<%s> Client version matches server " "version." % message [ "cuuid" ] ) response = serialize_data ( { "method" : "OHAI Client" , "version" : self . version , "server_name" : self . server_name } , self . compression , encryption = False ) else : logger . warning ( "<%s> Client version %s does not match allowed server " "versions %s" % ( message [ "cuuid" ] , message [ "version" ] , self . version ) ) response = serialize_data ( { "method" : "BYE REGISTER" } , self . compression , encryption = False ) return response | This function simply returns the server version number as a response to the client . |
11,961 | def register ( self , message , host ) : cuuid = message [ "cuuid" ] if len ( self . registry ) > self . registration_limit : logger . warning ( "<%s> Registration limit exceeded" % cuuid ) response = serialize_data ( { "method" : "BYE REGISTER" } , self . compression , encryption = False ) return response data = { "host" : host [ 0 ] , "port" : host [ 1 ] , "time" : datetime . now ( ) } return_msg = { "method" : "OK REGISTER" } if "encryption" in message and self . encryption : data [ "encryption" ] = PublicKey ( message [ "encryption" ] [ 0 ] , message [ "encryption" ] [ 1 ] ) self . encrypted_hosts [ host ] = cuuid return_msg [ "encryption" ] = [ self . encryption . n , self . encryption . e ] if cuuid in self . registry : for key in data : self . registry [ cuuid ] [ key ] = data [ key ] else : self . registry [ cuuid ] = data self . registry [ cuuid ] [ "authenticated" ] = False response = serialize_data ( return_msg , self . compression , encryption = False ) logger . debug ( "<%s> Registry entries:" % cuuid ) for ( key , value ) in self . registry . items ( ) : logger . debug ( "<%s> %s %s" % ( str ( cuuid ) , str ( key ) , pformat ( value ) ) ) return response | This function will register a particular client in the server s registry dictionary . |
11,962 | def is_registered ( self , cuuid , host ) : if ( cuuid in self . registry ) and ( self . registry [ cuuid ] [ "host" ] == host ) : return True else : return False | This function will check to see if a given host with client uuid is currently registered . |
11,963 | def event ( self , cuuid , host , euuid , event_data , timestamp , priority ) : response = None if host in self . encrypted_hosts : logger . debug ( "Encrypted!" ) client_key = self . registry [ cuuid ] [ "encryption" ] else : logger . debug ( "Not encrypted :<" ) client_key = None port = host [ 1 ] host = host [ 0 ] if not self . is_registered ( cuuid , host ) : logger . warning ( "<%s> Sending BYE EVENT: Client not registered." % cuuid ) response = serialize_data ( { "method" : "BYE EVENT" , "data" : "Not registered" } , self . compression , self . encryption , client_key ) return response if euuid in self . event_uuids : logger . warning ( "<%s> Event ID is already being processed: %s" % ( cuuid , euuid ) ) return response self . event_uuids [ euuid ] = 0 logger . debug ( "<%s> <euuid:%s> Currently processing events: " "%s" % ( cuuid , euuid , str ( self . event_uuids ) ) ) logger . debug ( "<%s> <euuid:%s> New event being processed" % ( cuuid , euuid ) ) logger . debug ( "<%s> <euuid:%s> Event Data: %s" % ( cuuid , euuid , pformat ( event_data ) ) ) if self . middleware . event_legal ( cuuid , euuid , event_data ) : logger . debug ( "<%s> <euuid:%s> Event LEGAL. Sending judgement " "to client." % ( cuuid , euuid ) ) response = serialize_data ( { "method" : "LEGAL" , "euuid" : euuid , "priority" : priority } , self . compression , self . encryption , client_key ) thread = threading . Thread ( target = self . middleware . event_execute , args = ( cuuid , euuid , event_data ) ) thread . start ( ) else : logger . debug ( "<%s> <euuid:%s> Event ILLEGAL. Sending judgement " "to client." % ( cuuid , euuid ) ) response = serialize_data ( { "method" : "ILLEGAL" , "euuid" : euuid , "priority" : priority } , self . compression , self . encryption , client_key ) self . listener . call_later ( self . timeout , self . retransmit , { "euuid" : euuid , "response" : response , "cuuid" : cuuid } ) return response | This function will process event packets and send them to legal checks . |
11,964 | def notify ( self , cuuid , event_data ) : euuid = str ( uuid . uuid1 ( ) ) if "encryption" in self . registry [ cuuid ] : client_key = self . registry [ cuuid ] [ "encryption" ] else : client_key = None logger . debug ( "<%s> <%s> Sending NOTIFY event to client with event data: " "%s" % ( str ( cuuid ) , str ( euuid ) , pformat ( event_data ) ) ) try : ip_address = self . registry [ cuuid ] [ "host" ] except KeyError : logger . warning ( "<%s> <%s> Host not found in registry! Transmit " "Canceled" % ( str ( cuuid ) , str ( euuid ) ) ) return False try : port = self . registry [ cuuid ] [ "port" ] except KeyError : logger . warning ( "<%s> <%s> Port not found! Transmit " "Canceled" % ( str ( cuuid ) , str ( euuid ) ) ) return False packet = serialize_data ( { "method" : "NOTIFY" , "event_data" : event_data , "euuid" : euuid } , self . compression , self . encryption , client_key ) address = ( ip_address , port ) self . event_uuids [ euuid ] = 0 logger . debug ( "<%s> Currently processing events: " "%s" % ( cuuid , pformat ( self . event_uuids ) ) ) logger . debug ( "<%s> New NOTIFY event being processed:" % cuuid ) logger . debug ( "<%s> EUUID: %s" % ( cuuid , euuid ) ) logger . debug ( "<%s> Event Data: %s" % ( cuuid , pformat ( event_data ) ) ) self . listener . send_datagram ( packet , address ) self . listener . call_later ( self . timeout , self . retransmit , { "euuid" : euuid , "response" : packet , "cuuid" : cuuid } ) | This function will send a NOTIFY event to a registered client . |
11,965 | def once ( dispatcher , event , handle , * args ) : def shell ( dispatcher , * args ) : try : handle ( dispatcher , * args ) except Exception as e : raise e finally : dispatcher . del_map ( event , shell ) dispatcher . add_map ( event , shell , * args ) | Used to do a mapping like event - > handle but handle is called just once upon event . |
11,966 | def mainloop ( self ) : while True : try : self . update ( ) except Kill : break except KeyboardInterrupt : print ( self . base ) raise | This is the reactor mainloop . It is intented to be called when a reactor is installed . |
11,967 | def start_session ( self , b_hold_session , sig_doc_xml = None , datafile = None ) : response = self . __invoke ( 'StartSession' , { 'bHoldSession' : b_hold_session , 'SigDocXML' : sig_doc_xml or SkipValue , 'datafile' : datafile or SkipValue , 'SigningProfile' : SkipValue , } ) if response [ 'Sesscode' ] : self . data_files = [ ] self . session_code = response [ 'Sesscode' ] if sig_doc_xml : self . container = PreviouslyCreatedContainer ( ) return True return False | Start a DigidocService session |
11,968 | def mobile_sign ( self , id_code , country , phone_nr , language = None , signing_profile = 'LT_TM' ) : if not ( self . container and isinstance ( self . container , PreviouslyCreatedContainer ) ) : assert self . data_files , 'To use MobileSign endpoint the application must ' 'add at least one data file to users session' response = self . __invoke ( 'MobileSign' , { 'SignerIDCode' : id_code , 'SignersCountry' : country , 'SignerPhoneNo' : phone_nr , 'Language' : self . parse_language ( language ) , 'Role' : SkipValue , 'City' : SkipValue , 'StateOrProvince' : SkipValue , 'PostalCode' : SkipValue , 'CountryName' : SkipValue , 'ServiceName' : self . service_name , 'AdditionalDataToBeDisplayed' : self . mobile_message , 'SigningProfile' : signing_profile , 'MessagingMode' : 'asynchClientServer' , 'AsyncConfiguration' : SkipValue , 'ReturnDocInfo' : SkipValue , 'ReturnDocData' : SkipValue , } ) return response | This can be used to add a signature to existing data files |
11,969 | def is_prime ( n , k = 64 ) : if n == 2 : return True if n < 2 or n % 2 == 0 : return False for i in range ( 3 , 2048 ) : if n % i == 0 : return False s = 0 d = n - 1 while True : q , r = divmod ( d , 2 ) if r == 1 : break s += 1 d = q for i in range ( k ) : a = random . randint ( 2 , n - 1 ) if check_candidate ( a , d , n , s ) : return False return True | Test whether n is prime probabilisticly . |
11,970 | def get_prime ( bits , k = 64 ) : if bits % 8 != 0 or bits == 0 : raise ValueError ( "bits must be >= 0 and divisible by 8" ) while True : n = int . from_bytes ( os . urandom ( bits // 8 ) , "big" ) if is_prime ( n , k ) : return n | Return a random prime up to a certain length . |
11,971 | def make_rsa_keys ( bits = 2048 , e = 65537 , k = 64 ) : p , q = None , None while p == q : p , q = get_prime ( bits // 2 ) , get_prime ( bits // 2 ) n = p * q phi_n = phi ( n , p , q ) d = mult_inv ( e , phi_n ) return n , e , d | Create RSA key pair . |
11,972 | def setup ( app ) : for name , ( default , rebuild , _ ) in ref . CONFIG_VALUES . iteritems ( ) : app . add_config_value ( name , default , rebuild ) app . add_directive ( 'javaimport' , ref . JavarefImportDirective ) app . add_role ( 'javaref' , ref . JavarefRole ( app ) ) app . connect ( 'builder-inited' , initialize_env ) app . connect ( 'env-purge-doc' , ref . purge_imports ) app . connect ( 'env-merge-info' , ref . merge_imports ) app . connect ( 'build-finished' , ref . cleanup ) | Register the extension with Sphinx . |
11,973 | def validate_env ( app ) : if not hasattr ( app . env , 'javalink_config_cache' ) : app . env . javalink_config_cache = { } for conf_attr , ( _ , _ , env_attr ) in ref . CONFIG_VALUES . iteritems ( ) : if not env_attr : continue value = getattr ( app . config , conf_attr ) cached = app . env . javalink_config_cache . get ( conf_attr , value ) app . env . javalink_config_cache [ conf_attr ] = value if value != cached : app . verbose ( '[javalink] config.%s has changed, clearing related env' , conf_attr ) delattr ( app . env , env_attr ) | Purge expired values from the environment . |
11,974 | def find_rt_jar ( javahome = None ) : if not javahome : if 'JAVA_HOME' in os . environ : javahome = os . environ [ 'JAVA_HOME' ] elif sys . platform == 'darwin' : javahome = _find_osx_javahome ( ) else : javahome = _get_javahome_from_java ( _find_java_binary ( ) ) rtpath = os . path . join ( javahome , 'jre' , 'lib' , 'rt.jar' ) if not os . path . isfile ( rtpath ) : msg = 'Could not find rt.jar: {} is not a file' . format ( rtpath ) raise ExtensionError ( msg ) return rtpath | Find the path to the Java standard library jar . |
11,975 | def filter ( self , record ) : found = self . _pattern . search ( record . getMessage ( ) ) return not found | Returns True if the record shall be logged . False otherwise . |
11,976 | def _get_value ( obj , key ) : if isinstance ( obj , ( list , tuple ) ) : for item in obj : v = _find_value ( key , item ) if v is not None : return v return None if isinstance ( obj , dict ) : return obj . get ( key ) if obj is not None : return getattr ( obj , key , None ) | Get a value for key from obj if possible |
11,977 | def _find_value ( key , * args ) : for arg in args : v = _get_value ( arg , key ) if v is not None : return v | Find a value for key in any of the objects given as args |
11,978 | def add_search_path ( * path_tokens ) : full_path = os . path . join ( * path_tokens ) if full_path not in sys . path : sys . path . insert ( 0 , os . path . abspath ( full_path ) ) | Adds a new search path from where modules can be loaded . This function is provided for test applications to add locations to the search path so any required functionality can be loaded . It helps keeping the step implementation modules simple by placing the bulk of the implementation in separate utility libraries . This function can also be used to add the application being tested to the path so its functionality can be made available for testing . |
11,979 | def load_script ( filename ) : path , module_name , ext = _extract_script_components ( filename ) add_search_path ( path ) return _load_module ( module_name ) | Loads a python script as a module . |
11,980 | def parse_dates ( df , inplace = True , * args , ** kwargs ) : if not inplace : df = df . copy ( ) for c in df . columns : i = df [ c ] . first_valid_index ( ) if i is not None and type ( df [ c ] . ix [ i ] ) in ( date , datetime ) : df [ c ] = pd . to_datetime ( df [ c ] , * args , ** kwargs ) if not inplace : return df | Parse all datetime . date and datetime . datetime columns |
11,981 | def to_float ( * args ) : floats = [ np . array ( a , dtype = np . float32 ) for a in args ] return floats [ 0 ] if len ( floats ) == 1 else floats | cast numpy arrays to float32 if there s more than one return an array |
11,982 | def get_attr ( name ) : i = name . rfind ( '.' ) cls = str ( name [ i + 1 : ] ) module = str ( name [ : i ] ) mod = __import__ ( module , fromlist = [ cls ] ) return getattr ( mod , cls ) | get a class or function by name |
11,983 | def drop_constant_column_levels ( df ) : columns = df . columns constant_levels = [ i for i , level in enumerate ( columns . levels ) if len ( level ) <= 1 ] constant_levels . reverse ( ) for i in constant_levels : columns = columns . droplevel ( i ) df . columns = columns | drop the levels of a multi - level column dataframe which are constant operates in place |
11,984 | def dict_diff ( dicts ) : diff_keys = set ( ) for k in union ( set ( d . keys ( ) ) for d in dicts ) : values = [ ] for d in dicts : if k not in d : diff_keys . add ( k ) break else : values . append ( d [ k ] ) if nunique ( values ) > 1 : diff_keys . add ( k ) break return [ dict_subset ( d , diff_keys ) for d in dicts ] | Subset dictionaries to keys which map to multiple values |
11,985 | def dict_update_union ( d1 , d2 ) : for k in d2 : if k in d1 : d1 [ k ] . update ( d2 [ k ] ) else : d1 [ k ] = d2 [ k ] | update a set - valued dictionary when key exists union sets |
11,986 | def compile_file ( self , infile , outfile , outdated = False , force = False ) : myfile = codecs . open ( outfile , 'w' , 'utf-8' ) if settings . DEBUG : myfile . write ( sass . compile ( filename = infile ) ) else : myfile . write ( sass . compile ( filename = infile , output_style = 'compressed' ) ) return myfile . close ( ) | Process sass file . |
11,987 | def from_task ( cls , task ) : target = cls ( name = task . get_name ( ) , params = task . get_param_string ( ) ) return target | Create a new target representing a task and its parameters |
11,988 | def _base_query ( self , session ) : return session . query ( ORMTargetMarker ) . filter ( ORMTargetMarker . name == self . name ) . filter ( ORMTargetMarker . params == self . params ) | Base query for a target . |
11,989 | def exists ( self ) : session = client . get_client ( ) . create_session ( ) ret = self . _base_query ( session ) . count ( ) > 0 session . close ( ) return ret | Check if a target exists |
11,990 | def create ( self ) : session = client . get_client ( ) . create_session ( ) if not self . _base_query ( session ) . count ( ) > 0 : marker = ORMTargetMarker ( name = self . name , params = self . params ) session . add ( marker ) session . commit ( ) session . close ( ) | Create an instance of the current target in the database |
11,991 | def remove ( self ) : session = client . get_client ( ) . create_session ( ) if not self . _base_query ( session ) . count ( ) > 0 : session . close ( ) raise RuntimeError ( "Target does not exist, name={:s}, params={:s}" "" . format ( self . name , self . params ) ) self . _base_query ( session ) . delete ( ) session . commit ( ) session . close ( ) | Remove a target |
11,992 | def add_uppercase ( table ) : orig = table . copy ( ) orig . update ( dict ( ( k . capitalize ( ) , v . capitalize ( ) ) for k , v in table . items ( ) ) ) return orig | Extend the table with uppercase options |
11,993 | def translit ( src , table = UkrainianKMU , preserve_case = True ) : u src = text_type ( src ) src_is_upper = src . isupper ( ) if hasattr ( table , "DELETE_PATTERN" ) : src = table . DELETE_PATTERN . sub ( u"" , src ) if hasattr ( table , "PATTERN1" ) : src = table . PATTERN1 . sub ( lambda x : table . SPECIAL_CASES [ x . group ( ) ] , src ) if hasattr ( table , "PATTERN2" ) : src = table . PATTERN2 . sub ( lambda x : table . FIRST_CHARACTERS [ x . group ( ) ] , src ) res = src . translate ( table . MAIN_TRANSLIT_TABLE ) if src_is_upper and preserve_case : return res . upper ( ) else : return res | u Transliterates given unicode src text to transliterated variant according to a given transliteration table . Official ukrainian transliteration is used by default |
11,994 | def store ( self , df , attribute_columns ) : entity_id_start = models . Entity . get_max_id ( self . session ) + 1 attribute_id_start = models . Attribute . get_max_id ( self . session ) + 1 df [ 'id' ] = range ( entity_id_start , entity_id_start + len ( df ) ) df [ 'type' ] = self . type df [ [ 'id' , 'type' ] ] . to_sql ( name = models . Entity . __tablename__ , con = self . client . engine , if_exists = 'append' , index = False ) for col in attribute_columns : attr_df = df [ [ col , 'id' ] ] . rename ( columns = { 'id' : 'entity_id' , col : 'value' } ) attr_df [ 'name' ] = col attr_df [ 'id' ] = range ( attribute_id_start , attribute_id_start + len ( df ) ) attribute_id_start += len ( df ) attr_df . to_sql ( name = models . Attribute . __tablename__ , con = self . client . engine , if_exists = 'append' , index = False ) | Store entities and their attributes |
11,995 | def run ( self ) : df = PaintingsInputData ( ) . load ( ) df . rename ( columns = { 'paintingLabel' : 'name' } , inplace = True ) artists = models . Entity . query_with_attributes ( 'artist' , self . client ) df [ 'artist_id' ] = df [ 'creator_wiki_id' ] . map ( artists . set_index ( 'wiki_id' ) [ 'id' ] ) attribute_columns = [ 'name' , 'wiki_id' , 'area' , 'decade' , 'artist_id' ] self . store ( df , attribute_columns ) self . done ( ) | Load all paintings into the database |
11,996 | def serialize_data ( data , compression = False , encryption = False , public_key = None ) : message = json . dumps ( data ) if compression : message = zlib . compress ( message ) message = binascii . b2a_base64 ( message ) if encryption and public_key : message = encryption . encrypt ( message , public_key ) encoded_message = str . encode ( message ) return encoded_message | Serializes normal Python datatypes into plaintext using json . |
11,997 | def unserialize_data ( data , compression = False , encryption = False ) : try : if encryption : data = encryption . decrypt ( data ) except Exception as err : logger . error ( "Decryption Error: " + str ( err ) ) message = False try : if compression : data = binascii . a2b_base64 ( data ) data = zlib . decompress ( data ) message = json . loads ( data ) except Exception as err : logger . error ( "Decompression Error: " + str ( err ) ) message = False decoded_message = data . decode ( ) if not encryption and not compression : message = json . loads ( decoded_message ) return message | Unserializes the packet data and converts it from json format to normal Python datatypes . |
11,998 | def listen ( self ) : self . listening = True if self . threading : from threading import Thread self . listen_thread = Thread ( target = self . listen_loop ) self . listen_thread . daemon = True self . listen_thread . start ( ) self . scheduler_thread = Thread ( target = self . scheduler ) self . scheduler_thread . daemon = True self . scheduler_thread . start ( ) else : self . listen_loop ( ) | Starts the listen loop . If threading is enabled then the loop will be started in its own thread . |
11,999 | def listen_loop ( self ) : while self . listening : try : data , address = self . sock . recvfrom ( self . bufsize ) self . receive_datagram ( data , address ) if self . stats_enabled : self . stats [ 'bytes_recieved' ] += len ( data ) except socket . error as error : if error . errno == errno . WSAECONNRESET : logger . info ( "connection reset" ) else : raise logger . info ( "Shutting down the listener..." ) | Starts the listen loop and executes the receieve_datagram method whenever a packet is receieved . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.