idx
int64 0
63k
| question
stringlengths 61
4.03k
| target
stringlengths 6
1.23k
|
|---|---|---|
5,400
|
def getData(self):
    """Return a list of normalized InputPoint objects for the contour
    drawn with this pen."""
    # Determine if any on-curve points are present.
    haveOnCurve = False
    for point in self._points:
        if point.segmentType is not None:
            haveOnCurve = True
            break
    # Make sure that the first point is an on-curve point.
    if haveOnCurve:
        _prepPointsForSegments(self._points)
    # Drop a duplicated closing point: same coordinates with both the
    # first and last point on-curve.
    firstPoint = self._points[0]
    lastPoint = self._points[-1]
    if firstPoint.segmentType is not None and lastPoint.segmentType is not None:
        if firstPoint.coordinates == lastPoint.coordinates:
            if (firstPoint.segmentType in ["line", "move"]):
                del self._points[0]
            else:
                raise AssertionError("Unhandled point type sequence")
    return self._points
|
Return a list of normalized InputPoint objects for the contour drawn with this pen .
|
5,401
|
def reCurveFromEntireInputContour(self, inputContour):
    """Match if the entire input contour matches the entire output
    contour, allowing for a different start point.

    On a match, replace self.segments with final OutputSegments copied
    from the input contour and return True; otherwise return False.
    """
    # Compare flat point lists in the output's winding direction.
    if self.clockwise:
        inputFlat = inputContour.clockwiseFlat
    else:
        inputFlat = inputContour.counterClockwiseFlat
    outputFlat = []
    for segment in self.segments:
        assert segment.segmentType == "flat"
        outputFlat += segment.points
    # Cheap length check first, then exact match, then rotated matches.
    haveMatch = False
    if len(inputFlat) == len(outputFlat):
        if inputFlat == outputFlat:
            haveMatch = True
        else:
            # Try every occurrence of the input's start point as a
            # rotation point for the output.
            inputStart = inputFlat[0]
            if inputStart in outputFlat:
                if outputFlat.count(inputStart) > 1:
                    startIndexes = [index for index, point in enumerate(outputFlat)
                                    if point == inputStart]
                else:
                    startIndexes = [outputFlat.index(inputStart)]
                for startIndex in startIndexes:
                    test = outputFlat[startIndex:] + outputFlat[:startIndex]
                    if inputFlat == test:
                        haveMatch = True
                        break
    if haveMatch:
        # Rebuild the segments as final copies of the input's segments.
        self.segments = []
        if self.clockwise:
            inputSegments = inputContour.clockwiseSegments
        else:
            inputSegments = inputContour.counterClockwiseSegments
        for inputSegment in inputSegments:
            self.segments.append(
                OutputSegment(
                    segmentType=inputSegment.segmentType,
                    points=[
                        OutputPoint(
                            coordinates=point.coordinates,
                            segmentType=point.segmentType,
                            smooth=point.smooth,
                            name=point.name,
                            kwargs=point.kwargs
                        )
                        for point in inputSegment.points
                    ],
                    final=True
                )
            )
            inputSegment.used = True
        # The output now follows the input contour's direction.
        self.clockwise = inputContour.clockwise
        return True
    return False
|
Match if entire input contour matches entire output contour allowing for different start point .
|
5,402
|
def _is_custom_qs_manager ( funcdef ) : decors = getattr ( funcdef , 'decorators' , None ) if decors : for dec in decors . get_children ( ) : try : if dec . name == 'queryset_manager' : return True except AttributeError : continue return False
|
Checks if a function definition is a queryset manager created with the
|
5,403
|
def _is_call2custom_manager(node):
    """Check if the call is being made to a custom queryset manager."""
    inferred = safe_infer(node.func)
    return _is_custom_qs_manager(getattr(inferred, '_proxied', None))
|
Checks if the call is being done to a custom queryset manager .
|
5,404
|
def _is_custom_manager_attribute(node):
    """Check if the attribute is a valid attribute of a custom queryset manager."""
    if not name_is_from_qs(node.attrname):
        return False
    return any(
        _is_custom_qs_manager(getattr(safe_infer(child), '_proxied', None))
        for child in node.get_children()
    )
|
Checks if the attribute is a valid attribute for a queryset manager .
|
5,405
|
def by_group_and_perm(cls, group_id, perm_name, db_session=None):
    """Fetch the first permission row matching group id and permission name."""
    session = get_db_session(db_session)
    query = (
        session.query(cls.model)
        .filter(cls.model.group_id == group_id)
        .filter(cls.model.perm_name == perm_name)
    )
    return query.first()
|
return row by group id and permission name
|
5,406
|
def serve_forever(django=False):
    """Start the gevent-socketio server."""
    logger = getLogger("irc.dispatch")
    logger.setLevel(settings.LOG_LEVEL)
    logger.addHandler(StreamHandler())
    app = IRCApplication(django)
    server = SocketIOServer((settings.HTTP_HOST, settings.HTTP_PORT), app)
    # Parenthesized single-argument form works as both the Python 2
    # print statement and the Python 3 print function.
    print("%s [Bot: %s] listening on %s:%s" % (
        settings.GNOTTY_VERSION_STRING,
        app.bot.__class__.__name__,
        settings.HTTP_HOST,
        settings.HTTP_PORT,
    ))
    server.serve_forever()
|
Starts the gevent - socketio server .
|
5,407
|
def kill(pid_file):
    """Attempt to shut down a previously started daemon.

    Returns True on success, False if the pid file is missing/unreadable
    or the process could not be signalled.
    """
    try:
        with open(pid_file) as f:
            pid = int(f.read())
        os.kill(pid, 9)
        os.remove(pid_file)
    except (IOError, OSError):
        return False
    return True
|
Attempts to shut down a previously started daemon .
|
5,408
|
def run():
    """CLI entry point. Parse args and start the gevent-socketio server,
    optionally daemonizing or killing a running daemon first."""
    settings.parse_args()
    pid_name = "gnotty-%s-%s.pid" % (settings.HTTP_HOST, settings.HTTP_PORT)
    pid_file = settings.PID_FILE or os.path.join(gettempdir(), pid_name)
    if settings.KILL:
        # Parenthesized prints: compatible with both Python 2 and 3.
        if kill(pid_file):
            print("Daemon killed")
        else:
            print("Could not kill any daemons")
        return
    elif kill(pid_file):
        print("Running daemon killed")
    if settings.DAEMON:
        daemonize(pid_file)
    serve_forever()
|
CLI entry point . Parses args and starts the gevent - socketio server .
|
5,409
|
def on_start(self, host, port, channel, nickname, password):
    """A WebSocket session has started: create a greenlet hosting the
    IRC client and start it."""
    client = WebSocketIRCClient(host, port, channel, nickname, password, self)
    self.client = client
    self.spawn(client.start)
|
A WebSocket session has started - create a greenlet to host the IRC client and start it .
|
5,410
|
def disconnect(self, *args, **kwargs):
    """WebSocket was disconnected: quit the IRC channel, then defer to
    the parent namespace."""
    quit_message = "%s %s" % (settings.GNOTTY_VERSION_STRING,
                              settings.GNOTTY_PROJECT_URL)
    self.client.connection.quit(quit_message)
    super(IRCNamespace, self).disconnect(*args, **kwargs)
|
WebSocket was disconnected - leave the IRC channel .
|
5,411
|
def respond_webhook(self, environ):
    """Pass the request onto a bot with a webhook if the webhook path
    is requested. Returns the bot's response, or an HTTP status code."""
    request = FieldStorage(fp=environ["wsgi.input"], environ=environ)
    url = environ["PATH_INFO"]
    params = dict([(k, request[k].value) for k in request])
    try:
        if self.bot is None:
            raise NotImplementedError
        response = self.bot.handle_webhook_event(environ, url, params)
    except NotImplementedError:
        return 404
    except Exception:
        # Was a bare "except:", which would also swallow
        # KeyboardInterrupt/SystemExit; narrow it to Exception.
        self.logger.debug(format_exc())
        return 500
    return response or 200
|
Passes the request onto a bot with a webhook if the webhook path is requested .
|
5,412
|
def respond_static(self, environ):
    """Serve a static file when Django isn't being used.

    Returns a (status, headers, content) tuple, or 404 for missing files.
    """
    path = os.path.normpath(environ["PATH_INFO"])
    if path == "/":
        content = self.index()
        content_type = "text/html"
    else:
        # Resolve the request path relative to this package's directory.
        path = os.path.join(os.path.dirname(__file__), path.lstrip("/"))
        # NOTE(review): text mode ("r") may corrupt binary assets such
        # as images on platforms with newline translation — confirm
        # whether "rb" is needed here.
        try:
            with open(path, "r") as f:
                content = f.read()
        except IOError:
            return 404
        content_type = guess_type(path)[0]
    return (200, [("Content-Type", content_type)], content)
|
Serves a static file when Django isn t being used .
|
5,413
|
def index(self):
    """Load the chat interface template when Django isn't being used,
    manually dealing with the Django template bits."""
    root_dir = os.path.dirname(__file__)
    template_dir = os.path.join(root_dir, "templates", "gnotty")
    # Emulate template inheritance: splice chat.html into base.html's
    # content block.
    with open(os.path.join(template_dir, "base.html"), "r") as f:
        base = f.read()
    with open(os.path.join(template_dir, "chat.html"), "r") as f:
        base = base.replace("{% block content %}", f.read())
    # Strip or translate the remaining Django template tags.
    replace = {
        "{% block content %}": "",
        "{% block extrahead %}": "",
        "{% endblock %}": "",
        "{% load gnotty_tags %}": "",
        "{% extends \"gnotty/base.html\" %}": "",
        "{% url gnotty_chat %}": "/",
        "{% gnotty_nav %}": "",
        "{% templatetag openvariable %}": "{{",
        "{% templatetag closevariable %}": "}}",
    }
    for k, v in replace.items():
        base = base.replace(k, v)
    # Substitute settings values for {{ NAME }} style variables.
    for k, v in settings.items():
        base = base.replace("{{ %s }}" % k, unicode(v or ""))
    return base
|
Loads the chat interface template when Django isn t being used manually dealing with the Django template bits .
|
5,414
|
def authorized(self, environ):
    """If we're running Django and LOGIN_REQUIRED is set to True, pull
    the session cookie from the environment and validate that the user
    is authenticated; otherwise allow access."""
    if self.django and settings.LOGIN_REQUIRED:
        try:
            # Imports are deferred and guarded so a missing/broken
            # Django install denies access instead of crashing.
            from django.conf import settings as django_settings
            from django.contrib.auth import SESSION_KEY
            from django.contrib.auth.models import User
            from django.contrib.sessions.models import Session
            from django.core.exceptions import ObjectDoesNotExist
            cookie = SimpleCookie(environ["HTTP_COOKIE"])
            cookie_name = django_settings.SESSION_COOKIE_NAME
            session_key = cookie[cookie_name].value
            session = Session.objects.get(session_key=session_key)
            user_id = session.get_decoded().get(SESSION_KEY)
            user = User.objects.get(id=user_id)
        except (ImportError, KeyError, ObjectDoesNotExist):
            # Missing cookie, stale session, or unknown user.
            return False
    return True
|
If we re running Django and GNOTTY_LOGIN_REQUIRED is set to True pull the session cookie from the environment and validate that the user is authenticated .
|
5,415
|
def base_query(cls, db_session=None):
    """Return the base query for this service's model."""
    session = get_db_session(db_session)
    return session.query(cls.model)
|
returns base query for specific service
|
5,416
|
def on(event, *args, **kwargs):
    """Event method wrapper for bot mixins.

    Marks the decorated method with an ``event`` attribute; the bot
    metaclass later collects these into an event-name -> methods map.
    """
    def wrapper(func):
        # BUG FIX: the original iterated "for i, arg in args", which
        # fails unless every positional arg happens to be a 2-tuple.
        # Positional args are merged into kwargs keyed by position.
        for i, arg in enumerate(args):
            kwargs[i] = arg
        func.event = Event(event, kwargs)
        return func
    return wrapper
|
Event method wrapper for bot mixins . When a bot is constructed its metaclass inspects all members of all base classes and looks for methods marked with an event attribute which is assigned via this wrapper . It then stores all the methods in a dict that maps event names to lists of these methods which are each called when the event occurs .
|
5,417
|
def get_dict(self, exclude_keys=None, include_keys=None):
    """Return a dict of keys and values corresponding to this model's
    data; if include_keys is falsy, all keys (minus exclude_keys) are
    returned."""
    excluded = exclude_keys or []
    included = include_keys or []
    return {
        key: getattr(self, key)
        for key in self._get_keys()
        if key not in excluded and (not include_keys or key in included)
    }
|
return dictionary of keys and values corresponding to this model s data - if include_keys is null the function will return all keys
|
5,418
|
def get_appstruct(self):
    """Return a list of (key, value) tuples for this model's data."""
    return [(key, getattr(self, key)) for key in self._get_keys()]
|
return list of tuples keys and values corresponding to this model s data
|
5,419
|
def delete(self, db_session=None):
    """Delete the object via the session; this permanently removes it
    from storage on commit."""
    session = get_db_session(db_session, self)
    session.delete(self)
|
Deletes the object via session this will permanently delete the object from storage on commit
|
5,420
|
def power_up(self):
    """Power up the HX711 by driving PD_SCK low, then wait for it to settle."""
    GPIO.output(self._pd_sck, False)
    time.sleep(0.01)
    return True
|
power up the HX711
|
5,421
|
def reset(self):
    """Reset the HX711 and prepare it for the next reading."""
    logging.debug("power down")
    self.power_down()
    logging.debug("power up")
    self.power_up()
    logging.debug("read some raw data")
    if self.get_raw_data(6) is False:
        raise GenericHX711Exception("failed to reset HX711")
    return True
|
reset the HX711 and prepare it for the next reading
|
5,422
|
def _validate_measure_count(self, times):
    """Check that *times* is within the borders defined in the class."""
    within_borders = self.min_measures <= times <= self.max_measures
    if not within_borders:
        raise ParameterValidationError(
            "{times} is not within the borders defined in the class".format(
                times=times))
|
check if times is within the borders defined in the class
|
5,423
|
def _validate_gain_A_value(self, gain_A):
    """Validate a given value for channel A gain."""
    valid_gains = self._valid_gains_for_channel_A
    if gain_A not in valid_gains:
        raise ParameterValidationError(
            "{gain_A} is not a valid gain".format(gain_A=gain_A))
|
validate a given value for gain_A
|
5,424
|
def _set_channel_gain(self, num):
    """Finish data transmission from the HX711 by setting the next
    required gain and channel.

    Emits *num* (1-3) extra clock pulses on PD_SCK, then performs a
    throwaway read so the new gain/channel takes effect.
    """
    if not 1 <= num <= 3:
        raise AttributeError()
    for _ in range(num):
        logging.debug("_set_channel_gain called")
        start_counter = time.perf_counter()
        GPIO.output(self._pd_sck, True)
        GPIO.output(self._pd_sck, False)
        end_counter = time.perf_counter()
        time_elapsed = float(end_counter - start_counter)
        # A clock pulse held too long can corrupt the transfer, so warn
        # when the pulse exceeded 60µs.
        if time_elapsed >= 0.00006:
            logging.warning('setting gain and channel took more than 60µs. '
                            'Time elapsed: {:0.8f}'.format(time_elapsed))
    # Confirmation read: fails if the gain/channel was not applied.
    result = self.get_raw_data(times=6)
    if result is False:
        raise GenericHX711Exception("channel was not set properly")
    return True
|
Finish data transmission from HX711 by setting next required gain and channel
|
5,425
|
def get_raw_data(self, times=5):
    """Collect *times* valid raw readings and return them as a list."""
    self._validate_measure_count(times)
    readings = []
    while len(readings) < times:
        value = self._read()
        # False / -1 indicate a failed read; keep trying until we have
        # enough valid samples.
        if value not in [False, -1]:
            readings.append(value)
    return readings
|
do some readings and return the list of raw values
|
5,426
|
def shift_ordering_down(self, parent_id, position, db_session=None,
                        *args, **kwargs):
    """Shift ordering down from *position* to close gaps after a node
    is deleted or moved to another branch."""
    return self.service.shift_ordering_down(
        parent_id=parent_id,
        position=position,
        db_session=db_session,
        *args,
        **kwargs
    )
|
Shifts ordering to close gaps after node deletion or being moved to another branch begins the shift from given position
|
5,427
|
def flatten_list(l: List[list]) -> list:
    """Take a list of lists *l* and return a single flat list."""
    flat = []
    for inner in l:
        flat.extend(inner)
    return flat
|
takes a list of lists l and returns a flat list
|
5,428
|
def read_nem_file(file_path: str) -> NEMFile:
    """Read in a NEM file and return the meter readings named tuple.

    Zip archives are handled transparently; note that only the first
    CSV member of a zip is parsed (the loop returns on the first file).
    """
    _, file_extension = os.path.splitext(file_path)
    if file_extension.lower() == '.zip':
        with zipfile.ZipFile(file_path, 'r') as archive:
            for csv_file in archive.namelist():
                with archive.open(csv_file) as csv_text:
                    # Zip members are bytes; decode before CSV parsing.
                    nmi_file = csv_text.read().decode('utf-8').splitlines()
                    reader = csv.reader(nmi_file, delimiter=',')
                    return parse_nem_rows(reader, file_name=csv_file)
    with open(file_path) as nmi_file:
        return parse_nem_file(nmi_file)
|
Read in NEM file and return meter readings named tuple
|
5,429
|
def parse_nem_file(nem_file) -> NEMFile:
    """Parse an open NEM file and return the meter readings named tuple."""
    rows = csv.reader(nem_file, delimiter=',')
    return parse_nem_rows(rows, file_name=nem_file)
|
Parse NEM file and return meter readings named tuple
|
5,430
|
def calculate_manual_reading(basic_data: BasicMeterData) -> Reading:
    """Build a Reading spanning the interval between two manual register reads."""
    return Reading(
        basic_data.previous_register_read_datetime,
        basic_data.current_register_read_datetime,
        basic_data.quantity,
        basic_data.uom,
        basic_data.current_quality_method,
        "",
        "",
        basic_data.previous_register_read,
        basic_data.current_register_read,
    )
|
Calculate the interval between two manual readings
|
5,431
|
def parse_interval_records(interval_record, interval_date, interval, uom,
                           quality_method) -> List[Reading]:
    """Convert raw interval values into Readings stamped with start/end
    datetimes."""
    step = timedelta(minutes=interval)
    readings = []
    for i, val in enumerate(interval_record):
        t_start = interval_date + (i * step)
        readings.append(
            Reading(t_start=t_start,
                    t_end=t_start + step,
                    read_value=parse_reading(val),
                    uom=uom,
                    quality_method=quality_method,
                    event_code="",
                    event_desc="",
                    read_start=None,
                    read_end=None)
        )
    return readings
|
Convert interval values into tuples with datetime
|
5,432
|
def update_reading_events(readings, event_record):
    """Update readings from a 300 row to reflect any events found in a
    subsequent 400 row (quality method and event reason)."""
    first = event_record.start_interval - 1
    last = event_record.end_interval
    for i in range(first, last):
        old = readings[i]
        readings[i] = Reading(t_start=old.t_start,
                              t_end=old.t_end,
                              read_value=old.read_value,
                              uom=old.uom,
                              quality_method=event_record.quality_method,
                              event_code=event_record.reason_code,
                              event_desc=event_record.reason_description,
                              read_start=old.read_start,
                              read_end=old.read_end)
    return readings
|
Updates readings from a 300 row to reflect any events found in a subsequent 400 row
|
5,433
|
def parse_datetime(record: str) -> Optional[datetime]:
    """Parse a NEM datetime string (YYYYMMDD[HHMM[SS]]) into a datetime.

    Returns None for an empty or whitespace-only field. Raises KeyError
    for lengths other than 8, 12 or 14, and ValueError for malformed
    values.
    """
    format_strings = {8: '%Y%m%d', 12: '%Y%m%d%H%M', 14: '%Y%m%d%H%M%S'}
    # Strip BEFORE the empty check: the original stripped only after
    # testing for '', so a whitespace-only field raised KeyError.
    record = record.strip()
    if record == '':
        return None
    return datetime.strptime(record, format_strings[len(record)])
|
Parse a datetime string into a python datetime object
|
5,434
|
def color(nickname):
    """Provide a consistent color for a nickname.

    Uses the first 6 chars of the nickname's md5 hash and slightly
    darkens the RGB values for use on a light background.
    """
    # Encode explicitly: md5() rejects str input on Python 3 (and this
    # is a no-op for byte strings on Python 2).
    _hex = md5(nickname.encode("utf-8")).hexdigest()[:6]
    darken = lambda s: str(int(round(int(s, 16) * .7)))
    return "rgb(%s)" % ",".join([darken(_hex[i:i + 2]) for i in range(0, 6, 2)])
|
Provides a consistent color for a nickname . Uses first 6 chars of nickname s md5 hash and then slightly darkens the rgb values for use on a light background .
|
5,435
|
def on_welcome(self, connection, event):
    """Join the channel once connected to the IRC server."""
    key = settings.IRC_CHANNEL_KEY or ""
    connection.join(self.channel, key=key)
|
Join the channel once connected to the IRC server .
|
5,436
|
def on_nicknameinuse(self, connection, event):
    """Nickname is taken: increment (or append) a trailing digit and
    re-connect."""
    base = self.nickname
    suffix = ""
    # Peel off any existing trailing digits.
    while base[-1].isdigit():
        suffix = base[-1] + suffix
        base = base[:-1]
    number = int(suffix) + 1 if suffix else 1
    self.nickname = base + str(number)
    self.connect(self.host, self.port, self.nickname)
|
Increment a digit on the nickname if it s in use and re - connect .
|
5,437
|
def message_channel(self, message):
    """Send a message to the channel via the raw socket.

    Bypasses irclib's privmsg -> send_raw methods, which can't handle
    unicode.
    """
    raw = "PRIVMSG %s :%s\r\n" % (self.channel, message)
    self.connection.socket.send(raw.encode("utf-8"))
|
Nicer shortcut for sending a message to a channel . Also irclib doesn t handle unicode so we bypass its privmsg - > send_raw methods and use its socket directly .
|
5,438
|
def emit_message(self, message):
    """Send a message to the channel, echoing it back to the sender's
    WebSocket."""
    # Bail out silently if we don't know our own nickname yet.
    if self.nickname not in self.nicknames:
        return
    nickname_color = self.nicknames[self.nickname]
    message = message[:settings.MAX_MESSAGE_LENGTH]
    # A leading slash means a raw IRC command rather than channel chat.
    if message.startswith("/"):
        self.connection.send_raw(message.lstrip("/"))
        return
    self.message_channel(message)
    self.namespace.emit("message", self.nickname, message, nickname_color)
|
Send a message to the channel . We also emit the message back to the sender s WebSocket .
|
5,439
|
def emit_nicknames(self):
    """Push the sorted nickname list (with colors) to the WebSocket.
    Called whenever the nicknames list changes."""
    payload = []
    for name in sorted(self.nicknames.keys()):
        payload.append({"nickname": name, "color": color(name)})
    self.namespace.emit("nicknames", payload)
|
Send the nickname list to the Websocket . Called whenever the nicknames list changes .
|
5,440
|
def on_join(self, connection, event):
    """Someone joined the channel: record their color and update the
    WebSocket."""
    nickname = self.get_nickname(event)
    joined_color = color(nickname)
    self.nicknames[nickname] = joined_color
    self.namespace.emit("join")
    self.namespace.emit("message", nickname, "joins", joined_color)
    self.emit_nicknames()
|
Someone joined the channel - send the nicknames list to the WebSocket .
|
5,441
|
def on_nick(self, connection, event):
    """Someone changed their nickname: move their entry and notify the
    WebSocket."""
    old_nickname = self.get_nickname(event)
    old_color = self.nicknames.pop(old_nickname)
    new_nickname = event.target()
    self.namespace.emit("message", old_nickname,
                        "is now known as %s" % new_nickname, old_color)
    self.nicknames[new_nickname] = color(new_nickname)
    self.emit_nicknames()
    # Track our own renames too.
    if self.nickname == old_nickname:
        self.nickname = new_nickname
|
Someone changed their nickname - send the nicknames list to the WebSocket .
|
5,442
|
def on_quit(self, connection, event):
    """Someone left the channel: drop them and update the WebSocket
    nickname list."""
    nickname = self.get_nickname(event)
    quit_color = self.nicknames.pop(nickname)
    self.namespace.emit("message", nickname, "leaves", quit_color)
    self.emit_nicknames()
|
Someone left the channel - send the nicknames list to the WebSocket .
|
5,443
|
def on_pubmsg(self, connection, event):
    """Messages received in the channel: forward each to the WebSocket."""
    # The sender is the same for every argument, so resolve the
    # nickname and its color once instead of per message.
    nickname = self.get_nickname(event)
    nickname_color = self.nicknames[nickname]
    for message in event.arguments():
        self.namespace.emit("message", nickname, message, nickname_color)
|
Messages received in the channel - send them to the WebSocket .
|
5,444
|
def perms_for_user(cls, instance, user, db_session=None):
    """Return all permissions the given user has for this resource,
    from groups and directly set ones too."""
    db_session = get_db_session(db_session, instance)
    # Group-based permissions on this resource for any of the user's groups.
    query = db_session.query(
        cls.models_proxy.GroupResourcePermission.group_id.label("owner_id"),
        cls.models_proxy.GroupResourcePermission.perm_name,
        sa.literal("group").label("type"),
    )
    query = query.filter(cls.models_proxy.GroupResourcePermission.group_id.in_(
        [gr.id for gr in user.groups]))
    query = query.filter(cls.models_proxy.GroupResourcePermission.resource_id ==
                         instance.resource_id)
    # Permissions assigned directly to the user on this resource.
    query2 = db_session.query(
        cls.models_proxy.UserResourcePermission.user_id.label("owner_id"),
        cls.models_proxy.UserResourcePermission.perm_name,
        sa.literal("user").label("type"),
    )
    query2 = query2.filter(cls.models_proxy.UserResourcePermission.user_id ==
                           user.id)
    query2 = query2.filter(cls.models_proxy.UserResourcePermission.resource_id ==
                           instance.resource_id)
    query = query.union(query2)
    groups_dict = dict([(g.id, g) for g in user.groups])
    perms = [
        PermissionTuple(
            user,
            row.perm_name,
            row.type,
            groups_dict.get(row.owner_id) if row.type == "group" else None,
            instance,
            False,
            True,
        )
        for row in query
    ]
    # Resource ownership (by user or by one of the user's groups)
    # implicitly grants ALL_PERMISSIONS.
    if instance.owner_user_id == user.id:
        perms.append(
            PermissionTuple(user, ALL_PERMISSIONS, "user", None, instance,
                            True, True))
    groups_dict = dict([(g.id, g) for g in user.groups])
    if instance.owner_group_id in groups_dict:
        perms.append(
            PermissionTuple(
                user,
                ALL_PERMISSIONS,
                "group",
                groups_dict.get(instance.owner_group_id),
                instance,
                True,
                True,
            )
        )
    return perms
|
returns all permissions that given user has for this resource from groups and directly set ones too
|
5,445
|
def direct_perms_for_user(cls, instance, user, db_session=None):
    """Return permissions the given user has for this resource,
    excluding ones inherited from groups the user belongs to."""
    db_session = get_db_session(db_session, instance)
    query = db_session.query(
        cls.models_proxy.UserResourcePermission.user_id,
        cls.models_proxy.UserResourcePermission.perm_name,
    )
    query = query.filter(cls.models_proxy.UserResourcePermission.user_id ==
                         user.id)
    query = query.filter(cls.models_proxy.UserResourcePermission.resource_id ==
                         instance.resource_id)
    perms = [
        PermissionTuple(user, row.perm_name, "user", None, instance, False, True)
        for row in query
    ]
    # Resource ownership implicitly grants ALL_PERMISSIONS.
    if instance.owner_user_id == user.id:
        perms.append(
            PermissionTuple(user, ALL_PERMISSIONS, "user", None, instance, True))
    return perms
|
returns permissions that given user has for this resource without ones inherited from groups that user belongs to
|
5,446
|
def group_perms_for_user(cls, instance, user, db_session=None):
    """Return permissions the given user has for this resource that are
    inherited from groups."""
    db_session = get_db_session(db_session, instance)
    perms = resource_permissions_for_users(
        cls.models_proxy,
        ANY_PERMISSION,
        resource_ids=[instance.resource_id],
        user_ids=[user.id],
        db_session=db_session,
    )
    # Keep only group-sourced permissions.
    perms = [p for p in perms if p.type == "group"]
    # Group ownership of the resource implies ALL_PERMISSIONS.
    groups_dict = dict([(g.id, g) for g in user.groups])
    if instance.owner_group_id in groups_dict:
        perms.append(
            PermissionTuple(
                user,
                ALL_PERMISSIONS,
                "group",
                groups_dict.get(instance.owner_group_id),
                instance,
                True,
                True,
            )
        )
    return perms
|
returns permissions that given user has for this resource that are inherited from groups
|
5,447
|
def by_resource_id(cls, resource_id, db_session=None):
    """Fetch the resource by id."""
    session = get_db_session(db_session)
    return (
        session.query(cls.model)
        .filter(cls.model.resource_id == int(resource_id))
        .first()
    )
|
fetch the resource by id
|
5,448
|
def perm_by_group_and_perm_name(cls, resource_id, group_id, perm_name,
                                db_session=None):
    """Fetch a permission row by group, permission name and resource."""
    session = get_db_session(db_session)
    model = cls.models_proxy.GroupResourcePermission
    query = (
        session.query(model)
        .filter(model.group_id == group_id)
        .filter(model.perm_name == perm_name)
        .filter(model.resource_id == resource_id)
    )
    return query.first()
|
fetch permissions by group and permission name
|
5,449
|
def lock_resource_for_update(cls, resource_id, db_session):
    """Select the resource FOR UPDATE, locking access for other
    transactions."""
    session = get_db_session(db_session)
    return (
        session.query(cls.model)
        .filter(cls.model.resource_id == resource_id)
        .with_for_update()
        .first()
    )
|
Selects resource for update - locking access for other transactions
|
5,450
|
def permissions(cls, instance, db_session=None):
    """Return all non-resource permissions for the user: those based on
    group membership plus directly assigned ones."""
    db_session = get_db_session(db_session, instance)
    # Permissions granted via groups the user belongs to.
    query = db_session.query(
        cls.models_proxy.GroupPermission.group_id.label("owner_id"),
        cls.models_proxy.GroupPermission.perm_name.label("perm_name"),
        sa.literal("group").label("type"),
    )
    query = query.filter(cls.models_proxy.GroupPermission.group_id ==
                         cls.models_proxy.UserGroup.group_id)
    query = query.filter(cls.models_proxy.User.id ==
                         cls.models_proxy.UserGroup.user_id)
    query = query.filter(cls.models_proxy.User.id == instance.id)
    # Permissions assigned directly to the user.
    query2 = db_session.query(
        cls.models_proxy.UserPermission.user_id.label("owner_id"),
        cls.models_proxy.UserPermission.perm_name.label("perm_name"),
        sa.literal("user").label("type"),
    )
    query2 = query2.filter(cls.models_proxy.UserPermission.user_id ==
                           instance.id)
    query = query.union(query2)
    groups_dict = dict([(g.id, g) for g in instance.groups])
    return [
        PermissionTuple(
            instance,
            row.perm_name,
            row.type,
            groups_dict.get(row.owner_id) if row.type == "group" else None,
            None,
            False,
            True,
        )
        for row in query
    ]
|
returns all non - resource permissions based on what groups user belongs and directly set ones for this user
|
5,451
|
def groups_with_resources(cls, instance):
    """Return the groups the user belongs to, with resources owned by
    those groups eagerly loaded."""
    eager = sa.orm.eagerload(cls.models_proxy.Group.resources)
    return instance.groups_dynamic.options(eager)
|
Returns a list of groups users belongs to with eager loaded resources owned by those groups
|
5,452
|
def resources_with_possible_perms(cls, instance, resource_ids=None,
                                  resource_types=None, db_session=None):
    """Return a list of PermissionTuples for the resources this user can
    access and the permissions held on each."""
    perms = resource_permissions_for_users(
        cls.models_proxy,
        ANY_PERMISSION,
        resource_ids=resource_ids,
        resource_types=resource_types,
        user_ids=[instance.id],
        db_session=db_session,
    )
    # Resources owned by the user grant ALL_PERMISSIONS directly...
    for resource in instance.resources:
        perms.append(
            PermissionTuple(instance, ALL_PERMISSIONS, "user", None, resource,
                            True, True))
    # ...and resources owned by the user's groups grant them via the group.
    for group in cls.groups_with_resources(instance):
        for resource in group.resources:
            perms.append(
                PermissionTuple(instance, ALL_PERMISSIONS, "group", group,
                                resource, True, True))
    return perms
|
returns list of permissions and resources for this user
|
5,453
|
def gravatar_url(cls, instance, default="mm", **kwargs):
    """Return the user's gravatar URL.

    Extra keyword args become query parameters; ``d`` falls back to
    *default* when not supplied.
    """
    # Renamed from "hash", which shadowed the builtin.
    email_hash = hashlib.md5(instance.email.encode("utf8").lower()).hexdigest()
    if "d" not in kwargs:
        kwargs["d"] = default
    params = "&".join(
        [six.moves.urllib.parse.urlencode({key: value})
         for key, value in kwargs.items()]
    )
    return "https://secure.gravatar.com/avatar/{}?{}".format(email_hash, params)
|
returns user gravatar url
|
5,454
|
def set_password(cls, instance, raw_password):
    """Hash and set a new password on the user via the password manager,
    then regenerate the security code."""
    manager = instance.passwordmanager
    # Prefer hash(); fall back to the older encrypt() name.
    hash_callable = getattr(manager, "hash", manager.encrypt)
    password = hash_callable(raw_password)
    instance.user_password = password.decode("utf8") if six.PY2 else password
    cls.regenerate_security_code(instance)
|
sets new password on a user using password manager
|
5,455
|
def check_password(cls, instance, raw_password, enable_hash_migration=True):
    """Verify *raw_password* against the stored hash via the password
    manager, optionally migrating the hash to the current scheme."""
    verified, replacement_hash = instance.passwordmanager.verify_and_update(
        raw_password, instance.user_password)
    if enable_hash_migration and replacement_hash:
        instance.user_password = (replacement_hash.decode("utf8")
                                  if six.PY2 else replacement_hash)
    return verified
|
checks string with users password hash using password manager
|
5,456
|
def by_id(cls, user_id, db_session=None):
    """Fetch a user by id, eagerly loading their groups."""
    session = get_db_session(db_session)
    return (
        session.query(cls.model)
        .filter(cls.model.id == user_id)
        .options(sa.orm.eagerload("groups"))
        .first()
    )
|
fetch user by user id
|
5,457
|
def by_user_name_and_security_code(cls, user_name, security_code,
                                   db_session=None):
    """Fetch a user by case-insensitive user name and security code."""
    session = get_db_session(db_session)
    lowered = (user_name or "").lower()
    return (
        session.query(cls.model)
        .filter(sa.func.lower(cls.model.user_name) == lowered)
        .filter(cls.model.security_code == security_code)
        .first()
    )
|
fetch user objects by user name and security code
|
5,458
|
def by_user_names(cls, user_names, db_session=None):
    """Return a query of users matching the given names (case-insensitive)."""
    lowered = [(name or "").lower() for name in user_names]
    session = get_db_session(db_session)
    return session.query(cls.model).filter(
        sa.func.lower(cls.model.user_name).in_(lowered))
|
fetch user objects by user names
|
5,459
|
def user_names_like(cls, user_name, db_session=None):
    """Return users with similar names via a case-insensitive LIKE,
    ordered by user name."""
    session = get_db_session(db_session)
    pattern = (user_name or "").lower()
    return (
        session.query(cls.model)
        .filter(sa.func.lower(cls.model.user_name).like(pattern))
        .order_by(cls.model.user_name)
    )
|
fetch users with similar names using LIKE clause
|
5,460
|
def by_email(cls, email, db_session=None):
    """Fetch a user by case-insensitive email, eagerly loading groups."""
    session = get_db_session(db_session)
    lowered = (email or "").lower()
    return (
        session.query(cls.model)
        .filter(sa.func.lower(cls.model.email) == lowered)
        .options(sa.orm.eagerload("groups"))
        .first()
    )
|
fetch user object by email
|
5,461
|
def users_for_perms(cls, perm_names, db_session=None):
    """Return users that hold at least one of the given permissions,
    whether via group membership or directly."""
    db_session = get_db_session(db_session)
    # Users granted a permission through one of their groups.
    query = db_session.query(cls.model)
    query = query.filter(cls.models_proxy.User.id ==
                         cls.models_proxy.UserGroup.user_id)
    query = query.filter(cls.models_proxy.UserGroup.group_id ==
                         cls.models_proxy.GroupPermission.group_id)
    query = query.filter(cls.models_proxy.GroupPermission.perm_name.in_(
        perm_names))
    # Users granted a permission directly.
    query2 = db_session.query(cls.model)
    query2 = query2.filter(cls.models_proxy.User.id ==
                           cls.models_proxy.UserPermission.user_id)
    query2 = query2.filter(cls.models_proxy.UserPermission.perm_name.in_(
        perm_names))
    users = query.union(query2).order_by(cls.model.id)
    return users
|
return users that have one of the given permissions
|
5,462
|
def handle_joined(self, connection, event):
    """Store join times for the nicknames present when we first join."""
    for raw in event.arguments()[-1].split():
        # Strip IRC mode prefixes ("@" and "+") from each name.
        self.joined[raw.lstrip("@+")] = datetime.now()
|
Store join times for current nicknames when we first join .
|
5,463
|
def handle_join(self, connection, event):
    """Store the join time for a nickname when it joins."""
    self.joined[self.get_nickname(event)] = datetime.now()
|
Store join time for a nickname when it joins .
|
5,464
|
def handle_quit(self, connection, event):
    """Store the quit time for a nickname when it quits."""
    nickname = self.get_nickname(event)
    self.quit[nickname] = datetime.now()
    # pop() with a default: the nickname may be missing from ``joined``
    # (e.g. after a rename); "del" previously raised KeyError here.
    self.joined.pop(nickname, None)
|
Store quit time for a nickname when it quits .
|
5,465
|
def timesince(self, when):
    """Return a human-friendly version of the timespan between now and
    the given datetime, e.g. "1 day, 2 hours and 5 minutes"."""
    units = (
        ("year", 60 * 60 * 24 * 365),
        ("week", 60 * 60 * 24 * 7),
        ("day", 60 * 60 * 24),
        ("hour", 60 * 60),
        ("minute", 60),
        ("second", 1),
    )
    delta = datetime.now() - when
    # Whole seconds only; microseconds are deliberately ignored.
    total_seconds = delta.days * 60 * 60 * 24 + delta.seconds
    parts = []
    for name, seconds in units:
        # Floor division: plain "/" yields floats under Python 3,
        # producing output like "1.5 hours".
        value = total_seconds // seconds
        if value > 0:
            total_seconds %= seconds
            s = "s" if value != 1 else ""
            parts.append("%s %s%s" % (value, name, s))
    # Join with commas, using "and" before the final part.
    return " and ".join(", ".join(parts).rsplit(", ", 1))
|
Returns human friendly version of the timespan between now and the given datetime .
|
5,466
|
def version(self, event):
    """Show the bot's version information."""
    cls = self.__class__
    name = "%s.%s" % (cls.__module__, cls.__name__)
    return "%s [%s]" % (settings.GNOTTY_VERSION_STRING, name)
|
Shows version information .
|
5,467
|
def commands(self, event):
    """List every available command, alphabetically."""
    names = self.commands_dict().keys()
    return "Available commands: %s" % " ".join(sorted(names))
|
Lists all available commands .
|
5,468
|
def help(self, event, command_name=None):
    """Show the help message for the bot, or for ``command_name``.

    With no argument, points the user at ``!commands`` / ``!help
    [command]``.  For a known command, lists its arguments (annotating
    trailing ones with their defaults) and its docstring collapsed onto
    one line.

    Fix: ``inspect.getargspec`` was removed in Python 3.11; use
    ``getfullargspec``, which exposes the same ``args``/``defaults``
    fields for plain positional signatures.
    """
    from inspect import getdoc, getfullargspec
    if command_name is None:
        return ("Type !commands for a list of all commands. Type "
                "!help [command] to see help for a specific command.")
    try:
        command = self.commands_dict()[command_name]
    except KeyError:
        return "%s is not a command" % command_name
    argspec = getfullargspec(command)
    # Skip the implicit (self, event) parameters.
    args = argspec.args[2:]
    defaults = argspec.defaults or []
    # Walk the trailing parameters backwards, attaching their defaults.
    for i in range(-1, -len(defaults) - 1, -1):
        args[i] = "%s [default: %s]" % (args[i], defaults[i])
    args = ", ".join(args)
    help = getdoc(command).replace("\n", " ")
    return "help for %s: (args: %s) %s" % (command_name, args, help)
|
Shows the help message for the bot . Takes an optional command name which when given will show help for that command .
|
5,469
|
def uptime(self, event, nickname=None):
    """Show how long the given nickname has been in the channel.

    Without a nickname (or with the bot's own nickname), reports the
    bot's own uptime instead.
    """
    if nickname and nickname != self.nickname:
        if nickname not in self.joined:
            return "%s is not in the channel" % nickname
        elapsed = self.timesince(self.joined[nickname])
        asker = self.get_nickname(event)
        prefix = "you have" if nickname == asker else "%s has" % nickname
        return "%s been here for %s" % (prefix, elapsed)
    elapsed = self.timesince(self.joined[self.nickname])
    return "I've been here for %s" % elapsed
|
Shows the amount of time since the given nickname joined the channel. If no nickname is given, I'll use my own.
|
5,470
|
def seen(self, event, nickname):
    """Show how long ago the given nickname was last in the channel."""
    if nickname in self.joined:
        who = "you are" if nickname == self.get_nickname(event) else "%s is" % nickname
        return "%s here right now" % who
    if nickname not in self.quit:
        return "%s has never been seen" % nickname
    ago = self.timesince(self.quit[nickname])
    return "%s was last seen %s ago" % (nickname, ago)
|
Shows the amount of time since the given nickname was last seen in the channel .
|
5,471
|
def id(self):
    """Primary-key column: auto-incrementing integer identifier."""
    column = sa.Column(sa.Integer, primary_key=True, autoincrement=True)
    return column
|
Unique identifier of user object
|
5,472
|
def last_login_date(self):
    """Timestamp column storing the user's last login (defaults to now)."""
    col_type = sa.TIMESTAMP(timezone=False)
    # Client-side default is computed lazily; server default mirrors it.
    return sa.Column(col_type, default=lambda x: datetime.utcnow(), server_default=sa.func.now())
|
Date of user s last login
|
5,473
|
def security_code_date(self):
    """Timestamp column recording when the security code was last updated."""
    # A fixed sentinel date far in the past marks "never updated".
    epoch = datetime(2000, 1, 1)
    return sa.Column(sa.TIMESTAMP(timezone=False), default=epoch, server_default="2000-01-01 01:01")
|
Date of user s security code update
|
5,474
|
def groups_dynamic(self):
    """Dynamic relationship to groups, allowing callers to filter the query."""
    options = dict(secondary="users_groups", lazy="dynamic", passive_deletes=True, passive_updates=True)
    return sa.orm.relationship("Group", **options)
|
returns dynamic relationship for groups - allowing for filtering of data
|
5,475
|
def validate_permission(self, key, permission):
    """Validate that the permission may be assigned to this group."""
    allowed = self.__possible_permissions__
    if permission.perm_name in allowed:
        return permission
    raise AssertionError("perm_name is not one of {}".format(allowed))
|
validates if group can get assigned with permission
|
5,476
|
def parse_feeds(self, message_channel=True):
    """Poll each feed URL and handle any entries not seen before.

    :param message_channel: when True, send the first newly seen item to
        the channel and return immediately (at most one message per
        call, presumably to keep the bot from flooding the channel).
    """
    # ``parse`` is presumably feedparser's entry point, falsy when
    # feedparser is unavailable - TODO confirm against the module imports.
    if parse:
        for feed_url in self.feeds:
            feed = parse(feed_url)
            for item in feed.entries:
                if item["id"] not in self.feed_items:
                    # Mark as seen even when not messaging, so a later
                    # call with message_channel=True won't re-announce it.
                    self.feed_items.add(item["id"])
                    if message_channel:
                        message = self.format_item_message(feed, item)
                        self.message_channel(message)
                        return
|
Iterates through each of the feed URLs, parses their items, and sends to the channel any items that have not previously been parsed.
|
5,477
|
def print_meter_record(file_path, rows=5):
    """Print the header, transactions and the last ``rows`` readings of
    every channel in a NEM meter file to the console."""
    meter_data = nr.read_nem_file(file_path)
    print('Header:', meter_data.header)
    print('Transactions:', meter_data.transactions)
    for nmi, channels in meter_data.readings.items():
        for channel, readings in channels.items():
            print(nmi, 'Channel', channel)
            for reading in readings[-rows:]:
                print('', reading)
|
Output readings for specified number of rows to console
|
5,478
|
def users(self):
    """All users that hold permissions for this resource."""
    rel_kwargs = dict(secondary="users_resources_permissions", passive_deletes=True, passive_updates=True)
    return sa.orm.relationship("User", **rel_kwargs)
|
returns all users that have permissions for this resource
|
5,479
|
def register(linter):
    """Register the checker, astroid transforms and message suppressions."""
    checker = MongoEngineChecker(linter)
    linter.register_checker(checker)
    for module_name in ('mongoengine', 'mongomotor'):
        add_transform(module_name)
    suppress_qs_decorator_messages(linter)
    suppress_fields_attrs_messages(linter)
|
Add the needed transformations and supressions .
|
5,480
|
def output_as_csv(file_name, nmi=None, output_file=None):
    """Transpose all channels of a NEM meter file into a single CSV that
    is easier to read and chart.

    :param file_name: path of the NEM meter file to read
    :param nmi: NMI to export; defaults to the first one in the file
    :param output_file: destination path; defaults to
        ``<nmi>_<lastdate>_transposed.csv``
    :return: the path of the CSV file written
    """
    m = read_nem_file(file_name)
    if nmi is None:
        # Default to the first NMI found in the file.
        nmi = list(m.readings.keys())[0]
    channels = list(m.transactions[nmi].keys())
    # Assumes every channel has the same number of aligned readings -
    # the first channel drives the timestamps and quality flags.
    num_records = len(m.readings[nmi][channels[0]])
    last_date = m.readings[nmi][channels[0]][-1].t_end
    if output_file is None:
        output_file = '{}_{}_transposed.csv'.format(nmi, last_date.strftime('%Y%m%d'))
    with open(output_file, 'w', newline='') as csvfile:
        cwriter = csv.writer(csvfile, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
        # Header row: period columns, one column per channel, then quality.
        heading_list = ['period_start', 'period_end']
        for channel in channels:
            heading_list.append(channel)
        heading_list.append('quality_method')
        cwriter.writerow(heading_list)
        # One row per reading interval, channels side by side.
        for i in range(0, num_records):
            t_start = m.readings[nmi][channels[0]][i].t_start
            t_end = m.readings[nmi][channels[0]][i].t_end
            quality_method = m.readings[nmi][channels[0]][i].quality_method
            row_list = [t_start, t_end]
            for ch in channels:
                val = m.readings[nmi][ch][i].read_value
                row_list.append(val)
            row_list.append(quality_method)
            cwriter.writerow(row_list)
    return output_file
|
Transpose all channels and output a csv that is easier to read and do charting on
|
5,481
|
def by_group_name(cls, group_name, db_session=None):
    """Fetch a single group by its exact name (None if absent)."""
    session = get_db_session(db_session)
    matching = session.query(cls.model).filter(cls.model.group_name == group_name)
    return matching.first()
|
fetch group by name
|
5,482
|
def get_user_paginator(cls, instance, page=1, item_count=None, items_per_page=50, user_ids=None, GET_params=None, ):
    """Return a paginator over the users belonging to the group.

    :param instance: group whose ``users_dynamic`` relationship is paged
    :param user_ids: optional restriction to specific user ids
    :param GET_params: extra keyword arguments forwarded to the paginator
        (NOTE: the caller-supplied dict is mutated in place - its "page"
        key, if any, is popped so it cannot clash with ``page``)
    """
    if not GET_params:
        GET_params = {}
    GET_params.pop("page", None)
    query = instance.users_dynamic
    if user_ids:
        query = query.filter(cls.models_proxy.UserGroup.user_id.in_(user_ids))
    return SqlalchemyOrmPage(query, page=page, item_count=item_count, items_per_page=items_per_page, **GET_params)
|
returns paginator over users belonging to the group
|
5,483
|
def resources_with_possible_perms(cls, instance, perm_names=None, resource_ids=None, resource_types=None, db_session=None, ):
    """Return PermissionTuples for every resource permission this group
    grants.

    :param instance: the group whose permissions are listed
    :param perm_names: restrict to these permission names
        (ANY_PERMISSION acts as a wildcard and disables the filter)
    :param resource_ids: restrict the search to specific resources
    :param resource_types: restrict to specific resource types
    :param db_session: optional explicit session
    :return: list of PermissionTuple, plus an ALL_PERMISSIONS tuple for
        each resource the group itself owns
    """
    db_session = get_db_session(db_session, instance)
    query = db_session.query(cls.models_proxy.GroupResourcePermission.perm_name, cls.models_proxy.Group, cls.models_proxy.Resource, )
    query = query.filter(cls.models_proxy.Resource.resource_id == cls.models_proxy.GroupResourcePermission.resource_id)
    query = query.filter(cls.models_proxy.Group.id == cls.models_proxy.GroupResourcePermission.group_id)
    if resource_ids:
        query = query.filter(cls.models_proxy.GroupResourcePermission.resource_id.in_(resource_ids))
    if resource_types:
        query = query.filter(cls.models_proxy.Resource.resource_type.in_(resource_types))
    # ANY_PERMISSION (bare or as a one-element list) skips the name filter.
    if perm_names not in ([ANY_PERMISSION], ANY_PERMISSION) and perm_names:
        query = query.filter(cls.models_proxy.GroupResourcePermission.perm_name.in_(perm_names))
    query = query.filter(cls.models_proxy.GroupResourcePermission.group_id == instance.id)
    perms = [PermissionTuple(None, row.perm_name, "group", instance, row.Resource, False, True) for row in query]
    # Resources owned by the group implicitly carry ALL_PERMISSIONS.
    for resource in instance.resources:
        perms.append(PermissionTuple(None, ALL_PERMISSIONS, "group", instance, resource, True, True))
    return perms
|
returns list of permissions and resources for this group resource_ids restricts the search to specific resources
|
5,484
|
def wait_closed(self):
    """Wait until all of the pool's connections are closed.

    Must be called after ``close()``; raises RuntimeError otherwise.
    """
    if self._closed:
        # Already fully closed - nothing to wait for.
        return
    if not self._closing:
        raise RuntimeError(".wait_closed() should be called "
                           "after .close()")
    # Close every idle connection first.
    while self._free:
        conn = self._free.popleft()
        if not conn.closed:
            yield from conn.close()
        else:
            pass
    # Then wait for in-use connections to be released back to the pool.
    with (yield from self._cond):
        while self.size > self.freesize:
            yield from self._cond.wait()
    self._used.clear()
    self._closed = True
|
Wait for closing all pool s connections .
|
5,485
|
def _fill_free_pool(self, override_min):
    """Create new connections until the pool reaches ``minsize``.

    When ``override_min`` is true and no free connection resulted, one
    extra connection may be created as long as the pool stays below
    ``maxsize``.
    """
    # Top the pool up to its guaranteed minimum size.
    while self.size < self.minsize:
        self._acquiring += 1
        try:
            conn = yield from connect(database=self._database, echo=self._echo, loop=self._loop, **self._conn_kwargs)
            self._free.append(conn)
            # Wake a waiter that may be blocked in acquire().
            self._cond.notify()
        finally:
            self._acquiring -= 1
    if self._free:
        return
    # No free connection available: optionally grow past minsize,
    # but never beyond maxsize.
    if override_min and self.size < self.maxsize:
        self._acquiring += 1
        try:
            conn = yield from connect(database=self._database, echo=self._echo, loop=self._loop, **self._conn_kwargs)
            self._free.append(conn)
            self._cond.notify()
        finally:
            self._acquiring -= 1
|
Fill the free pool with new connections up to minsize (or one extra connection, up to maxsize, when override_min is set).
|
5,486
|
def add_function(self, function):
    """Register ``function``; raise FunctionAlreadyRegistered for a duplicate name."""
    built = self.build_function(function)
    if built.name in self.functions:
        raise FunctionAlreadyRegistered(built.name)
    self.functions[built.name] = built
|
Adds the function to the list of registered functions .
|
5,487
|
def get_one(self, context, name):
    """Return the registered function called ``name``; the context is ignored."""
    if name in self.functions:
        return self.functions[name]
    raise FunctionNotFound(name)
|
Returns a function if it is registered the context is ignored .
|
5,488
|
def _preprocess_kwargs ( self , initial_kwargs ) : kwargs = initial_kwargs . copy ( ) generic_key_related_kwargs = self . _get_generic_key_related_kwargs ( initial_kwargs ) for key , value in generic_key_related_kwargs . items ( ) : del kwargs [ key ] try : suffix = key . split ( '__' ) [ 1 ] except IndexError : suffix = None new_kwargs = self . _get_filter_object_id_and_content_type_filter_kwargs ( value , suffix ) kwargs . update ( new_kwargs ) return kwargs
|
Replace generic key related attribute with filters by object_id and content_type fields
|
5,489
|
def drop_columns_with_unique_values(data: pd.DataFrame, max_unique_values: float = 0.25) -> None:
    """Drop, in place, object/category columns whose proportion of unique
    (non-null) values exceeds ``max_unique_values``.

    Fix: the threshold is a proportion in [0, 1], so it is annotated as
    ``float`` (it was mis-annotated ``int`` while defaulting to 0.25).

    :param data: frame to mutate in place
    :param max_unique_values: maximum allowed unique-to-total ratio
    """
    size = data.shape[0]
    # True for high-cardinality text-like columns, False otherwise.
    high_cardinality = data.apply(
        lambda se: ((se.dropna().unique().shape[0] / size) > max_unique_values
                    and se.dtype in ['object', 'category']))
    data.drop(high_cardinality[high_cardinality].index, axis=1, inplace=True)
|
Remove columns when the proportion of the total of unique values is more than the max_unique_values threshold just for columns with type as object or category
|
5,490
|
def _get_url ( self , obj ) : format_kwargs = { 'app_label' : obj . _meta . app_label , } try : format_kwargs [ 'model_name' ] = getattr ( obj . __class__ , 'get_url_name' ) ( ) except AttributeError : format_kwargs [ 'model_name' ] = obj . _meta . object_name . lower ( ) return self . _default_view_name % format_kwargs
|
Gets object url
|
5,491
|
def to_representation(self, obj):
    """Serialize ``obj`` as the absolute URL of its detail view."""
    lookup_kwargs = None
    # Use the first lookup field the object actually has.
    for field_name in self.lookup_fields:
        if hasattr(obj, field_name):
            lookup_kwargs = {field_name: getattr(obj, field_name)}
            break
    if lookup_kwargs is None:
        raise AttributeError('Related object does not have any of lookup_fields')
    request = self._get_request()
    url = reverse(self._get_url(obj), kwargs=lookup_kwargs)
    return request.build_absolute_uri(url)
|
Serializes any object to his url representation
|
5,492
|
def to_internal_value(self, data):
    """Restore a model instance from its URL representation.

    :param data: URL string pointing at a related object
    :return: the restored instance, or None for empty input
    :raises serializers.ValidationError: when the URL is malformed,
        cannot be resolved to an object, or resolves to a model outside
        ``self.related_models``
    """
    if not data:
        return None
    request = self._get_request()
    user = request.user
    try:
        # Resolve the URL to an instance on behalf of the requesting user.
        obj = core_utils.instance_from_url(data, user=user)
        model = obj.__class__
    except ValueError:
        raise serializers.ValidationError(_('URL is invalid: %s.') % data)
    except (Resolver404, AttributeError, MultipleObjectsReturned, ObjectDoesNotExist):
        raise serializers.ValidationError(_("Can't restore object from url: %s") % data)
    # Only whitelisted models may participate in this relationship.
    if model not in self.related_models:
        raise serializers.ValidationError(_('%s object does not support such relationship.') % six.text_type(obj))
    return obj
|
Restores model instance from its url
|
5,493
|
def validate(self, data):
    """Check that the interval's start comes before its end."""
    has_bounds = 'start' in data and 'end' in data
    if has_bounds and not data['start'] < data['end']:
        raise serializers.ValidationError(_('End must occur after start.'))
    return data
|
Check that the start is before the end .
|
5,494
|
def log_celery_task(request):
    """Format a celery request for logging, adding the task's
    human-readable description plus eta/expires when available.

    :param request: celery task request object
    :return: log line like ``name[id] description eta:[...] expires:[...]``
    """
    task = request.task
    description = None
    if isinstance(task, Task):
        try:
            description = task.get_description(*request.args, **request.kwargs)
        except NotImplementedError:
            # Tasks are not required to provide a description.
            pass
        except Exception as e:
            # Never let a broken get_description() break logging itself.
            logger.exception('Cannot get description for task %s. Error: %s' % (task.__class__.__name__, e))
    return '{0.name}[{0.id}]{1}{2}{3}'.format(request, ' {0}'.format(description) if description else '', ' eta:[{0}]'.format(request.eta) if request.eta else '', ' expires:[{0}]'.format(request.expires) if request.expires else '', )
|
Add description to celery log output
|
5,495
|
def run(self, serialized_instance, *args, **kwargs):
    """Deserialize the target instance and execute the backend operation.

    :param serialized_instance: serialized reference to a model instance
    :return: the execution result; model results are re-serialized so
        they can cross the task boundary
    :raises ObjectDoesNotExist: when the instance cannot be restored
        (e.g. it was deleted before the task ran)
    """
    try:
        instance = utils.deserialize_instance(serialized_instance)
    except ObjectDoesNotExist:
        message = ('Cannot restore instance from serialized object %s. Probably it was deleted.' % serialized_instance)
        # Re-raise with a clearer message while keeping the traceback.
        six.reraise(ObjectDoesNotExist, message)
    self.args = args
    self.kwargs = kwargs
    self.pre_execute(instance)
    result = self.execute(instance, *self.args, **self.kwargs)
    self.post_execute(instance)
    # Model instances cannot be passed between tasks directly.
    if result and isinstance(result, django_models.Model):
        result = utils.serialize_instance(result)
    return result
|
Deserialize input data and start backend operation execution
|
5,496
|
def is_previous_task_processing(self, *args, **kwargs):
    """Return True if an equal task is already active, scheduled or reserved.

    Fix: ``reserved.values()`` was concatenated without being wrapped in
    ``list`` - under Python 3 ``list + dict_values`` raises TypeError.
    """
    app = self._get_app()
    inspect = app.control.inspect()
    # Any of these can be None when no workers respond.
    active = inspect.active() or {}
    scheduled = inspect.scheduled() or {}
    reserved = inspect.reserved() or {}
    # Flatten the per-worker task lists into one list of task dicts.
    uncompleted = sum(list(active.values()) + list(scheduled.values()) + list(reserved.values()), [])
    return any(self.is_equal(task, *args, **kwargs) for task in uncompleted)
|
Return True if there exists an uncompleted task equal to the current one.
|
5,497
|
def apply_async(self, args=None, kwargs=None, **options):
    """Skip scheduling when an equal uncompleted task already exists.

    Returns an AsyncResult either way, so callers can treat both the
    scheduled and the skipped path uniformly.
    """
    # NOTE(review): assumes args/kwargs are an iterable/mapping, not
    # None - confirm callers never rely on the defaults here.
    if self.is_previous_task_processing(*args, **kwargs):
        message = 'Background task %s was not scheduled, because its predecessor is not completed yet.' % self.name
        logger.info(message)
        # Hand back a result object for the task that was NOT started.
        return self.AsyncResult(options.get('task_id') or str(uuid4()))
    return super(BackgroundTask, self).apply_async(args=args, kwargs=kwargs, **options)
|
Do not run background task if previous task is uncompleted
|
5,498
|
def _get_cache_key ( self , args , kwargs ) : hash_input = json . dumps ( { 'name' : self . name , 'args' : args , 'kwargs' : kwargs } , sort_keys = True ) return hashlib . md5 ( hash_input ) . hexdigest ( )
|
Returns key to be used in cache
|
5,499
|
def apply_async(self, args=None, kwargs=None, **options):
    """Skip execution while a penalty counter for these arguments is set.

    The counter is decremented on each skipped call; once it reaches
    zero the task is scheduled normally again.
    """
    key = self._get_cache_key(args, kwargs)
    counter, penalty = cache.get(key, (0, 0))
    if not counter:
        # No penalty outstanding - schedule as usual.
        return super(PenalizedBackgroundTask, self).apply_async(args=args, kwargs=kwargs, **options)
    # Consume one unit of the penalty and refuse to schedule.
    cache.set(key, (counter - 1, penalty), self.CACHE_LIFETIME)
    logger.info('The task %s will not be executed due to the penalty.' % self.name)
    return self.AsyncResult(options.get('task_id') or str(uuid4()))
|
Checks whether task must be skipped and decreases the counter in that case .
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.