idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
def abstract(class_):
    """Mark the class as an *abstract* base class forbidding its instantiation.

    :param class_: the class object to mark
    :raises TypeError: if applied to anything other than a class
    :raises ValueError: if the class already has an unsupported custom metaclass
    :return: the (possibly re-metaclassed) abstract class
    """
    if not inspect.isclass(class_):
        raise TypeError("@abstract can only be applied to classes")

    # Decide which ABC-capable metaclass to swap in, if any.
    abc_meta = None
    class_meta = type(class_)
    if class_meta not in (_ABCMetaclass, _ABCObjectMetaclass):
        if class_meta is type:
            abc_meta = _ABCMetaclass
        elif class_meta is ObjectMetaclass:
            abc_meta = _ABCObjectMetaclass
        else:
            raise ValueError(
                "@abstract cannot be applied to classes with custom metaclass")

    class_.__abstract__ = True
    return metaclass(abc_meta)(class_) if abc_meta else class_
def final(arg):
    """Mark a class or method as *final*.

    :param arg: a class (must subclass Object) or a method
    :raises ValueError: for classes that are not Object subclasses
    :raises TypeError: for arguments that are neither classes nor methods
    :return: ``arg``, with ``__final__`` set on the underlying method/class
    """
    if inspect.isclass(arg):
        if not isinstance(arg, ObjectMetaclass):
            raise ValueError("@final can only be applied to a class "
                             "that is a subclass of Object")
    elif not is_method(arg):
        raise TypeError("@final can only be applied to classes or methods")

    # Unwrap decorated methods so the flag lands on the real function.
    method = arg.method if isinstance(arg, _WrappedMethod) else arg
    method.__final__ = True
    return arg
def override(base=ABSENT):
    """Mark a method as overriding a corresponding method from a superclass.

    May be used bare (``@override``), with no base (``@override()``), or with
    an explicit base class or its name (``@override(Base)``).
    """
    arg = base
    # Bare usage: @override directly on a function or class/static method.
    if inspect.isfunction(arg) or isinstance(arg, NonInstanceMethod):
        _OverrideDecorator.maybe_signal_classmethod(arg)
        decorator = _OverrideDecorator(None)
        return decorator(arg)
    # Parameterized usage: @override() with no explicit base.
    if arg is ABSENT:
        return _OverrideDecorator(None)
    # Parameterized usage: @override(Base) or @override("Base").
    if is_class(arg) or is_string(arg):
        return _OverrideDecorator(arg)
    raise TypeError("explicit base class for @override "
                    "must be either a string or a class object")
def find(*args, **kwargs):
    """Find the first matching element in a list and return it.

    :raises IndexError: when no element matches
    """
    list_, idx = _index(*args, start=0, step=1, **kwargs)
    if idx < 0:
        raise IndexError("element not found")
    return list_[idx]
def findlast(*args, **kwargs):
    """Find the last matching element in a list and return it.

    :raises IndexError: when no element matches
    """
    # Walk backwards from the end of the sequence.
    list_, idx = _index(*args, start=sys.maxsize, step=-1, **kwargs)
    if idx < 0:
        raise IndexError("element not found")
    return list_[idx]
def index(*args, **kwargs):
    """Search a list for an exact element, or an element satisfying a predicate.

    :return: index of the first match, or -1 when nothing matches
    """
    _, idx = _index(*args, start=0, step=1, **kwargs)
    return idx
def lastindex(*args, **kwargs):
    """Search a list backwards for an exact element or a predicate match.

    :return: index of the last match, or -1 when nothing matches
    """
    _, idx = _index(*args, start=sys.maxsize, step=-1, **kwargs)
    return idx
def _index(*args, **kwargs):
    """Implementation of list searching.

    Accepts either positional ``(elem, list_)`` or keyword form with
    mandatory ``in_`` and exactly one of ``of`` (an item) / ``where``
    (a predicate).  ``start``/``step`` control direction of the scan.

    :return: ``(list_, index)`` tuple; index is -1 when nothing matched
    """
    start = kwargs.pop('start', 0)
    step = kwargs.pop('step', 1)

    if len(args) == 2:
        elem, list_ = args
        ensure_sequence(list_)
        predicate = lambda item: item == elem
    else:
        ensure_keyword_args(kwargs,
                            mandatory=('in_',), optional=('of', 'where'))
        if 'of' in kwargs and 'where' in kwargs:
            raise TypeError(
                "either an item or predicate must be supplied, not both")
        if not ('of' in kwargs or 'where' in kwargs):
            raise TypeError("an item or predicate must be supplied")
        list_ = ensure_sequence(kwargs['in_'])
        if 'where' in kwargs:
            predicate = ensure_callable(kwargs['where'])
        else:
            elem = kwargs['of']
            predicate = lambda item: item == elem

    len_ = len(list_)
    # Clamp the starting point into the valid index range.
    i = max(0, min(len_ - 1, start))
    while 0 <= i < len_:
        if predicate(list_[i]):
            return list_, i
        i += step
    return list_, -1
def intercalate(elems, list_):
    """Insert given elements between existing elements of a list.

    :param elems: sequence of separator elements
    :param list_: the list to intersperse
    :return: a new list; ``list_`` itself when it has fewer than 2 items
    """
    ensure_sequence(elems)
    ensure_sequence(list_)
    if len(list_) <= 1:
        return list_
    # Prepend `elems` before every item after the first, then flatten.
    pieces = (elems + list_[i:i + 1] for i in xrange(1, len(list_)))
    return sum(pieces, list_[:1])
def handle(_, msg, args):
    """Check for capslock abuse and kick repeat offenders.

    First all-caps message puts the nick on a watch list; a second one
    triggers a kick.  A lowercase message clears the nick from the list.

    :param _: unused handler slot
    :param msg: the channel message text
    :param args: handler context dict (config, nick, target, do_kick, ...)
    """
    if not args['config']['feature'].getboolean('capskick'):
        return
    nick = args['nick']
    threshold = 0.65
    text = "shutting caps lock off"
    upper = [i for i in msg if i in string.ascii_uppercase]
    # BUG FIX: ratio was computed against len(msg.replace(' ', '')), so a
    # message consisting only of spaces passed the old `len(msg) == 0`
    # guard and raised ZeroDivisionError.  Guard on the space-stripped
    # length instead (the empty-message case is covered too).
    nonspace_len = len(msg.replace(' ', ''))
    if nonspace_len == 0:
        return
    upper_ratio = len(upper) / nonspace_len
    if args['target'] != 'private':
        with _caps_lock:
            if upper_ratio > threshold and len(msg) > 10:
                if nick in _caps:
                    args['do_kick'](args['target'], nick, text)
                    _caps.remove(nick)
                else:
                    _caps.append(nick)
            elif nick in _caps:
                _caps.remove(nick)
def cmd(send, msg, args):
    """Converts text to fullwidth characters."""
    # No argument: pick a random word to convert.
    text = msg if msg else gen_word()
    send(gen_fullwidth(text.upper()))
def task_estimates(channel, states):
    """Estimate remaining time for all tasks in this channel.

    :param channel: Koji channel to inspect
    :param states: iterable of task states; only OPEN is supported
    :return: (via deferred) list of ``(task, est_remaining_timedelta)`` tuples
    """
    for state in states:
        if state != task_states.OPEN:
            raise NotImplementedError('only estimate OPEN tasks')
    tasks = yield channel.tasks(state=states)

    # Average build duration is looked up once per distinct package.
    packages = set([task.package for task in tasks])
    print('checking avg build duration for %i packages:' % len(packages))
    packages = list(packages)
    durations = yield average_build_durations(channel.connection, packages)
    avg_package_durations = dict(zip(packages, durations))

    results = []
    utcnow = datetime.utcnow()
    for task in tasks:
        avg_duration = avg_package_durations[task.package]
        est_complete = task.started + avg_duration
        est_remaining = est_complete - utcnow
        results.append((task, est_remaining))
    defer.returnValue(results)
def describe_delta(delta):
    """Describe this timedelta in human-readable terms.

    :param delta: a ``datetime.timedelta`` (sign is ignored)
    :return: string like "2 hr 5 min", "3 min 10 secs", or "42 secs"
    """
    total = abs(delta.total_seconds())
    hours, remainder = divmod(total, 3600)
    minutes, seconds = divmod(remainder, 60)
    if hours:
        return '%d hr %d min' % (hours, minutes)
    if minutes:
        return '%d min %d secs' % (minutes, seconds)
    return '%d secs' % seconds
def log_est_complete(est_complete):
    """Log the relative time remaining for this est_complete datetime object.

    :param est_complete: naive UTC datetime, or a falsy value when unknown
    """
    if not est_complete:
        print('could not determine an estimated completion time')
        return
    remaining = est_complete - datetime.utcnow()
    # Past-due estimates get a different message.
    if remaining.total_seconds() < 0:
        message = 'this task exceeds estimate by %s'
    else:
        message = 'this task should be complete in %s'
    log_delta(message, remaining)
def patched_normalizeargs(sequence, output=None):
    """Normalize declaration arguments.

    Recursively flattens nested sequences of interface declarations into a
    flat list; Broken subclasses short-circuit to themselves.
    """
    if output is None:
        output = []
    # Broken (un-importable) classes are returned as-is.
    if Broken in getattr(sequence, '__bases__', ()):
        return [sequence]
    cls = sequence.__class__
    is_declaration = (InterfaceClass in cls.__mro__ or
                      zope.interface.declarations.Implements in cls.__mro__)
    if is_declaration:
        output.append(sequence)
    else:
        for v in sequence:
            patched_normalizeargs(v, output)
    return output
def profiles():
    """List of all the connection profile files ordered by preference."""
    paths = []
    for pattern in PROFILES:
        paths.extend(glob(os.path.expanduser(pattern)))
    return paths
def lookup(self, profile, setting):
    """Check koji.conf.d files for this profile's setting.

    :param profile: config section name to search
    :param setting: option name within that section
    :return: the first matching value, or None when not found anywhere
    """
    for path in profiles():
        cfg = SafeConfigParser()
        cfg.read(path)
        if profile not in cfg.sections():
            continue
        if not cfg.has_option(profile, setting):
            continue
        return cfg.get(profile, setting)
def connect_from_web(klass, url):
    """Find a connection that matches this kojiweb URL.

    :param klass: connection class to instantiate
    :param url: a kojiweb URL
    :return: a ``klass`` instance, or None when no profile's weburl matches
    """
    # Reject URLs containing any whitespace.
    if re.search(r'\s', url):
        return
    url = url.split(' ', 1)[0]
    for path in profiles():
        cfg = SafeConfigParser()
        cfg.read(path)
        for profile in cfg.sections():
            if not cfg.has_option(profile, 'weburl'):
                continue
            weburl = cfg.get(profile, 'weburl')
            if url.startswith(weburl):
                return klass(profile)
def from_web(self, url):
    """Reverse-engineer a kojiweb URL into an equivalent API response.

    :param url: a kojiweb page URL, e.g. ".../taskinfo?taskID=123"
    :return: deferred firing with the matching API object, or None when the
             URL does not correspond to a known kojiweb endpoint/ID
    """
    if re.search(r'\s', url):
        return defer.succeed(None)
    o = urlparse(url)
    endpoint = os.path.basename(o.path)
    # BUG FIX: "query" was only assigned when o.query was truthy, so a URL
    # that matched an endpoint but carried no query string raised an
    # uncaught NameError instead of returning None.
    query = parse_qs(o.query) if o.query else {}
    # Map each kojiweb page to its ID query parameter and API method.
    endpoints = {
        'buildinfo': ('buildID', self.getBuild),
        'channelinfo': ('channelID', self.getChannel),
        'hostinfo': ('hostID', self.getHost),
        'packageinfo': ('packageID', self.getPackage),
        'taskinfo': ('taskID', self.getTaskInfo),
        'taginfo': ('tagID', self.getTag),
        'targetinfo': ('targetID', self.getTarget),
        'userinfo': ('userID', self.getUser),
    }
    try:
        (param, method) = endpoints[endpoint]
    except KeyError:
        return defer.succeed(None)
    try:
        id_str = query[param][0]
        id_ = int(id_str)
    except (KeyError, ValueError):
        return defer.succeed(None)
    return method(id_)
def call(self, method, *args, **kwargs):
    """Make an XML-RPC call to the server.

    Keyword args are passed using Koji's ``__starstar`` convention.

    :return: deferred firing with the munchified result
    """
    if kwargs:
        kwargs['__starstar'] = True
        args = args + (kwargs,)
    # Authenticated sessions embed session params in the endpoint path.
    if self.session_id:
        self.proxy.path = self._authenticated_path()
    d = self.proxy.callRemote(method, *args)
    d.addCallback(self._munchify_callback)
    d.addErrback(self._parse_errback)
    if self.callnum is not None:
        self.callnum += 1
    return d
15,220 | def _authenticated_path ( self ) : basepath = self . proxy . path . decode ( ) . split ( '?' ) [ 0 ] params = urlencode ( { 'session-id' : self . session_id , 'session-key' : self . session_key , 'callnum' : self . callnum } ) result = '%s?%s' % ( basepath , params ) return result . encode ( 'utf-8' ) | Get the path of our XML - RPC endpoint with session auth params added . |
def getAverageBuildDuration(self, package, **kwargs):
    """Return a timedelta that Koji considers to be average for this package.

    :param package: package name
    :return: (via deferred) timedelta, or None when Koji has no data
    """
    seconds = yield self.call('getAverageBuildDuration', package, **kwargs)
    if seconds is None:
        defer.returnValue(None)
    defer.returnValue(timedelta(seconds=seconds))
def getBuild(self, build_id, **kwargs):
    """Load all information about a build and return a custom Build class.

    :return: (via deferred) Build instance, or None when not found
    """
    buildinfo = yield self.call('getBuild', build_id, **kwargs)
    build = Build.fromDict(buildinfo)
    if build:
        # Attach this connection so the Build can make follow-up calls.
        build.connection = self
    defer.returnValue(build)
def getChannel(self, channel_id, **kwargs):
    """Load all information about a channel and return a custom Channel class.

    :return: (via deferred) Channel instance, or None when not found
    """
    channelinfo = yield self.call('getChannel', channel_id, **kwargs)
    channel = Channel.fromDict(channelinfo)
    if channel:
        channel.connection = self
    defer.returnValue(channel)
def getPackage(self, name, **kwargs):
    """Load information about a package and return a custom Package class.

    :return: (via deferred) Package instance, or None when not found
    """
    packageinfo = yield self.call('getPackage', name, **kwargs)
    package = Package.fromDict(packageinfo)
    if package:
        package.connection = self
    defer.returnValue(package)
def getTaskDescendents(self, task_id, **kwargs):
    """Load all information about a task's descendents into Task classes.

    :return: (via deferred) list of Task instances
    """
    kwargs['request'] = True
    data = yield self.call('getTaskDescendents', task_id, **kwargs)
    tasks = []
    # The hub keys the result dict by the stringified parent task ID.
    for tdata in data[str(task_id)]:
        task = Task.fromDict(tdata)
        task.connection = self
        tasks.append(task)
    defer.returnValue(tasks)
def getTaskInfo(self, task_id, **kwargs):
    """Load all information about a task and return a custom Task class.

    :return: (via deferred) Task instance, or None when not found
    """
    kwargs['request'] = True
    taskinfo = yield self.call('getTaskInfo', task_id, **kwargs)
    task = Task.fromDict(taskinfo)
    if task:
        task.connection = self
    defer.returnValue(task)
def listBuilds(self, package, **kwargs):
    """Get information about all builds of a package.

    :param package: package name (str) or package ID (int)
    :return: (via deferred) list of Build instances (empty when the
             package does not exist)
    """
    if isinstance(package, int):
        package_id = package
    else:
        # Resolve the package name to its ID first.
        package_data = yield self.getPackage(package)
        if package_data is None:
            defer.returnValue([])
        package_id = package_data.id
    data = yield self.call('listBuilds', package_id, **kwargs)
    builds = []
    for bdata in data:
        build = Build.fromDict(bdata)
        build.connection = self
        builds.append(build)
    defer.returnValue(builds)
def listTagged(self, *args, **kwargs):
    """List builds tagged with a tag.

    :return: (via deferred) list of Build instances
    """
    data = yield self.call('listTagged', *args, **kwargs)
    builds = []
    for bdata in data:
        build = Build.fromDict(bdata)
        build.connection = self
        builds.append(build)
    defer.returnValue(builds)
def listTasks(self, opts=None, queryOpts=None):
    """Get information about all Koji tasks.

    :param opts: optional dict of hub-side filter options
    :param queryOpts: optional dict of hub-side query options
    :return: (via deferred) list of Task instances
    """
    # BUG FIX: the defaults were mutable dicts ({}), shared across calls,
    # and 'decode' was written into the caller's dict in place.  Copy the
    # caller's opts instead of mutating it.
    opts = dict(opts) if opts else {}
    if queryOpts is None:
        queryOpts = {}
    opts['decode'] = True
    data = yield self.call('listTasks', opts, queryOpts)
    tasks = []
    for tdata in data:
        task = Task.fromDict(tdata)
        task.connection = self
        tasks.append(task)
    defer.returnValue(tasks)
def listChannels(self, **kwargs):
    """Get information about all Koji channels.

    :return: (via deferred) list of Channel instances
    """
    data = yield self.call('listChannels', **kwargs)
    channels = []
    for cdata in data:
        channel = Channel.fromDict(cdata)
        channel.connection = self
        channels.append(channel)
    defer.returnValue(channels)
def login(self):
    """Return True if we successfully logged into this Koji hub.

    Chooses Kerberos or SSL auth from the profile configuration and stores
    the resulting session credentials on this connection.

    :raises NotImplementedError: for unsupported auth types
    """
    authtype = self.lookup(self.profile, 'authtype')
    if authtype is None:
        # No explicit authtype: infer SSL when a client cert file exists.
        cert = self.lookup(self.profile, 'cert')
        if cert and os.path.isfile(os.path.expanduser(cert)):
            authtype = 'ssl'
    if authtype == 'kerberos':
        result = yield self._gssapi_login()
    elif authtype == 'ssl':
        result = yield self._ssl_login()
    else:
        raise NotImplementedError('unsupported auth: %s' % authtype)
    self.session_id = result['session-id']
    self.session_key = result['session-key']
    self.callnum = 0
    defer.returnValue(True)
def _ssl_agent(self):
    """Get a Twisted Agent that performs Client SSL authentication for Koji.

    :return: ``twisted.web.client.Agent`` configured with the profile's
             client certificate and optional server CA trust root
    """
    certfile = os.path.expanduser(self.lookup(self.profile, 'cert'))
    with open(certfile) as certfp:
        pemdata = certfp.read()
    client_cert = PrivateCertificate.loadPEM(pemdata)

    # Optional custom CA for verifying the hub's server certificate.
    trustRoot = None
    servercafile = self.lookup(self.profile, 'serverca')
    if servercafile:
        trustRoot = RootCATrustRoot(os.path.expanduser(servercafile))

    policy = ClientCertPolicy(trustRoot=trustRoot, client_cert=client_cert)
    return Agent(reactor, policy)
15,233 | def _get_bmdl_ratio ( self , models ) : bmdls = [ model . output [ "BMDL" ] for model in models if model . output [ "BMDL" ] > 0 ] return max ( bmdls ) / min ( bmdls ) if len ( bmdls ) > 0 else 0 | Return BMDL ratio in list of models . |
15,234 | def _get_parsimonious_model ( models ) : params = [ len ( model . output [ "parameters" ] ) for model in models ] idx = params . index ( min ( params ) ) return models [ idx ] | Return the most parsimonious model of all available models . The most parsimonious model is defined as the model with the fewest number of parameters . |
def make_pathable_string(s, replacewith='_'):
    """Removes symbols from a string to be compatible with directory structure.

    Lowercases the input and replaces every character that is not a word
    character, '+', '/', or '.' with ``replacewith``.
    """
    import re
    lowered = s.lower()
    return re.sub(r'[^\w+/.]', replacewith, lowered)
def get_time():
    """Gets current time in a form of a formatted string.

    Used in the logger function; all separators become underscores.
    """
    import datetime
    stamp = make_pathable_string('%s' % datetime.datetime.now())
    # Normalize remaining date/time separators to underscores.
    for ch in ('-', ':', '.'):
        stamp = stamp.replace(ch, '_')
    return stamp
def bootstrap_general_election(self, election):
    """Create a general election page type."""
    election_day = election.election_day
    electionday_ct = ContentType.objects.get(
        app_label="election", model="electionday")
    page_type, created = PageType.objects.get_or_create(
        model_type=electionday_ct,
        election_day=election_day,
    )
    # Ensure matching page content exists for the page type.
    PageContent.objects.get_or_create(
        content_type=ContentType.objects.get_for_model(page_type),
        object_id=page_type.pk,
        election_day=election_day,
    )
def cmd(send, msg, _):
    """Tells you what acronyms mean."""
    try:
        answer = subprocess.check_output(['wtf', msg],
                                         stderr=subprocess.STDOUT)
        text = answer.decode().strip()
        send(text.replace('\n', ' or ').replace('fuck', 'fsck'))
    except subprocess.CalledProcessError as ex:
        # On failure, relay only the first line of wtf's error output.
        send(ex.output.decode().rstrip().splitlines()[0])
def sys(**kwargs):
    """Manage system configuration."""
    # NOTE: the function name shadows the stdlib `sys` module; it must keep
    # this name because click derives the CLI command name from it.
    output, err = cli_syncthing_adapter.sys(**kwargs)
    if output:
        click.echo("%s" % output, err=err)
    elif not kwargs['init']:
        # No output and not initializing: show the command help text.
        click.echo(click.get_current_context().get_help())
def ls():
    """List all synchronized directories."""
    heading, body = cli_syncthing_adapter.ls()
    if heading:
        click.echo(heading)
    if body:
        click.echo(body.strip())
def auth(**kwargs):
    """Authorize device synchronization."""
    option = 'remove' if kwargs['remove'] else 'add'
    path = kwargs['path']
    key = kwargs['key']

    if kwargs['yes']:
        # --yes skips the interactive confirmation.
        output, err = cli_syncthing_adapter.auth(option, key, path)
        click.echo("%s" % output, err=err)
    else:
        verb = 'authorize' if not kwargs['remove'] else 'de-authorize'
        prompt = ("Are you sure you want to %s this device to access %s?"
                  % (verb, path))
        if click.confirm(prompt):
            output, err = cli_syncthing_adapter.auth(option, key, path)
            if output:
                click.echo("%s" % output, err=err)
def mv(source, target):
    """Move synchronized directory."""
    # Single source onto an existing file: confirm overwrite, then stop.
    if os.path.isfile(target) and len(source) == 1:
        if click.confirm("Are you sure you want to overwrite %s?" % target):
            err_msg = cli_syncthing_adapter.mv_edge_case(source, target)
            if err_msg:
                click.echo(err_msg)
        return

    # Multiple sources require the target to be a directory.
    if len(source) > 1 and not os.path.isdir(target):
        click.echo(click.get_current_context().get_help())
        return
    else:
        err_msg, err = cli_syncthing_adapter.mv(source, target)
        if err_msg:
            click.echo(err_msg, err)
def push(**kwargs):
    """Force synchronization of directory."""
    output, err = cli_syncthing_adapter.refresh(**kwargs)
    if output:
        click.echo("%s" % output, err=err)

    # With --verbose, poll each device and render a progress bar.
    if kwargs['verbose'] and not err:
        with click.progressbar(iterable=None, length=100,
                               label='Synchronizing') as bar:
            device_num = 0
            max_devices = 1
            prev_percent = 0
            while True:
                kwargs['progress'] = True
                kwargs['device_num'] = device_num
                data, err = cli_syncthing_adapter.refresh(**kwargs)
                device_num = data['device_num']
                max_devices = data['max_devices']
                # Only advance the bar by the delta since last poll.
                cur_percent = math.floor(data['percent']) - prev_percent
                if cur_percent > 0:
                    bar.update(cur_percent)
                prev_percent = math.floor(data['percent'])
                if device_num < max_devices:
                    time.sleep(0.5)
                else:
                    break
def tag(path, name):
    """Change tag associated with directory."""
    output, err = cli_syncthing_adapter.tag(path, name)
    click.echo("%s" % output, err=err)
def free(**kwargs):
    """Stop synchronization of directory."""
    output, err = cli_syncthing_adapter.free(kwargs['path'])
    click.echo("%s" % output, err=err)
def add(**kwargs):
    """Make a directory shareable."""
    output, err = cli_syncthing_adapter.add(**kwargs)
    click.echo("%s" % output, err=err)
def info(path):
    """Display synchronization information."""
    output, err = cli_syncthing_adapter.info(folder=path)
    if err:
        click.echo(output, err=err)
        return

    stat = output['status']
    click.echo("State: %s" % stat['state'])
    click.echo("\nTotal Files: %s" % stat['localFiles'])
    click.echo("Files Needed: %s" % stat['needFiles'])
    click.echo("\nTotal Bytes: %s" % stat['localBytes'])
    click.echo("Bytes Needed: %s" % stat['needBytes'])

    # Files still required, in their three queue buckets.
    progress = output['files_needed']['progress']
    queued = output['files_needed']['queued']
    rest = output['files_needed']['rest']
    if len(progress) or len(queued) or len(rest):
        click.echo("\nFiles Needed:")
        for bucket in (progress, queued, rest):
            for f in bucket:
                click.echo("  " + f['name'])

    click.echo("\nDevices Authorized:\n%s" % output['auth_ls'])
def key(**kwargs):
    """Display system key."""
    output, err = cli_syncthing_adapter.key(device=True)
    click.echo("%s" % output, err=err)
def start(**kwargs):
    """Start KodeDrive daemon."""
    output, err = cli_syncthing_adapter.start(**kwargs)
    click.echo("%s" % output, err=err)
def stop():
    """Stop KodeDrive daemon."""
    output, err = cli_syncthing_adapter.sys(exit=True)
    click.echo("%s" % output, err=err)
def start_poll(args):
    """Starts a poll."""
    if args.type == 'privmsg':
        return "We don't have secret ballots in this benevolent dictatorship!"
    if not args.msg:
        return "Polls need a question."
    ctrlchan = args.config['core']['ctrlchan']
    poll = Polls(question=args.msg, submitter=args.nick)
    args.session.add(poll)
    args.session.flush()
    # Admins (or an unrestricted config) get their polls auto-accepted.
    if args.isadmin or not args.config.getboolean('adminrestrict', 'poll'):
        poll.accepted = 1
        return "Poll #%d created!" % poll.id
    args.send("Poll submitted for approval.", target=args.nick)
    args.send("New Poll: #%d -- %s, Submitted by %s"
              % (poll.id, args.msg, args.nick), target=ctrlchan)
    return ""
def delete_poll(args):
    """Deletes a poll."""
    if not args.isadmin:
        return "Nope, not gonna do it."
    if not args.msg:
        return "Syntax: !poll delete <pollnum>"
    if not args.msg.isdigit():
        return "Not A Valid Positive Integer."
    poll = args.session.query(Polls).filter(
        Polls.accepted == 1, Polls.id == int(args.msg)).first()
    if poll is None:
        return "Poll does not exist."
    if poll.active == 1:
        return "You can't delete an active poll!"
    if poll.deleted == 1:
        return "Poll already deleted."
    # Soft-delete: just flag the row.
    poll.deleted = 1
    return "Poll deleted."
def edit_poll(args):
    """Edits a poll."""
    if not args.isadmin:
        return "Nope, not gonna do it."
    parts = args.msg.split(maxsplit=1)
    if len(parts) < 2:
        return "Syntax: !vote edit <pollnum> <question>"
    if not parts[0].isdigit():
        return "Not A Valid Positive Integer."
    poll = get_open_poll(args.session, int(parts[0]))
    if poll is None:
        return "That poll was deleted or does not exist!"
    poll.question = parts[1]
    return "Poll updated!"
def reopen(args):
    """Reopens a closed poll."""
    if not args.isadmin:
        return "Nope, not gonna do it."
    parts = args.msg.split()
    if not parts:
        return "Syntax: !poll reopen <pollnum>"
    if not parts[0].isdigit():
        return "Not a valid positve integer."
    pid = int(parts[0])
    poll = get_open_poll(args.session, pid)
    if poll is None:
        return "That poll doesn't exist or has been deleted!"
    poll.active = 1
    return "Poll %d reopened!" % pid
def end_poll(args):
    """Ends a poll."""
    if not args.isadmin:
        return "Nope, not gonna do it."
    if not args.msg:
        return "Syntax: !vote end <pollnum>"
    if not args.msg.isdigit():
        return "Not A Valid Positive Integer."
    poll = get_open_poll(args.session, int(args.msg))
    if poll is None:
        return "That poll doesn't exist or has already been deleted!"
    if poll.active == 0:
        return "Poll already ended!"
    poll.active = 0
    return "Poll ended!"
def tally_poll(args):
    """Shows the results of poll."""
    if not args.msg:
        return "Syntax: !vote tally <pollnum>"
    if not args.msg.isdigit():
        return "Not A Valid Positive Integer."
    pid = int(args.msg)
    poll = get_open_poll(args.session, pid)
    if poll is None:
        return "That poll doesn't exist or was deleted. Use !poll list to see valid polls"

    state = "Active" if poll.active == 1 else "Closed"
    votes = args.session.query(Poll_responses).filter(
        Poll_responses.pid == pid).all()
    args.send("%s poll: %s, %d total votes"
              % (state, poll.question, len(votes)))

    # Group voters by their response.
    votemap = collections.defaultdict(list)
    for v in votes:
        votemap[v.response].append(v.voter)
    for resp in sorted(votemap.keys()):
        args.send("%s: %d -- %s"
                  % (resp, len(votemap[resp]), ", ".join(votemap[resp])),
                  target=args.nick)
    if not votemap:
        return ""

    # Invert: vote count -> list of responses with that count.
    ranking = collections.defaultdict(list)
    for resp in votemap.keys():
        ranking[len(votemap[resp])].append(resp)
    high = max(ranking)
    top = ranking[high]
    if len(top) == 1:
        return "The winner is %s with %d votes." % (top[0], high)
    return "Tie between %s with %d votes." % (", ".join(top), high)
def vote(session, nick, pid, response):
    """Votes on a poll."""
    if not response:
        return "You have to vote something!"
    # Normalize shorthand answers.
    if response in ("n", "nay"):
        response = "no"
    elif response in ("y", "aye"):
        response = "yes"
    poll = get_open_poll(session, pid)
    if poll is None:
        return "That poll doesn't exist or isn't active. Use !poll list to see valid polls"
    old_vote = get_response(session, pid, nick)
    if old_vote is None:
        session.add(Poll_responses(pid=pid, response=response, voter=nick))
        return "%s voted %s." % (nick, response)
    if response == old_vote.response:
        return "You've already voted %s." % response
    msg = "%s changed their vote from %s to %s." % (
        nick, old_vote.response, response)
    old_vote.response = response
    return msg
def retract(args):
    """Deletes a vote for a poll."""
    if not args.msg:
        return "Syntax: !vote retract <pollnum>"
    if not args.msg.isdigit():
        return "Not A Valid Positive Integer."
    # NOTE(review): args.msg is passed as a string pid here, while vote()
    # passes an int — confirm get_response handles both.
    response = get_response(args.session, args.msg, args.nick)
    if response is None:
        return "You haven't voted on that poll yet!"
    args.session.delete(response)
    return "Vote retracted"
def cmd(send, msg, args):
    """Handles voting."""
    command = msg.split()
    msg = " ".join(command[1:])
    if not command:
        send("Which poll?")
        return
    command = command[0]

    # Bare poll number: cast a vote directly.
    if command.isdigit():
        if args['type'] == 'privmsg':
            send("We don't have secret ballots in this benevolent dictatorship!")
        else:
            send(vote(args['db'], args['nick'], int(command), msg))
        return

    isadmin = args['is_admin'](args['nick'])
    parser = arguments.ArgParser(args['config'])
    parser.set_defaults(session=args['db'], msg=msg, nick=args['nick'])
    subparser = parser.add_subparsers()

    # Wire up each subcommand to its handler and required context.
    start_parser = subparser.add_parser('start', config=args['config'],
                                        aliases=['open', 'add', 'create'])
    start_parser.set_defaults(func=start_poll, send=send, isadmin=isadmin,
                              type=args['type'])
    tally_parser = subparser.add_parser('tally')
    tally_parser.set_defaults(func=tally_poll, send=send)
    list_parser = subparser.add_parser('list', config=args['config'])
    list_parser.set_defaults(func=list_polls)
    retract_parser = subparser.add_parser('retract')
    retract_parser.set_defaults(func=retract)
    end_parser = subparser.add_parser('end', aliases=['close'])
    end_parser.set_defaults(func=end_poll, isadmin=isadmin)
    delete_parser = subparser.add_parser('delete')
    delete_parser.set_defaults(func=delete_poll, isadmin=isadmin)
    edit_parser = subparser.add_parser('edit')
    edit_parser.set_defaults(func=edit_poll, isadmin=isadmin)
    reopen_parser = subparser.add_parser('reopen')
    reopen_parser.set_defaults(func=reopen, isadmin=isadmin)

    try:
        cmdargs = parser.parse_args(command)
    except arguments.ArgumentException as e:
        send(str(e))
        return
    send(cmdargs.func(cmdargs))
def cmd(send, msg, args):
    """Gets stats."""
    parser = arguments.ArgParser(args['config'])
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--high', action='store_true')
    group.add_argument('--low', action='store_true')
    group.add_argument('--userhigh', action='store_true')
    group.add_argument('--nick', action=arguments.NickParser)
    group.add_argument('command', nargs='?')
    try:
        cmdargs = parser.parse_args(msg)
    except arguments.ArgumentException as e:
        send(str(e))
        return

    session = args['db']
    totals = get_command_totals(session)
    sortedtotals = sorted(totals, key=totals.get)

    if command_registry.is_registered(cmdargs.command):
        send(get_command(session, cmdargs.command, totals))
    elif cmdargs.command and not command_registry.is_registered(cmdargs.command):
        send("Command %s not found." % cmdargs.command)
    elif cmdargs.high:
        send('Most Used Commands:')
        high = list(reversed(sortedtotals))
        for x in range(3):
            if x < len(high):
                send("%s: %s" % (high[x], totals[high[x]]))
    elif cmdargs.low:
        send('Least Used Commands:')
        low = sortedtotals
        for x in range(3):
            if x < len(low):
                send("%s: %s" % (low[x], totals[low[x]]))
    elif cmdargs.userhigh:
        # Per-user totals replace command totals for this branch.
        totals = get_nick_totals(session)
        sortedtotals = sorted(totals, key=totals.get)
        high = list(reversed(sortedtotals))
        send('Most active bot users:')
        for x in range(3):
            if x < len(high):
                send("%s: %s" % (high[x], totals[high[x]]))
    elif cmdargs.nick:
        send(get_nick(session, cmdargs.nick))
    else:
        # No arguments: report a random command's usage.
        command = choice(list(totals.keys()))
        send("%s has been used %s times." % (command, totals[command]))
def _create_page(cls, page, lang, auto_title, cms_app=None, parent=None,
                 namespace=None, site=None, set_home=False):
    """Create a single page or titles.

    :param page: existing page to extend with a new language, or None to
                 create a fresh page
    :param lang: language code for the (new) title
    :param auto_title: title text to use
    :return: the page's draft object
    """
    from cms.api import create_page, create_title
    from cms.utils.conf import get_templates
    default_template = get_templates()[0][0]

    if page is None:
        page = create_page(auto_title, language=lang, parent=parent,
                           site=site, template=default_template,
                           in_navigation=True, published=True)
        page.application_urls = cms_app
        page.application_namespace = namespace
        page.save()
        page.publish(lang)
    elif lang not in page.get_languages():
        # Page exists but lacks this language: add a title only.
        create_title(language=lang, title=auto_title, page=page)
        page.publish(lang)

    if set_home:
        page.set_as_homepage()
    return page.get_draft_object()
def _create_config(cls):
    """Creates an ApphookConfig instance."""
    return cls.app_config.objects.create(
        namespace=cls.auto_setup['namespace'],
        **cls.auto_setup['config_fields'])
def _create_config_translation(cls, config, lang):
    """Creates a translation for the given ApphookConfig.

    :param config: the ApphookConfig instance to translate
    :param lang: language code of the translation
    """
    config.set_current_language(lang, initialize=True)
    for field, data in cls.auto_setup['config_translated_fields'].items():
        setattr(config, field, data)
    config.save_translations()
def _setup_pages(cls, config):
    """Create the page structure.

    Builds the home page (if missing) and the apphooked page in every
    configured language, creating/looking up the ApphookConfig as needed.
    """
    from cms.exceptions import NoHomeFound
    from cms.models import Page
    from cms.utils import get_language_list
    from django.conf import settings
    from django.utils.translation import override

    app_page = None
    get_url = False
    # aldryn_search indexes pages on publish; temporarily neuter its URL
    # lookup so publishing during setup does not hit the search index.
    if getattr(settings, 'ALDRYN_SEARCH_CMS_PAGE', False):
        from aldryn_search.search_indexes import TitleIndex

        def fake_url(self, obj):
            return ''
        get_url = TitleIndex.get_url
        TitleIndex.get_url = fake_url

    site = Site.objects.get_current()
    auto_sites = cls.auto_setup.get('sites', True)
    if auto_sites is True or site.pk in auto_sites:
        if getattr(cls, 'app_config', False):
            configs = cls.app_config.objects.all()
            if not configs.exists():
                config = cls._create_config()
            else:
                config = configs.first()
        langs = get_language_list(site.pk)
        if not Page.objects.on_site(site.pk).filter(
                application_urls=cls.__name__).exists():
            for lang in langs:
                with override(lang):
                    if config:
                        if cls.auto_setup['config_translated_fields']:
                            cls._create_config_translation(config, lang)
                        namespace = config.namespace
                    elif cls.app_name:
                        namespace = cls.app_name
                    else:
                        namespace = None
                    try:
                        home = Page.objects.get_home(site.pk).get_draft_object()
                    except NoHomeFound:
                        home = None
                    set_home = hasattr(Page, 'set_as_homepage')
                    home = cls._create_page(
                        home, lang, cls.auto_setup['home title'],
                        site=site, set_home=set_home)
                    app_page = cls._create_page(
                        app_page, lang, cls.auto_setup['page title'],
                        cls.__name__, home, namespace, site=site)
    # Restore the original search-index URL lookup.
    if get_url:
        TitleIndex.get_url = get_url
15,265 | def setup ( cls ) : try : if cls . auto_setup and cls . auto_setup . get ( 'enabled' , False ) : if not cls . auto_setup . get ( 'home title' , False ) : warnings . warn ( '"home title" is not set in {0}.auto_setup attribute' . format ( cls ) ) return if not cls . auto_setup . get ( 'page title' , False ) : warnings . warn ( '"page title" is not set in {0}.auto_setup attribute' . format ( cls ) ) return if cls . app_name and not cls . auto_setup . get ( 'namespace' , False ) : warnings . warn ( '"page title" is not set in {0}.auto_setup attribute' . format ( cls ) ) return config = None cls . _setup_pages ( config ) except Exception : pass | Main method to auto setup Apphook |
15,266 | def run ( self , verbose = True ) : self . results . clear ( ) for analysis_group in self . config . analysis_groups : if analysis_group . providers : for provider in analysis_group . providers : logger . info ( 'Run provider %s' , provider . identifier ) provider . run ( ) for checker in analysis_group . checkers : result = self . _get_checker_result ( analysis_group , checker , provider ) self . results . append ( result ) analysis_group . results . append ( result ) if verbose : result . print ( ) else : for checker in analysis_group . checkers : result = self . _get_checker_result ( analysis_group , checker , nd = 'no-data-' ) self . results . append ( result ) analysis_group . results . append ( result ) if verbose : result . print ( ) | Run the analysis . |
15,267 | def output_tap ( self ) : tracker = Tracker ( streaming = True , stream = sys . stdout ) for group in self . config . analysis_groups : n_providers = len ( group . providers ) n_checkers = len ( group . checkers ) if not group . providers and group . checkers : test_suite = group . name description_lambda = lambda r : r . checker . name elif not group . checkers : logger . warning ( 'Invalid analysis group (no checkers), skipping' ) continue elif n_providers > n_checkers : test_suite = group . checkers [ 0 ] . name description_lambda = lambda r : r . provider . name else : test_suite = group . providers [ 0 ] . name description_lambda = lambda r : r . checker . name for result in group . results : description = description_lambda ( result ) if result . code == ResultCode . PASSED : tracker . add_ok ( test_suite , description ) elif result . code == ResultCode . IGNORED : tracker . add_ok ( test_suite , description + ' (ALLOWED FAILURE)' ) elif result . code == ResultCode . NOT_IMPLEMENTED : tracker . add_not_ok ( test_suite , description , 'TODO implement the test' ) elif result . code == ResultCode . FAILED : tracker . add_not_ok ( test_suite , description , diagnostics = ' ---\n message: %s\n hint: %s\n ...' % ( '\n message: ' . join ( result . messages . split ( '\n' ) ) , result . checker . hint ) ) | Output analysis results in TAP format . |
15,268 | def find_latex_font_serif ( ) : r import os , re import matplotlib . font_manager name = lambda font : os . path . splitext ( os . path . split ( font ) [ - 1 ] ) [ 0 ] . split ( ' - ' ) [ 0 ] fonts = matplotlib . font_manager . findSystemFonts ( fontpaths = None , fontext = 'ttf' ) matches = [ r'.*Computer\ Modern\ Roman.*' , r'.*CMU\ Serif.*' , r'.*CMU.*' , r'.*Times.*' , r'.*DejaVu.*' , r'.*Serif.*' , ] for match in matches : for font in fonts : if re . match ( match , font ) : return name ( font ) return None | r Find an available font to mimic LaTeX and return its name . |
15,269 | def copy_style ( ) : r import os import matplotlib styles = { } styles [ 'goose.mplstyle' ] = styles [ 'goose-tick-in.mplstyle' ] = styles [ 'goose-tick-lower.mplstyle' ] = if find_latex_font_serif ( ) is not None : styles [ 'goose-latex.mplstyle' ] = r . format ( serif = find_latex_font_serif ( ) ) else : styles [ 'goose-latex.mplstyle' ] = r dirname = os . path . abspath ( os . path . join ( matplotlib . get_configdir ( ) , 'stylelib' ) ) if not os . path . isdir ( dirname ) : os . makedirs ( dirname ) for fname , style in styles . items ( ) : open ( os . path . join ( dirname , fname ) , 'w' ) . write ( style ) | r Write all goose - styles to the relevant matplotlib configuration directory . |
15,270 | def scale_lim ( lim , factor = 1.05 ) : r if type ( lim ) == str : lim = eval ( lim ) D = lim [ 1 ] - lim [ 0 ] lim [ 0 ] -= ( factor - 1. ) / 2. * D lim [ 1 ] += ( factor - 1. ) / 2. * D return lim | r Scale limits to be 5% wider to have a nice plot . |
15,271 | def abs2rel_y ( y , axis = None ) : r if axis is None : axis = plt . gca ( ) ymin , ymax = axis . get_ylim ( ) if axis . get_xscale ( ) == 'log' : try : return [ ( np . log10 ( i ) - np . log10 ( ymin ) ) / ( np . log10 ( ymax ) - np . log10 ( ymin ) ) if i is not None else i for i in y ] except : return ( np . log10 ( y ) - np . log10 ( ymin ) ) / ( np . log10 ( ymax ) - np . log10 ( ymin ) ) else : try : return [ ( i - ymin ) / ( ymax - ymin ) if i is not None else i for i in y ] except : return ( y - ymin ) / ( ymax - ymin ) | r Transform absolute y - coordinates to relative y - coordinates . Relative coordinates correspond to a fraction of the relevant axis . Be sure to set the limits and scale before calling this function! |
15,272 | def rel2abs_x ( x , axis = None ) : r if axis is None : axis = plt . gca ( ) xmin , xmax = axis . get_xlim ( ) if axis . get_xscale ( ) == 'log' : try : return [ 10. ** ( np . log10 ( xmin ) + i * ( np . log10 ( xmax ) - np . log10 ( xmin ) ) ) if i is not None else i for i in x ] except : return 10. ** ( np . log10 ( xmin ) + x * ( np . log10 ( xmax ) - np . log10 ( xmin ) ) ) else : try : return [ xmin + i * ( xmax - xmin ) if i is not None else i for i in x ] except : return xmin + x * ( xmax - xmin ) | r Transform relative x - coordinates to absolute x - coordinates . Relative coordinates correspond to a fraction of the relevant axis . Be sure to set the limits and scale before calling this function! |
15,273 | def subplots ( scale_x = None , scale_y = None , scale = None , ** kwargs ) : r if 'figsize' in kwargs : return plt . subplots ( ** kwargs ) width , height = mpl . rcParams [ 'figure.figsize' ] if scale is not None : width *= scale height *= scale if scale_x is not None : width *= scale_x if scale_y is not None : height *= scale_y nrows = kwargs . pop ( 'nrows' , 1 ) ncols = kwargs . pop ( 'ncols' , 1 ) width = ncols * width height = nrows * height return plt . subplots ( nrows = nrows , ncols = ncols , figsize = ( width , height ) , ** kwargs ) | r Run matplotlib . pyplot . subplots with figsize set to the correct multiple of the default . |
15,274 | def plot ( x , y , units = 'absolute' , axis = None , ** kwargs ) : r if axis is None : axis = plt . gca ( ) if units . lower ( ) == 'relative' : x = rel2abs_x ( x , axis ) y = rel2abs_y ( y , axis ) return axis . plot ( x , y , ** kwargs ) | r Plot . |
15,275 | def plot_powerlaw ( exp , startx , starty , width = None , ** kwargs ) : r endx = kwargs . pop ( 'endx' , None ) endy = kwargs . pop ( 'endy' , None ) height = kwargs . pop ( 'height' , None ) units = kwargs . pop ( 'units' , 'relative' ) axis = kwargs . pop ( 'axis' , plt . gca ( ) ) if axis . get_xscale ( ) != 'log' or axis . get_yscale ( ) != 'log' : raise IOError ( 'This function only works on a log-log scale, where the power-law is a straight line' ) if width is not None : endx = startx + width endy = None elif height is not None : if exp > 0 : endy = starty + height elif exp == 0 : endy = starty else : endy = starty - height endx = None if units . lower ( ) == 'relative' : [ startx , endx ] = rel2abs_x ( [ startx , endx ] , axis ) [ starty , endy ] = rel2abs_y ( [ starty , endy ] , axis ) const = starty / ( startx ** exp ) if endx is not None : endy = const * endx ** exp else : endx = ( endy / const ) ** ( 1 / exp ) return axis . plot ( [ startx , endx ] , [ starty , endy ] , ** kwargs ) | r Plot a power - law . |
15,276 | def histogram_bin_edges_minwidth ( min_width , bins ) : r if min_width is None : return bins if min_width is False : return bins while True : idx = np . where ( np . diff ( bins ) < min_width ) [ 0 ] if len ( idx ) == 0 : return bins idx = idx [ 0 ] if idx + 1 == len ( bins ) - 1 : bins = np . hstack ( ( bins [ : ( idx ) ] , bins [ - 1 ] ) ) else : bins = np . hstack ( ( bins [ : ( idx + 1 ) ] , bins [ ( idx + 2 ) : ] ) ) | r Merge bins with right - neighbour until each bin has a minimum width . |
15,277 | def histogram_bin_edges_mincount ( data , min_count , bins ) : r if min_count is None : return bins if min_count is False : return bins if type ( min_count ) != int : raise IOError ( '"min_count" must be an integer number' ) while True : P , _ = np . histogram ( data , bins = bins , density = False ) idx = np . where ( P < min_count ) [ 0 ] if len ( idx ) == 0 : return bins idx = idx [ 0 ] if idx + 1 == len ( P ) : bins = np . hstack ( ( bins [ : ( idx ) ] , bins [ - 1 ] ) ) else : bins = np . hstack ( ( bins [ : ( idx + 1 ) ] , bins [ ( idx + 2 ) : ] ) ) | r Merge bins with right - neighbour until each bin has a minimum number of data - points . |
15,278 | def histogram_bin_edges ( data , bins = 10 , mode = 'equal' , min_count = None , integer = False , remove_empty_edges = True , min_width = None ) : r if mode == 'equal' : bin_edges = np . linspace ( np . min ( data ) , np . max ( data ) , bins + 1 ) elif mode == 'log' : bin_edges = np . logspace ( np . log10 ( np . min ( data ) ) , np . log10 ( np . max ( data ) ) , bins + 1 ) elif mode == 'uniform' : if hasattr ( bins , "__len__" ) : raise IOError ( 'Only the number of bins can be specified' ) if min_count is not None and min_count is not False : if type ( min_count ) != int : raise IOError ( '"min_count" must be an integer number' ) bins = int ( np . floor ( float ( len ( data ) ) / float ( min_count ) ) ) count = int ( np . floor ( float ( len ( data ) ) / float ( bins ) ) ) * np . ones ( bins , dtype = 'int' ) count [ np . linspace ( 0 , bins - 1 , len ( data ) - np . sum ( count ) ) . astype ( np . int ) ] += 1 idx = np . empty ( ( bins + 1 ) , dtype = 'int' ) idx [ 0 ] = 0 idx [ 1 : ] = np . cumsum ( count ) idx [ - 1 ] = len ( data ) - 1 bin_edges = np . unique ( np . sort ( data ) [ idx ] ) else : raise IOError ( 'Unknown option' ) if remove_empty_edges : N , _ = np . histogram ( data , bins = bin_edges , density = False ) idx = np . min ( np . where ( N > 0 ) [ 0 ] ) jdx = np . max ( np . where ( N > 0 ) [ 0 ] ) bin_edges = bin_edges [ ( idx ) : ( jdx + 2 ) ] bin_edges = histogram_bin_edges_mincount ( data , min_count = min_count , bins = bin_edges ) bin_edges = histogram_bin_edges_minwidth ( min_width = min_width , bins = bin_edges ) if integer : idx = np . where ( np . diff ( np . floor ( bin_edges ) ) >= 1 ) [ 0 ] idx = np . unique ( np . hstack ( ( 0 , idx , len ( bin_edges ) - 1 ) ) ) bin_edges = bin_edges [ idx ] return bin_edges | r Determine bin - edges . |
15,279 | def hist ( P , edges , ** kwargs ) : r from matplotlib . collections import PatchCollection from matplotlib . patches import Polygon axis = kwargs . pop ( 'axis' , plt . gca ( ) ) cindex = kwargs . pop ( 'cindex' , None ) autoscale = kwargs . pop ( 'autoscale' , True ) kwargs . setdefault ( 'edgecolor' , 'k' ) if cindex is None : kwargs . setdefault ( 'facecolor' , ( 0. , 0. , 0. , 0. ) ) poly = [ ] for p , xl , xu in zip ( P , edges [ : - 1 ] , edges [ 1 : ] ) : coor = np . array ( [ [ xl , 0. ] , [ xu , 0. ] , [ xu , p ] , [ xl , p ] , ] ) poly . append ( Polygon ( coor ) ) args = ( poly ) p = PatchCollection ( args , ** kwargs ) if cindex is not None : p . set_array ( cindex ) axis . add_collection ( p ) if autoscale : xlim = [ edges [ 0 ] , edges [ - 1 ] ] ylim = [ 0 , np . max ( P ) ] axis . set_xlim ( [ xlim [ 0 ] - .1 * ( xlim [ 1 ] - xlim [ 0 ] ) , xlim [ 1 ] + .1 * ( xlim [ 1 ] - xlim [ 0 ] ) ] ) axis . set_ylim ( [ ylim [ 0 ] - .1 * ( ylim [ 1 ] - ylim [ 0 ] ) , ylim [ 1 ] + .1 * ( ylim [ 1 ] - ylim [ 0 ] ) ] ) return p | r Plot histogram . |
15,280 | def cdf ( data , mode = 'continuous' , ** kwargs ) : return ( np . linspace ( 0.0 , 1.0 , len ( data ) ) , np . sort ( data ) ) | Return cumulative density . |
15,281 | def patch ( * args , ** kwargs ) : from matplotlib . collections import PatchCollection from matplotlib . patches import Polygon if ( 'coor' not in kwargs or 'conn' not in kwargs ) : raise IOError ( 'Specify both "coor" and "conn"' ) axis = kwargs . pop ( 'axis' , plt . gca ( ) ) cindex = kwargs . pop ( 'cindex' , None ) coor = kwargs . pop ( 'coor' , None ) conn = kwargs . pop ( 'conn' , None ) autoscale = kwargs . pop ( 'autoscale' , True ) kwargs . setdefault ( 'edgecolor' , 'k' ) if cindex is None : kwargs . setdefault ( 'facecolor' , ( 0. , 0. , 0. , 0. ) ) if coor is not None and conn is not None : poly = [ ] for iconn in conn : poly . append ( Polygon ( coor [ iconn , : ] ) ) args = tuple ( poly , * args ) p = PatchCollection ( args , ** kwargs ) if cindex is not None : p . set_array ( cindex ) axis . add_collection ( p ) if autoscale : xlim = [ np . min ( coor [ : , 0 ] ) , np . max ( coor [ : , 0 ] ) ] ylim = [ np . min ( coor [ : , 1 ] ) , np . max ( coor [ : , 1 ] ) ] axis . set_xlim ( [ xlim [ 0 ] - .1 * ( xlim [ 1 ] - xlim [ 0 ] ) , xlim [ 1 ] + .1 * ( xlim [ 1 ] - xlim [ 0 ] ) ] ) axis . set_ylim ( [ ylim [ 0 ] - .1 * ( ylim [ 1 ] - ylim [ 0 ] ) , ylim [ 1 ] + .1 * ( ylim [ 1 ] - ylim [ 0 ] ) ] ) return p | Add patches to plot . The color of the patches is indexed according to a specified color - index . |
15,282 | def cmd ( send , msg , args ) : if args [ 'type' ] == 'privmsg' : send ( 'Filters must be set in channels, not via private message.' ) return isadmin = args [ 'is_admin' ] ( args [ 'nick' ] ) parser = arguments . ArgParser ( args [ 'config' ] ) parser . add_argument ( '--channel' , nargs = '?' , default = args [ 'target' ] ) group = parser . add_mutually_exclusive_group ( ) group . add_argument ( 'filter' , nargs = '?' ) group . add_argument ( '--show' , action = 'store_true' ) group . add_argument ( '--list' , action = 'store_true' ) group . add_argument ( '--reset' , '--clear' , action = 'store_true' ) group . add_argument ( '--chain' ) if not msg : send ( get_filters ( args [ 'handler' ] , args [ 'target' ] ) ) return try : cmdargs = parser . parse_args ( msg ) except arguments . ArgumentException as e : send ( str ( e ) ) return if cmdargs . list : send ( "Available filters are %s" % ", " . join ( textutils . output_filters . keys ( ) ) ) elif cmdargs . reset and isadmin : args [ 'handler' ] . outputfilter [ cmdargs . channel ] . clear ( ) send ( "Okay!" ) elif cmdargs . chain and isadmin : if not args [ 'handler' ] . outputfilter [ cmdargs . channel ] : send ( "Must have a filter set in order to chain." ) return filter_list , output = textutils . append_filters ( cmdargs . chain ) if filter_list is not None : args [ 'handler' ] . outputfilter [ cmdargs . channel ] . extend ( filter_list ) send ( output ) elif cmdargs . show : send ( get_filters ( args [ 'handler' ] , cmdargs . channel ) ) elif isadmin : filter_list , output = textutils . append_filters ( cmdargs . filter ) if filter_list is not None : args [ 'handler' ] . outputfilter [ cmdargs . channel ] . clear ( ) args [ 'handler' ] . outputfilter [ cmdargs . channel ] . extend ( filter_list ) send ( output ) else : send ( 'This command requires admin privileges.' ) | Changes the output filter . |
15,283 | def inner ( self ) : inner_array = nd . morphology . binary_erosion ( self . bitmap ) return Region ( inner_array ) | Region formed by taking non - border elements . |
15,284 | def border ( self ) : border_array = self . bitmap - self . inner . bitmap return Region ( border_array ) | Region formed by taking border elements . |
15,285 | def convex_hull ( self ) : hull_array = skimage . morphology . convex_hull_image ( self . bitmap ) return Region ( hull_array ) | Region representing the convex hull . |
15,286 | def dilate ( self , iterations = 1 ) : dilated_array = nd . morphology . binary_dilation ( self . bitmap , iterations = iterations ) return Region ( dilated_array ) | Return a dilated region . |
15,287 | def cmd ( send , _ , args ) : guarded = args [ 'handler' ] . guarded if not guarded : send ( "Nobody is guarded." ) else : send ( ", " . join ( guarded ) ) | Shows the currently guarded nicks . |
15,288 | def mapColorRampToValues ( cls , colorRamp , minValue , maxValue , alpha = 1.0 ) : minRampIndex = 0 maxRampIndex = float ( len ( colorRamp ) - 1 ) if minValue != maxValue : slope = ( maxRampIndex - minRampIndex ) / ( maxValue - minValue ) intercept = maxRampIndex - ( slope * maxValue ) else : slope = 0 intercept = minRampIndex mappedColorRamp = MappedColorRamp ( colorRamp = colorRamp , slope = slope , intercept = intercept , min = minValue , max = maxValue , alpha = alpha ) return mappedColorRamp | Creates color ramp based on min and max values of all the raster pixels from all rasters . If pixel value is one of the no data values it will be excluded in the color ramp interpolation . Returns colorRamp slope intercept |
15,289 | def cmd ( send , msg , args ) : user = choice ( get_users ( args ) ) if msg : msg = " for " + msg msg = "blames " + user + msg send ( msg , 'action' ) | Blames a random user for something . |
15,290 | def set_default ( nick , location , session , send , apikey ) : if valid_location ( location , apikey ) : send ( "Setting default location" ) default = session . query ( Weather_prefs ) . filter ( Weather_prefs . nick == nick ) . first ( ) if default is None : default = Weather_prefs ( nick = nick , location = location ) session . add ( default ) else : default . location = location else : send ( "Invalid or Ambiguous Location" ) | Sets nick s default location to location . |
15,291 | def cmd ( send , msg , _ ) : if not msg : msg = textutils . gen_word ( ) send ( textutils . gen_slogan ( msg ) ) | Gets a slogan . |
15,292 | def evolve ( self , years ) : world_file = fldr + os . sep + self . name + '.txt' self . build_base ( ) self . world . add_mountains ( ) self . add_life ( ) self . world . grd . save ( world_file ) print ( 'TODO - run ' + str ( years ) + ' years' ) | run the evolution of the planet to see how it looks after years |
15,293 | def build_base ( self ) : self . world = my_world . World ( self . grid_height , self . grid_width , [ ' ' , 'x' , '#' ] ) perc_land = ( self . lava + ( self . wind / 10 ) + ( self . rain / 20 ) + ( self . sun / 10 ) ) * 100 perc_sea = ( 100 - perc_land ) perc_blocked = ( self . lava / 10 ) * 100 self . world . build_random ( self . num_seeds , perc_land , perc_sea , perc_blocked ) | create a base random land structure using the AIKIF world model |
15,294 | def url ( self ) : url = u'{home_url}{permalink}' . format ( home_url = settings . HOME_URL , permalink = self . _permalink ) url = re . sub ( r'/{2,}' , r'/' , url ) return url | The site - relative URL to the post . |
15,295 | def content ( self ) : content_list = wrap_list ( self . _content_preprocessed ) content_list . extend ( self . _content_stash ) content_to_render = '\n' . join ( content_list ) return typogrify ( self . content_renderer . render ( content_to_render , self . format ) ) | The post s content in HTML format . |
15,296 | def is_published ( self ) : return self . status == Status . published and self . timestamp <= arrow . now ( ) | True if the post is published False otherwise . |
15,297 | def is_pending ( self ) : return self . status == Status . published and self . timestamp >= arrow . now ( ) | True if the post is marked as published but has a timestamp set in the future . |
15,298 | def set_finalized_content ( self , content , caller_class ) : caller = caller_class . get_name ( ) if hasattr ( caller_class , 'get_name' ) else unicode ( caller_class ) if not FinalizationPlugin . is_enabled ( ) : logger . warning ( "A plugin is trying to modify the post content but the FINALIZE_METADATA setting is " "disabled. This setting must be enabled for plugins to modify post content. " "Plugin: %s" % caller ) return False perms = settings . PLUGIN_PERMISSIONS [ 'MODIFY_RAW_POST' ] if caller not in perms and '*' not in perms : logger . warning ( "A plugin is trying to modify the post content but does not have the " "MODIFY_RAW_POST permission. Plugin: %s" % caller ) return False else : logger . debug ( "%s is setting post source content." % caller ) self . _content_finalized = self . _remove_all_stashed_content ( ) return True | Plugins can call this method to modify post content that is written back to source post files . This method can be called at any time by anyone but it has no effect if the caller is not granted the MODIFY_RAW_POST permission in the Engineer configuration . |
15,299 | def all_tags ( self ) : tags = set ( ) for post in self : tags . update ( post . tags ) return list ( tags ) | Returns a list of all the unique tags as strings that posts in the collection have . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.