idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
57,600
def get_collection_documents_generator(client, database_name, collection_name, spec, latest_n, sort_key):
    """Yield documents (e.g. tweets) from a MongoDB collection, ascending on sort_key.

    If latest_n is not None, only the latest_n documents (in sort_key order)
    are yielded; otherwise every document matching spec is yielded.
    """
    mongo_database = client[database_name]
    collection = mongo_database[collection_name]
    collection.create_index(sort_key)
    # Build the sorted cursor once; both original branches used an identical query.
    cursor = collection.find(filter=spec).sort([(sort_key, ASCENDING)])
    if latest_n is not None:
        # Call count() once (the original called it twice) and clamp at 0 so
        # a latest_n larger than the collection yields everything.
        skip_n = max(collection.count() - latest_n, 0)
        cursor = cursor[skip_n:]
    for document in cursor:
        yield document
A Python generator that yields tweets stored in a MongoDB collection.
57,601
def extract_connected_components(graph, connectivity_type, node_to_id):
    """Extract the largest connected component from a graph.

    graph: scipy sparse adjacency matrix (treated as directed).
    connectivity_type: "weak" or "strong".
    node_to_id: maps a node index in `graph` to an external id.

    Returns (component_csr_matrix, new_node_to_id, old_node_list).
    Raises RuntimeError on an invalid connectivity type or an empty graph.
    """
    nx_graph = nx.from_scipy_sparse_matrix(graph, create_using=nx.DiGraph())
    if connectivity_type == "weak":
        largest_connected_component_list = nxalgcom.weakly_connected_component_subgraphs(nx_graph)
    elif connectivity_type == "strong":
        largest_connected_component_list = nxalgcom.strongly_connected_component_subgraphs(nx_graph)
    else:
        print("Invalid connectivity type input.")
        raise RuntimeError
    try:
        # Largest component = subgraph with the most nodes.
        largest_connected_component = max(largest_connected_component_list, key=len)
    except ValueError:
        # max() over an empty iterable raises ValueError.
        print("Error: Empty graph.")
        raise RuntimeError
    old_node_list = largest_connected_component.nodes()
    # Map new contiguous indices (0..n-1) back to the old node labels.
    node_to_node = dict(zip(np.arange(len(old_node_list)), old_node_list))
    largest_connected_component = nx.to_scipy_sparse_matrix(largest_connected_component, dtype=np.float64, format="csr")
    # Re-key node_to_id for the renumbered nodes.
    new_node_to_id = {k: node_to_id[v] for k, v in node_to_node.items()}
    return largest_connected_component, new_node_to_id, old_node_list
Extract the largest connected component from a graph .
57,602
def sendEmail(self, subject, body, toAddress=False):
    """Send an email, or print it instead when self.testing is set.

    toAddress may be a ';'-separated address string; when falsy,
    self.toAddress is used.
    """
    recipients = (toAddress or self.toAddress).split(';')
    message = MIMEText(body)
    message['Subject'] = subject
    message['From'] = self.fromAddress
    message['To'] = ','.join(recipients)
    if self.testing:
        print('***Begin Test Email Message***')
        print(message)
        print('***End Test Email Message***')
    else:
        smtp = SMTP(self.server, self.port)
        smtp.sendmail(self.fromAddress, recipients, message.as_string())
        smtp.quit()
        print('email sent')
Sends an email using the agrc Python emailer.
57,603
def _get_completions(self):
    """Return a list of possible completions for the string ending at the
    point, setting self.begidx and self.endidx in the process.

    NOTE(review): the map() results assigned to `completions` behave as lists
    only under Python 2; this code appears to target Python 2 — confirm.
    """
    completions = []
    self.begidx = self.l_buffer.point
    self.endidx = self.l_buffer.point
    buf = self.l_buffer.line_buffer
    if self.completer:
        # Scan backwards to the previous completer delimiter.
        while self.begidx > 0:
            self.begidx -= 1
            if buf[self.begidx] in self.completer_delims:
                self.begidx += 1
                break
        text = ensure_str(u''.join(buf[self.begidx:self.endidx]))
        log(u'complete text="%s"' % ensure_unicode(text))
        i = 0
        # readline completer protocol: call with an increasing state index
        # until it returns None (or raises IndexError).
        while 1:
            try:
                r = ensure_unicode(self.completer(text, i))
            except IndexError:
                break
            i += 1
            if r is None:
                break
            elif r and r not in completions:
                completions.append(r)
            else:
                pass
        log(u'text completions=<%s>' % map(ensure_unicode, completions))
    if (self.complete_filesystem == "on") and not completions:
        # Fall back to filename completion on whitespace-delimited text.
        while self.begidx > 0:
            self.begidx -= 1
            if buf[self.begidx] in u' \t\n':
                self.begidx += 1
                break
        text = ensure_str(u''.join(buf[self.begidx:self.endidx]))
        log(u'file complete text="%s"' % ensure_unicode(text))
        completions = map(ensure_unicode, glob.glob(os.path.expanduser(text) + '*'))
        if self.mark_directories == u'on':
            # Append a path separator to directory completions.
            mc = []
            for f in completions:
                if os.path.isdir(f):
                    mc.append(f + os.sep)
                else:
                    mc.append(f)
            completions = mc
        log(u'fnames=<%s>' % map(ensure_unicode, completions))
    return completions
Return a list of possible completions for the string ending at the point. Also sets begidx and endidx in the process.
57,604
def complete(self, e):
    u"""Attempt to perform completion on the text before point.

    The actual completion performed is application-specific; the default is
    filename completion.
    """
    # The stray bare `u` token in the source was the residue of this stripped
    # u-prefixed docstring; restored here (it was a NameError otherwise).
    completions = self._get_completions()
    if completions:
        cprefix = commonprefix(completions)
        if len(cprefix) > 0:
            rep = [c for c in cprefix]
            point = self.l_buffer.point
            # Replace the word being completed with the common prefix.
            self.l_buffer[self.begidx:self.endidx] = rep
            self.l_buffer.point = point + len(rep) - (self.endidx - self.begidx)
        if len(completions) > 1:
            if self.show_all_if_ambiguous == u'on':
                self._display_completions(completions)
            else:
                self._bell()
    else:
        self._bell()
    self.finalize()
Attempt to perform completion on the text before point. The actual completion performed is application-specific. The default is filename completion.
57,605
def possible_completions(self, e):
    u"""List the possible completions of the text before point."""
    # Stray `u` in the source was a stripped docstring prefix; restored.
    completions = self._get_completions()
    self._display_completions(completions)
    self.finalize()
List the possible completions of the text before point.
57,606
def insert_completions(self, e):
    u"""Insert all completions of the text before point that would have been
    generated by possible-completions.
    """
    # Stray `u` in the source was a stripped docstring prefix; restored.
    completions = self._get_completions()
    b = self.begidx
    e = self.endidx
    for comp in completions:
        rep = [c for c in comp]
        rep.append(' ')
        # Splice this completion (plus a trailing space) into the buffer.
        self.l_buffer[b:e] = rep
        b += len(rep)
        e = b
    self.line_cursor = b
    self.finalize()
Insert all completions of the text before point that would have been generated by possible-completions.
57,607
def insert_text(self, string):
    u"""Insert text into the command line."""
    # Stray `u` in the source was a stripped docstring prefix; restored.
    self.l_buffer.insert_text(string, self.argument_reset)
    self.finalize()
Insert text into the command line.
57,608
def delete_char(self, e):
    u"""Delete the character at point.

    If point is at the beginning of the line, there are no characters in the
    line, and the last character typed was not bound to delete-char, then
    return EOF.
    """
    # Stray `u` in the source was a stripped docstring prefix; restored.
    self.l_buffer.delete_char(self.argument_reset)
    self.finalize()
Delete the character at point. If point is at the beginning of the line, there are no characters in the line, and the last character typed was not bound to delete-char, then return EOF.
57,609
def self_insert(self, e):
    u"""Insert the typed character itself."""
    # Stray `u` in the source was a stripped docstring prefix; restored.
    # NUL characters (ord 0) are ignored.
    if e.char and ord(e.char) != 0:
        self.insert_text(e.char)
    self.finalize()
Insert the typed character itself.
57,610
def paste(self, e):
    u"""Paste the Windows clipboard.

    Assumes a single line: strips other lines, end-of-line markers and
    trailing whitespace.
    """
    # Stray `u` in the source was a stripped docstring prefix; restored.
    if self.enable_win32_clipboard:
        txt = clipboard.get_clipboard_text_and_convert(False)
        # Keep only the first line, without CR/LF.
        txt = txt.split("\n")[0].strip("\r").strip("\n")
        log("paste: >%s<" % map(ord, txt))
        self.insert_text(txt)
    self.finalize()
Paste the Windows clipboard. Assumes a single line: strips other lines, end-of-line markers and trailing spaces.
57,611
def dump_functions ( self , e ) : u print txt = "\n" . join ( self . rl_settings_to_string ( ) ) print txt self . _print_prompt ( ) self . finalize ( )
Print all of the functions and their key bindings to the Readline output stream. If a numeric argument is supplied, the output is formatted in such a way that it can be made part of an inputrc file. This command is unbound by default.
57,612
def fit(self, X, y=None):
    """Fit by delegating to the ctmc.ctmc function; stores the estimated
    matrices on self and returns self (scikit-learn convention)."""
    estimates = ctmc(X, self.numstates, self.transintv, self.toltime, self.debug)
    (self.transmat, self.genmat, self.transcount, self.statetime) = estimates
    return self
Calls the ctmc . ctmc function
57,613
def RV_1(self):
    """Instantaneous RV of star 1 with respect to the system center-of-mass."""
    long_pop = self.orbpop_long
    mass_fraction = long_pop.M2 / (long_pop.M1 + long_pop.M2)
    return long_pop.RV * mass_fraction
Instantaneous RV of star 1 with respect to system center - of - mass
57,614
def RV_2(self):
    """Instantaneous RV of star 2 with respect to the system center-of-mass."""
    long_pop = self.orbpop_long
    com_term = -long_pop.RV * (long_pop.M1 / (long_pop.M1 + long_pop.M2))
    return com_term + self.orbpop_short.RV_com1
Instantaneous RV of star 2 with respect to system center - of - mass
57,615
def RV_3(self):
    """Instantaneous RV of star 3 with respect to the system center-of-mass."""
    long_pop = self.orbpop_long
    com_term = -long_pop.RV * (long_pop.M1 / (long_pop.M1 + long_pop.M2))
    return com_term + self.orbpop_short.RV_com2
Instantaneous RV of star 3 with respect to system center - of - mass
57,616
def save_hdf(self, filename, path=''):
    """Save both orbit populations to an .h5 file, under <path>/long and <path>/short."""
    for label, pop in (('long', self.orbpop_long), ('short', self.orbpop_short)):
        pop.save_hdf(filename, '{}/{}'.format(path, label))
Save to . h5 file .
57,617
def Rsky(self):
    """Projected sky separation of the stars."""
    pos = self.position
    return np.sqrt(pos.x ** 2 + pos.y ** 2)
Projected sky separation of stars
57,618
def RV_com1(self):
    """RV of star 1 relative to the center-of-mass."""
    total_mass = self.M1 + self.M2
    return self.RV * (self.M2 / total_mass)
RVs of star 1 relative to center - of - mass
57,619
def RV_com2(self):
    """RV of star 2 relative to the center-of-mass."""
    total_mass = self.M1 + self.M2
    return -self.RV * (self.M1 / total_mass)
RVs of star 2 relative to center - of - mass
57,620
def save_hdf(self, filename, path=''):
    """Persist the underlying dataframe to an .h5 file so state can be restored."""
    hdf_key = '{}/df'.format(path)
    self.dataframe.to_hdf(filename, hdf_key)
Saves all relevant data to . h5 file ; so state can be restored .
57,621
def add_pii_permissions(self, group, view_only=None):
    """Add PII model permissions to `group`.

    If view_only is truthy, only view/display permissions are granted on the
    PII models; otherwise all their permissions are granted. View permission
    on each model's historical (audit) twin is always added. Add/change/
    delete on edc_registration's RegisteredSubject are explicitly removed,
    while view of its historical model is kept.
    """
    # self.pii_models entries look like "app_label.modelname".
    pii_model_names = [m.split(".")[1] for m in self.pii_models]
    if view_only:
        permissions = Permission.objects.filter(
            (Q(codename__startswith="view") | Q(codename__startswith="display")),
            content_type__model__in=pii_model_names,
        )
    else:
        permissions = Permission.objects.filter(content_type__model__in=pii_model_names)
    for permission in permissions:
        group.permissions.add(permission)
    # Always grant view on each model's "historical<model>" twin.
    for model in self.pii_models:
        permissions = Permission.objects.filter(
            codename__startswith="view",
            content_type__app_label=model.split(".")[0],
            content_type__model=f"historical{model.split('.')[1]}",
        )
        for permission in permissions:
            group.permissions.add(permission)
    # Never allow mutating RegisteredSubject through this group.
    for permission in Permission.objects.filter(
        content_type__app_label="edc_registration",
        codename__in=[
            "add_registeredsubject",
            "delete_registeredsubject",
            "change_registeredsubject",
        ],
    ):
        group.permissions.remove(permission)
    permission = Permission.objects.get(
        content_type__app_label="edc_registration",
        codename="view_historicalregisteredsubject",
    )
    group.permissions.add(permission)
Adds PII model permissions .
57,622
def get_attribute_cardinality(attribute):
    """Return the cardinality of the given resource attribute.

    MEMBER attributes map to ONE, COLLECTION attributes to MANY; any other
    kind raises ValueError.
    """
    kind = attribute.kind
    if kind == RESOURCE_ATTRIBUTE_KINDS.MEMBER:
        return CARDINALITY_CONSTANTS.ONE
    if kind == RESOURCE_ATTRIBUTE_KINDS.COLLECTION:
        return CARDINALITY_CONSTANTS.MANY
    raise ValueError('Can not determine cardinality for non-terminal '
                     'attributes.')
Returns the cardinality of the given resource attribute .
57,623
def setup(path_config="~/.config/scalar/config.yaml", configuration_name=None):
    """Load a configuration from a default or specified configuration file,
    selecting the default or an explicitly-named configuration, then log in
    to the Matrix homeserver and join the configured room.

    Sets the module globals config, client, token and room. Exits the
    process when the config file does not exist.
    """
    global config
    global client
    global token
    global room
    path_config = Path(path_config).expanduser()
    log.debug("load config {path}".format(path=path_config))
    if not path_config.exists():
        log.error("no config {path} found".format(path=path_config))
        sys.exit()
    else:
        with open(str(path_config), "r") as _file:
            # NOTE(review): yaml.load without an explicit Loader is unsafe on
            # untrusted input; consider yaml.safe_load — confirm before changing.
            config = yaml.load(_file)
    if not configuration_name:
        # Pick the (last) configuration flagged as default.
        for configuration in list(config["configurations"].items()):
            if configuration[1]["default"]:
                config = configuration[1]
    else:
        # BUG FIX: the looked-up configuration was previously discarded;
        # assign it so the named configuration is actually used.
        config = config["configurations"][configuration_name]
    log.debug("Matrix username: " + config["username"])
    log.debug("connect to homeserver " + config["homeserver"])
    client = MatrixClient(config["homeserver"])
    token = client.login_with_password(username=config["username"], password=config["passcode"])
    log.debug("connect to room " + config["room_alias"])
    room = client.join_room(config["room_alias"])
Load a configuration from a default or specified configuration file accessing a default or specified configuration name .
57,624
def worker_wrapper(worker_instance, pid_path):
    """Run an RQ worker in this process, removing its pid file on exit.

    The pid file is cleaned up on normal exit, SIGINT and SIGTERM.
    """
    def exit_handler(*args):
        # Invoked by atexit (no args) or as a signal handler (signum, frame).
        if args:
            print("Exit py signal {signal}".format(signal=args[0]))
        remove(pid_path)

    atexit.register(exit_handler)
    signal.signal(signal.SIGINT, exit_handler)
    signal.signal(signal.SIGTERM, exit_handler)
    worker_instance.work()
    exit_handler(2)
A wrapper to start RQ worker as a new process .
57,625
def collection(self):
    """Return the redis-collection instance cached on the app context, or
    None when collections are disabled or no context is active."""
    if not self.include_collections:
        return None
    ctx = stack.top
    if ctx is None:
        return None
    if not hasattr(ctx, 'redislite_collection'):
        ctx.redislite_collection = Collection(redis=self.connection)
    return ctx.redislite_collection
Return the redis - collection instance .
57,626
def queue(self):
    """The queue property: a dict mapping queue name -> rq.Queue, cached on
    the app context; None when RQ support is disabled or no context exists."""
    if not self.include_rq:
        return None
    ctx = stack.top
    if ctx is None:
        return None
    if not hasattr(ctx, 'redislite_queue'):
        queues = {}
        for queue_name in self.queues:
            queues[queue_name] = Queue(queue_name, connection=self.connection)
        ctx.redislite_queue = queues
    return ctx.redislite_queue
The queue property . Return rq . Queue instance .
57,627
def start_worker(self):
    """Start an RQ worker in a new process, guarding against duplicates with
    a pid file.

    Returns the worker PID (existing or newly started), or None when RQ
    support is disabled.
    """
    if not self.include_rq:
        return None
    worker = Worker(queues=self.queues, connection=self.connection)
    worker_pid_path = current_app.config.get("{}_WORKER_PID".format(self.config_prefix), 'rl_worker.pid')
    try:
        # A readable pid file means a worker is assumed to be running already.
        worker_pid_file = open(worker_pid_path, 'r')
        # NOTE(review): non-numeric pid-file content raises ValueError, which
        # is not in the except clause below — confirm intended.
        worker_pid = int(worker_pid_file.read())
        print("Worker already started with PID=%d" % worker_pid)
        worker_pid_file.close()
        return worker_pid
    except (IOError, TypeError):
        # No (valid) pid file: fork a worker process and record its pid.
        self.worker_process = Process(target=worker_wrapper, kwargs={'worker_instance': worker, 'pid_path': worker_pid_path})
        self.worker_process.start()
        worker_pid_file = open(worker_pid_path, 'w')
        worker_pid_file.write("%d" % self.worker_process.pid)
        worker_pid_file.close()
        print("Start a worker process with PID=%d" % self.worker_process.pid)
        return self.worker_process.pid
Trigger new process as a RQ worker .
57,628
def image_save_buffer_fix(maxblock=1048576):
    """Temporarily raise PIL's ImageFile.MAXBLOCK while the caller saves an image.

    Generator-based context-manager body (presumably decorated with
    contextlib.contextmanager at the definition site — confirm). The previous
    MAXBLOCK value is always restored, even if the body raises.
    """
    before = ImageFile.MAXBLOCK
    ImageFile.MAXBLOCK = maxblock
    try:
        yield
    finally:
        ImageFile.MAXBLOCK = before
Contextmanager that change MAXBLOCK in ImageFile .
57,629
def upgrade_many(upgrade=True, create_examples_all=True):
    """Install or upgrade many Arduino libraries from a fixed URL list.

    Each URL is installed via install_lib(); failures are printed and
    skipped. Optionally creates the "all" examples menu item afterwards.
    """
    urls = set()

    def inst(url):
        print('upgrading %s' % url)
        assert url not in urls  # guard against accidental duplicates in the list
        urls.add(url)
        try:
            lib = install_lib(url, upgrade)
            print(' -> %s' % lib)
        except Exception as e:
            # Best effort: report and continue with the remaining libraries.
            print(e)

    # Data-driven refactor: one loop over the URL list instead of ~70
    # repetitive inst(...) statements. Order is preserved.
    library_urls = (
        'https://github.com/sensorium/Mozzi/zipball/master',
        'https://github.com/madsci1016/Arduino-EasyTransfer/zipball/master',
        'https://github.com/sparkfun/SevSeg/zipball/master',
        'https://github.com/madsci1016/Arduino-SoftEasyTransfer/zipball/master',
        'https://github.com/madsci1016/Arduino-PS2X/zipball/master',
        'https://github.com/kerinin/arduino-splines/zipball/master',
        'https://github.com/asynclabs/WiShield/zipball/master',
        'https://github.com/asynclabs/dataflash/zipball/master',
        'https://github.com/slugmobile/AtTouch/zipball/master',
        'https://github.com/carlynorama/Arduino-Library-Button/zipball/master',
        'https://github.com/carlynorama/Arduino-Library-FancyLED/zipball/master',
        'https://github.com/markfickett/arduinomorse/zipball/master',
        'https://github.com/rocketscream/Low-Power/zipball/master',
        'https://github.com/arduino-libraries/CapacitiveSensor/zipball/master',
        'http://arduiniana.org/PString/PString2.zip',
        'http://arduiniana.org/Flash/Flash3.zip',
        'http://arduiniana.org/NewSoftSerial/NewSoftSerial10c.zip',
        'http://arduiniana.org/Streaming/Streaming4.zip',
        'http://arduiniana.org/PWMServo/PWMServo.zip',
        'http://arduiniana.org/TinyGPS/TinyGPS10.zip',
        'http://rogue-code.googlecode.com/files/Arduino-Library-Tone.zip',
        'http://arduino-playground.googlecode.com/files/LedDisplay03.zip',
        'http://sserial2mobile.googlecode.com/files/SSerial2Mobile-1.1.0.zip',
        'http://webduino.googlecode.com/files/webduino-1.4.1.zip',
        'http://arduino-pid-library.googlecode.com/files/PID_v1.0.1.zip',
        'http://ideoarduinolibraries.googlecode.com/files/Qtouch1Wire.zip',
        'http://arduino-timerone.googlecode.com/files/TimerOne-v8.zip',
        'http://arduinounit.googlecode.com/files/arduinounit-1.4.2.zip',
        'http://arduinode.googlecode.com/files/arduinode_0.1.zip',
        'http://arduino-edb.googlecode.com/files/EDB_r7.zip',
        'http://arduino-dblib.googlecode.com/files/DB.zip',
        'http://morse-endecoder.googlecode.com/files/Morse_EnDecoder_2010.12.06.tar.gz',
        'http://arduino-pinchangeint.googlecode.com/files/PinChangeInt.zip',
        'http://arduino-tvout.googlecode.com/files/TVout_R5.91.zip',
        'http://narcoleptic.googlecode.com/files/Narcoleptic_v1a.zip',
        'http://www.pjrc.com/teensy/arduino_libraries/OneWire.zip',
        'http://www.pjrc.com/teensy/arduino_libraries/VirtualWire.zip',
        'http://www.pjrc.com/teensy/arduino_libraries/FrequencyTimer2.zip',
        'http://www.pjrc.com/teensy/arduino_libraries/FreqCount.zip',
        'http://www.pjrc.com/teensy/arduino_libraries/FreqMeasure.zip',
        'http://www.shikadi.net/files/arduino/SerialIP-1.0.zip',
        'http://siggiorn.com/wp-content/uploads/libraries/ArduinoByteBuffer.zip',
        'http://siggiorn.com/wp-content/uploads/libraries/ArduinoSerialManager.zip',
        'http://arduino-tweet.appspot.com/Library-Twitter-1.2.2.zip',
        'http://sebastian.setz.name/wp-content/uploads/2011/01/multiCameraIrControl_1-5.zip',
        'http://alexandre.quessy.net/static/avr/Tween_01.zip',
        'http://www.lpelettronica.it/images/stories/LPM11162_images/Arduino/LPM11162_ArduinoLib_v1.zip',
        'http://interface.khm.de/wp-content/uploads/2009/01/FreqCounter_1_12.zip',
        'http://interface.khm.de/wp-content/uploads/2010/06/FreqPeriod_1_12.zip',
        'http://arduino.cc/playground/uploads/Main/PS2Keyboard002.zip',
        'http://arduino.cc/playground/uploads/Code/Metro.zip',
        'http://www.arduino.cc/playground/uploads/Main/MsTimer2.zip',
        'http://arduino.cc/playground/uploads/Main/LedControl.zip',
        'http://arduino.cc/playground/uploads/Code/Bounce.zip',
        'http://arduino.cc/playground/uploads/Main/CapacitiveSense003.zip',
        'http://arduino.cc/playground/uploads/Main/PinChangeInt.zip',
        'http://arduino.cc/playground/uploads/Code/TimedAction-1_6.zip',
        'http://arduino.cc/playground/uploads/Code/EventFuse.zip',
        'http://arduino.cc/playground/uploads/Code/Charlieplex.zip',
        'http://arduino.cc/playground/uploads/Code/DigitalToggle.zip',
        'http://arduino.cc/playground/uploads/Code/Enerlib.zip',
        'http://arduino.cc/playground/uploads/Code/AdvButton_11.zip',
        'http://arduino.cc/playground/uploads/Code/MatrixMath.zip',
        'http://arduino.cc/playground/uploads/Code/StackArray.zip',
        'http://arduino.cc/playground/uploads/Code/StackList.zip',
        'http://arduino.cc/playground/uploads/Code/QueueArray.zip',
        'http://arduino.cc/playground/uploads/Code/QueueList.zip',
        'http://arduino.cc/playground/uploads/Code/Ping-1_3.zip',
        'http://www.arduino.cc/playground/uploads/Code/LED.zip',
    )
    for url in library_urls:
        inst(url)
    if create_examples_all:
        print('create "all" menu item')
        exampallcreate.create_examples_all()
    print('install finished')
Upgrade many libraries.
57,630
def confirm(self, batch_id=None, filename=None):
    """Flag sent-but-unconfirmed export history rows as confirmed.

    Filters by batch_id or filename when given, otherwise confirms all sent,
    unconfirmed rows. Stamps a shared confirmation code and timestamp and
    returns the code's identifier.

    Raises ConfirmationError when there is nothing to confirm.
    """
    if batch_id or filename:
        export_history = self.history_model.objects.using(self.using).filter(
            Q(batch_id=batch_id) | Q(filename=filename),
            sent=True,
            confirmation_code__isnull=True,
        )
    else:
        export_history = self.history_model.objects.using(self.using).filter(
            sent=True, confirmation_code__isnull=True
        )
    if export_history.count() == 0:
        raise ConfirmationError("Nothing to do. No history of sent and unconfirmed files")
    else:
        confirmation_code = ConfirmationCode()
        # One code/timestamp shared by every row in the batch.
        export_history.update(
            confirmation_code=confirmation_code.identifier,
            confirmation_datetime=get_utcnow(),
        )
    return confirmation_code.identifier
Flags the batch as confirmed by updating confirmation_datetime on the history model for this batch .
57,631
def clean_single_word(word, lemmatizing="wordnet"):
    """Stem or lemmatize a single word.

    lemmatizing: one of "porter", "snowball" or "wordnet".
    Returns the stem/lemma. Raises RuntimeError on an unknown lemmatizer.

    NOTE: a stemmer/lemmatizer instance is constructed per call, matching the
    original behavior; hoist it if this is ever used in a hot loop.
    """
    if lemmatizing == "porter":
        lemma = PorterStemmer().stem(word)
    elif lemmatizing == "snowball":
        lemma = SnowballStemmer('english').stem(word)
    elif lemmatizing == "wordnet":
        lemma = WordNetLemmatizer().lemmatize(word)
    else:
        # Fix: raise with the message instead of print-then-bare-raise.
        # The exception type is unchanged for existing callers.
        raise RuntimeError("Invalid lemmatizer argument.")
    return lemma
Performs stemming or lemmatizing on a single word .
57,632
def clean_document(document, sent_tokenize, _treebank_word_tokenize, tagger, lemmatizer, lemmatize, stopset, first_cap_re, all_cap_re, digits_punctuation_whitespace_re, pos_set):
    """Extract a clean bag-of-words from a document.

    Pipeline: tokenize -> split camel case + lowercase -> keep only tokens
    whose POS tag is in pos_set -> strip digits/punctuation/whitespace ->
    drop stopwords -> lemmatize.

    Returns (lemma_list, lemma_to_keywordbag) where lemma_to_keywordbag maps
    each lemma to a bag (dict) of the original keywords and their counts.
    """
    try:
        tokenized_document = fast_word_tokenize(document, sent_tokenize, _treebank_word_tokenize)
    except LookupError:
        print("Warning: Could not tokenize document. If these warnings are commonplace, there is a problem with the nltk resources.")
        # Degrade gracefully: return empty results for this document.
        lemma_list = list()
        lemma_to_keywordbag = defaultdict(lambda: defaultdict(int))
        return lemma_list, lemma_to_keywordbag
    tokenized_document = [separate_camel_case(token, first_cap_re, all_cap_re).lower() for token in tokenized_document]
    tokenized_document = tagger.tag(tokenized_document)
    # Keep the word (token[0]) only when its tag (token[1]) is allowed.
    tokenized_document = [token[0] for token in tokenized_document if (token[1] in pos_set)]
    tokenized_document_no_punctuation = list()
    append_token = tokenized_document_no_punctuation.append  # hoisted bound method
    for token in tokenized_document:
        new_token = remove_digits_punctuation_whitespace(token, digits_punctuation_whitespace_re)
        if not new_token == u'':
            append_token(new_token)
    tokenized_document_no_stopwords = list()
    append_word = tokenized_document_no_stopwords.append
    for word in tokenized_document_no_punctuation:
        if word not in stopset:
            append_word(word)
    lemma_to_keywordbag = defaultdict(lambda: defaultdict(int))
    final_doc = list()
    append_lemma = final_doc.append
    for word in tokenized_document_no_stopwords:
        lemma = lemmatize(word)
        append_lemma(lemma)
        # Remember which surface keywords produced each lemma, with counts.
        lemma_to_keywordbag[lemma][word] += 1
    # Stopwords are filtered a second time, post-lemmatization.
    lemma_list = list()
    append_word = lemma_list.append
    for word in final_doc:
        if word not in stopset:
            append_word(word)
    return lemma_list, lemma_to_keywordbag
Extracts a clean bag - of - words from a document .
57,633
def clean_corpus_serial(corpus, lemmatizing="wordnet"):
    """Extract a bag-of-words from each document in `corpus`, serially.

    Returns (list_of_bags_of_words, lemma_to_keywordbag_total) where the
    second item aggregates keyword counts per lemma over the whole corpus.
    """
    list_of_bags_of_words = list()
    lemma_to_keywordbag_total = defaultdict(lambda: defaultdict(int))
    for document in corpus:
        word_list, lemma_to_keywordbag = clean_document(document=document, lemmatizing=lemmatizing)
        list_of_bags_of_words.append(combine_word_list(word_list))
        # Merge this document's per-lemma keyword counts into the totals.
        for lemma, keywordbag in lemma_to_keywordbag.items():
            for keyword, multiplicity in keywordbag.items():
                lemma_to_keywordbag_total[lemma][keyword] += multiplicity
    return list_of_bags_of_words, lemma_to_keywordbag_total
Extracts a bag - of - words from each document in a corpus serially .
57,634
def extract_bag_of_words_from_corpus_parallel(corpus, lemmatizing="wordnet"):
    """Extract one bag-of-words from a list of documents using a process pool.

    The corpus is partitioned into chunks which are cleaned in parallel by
    clean_corpus_serial; the partial results are then merged.
    """
    pool = Pool(processes=get_threads_number() * 2,)
    # Integer chunk size (the original passed a float under Python 3),
    # never smaller than 1.
    chunk_size = max(1, len(corpus) // get_threads_number())
    partitioned_corpus = chunks(corpus, chunk_size)
    results = pool.map(partial(clean_corpus_serial, lemmatizing=lemmatizing), partitioned_corpus)
    # BUG FIX: pool.map returns a list of (bags, lemma_map) tuples; the
    # original tuple-unpacked that list directly, which only works when there
    # happen to be exactly two chunks. Transpose it instead.
    if results:
        list_of_bags_of_words, list_of_lemma_to_keywordset_maps = zip(*results)
    else:
        list_of_bags_of_words, list_of_lemma_to_keywordset_maps = [], []
    bag_of_words = reduce_list_of_bags_of_words(list_of_bags_of_words)
    lemma_to_keywordbag_total = defaultdict(lambda: defaultdict(int))
    for lemma_to_keywordbag in list_of_lemma_to_keywordset_maps:
        for lemma, keywordbag in lemma_to_keywordbag.items():
            for keyword, multiplicity in keywordbag.items():
                lemma_to_keywordbag_total[lemma][keyword] += multiplicity
    return bag_of_words, lemma_to_keywordbag_total
This extracts one bag - of - words from a list of strings . The documents are mapped to parallel processes .
57,635
def middleware(func):
    """Decorator that runs the route's middleware chain before `func`.

    Expects a 'middleware' keyword argument (a list of middleware classes or
    None). OPTIONS requests short-circuit to a 200 response; the first
    middleware whose status is False aborts with its response.
    """
    @wraps(func)
    def parse(*args, **kwargs):
        chain = copy.deepcopy(kwargs.pop('middleware'))
        if request.method == "OPTIONS":
            return JsonResponse(200)
        if chain is None:
            return func(*args, **kwargs)
        for mware_cls in chain:
            ware = mware_cls()
            if ware.status is False:
                return ware.response
        return func(*args, **kwargs)
    return parse
Executes routes . py route middleware
57,636
def progress_bar_media():
    """progress_bar_media simple tag: render the JS media (optionally
    including jQuery) needed by the upload progress bar."""
    if PROGRESSBARUPLOAD_INCLUDE_JQUERY:
        scripts = ["http://code.jquery.com/jquery-1.8.3.min.js", ]
    else:
        scripts = []
    scripts.append("js/progress_bar.js")
    return Media(js=scripts).render()
progress_bar_media simple tag
57,637
def send(MESSAGE, SOCKET, MESSAGE_ID=None, CODE_FILE=None, CODE_LINE=None, CODE_FUNC=None, **kwargs):
    r"""Send a message to the journal.

    Extra keyword arguments become additional journal fields (uppercased).
    When none of CODE_FILE/CODE_LINE/CODE_FUNC are given, they are filled in
    from the caller's stack frame.
    """
    args = ['MESSAGE=' + MESSAGE]
    if MESSAGE_ID is not None:
        # Accept either a UUID (use its .hex) or a plain string.
        id = getattr(MESSAGE_ID, 'hex', MESSAGE_ID)
        args.append('MESSAGE_ID=' + id)
    # Only auto-detect the code location when the caller supplied none of it.
    if CODE_LINE == CODE_FILE == CODE_FUNC == None:
        CODE_FILE, CODE_LINE, CODE_FUNC = _traceback.extract_stack(limit=2)[0][:3]
    if CODE_FILE is not None:
        args.append('CODE_FILE=' + CODE_FILE)
    if CODE_LINE is not None:
        args.append('CODE_LINE={:d}'.format(CODE_LINE))
    if CODE_FUNC is not None:
        args.append('CODE_FUNC=' + CODE_FUNC)
    args.extend(_make_line(key.upper(), val) for key, val in kwargs.items())
    return sendv(SOCKET, *args)
Send a message to the journal.
57,638
def exists(self):
    """Return True if a row with this model's id is already in the database."""
    return self.query.filter_by(id=self.id).first() is not None
Checks if item already exists in database
57,639
def delete(self):
    """Best-effort delete of this row from the database; always returns None.

    A missing row and any database error are swallowed deliberately.
    """
    try:
        if self.exists() is False:
            return None
        self.db.session.delete(self)
        self.db.session.commit()
    except (Exception, BaseException):
        # NOTE: broad by design — delete is best-effort.
        return None
Easy delete for db models
57,640
def row_to_dict(self, row):
    """Convert a raw GCVS record (sequence of fixed-position fields) into a
    dictionary of star data."""
    constellation = self.parse_constellation(row[0])
    star_name = self.parse_name(row[1])
    ra, dec = self.parse_coordinates(row[2])
    variable_type = row[3].strip()
    max_magnitude, symbol = self.parse_magnitude(row[4])
    min_magnitude, symbol = self.parse_magnitude(row[5])
    # A '(' symbol means the minimum field is an amplitude relative to the maximum.
    if symbol == '(' and max_magnitude is not None:
        min_magnitude = max_magnitude + min_magnitude
    return {
        'constellation': constellation,
        'name': star_name,
        'ra': ra,
        'dec': dec,
        'variable_type': variable_type,
        'max_magnitude': max_magnitude,
        'min_magnitude': min_magnitude,
        'epoch': self.parse_epoch(row[8]),
        'period': self.parse_period(row[10]),
    }
Converts a raw GCVS record to a dictionary of star data .
57,641
def parse_magnitude(self, magnitude_str):
    """Parse a GCVS magnitude field.

    Returns (magnitude, symbol): magnitude is a float, or None when the GCVS
    record does not list one; symbol is the stripped leading flag character.
    """
    symbol = magnitude_str[0].strip()
    digits = magnitude_str[1:6].strip()
    if digits:
        return float(digits), symbol
    return None, symbol
Converts magnitude field to a float value or None if GCVS does not list the magnitude .
57,642
def parse_period(self, period_str):
    """Parse a GCVS period field into a float, or None when the record has no period."""
    cleaned = period_str.translate(TRANSLATION_MAP)[3:14].strip()
    if not cleaned:
        return None
    return float(cleaned)
Converts period field to a float value or None if there is no period in GCVS record .
57,643
def find_hwpack_dir(root):
    """Search under `root` for the hardware-pack directory (the one containing boards.txt).

    Asserts that exactly one such directory exists and returns it.
    """
    root = path(root)
    log.debug('files in dir: %s', root)
    for entry in root.walkfiles():
        log.debug(' %s', entry)
    hwpack_dir = None
    for boards_file in (root.walkfiles('boards.txt')):
        assert not hwpack_dir  # more than one boards.txt would be ambiguous
        hwpack_dir = boards_file.parent
        log.debug('found hwpack: %s', hwpack_dir)
    assert hwpack_dir
    return hwpack_dir
Search for the hwpack directory under root.
57,644
def install_hwpack(url, replace_existing=False):
    """Install a hardware pack from the web or the local file system.

    Downloads and extracts the archive, locates the hwpack directory inside
    it, and moves it into the Arduino hwpack directory. An existing pack is
    only replaced when replace_existing is True.
    """
    d = tmpdir(tmpdir())
    f = download(url)
    Archive(f).extractall(d)
    clean_dir(d)
    src_dhwpack = find_hwpack_dir(d)
    targ_dhwpack = hwpack_dir() / src_dhwpack.name
    doaction = 0
    if targ_dhwpack.exists():
        log.debug('hwpack already exists: %s', targ_dhwpack)
        if replace_existing:
            log.debug('remove %s', targ_dhwpack)
            targ_dhwpack.rmtree()
            doaction = 1
    else:
        doaction = 1
    if doaction:
        log.debug('move %s -> %s', src_dhwpack, targ_dhwpack)
        src_dhwpack.move(targ_dhwpack)
        # Propagate the hwpack root's permission mode onto the new tree.
        hwpack_dir().copymode(targ_dhwpack)
        for x in targ_dhwpack.walk():
            hwpack_dir().copymode(x)
Install a hwpack library from the web or the local file system.
57,645
def create(self, volume_id, vtype, size, affinity):
    """Create a volume; a random id is generated when volume_id is falsy."""
    if not volume_id:
        volume_id = str(uuid.uuid4())
    payload = {'volume_type_name': vtype, 'size': size, 'affinity': affinity}
    return self.http_put('/volumes/%s' % volume_id, params=self.unused(payload))
create a volume
57,646
def restore(self, volume_id, **kwargs):
    """Restore a volume from a backup.

    Required kwargs: backup, size. Optional: volume_type_name (defaults to
    'vtype'). A random volume id is generated when volume_id is falsy.
    """
    self.required('create', kwargs, ['backup', 'size'])
    volume_id = volume_id or str(uuid.uuid4())
    # BUG FIX: 'volume_type_name' is optional, but indexing kwargs raised
    # KeyError when the caller omitted it; use .get() instead.
    kwargs['volume_type_name'] = kwargs.get('volume_type_name') or 'vtype'
    kwargs['size'] = kwargs.get('size') or 1
    return self.http_put('/volumes/%s' % volume_id, params=self.unused(kwargs))
restore a volume from a backup
57,647
def create(self, volume_id, backup_id):
    """Create a backup of `volume_id`; a random backup id is generated when backup_id is falsy."""
    if not backup_id:
        backup_id = str(uuid.uuid4())
    endpoint = '/backups/%s' % backup_id
    return self.http_put(endpoint, params={'volume': volume_id})
create a backup
57,648
def delete(self, volume_id, force=False):
    """Delete the export for `volume_id`; pass force=True to force removal."""
    endpoint = '/volumes/%s/export' % volume_id
    return self.http_delete(endpoint, params={'force': force})
delete an export
57,649
def update(self, volume_id, **kwargs):
    """Update an export; only whitelisted fields are accepted."""
    self.allowed('update', kwargs,
                 ['status', 'instance_id', 'mountpoint', 'ip', 'initiator',
                  'session_ip', 'session_initiator'])
    payload = self.unused(kwargs)
    return self.http_post('/volumes/%s/export' % volume_id, params=payload)
update an export
57,650
def proto_refactor(proto_filename, namespace, namespace_path):
    """Refactor a Protobuf file so imports go through `namespace_path` (which
    maps to the desired Python package structure), ensuring the proto2
    syntax line is present since protoc complains without it.

    Returns the refactored file contents as a string.
    """
    with open(proto_filename) as f:
        data = f.read()
    if not re.search('syntax = "proto2"', data):
        data = 'syntax = "proto2";\n' + data
    substitution = 'import "{}/\\1";'.format(namespace_path)
    return re.sub('import\s+"([^"]+\.proto)"\s*;', substitution, data)
This method refactors a Protobuf file to import from a namespace that will map to the desired python package structure . It also ensures that the syntax is set to proto2 since protoc complains without it .
57,651
def proto_refactor_files(dest_dir, namespace, namespace_path):
    """Run proto_refactor on every .proto file under dest_dir, rewriting each in place."""
    for dirpath, _subdirs, filenames in os.walk(dest_dir):
        for name in filenames:
            full_path = os.path.join(dirpath, name)
            if fnmatch.fnmatch(full_path, '*.proto'):
                data = proto_refactor(full_path, namespace, namespace_path)
                with open(full_path, 'w') as f:
                    f.write(data)
This method runs the refactoring on all the Protobuf files in the Dropsonde repo .
57,652
def clone_source_dir(source_dir, dest_dir):
    """Copy the source Protobuf tree into a build directory, replacing any existing copy."""
    dest_exists = os.path.isdir(dest_dir)
    if dest_exists:
        print('removing', dest_dir)
        shutil.rmtree(dest_dir)
    shutil.copytree(source_dir, dest_dir)
Copies the source Protobuf files into a build directory .
57,653
def are_budget_data_package_fields_filled_in(self, resource):
    """Return True when every budget-data-package field (country, currency,
    year, status) is present in the resource; otherwise it cannot be a
    budget data package."""
    required_fields = ('country', 'currency', 'year', 'status')
    return all(self.in_resource(field, resource) for field in required_fields)
Check if the budget data package fields are all filled in, because if not then this can't be a budget data package.
57,654
def generate_budget_data_package(self, resource):
    """Try to attach a budget data package schema to the resource.

    The schema only allows fields defined in the budget data package
    specification; an unknown field raises NotABudgetDataPackageException,
    in which case the resource is marked with an empty schema and ignored.
    Mutates `resource` in place; returns None.
    """
    if not self.are_budget_data_package_fields_filled_in(resource):
        return
    try:
        resource['schema'] = self.data.schema
    except exceptions.NotABudgetDataPackageException:
        log.debug('Resource is not a Budget Data Package')
        resource['schema'] = []
        return
    resource['BudgetDataPackage'] = True
    resource['standard'] = self.data.version
    resource['granularity'] = self.data.granularity
    resource['type'] = self.data.budget_type
Try to grab a budget data package schema from the resource . The schema only allows fields which are defined in the budget data package specification . If a field is found that is not in the specification this will return a NotABudgetDataPackageException and in that case we can just return and ignore the resource
57,655
def before_update(self, context, current, resource):
    """Regenerate the budget data package when the resource changed.

    Nothing happens when the mandatory fields are missing, or when there
    is no new upload and the URL is unchanged.
    """
    if not self.are_budget_data_package_fields_filled_in(resource):
        return
    if resource.get('upload', '') == '':
        # No uploaded file: only reload when the URL actually changed.
        if current['url'] == resource['url']:
            return
        self.data.load(resource['url'])
    else:
        self.data.load(resource['upload'].file)
    self.generate_budget_data_package(resource)
If the resource has changed, we try to generate a budget data package; if it hasn't, we don't do anything.
57,656
def upload_directory_contents(input_dict, environment_dict):
    """Upload every regular file in a user-supplied source directory to all
    of the vessels in the current target group.

    Works by repeatedly invoking seash's own ``upload`` command callback,
    once per file, faking the parsed-command dictionary it expects.
    Sub-directories are skipped, not recursed into.
    NOTE(review): this is Python 2 code (print statements, dict.keys()
    indexing).
    """
    # An identity key and a target group must both be selected first.
    if environment_dict["currentkeyname"] is None:
        raise seash_exceptions.UserError()
    if environment_dict["currenttarget"] is None:
        raise seash_exceptions.UserError()
    # The command parser stores the user-supplied directory as the single
    # child of the "uploaddir" node; an empty children dict means the
    # argument was missing.
    try:
        source_directory = input_dict["uploaddir"]["children"].keys()[0]
    except IndexError:
        raise seash_exceptions.UserError()
    # Validate the path before touching the network.
    if not os.path.exists(source_directory):
        raise seash_exceptions.UserError("Error: Source directory '" + source_directory + "' does not exist.")
    if not os.path.isdir(source_directory):
        raise seash_exceptions.UserError("Error: Source directory '" + source_directory + "' is not a directory.\nDid you mean to use the 'upload' command instead?")
    file_list = os.listdir(source_directory)
    for filename in file_list:
        path_and_filename = source_directory + os.sep + filename
        if not os.path.isdir(path_and_filename):
            print "Uploading '" + path_and_filename + "'..."
            # Fake the input dict that seash's upload callback expects.
            faked_input_dict = {"upload": {"name": "upload", "children": {path_and_filename: {"name": "filename"}}}}
            command_callbacks.upload_filename(faked_input_dict, environment_dict)
        else:
            print "Skipping sub-directory '" + filename + "'. You may upload it separately."
This function uploads every file in a user-supplied source directory to all of the vessels in the current target group. It essentially calls seash's upload function repeatedly, each time with a file name taken from the source directory.
57,657
def __load_file(self, key_list) -> str:
    """Load the translator file named after the first key in *key_list*.

    The consumed key is removed from the list.  Raises FileNotFoundError
    when no matching file exists under ``self.path``.
    """
    filename = str(key_list.pop(0)) + self.extension
    full_path = os.path.join(self.path, filename)
    if not os.path.exists(full_path):
        raise FileNotFoundError(full_path)
    return Json.from_file(full_path)
Load a translator file
57,658
def remove_programmer(programmer_id):
    """Remove a programmer entry from programmers.txt by its id prefix."""
    log.debug('remove %s', programmer_id)
    prefix = programmer_id + '.'
    kept = [line for line in programmers_txt().lines()
            if not line.strip().startswith(prefix)]
    programmers_txt().write_lines(kept)
remove programmer .
57,659
def load(self, entity_class, entity):
    """Load the given repository entity into the session and return the
    session-resident entity.

    If the entity was loaded before, the cached instance is returned;
    otherwise it is either cloned into the session or added directly,
    depending on the clone-on-load setting.  Raises ValueError for
    entities without an ID.
    """
    # Pending changes must hit the backend before we look anything up.
    if self.__needs_flushing:
        self.flush()
    if entity.id is None:
        raise ValueError('Can not load entity without an ID.')
    cache = self.__get_cache(entity_class)
    sess_ent = cache.get_by_id(entity.id)
    if sess_ent is None:
        if self.__clone_on_load:
            # Work on a copy so the caller's object stays untouched.
            sess_ent = self.__clone(entity, cache)
        else:
            cache.add(entity)
            sess_ent = entity
        # Freshly loaded entities start out CLEAN in the unit of work.
        self.__unit_of_work.register_clean(entity_class, sess_ent)
    return sess_ent
Load the given repository entity into the session and return a clone . If it was already loaded before look up the loaded entity and return it .
57,660
def onStart(self, event):
    """Display the environment of a container that just started.

    Parses the docker ``Config.Env`` list of ``KEY=VALUE`` strings into
    a dict and prints it.  NOTE(review): Python 2 code (print statements).
    """
    c = event.container
    print '+' * 5, 'started:', c
    # Split "KEY=VALUE" on the first '=' only, so values may contain '='.
    kv = lambda s: s.split('=', 1)
    env = {k: v for (k, v) in (kv(s) for s in c.attrs['Config']['Env'])}
    print env
Display the environment of a started container
57,661
def _identifier_data(self):
    """Return a unique identifier derived from the sorted file names,
    the folder name itself, and the meta-data identifiers."""
    parts = sorted(ff.name for ff in self.files)
    parts.append(self.path.name)
    parts += self._identifier_meta()
    return hash_obj(parts)
Return a unique identifier for the folder data
57,662
def _search_files(path):
    """Search a folder for usable data files.

    Returns a sorted list of ``(path, format_name)`` tuples containing
    only non-series formats that pass verification.  Ambiguities between
    certain format families are resolved by dropping the less specific
    candidates; any remaining mix of formats raises
    MultipleFormatsNotSupportedError.
    """
    path = pathlib.Path(path)
    fifo = []
    for fp in path.glob("*"):
        if fp.is_dir():
            continue
        # First matching non-series format wins for each file.
        for fmt in formats:
            if not fmt.is_series and fmt.verify(fp):
                fifo.append((fp, fmt.__name__))
                break
    theformats = [ff[1] for ff in fifo]
    formset = set(theformats)
    if len(formset) > 1:
        # Mixed formats: drop the hdf5-based qpimage candidates first.
        fmts_qpimage = ["SingleHdf5Qpimage", "SeriesHdf5Qpimage"]
        fifo = [ff for ff in fifo if ff[1] not in fmts_qpimage]
    if len(formset) > 1 and "SingleTifPhasics" in theformats:
        # Phasics tif files may also verify as plain holo tifs; prefer
        # the more specific phasics reader.
        # NOTE(review): fmts_badtif is a *string*, so the membership test
        # below is a substring check — it works for this single name but
        # looks like it was meant to be a list.
        fmts_badtif = "SingleTifHolo"
        fifo = [ff for ff in fifo if ff[1] not in fmts_badtif]
    theformats2 = [ff[1] for ff in fifo]
    formset2 = set(theformats2)
    if len(formset2) > 1:
        msg = "Qpformat does not support multiple different file " \
              + "formats within one directory: {}".format(formset2)
        raise MultipleFormatsNotSupportedError(msg)
    fifo = sorted(fifo)
    return fifo
Search a folder for data files
57,663
def get_identifier(self, idx):
    """Return a unique identifier string for the data at index *idx*."""
    fname = self._get_cropped_file_names()[idx]
    # The trailing index is 1-based, matching the series numbering.
    return "{}:{}:{}".format(self.identifier, fname, idx + 1)
Return an identifier for the data at index idx
57,664
def verify(path):
    """Verify that *path* is a folder holding exactly one file format."""
    fifo = SeriesFolder._search_files(path)
    if not fifo:
        return False
    # Every detected file must share a single format name.
    return len({ff[1] for ff in fifo}) == 1
Verify folder file format
57,665
def load_file(path):
    """Load a whitespace-separated text data file into a 2D float array.

    Blank lines and lines starting with '#' are skipped; decimal commas
    are converted to dots.  Returns an ``(n, m)`` float ndarray where the
    column count *m* is taken from the first data row.

    :param path: path to the text file
    :returns: 2D numpy.ndarray of dtype float
    """
    path = pathlib.Path(path)
    # Read via a context manager so the handle is closed promptly; the
    # original used path.open().readlines() and leaked the file handle.
    with path.open() as fd:
        lines = fd.readlines()
    rows = [l for l in lines if len(l.strip()) and not l.startswith("#")]
    n = len(rows)
    m = len(rows[0].strip().split())
    res = np.zeros((n, m), dtype=float)
    for ii in range(n):
        res[ii] = np.array(rows[ii].strip().replace(",", ".").split(),
                           dtype=float)
    return res
Load a txt data file
57,666
def emit(self, record):
    """Write *record* as a journald event.

    When the record was logged with a mapping argument, its entries are
    merged over the handler's static extras and forwarded as journal
    fields.  Any failure is routed through ``handleError`` as required
    for logging handlers.
    """
    # collections.Mapping was removed in Python 3.10; collections.abc
    # is the correct home for the ABC.
    if record.args and isinstance(record.args, collections.abc.Mapping):
        extra = dict(self._extra, **record.args)
    else:
        extra = self._extra
    try:
        msg = self.format(record)
        pri = self.mapPriority(record.levelno)
        mid = getattr(record, 'MESSAGE_ID', None)
        send(msg,
             SOCKET=self.socket,
             MESSAGE_ID=mid,
             PRIORITY=format(pri),
             LOGGER=record.name,
             THREAD_NAME=record.threadName,
             CODE_FILE=record.pathname,
             CODE_LINE=record.lineno,
             CODE_FUNC=record.funcName,
             **extra)
    except Exception:
        # Never let logging itself crash the application.
        self.handleError(record)
Write record as journal event .
57,667
def mapPriority(levelno):
    """Map a stdlib logging level number to a journald priority."""
    thresholds = (
        (_logging.DEBUG, LOG_DEBUG),
        (_logging.INFO, LOG_INFO),
        (_logging.WARNING, LOG_WARNING),
        (_logging.ERROR, LOG_ERR),
        (_logging.CRITICAL, LOG_CRIT),
    )
    for cutoff, priority in thresholds:
        if levelno <= cutoff:
            return priority
    # Anything above CRITICAL becomes an alert.
    return LOG_ALERT
Map logging levels to journald priorities .
57,668
def get_args(self, func):
    """Return a mapping of *func*'s positional argument names to their
    default values; arguments without a default map to None.

    The ``self`` argument is skipped.  Star and keyword-only arguments
    are ignored, matching the original behaviour.

    :param func: the function or method to inspect
    :returns: dict of argument name -> default (or None)
    """
    # inspect.getargspec was removed in Python 3.11; getfullargspec is
    # the drop-in replacement for positional args and defaults.
    spec = inspect.getfullargspec(func)
    args = list(spec.args)
    defaults = list(spec.defaults or ())
    result = {}
    # Defaults align with the tail of the positional argument list.
    while defaults:
        result[args.pop()] = defaults.pop()
    for arg in args:
        if arg != 'self':
            result[arg] = None
    return result
Get the arguments of a method and return them as a dictionary with the supplied defaults; method arguments with no default are assigned None.
57,669
def guess_format(path):
    """Determine the file format name of a folder or a file.

    Raises UnknownFileFormatError when no registered format matches.
    """
    for name in (fmt.__name__ for fmt in formats if fmt.verify(path)):
        return name
    raise UnknownFileFormatError("Undefined file format: '{}'".format(path))
Determine the file format of a folder or a file
57,670
def load_data(path, fmt=None, bg_data=None, bg_fmt=None, meta_data=None,
              holo_kw=None, as_type="float32"):
    """Load experimental data.

    :param path: path to the data file or folder
    :param fmt: explicit format name; guessed from *path* when None
    :param bg_data: background data (QPImage instance or path)
    :param bg_fmt: explicit background format; guessed when None
    :param meta_data: mapping of qpimage meta-data keys to values
    :param holo_kw: keyword arguments for hologram analysis
    :param as_type: numpy dtype name for the phase/amplitude data
    :returns: the instantiated format data object
    :raises ValueError: for meta-data keys outside qpimage.meta.DATA_KEYS
    :raises WrongFileFormatError: when *fmt* does not verify for *path*
    """
    # Mutable default arguments ({} in the original) are shared across
    # calls — and this function mutates meta_data below.  Use None
    # defaults and copy so the caller's dict is never modified.
    meta_data = {} if meta_data is None else dict(meta_data)
    holo_kw = {} if holo_kw is None else holo_kw
    path = pathlib.Path(path).resolve()
    for kk in meta_data:
        if kk not in qpimage.meta.DATA_KEYS:
            msg = "Meta data key not allowed: {}".format(kk)
            raise ValueError(msg)
    # Drop undefined meta-data entries (NaN / None).
    for kk in list(meta_data.keys()):
        if meta_data[kk] in [np.nan, None]:
            meta_data.pop(kk)
    if fmt is None:
        fmt = guess_format(path)
    else:
        if not formats_dict[fmt].verify(path):
            msg = "Wrong file format '{}' for '{}'!".format(fmt, path)
            raise WrongFileFormatError(msg)
    dataobj = formats_dict[fmt](path=path, meta_data=meta_data,
                                holo_kw=holo_kw, as_type=as_type)
    if bg_data is not None:
        if isinstance(bg_data, qpimage.QPImage):
            # Background is an in-memory QPImage.
            dataobj.set_bg(bg_data)
        else:
            # Background is a path; load it with the same settings.
            bg_path = pathlib.Path(bg_data).resolve()
            if bg_fmt is None:
                bg_fmt = guess_format(bg_path)
            bgobj = formats_dict[bg_fmt](path=bg_path, meta_data=meta_data,
                                         holo_kw=holo_kw, as_type=as_type)
            dataobj.set_bg(bgobj)
    return dataobj
Load experimental data
57,671
def duration(seconds):
    """Render *seconds* as a string like ``'1 hr 2 min 3 sec'``.

    Values below one second yield ``'less than 1 sec'``; the input is
    rounded to the nearest whole second first.
    """
    if seconds < 1:
        return 'less than 1 sec'
    remaining = int(round(seconds))
    parts = []
    for size, unit in ((3600, 'hr'), (60, 'min'), (1, 'sec')):
        if remaining >= size:
            parts.append('{} {}'.format(remaining // size, unit))
            remaining %= size
    return ' '.join(parts)
Return a string of the form 1 hr 2 min 3 sec representing the given number of seconds .
57,672
def _get_shortcut_prefix(self, user_agent, standart_prefix):
    """Return the accesskey shortcut prefix for the browser identified
    by *user_agent*, or *standart_prefix* when it cannot be determined."""
    if user_agent is None:
        return standart_prefix
    ua = user_agent.lower()
    opera = 'opera' in ua
    mac = 'mac' in ua
    konqueror = 'konqueror' in ua
    spoofer = 'spoofer' in ua
    safari = 'applewebkit' in ua
    windows = 'windows' in ua
    chrome = 'chrome' in ua
    firefox = ('firefox' in ua) or ('minefield' in ua)
    internet_explorer = ('msie' in ua) or ('trident' in ua)
    if opera:
        return 'SHIFT + ESC'
    if chrome and mac and not spoofer:
        return 'CTRL + OPTION'
    if safari and not windows and not spoofer:
        return 'CTRL + ALT'
    if (not windows) and (safari or mac or konqueror):
        return 'CTRL'
    if firefox:
        return 'ALT + SHIFT'
    if chrome or internet_explorer:
        return 'ALT'
    return standart_prefix
Returns the shortcut prefix of browser .
57,673
def _get_role_description(self, role):
    """Look up the configured description for a WAI-ARIA role.

    Returns None when the role has no configured description.
    """
    parameter = 'role-' + role.lower()
    if not self.configure.has_parameter(parameter):
        return None
    return self.configure.get_parameter(parameter)
Returns the description of role .
57,674
def _get_language_description(self, language_code):
    """Look up the configured description for a language code.

    Falls back from the full code (e.g. ``pt-br``) to its primary
    subtag (``pt``); returns None when neither is configured.
    """
    language = language_code.lower()
    parameter = 'language-' + language
    if self.configure.has_parameter(parameter):
        return self.configure.get_parameter(parameter)
    if '-' in language:
        primary = re.split(r'\-', language)[0]
        parameter = 'language-' + primary
        if self.configure.has_parameter(parameter):
            return self.configure.get_parameter(parameter)
    return None
Returns the description of language .
57,675
def _get_description(self, element):
    """Build the accessible description of *element*.

    Tries, in order: title, aria-label, alt, label attributes; then the
    first resolvable id from aria-labelledby/aria-describedby; then the
    value of button-like inputs; finally the element's text content.
    Whitespace runs are collapsed to single spaces in the result.
    """
    description = None
    if element.has_attribute('title'):
        description = element.get_attribute('title')
    elif element.has_attribute('aria-label'):
        description = element.get_attribute('aria-label')
    elif element.has_attribute('alt'):
        description = element.get_attribute('alt')
    elif element.has_attribute('label'):
        description = element.get_attribute('label')
    elif ((element.has_attribute('aria-labelledby'))
            or (element.has_attribute('aria-describedby'))):
        # Both attributes hold whitespace-separated lists of element ids;
        # the first id that resolves supplies the description.
        if element.has_attribute('aria-labelledby'):
            description_ids = re.split('[ \n\r\t]+', element.get_attribute('aria-labelledby').strip())
        else:
            description_ids = re.split('[ \n\r\t]+', element.get_attribute('aria-describedby').strip())
        for description_id in description_ids:
            element_description = self.parser.find('#' + description_id).first_result()
            if element_description is not None:
                description = element_description.get_text_content()
                break
    elif ((element.get_tag_name() == 'INPUT') and (element.has_attribute('type'))):
        # Button-like inputs describe themselves through their value.
        type_attribute = element.get_attribute('type').lower()
        if (((type_attribute == 'button') or (type_attribute == 'submit')
                or (type_attribute == 'reset'))
                and (element.has_attribute('value'))):
            description = element.get_attribute('value')
    # Fall back to the element's own text when nothing else matched.
    if not bool(description):
        description = element.get_text_content()
    return re.sub('[ \n\r\t]+', ' ', description.strip())
Returns the description of element .
57,676
def _generate_list_shortcuts(self):
    """Create (or find) the before/after shortcut containers on the page
    and remember the <ul> elements that will receive shortcut items.

    Sets ``list_shortcuts_added`` when done so callers know the list
    containers exist.
    """
    id_container_shortcuts_before = (AccessibleDisplayImplementation.ID_CONTAINER_SHORTCUTS_BEFORE)
    id_container_shortcuts_after = (AccessibleDisplayImplementation.ID_CONTAINER_SHORTCUTS_AFTER)
    local = self.parser.find('body').first_result()
    if local is not None:
        # Container placed at the top of <body>.
        container_before = self.parser.find('#' + id_container_shortcuts_before).first_result()
        if ((container_before is None) and (self.attribute_accesskey_before)):
            container_before = self.parser.create_element('div')
            container_before.set_attribute('id', id_container_shortcuts_before)
            text_container_before = self.parser.create_element('span')
            text_container_before.set_attribute('class', AccessibleDisplayImplementation.CLASS_TEXT_SHORTCUTS)
            text_container_before.append_text(self.attribute_accesskey_before)
            container_before.append_element(text_container_before)
            local.prepend_element(container_before)
        if container_before is not None:
            # Reuse an existing <ul>, or create one for the shortcuts.
            self.list_shortcuts_before = self.parser.find(container_before).find_children('ul').first_result()
            if self.list_shortcuts_before is None:
                self.list_shortcuts_before = self.parser.create_element('ul')
                container_before.append_element(self.list_shortcuts_before)
        # Container placed at the bottom of <body>.
        container_after = self.parser.find('#' + id_container_shortcuts_after).first_result()
        if ((container_after is None) and (self.attribute_accesskey_after)):
            container_after = self.parser.create_element('div')
            container_after.set_attribute('id', id_container_shortcuts_after)
            text_container_after = self.parser.create_element('span')
            text_container_after.set_attribute('class', AccessibleDisplayImplementation.CLASS_TEXT_SHORTCUTS)
            text_container_after.append_text(self.attribute_accesskey_after)
            container_after.append_element(text_container_after)
            local.append_element(container_after)
        if container_after is not None:
            self.list_shortcuts_after = self.parser.find(container_after).find_children('ul').first_result()
            if self.list_shortcuts_after is None:
                self.list_shortcuts_after = self.parser.create_element('ul')
                container_after.append_element(self.list_shortcuts_after)
    self.list_shortcuts_added = True
Generate the list of shortcuts of page .
57,677
def _insert(self, element, new_element, before):
    """Insert *new_element* before or after *element*, honouring element
    semantics: append inside content containers, attach next to the
    <label> of form controls, and fall back to sibling insertion.

    ``before`` selects insertion before (True) or after (False).
    """
    tag_name = element.get_tag_name()
    # Elements whose description belongs inside them rather than beside.
    append_tags = ['BODY', 'A', 'FIGCAPTION', 'LI', 'DT', 'DD', 'LABEL', 'OPTION', 'TD', 'TH']
    controls = ['INPUT', 'SELECT', 'TEXTAREA']
    if tag_name == 'HTML':
        # Nothing may be inserted on <html> itself; recurse onto <body>.
        body = self.parser.find('body').first_result()
        if body is not None:
            self._insert(body, new_element, before)
    elif tag_name in append_tags:
        if before:
            element.prepend_element(new_element)
        else:
            element.append_element(new_element)
    elif tag_name in controls:
        # Form controls: insert into their associated label(s) instead —
        # first labels referencing the control's id, else ancestor labels.
        labels = []
        if element.has_attribute('id'):
            labels = self.parser.find('label[for="' + element.get_attribute('id') + '"]').list_results()
        if not labels:
            labels = self.parser.find(element).find_ancestors('label').list_results()
        for label in labels:
            self._insert(label, new_element, before)
    elif before:
        element.insert_before(new_element)
    else:
        element.insert_after(new_element)
Insert a element before or after other element .
57,678
def _force_read_simple(self, element, text_before, text_after, data_of):
    """Force screen readers to announce *text_before*/*text_after* around
    *element* by inserting marker <span> elements.

    Existing markers owned by this routine are replaced; when other
    (foreign) references to the element exist through *data_of*, nothing
    is inserted.
    """
    self.id_generator.generate_id(element)
    identifier = element.get_attribute('id')
    selector = '[' + data_of + '="' + identifier + '"]'
    reference_before = self.parser.find('.' + AccessibleDisplayImplementation.CLASS_FORCE_READ_BEFORE + selector).first_result()
    reference_after = self.parser.find('.' + AccessibleDisplayImplementation.CLASS_FORCE_READ_AFTER + selector).first_result()
    references = self.parser.find(selector).list_results()
    # Discount our own markers; anything left blocks the insertion.
    if reference_before in references:
        references.remove(reference_before)
    if reference_after in references:
        references.remove(reference_after)
    if not references:
        if text_before:
            # Replace any previous "before" marker.
            if reference_before is not None:
                reference_before.remove_node()
            span = self.parser.create_element('span')
            span.set_attribute('class', AccessibleDisplayImplementation.CLASS_FORCE_READ_BEFORE)
            span.set_attribute(data_of, identifier)
            span.append_text(text_before)
            self._insert(element, span, True)
        if text_after:
            # Replace any previous "after" marker.
            if reference_after is not None:
                reference_after.remove_node()
            span = self.parser.create_element('span')
            span.set_attribute('class', AccessibleDisplayImplementation.CLASS_FORCE_READ_AFTER)
            span.set_attribute(data_of, identifier)
            span.append_text(text_after)
            self._insert(element, span, False)
Force the screen reader display an information of element .
57,679
def _force_read(self, element, value, text_prefix_before, text_suffix_before,
                text_prefix_after, text_suffix_after, data_of):
    """Force the screen reader to announce *value* around *element*,
    wrapped in the given prefixes and suffixes."""
    text_before = ''
    text_after = ''
    if text_prefix_before or text_suffix_before:
        text_before = text_prefix_before + value + text_suffix_before
    if text_prefix_after or text_suffix_after:
        text_after = text_prefix_after + value + text_suffix_after
    self._force_read_simple(element, text_before, text_after, data_of)
Force the screen reader display an information of element with prefixes or suffixes .
57,680
def provider(func=None, *, singleton=False, injector=None):
    """Decorator marking a function as a dependency provider.

    May be used bare (``@provider``) or with options
    (``@provider(singleton=True)``).  When *injector* is given, the
    wrapped provider is registered with it immediately.
    """
    def decorator(fn):
        wrapped = _wrap_provider_func(fn, {'singleton': singleton})
        if injector:
            injector.register_provider(wrapped)
        return wrapped
    # Bare-decorator usage: func is the decorated function itself.
    return decorator(func) if func else decorator
Decorator to mark a function as a provider .
57,681
def inject(*args, **kwargs):
    """Mark a class or callable for dependency injection so a DI
    container knows it should inject dependencies into it.

    Raises DiayException for objects that are neither.
    """
    def wrapper(obj):
        if not (inspect.isclass(obj) or callable(obj)):
            raise DiayException("Don't know how to inject into %r" % obj)
        _inject_object(obj, *args, **kwargs)
        return obj
    return wrapper
Mark a class or function for injection meaning that a DI container knows that it should inject dependencies into it .
57,682
def register_plugin(self, plugin: Plugin):
    """Register a plugin object or class, wiring up its providers.

    Classes are registered lazily (instantiated on first use); instances
    have their bound provider methods registered directly.
    """
    if isinstance(plugin, Plugin):
        lazy = False
    elif issubclass(plugin, Plugin):
        lazy = True
    else:
        raise DiayException(
            'plugin %r must be an object/class of type Plugin' % plugin)
    # A class exposes plain functions; an instance exposes bound methods.
    predicate = inspect.isfunction if lazy else inspect.ismethod
    for _name, method in inspect.getmembers(plugin, predicate=predicate):
        if getattr(method, '__di__', {}).get('provides'):
            if lazy:
                self.register_lazy_provider_method(plugin, method)
            else:
                self.register_provider(method)
Register a plugin .
57,683
def register_provider(self, func):
    """Register *func* as a provider.

    The function must carry ``__di__['provides']``; otherwise a
    DiayException is raised.
    """
    di_meta = getattr(func, '__di__', {})
    if 'provides' not in di_meta:
        raise DiayException('function %r is not a provider' % func)
    self.factories[func.__di__['provides']] = func
Register a provider function .
57,684
def register_lazy_provider_method(self, cls, method):
    """Register an unbound provider *method* of *cls* lazily.

    The class is only instantiated (via ``self.get``) the first time the
    provider is actually invoked.
    """
    if 'provides' not in getattr(method, '__di__', {}):
        raise DiayException('method %r is not a provider' % method)

    @functools.wraps(method)
    def deferred(*args, **kwargs):
        instance = self.get(cls)
        return getattr(instance, method.__name__)(*args, **kwargs)

    self.factories[method.__di__['provides']] = deferred
Register a class method lazily as a provider .
57,685
def set_factory(self, thing: type, value, overwrite=False):
    """Set the factory for *thing*.

    Refuses to clobber an existing factory unless *overwrite* is True.
    """
    if not overwrite and thing in self.factories:
        raise DiayException('factory for %r already exists' % thing)
    self.factories[thing] = value
Set the factory for something .
57,686
def set_instance(self, thing: type, value, overwrite=False):
    """Set the instance for *thing*.

    Refuses to clobber an existing instance unless *overwrite* is True.
    """
    if not overwrite and thing in self.instances:
        raise DiayException('instance for %r already exists' % thing)
    self.instances[thing] = value
Set an instance of a thing .
57,687
def get(self, thing: type):
    """Resolve *thing* to an instance.

    Resolution order: existing instance, registered factory (the product
    is cached back as an instance when the factory is a singleton),
    class construction, then plain callable invocation.  Raises
    DiayException when nothing applies.
    """
    if thing in self.instances:
        return self.instances[thing]
    if thing in self.factories:
        fact = self.factories[thing]
        # The factory itself is resolved recursively before being used.
        ret = self.get(fact)
        # Singleton providers cache their product for future lookups.
        if hasattr(fact, '__di__') and fact.__di__['singleton']:
            self.instances[thing] = ret
        return ret
    if inspect.isclass(thing):
        return self._call_class_init(thing)
    elif callable(thing):
        return self.call(thing)
    raise DiayException('cannot resolve: %r' % thing)
Get an instance of some type .
57,688
def call(self, func, *args, **kwargs):
    """Call *func*, filling in type-hinted arguments from the container.

    Explicitly passed keyword arguments always win over resolved ones.
    Raises DiayException when the call still fails with a TypeError.
    """
    for name, value in self._guess_kwargs(func).items():
        kwargs.setdefault(name, value)
    try:
        return func(*args, **kwargs)
    except TypeError as exc:
        msg = ("tried calling function %r but failed, probably "
               "because it takes arguments that cannot be resolved") % func
        raise DiayException(msg) from exc
Call a function resolving any type - hinted arguments .
57,689
def do_load(self, filename):
    """Load a disk image into the current session for analysis.

    I/O failures are logged rather than raised.
    """
    try:
        self.__session.load(filename)
    except IOError as err:
        self.logger.error(err.strerror)
Load disk image for analysis
57,690
def do_session(self, args):
    """Print information about the current analysis session."""
    if self.__session.filename is None:
        filename = 'Not specified'
    else:
        filename = self.__session.filename
    print('{0: <30}: {1}'.format('Filename', filename))
Print current session information
57,691
def _convert_iterable(self, iterable):
    """Wrap each element of *iterable* with ``self._wrapper``.

    The iterable is returned untouched when the wrapper is not callable.
    """
    wrapper = self._wrapper
    if not callable(wrapper):
        return iterable
    return [wrapper(item) for item in iterable]
Converts elements returned by an iterable into instances of self . _wrapper
57,692
def get(self, **kwargs):
    """Return the first element matching the given lookup parameters.

    Raises QueryList.NotFound when nothing matches.
    """
    for candidate in self:
        if self._check_element(kwargs, candidate):
            return candidate
    kv_str = self._stringify_kwargs(kwargs)
    raise QueryList.NotFound("Element not found with attributes: %s" % kv_str)
Returns the first object encountered that matches the specified lookup parameters .
57,693
def runserver(ctx, conf, port, foreground):
    """Run the fnExchange server.

    Reads configuration from *conf*; a non-falsy *port* overrides the
    configured server port.  Background mode is not implemented yet.
    """
    config = read_config(conf)
    debug = config['conf'].get('debug', False)
    click.echo('Debug mode {0}.'.format('on' if debug else 'off'))
    # CLI-supplied port takes precedence over the config file.
    port = port or config['conf']['server']['port']
    app_settings = {'debug': debug,
                    'auto_reload': config['conf']['server'].get('auto_reload', False)}
    handlers_settings = __create_handler_settings(config)
    if foreground:
        click.echo('Requested mode: foreground')
        start_app(port, app_settings, handlers_settings)
    else:
        click.echo('Requested mode: background')
        raise NotImplementedError
Run the fnExchange server
57,694
def as_repository(resource):
    """Adapt a registered resource to its configured repository."""
    registry = get_current_registry()
    if IInterface in provided_by(resource):
        # Interfaces are first resolved to their collection class.
        resource = registry.getUtility(resource, name='collection-class')
    return registry.getAdapter(resource, IRepository)
Adapts the given registered resource to its configured repository .
57,695
def commit_veto(request, response):
    """Strict commit veto to use with the transaction manager.

    Vetoes the commit (returns True) unless the response carries an
    ``x-tm: commit`` header or, absent that header, a 2xx status.

    :param request: current request (unused; required by the veto API)
    :param response: response whose headers and status decide the outcome
    :returns: True when the transaction should be aborted
    """
    tm_header = response.headers.get('x-tm')
    # Idiom fix: `is not None` instead of `not ... is None`.
    if tm_header is not None:
        return tm_header != 'commit'
    # No x-tm header: veto anything that is not a 2xx response.  (The
    # original also re-tested tm_header == 'commit' here, which is
    # always False in this branch since tm_header is None.)
    return not response.status.startswith('2')
Strict commit veto to use with the transaction manager .
57,696
def set(cls, key, obj):
    """Register *obj* as the global object for *key* and return it.

    Raises ValueError when the key is already occupied.
    """
    with cls._lock:
        if cls._globs.get(key) is not None:
            raise ValueError('Duplicate key "%s".' % key)
        cls._globs[key] = obj
        return cls._globs[key]
Sets the given object as global object for the given key .
57,697
def as_representer(resource, content_type):
    """Create a representer for *resource* serialized as *content_type*."""
    registry = get_current_registry()
    representer_registry = registry.queryUtility(IRepresenterRegistry)
    return representer_registry.create(type(resource), content_type)
Adapts the given resource and content type to a representer .
57,698
def data_element_tree_to_string(data_element):
    """Create a human-readable string representation of a data element
    tree.

    Collections print their members in brackets, linked elements print
    url/kind/relation, and plain elements print their attribute map;
    nested resource attributes recurse with increased indentation.
    """
    def __dump(data_el, stream, offset):
        # *offset* is the current indentation depth in spaces.
        name = data_el.__class__.__name__
        stream.write("%s%s" % (' ' * offset, name))
        offset += 2
        ifcs = provided_by(data_el)
        if ICollectionDataElement in ifcs:
            # Collections: one member per line inside brackets.
            stream.write("[")
            first_member = True
            for member_data_el in data_el.get_members():
                if first_member:
                    stream.write('%s' % os.linesep + ' ' * offset)
                    first_member = False
                else:
                    stream.write(',%s' % os.linesep + ' ' * offset)
                __dump(member_data_el, stream, offset)
            stream.write("]")
        else:
            stream.write("(")
            if ILinkedDataElement in ifcs:
                stream.write("url=%s, kind=%s, relation=%s"
                             % (data_el.get_url(), data_el.get_kind(),
                                data_el.get_relation()))
            else:
                first_attr = True
                for attr_name, attr_value in iteritems_(data_el.data):
                    if first_attr:
                        first_attr = False
                    else:
                        stream.write(',%s' % os.linesep
                                     + ' ' * (offset + len(name) + 1))
                    # Skip unset attributes.
                    if attr_value is None:
                        continue
                    if not IResourceDataElement in provided_by(attr_value):
                        stream.write("%s=%s" % (attr_name, attr_value))
                    else:
                        # Nested resource: recurse.
                        stream.write("%s=" % attr_name)
                        __dump(attr_value, stream, offset)
            stream.write(')')
    stream = NativeIO()
    __dump(data_element, stream, 0)
    return stream.getvalue()
Creates a string representation of the given data element tree .
57,699
def initialize_path(self, path_num=None):
    """Make the consumer state ready for the next Monte Carlo path and
    snapshot each consumer's state."""
    for consumer in self.consumers:
        consumer.initialize_path(path_num)
    self.state = [consumer.state for consumer in self.consumers]
Make the consumer state ready for the next Monte Carlo path.