| idx (int64, 0 to 63k) | question (string, lengths 61 to 4.03k) | target (string, lengths 6 to 1.23k) |
|---|---|---|
3,700
|
def return_selected_form_items(form_info):
    selected_keys = []
    selected_names = []
    for chosen in form_info:
        if chosen['choice']:
            selected_keys.append(chosen['key'])
            selected_names.append(chosen['name'])
    return selected_keys, selected_names
|
Returns the lists of selected keys and names from a given form.
|
3,701
|
def selection_error_control(self, form_info):
    keys, names = self.return_selected_form_items(form_info['ChannelList'])
    chosen_channels_number = len(keys)
    if form_info['new_channel'] and chosen_channels_number < 2:
        return False, _(u"You should choose at least two channels to merge into a new channel.")
    elif form_info['existing_channel'] and chosen_channels_number == 0:
        return False, _(u"You should choose at least one channel to merge into an existing channel.")
    elif form_info['find_chosen_channel'] and chosen_channels_number != 1:
        return False, _(u"You should choose one channel for the split operation.")
    return True, None
|
Validates the form selection against the rules of the requested operation and returns an error message when the selection does not comply.
|
3,702
|
from itertools import count

def _eratosthenes():
    d = {}
    for q in count(2):
        p = d.pop(q, None)
        if p is None:
            # q is prime; mark q*q as the first composite it will produce
            yield q
            d[q * q] = q
        else:
            # q is composite; advance the prime p to its next unclaimed multiple
            x = p + q
            while x in d:
                x += p
            d[x] = p
|
Yields the sequence of prime numbers via an incremental Sieve of Eratosthenes.
|
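A minimal usage sketch of the generator above; `islice` is from the standard library:

```python
from itertools import islice

print(list(islice(_eratosthenes(), 10)))  # [2, 3, 5, 7, 11, 13, 17, 19, 23, 29]
```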
3,703
|
def atoms_order(self):
    if not len(self):
        return {}
    elif len(self) == 1:
        return dict.fromkeys(self, 2)
    params = {n: (int(node), tuple(sorted(int(edge) for edge in self._adj[n].values())))
              for n, node in self.atoms()}
    newlevels = {}
    countprime = iter(primes)
    weights = {x: newlevels.get(y) or newlevels.setdefault(y, next(countprime))
               for x, y in sorted(params.items(), key=itemgetter(1))}
    tries = len(self) * 4
    numb = len(set(weights.values()))
    stab = 0
    while tries:
        oldnumb = numb
        neweights = {}
        countprime = iter(primes)
        # relabel: each atom's weight is its own weight squared times the product
        # of its neighbours' weights, then classes are renumbered with primes
        tmp = {n: reduce(mul, (weights[x] for x in m), weights[n] ** 2)
               for n, m in self._adj.items()}
        weights = {x: (neweights.get(y) or neweights.setdefault(y, next(countprime)))
                   for x, y in sorted(tmp.items(), key=itemgetter(1))}
        numb = len(set(weights.values()))
        if numb == len(self):  # every atom now has a unique weight
            break
        elif numb == oldnumb:
            x = Counter(weights.values())
            if x[min(x)] > 1:
                if stab == 3:
                    break
            elif stab >= 2:
                break
            stab += 1
        elif stab:
            stab = 0
        tries -= 1
        if not tries and numb < oldnumb:
            warning('morgan. number of attempts exceeded. uniqueness has decreased. '
                    'next attempt will be made')
            tries = 1
    else:
        warning('morgan. number of attempts exceeded')
    return weights
|
Morgan-like algorithm for ordering graph nodes.
|
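To make the prime-product relabeling above concrete, here is one refinement round in isolation (the toy adjacency and initial weights are illustrative):

```python
from functools import reduce
from operator import mul

adj = {1: [2], 2: [1, 3], 3: [2]}   # a three-node path graph
weights = {1: 2, 2: 3, 3: 2}        # initial classes, e.g. by degree
tmp = {n: reduce(mul, (weights[x] for x in m), weights[n] ** 2) for n, m in adj.items()}
print(tmp)  # {1: 12, 2: 36, 3: 12}: the endpoints stay equivalent, the middle node separates
```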
3,704
|
def init_manual(cls, pawn_value, knight_value, bishop_value, rook_value, queen_value, king_value):
    piece_values = cls()
    piece_values.PAWN_VALUE = pawn_value
    piece_values.KNIGHT_VALUE = knight_value
    piece_values.BISHOP_VALUE = bishop_value
    piece_values.ROOK_VALUE = rook_value
    piece_values.QUEEN_VALUE = queen_value
    piece_values.KING_VALUE = king_value
    return piece_values
|
Manual init method for external piece values.
|
3,705
|
def val(self, piece, ref_color):
    if piece is None:
        return 0
    if ref_color == piece.color:
        const = 1
    else:
        const = -1
    if isinstance(piece, Pawn):
        return self.PAWN_VALUE * const
    elif isinstance(piece, Queen):
        return self.QUEEN_VALUE * const
    elif isinstance(piece, Bishop):
        return self.BISHOP_VALUE * const
    elif isinstance(piece, Rook):
        return self.ROOK_VALUE * const
    elif isinstance(piece, Knight):
        return self.KNIGHT_VALUE * const
    elif isinstance(piece, King):
        return self.KING_VALUE * const
    return 0
|
Finds the value of a piece; the value is negated when the piece belongs to the opponent of ref_color.
|
3,706
|
def get_field_cache(self, cache_type='es'):
    if cache_type == 'kibana':
        try:
            search_results = urlopen(self.get_url).read().decode('utf-8')
        except HTTPError:
            return []
        index_pattern = json.loads(search_results)
        fields_str = index_pattern['_source']['fields']
        return json.loads(fields_str)
    elif cache_type == 'es' or cache_type.startswith('elastic'):
        search_results = urlopen(self.es_get_url).read().decode('utf-8')
        es_mappings = json.loads(search_results)
        field_cache = []
        for (index_name, val) in iteritems(es_mappings):
            if index_name != self.index:
                m_dict = es_mappings[index_name]['mappings']
                mappings = self.get_index_mappings(m_dict)
                field_cache.extend(mappings)
        field_cache = self.dedup_field_cache(field_cache)
        return field_cache
    self.pr_err("Unknown cache type: %s" % cache_type)
    return None
|
Returns a list of field mappings.
|
3,707
|
def post_field_cache(self, field_cache):
    index_pattern = self.field_cache_to_index_pattern(field_cache)
    resp = requests.post(self.post_url, data=index_pattern).text
    resp = json.loads(resp)
    return 0
|
Posts the given field_cache (a list of field mappings) as an index pattern.
|
3,708
|
def field_cache_to_index_pattern(self, field_cache):
    mapping_dict = {}
    mapping_dict['customFormats'] = "{}"
    mapping_dict['title'] = self.index_pattern
    mapping_dict['fields'] = json.dumps(field_cache, separators=(',', ':'))
    mapping_str = json.dumps(mapping_dict, separators=(',', ':'))
    return mapping_str
|
Returns a .kibana index-pattern doc_type serialized as JSON.
|
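A sketch of the double encoding performed above: the field list is serialized first, so it ends up as an escaped JSON string inside the outer document (the index-pattern title and field are illustrative):

```python
import json

field_cache = [{'name': '@timestamp', 'type': 'date'}]
mapping_dict = {'customFormats': "{}",
                'title': 'logstash-*',
                'fields': json.dumps(field_cache, separators=(',', ':'))}
print(json.dumps(mapping_dict, separators=(',', ':')))
# {"customFormats":"{}","title":"logstash-*","fields":"[{\"name\":\"@timestamp\",\"type\":\"date\"}]"}
```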
3,709
|
def check_mapping(self, m):
    if 'name' not in m:
        self.pr_dbg("Missing %s" % "name")
        return False
    for x in ['analyzed', 'indexed', 'type', 'scripted', 'count']:
        if x not in m or m[x] == "":
            self.pr_dbg("Missing %s" % x)
            self.pr_dbg("Full %s" % m)
            return False
    if 'doc_values' not in m or m['doc_values'] == "":
        if not m['name'].startswith('_'):
            self.pr_dbg("Missing %s" % "doc_values")
            return False
        m['doc_values'] = False
    return True
|
Asserts that a minimum set of fields is present in a cache entry; does not validate their contents.
|
3,710
|
def get_index_mappings(self, index):
    fields_arr = []
    for (key, val) in iteritems(index):
        doc_mapping = self.get_doc_type_mappings(index[key])
        if doc_mapping is None:
            return None
        fields_arr.extend(doc_mapping)
    return fields_arr
|
Converts all of an index's doc_types to the .kibana format.
|
3,711
|
def get_doc_type_mappings(self, doc_type):
    doc_fields_arr = []
    found_score = False
    for (key, val) in iteritems(doc_type):
        add_it = False
        retdict = {}
        if not key.startswith('_'):
            if 'mapping' not in doc_type[key]:
                self.pr_err("No mapping in doc_type[%s]" % key)
                return None
            if key in doc_type[key]['mapping']:
                subkey_name = key
            else:
                subkey_name = re.sub(r'.*\.', '', key)
            if subkey_name not in doc_type[key]['mapping']:
                self.pr_err("Couldn't find subkey " +
                            "doc_type[%s]['mapping'][%s]" % (key, subkey_name))
                return None
            retdict = self.get_field_mappings(doc_type[key]['mapping'][subkey_name])
            add_it = True
        if key in self.sys_mappings:
            retdict['analyzed'] = False
            retdict['indexed'] = False
            if key == '_source':
                retdict = self.get_field_mappings(doc_type[key]['mapping'][key])
                retdict['type'] = "_source"
            elif key == '_score':
                retdict['type'] = "number"
            elif 'type' not in retdict:
                retdict['type'] = "string"
            add_it = True
        if add_it:
            retdict['name'] = key
            retdict['count'] = 0
            retdict['scripted'] = False
            if not self.check_mapping(retdict):
                self.pr_err("Error, invalid mapping")
                return None
            doc_fields_arr.append(retdict)
    # found_score is never set True above, so a synthetic _score entry is always appended
    if not found_score:
        doc_fields_arr.append({"name": "_score", "type": "number", "count": 0,
                               "scripted": False, "indexed": False,
                               "analyzed": False, "doc_values": False})
    return doc_fields_arr
|
Converts all of a doc_type's fields to the .kibana format.
|
3,712
|
def get_field_mappings(self, field):
    retdict = {}
    retdict['indexed'] = False
    retdict['analyzed'] = False
    for (key, val) in iteritems(field):
        if key in self.mappings:
            if (key == 'type' and
                    (val == "long" or val == "integer" or
                     val == "double" or val == "float")):
                val = "number"
            retdict[key] = val
        if key == 'index' and val != "no":
            retdict['indexed'] = True
            if val == "analyzed":
                retdict['analyzed'] = True
    return retdict
|
Converts ES field mappings to .kibana field mappings.
|
3,713
|
def is_kibana_cache_incomplete(self, es_cache, k_cache):
    k_dict = {}
    for field in k_cache:
        k_dict[field['name']] = field
        for ign_f in self.mappings_ignore:
            k_dict[field['name']][ign_f] = 0
    es_dict = {}
    for field in es_cache:
        es_dict[field['name']] = field
        for ign_f in self.mappings_ignore:
            es_dict[field['name']][ign_f] = 0
    es_set = set(es_dict.keys())
    k_set = set(k_dict.keys())
    return len(es_set - k_set.intersection(es_set)) > 0
|
Tests whether k_cache is incomplete, i.e. missing fields that are present in es_cache.
|
3,714
|
def list_to_compare_dict(self, list_form):
    compare_dict = {}
    for field in list_form:
        if field['name'] in compare_dict:
            self.pr_dbg("List has duplicate field %s:\n%s" %
                        (field['name'], compare_dict[field['name']]))
            if compare_dict[field['name']] != field:
                self.pr_dbg("And values are different:\n%s" % field)
            return None
        compare_dict[field['name']] = field
        for ign_f in self.mappings_ignore:
            compare_dict[field['name']][ign_f] = 0
    return compare_dict
|
Converts a list into a dict keyed by field name so it can be queried more easily.
|
3,715
|
def compare_field_caches(self, replica, original):
    if original is None:
        original = []
    if replica is None:
        replica = []
    self.pr_dbg("Comparing orig with %s fields to replica with %s fields" %
                (len(original), len(replica)))
    orig = self.list_to_compare_dict(original)
    if orig is None:
        self.pr_dbg("Original has duplicate fields")
        return 1
    repl = self.list_to_compare_dict(replica)
    if repl is None:
        self.pr_dbg("Replica has duplicate fields")
        return 1
    orig_found = {}
    for (key, field) in iteritems(repl):
        field_name = field['name']
        if field_name not in orig:
            self.pr_dbg("Replica has field not found in orig %s: %s" % (field_name, field))
            return 1
        orig_found[field_name] = True
        if orig[field_name] != field:
            self.pr_dbg("Field in replica doesn't match orig:")
            self.pr_dbg("orig:%s\nrepl:%s" % (orig[field_name], field))
            return 1
    unfound = set(orig_found.keys()) - set(repl.keys())
    if len(unfound) > 0:
        self.pr_dbg("Orig contains fields that were not in replica")
        self.pr_dbg('%s' % unfound)
        return 1
    self.pr_dbg("Original matches replica")
    return 0
|
Verifies that original is a subset of replica; returns 0 on match and 1 on any mismatch.
|
3,716
|
def start_daemon_thread(target, args=()):
    th = Thread(target=target, args=args)
    th.daemon = True
    th.start()
    return th
|
Starts a daemon thread for the given target function and arguments.
|
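A minimal usage sketch of the helper above (the worker function and interval are illustrative; the helper itself needs `from threading import Thread`):

```python
import time

def heartbeat(interval):
    while True:
        print("tick")
        time.sleep(interval)

th = start_daemon_thread(heartbeat, args=(1,))
time.sleep(3)  # daemon threads die with the main thread, so keep it alive to see output
```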
3,717
|
def serialize_dict_keys(d, prefix=""):
    keys = []
    for k, v in d.iteritems():  # Python 2 dict API
        fqk = '%s%s' % (prefix, k)
        keys.append(fqk)
        if isinstance(v, dict):
            keys.extend(serialize_dict_keys(v, prefix="%s." % fqk))
    return keys
|
Returns all keys in a dictionary, recursing into nested dicts and dot-joining each key path.
|
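A quick example of the recursion above (Python 2, since the function uses dict.iteritems):

```python
d = {'a': {'b': 1, 'c': {'d': 2}}}
print(serialize_dict_keys(d))  # ['a', 'a.b', 'a.c', 'a.c.d'] (dict order may vary)
```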
3,718
|
def set_user(self, user):
    self.session['user_id'] = user.key
    self.session['user_data'] = user.clean_value()
    role = self.get_role()
    self.session['role_id'] = role.key
    self.current.role_id = role.key
    self.current.user_id = user.key
    self.session['permissions'] = role.get_permissions()
|
Writes user data to the session.
|
3,719
|
def contains_opposite_color_piece(self, square, position):
    return (not position.is_square_empty(square) and
            position.piece_at_square(square).color != self.color)
|
Finds whether a square on the board is occupied by a piece belonging to the opponent.
|
3,720
|
def gettext(message, domain=DEFAULT_DOMAIN):
    if six.PY2:
        return InstalledLocale._active_catalogs[domain].ugettext(message)
    else:
        return InstalledLocale._active_catalogs[domain].gettext(message)
|
Mark a message as translatable and translate it.
|
3,721
|
def gettext_lazy(message, domain=DEFAULT_DOMAIN):
    return LazyProxy(gettext, message, domain=domain, enable_cache=False)
|
Mark a message as translatable but delay the translation until the message is used.
|
3,722
|
def ngettext(singular, plural, n, domain=DEFAULT_DOMAIN):
    if six.PY2:
        return InstalledLocale._active_catalogs[domain].ungettext(singular, plural, n)
    else:
        return InstalledLocale._active_catalogs[domain].ngettext(singular, plural, n)
|
Mark a message as translatable and translate it, taking plural forms into account.
|
3,723
|
def ngettext_lazy(singular, plural, n, domain=DEFAULT_DOMAIN):
    return LazyProxy(ngettext, singular, plural, n, domain=domain, enable_cache=False)
|
Mark a message with plural forms as translatable and delay the translation until the message is used.
|
3,724
|
def install_language(cls, language_code):
    if language_code == cls.language:
        return
    try:
        cls._active_catalogs = cls._translation_catalogs[language_code]
        cls.language = language_code
        log.debug('Installed language %s', language_code)
    except KeyError:
        default = settings.DEFAULT_LANG
        log.warning('Unknown language %s, falling back to %s', language_code, default)
        cls._active_catalogs = cls._translation_catalogs[default]
        cls.language = default
|
Install the translations for the language specified by language_code.
|
3,725
|
def install_locale(cls, locale_code, locale_type):
    if locale_code == getattr(cls, locale_type):
        return
    try:
        locale = Locale(locale_code)
        log.debug('Installed locale %s', locale_code)
    except UnknownLocaleError:
        default = settings.DEFAULT_LOCALIZATION_FORMAT
        log.warning('Unknown locale %s, falling back to %s', locale_code, default)
        locale = Locale(default)
    setattr(cls, locale_type, locale.language)
|
Install the locale specified by locale_code for localizations of type locale_type.
|
3,726
|
def _rotate_vector(x, y, x2, y2, x1, y1):
    angle = atan2(y2 - y1, x2 - x1)
    cos_rad = cos(angle)
    sin_rad = sin(angle)
    return cos_rad * x + sin_rad * y, -sin_rad * x + cos_rad * y
|
Rotates the vector (x, y) through the angle defined by the vector (x2 - x1, y2 - y1).
|
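A quick numeric check of the rotation above (values chosen for illustration): with (x1, y1) = (0, 0) and (x2, y2) = (0, 1) the reference angle is 90 degrees, and the unit vector (1, 0) maps to roughly (0, -1), i.e. the rotation is applied clockwise relative to the reference direction.

```python
from math import atan2, cos, sin

print(_rotate_vector(1, 0, 0, 1, 0, 0))  # approximately (0.0, -1.0)
```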
3,727
|
def get_by_index(self, index):
    index -= 1
    if 0 <= index < len(CocaineHeaders.STATIC_TABLE):
        return CocaineHeaders.STATIC_TABLE[index]
    index -= len(CocaineHeaders.STATIC_TABLE)
    if 0 <= index < len(self.dynamic_entries):
        return self.dynamic_entries[index]
    raise InvalidTableIndex("Invalid table index %d" % index)
|
Returns the header-table entry at the given 1-based index, covering the static table first and the dynamic entries after it.
|
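The 1-based, static-then-dynamic indexing scheme above can be illustrated in isolation (table contents are made up):

```python
STATIC = [('one', '1'), ('two', '2')]
dynamic = [('three', '3')]

def lookup(index):
    index -= 1
    if 0 <= index < len(STATIC):
        return STATIC[index]
    index -= len(STATIC)
    if 0 <= index < len(dynamic):
        return dynamic[index]
    raise IndexError(index)

assert lookup(1) == ('one', '1')
assert lookup(3) == ('three', '3')  # first dynamic entry sits right after the static table
```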
3,728
|
def add(self, name, value):
    size = table_entry_size(name, value)
    if size > self._maxsize:
        self.dynamic_entries.clear()
        self._current_size = 0
    elif self._maxsize > 0:
        self.dynamic_entries.appendleft((name, value))
        self._current_size += size
        self._shrink()
|
Adds a new entry to the table, evicting older entries if it grows past maxsize.
|
3,729
|
def search(self, name, value):
    partial = None
    header_name_search_result = CocaineHeaders.STATIC_TABLE_MAPPING.get(name)
    if header_name_search_result:
        index = header_name_search_result[1].get(value)
        if index is not None:
            return index, name, value
        partial = (header_name_search_result[0], name, None)
    offset = len(CocaineHeaders.STATIC_TABLE)
    for (i, (n, v)) in enumerate(self.dynamic_entries):
        if n == name:
            if v == value:
                return i + offset + 1, n, v
            elif partial is None:
                partial = (i + offset + 1, n, None)
    return partial
|
Searches the table for the entry specified by name and value, returning a partial (name-only) match if no exact match exists.
|
3,730
|
def _shrink(self):
    cursize = self._current_size
    while cursize > self._maxsize:
        name, value = self.dynamic_entries.pop()
        cursize -= table_entry_size(name, value)
    self._current_size = cursize
|
Shrinks the dynamic table until it is at or below maxsize.
|
3,731
|
def safe_print(ustring, errors='replace', **kwargs):
    encoding = sys.stdout.encoding or 'utf-8'
    if sys.version_info[0] == 3:
        print(ustring, **kwargs)
    else:
        bytestr = ustring.encode(encoding, errors=errors)
        print(bytestr, **kwargs)
|
Safely prints a unicode string regardless of the stdout encoding.
|
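Usage is the same on both interpreters; on the Python 2 path unencodable characters are replaced instead of raising, and extra keyword arguments pass through to print() (sample strings are illustrative):

```python
safe_print(u'na\u00efve caf\u00e9')  # Python 2: bytes encoded with sys.stdout.encoding, '?' for the rest
safe_print(u'done', end='!\n')       # kwargs are forwarded to print()
```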
3,732
|
def edit_permissions(self):
    key = self.current.input['object_id']
    self.current.task_data['role_id'] = key
    role = RoleModel.objects.get(key=key)
    permission_tree = self._permission_trees(PermissionModel.objects)
    role_tree = self._apply_role_tree(permission_tree, role)
    self.output['objects'] = [
        {
            'type': 'tree-toggle',
            'action': 'apply_change',
            'trees': self._format_tree_output(role_tree),
        },
    ]
    self.form_out(PermissionForm())
|
Creates the view used to edit permissions.
|
3,733
|
def _permission_trees(permissions):
    treecache = PermissionTreeCache()
    cached = treecache.get()
    if not cached:
        tree = PermissionTreeBuilder()
        for permission in permissions:
            tree.insert(permission)
        result = tree.serialize()
        treecache.set(result)
        return result
    return cached
|
Gets the cached permission tree, or builds and caches a new one if necessary.
|
3,734
|
def _traverse_tree(tree, path):
    path_steps = (step for step in path.split('.') if step != '')
    first_step = path_steps.next()  # Python 2 generator API
    subtree = tree[first_step]
    for step in path_steps:
        subtree = subtree['children'][step]
    return subtree
|
Traverses the permission tree, returning the permission at the given permission path.
|
3,735
|
def _format_subtree(self, subtree):
    subtree['children'] = list(subtree['children'].values())
    for child in subtree['children']:
        self._format_subtree(child)
    return subtree
|
Recursively format all subtrees.
|
3,736
|
def apply_change(self):
    changes = self.input['change']
    key = self.current.task_data['role_id']
    role = RoleModel.objects.get(key=key)
    for change in changes:
        permission = PermissionModel.objects.get(code=change['id'])
        if change['checked'] is True:
            role.add_permission(permission)
        else:
            role.remove_permission(permission)
    role.save()
|
Applies the changes to the permissions of the role.
|
3,737
|
def write(self, data):
    m = self._convert_structure(data)
    self._file.write(self._format_mol(*m))
    self._file.write('M END\n')
    for k, v in data.meta.items():
        self._file.write(f'> <{k}>\n{v}\n')
    self._file.write('$$$$\n')
|
Writes a single molecule into the file.
|
3,738
|
def save_workflow_to_cache(self, serialized_wf_instance):
    task_data = self.current.task_data.copy()
    for k, v in list(task_data.items()):
        if k.startswith('_'):
            del task_data[k]
    if 'cmd' in task_data:
        del task_data['cmd']
    self.wf_state.update({'step': serialized_wf_instance,
                          'data': task_data,
                          'name': self.current.workflow_name,
                          'wf_id': self.workflow_spec.wf_id})
    if self.current.lane_id:
        self.current.pool[self.current.lane_id] = self.current.role.key
    self.wf_state['pool'] = self.current.pool
    self.current.log.debug("POOL Content before WF Save: %s" % self.current.pool)
    self.current.wf_cache.save(self.wf_state)
|
If we have not yet reached the end of the workflow, saves the workflow state and task_data to the cache.
|
3,739
|
def get_pool_context(self):
    context = {self.current.lane_id: self.current.role, 'self': self.current.role}
    for lane_id, role_id in self.current.pool.items():
        if role_id:
            # bind role_id per iteration; a bare closure would resolve every
            # proxy to the final role_id of the loop
            context[lane_id] = lazy_object_proxy.Proxy(
                lambda role_id=role_id: self.role_model(super_context).objects.get(role_id))
    return context
|
Builds the context for the workflow pool.
|
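The default-argument binding in the loop above matters; a bare closure would capture the loop variable itself, as this standalone comparison shows:

```python
funcs_late = [lambda: i for i in range(3)]
funcs_bound = [lambda i=i: i for i in range(3)]
print([f() for f in funcs_late])   # [2, 2, 2] - every closure sees the final i
print([f() for f in funcs_bound])  # [0, 1, 2] - the default argument freezes i per iteration
```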
3,740
|
def load_workflow_from_cache(self):
    if not self.current.new_token:
        self.wf_state = self.current.wf_cache.get(self.wf_state)
        self.current.task_data = self.wf_state['data']
        self.current.set_client_cmds()
        self.current.pool = self.wf_state['pool']
        return self.wf_state['step']
|
Loads the serialized workflow state and data from the cache and updates self.current.task_data.
|
3,741
|
def serialize_workflow(self):
    self.workflow.refresh_waiting_tasks()
    return CompactWorkflowSerializer().serialize_workflow(self.workflow, include_spec=False)
|
Serializes the current WF.
|
3,742
|
def find_workflow_path(self):
    for pth in settings.WORKFLOW_PACKAGES_PATHS:
        path = "%s/%s.bpmn" % (pth, self.current.workflow_name)
        if os.path.exists(path):
            return path
    err_msg = "BPMN file cannot be found: %s" % self.current.workflow_name
    log.error(err_msg)
    raise RuntimeError(err_msg)
|
Tries to find the path of the workflow diagram file under WORKFLOW_PACKAGES_PATHS.
|
3,743
|
def get_worfklow_spec(self):
    if self.current.workflow_name not in self.workflow_spec_cache:
        try:
            self.current.wf_object = BPMNWorkflow.objects.get(name=self.current.workflow_name)
        except ObjectDoesNotExist:
            self.current.wf_object = BPMNWorkflow.objects.get(name='not_found')
            self.current.task_data['non-existent-wf'] = self.current.workflow_name
            self.current.workflow_name = 'not_found'
        xml_content = self.current.wf_object.xml.body
        spec = ZopsSerializer().deserialize_workflow_spec(xml_content, self.current.workflow_name)
        spec.wf_id = self.current.wf_object.key
        self.workflow_spec_cache[self.current.workflow_name] = spec
    return self.workflow_spec_cache[self.current.workflow_name]
|
Generates and caches the workflow spec package from BPMN diagrams read from disk.
|
3,744
|
def _save_or_delete_workflow(self):
    if not self.current.task_type.startswith('Start'):
        if self.current.task_name.startswith('End') and not self.are_we_in_subprocess():
            self.wf_state['finished'] = True
            self.wf_state['finish_date'] = datetime.now().strftime(
                settings.DATETIME_DEFAULT_FORMAT)
            if (self.current.workflow_name not in settings.EPHEMERAL_WORKFLOWS and
                    not self.wf_state['in_external']):
                wfi = WFCache(self.current).get_instance()
                TaskInvitation.objects.filter(instance=wfi, role=self.current.role,
                                              wf_name=wfi.wf.name).delete()
            self.current.log.info("Delete WFCache: %s %s" % (self.current.workflow_name,
                                                             self.current.token))
        self.save_workflow_to_cache(self.serialize_workflow())
|
Calls the real save method once we are past the beginning of the workflow.
|
3,745
|
def start_engine(self, **kwargs):
    self.current = WFCurrent(**kwargs)
    self.wf_state = {'in_external': False, 'finished': False}
    if not self.current.new_token:
        self.wf_state = self.current.wf_cache.get(self.wf_state)
        self.current.workflow_name = self.wf_state['name']
        if 'subject' in self.wf_state:
            self.current.input['id'] = self.wf_state['subject']
            self.current.task_data['object_id'] = self.wf_state['subject']
    self.check_for_authentication()
    self.check_for_permission()
    self.workflow = self.load_or_create_workflow()
    if 'form' in self.current.input:
        form = self.current.input['form']
        if 'form_name' in form:
            self.current.task_data[form['form_name']] = form
    start_init_values = self.workflow_spec.wf_properties.get('init', 'False') == 'True'
    if start_init_values:
        WFInit = get_object_from_path(settings.WF_INITIAL_VALUES)()
        WFInit.assign_wf_initial_values(self.current)
    log_msg = ("\n\n::::::::::: ENGINE STARTED :::::::::::\n"
               "\tWF: %s (Possible) TASK:%s\n"
               "\tCMD:%s\n"
               "\tSUBCMD:%s" % (self.workflow.name,
                                self.workflow.get_tasks(Task.READY),
                                self.current.input.get('cmd'),
                                self.current.input.get('subcmd')))
    log.debug(log_msg)
    sys._zops_wf_state_log = log_msg
    self.current.workflow = self.workflow
|
Initializes the workflow with the given request/response objects and diagram name.
|
3,746
|
def generate_wf_state_log(self):
    output = '\n- - - - - -\n'
    output += "WORKFLOW: %s ( %s )" % (self.current.workflow_name.upper(),
                                       self.current.workflow.name)
    output += "\nTASK: %s ( %s )\n" % (self.current.task_name, self.current.task_type)
    output += "DATA:"
    for k, v in self.current.task_data.items():
        if v:
            output += "\n\t%s: %s" % (k, v)
    output += "\nCURRENT:"
    output += "\n\tACTIVITY: %s" % self.current.activity
    output += "\n\tPOOL: %s" % self.current.pool
    output += "\n\tIN EXTERNAL: %s" % self.wf_state['in_external']
    output += "\n\tLANE: %s" % self.current.lane_name
    output += "\n\tTOKEN: %s" % self.current.token
    sys._zops_wf_state_log = output
    return output
|
Logs the state of the workflow and the contents of task_data.
|
3,747
|
def switch_from_external_to_main_wf(self):
    if (self.wf_state['in_external'] and self.current.task_type == 'EndEvent' and
            self.current.task_name == 'EndEvent'):
        main_wf = self.wf_state['main_wf']
        self.current.workflow_name = main_wf['name']
        self._clear_current_task()
        self.check_for_authentication()
        self.check_for_permission()
        self.workflow_spec = self.get_worfklow_spec()
        self.workflow = self.deserialize_workflow(main_wf['step'])
        self.current.workflow = self.workflow
        self.wf_state['in_external'] = False
        self.wf_state['finished'] = False
        self.wf_state['pool'] = main_wf['pool']
        self.current.pool = self.wf_state['pool']
        self.run()
|
Switches back from an external workflow to the main workflow.
|
3,748
|
def switch_to_external_wf(self):
    if (self.current.task_type == 'ServiceTask' and
            self.current.task.task_spec.type == 'external'):
        log.debug("Entering to EXTERNAL WF")
        main_wf = self.wf_state.copy()
        self.current.workflow_name = (self.current.task_data.pop('external_wf', False) or
                                      self.current.task.task_spec.topic)
        self._clear_current_task()
        self.check_for_authentication()
        self.check_for_permission()
        self.workflow_spec = self.get_worfklow_spec()
        self.workflow = self.create_workflow()
        self.current.workflow = self.workflow
        self.wf_state = {'main_wf': main_wf, 'in_external': True, 'finished': False}
|
Switches from the main workflow to an external workflow.
|
3,749
|
def _clear_current_task(self):
    self.current.task_name = None
    self.current.task_type = None
    self.current.task = None
|
Clears task-related attributes. While switching from one workflow to another, authentication and permissions are checked for the new workflow.
|
3,750
|
def run(self):
    is_lane_changed = False
    while self._should_we_run():
        self.check_for_rerun_user_task()
        task = None
        for task in self.workflow.get_tasks(state=Task.READY):
            self.current.old_lane = self.current.lane_name
            self.current._update_task(task)
            if self.catch_lane_change():
                return
            self.check_for_permission()
            self.check_for_lane_permission()
            self.log_wf_state()
            self.switch_lang()
            self.run_activity()
            self.parse_workflow_messages()
            self.workflow.complete_task_from_id(self.current.task.id)
            self._save_or_delete_workflow()
            self.switch_to_external_wf()
        if task is None:
            break
    self.switch_from_external_to_main_wf()
    self.current.output['token'] = self.current.token
    for task in self.workflow.get_tasks(state=Task.READY):
        self.current._update_task(task)
        self.catch_lane_change()
    self.handle_wf_finalization()
|
Main loop of the workflow engine.
|
3,751
|
def switch_lang(self):
    locale = self.current.locale
    translation.InstalledLocale.install_language(locale['locale_language'])
    translation.InstalledLocale.install_locale(locale['locale_datetime'], 'datetime')
    translation.InstalledLocale.install_locale(locale['locale_number'], 'number')
|
Switch to the language of the current user.
|
3,752
|
def catch_lane_change(self):
    if self.current.lane_name:
        if self.current.old_lane and self.current.lane_name != self.current.old_lane:
            if (self.current.lane_id not in self.current.pool or
                    self.current.pool[self.current.lane_id] != self.current.user_id):
                self.current.log.info("LANE CHANGE : %s >> %s" % (self.current.old_lane,
                                                                  self.current.lane_name))
                if self.current.lane_auto_sendoff:
                    self.current.sendoff_current_user()
                self.current.flow_enabled = False
                if self.current.lane_auto_invite:
                    self.current.invite_other_parties(self._get_possible_lane_owners())
                return True
|
Triggers a lane_user_change signal if we switched to a new lane and the new lane's user differs from the current one.
|
3,753
|
def parse_workflow_messages(self):
    if 'client_message' in self.current.spec.data:
        m = self.current.spec.data['client_message']
        self.current.msg_box(title=m.get('title'),
                             msg=m.get('body'),
                             typ=m.get('type', 'info'))
|
Transmits a client message defined in a workflow task's inputOutput extension.
|
3,754
|
def run_activity(self):
    activity = self.current.activity
    if activity:
        if activity not in self.wf_activities:
            self._load_activity(activity)
        self.current.log.debug("Calling Activity %s from %s" %
                               (activity, self.wf_activities[activity]))
        self.wf_activities[self.current.activity](self.current)
|
Runs the method referenced by the current task.
|
3,755
|
def _import_object(self, path, look_for_cls_method):
    last_nth = 2 if look_for_cls_method else 1
    path = path.split('.')
    module_path = '.'.join(path[:-last_nth])
    class_name = path[-last_nth]
    module = importlib.import_module(module_path)
    if look_for_cls_method and path[-last_nth:][0] == path[-last_nth]:
        class_method = path[-last_nth:][1]
    else:
        class_method = None
    return getattr(module, class_name), class_name, class_method
|
Imports the module that contains the referenced method.
|
3,756
|
def _load_activity(self, activity):
    fpths = []
    full_path = ''
    errors = []
    paths = settings.ACTIVITY_MODULES_IMPORT_PATHS
    number_of_paths = len(paths)
    for index_no in range(number_of_paths):
        full_path = "%s.%s" % (paths[index_no], activity)
        for look4kls in (0, 1):
            try:
                self.current.log.info("try to load from %s[%s]" % (full_path, look4kls))
                kls, cls_name, cls_method = self._import_object(full_path, look4kls)
                if cls_method:
                    self.current.log.info("WILL Call %s(current).%s()" % (kls, cls_method))
                    self.wf_activities[activity] = lambda crnt: getattr(kls(crnt), cls_method)()
                else:
                    self.wf_activities[activity] = kls
                return
            except (ImportError, AttributeError):
                fpths.append(full_path)
                errmsg = ("{activity} not found under these paths:\n\n >>> {paths} \n\n"
                          "Error Messages:\n {errors}")
                errors.append("\n========================================================>\n"
                              "| PATH | %s"
                              "\n========================================================>\n\n"
                              "%s" % (full_path, traceback.format_exc()))
                assert index_no != number_of_paths - 1, errmsg.format(
                    activity=activity,
                    paths='\n >>> '.join(set(fpths)),
                    errors='\n\n'.join(errors))
            except Exception:
                self.current.log.exception("Cannot find the %s" % activity)
|
Iterates through all enabled ~zengine.settings.ACTIVITY_MODULES_IMPORT_PATHS to find the given path.
|
3,757
|
def check_for_lane_permission(self):
    if self.current.lane_permission:
        log.debug("HAS LANE PERM: %s" % self.current.lane_permission)
        perm = self.current.lane_permission
        if not self.current.has_permission(perm):
            raise HTTPError(403, "You don't have required lane permission: %s" % perm)
    if self.current.lane_relations:
        context = self.get_pool_context()
        log.debug("HAS LANE RELS: %s" % self.current.lane_relations)
        try:
            cond_result = eval(self.current.lane_relations, context)
        except Exception:
            log.exception("CONDITION EVAL ERROR : %s || %s" %
                          (self.current.lane_relations, context))
            raise
        if not cond_result:
            log.debug("LANE RELATION ERR: %s %s" % (self.current.lane_relations, context))
            raise HTTPError(403, "You aren't qualified for this lane: %s" %
                            self.current.lane_relations)
|
One or more permissions can be associated with a lane of a workflow. In a similar way, a lane can be restricted in relation to the other lanes of the workflow.
|
3,758
|
def handle_wf_finalization(self):
    if ((not self.current.flow_enabled or
            (self.current.task_type.startswith('End') and not self.are_we_in_subprocess())) and
            'token' in self.current.output):
        del self.current.output['token']
|
Removes the token key from current.output if the workflow is over.
|
3,759
|
def from_rdkit_molecule(data):
    m = MoleculeContainer()
    atoms, mapping = [], []
    for a in data.GetAtoms():
        atom = {'element': a.GetSymbol(), 'charge': a.GetFormalCharge()}
        atoms.append(atom)
        mapping.append(a.GetAtomMapNum())
        isotope = a.GetIsotope()
        if isotope:
            atom['isotope'] = isotope
        radical = a.GetNumRadicalElectrons()
        if radical:
            atom['multiplicity'] = radical + 1
    conformers = data.GetConformers()
    if conformers:
        for atom, (x, y, z) in zip(atoms, conformers[0].GetPositions()):
            atom['x'] = x
            atom['y'] = y
            atom['z'] = z
    for atom, mapping in zip(atoms, mapping):
        a = m.add_atom(atom)
        if mapping:
            m.atom(a)._parsed_mapping = mapping
    for bond in data.GetBonds():
        m.add_bond(bond.GetBeginAtomIdx() + 1, bond.GetEndAtomIdx() + 1,
                   _rdkit_bond_map[bond.GetBondType()])
    return m
|
Converts an RDKit molecule object to a MoleculeContainer.
|
3,760
|
def to_rdkit_molecule(data):
    mol = RWMol()
    conf = Conformer()
    mapping = {}
    is_3d = False
    for n, a in data.atoms():
        ra = Atom(a.number)
        ra.SetAtomMapNum(n)
        if a.charge:
            ra.SetFormalCharge(a.charge)
        if a.isotope != a.common_isotope:
            ra.SetIsotope(a.isotope)
        if a.radical:
            ra.SetNumRadicalElectrons(a.radical)
        mapping[n] = m = mol.AddAtom(ra)
        conf.SetAtomPosition(m, (a.x, a.y, a.z))
        if a.z:
            is_3d = True
    if not is_3d:
        conf.Set3D(False)
    for n, m, b in data.bonds():
        mol.AddBond(mapping[n], mapping[m], _bond_map[b.order])
    mol.AddConformer(conf)
    SanitizeMol(mol)
    return mol
|
Converts a MoleculeContainer to an RDKit molecule object.
|
3,761
|
def __dfs(self, start, weights, depth_limit):
    adj = self._adj
    stack = [(start, depth_limit, iter(sorted(adj[start], key=weights)))]
    visited = {start}
    disconnected = defaultdict(list)
    edges = defaultdict(list)
    while stack:
        parent, depth_now, children = stack[-1]
        try:
            child = next(children)
        except StopIteration:
            stack.pop()
        else:
            if child not in visited:
                edges[parent].append(child)
                visited.add(child)
                if depth_now > 1:
                    front = adj[child].keys() - {parent}
                    if front:
                        stack.append((child, depth_now - 1, iter(sorted(front, key=weights))))
            elif child not in disconnected:
                disconnected[parent].append(child)
    return visited, edges, disconnected
|
A modified NetworkX depth-first search.
|
3,762
|
def get_args_parser():
    parser = argparse.ArgumentParser(description='Marabunta: Migrating ants for Odoo')
    parser.add_argument('--migration-file', '-f', action=EnvDefault,
                        envvar='MARABUNTA_MIGRATION_FILE', required=True,
                        help='The yaml file containing the migration steps')
    parser.add_argument('--database', '-d', action=EnvDefault,
                        envvar='MARABUNTA_DATABASE', required=True,
                        help="Odoo's database")
    parser.add_argument('--db-user', '-u', action=EnvDefault,
                        envvar='MARABUNTA_DB_USER', required=True,
                        help="Odoo's database user")
    parser.add_argument('--db-password', '-w', action=EnvDefault,
                        envvar='MARABUNTA_DB_PASSWORD', required=True,
                        help="Odoo's database password")
    parser.add_argument('--db-port', '-p',
                        default=os.environ.get('MARABUNTA_DB_PORT', 5432),
                        help="Odoo's database port")
    parser.add_argument('--db-host', '-H',
                        default=os.environ.get('MARABUNTA_DB_HOST', 'localhost'),
                        help="Odoo's database host")
    parser.add_argument('--mode', action=EnvDefault, envvar='MARABUNTA_MODE',
                        required=False,
                        help="Specify the mode in which we run the migration,"
                             "such as 'demo' or 'prod'. Additional operations "
                             "of this mode will be executed after the main "
                             "operations and the addons list of this mode "
                             "will be merged with the main addons list.")
    parser.add_argument('--allow-serie', action=BoolEnvDefault, required=False,
                        envvar='MARABUNTA_ALLOW_SERIE',
                        help='Allow to run more than 1 version upgrade at a time.')
    parser.add_argument('--force-version', required=False,
                        default=os.environ.get('MARABUNTA_FORCE_VERSION'),
                        help='Force upgrade of a version, even if it has '
                             'already been applied.')
    group = parser.add_argument_group(
        title='Web',
        description='Configuration related to the internal web server, '
                    'used to publish a maintenance page during the migration.',
    )
    group.add_argument('--web-host', required=False,
                       default=os.environ.get('MARABUNTA_WEB_HOST', '0.0.0.0'),
                       help='Host for the web server')
    group.add_argument('--web-port', required=False,
                       default=os.environ.get('MARABUNTA_WEB_PORT', 8069),
                       help='Port for the web server')
    group.add_argument('--web-custom-html', required=False,
                       default=os.environ.get('MARABUNTA_WEB_CUSTOM_HTML'),
                       help='Path to a custom html file to publish')
    return parser
|
Return a parser for command line options.
|
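A hypothetical invocation of the parser above (argument values are made up; the required options may also come from the MARABUNTA_* environment variables via EnvDefault):

```python
parser = get_args_parser()
args = parser.parse_args(['-f', 'migration.yml', '-d', 'odoo',
                          '-u', 'odoo', '-w', 'secret'])
print(args.database, args.db_port)  # odoo 5432
```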
3,763
|
def from_parse_args(cls, args):
    return cls(args.migration_file,
               args.database,
               db_user=args.db_user,
               db_password=args.db_password,
               db_port=args.db_port,
               db_host=args.db_host,
               mode=args.mode,
               allow_serie=args.allow_serie,
               force_version=args.force_version,
               web_host=args.web_host,
               web_port=args.web_port,
               web_custom_html=args.web_custom_html)
|
Constructor from command line args.
|
3,764
|
def set_current(self, current):
    self.current = current
    self.input = current.input
    self.output = current.output
    self.cmd = current.task_data['cmd']
    if self.cmd and NEXT_CMD_SPLITTER in self.cmd:
        self.cmd, self.next_cmd = self.cmd.split(NEXT_CMD_SPLITTER)
    else:
        self.next_cmd = None
|
Creates some aliases for attributes of current.
|
3,765
|
def form_out(self, _form=None):
    _form = _form or self.object_form
    self.output['forms'] = _form.serialize()
    self._add_meta_props(_form)
    self.output['forms']['grouping'] = _form.Meta.grouping
    self.output['forms']['constraints'] = _form.Meta.constraints
    self._patch_form(self.output['forms'])
    self.set_client_cmd('form')
|
Renders the form: applies form modifiers, then writes the result to the response payload. If a form object instance is supplied, it is used instead of the view's default ObjectForm.
|
3,766
|
def run(self):
    from pyoko.lib.utils import get_object_from_path
    from zengine.config import settings
    model = get_object_from_path(settings.PERMISSION_MODEL)
    perm_provider = get_object_from_path(settings.PERMISSION_PROVIDER)
    existing_perms = []
    new_perms = []
    for code, name, desc in perm_provider():
        code = six.text_type(code)
        if self.manager.args.dry:
            exists = model.objects.filter(code=code, name=name)
            if exists:
                perm = exists[0]
                new = False
            else:
                new = True
                perm = model(code=code, name=name)
        else:
            try:
                perm = model.objects.get(code)
                existing_perms.append(perm)
            except ObjectDoesNotExist:
                perm = model(description=desc, code=code, name=name)
                perm.key = code
                perm.save()
                new_perms.append(perm)
    report = "\n\n%s permission(s) were found in DB. " % len(existing_perms)
    if new_perms:
        report += "\n%s new permission record added. " % len(new_perms)
    else:
        report += 'No new perms added. '
    if new_perms:
        if not self.manager.args.dry:
            SelectBoxCache.flush(model.__name__)
        report += 'Total %s perms exists.' % (len(existing_perms) + len(new_perms))
        report = "\n + " + "\n + ".join([p.name or p.code for p in new_perms]) + report
    if self.manager.args.dry:
        print("\n~~~~~~~~~~~~~~ DRY RUN ~~~~~~~~~~~~~~\n")
    print(report + "\n")
|
Creates new permissions.
|
3,767
|
def run(self):
    from zengine.models import User
    user = User(username=self.manager.args.username,
                superuser=self.manager.args.super)
    user.set_password(self.manager.args.password)
    user.save()
    print("New user created with ID: %s" % user.key)
|
Creates a user and encrypts the password.
|
3,768
|
def _prepare_domain(mapping):
    try:
        domain, dir = mapping.split(':')
    except ValueError:
        print("Please provide the sources in the form of '<domain>:<directory>'")
        sys.exit(1)
    try:
        default_language = settings.TRANSLATION_DOMAINS[domain]
    except KeyError:
        print("Unknown domain {domain}, check the settings file to make sure"
              " this domain is set in TRANSLATION_DOMAINS".format(domain=domain))
        sys.exit(1)
    handle, path = tempfile.mkstemp(prefix='zengine_i18n_', suffix='.pot')
    return (domain, {
        'default': default_language,
        'pot': path,
        'source': dir,
    })
|
Prepares a helper dictionary for the domain to temporarily hold some information.
|
3,769
|
def _validate_domains(domains):
    missing = set(settings.TRANSLATION_DOMAINS.keys()) - set(domains.keys())
    if missing:
        print('The following domains have been set in the configuration, '
              'but their sources were not provided, use the `--source` '
              'option to specify their sources: {domains}'.format(domains=', '.join(missing)))
        sys.exit(1)
|
Checks that all domains specified in the settings were provided in the options.
|
3,770
|
def _extract_translations(self, domains):
    for domain, options in domains.items():
        extractor = babel_frontend.extract_messages()
        extractor.initialize_options()
        extractor.output_file = options['pot']
        extractor.add_comments = ['tn:']
        extractor.strip_comments = True
        extractor.input_paths = [options['source']]
        extractor.msgid_bugs_address = self.manager.args.contact
        extractor.copyright_holder = self.manager.args.copyright
        extractor.version = self.manager.args.version
        extractor.project = self.manager.args.project
        extractor.finalize_options()
        extractor.keywords.update({
            'gettext_lazy': extractor.keywords['gettext'],
            'ngettext_lazy': extractor.keywords['ngettext'],
            '__': extractor.keywords['gettext'],
        })
        _run_babel_command(extractor)
|
Extracts the translations into .pot files.
|
3,771
|
def _init_update_po_files(self, domains):
    for language in settings.TRANSLATIONS:
        for domain, options in domains.items():
            if language == options['default']:
                continue
            if os.path.isfile(_po_path(language, domain)):
                self._update_po_file(language, domain, options['pot'])
            else:
                self._init_po_file(language, domain, options['pot'])
|
Update or initialize the .po translation files.
|
3,772
|
def _cleanup(self, domains):
    for option in domains.values():
        try:
            os.remove(option['pot'])
        except (IOError, OSError):
            pass
|
Remove the temporary .pot files that were created for the domains.
|
3,773
|
def run(self):
    from zengine.lib.cache import WFSpecNames
    if self.manager.args.clear:
        self._clear_models()
        return
    if self.manager.args.wf_path:
        paths = self.get_wf_from_path(self.manager.args.wf_path)
    else:
        paths = self.get_workflows()
    self.count = 0
    self.do_with_submit(self.load_diagram, paths, threads=self.manager.args.threads)
    WFSpecNames().refresh()
    print("%s BPMN file loaded" % self.count)
|
Reads the workflows, checks whether each one has been updated, and tries to update it if there are no running instances of that workflow.
|
3,774
|
def get_workflows(self):
    for pth in settings.WORKFLOW_PACKAGES_PATHS:
        for f in glob.glob("%s/*.bpmn" % pth):
            with open(f) as fp:
                yield os.path.basename(os.path.splitext(f)[0]), fp.read()
|
Scans and yields all workflows found under WORKFLOW_PACKAGES_PATHS.
|
3,775
|
def check_migration_and_solr(self):
    from pyoko.db.schema_update import SchemaUpdater
    from socket import error as socket_error
    from pyoko.conf import settings
    from importlib import import_module
    import_module(settings.MODELS_MODULE)
    registry = import_module('pyoko.model').model_registry
    models = [model for model in registry.get_base_models()]
    try:
        print(__(u"Checking migration and solr ..."))
        updater = SchemaUpdater(models, 1, False)
        updater.run(check_only=True)
    except socket_error as e:
        print(__(u"{0}Error not connected, open redis and rabbitmq{1}").format(
            CheckList.FAIL, CheckList.ENDC))
|
Checks the models for migrations that need to be done; Solr is checked as well.
|
3,776
|
def check_redis():
    from pyoko.db.connection import cache
    from redis.exceptions import ConnectionError
    try:
        cache.ping()
        print(__(u"{0}Redis is working{1}").format(CheckList.OKGREEN, CheckList.ENDC))
    except ConnectionError as e:
        print(__(u"{0}Redis is not working{1} ").format(CheckList.FAIL, CheckList.ENDC),
              e.message)
|
Checks the Redis connection and displays on screen whether or not a connection could be made.
|
3,777
|
def check_riak():
    from pyoko.db.connection import client
    from socket import error as socket_error
    try:
        if client.ping():
            print(__(u"{0}Riak is working{1}").format(CheckList.OKGREEN, CheckList.ENDC))
        else:
            print(__(u"{0}Riak is not working{1}").format(CheckList.FAIL, CheckList.ENDC))
    except socket_error as e:
        print(__(u"{0}Riak is not working{1}").format(CheckList.FAIL, CheckList.ENDC),
              e.message)
|
Checks the Riak connection and displays on screen whether or not a connection could be made.
|
3,778
|
def check_mq_connection(self):
    import pika
    from zengine.client_queue import BLOCKING_MQ_PARAMS
    from pika.exceptions import ProbableAuthenticationError, ConnectionClosed
    try:
        connection = pika.BlockingConnection(BLOCKING_MQ_PARAMS)
        channel = connection.channel()
        if channel.is_open:
            print(__(u"{0}RabbitMQ is working{1}").format(CheckList.OKGREEN, CheckList.ENDC))
        elif self.channel.is_closed or self.channel.is_closing:
            print(__(u"{0}RabbitMQ is not working!{1}").format(CheckList.FAIL, CheckList.ENDC))
    except ConnectionClosed as e:
        print(__(u"{0}RabbitMQ is not working!{1}").format(CheckList.FAIL, CheckList.ENDC), e)
    except ProbableAuthenticationError as e:
        print(__(u"{0}RabbitMQ username and password wrong{1}").format(
            CheckList.FAIL, CheckList.ENDC))
|
Checks the RabbitMQ connection and displays on screen whether or not a connection could be made.
|
3,779
|
def check_encoding_and_env():
    import sys
    import os
    if sys.getfilesystemencoding() in ['utf-8', 'UTF-8']:
        print(__(u"{0}File system encoding correct{1}").format(CheckList.OKGREEN,
                                                               CheckList.ENDC))
    else:
        print(__(u"{0}File system encoding wrong!!{1}").format(CheckList.FAIL,
                                                               CheckList.ENDC))
    check_env_list = ['RIAK_PROTOCOL', 'RIAK_SERVER', 'RIAK_PORT', 'REDIS_SERVER',
                      'DEFAULT_BUCKET_TYPE', 'PYOKO_SETTINGS', 'MQ_HOST', 'MQ_PORT',
                      'MQ_USER', 'MQ_VHOST']
    env = os.environ
    for k, v in env.items():
        if k in check_env_list:
            print(__(u"{0}{1} : {2}{3}").format(CheckList.BOLD, k, v, CheckList.ENDC))
|
Verifies the filesystem encoding and prints the relevant environment variables so the user can check that the correct values are in use.
|
3,780
|
def no_moves(position):
    return position.no_moves(color.white) or position.no_moves(color.black)
|
Finds whether the game is over, i.e. whether either side has no legal moves.
|
3,781
|
def is_checkmate(position, input_color):
    return (position.no_moves(input_color) and
            position.get_king(input_color).in_check(position))
|
Finds whether the given color's king is checkmated.
|
3,782
|
def _paginate(self, current_page, query_set, per_page=10):
    total_objects = query_set.count()
    total_pages = int(total_objects / per_page or 1)
    current_per_page = per_page + (total_objects % per_page
                                   if current_page == total_pages else 0)
    pagination_data = dict(page=current_page,
                           total_pages=total_pages,
                           total_objects=total_objects,
                           per_page=current_per_page)
    query_set = query_set.set_params(rows=current_per_page,
                                     start=(current_page - 1) * per_page)
    return query_set, pagination_data
|
Handles pagination of object listings; remainder rows are folded into the last page.
|
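A worked example of the arithmetic above (numbers are illustrative): with 25 objects and per_page=10 there are two pages, and the remainder is folded into the last one:

```python
total_objects, per_page, current_page = 25, 10, 2
total_pages = int(total_objects / per_page or 1)                       # 2
current_per_page = per_page + (total_objects % per_page
                               if current_page == total_pages else 0)  # 10 + 5 = 15
start = (current_page - 1) * per_page                                  # rows 10..24
```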
3,783
|
def create_message(current):
    msg = current.input['message']
    msg_obj = Channel.add_message(msg['channel'], body=msg['body'], typ=msg['type'],
                                  sender=current.user, title=msg['title'],
                                  receiver=msg['receiver'] or None)
    current.output = {'msg_key': msg_obj.key, 'status': 'Created', 'code': 201}
    if 'attachment' in msg:  # note: the guard checks 'attachment' but reads 'attachments'
        for atch in msg['attachments']:
            typ = current._dedect_file_type(atch['name'], atch['content'])
            Attachment(channel_id=msg['channel'], msg=msg_obj, name=atch['name'],
                       file=atch['content'], description=atch['description'],
                       typ=typ).save()
|
Creates a message in the given channel.
|
3,784
|
def show_channel(current, waited=False):
    ch = Channel(current).objects.get(current.input['key'])
    sbs = ch.get_subscription_for_user(current.user_id)
    current.output = {'key': current.input['key'],
                      'description': ch.description,
                      'name': sbs.name,
                      'actions': sbs.get_actions(),
                      'avatar_url': ch.get_avatar(current.user),
                      'no_of_members': len(ch.subscriber_set),
                      'member_list': [{'name': sb.user.full_name,
                                       'is_online': sb.user.is_online(),
                                       'avatar_url': sb.user.get_avatar_url()}
                                      for sb in ch.subscriber_set.objects.all()],
                      'last_messages': [],
                      'status': 'OK',
                      'code': 200}
    for msg in ch.get_last_messages():
        current.output['last_messages'].insert(0, msg.serialize(current.user))
|
Initial display of channel content. Returns the channel description, member list, number of members, the last 20 messages, etc.
|
3,785
|
def channel_history(current):
    current.output = {'status': 'OK', 'code': 201, 'messages': []}
    for msg in list(Message.objects.filter(channel_id=current.input['channel_key'],
                                           updated_at__lte=current.input['timestamp'])[:20]):
        current.output['messages'].insert(0, msg.serialize(current.user))
    if current.output['messages']:
        current.output['messages'].pop(-1)
|
Gets older messages for a channel, 20 messages per request.
|
3,786
|
def report_last_seen_message(current):
    sbs = Subscriber(current).objects.filter(channel_id=current.input['channel_key'],
                                             user_id=current.user_id)[0]
    sbs.last_seen_msg_time = current.input['timestamp']
    sbs.save()
    current.output = {'status': 'OK', 'code': 200}
|
Stores the timestamp of the latest seen message of an ACTIVE channel.
|
3,787
|
def list_channels(current):
    current.output = {'status': 'OK', 'code': 200, 'channels': []}
    for sbs in current.user.subscriptions.objects.filter(is_visible=True):
        try:
            current.output['channels'].append(sbs.get_channel_listing())
        except ObjectDoesNotExist:
            log.exception("UNPAIRED DIRECT EXCHANGES!!!!")
            sbs.delete()
|
Lists the channel memberships of the current user.
|
3,788
|
def unread_count(current):
    unread_ntf = 0
    unread_msg = 0
    for sbs in current.user.subscriptions.objects.filter(is_visible=True):
        try:
            if sbs.channel.key == current.user.prv_exchange:
                unread_ntf += sbs.unread_count()
            else:
                unread_msg += sbs.unread_count()
        except ObjectDoesNotExist:
            log.exception("MULTIPLE PRV EXCHANGES!!!!")
            sbs.delete()
    current.output = {'status': 'OK', 'code': 200,
                      'notifications': unread_ntf,
                      'messages': unread_msg}
|
Returns the number of unread messages and notifications for the current user.
|
3,789
|
def get_notifications(current):
    current.output = {'status': 'OK', 'code': 200, 'notifications': []}
    amount = current.input.get('amount', 8)
    try:
        notif_sbs = current.user.subscriptions.objects.get(
            channel_id=current.user.prv_exchange)
    except MultipleObjectsReturned:
        log.exception("MULTIPLE PRV EXCHANGES!!!!")
        sbs = current.user.subscriptions.objects.filter(
            channel_id=current.user.prv_exchange)
        sbs[0].delete()
        notif_sbs = sbs[1]
    for msg in notif_sbs.channel.message_set.objects.all()[:amount]:
        current.output['notifications'].insert(0, {
            'title': msg.msg_title,
            'body': msg.body,
            'type': msg.typ,
            'url': msg.url,
            'channel_key': msg.channel.key,
            'message_key': msg.key,
            'timestamp': msg.updated_at})
|
Returns the last N notifications for the current user.
|
3,790
|
def create_channel(current):
    channel = Channel(name=current.input['name'],
                      description=current.input['description'],
                      owner=current.user,
                      typ=15).save()
    with BlockSave(Subscriber):
        Subscriber.objects.get_or_create(user=channel.owner,
                                         channel=channel,
                                         can_manage=True,
                                         can_leave=False)
    current.input['key'] = channel.key
    show_channel(current)
    current.output.update({'status': 'Created', 'code': 201})
|
Creates a public channel; it can be a broadcast channel or a normal chat room.
|
3,791
|
def add_unit_to_channel(current):
    read_only = current.input['read_only']
    newly_added, existing = [], []
    for member_key in UnitModel.get_user_keys(current, current.input['unit_key']):
        sb, new = Subscriber(current).objects.get_or_create(
            user_id=member_key, read_only=read_only,
            channel_id=current.input['channel_key'])
        if new:
            newly_added.append(member_key)
        else:
            existing.append(member_key)
    current.output = {'existing': existing, 'newly_added': newly_added,
                      'status': 'OK', 'code': 201}
|
Subscribes the users of a given unit to the given channel.
|
3,792
|
def search_user(current):
    current.output = {'results': [], 'status': 'OK', 'code': 201}
    qs = UserModel(current).objects.exclude(key=current.user_id).search_on(
        *settings.MESSAGING_USER_SEARCH_FIELDS, contains=current.input['query'])
    for user in qs:
        if user.key != current.user_id:
            current.output['results'].append((user.full_name, user.key,
                                              user.get_avatar_url()))
|
Searches users, for adding to a public room or for creating one-to-one direct messaging.
|
3,793
|
def search_unit(current):
    current.output = {'results': [], 'status': 'OK', 'code': 201}
    for user in UnitModel(current).objects.search_on(
            *settings.MESSAGING_UNIT_SEARCH_FIELDS, contains=current.input['query']):
        current.output['results'].append((user.name, user.key))
|
Searches units, for subscribing their users to a channel.
|
3,794
|
def create_direct_channel(current):
    channel, sub_name = Channel.get_or_create_direct_channel(current.user_id,
                                                             current.input['user_key'])
    current.input['key'] = channel.key
    show_channel(current)
    current.output.update({'status': 'Created', 'code': 201})
|
Creates a one-to-one channel between the current and the selected user.
|
3,795
|
def find_message(current):
    current.output = {'results': [], 'status': 'OK', 'code': 201}
    query_set = Message(current).objects.search_on(['msg_title', 'body', 'url'],
                                                   contains=current.input['query'])
    if current.input['channel_key']:
        query_set = query_set.filter(channel_id=current.input['channel_key'])
    else:
        subscribed_channels = Subscriber.objects.filter(
            user_id=current.user_id).values_list("channel_id", flatten=True)
        query_set = query_set.filter(channel_id__in=subscribed_channels)
    query_set, pagination_data = _paginate(current_page=current.input['page'],
                                           query_set=query_set)
    current.output['pagination'] = pagination_data
    for msg in query_set:
        current.output['results'].append(msg.serialize(current.user))
|
Searches in messages. If channel_key is given, the search is limited to that channel; otherwise it is performed over all of the user's subscribed channels.
|
3,796
|
def delete_channel(current):
    ch_key = current.input['channel_key']
    ch = Channel(current).objects.get(owner_id=current.user_id, key=ch_key)
    ch.delete()
    Subscriber.objects.filter(channel_id=ch_key).delete()
    Message.objects.filter(channel_id=ch_key).delete()
    current.output = {'status': 'Deleted', 'code': 200}
|
Deletes a channel along with its subscriptions and messages.
|
3,797
|
def edit_channel(current):
    ch = Channel(current).objects.get(owner_id=current.user_id,
                                      key=current.input['channel_key'])
    ch.name = current.input['name']
    ch.description = current.input['description']
    ch.save()
    for sbs in ch.subscriber_set.objects.all():
        sbs.name = ch.name
        sbs.save()
    current.output = {'status': 'OK', 'code': 200}
|
Updates the channel name or description and propagates the new name to subscribers.
|
3,798
|
def pin_channel(current):
    try:
        Subscriber(current).objects.filter(
            user_id=current.user_id,
            channel_id=current.input['channel_key']).update(pinned=True)
        current.output = {'status': 'OK', 'code': 200}
    except ObjectDoesNotExist:
        raise HTTPError(404, "")
|
Pins a channel to the top of the channel list.
|
3,799
|
def delete_message(current):
    try:
        Message(current).objects.get(sender_id=current.user_id,
                                     key=current.input['key']).delete()
        current.output = {'status': 'Deleted', 'code': 200,
                          'key': current.input['key']}
    except ObjectDoesNotExist:
        raise HTTPError(404, "")
|
Deletes a message.
|