idx
int64 0
63k
| question
stringlengths 61
4.03k
| target
stringlengths 6
1.23k
|
|---|---|---|
6,500
|
def update(access_key, resource_policy, is_admin, is_active, rate_limit):
    """Update an existing keypair identified by *access_key*.

    Prints an error and exits with status 1 if the API call fails or
    the server reports a failure.
    """
    with Session() as session:
        try:
            data = session.KeyPair.update(
                access_key,
                is_active=is_active,
                is_admin=is_admin,
                resource_policy=resource_policy,
                rate_limit=rate_limit,
            )
        except Exception as e:
            print_error(e)
            sys.exit(1)
        if not data['ok']:
            # Fixed copy-paste defect: this command updates a keypair,
            # but the message previously said "creation has failed".
            print_fail('KeyPair update has failed: {0}'.format(data['msg']))
            sys.exit(1)
        print('Key pair is updated: ' + access_key + '.')
|
Update an existing keypair .
|
6,501
|
def delete(access_key):
    """Delete an existing keypair and report the outcome, exiting with
    status 1 on any failure."""
    with Session() as session:
        try:
            data = session.KeyPair.delete(access_key)
        except Exception as exc:
            print_error(exc)
            sys.exit(1)
        if data['ok']:
            print('Key pair is deleted: ' + access_key + '.')
        else:
            print_fail('KeyPair deletion has failed: {0}'.format(data['msg']))
            sys.exit(1)
|
Delete an existing keypair .
|
6,502
|
def login(self):
    """Perform the API auth test, returning (team, User)."""
    log.debug('performing auth test')
    result = self._get(urls['test'])
    current_user = User({'name': result['user'], 'id': result['user_id']})
    self._refresh()
    return result['team'], current_user
|
perform API auth test returning user and team
|
6,503
|
def user(self, match):
    """Return the User object for a Slack ID (9 chars, 'U' prefix) or name."""
    looks_like_id = len(match) == 9 and match[0] == 'U'
    return self._lookup(User, 'id' if looks_like_id else 'name', match)
|
Return User object for a given Slack ID or name
|
6,504
|
def channel(self, match):
    """Return the Channel object for a Slack ID (C/G/D prefix) or name."""
    looks_like_id = len(match) == 9 and match[0] in ('C', 'G', 'D')
    return self._lookup(Channel, 'id' if looks_like_id else 'name', match)
|
Return Channel object for a given Slack ID or name
|
6,505
|
def _refresh(self):
    """Refresh the internal directory cache of users and channels."""
    log.debug('refreshing directory cache')
    # Regenerate both directories from their generator sources.
    self._users.update(list(self._user_gen()))
    self._channels.update(list(self._channel_gen()))
|
refresh internal directory cache
|
6,506
|
def match(self, attr, val):
    """Return the first member whose *attr* equals *val*, or None.

    The directory lock is held for the whole scan.
    """
    with self._lock:
        for member in self:
            if getattr(member, attr) == val:
                return member
|
lookup object in directory with attribute matching value
|
6,507
|
def biggus_chunk(chunk_key, biggus_array, masked):
    """Lazily realise *biggus_array* and wrap it in a biggus Chunk.

    Useful as a dask task so the chunk is not computed while merely
    building the graph.
    """
    realise = biggus_array.masked_array if masked else biggus_array.ndarray
    return biggus._init.Chunk(chunk_key, realise())
|
A function that lazily evaluates a biggus.Chunk. This is useful for passing through as a dask task so that we don't have to compute the chunk in order to compute the graph.
|
6,508
|
def lazy_chunk_creator(name):
    """Create a lazy chunk-creating function renamed to *name*, suitable
    for a readable representation in a dask graph."""
    def biggus_chunk(chunk_key, biggus_array, masked):
        realise = biggus_array.masked_array if masked else biggus_array.ndarray
        return biggus._init.Chunk(chunk_key, realise())
    biggus_chunk.__name__ = name
    return biggus_chunk
|
Create a lazy chunk creating function with a nice name that is suitable for representation in a dask graph .
|
6,509
|
def _make_nodes(self, dsk_graph, array, iteration_order, masked, top=False):
    """Recursively build dask-graph entries for the chunks of *array*.

    Results are memoised in self._node_cache, keyed on the array,
    iteration order and masked-ness, so shared sub-arrays are only
    expanded once.

    NOTE(review): the `top` parameter is unused here — TODO confirm
    whether callers rely on it.
    """
    cache_key = _array_id(array, iteration_order, masked)
    nodes = self._node_cache.get(cache_key, None)
    if nodes is None:
        if hasattr(array, 'streams_handler'):
            # Arrays with a streams handler (e.g. aggregations) get
            # dedicated handler nodes instead of plain chunk tasks.
            nodes = self._make_stream_handler_nodes(dsk_graph, array,
                                                    iteration_order, masked)
        else:
            nodes = {}
            chunks = []
            name = '{}\n{}'.format(array.__class__.__name__, array.shape)
            biggus_chunk_func = self.lazy_chunk_creator(name)
            chunk_index_gen = biggus._init.ProducerNode.chunk_index_gen
            for chunk_key in chunk_index_gen(array.shape,
                                             iteration_order[::-1]):
                biggus_array = array[chunk_key]
                pretty_key = ', '.join(map(slice_repr, chunk_key))
                # uuid keeps graph ids unique even for identical keys.
                chunk_id = ('chunk shape: {}\nsource key: [{}]\n\n{}'
                            ''.format(biggus_array.shape, pretty_key,
                                      uuid.uuid4()))
                task = (biggus_chunk_func, chunk_key, biggus_array, masked)
                chunks.append(task)
                assert chunk_id not in dsk_graph
                dsk_graph[chunk_id] = task
                nodes[chunk_id] = task
        self._node_cache[cache_key] = nodes
    return nodes
|
Recursive function that returns the dask items for the given array .
|
6,510
|
def wait_until_element_has_focus(self, locator, timeout=None):
    """Wait until the element identified by *locator* gains focus."""
    message = "Waiting for focus on '%s'" % (locator)
    self._info(message)
    self._wait_until_no_error(timeout, self._check_element_focus_exp, True,
                              locator, timeout)
|
Waits until the element identified by locator has focus . You might rather want to use Element Focus Should Be Set
|
6,511
|
def wait_until_element_does_not_have_focus(self, locator, timeout=None):
    """Wait until the element identified by *locator* loses focus."""
    message = "Waiting until '%s' does not have focus" % (locator)
    self._info(message)
    self._wait_until_no_error(timeout, self._check_element_focus_exp, False,
                              locator, timeout)
|
Waits until the element identified by locator doesn't have focus. You might rather want to use Element Focus Should Not Be Set
|
6,512
|
def wait_until_element_value_is(self, locator, expected, strip=False, timeout=None):
    """Wait until the value of *locator* is exactly *expected*."""
    message = "Waiting for '%s' value to be '%s'" % (locator, expected)
    self._info(message)
    self._wait_until_no_error(timeout, self._check_element_value_exp, False,
                              locator, expected, strip, timeout)
|
Waits until the element identified by locator value is exactly the expected value . You might want to use Element Value Should Be instead .
|
6,513
|
def wait_until_element_value_contains(self, locator, expected, timeout=None):
    """Wait until the value of *locator* contains *expected*."""
    message = "Waiting for '%s' value to contain '%s'" % (locator, expected)
    self._info(message)
    self._wait_until_no_error(timeout, self._check_element_value_exp, True,
                              locator, expected, False, timeout)
|
Waits until the element identified by locator contains the expected value . You might want to use Element Value Should Contain instead .
|
6,514
|
def set_element_focus(self, locator):
    """Give keyboard focus to *locator* and wait until it is active.

    Intended for focusable elements such as text fields.
    """
    self._info("Setting focus on element '%s'" % (locator))
    target = self._element_find(locator, True, True)
    # A NULL keypress focuses the element without altering its value.
    target.send_keys(Keys.NULL)
    self._wait_until_no_error(None, self._check_element_focus, True, locator)
|
Sets focus on the element identified by locator . Should be used with elements meant to have focus only such as text fields . This keywords also waits for the focus to be active by calling the Wait Until Element Has Focus keyword .
|
6,515
|
def clear_input_field(self, locator, method=0):
    """Clear the text field at *locator*.

    method 0 (default) uses element.clear(); 1 sends CTRL+A then
    DELETE; 2 presses BACKSPACE until the value is empty; any other
    value falls back to element.clear().
    """
    element = self._element_find(locator, True, True)
    mode = int(method)
    if mode == 0:
        self._info("Clearing input on element '%s'" % (locator))
        element.clear()
    elif mode == 1:
        self._info("Clearing input on element '%s' by pressing 'CTRL + A + DELETE'" % (locator))
        element.send_keys(Keys.CONTROL + 'a')
        element.send_keys(Keys.DELETE)
    elif mode == 2:
        self._info("Clearing input on element '%s' by repeatedly pressing BACKSPACE" % (locator))
        while len(element.get_attribute('value')) != 0:
            element.send_keys(Keys.BACKSPACE)
    else:
        element.clear()
|
Clears the text field identified by locator
|
6,516
|
def element_width_should_be(self, locator, expected):
    """Verify the element at *locator* has the expected width (pixels)."""
    message = "Verifying element '%s' width is '%s'" % (locator, expected)
    self._info(message)
    self._check_element_size(locator, 'width', expected)
|
Verifies the element identified by locator has the expected width . Expected width should be in pixels .
|
6,517
|
def element_height_should_be(self, locator, expected):
    """Verify the element at *locator* has the expected height (pixels)."""
    message = "Verifying element '%s' height is '%s'" % (locator, expected)
    self._info(message)
    self._check_element_size(locator, 'height', expected)
|
Verifies the element identified by locator has the expected height . Expected height should be in pixels .
|
6,518
|
def element_value_should_be(self, locator, expected, strip=False):
    """Fail unless the value attribute of *locator* equals *expected*."""
    self._info("Verifying element '%s' value is '%s'" % (locator, expected))
    element = self._element_find(locator, True, True)
    value = element.get_attribute('value')
    if strip:
        value = value.strip()
    if str(value) != expected:
        raise AssertionError("Element '%s' value was not '%s', it was '%s'" % (locator, expected, value))
|
Verifies the element identified by locator has the expected value .
|
6,519
|
def element_value_should_not_be(self, locator, value, strip=False):
    """Fail if the value attribute of *locator* equals *value*."""
    self._info("Verifying element '%s' value is not '%s'" % (locator, value))
    element = self._element_find(locator, True, True)
    actual = str(element.get_attribute('value'))
    if strip:
        actual = actual.strip()
    if actual == value:
        raise AssertionError("Value was '%s' for element '%s' while it shouldn't have" % (actual, locator))
|
Verifies the element identified by locator is not the specified value .
|
6,520
|
def element_value_should_contain(self, locator, expected):
    """Fail unless *expected* occurs in the value attribute of *locator*."""
    self._info("Verifying element '%s' value contains '%s'" % (locator, expected))
    element = self._element_find(locator, True, True)
    value = str(element.get_attribute('value'))
    if expected not in value:
        raise AssertionError("Value '%s' did not appear in element '%s'. It's value was '%s'" % (expected, locator, value))
|
Verifies the element identified by locator contains the expected value .
|
6,521
|
def element_value_should_not_contain(self, locator, value):
    """Fail if *value* occurs in the value attribute of *locator*."""
    self._info("Verifying element '%s' value does not contain '%s'" % (locator, value))
    element = self._element_find(locator, True, True)
    current = str(element.get_attribute('value'))
    if value in current:
        raise AssertionError("Value '%s' was found in element '%s' while it shouldn't have" % (value, locator))
|
Verifies the element identified by locator does not contain the specified value .
|
6,522
|
def element_focus_should_be_set(self, locator):
    """Verify the element identified by *locator* currently has focus."""
    message = "Verifying element '%s' focus is set" % locator
    self._info(message)
    self._check_element_focus(True, locator)
|
Verifies the element identified by locator has focus .
|
6,523
|
def element_focus_should_not_be_set(self, locator):
    """Verify the element identified by *locator* does not have focus."""
    message = "Verifying element '%s' focus is not set" % locator
    self._info(message)
    self._check_element_focus(False, locator)
|
Verifies the element identified by locator does not have focus .
|
6,524
|
def element_css_attribute_should_be(self, locator, prop, expected):
    """Verify the CSS property *prop* of *locator* equals *expected*."""
    message = "Verifying element '%s' has css attribute '%s' with a value of '%s'" % (locator, prop, expected)
    self._info(message)
    self._check_element_css_value(locator, prop, expected)
|
Verifies the element identified by locator has the expected value for the targeted prop .
|
6,525
|
def wait_until_page_does_not_contain_these_elements(self, timeout, *locators):
    """Wait until none of the given *locators* are present on the page."""
    self._wait_until_no_error(timeout, self._wait_for_elements_to_go_away, locators)
|
Waits until all of the specified elements are not found on the page .
|
6,526
|
def wait_until_element_is_clickable(self, locator, timeout=None):
    """Click *locator* repeatedly until the click succeeds.

    Useful for buttons whose click handlers attach asynchronously;
    avoids unclickable-element exceptions.
    """
    self._wait_until_no_error(timeout, self._wait_for_click_to_succeed, locator)
|
Clicks the element specified by locator until the operation succeeds. This should be used with buttons that are generated in real-time and that don't have their click handling available immediately. This keyword avoids unclickable element exceptions.
|
6,527
|
def _visitor_impl(self, arg):
    """Dispatch *arg* to the visitor method registered for its type,
    falling back along the first-base-class chain up to ``object``."""
    visitor_key = (_qualname(type(self)), type(arg))
    if visitor_key in _methods:
        return _methods[visitor_key](self, arg)
    parent = arg.__class__.__bases__[0]
    while parent != object:
        parent_key = (_qualname(type(self)), parent)
        if parent_key in _methods:
            return _methods[parent_key](self, arg)
        parent = parent.__bases__[0]
    raise VisitorException('No visitor found for class ' + str(type(arg)))
|
Actual visitor method implementation .
|
6,528
|
def visitor(arg_type):
    """Decorator that registers *fn* as the visitor for *arg_type* on
    its declaring class and substitutes the dispatching implementation."""
    def decorator(fn):
        _methods[(_declaring_class(fn), arg_type)] = fn
        return _visitor_impl
    return decorator
|
Decorator that creates a visitor method .
|
6,529
|
def absolute(parser, token):
    """Template tag: full absolute URL based on the request host."""
    url_node = url(parser, token)
    return AbsoluteUrlNode(view_name=url_node.view_name,
                           args=url_node.args,
                           kwargs=url_node.kwargs,
                           asvar=url_node.asvar)
|
Returns a full absolute URL based on the request host .
|
6,530
|
def site(parser, token):
    """Template tag: full absolute URL based on the current site."""
    url_node = url(parser, token)
    return SiteUrlNode(view_name=url_node.view_name,
                       args=url_node.args,
                       kwargs=url_node.kwargs,
                       asvar=url_node.asvar)
|
Returns a full absolute URL based on the current site .
|
6,531
|
def get_callable_method_dict(obj):
    """Return a dict mapping public (non-underscore) method names of
    *obj* to their bound callables."""
    return {
        name: member
        for name, member in ((n, getattr(obj, n, 'none')) for n in dir(obj))
        if callable(member) and not name.startswith('_')
    }
|
Returns a dictionary of callable methods of object obj .
|
6,532
|
def get_properties(zos_obj):
    """Return (getter-only properties, setter properties) of *zos_obj*.

    Raises NotImplementedError if some setter is not also a getter.
    """
    getters = set(zos_obj._prop_map_get_.keys())
    setters = set(zos_obj._prop_map_put_.keys())
    if not setters.issubset(getters):
        msg = 'Assumption all getters are also setters is incorrect!'
        raise NotImplementedError(msg)
    return list(getters - setters), list(setters)
|
Returns a lists of properties bound to the object zos_obj
|
6,533
|
def wrapped_zos_object(zos_obj):
    """Wrap a raw ZOS-API COM object; already-wrapped or non-COM
    objects are returned unchanged."""
    already_wrapped = hasattr(zos_obj, '_wrapped')
    if already_wrapped or 'CLSID' not in dir(zos_obj):
        return zos_obj
    return managed_wrapper_class_factory(zos_obj)(zos_obj)
|
Helper function to wrap ZOS API COM objects .
|
6,534
|
def reset(self):
    """Clear all processing state and prepare the object for reuse."""
    self.current_table = None       # table currently being parsed
    self.tables = []                # all tables parsed so far
    self.data = [{}]                # presumably first entry holds document metadata — TODO confirm
    self.additional_data = {}
    self.lines = []
    self.set_state('document')      # back to top-level parsing state
    self.current_file = None
    self.set_of_energies = set()
|
Clean any processing data and prepare object for reuse
|
6,535
|
def _set_table(self, data):
    """Switch into 'table' parsing state, create a new HEPTable and
    register it with the tables collection."""
    self.set_state('table')
    table = HEPTable(index=len(self.tables) + 1)
    self.current_table = table
    self.tables.append(table)
    self.data.append(table.metadata)
|
Set current parsing state to table create new table object and add it to tables collection
|
6,536
|
def _reformat_matrix(self):
    """Transform a square matrix table (1 independent variable, npts
    dependent variables of npts points each) into two independent
    variables and a single dependent variable.

    Returns True on success, False if the table is not square.
    """
    nxax = len(self.current_table.data['independent_variables'])
    nyax = len(self.current_table.data['dependent_variables'])
    npts = len(self.current_table.data['dependent_variables'][0]['values'])
    # Only applies to a square matrix: one x-axis, npts y-axes.
    if nxax != 1 or nyax != npts or npts < 2:
        return False
    # Pick the second x-header if present, else reuse the first.
    if len(self.current_table.xheaders) == 2:
        xheader = self.current_table.xheaders[1]
    else:
        xheader = copy.deepcopy(
            self.current_table.data['independent_variables'][0]['header'])
    self.current_table.data['independent_variables'].append(
        {'header': xheader, 'values': []})
    # Expand both independent-variable value lists so each (x1, x2)
    # pair is enumerated.
    for value in self.current_table.data['independent_variables'][0]['values']:
        self.current_table.data['independent_variables'][1]['values'].extend(
            [copy.deepcopy(value) for npt in range(npts)])
    self.current_table.data['independent_variables'][0]['values'] = [
        copy.deepcopy(value) for npt in range(npts)
        for value in self.current_table.data['independent_variables'][0]['values']]
    # Blank the y-header if the columns disagree on it.
    if self.current_table.data['dependent_variables'][0]['header'] != \
            self.current_table.data['dependent_variables'][1]['header']:
        self.current_table.data['dependent_variables'][0]['header'] = {'name': ''}
    # Drop qualifiers that differ between the first two columns.
    iqdel = []
    for iq, qualifier in enumerate(
            self.current_table.data['dependent_variables'][0]['qualifiers']):
        if qualifier != self.current_table.data['dependent_variables'][1]['qualifiers'][iq]:
            iqdel.append(iq)
    for iq in iqdel[::-1]:
        del self.current_table.data['dependent_variables'][0]['qualifiers'][iq]
    # Concatenate all columns into the first, then delete the rest.
    for iy in range(1, nyax):
        for value in self.current_table.data['dependent_variables'][iy]['values']:
            self.current_table.data['dependent_variables'][0]['values'].append(value)
    for iy in range(nyax - 1, 0, -1):
        del self.current_table.data['dependent_variables'][iy]
    return True
|
Transform a square matrix into a format with two independent variables and one dependent variable .
|
6,537
|
def _parse_qual ( self , data ) : list = [ ] headers = data . split ( ':' ) name = headers [ 0 ] . strip ( ) name = re . split ( ' IN ' , name , flags = re . I ) units = None if len ( name ) > 1 : units = name [ 1 ] . strip ( ) name = name [ 0 ] . strip ( ) if len ( headers ) < 2 : raise BadFormat ( "*qual line must contain a name and values: %s" % data ) for header in headers [ 1 : ] : xheader = { 'name' : name } if units : xheader [ 'units' ] = units xheader [ 'value' ] = header . strip ( ) list . append ( xheader ) if name . startswith ( 'SQRT(S)' ) and lower ( units ) in ( 'gev' ) : energies = re . split ( ' TO ' , xheader [ 'value' ] , flags = re . I ) for energy in energies : try : energy = float ( energy ) self . set_of_energies . add ( energy ) except : pass self . current_table . qualifiers . append ( list )
|
Parse qual attribute of the old HEPData format
|
6,538
|
def _strip_comments ( line ) : if line == '' : return line r = re . search ( '(?P<line>[^#]*)(#(?P<comment>.*))?' , line ) if r : line = r . group ( 'line' ) if not line . endswith ( '\n' ) : line += '\n' return line return '\n'
|
Processes line stripping any comments from it
|
6,539
|
def _bind_set_table_metadata ( self , key , multiline = False ) : def set_table_metadata ( self , data ) : if multiline : data = self . _read_multiline ( data ) if key == 'location' and data : data = 'Data from ' + data self . current_table . metadata [ key ] = data . strip ( ) return set_table_metadata . __get__ ( self )
|
Returns parsing function which will parse data as text and add it to the table metadata dictionary with the provided key
|
6,540
|
def _bind_parse_additional_data ( self , key , multiline = False ) : def _set_additional_data_bound ( self , data ) : if multiline : data = self . _read_multiline ( data ) if key not in self . additional_data : self . additional_data [ key ] = [ ] self . additional_data [ key ] . append ( data ) return _set_additional_data_bound . __get__ ( self )
|
Returns parsing function which will parse data as text and add it to the table additional data dictionary with the provided key
|
6,541
|
def error_value_processor(value, error):
    """If *error* is a percentage string, return that percentage of
    *value*; numeric strings become floats; '' becomes 0.0; anything
    unparseable is returned untouched.

    Fixes: the original tested ``isinstance(error, (str, unicode))``,
    which raises NameError on Python 3 where ``unicode`` no longer
    exists; the bare ``except`` now catches only ValueError.
    """
    if isinstance(error, str):
        try:
            if "%" in error:
                error_float = float(error.replace("%", ""))
                return (value / 100) * error_float
            elif error == "":
                error = 0.0
            else:
                error = float(error)
        except ValueError:
            # Leave non-numeric strings unchanged (best-effort parse).
            pass
    return error
|
If an error is a percentage we convert to a float then calculate the percentage of the supplied value .
|
6,542
|
def send_msg(self, text, channel, confirm=True):
    """Send *text* to *channel* over the Slack RTM socket and return
    the SlackMsg; with confirm=True, wait for the server ack and mark
    the message sent."""
    self._send_id += 1
    msg = SlackMsg(self._send_id, channel.id, text)
    self.ws.send(msg.json)
    self._stats['messages_sent'] += 1
    if confirm:
        for event in self.events():
            if event.get('reply_to') == self._send_id:
                msg.sent = True
                msg.ts = event.ts
                break
    return msg
|
Send a message to a channel or group via Slack RTM socket returning the resulting message object
|
6,543
|
def _process_event(self, event):
    """Replace raw user/channel ids on *event* with resolved objects
    and flag events that mention this client's user."""
    user_id = event.get('user')
    if user_id:
        event.user = self.lookup_user(user_id)
    channel_id = event.get('channel')
    if channel_id:
        event.channel = self.lookup_channel(channel_id)
    if self.user.id in event.mentions:
        event.mentions_me = True
    event.mentions = [self.lookup_user(uid) for uid in event.mentions]
    return event
|
Extend event object with User and Channel objects
|
6,544
|
def get_token_stream(source: str) -> CommonTokenStream:
    """Build an ANTLR CommonTokenStream over *source* using LuaLexer."""
    return CommonTokenStream(LuaLexer(InputStream(source)))
|
Get the antlr token stream .
|
6,545
|
def get_evolution_stone(self, slug):
    """Fetch details of the evolution stone identified by *slug*."""
    return self.make_request(self.BASE_URL + '/evolution-stone/' + slug)
|
Returns a Evolution Stone object containing the details about the evolution stone .
|
6,546
|
def get_league(self, slug):
    """Fetch details of the Pokemon League identified by *slug*."""
    return self.make_request(self.BASE_URL + '/league/' + slug)
|
Returns a Pokemon League object containing the details about the league .
|
6,547
|
def get_pokemon_by_name(self, name):
    """Fetch all forms of the Pokemon specified by *name*."""
    return self.make_request(self.BASE_URL + '/pokemon/' + str(name))
|
Returns an array of Pokemon objects containing all the forms of the Pokemon specified the name of the Pokemon .
|
6,548
|
def get_pokemon_by_number(self, number):
    """Fetch all forms of the Pokemon with Pokedex number *number*."""
    return self.make_request(self.BASE_URL + '/pokemon/' + str(number))
|
Returns an array of Pokemon objects containing all the forms of the Pokemon specified the Pokedex number .
|
6,549
|
def _get_keyid(keytype, scheme, key_value, hash_algorithm='sha256'):
    """Return the keyid: the hex digest of the canonical form of the
    public-key metadata."""
    public_meta = format_keyval_to_metadata(keytype, scheme, key_value,
                                            private=False)
    canonical = securesystemslib.formats.encode_canonical(public_meta)
    digest = securesystemslib.hash.digest(hash_algorithm)
    digest.update(canonical.encode('utf-8'))
    return digest.hexdigest()
|
Return the keyid of key_value .
|
6,550
|
def get_concrete_class(cls, class_name):
    """Return the subclass of *cls* whose name matches *class_name*
    case-insensitively, searching the subclass tree depth-first.

    Fixes: uses str.lower() instead of the Python-2-only global
    ``lower``; tidies the malformed quoting in the error message.

    Raises ValueError when no matching subclass exists.
    """
    wanted = class_name.lower()

    def _search(base):
        # Depth-first walk over the subclass tree.
        for subclass in base.__subclasses__():
            if subclass.__name__.lower() == wanted:
                return subclass
            found = _search(subclass)
            if found is not None:
                return found
        return None

    result = _search(cls)
    if result is None:
        raise ValueError("class_name '%s' is invalid" % class_name)
    return result
|
This method provides easier access to all writers inheriting Writer class
|
6,551
|
def GetPupil(self):
    """Retrieve pupil data from the lens data editor as a named tuple."""
    fields = ['ZemaxApertureType', 'ApertureValue', 'entrancePupilDiameter',
              'entrancePupilPosition', 'exitPupilDiameter',
              'exitPupilPosition', 'ApodizationType', 'ApodizationFactor']
    pupil_data = _co.namedtuple('pupil_data', fields)
    return pupil_data(*self._ilensdataeditor.GetPupil())
|
Retrieve pupil data
|
6,552
|
def _groups_of(length, total_length):
    """Return an iterator of (start, stop) pairs for slicing a sequence
    of *total_length* into *length*-sized chunks (final stop is None)."""
    starts = tuple(range(0, total_length, length)) + (None,)
    return _pairwise(starts)
|
Return an iterator of tuples for slicing in length chunks .
|
6,553
|
def save(sources, targets, masked=False):
    """Evaluate the single source array chunk by chunk and copy each
    chunk's numeric result into the single target."""
    assert len(sources) == 1 and len(targets) == 1
    array, target = sources[0], targets[0]
    all_slices = _all_slices(array)
    for index in np.ndindex(*[len(slices) for slices in all_slices]):
        keys = tuple(slices[i] for slices, i in zip(all_slices, index))
        chunk = array[keys]
        target[keys] = chunk.masked_array() if masked else chunk.ndarray()
|
Save the numeric results of each source into its corresponding target .
|
6,554
|
def count(a, axis=None):
    """Count the non-masked elements of *a* along a single axis."""
    axes = _normalise_axis(axis, a)
    if axes is None or len(axes) != 1:
        msg = "This operation is currently limited to a single axis"
        raise AxisSupportError(msg)
    return _Aggregation(a, axes[0], _CountStreamsHandler,
                        _CountMaskedStreamsHandler, np.dtype('i'), {})
|
Count the non - masked elements of the array along the given axis .
|
6,555
|
def min(a, axis=None):
    """Request the minimum of an Array over a single axis.

    Fixes: raises AxisSupportError (matching count/mean/std/var in
    this module) instead of a bare assert, which is stripped under
    ``python -O``.
    """
    axes = _normalise_axis(axis, a)
    if axes is None or len(axes) != 1:
        msg = "This operation is currently limited to a single axis"
        raise AxisSupportError(msg)
    return _Aggregation(a, axes[0], _MinStreamsHandler,
                        _MinMaskedStreamsHandler, a.dtype, {})
|
Request the minimum of an Array over any number of axes .
|
6,556
|
def max(a, axis=None):
    """Request the maximum of an Array over a single axis.

    Fixes: raises AxisSupportError (matching count/mean/std/var in
    this module) instead of a bare assert, which is stripped under
    ``python -O``.
    """
    axes = _normalise_axis(axis, a)
    if axes is None or len(axes) != 1:
        msg = "This operation is currently limited to a single axis"
        raise AxisSupportError(msg)
    return _Aggregation(a, axes[0], _MaxStreamsHandler,
                        _MaxMaskedStreamsHandler, a.dtype, {})
|
Request the maximum of an Array over any number of axes .
|
6,557
|
def sum(a, axis=None):
    """Request the sum of an Array over a single axis.

    Fixes: raises AxisSupportError (matching count/mean/std/var in
    this module) instead of a bare assert, which is stripped under
    ``python -O``.
    """
    axes = _normalise_axis(axis, a)
    if axes is None or len(axes) != 1:
        msg = "This operation is currently limited to a single axis"
        raise AxisSupportError(msg)
    return _Aggregation(a, axes[0], _SumStreamsHandler,
                        _SumMaskedStreamsHandler, a.dtype, {})
|
Request the sum of an Array over any number of axes .
|
6,558
|
def mean(a, axis=None, mdtol=1):
    """Request the mean of an Array over a single axis."""
    axes = _normalise_axis(axis, a)
    if axes is None or len(axes) != 1:
        msg = "This operation is currently limited to a single axis"
        raise AxisSupportError(msg)
    # Dividing by a float promotes integer dtypes to a float result dtype.
    result_dtype = (np.array([0], dtype=a.dtype) / 1.).dtype
    return _Aggregation(a, axes[0], _MeanStreamsHandler,
                        _MeanMaskedStreamsHandler, result_dtype,
                        dict(mdtol=mdtol))
|
Request the mean of an Array over any number of axes .
|
6,559
|
def std(a, axis=None, ddof=0):
    """Request the standard deviation of an Array over a single axis."""
    axes = _normalise_axis(axis, a)
    if axes is None or len(axes) != 1:
        msg = "This operation is currently limited to a single axis"
        raise AxisSupportError(msg)
    # Dividing by a float promotes integer dtypes to a float result dtype.
    result_dtype = (np.array([0], dtype=a.dtype) / 1.).dtype
    return _Aggregation(a, axes[0], _StdStreamsHandler,
                        _StdMaskedStreamsHandler, result_dtype,
                        dict(ddof=ddof))
|
Request the standard deviation of an Array over any number of axes .
|
6,560
|
def var(a, axis=None, ddof=0):
    """Request the variance of an Array over a single axis."""
    axes = _normalise_axis(axis, a)
    if axes is None or len(axes) != 1:
        msg = "This operation is currently limited to a single axis"
        raise AxisSupportError(msg)
    # Dividing by a float promotes integer dtypes to a float result dtype.
    result_dtype = (np.array([0], dtype=a.dtype) / 1.).dtype
    return _Aggregation(a, axes[0], _VarStreamsHandler,
                        _VarMaskedStreamsHandler, result_dtype,
                        dict(ddof=ddof))
|
Request the variance of an Array over any number of axes .
|
6,561
|
def _ufunc_wrapper ( ufunc , name = None ) : if not isinstance ( ufunc , np . ufunc ) : raise TypeError ( '{} is not a ufunc' . format ( ufunc ) ) ufunc_name = ufunc . __name__ ma_ufunc = getattr ( np . ma , ufunc_name , None ) if ufunc . nin == 2 and ufunc . nout == 1 : func = _dual_input_fn_wrapper ( 'np.{}' . format ( ufunc_name ) , ufunc , ma_ufunc , name ) elif ufunc . nin == 1 and ufunc . nout == 1 : func = _unary_fn_wrapper ( 'np.{}' . format ( ufunc_name ) , ufunc , ma_ufunc , name ) else : raise ValueError ( 'Unsupported ufunc {!r} with {} input arrays & {} ' 'output arrays.' . format ( ufunc_name , ufunc . nin , ufunc . nout ) ) return func
|
A function to generate the top level biggus ufunc wrappers .
|
6,562
|
def _sliced_shape(shape, keys):
    """Return the shape resulting from indexing an array of *shape*
    with *keys*: scalars drop an axis, np.newaxis inserts a length-1
    axis, boolean arrays count their True entries."""
    keys = _full_keys(keys, len(shape))
    result = []
    dim = -1
    for key in keys:
        dim += 1
        if _is_scalar(key):
            continue
        elif isinstance(key, slice):
            result.append(len(range(*key.indices(shape[dim]))))
        elif isinstance(key, np.ndarray) and key.dtype == np.dtype('bool'):
            result.append(builtins.sum(key))
        elif isinstance(key, (tuple, np.ndarray)):
            result.append(len(key))
        elif key is np.newaxis:
            # newaxis consumes no source dimension.
            dim -= 1
            result.append(1)
        else:
            raise ValueError('Invalid indexing object "{}"'.format(key))
    return tuple(result)
|
Returns the shape that results from slicing an array of the given shape by the given keys .
|
6,563
|
def size(array):
    """Return a human-readable description of the number of bytes
    required to store the data of the given array."""
    KIB, MIB, GIB, TIB = 1 << 10, 1 << 20, 1 << 30, 1 << 40
    nbytes = array.nbytes
    if nbytes < KIB:
        return '{} B'.format(nbytes)
    if nbytes < MIB:
        return '{:.02f} KiB'.format(nbytes / KIB)
    if nbytes < GIB:
        return '{:.02f} MiB'.format(nbytes / MIB)
    if nbytes < TIB:
        return '{:.02f} GiB'.format(nbytes / GIB)
    return '{:.02f} TiB'.format(nbytes / TIB)
|
Return a human - readable description of the number of bytes required to store the data of the given array .
|
6,564
|
def output(self, chunk):
    """Dispatch *chunk* onto every registered output queue; a None
    chunk is silently dropped."""
    if chunk is None:
        return
    for output_queue in self.output_queues:
        output_queue.put(chunk)
|
Dispatch the given Chunk onto all the registered output queues .
|
6,565
|
def run(self):
    """Emit Chunk instances covering the underlying Array.

    On success each output queue receives QUEUE_FINISHED; on any
    error consumers are aborted before the exception propagates.
    """
    try:
        chunk_index = self.chunk_index_gen(self.array.shape,
                                           self.iteration_order)
        for key in chunk_index:
            # Realise this chunk as a (possibly masked) ndarray.
            if self.masked:
                data = self.array[key].masked_array()
            else:
                data = self.array[key].ndarray()
            output_chunk = Chunk(key, data)
            self.output(output_chunk)
    except:
        # Tell consumers to stop before re-raising.
        self.abort()
        raise
    else:
        for queue in self.output_queues:
            queue.put(QUEUE_FINISHED)
|
Emit the Chunk instances which cover the underlying Array .
|
6,566
|
def add_input_nodes(self, input_nodes):
    """Register *input_nodes* as producers for this node, giving each
    one a bounded queue (maxsize=3) to feed this node through."""
    self.input_queues = [queue.Queue(maxsize=3) for _ in input_nodes]
    for producer, producer_queue in zip(input_nodes, self.input_queues):
        producer.add_output_queue(producer_queue)
|
Set the given nodes as inputs for this node .
|
6,567
|
def run(self):
    """Process the input queues in lock-step, pushing results to the
    registered output queues.

    QUEUE_ABORT on any input aborts downstream consumers and stops;
    QUEUE_FINISHED on any input ends the loop, after which the
    finalised result is emitted and completion is signalled.
    """
    try:
        while True:
            # One item from every input queue, in lock-step.
            input_chunks = [input.get() for input in self.input_queues]
            for input in self.input_queues:
                input.task_done()
            if any(chunk is QUEUE_ABORT for chunk in input_chunks):
                self.abort()
                return
            if any(chunk is QUEUE_FINISHED for chunk in input_chunks):
                break
            self.output(self.process_chunks(input_chunks))
        # All inputs exhausted: flush any pending result.
        self.output(self.finalise())
    except:
        self.abort()
        raise
    else:
        for queue in self.output_queues:
            queue.put(QUEUE_FINISHED)
|
Process the input queues in lock - step and push any results to the registered output queues .
|
6,568
|
def process_chunks(self, chunks):
    """Store the single incoming chunk at its position in the result
    array (empty keys write the whole array)."""
    chunk, = chunks
    target_key = chunk.keys if chunk.keys else Ellipsis
    self.result[target_key] = chunk.data
|
Store the incoming chunk at the corresponding position in the result array .
|
6,569
|
def _cleanup_new_key(self, key, size, axis):
    """Return *key* (int, slice, boolean array or iterable of ints)
    validated against an axis of length *size*.

    Fixes: ``collections.Iterable`` was removed in Python 3.10; use
    ``collections.abc.Iterable`` (imported locally as the module
    import block is not visible here).

    Raises IndexError for out-of-range indices and TypeError for
    unsupported key types.
    """
    from collections.abc import Iterable
    if _is_scalar(key):
        if key >= size or key < -size:
            msg = 'index {0} is out of bounds for axis {1} with' \
                  ' size {2}'.format(key, axis, size)
            raise IndexError(msg)
    elif isinstance(key, slice):
        pass
    elif isinstance(key, np.ndarray) and key.dtype == np.dtype('bool'):
        if key.size > size:
            msg = 'too many boolean indices. Boolean index array ' \
                  'of size {0} is greater than axis {1} with ' \
                  'size {2}'.format(key.size, axis, size)
            raise IndexError(msg)
    elif isinstance(key, Iterable) and \
            not isinstance(key, six.string_types):
        key = tuple(key)
        for sub_key in key:
            if sub_key >= size or sub_key < -size:
                msg = 'index {0} is out of bounds for axis {1}' \
                      ' with size {2}'.format(sub_key, axis, size)
                raise IndexError(msg)
    else:
        raise TypeError('invalid key {!r}'.format(key))
    return key
|
Return a key of type int slice or tuple that is guaranteed to be valid for the given dimension size .
|
6,570
|
def _remap_new_key(self, indices, new_key, axis):
    """Compose *new_key* with the existing *indices* selection for *axis*.

    *indices* is the previously selected index sequence; *new_key*
    (int, slice, bool ndarray or iterable) is interpreted relative to
    that sequence, and the combined key into the original axis is
    returned.  Raises IndexError / TypeError like _cleanup_new_key.
    """
    size = len(indices)
    if _is_scalar(new_key):
        # Integer relative index — bounds-check against the current
        # selection, then translate to the original-axis index.
        if new_key >= size or new_key < -size:
            msg = 'index {0} is out of bounds for axis {1}' ' with size {2}'.format(new_key, axis, size)
            raise IndexError(msg)
        result_key = indices[new_key]
    elif isinstance(new_key, slice):
        # Slicing the stored indices yields the composed selection.
        result_key = indices.__getitem__(new_key)
    elif isinstance(new_key, np.ndarray) and new_key.dtype == np.dtype('bool'):
        if new_key.size > size:
            msg = 'too many boolean indices. Boolean index array ' 'of size {0} is greater than axis {1} with ' 'size {2}'.format(new_key.size, axis, size)
            raise IndexError(msg)
        # Boolean mask selects from the stored indices.
        result_key = tuple(np.array(indices)[new_key])
    elif isinstance(new_key, collections.Iterable) and not isinstance(new_key, six.string_types):
        # NOTE(review): collections.Iterable removed in Python 3.10 —
        # see _cleanup_new_key.
        new_key = tuple(new_key)
        for sub_key in new_key:
            if sub_key >= size or sub_key < -size:
                msg = 'index {0} is out of bounds for axis {1}' ' with size {2}'.format(sub_key, axis, size)
                raise IndexError(msg)
        result_key = tuple(indices[key] for key in new_key)
    else:
        raise TypeError('invalid key {!r}'.format(new_key))
    return result_key
|
Return a key of type int slice or tuple that represents the combination of new_key with the given indices .
|
6,571
|
def _apply_axes_mapping ( self , target , inverse = False ) : if len ( target ) != self . ndim : raise ValueError ( 'The target iterable is of length {}, but ' 'should be of length {}.' . format ( len ( target ) , self . ndim ) ) if inverse : axis_map = self . _inverse_axes_map else : axis_map = self . _forward_axes_map result = [ None ] * self . ndim for axis , item in enumerate ( target ) : result [ axis_map [ axis ] ] = item return tuple ( result )
|
Apply the transposition to the target iterable .
|
6,572
|
def output_keys(self, source_keys):
    """Return *source_keys* with the entry for ``self.axis`` removed.

    The input chunk keys minus the reduced axis give the position the
    result occupies in the output array.
    """
    remaining = list(source_keys)
    remaining.pop(self.axis)
    return tuple(remaining)
|
Given input chunk keys compute what keys will be needed to put the result into the result array .
|
6,573
|
def zDDEInit(self):
    """Initiate the DDE link with the OpticStudio server.

    Creates the class-wide DDE server on first use, then opens a
    conversation with ``self.appName``.  Returns 0 on success, -1 if
    the server cannot be created or the UI is not running.
    """
    self.pyver = _get_python_version()
    # Only the first live channel creates the shared DDE server object.
    if _PyZDDE.liveCh == 0:
        try:
            _PyZDDE.server = _dde.CreateServer()
            _PyZDDE.server.Create("ZCLIENT")
        except Exception as err:
            _sys.stderr.write("{}: DDE server may be in use!".format(str(err)))
            return -1
    self.conversation = _dde.CreateConversation(_PyZDDE.server)
    try:
        self.conversation.ConnectTo(self.appName, " ")
    except Exception as err:
        _sys.stderr.write("{}.\nOpticStudio UI may not be running!\n".format(str(err)))
        # Clean up the half-open link before reporting failure.
        self.zDDEClose()
        return -1
    else:
        # Track the number of live channels sharing the server.
        _PyZDDE.liveCh += 1
        self.connection = True
    return 0
|
Initiates link with OpticStudio DDE server
|
6,574
|
def zDDEClose(self):
    """Close the DDE link with the Zemax server.

    Handles three states: a server left over with no live channels, the
    last live connection (full teardown), and an intermediate connection
    (decrement the shared channel count only).  Always returns 0.
    """
    if _PyZDDE.server and not _PyZDDE.liveCh:
        # Server exists but no channel ever connected (failed init).
        _PyZDDE.server.Shutdown(self.conversation)
        _PyZDDE.server = 0
    elif _PyZDDE.server and self.connection and _PyZDDE.liveCh == 1:
        # Last live channel: tear everything down.
        _PyZDDE.server.Shutdown(self.conversation)
        self.connection = False
        self.appName = ''
        _PyZDDE.liveCh -= 1
        _PyZDDE.server = 0
    elif self.connection:
        # Other channels remain: release only this conversation.
        _PyZDDE.server.Shutdown(self.conversation)
        self.connection = False
        self.appName = ''
        _PyZDDE.liveCh -= 1
    return 0
|
Close the DDE link with Zemax server
|
6,575
|
def setTimeout(self, time):
    """Set the global DDE timeout in seconds and return the value the
    conversation actually applied (rounded to the nearest second)."""
    seconds = round(time)
    self.conversation.SetDDETimeout(seconds)
    return self.conversation.GetDDETimeout()
|
Set global timeout value in seconds for all DDE calls
|
6,576
|
def _sendDDEcommand ( self , cmd , timeout = None ) : reply = self . conversation . Request ( cmd , timeout ) if self . pyver > 2 : reply = reply . decode ( 'ascii' ) . rstrip ( ) return reply
|
Send command to DDE client
|
6,577
|
def zGetUpdate(self):
    """Update the lens in the DDE server.

    Returns the server's integer status code (0 on success), or -998
    when no reply was received.
    """
    status = -998
    ret = self._sendDDEcommand("GetUpdate")
    # Fixed: identity comparison with None (was `ret != None`), per PEP 8.
    if ret is not None:
        status = int(ret)
    return status
|
Update the lens
|
6,578
|
def zLoadFile(self, fileName, append=None):
    """Load a .zmx file into the DDE server.

    If *append* is truthy it is passed as the LoadFile append argument.
    Returns the server's integer reply, or -998 when no reply arrives.
    """
    if append:
        cmd = "LoadFile,{},{}".format(fileName, append)
    else:
        cmd = "LoadFile,{}".format(fileName)
    reply = self._sendDDEcommand(cmd)
    return int(reply) if reply else -998
|
Loads a zmx file into the DDE server
|
6,579
|
def zPushLens(self, update=None, timeout=None):
    """Copy the lens in the Zemax DDE server into the LDE.

    *update* of 1 sends 'PushLens,1'; 0 or None sends 'PushLens,0';
    anything else raises ValueError.  Returns the server's integer
    reply, or -998 when no reply arrives.
    """
    if update == 1:
        reply = self._sendDDEcommand('PushLens,1', timeout)
    elif update == 0 or update is None:
        reply = self._sendDDEcommand('PushLens,0', timeout)
    else:
        raise ValueError('Invalid value for flag')
    return int(reply) if reply else -998
|
Copy lens in the Zemax DDE server into LDE
|
6,580
|
def zSaveFile(self, fileName):
    """Save the lens currently loaded in the server to a Zemax file.

    Returns the server's reply converted to int.
    """
    reply = self._sendDDEcommand("SaveFile,{}".format(fileName))
    # Reply may carry trailing whitespace and a float representation.
    return int(float(reply.rstrip()))
|
Saves the lens currently loaded in the server to a Zemax file
|
6,581
|
def zSyncWithUI(self):
    """Turn on sync-with-UI mode.

    Lazily creates the class-wide DDE link and the temporary sync file
    used to shuttle the system between the COM server and the UI, then
    flags this instance as synchronised.
    """
    if not OpticalSystem._dde_link:
        # Shared across all OpticalSystem instances.
        OpticalSystem._dde_link = _get_new_dde_link()
    if not self._sync_ui_file:
        self._sync_ui_file = _get_sync_ui_filename()
    self._sync_ui = True
|
Turn on sync - with - ui
|
6,582
|
def zPushLens(self, update=None):
    """Push the lens in the ZOS COM server to the UI.

    The system is round-tripped through the sync file: saved from the
    COM server, loaded by the DDE link, then pushed into the LDE with
    the given *update* flag.
    """
    self.SaveAs(self._sync_ui_file)
    OpticalSystem._dde_link.zLoadFile(self._sync_ui_file)
    OpticalSystem._dde_link.zPushLens(update)
|
Push lens in ZOS COM server to UI
|
6,583
|
def zGetRefresh(self):
    """Copy the lens in the UI into the headless ZOS COM server.

    Mirrors zPushLens in the other direction: refresh the DDE server
    from the UI, save to the sync file, then load that file into the
    COM system (second argument False — presumably 'saveIfNeeded';
    TODO confirm against the ZOS-API docs).
    """
    OpticalSystem._dde_link.zGetRefresh()
    OpticalSystem._dde_link.zSaveFile(self._sync_ui_file)
    self._iopticalsystem.LoadFile(self._sync_ui_file, False)
|
Copy lens in UI to headless ZOS COM server
|
6,584
|
def SaveAs(self, filename):
    """Save the current system to *filename*.

    Internal pyzos sync files are saved without being remembered; any
    other filename must live in an existing directory (ValueError
    otherwise) and becomes the target for subsequent Save() calls.
    """
    directory, zfile = _os.path.split(filename)
    if zfile.startswith('pyzos_ui_sync_file'):
        # UI-sync temp file: pass straight through.
        self._iopticalsystem.SaveAs(filename)
        return
    if not _os.path.exists(directory):
        raise ValueError('{} is not valid.'.format(directory))
    self._file_to_save_on_Save = filename  # remembered for Save()
    self._iopticalsystem.SaveAs(filename)
|
Saves the current system to the specified file .
|
6,585
|
def Save(self):
    """Save the current system, preferring the filename remembered by a
    previous SaveAs(); otherwise delegate to the system's own Save()."""
    target = self._file_to_save_on_Save
    if target:
        self._iopticalsystem.SaveAs(target)
    else:
        self._iopticalsystem.Save()
|
Saves the current system
|
6,586
|
def zGetSurfaceData(self, surfNum):
    """Return data for surface *surfNum* as a namedtuple.

    Fields: radius, thick, material, semidia, conic, comment.
    Only implemented for sequential mode (pMode == 0); raises
    NotImplementedError otherwise.
    """
    if self.pMode != 0:
        raise NotImplementedError('Function not implemented for non-sequential mode')
    surf_data = _co.namedtuple('surface_data',
                               ['radius', 'thick', 'material',
                                'semidia', 'conic', 'comment'])
    surf = self.pLDE.GetSurfaceAt(surfNum)
    return surf_data(surf.pRadius, surf.pThickness, surf.pMaterial,
                     surf.pSemiDiameter, surf.pConic, surf.pComment)
|
Return surface data
|
6,587
|
def zSetSurfaceData(self, surfNum, radius=None, thick=None, material=None,
                    semidia=None, conic=None, comment=None):
    """Set surface data on surface *surfNum*.

    Only parameters that are not None are written.  Sequential mode
    (pMode == 0) only; raises NotImplementedError otherwise.
    """
    if self.pMode != 0:
        raise NotImplementedError('Function not implemented for non-sequential mode')
    surf = self.pLDE.GetSurfaceAt(surfNum)
    updates = (('pRadius', radius), ('pThickness', thick),
               ('pMaterial', material), ('pSemiDiameter', semidia),
               ('pConic', conic), ('pComment', comment))
    for attr, val in updates:
        if val is not None:
            setattr(surf, attr, val)
|
Sets surface data
|
6,588
|
def zSetDefaultMeritFunctionSEQ(self, ofType=0, ofData=0, ofRef=0, pupilInteg=0,
                                rings=0, arms=0, obscuration=0, grid=0,
                                delVignetted=False, useGlass=False, glassMin=0,
                                glassMax=1000, glassEdge=0, useAir=False,
                                airMin=0, airMax=1000, airEdge=0, axialSymm=True,
                                ignoreLatCol=False, addFavOper=False, startAt=1,
                                relativeXWgt=1.0, overallWgt=1.0, configNum=0):
    """Configure and apply the default merit function via the Sequential
    Optimization Wizard of the Merit Function Editor.

    Each parameter maps one-to-one onto a wizard property; the wizard is
    committed with OK() at the end.  The integer codes (ofType, ofData,
    pupilInteg, grid, ...) follow the ZOS-API wizard enumerations —
    see the OpticStudio ZOS-API documentation for their meanings.
    """
    mfe = self.pMFE
    wizard = mfe.pSEQOptimizationWizard
    # Optimization function selection.
    wizard.pType = ofType
    wizard.pData = ofData
    wizard.pReference = ofRef
    # Pupil sampling.
    wizard.pPupilIntegrationMethod = pupilInteg
    wizard.pRing = rings
    wizard.pArm = arms
    wizard.pObscuration = obscuration
    wizard.pGrid = grid
    wizard.pIsDeleteVignetteUsed = delVignetted
    # Glass thickness boundary constraints.
    wizard.pIsGlassUsed = useGlass
    wizard.pGlassMin = glassMin
    wizard.pGlassMax = glassMax
    wizard.pGlassEdge = glassEdge
    # Air thickness boundary constraints.
    wizard.pIsAirUsed = useAir
    wizard.pAirMin = airMin
    wizard.pAirMax = airMax
    wizard.pAirEdge = airEdge
    # Miscellaneous wizard options.
    wizard.pIsAssumeAxialSymmetryUsed = axialSymm
    wizard.pIsIgnoreLateralColorUsed = ignoreLatCol
    wizard.pConfiguration = configNum
    wizard.pIsAddFavoriteOperandsUsed = addFavOper
    wizard.pStartAt = startAt
    wizard.pRelativeXWeight = relativeXWgt
    wizard.pOverallWeight = overallWgt
    # Commit the settings (builds the merit function).
    wizard.CommonSettings.OK()
|
Sets the default merit function for Sequential Merit Function Editor
|
6,589
|
def process_error_labels(value):
    """Make the error labels of a dependent-variable *value* unique.

    Duplicate labels are suffixed with an occurrence counter
    (``label_2``, ``label_3``, ...); when the first duplicate is seen,
    the original occurrence is retroactively renamed to ``label_1``.
    Entries without a 'label' key default to the label 'error'.
    """
    counts = {}
    for error in value.get('errors', []):
        label = error.get('label', 'error')
        counts[label] = counts.get(label, 0) + 1
        if counts[label] > 1:
            error['label'] = label + '_' + str(counts[label])
        if counts[label] == 2:
            # Second occurrence just seen: rename the first one, which
            # still carries the bare label (the current entry was
            # already renamed above, so it cannot match).
            for earlier in value.get('errors', []):
                if earlier.get('label', 'error') == label:
                    earlier['label'] = label + "_1"
                    break
|
Process the error labels of a dependent variable value to ensure uniqueness .
|
6,590
|
def raw(text):
    """Return a raw-string representation of *text*, replacing each
    character found in ``escape_dict`` with its escaped spelling."""
    pieces = []
    for ch in text:
        try:
            pieces.append(escape_dict[ch])
        except KeyError:
            pieces.append(ch)
    return ''.join(pieces)
|
Returns a raw string representation of text
|
6,591
|
def WinMSGLoop():
    """Run the main Windows message loop.

    Pumps messages with GetMessageW / TranslateMessage / DispatchMessageW
    until GetMessage returns <= 0 (WM_QUIT or error).  Windows-only;
    relies on ctypes wrappers resolved through get_winfunc().
    """
    # Function prototypes for the user32 message APIs.
    LPMSG = POINTER(MSG)
    LRESULT = c_ulong
    GetMessage = get_winfunc("user32", "GetMessageW", BOOL, (LPMSG, HWND, UINT, UINT))
    TranslateMessage = get_winfunc("user32", "TranslateMessage", BOOL, (LPMSG,))
    DispatchMessage = get_winfunc("user32", "DispatchMessageW", LRESULT, (LPMSG,))
    msg = MSG()
    lpmsg = byref(msg)
    # GetMessage returns 0 on WM_QUIT and -1 on error; loop while > 0.
    while GetMessage(lpmsg, HWND(), 0, 0) > 0:
        TranslateMessage(lpmsg)
        DispatchMessage(lpmsg)
|
Run the main windows message loop .
|
6,592
|
def Request(self, item, timeout=None):
    """Request *item* from the DDE client; *timeout* is in seconds.

    DDE exceptions are handled here rather than propagated: on a data
    acknowledgement timeout a hint is printed and the sentinel '-998'
    (bytes on Python 3) is returned instead of raising.
    """
    if not timeout:
        # Fall back to the instance-wide default timeout.
        timeout = self.ddetimeout
    try:
        # Underlying client expects milliseconds.
        reply = self.ddec.request(item, int(timeout * 1000))
    except DDEError:
        # Extract the hex error code from the exception text ("err=0x...").
        err_str = str(sys.exc_info()[1])
        error = err_str[err_str.find('err=') + 4: err_str.find('err=') + 10]
        if error == hex(DMLERR_DATAACKTIMEOUT):
            print("TIMEOUT REACHED. Please use a higher timeout.\n")
        # Sentinel matching the bytes/str reply type of each Python version.
        if (sys.version_info > (3, 0)):
            reply = b'-998'
        else:
            reply = '-998'
    return reply
|
Request *item* from the DDE client, with the timeout given in seconds. Note: DDE exceptions are handled within this function rather than propagated to the caller.
|
6,593
|
def advise(self, item, stop=False):
    """Request (or stop) advise updates when the DDE *item* changes.

    Starts an XTYP_ADVSTART transaction, or XTYP_ADVSTOP when
    ``stop=True``.  Raises DDEError if the transaction fails.
    """
    hszItem = DDE.CreateStringHandle(self._idInst, item, CP_WINUNICODE)
    hDdeData = DDE.ClientTransaction(LPBYTE(), 0, self._hConv, hszItem,
                                     CF_TEXT,
                                     XTYP_ADVSTOP if stop else XTYP_ADVSTART,
                                     TIMEOUT_ASYNC, LPDWORD())
    # String handle must be freed whether or not the transaction succeeded.
    DDE.FreeStringHandle(self._idInst, hszItem)
    if not hDdeData:
        raise DDEError("Unable to %s advise" % ("stop" if stop else "start"),
                       self._idInst)
    DDE.FreeDataHandle(hDdeData)
|
Request updates when DDE data changes .
|
6,594
|
def execute(self, command):
    """Execute a DDE command asynchronously (XTYP_EXECUTE).

    Raises DDEError if the transaction cannot be started.
    NOTE(review): c_char_p(command) requires *command* to be bytes on
    Python 3 — confirm callers encode before calling.
    """
    pData = c_char_p(command)
    cbData = DWORD(len(command) + 1)  # +1 for the trailing NUL byte
    hDdeData = DDE.ClientTransaction(pData, cbData, self._hConv, HSZ(),
                                     CF_TEXT, XTYP_EXECUTE, TIMEOUT_ASYNC,
                                     LPDWORD())
    if not hDdeData:
        raise DDEError("Unable to send command", self._idInst)
    DDE.FreeDataHandle(hDdeData)
|
Execute a DDE command .
|
6,595
|
def request(self, item, timeout=5000):
    """Request data for *item* from the DDE service (XTYP_REQUEST).

    *timeout* is in milliseconds; with TIMEOUT_ASYNC the call is
    asynchronous and None is returned.  Raises DDEError on failure.
    """
    hszItem = DDE.CreateStringHandle(self._idInst, item, CP_WINUNICODE)
    pdwResult = DWORD(0)
    hDdeData = DDE.ClientTransaction(LPBYTE(), 0, self._hConv, hszItem,
                                     CF_TEXT, XTYP_REQUEST, timeout,
                                     byref(pdwResult))
    DDE.FreeStringHandle(self._idInst, hszItem)
    if not hDdeData:
        raise DDEError("Unable to request item", self._idInst)
    if timeout != TIMEOUT_ASYNC:
        pdwSize = DWORD(0)
        pData = DDE.AccessData(hDdeData, byref(pdwSize))
        if not pData:
            DDE.FreeDataHandle(hDdeData)
            raise DDEError("Unable to access data in request function",
                           self._idInst)
        # NOTE(review): pData is returned after the data handle is freed
        # below — confirm callers copy the buffer before further DDE calls.
        DDE.UnaccessData(hDdeData)
    else:
        pData = None
    DDE.FreeDataHandle(hDdeData)
    return pData
|
Request data from DDE service .
|
6,596
|
def get_molo_comments(parser, token):
    """Return a template node yielding a limited set of comments.

    Syntax:
        {% get_molo_comments for <object> as <var> [limit <n>] [child_limit <m>] %}

    Defaults to a limit of 5; a limit of -1 disables limiting.
    Raises TemplateSyntaxError on malformed tags.
    """
    keywords = token.contents.split()
    if len(keywords) != 5 and len(keywords) != 7 and len(keywords) != 9:
        raise template.TemplateSyntaxError(
            "'%s' tag takes exactly 2,4 or 6 arguments" % (keywords[0],))
    if keywords[1] != 'for':
        raise template.TemplateSyntaxError(
            "first argument to '%s' tag must be 'for'" % (keywords[0],))
    if keywords[3] != 'as':
        # Fixed: message previously said "first argument".
        raise template.TemplateSyntaxError(
            "third argument to '%s' tag must be 'as'" % (keywords[0],))
    if len(keywords) > 5 and keywords[5] != 'limit':
        # Fixed: message previously said "third argument".
        raise template.TemplateSyntaxError(
            "fifth argument to '%s' tag must be 'limit'" % (keywords[0],))
    if len(keywords) == 7:
        return GetMoloCommentsNode(keywords[2], keywords[4], keywords[6])
    if len(keywords) > 7 and keywords[7] != 'child_limit':
        # Fixed: message previously said "third argument".
        raise template.TemplateSyntaxError(
            "seventh argument to '%s' tag must be 'child_limit'" % (keywords[0],))
    if len(keywords) > 7:
        return GetMoloCommentsNode(keywords[2], keywords[4], keywords[6],
                                   keywords[8])
    return GetMoloCommentsNode(keywords[2], keywords[4])
|
Get a limited set of comments for a given object. Defaults to a limit of 5. Setting the limit to -1 disables limiting. The optional child_limit argument sets the number of replies returned per comment.
|
6,597
|
def get_comments_content_object(parser, token):
    """Return a template node resolving the object comments attach to.

    Syntax:
        {% get_comments_content_object for <object> as <var> %}

    Raises TemplateSyntaxError on malformed tags.
    """
    keywords = token.contents.split()
    if len(keywords) != 5:
        raise template.TemplateSyntaxError(
            "'%s' tag takes exactly 2 arguments" % (keywords[0],))
    if keywords[1] != 'for':
        raise template.TemplateSyntaxError(
            "first argument to '%s' tag must be 'for'" % (keywords[0],))
    if keywords[3] != 'as':
        # Fixed: message previously said "first argument".
        raise template.TemplateSyntaxError(
            "third argument to '%s' tag must be 'as'" % (keywords[0],))
    return GetCommentsContentObject(keywords[2], keywords[4])
|
Resolve the content object that comments are attached to and store it in the given template context variable.
|
6,598
|
def report(request, comment_id):
    """Flag a comment on GET and redirect back.

    Replies cannot be reported; top-level comments are flagged via
    django_comments' perform_flag.  Redirects to ?next= or the
    comment's own URL.
    """
    comment = get_object_or_404(django_comments.get_model(), pk=comment_id,
                                site__pk=settings.SITE_ID)
    if comment.parent is not None:
        # Only top-level comments may be flagged.
        messages.info(request, _('Reporting comment replies is not allowed.'))
    else:
        perform_flag(request, comment)
        messages.info(request, _('The comment has been reported.'))
    next = request.GET.get('next') or comment.get_absolute_url()
    return HttpResponseRedirect(next)
|
Flags a comment on GET .
|
6,599
|
def post_molo_comment(request, next=None, using=None):
    """Post a Molo comment.

    Anonymous submissions ('submit_anonymously' in the POST data) get
    their name set to 'Anonymous'; the email is always filled from the
    logged-in user, falling back to a blank placeholder.  Delegates to
    django_comments' post_comment view.
    """
    data = request.POST.copy()
    if 'submit_anonymously' in data:
        data['name'] = 'Anonymous'
    data['email'] = request.user.email or 'blank@email.com'
    request.POST = data
    # Fixed: `using` was incorrectly passed as `using=next`, which sent
    # the redirect target where the database alias belongs.
    return post_comment(request, next=next, using=using)
|
Allows posting of a Molo Comment. Anonymous submissions have their name set to 'Anonymous' and their email filled from the logged-in user (or a blank placeholder).
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.