idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
59,500
def CharacterData(self, data):
    """SAX character-data event handler.

    Appends the incoming text chunk (encoded as UTF-8) onto the ``cdata``
    buffer of the element currently on top of the node stack.

    NOTE(review): written for Python 2, where ``encode`` yields ``str``.
    Under Python 3 ``data.encode`` yields ``bytes`` while ``cdata`` is
    typically ``str`` -- confirm the intended runtime before porting.
    """
    data = data.encode("utf-8")
    # The element being populated is always the most recently opened one.
    element = self.nodeStack[-1]
    element.cdata += data
    return
SAX character data event handler
59,501
def doShow(self, xml=0):
    """Show the contents of our resultset on stdout.

    With ``xml == 0`` (default) pretty-prints the error code, product and
    database info, metadata and records.  Otherwise prints the raw XML with
    newlines inserted before known FMPXMLRESULT tags for readability.

    NOTE(review): Python 2 code -- the trailing-comma ``print`` statements
    rely on py2 soft-space semantics and do not port mechanically to py3.
    """
    if xml == 0:
        print 'Errorcode:', self.errorcode
        print
        print 'Product information:'
        for key in self.product.keys():
            # trailing comma: key and value printed on the same line
            print ' ', key.encode('UTF-8'),
            print '->', self.product[key].encode('UTF-8')
        print
        print 'Database information:'
        for key in self.database.keys():
            print ' ', key.encode('UTF-8'),
            print '->', self.database[key].encode('UTF-8')
        print
        print 'Metadata:'
        for field in self.metadata.keys():
            print
            print ' ', field.encode('UTF-8')
            for property in self.metadata[field]:
                print ' ', property.encode('UTF-8'),
                print '->', self.metadata[field][property].encode('UTF-8')
        print
        print 'Records:'
        for record in self.resultset:
            print
            for column in record:
                print ' ', column.encode('UTF-8'),
                # Only unicode values need re-encoding before printing.
                if type(record[column]) == UnicodeType:
                    print '->', record[column].encode('UTF-8')
                else:
                    print '->', record[column]
    else:
        tags = ['FMPXMLRESULT', 'ERRORCODE', 'PRODUCT', 'DATABASE',
                'METADATA', 'FIELD', 'RESULTSET', 'ROW', 'COL', 'DATA']
        xml = self.data
        for tag in tags:
            # Insert line breaks before closing and opening known tags.
            xml = string.replace(xml, '></' + tag, '>\n</' + tag)
            xml = string.replace(xml, '><' + tag, '>\n<' + tag)
        print xml
Shows the contents of our resultset .
59,502
def _setSkipRecords ( self , skipRec ) : if type ( skipRec ) == int or ( type ( skipRec ) == str and skipRec . isdigit ( ) ) : self . _skipRecords = skipRec else : raise FMError , 'Unsupported -skip value (not a number).'
Specifies how many records to skip in the found set
59,503
def _setLogicalOperator ( self , lop ) : if not lop . lower ( ) in [ 'and' , 'or' ] : raise FMError , 'Unsupported logical operator (not one of "and" or "or").' self . _lop = lop . lower ( )
Sets the way the find fields should be combined together .
59,504
def _setComparasionOperator ( self , field , oper ) : if oper != '' : validOperators = { 'eq' : 'eq' , 'equals' : 'eq' , '=' : 'eq' , '==' : 'eq' , 'cn' : 'cn' , 'contains' : 'cn' , '%%' : 'cn' , '%' : 'cn' , '*' : 'cn' , 'bw' : 'bw' , 'begins with' : 'bw' , '^' : 'bw' , 'ew' : 'ew' , 'ends with' : 'ew' , '$' : 'ew' , 'gt' : 'gt' , 'greater than' : 'gt' , '>' : 'gt' , 'gte' : 'gte' , 'greater than or equals' : 'gte' , '>=' : 'gte' , 'lt' : 'lt' , 'less than' : 'lt' , '<' : 'lt' , 'lte' : 'lte' , 'less than or equals' : 'lte' , '<=' : 'lte' , 'neq' : 'neq' , 'not equals' : 'neq' , '!=' : 'neq' , '<>' : 'neq' } if not string . lower ( oper ) in validOperators . keys ( ) : raise FMError , 'Invalid operator "' + oper + '" for "' + field + '"' oper = validOperators [ oper . lower ( ) ] self . _dbParams . append ( [ "%s.op" % field , oper ] )
Sets correct operator for given string representation
59,505
def _addDBParam ( self , name , value ) : if name [ - 4 : ] == '__OP' : return self . _setComparasionOperator ( name [ : - 4 ] , value ) if name [ - 3 : ] == '.op' : return self . _setComparasionOperator ( name [ : - 3 ] , value ) if name . find ( '__' ) != - 1 : import re name = name . replace ( '__' , '::' ) elif name . find ( '.' ) != - 1 : name = name . replace ( '.' , '::' ) self . _dbParams . append ( [ name , value ] )
Adds a database parameter
59,506
def getFile(self, file_xml_uri):
    """Fetch container-file data from the FileMaker server.

    Parses name and extension out of a '/fmi/xml/cnt/...' URI and returns
    ``(file_name, file_extension, file_binary)``.

    Fix: the original called ``.group()`` on the match object without
    checking it, so an unrecognized URI crashed with AttributeError; now a
    clear ValueError is raised instead.  (``[\\w\\d.-]`` simplified to
    ``[\\w.-]`` -- ``\\w`` already includes digits.)
    """
    find = re.match(r'/fmi/xml/cnt/([\w.-]+)\.(\w+)?-*', file_xml_uri)
    if find is None:
        raise ValueError('Unrecognized container URI: %r' % (file_xml_uri,))
    file_name = find.group(1)
    file_extension = find.group(2)
    file_binary = self._doRequest(is_file=True, file_xml_uri=file_xml_uri)
    return (file_name, file_extension, file_binary)
This will execute cmd to fetch file data from FMServer
59,507
def doScript(self, script_name, params=None, return_all=False):
    """Execute a FileMaker script on the current db/layout.

    Returns all result rows when ``return_all`` is true, otherwise the
    first row; returns None when the resultset is empty.
    """
    request = [
        uu({'-db': self._db}),
        uu({'-lay': self._layout}),
        uu({'-script': script_name}),
    ]
    if params:
        request.append(uu({'-script.param': params}))
    request.append(uu({'-findall': ''}))
    parsed = FMResultset.FMResultset(self._doRequest(request))
    try:
        if return_all:
            return parsed.resultset
        return parsed.resultset[0]
    except IndexError:
        return None
This function executes the script for given layout for the current db .
59,508
def doScriptAfter(self, func, func_kwargs=None, script_name='', params=None):
    """Register an extra script to run after *func*, then call it.

    Stores the -script request in ``self._extra_script`` and returns
    ``func(**func_kwargs)``.

    Fix: the original used the mutable default ``func_kwargs={}``, which is
    shared between calls and can leak state.
    """
    if func_kwargs is None:
        func_kwargs = {}
    request = [uu({'-script': script_name})]
    if params:
        request.append(uu({'-script.param': params}))
    self._extra_script = request
    return func(**func_kwargs)
This function will execute extra script after passed function
59,509
def getDbNames(self):
    """Return the list of open database names, lower-cased.

    Modernized from the long-removed ``string.lower`` module function to
    the ``str.lower`` method, and from append-loop to a comprehension.
    """
    request = [uu({'-dbnames': ''})]
    result = FMResultset.FMResultset(self._doRequest(request))
    return [row['DATABASE_NAME'].lower() for row in result.resultset]
This function returns the list of open databases
59,510
def doFind(self, WHAT={}, SORT=[], SKIP=None, MAX=None, LOP='AND', **params):
    """Perform the -find command.

    Returns [] when the server reports "no records match the request"
    (error 401) or error 8.

    Fix: the original's except-clause silently fell through (returning
    None) for every OTHER FMServerError, hiding real failures; those are
    now re-raised.
    """
    self._preFind(WHAT, SORT, SKIP, MAX, LOP)
    for key in params:
        self._addDBParam(key, params[key])
    try:
        return self._doAction('-find')
    except FMServerError as e:
        if e.args[0] in (401, 8):
            return []
        raise
This function will perform the command - find .
59,511
def doFindAll(self, WHAT={}, SORT=[], SKIP=None, MAX=None):
    """Perform the -findall command over the current layout."""
    self._preFind(WHAT, SORT, SKIP, MAX)
    return self._doAction('-findall')
This function will perform the command - findall .
59,512
def doFindAny(self, WHAT={}, SORT=[], SKIP=None, MAX=None, LOP='AND', **params):
    """Perform the -findany command (a random matching record)."""
    self._preFind(WHAT, SORT, SKIP, MAX, LOP)
    for key, value in params.items():
        self._addDBParam(key, value)
    return self._doAction('-findany')
This function will perform the command - findany .
59,513
def doDelete(self, WHAT={}):
    """Perform the -delete command for the record identified by WHAT.

    WHAT may be a fetched record object (with ``_modified``) or a dict
    containing 'RECORDID'.  Modernized from Python-2 ``has_key`` and
    ``raise X, msg`` syntax.
    """
    if hasattr(WHAT, '_modified'):
        self._addDBParam('RECORDID', WHAT.RECORDID)
        self._addDBParam('MODID', WHAT.MODID)
    elif isinstance(WHAT, dict) and 'RECORDID' in WHAT:
        self._addDBParam('RECORDID', WHAT['RECORDID'])
    else:
        raise FMError(
            'Python Runtime: Object type (%s) given to function doDelete '
            'as argument WHAT cannot be used.' % type(WHAT))
    if self._layout == '':
        raise FMError('No layout was selected')
    if self._checkRecordID() == 0:
        raise FMError('RecordID is missing')
    return self._doAction('-delete')
This function will perform the command - delete .
59,514
def doNew(self, WHAT={}, **params):
    """Perform the -new command, creating a record from WHAT plus params.

    WHAT may be a fetched record object (with ``_modified``) or a plain
    dict of field values.  Modernized from Python-2 ``has_key`` and
    ``raise X, msg`` syntax.
    """
    if hasattr(WHAT, '_modified'):
        for key in WHAT:
            if key not in ['RECORDID', 'MODID']:
                # Map renamed fields back to their server-side names.
                if key in WHAT.__new2old__:
                    self._addDBParam(
                        WHAT.__new2old__[key].encode('utf-8'), WHAT[key])
                else:
                    self._addDBParam(key, WHAT[key])
    elif isinstance(WHAT, dict):
        for key in WHAT:
            self._addDBParam(key, WHAT[key])
    else:
        raise FMError(
            'Python Runtime: Object type (%s) given to function doNew '
            'as argument WHAT cannot be used.' % type(WHAT))
    if self._layout == '':
        raise FMError('No layout was selected')
    for key in params:
        self._addDBParam(key, params[key])
    if len(self._dbParams) == 0:
        raise FMError('No data to be added')
    return self._doAction('-new')
This function will perform the command - new .
59,515
def doDup(self, WHAT={}, **params):
    """Perform the -dup command, duplicating the record identified by WHAT.

    Modernized from Python-2 ``has_key`` and ``raise X, msg`` syntax.
    """
    if hasattr(WHAT, '_modified'):
        for key, value in WHAT._modified():
            # Map renamed fields back to their server-side names.
            if key in WHAT.__new2old__:
                self._addDBParam(WHAT.__new2old__[key].encode('utf-8'), value)
            else:
                self._addDBParam(key, value)
        self._addDBParam('RECORDID', WHAT.RECORDID)
        self._addDBParam('MODID', WHAT.MODID)
    elif isinstance(WHAT, dict):
        for key in WHAT:
            self._addDBParam(key, WHAT[key])
    else:
        raise FMError(
            'Python Runtime: Object type (%s) given to function doDup '
            'as argument WHAT cannot be used.' % type(WHAT))
    if self._layout == '':
        raise FMError('No layout was selected')
    for key in params:
        self._addDBParam(key, params[key])
    if self._checkRecordID() == 0:
        raise FMError('RecordID is missing')
    return self._doAction('-dup')
This function will perform the command - dup .
59,516
def _buildUrl ( self ) : return '%(protocol)s://%(host)s:%(port)s%(address)s' % { 'protocol' : self . _protocol , 'host' : self . _host , 'port' : self . _port , 'address' : self . _address , }
Builds url for normal FM requests .
59,517
def _buildFileUrl ( self , xml_req ) : return '%(protocol)s://%(host)s:%(port)s%(xml_req)s' % { 'protocol' : self . _protocol , 'host' : self . _host , 'port' : self . _port , 'xml_req' : xml_req , }
Builds url for fetching the files from FM .
59,518
def _doRequest(self, request=None, is_file=False, file_xml_uri=''):
    """Perform the specified request on the FileMaker server.

    For file requests builds a container URL; otherwise joins the request
    parts onto the base URL.  Returns the raw response body; raises for
    HTTP error statuses.  Modernized from the Python-2 print statement.
    """
    if request is None:
        request = []
    if is_file and file_xml_uri:
        url = self._buildFileUrl(file_xml_uri)
    else:
        url = "%s?%s" % (self._buildUrl(), '&'.join(request))
    if self._debug:
        print('[PyFileMaker DEBUG] ', url)
    resp = requests.get(url=url, auth=(self._login, self._password))
    resp.raise_for_status()
    return resp.content
This function will perform the specified request on the FileMaker server and it will return the raw result from FileMaker .
59,519
def FMErrorByNum(num):
    """Raise the FileMaker exception that corresponds to error code *num*.

    Unknown codes raise FMServerError with the generic (-1) message; code
    102 raises FMFieldError; everything else raises FMServerError.
    Modernized from Python-2 ``raise Class, (args)`` syntax.
    """
    if num not in FMErrorNum:
        raise FMServerError(num, FMErrorNum[-1])
    elif num == 102:
        raise FMFieldError(num, FMErrorNum[num])
    else:
        raise FMServerError(num, FMErrorNum[num])
This function raises an error based on the specified error code .
59,520
def doParseXMLData(self):
    """Parse the XML output of FileMaker and return the object tree.

    Truncated exports ending at '</COL>' are repaired by appending the
    missing closing tags.  Raises via FMErrorByNum when the server reports
    an error (954 when no error code can be extracted at all).

    Fix: narrowed the bare ``except:`` (which also swallowed SystemExit
    and KeyboardInterrupt) to ``except Exception``.
    """
    parser = xml2obj.Xml2Obj()
    if self.data[-6:] == '</COL>':
        self.data += '</ROW></RESULTSET></FMPXMLRESULT>'
    xobj = parser.ParseString(self.data)
    try:
        el = xobj.getElements('ERRORCODE')
        if el:
            self.errorcode = int(el[0].getData())
        else:
            # FMPXMLRESULT vs fmresultset grammar: the latter uses
            # <error code="..."/>.
            self.errorcode = int(
                xobj.getElements('error')[0].getAttribute('code'))
    except Exception:
        FMErrorByNum(954)
    if self.errorcode != 0:
        FMErrorByNum(self.errorcode)
    return xobj
This function parses the XML output of FileMaker .
59,521
def fill(metrics_headers=()):
    """Add the metrics headers known to GAX.

    Starts with the Python version, layers in the caller-supplied headers,
    then appends the gax and grpcio versions.  Order is preserved.
    """
    answer = collections.OrderedDict()
    answer['gl-python'] = platform.python_version()
    for key, value in collections.OrderedDict(metrics_headers).items():
        answer[key] = value
    answer['gax'] = gax.__version__
    answer['grpc'] = pkg_resources.get_distribution('grpcio').version
    return answer
Add the metrics headers known to GAX .
59,522
def stringify(metrics_headers=()):
    """Convert the provided metrics headers to one space-separated string."""
    pairs = collections.OrderedDict(metrics_headers)
    return ' '.join('%s/%s' % pair for pair in pairs.items())
Convert the provided metrics headers to a string .
59,523
def _str_dotted_getattr ( obj , name ) : for part in name . split ( '.' ) : obj = getattr ( obj , part ) return str ( obj ) if obj else None
Extends getattr to allow dots in the name to indicate nested objects.
59,524
def request_bytesize(self):
    """Total size in bytes (len of str()) of all queued bundle elements."""
    total = 0
    for elts in self._in_deque:
        for elt in elts:
            total += len(str(elt))
    return total
The size in bytes of the bundled field elements.
59,525
def run(self):
    """Flush the queued elements into the bundling request and dispatch it.

    No-op when nothing is queued.  The request's repeated bundled field is
    cleared and refilled from the queue, then dispatched with or without
    per-element subresponses.
    """
    if not self._in_deque:
        return
    req = self._bundling_request
    field = getattr(req, self.bundled_field)
    del field[:]  # reset the repeated field before refilling it
    field.extend([elt for elts in self._in_deque for elt in elts])
    if self.subresponse_field:
        self._run_with_subresponses(req, self.subresponse_field, self._kwargs)
    else:
        self._run_with_no_subresponse(req, self._kwargs)
Call the task's func.
59,526
def extend(self, elts):
    """Queue a copy of *elts*; return an Event set when the bundle is sent."""
    snapshot = elts[:]
    self._in_deque.append(snapshot)
    event = self._event_for(snapshot)
    self._event_deque.append(event)
    return event
Adds elts to the tasks .
59,527
def _event_for(self, elts):
    """Create an Event whose canceller removes *elts* from the queues."""
    evt = Event()
    evt.canceller = self._canceller_for(elts, evt)
    return evt
Creates an Event that is set when the bundle with elts is sent .
59,528
def _canceller_for ( self , elts , event ) : def canceller ( ) : try : self . _event_deque . remove ( event ) self . _in_deque . remove ( elts ) return True except ValueError : return False return canceller
Obtains a cancellation function that removes elts .
59,529
def schedule(self, api_call, bundle_id, bundle_desc, bundling_request, kwargs=None):
    """Schedule bundling_request's bundled field as part of bundle_id.

    The bundle is flushed immediately when either the element-count or the
    request-byte threshold is crossed.  Returns the Event for the caller to
    wait on or cancel.
    """
    kwargs = kwargs or dict()
    bundle = self._bundle_for(api_call, bundle_id, bundle_desc,
                              bundling_request, kwargs)
    event = bundle.extend(
        getattr(bundling_request, bundle_desc.bundled_field))
    opts = self._options
    if 0 < opts.element_count_threshold <= bundle.element_count:
        self._run_now(bundle.bundle_id)
    if 0 < opts.request_byte_threshold <= bundle.request_bytesize:
        self._run_now(bundle.bundle_id)
    return event
Schedules bundle_desc of bundling_request as part of bundle_id .
59,530
def create_stub(generated_create_stub, channel=None, service_path=None,
                service_port=None, credentials=None, scopes=None,
                ssl_credentials=None):
    """Create a gRPC client stub.

    When no channel is supplied, a secure authorized channel is built from
    the given (or default) credentials and the service host:port.
    """
    if channel is None:
        if credentials is None:
            credentials = _grpc_google_auth.get_default_credentials(scopes)
        channel = _grpc_google_auth.secure_authorized_channel(
            credentials,
            '{}:{}'.format(service_path, service_port),
            ssl_credentials=ssl_credentials)
    return generated_create_stub(channel)
Creates a gRPC client stub .
59,531
def get_default_credentials(scopes):
    """Get the Application Default Credentials, scoped as requested."""
    creds, _project = google.auth.default(scopes=scopes)
    return creds
Gets the Application Default Credentials .
59,532
def add_timeout_arg(a_func, timeout, **kwargs):
    """Wrap a_func so it is always called with *timeout* as its final arg."""
    def inner(*args):
        return a_func(*(args + (timeout,)), **kwargs)
    return inner
Updates a_func so that it gets called with the timeout as its final arg .
59,533
def retryable(a_func, retry_options, **kwargs):
    """Create a function equivalent to a_func that retries transient errors.

    Retries with randomized exponential backoff while the exception's code
    is in ``retry_options.retry_codes``; when timeout settings are present,
    per-call timeouts grow multiplicatively and the whole loop is bounded
    by a total deadline.

    Fix: the original's implicitly-concatenated error message read
    "...before anyresponse was received" (missing space).
    """
    bo = retry_options.backoff_settings
    delay_mult = bo.retry_delay_multiplier
    max_delay_millis = bo.max_retry_delay_millis
    has_timeout_settings = _has_timeout_settings(bo)
    if has_timeout_settings:
        timeout_mult = bo.rpc_timeout_multiplier
        max_timeout = bo.max_rpc_timeout_millis / _MILLIS_PER_SECOND
        total_timeout = bo.total_timeout_millis / _MILLIS_PER_SECOND

    def inner(*args):
        delay = bo.initial_retry_delay_millis
        exc = errors.RetryError(
            'Retry total timeout exceeded before any '
            'response was received')
        if has_timeout_settings:
            timeout = bo.initial_rpc_timeout_millis / _MILLIS_PER_SECOND
            now = time.time()
            deadline = now + total_timeout
        else:
            timeout = None
            deadline = None
        # When deadline is None the loop only exits via return or raise.
        while deadline is None or now < deadline:
            try:
                to_call = add_timeout_arg(a_func, timeout, **kwargs)
                return to_call(*args)
            except Exception as exception:
                code = config.exc_to_code(exception)
                if code not in retry_options.retry_codes:
                    raise errors.RetryError(
                        'Exception occurred in retry method that was not'
                        ' classified as transient', exception)
                exc = errors.RetryError(
                    'Retry total timeout exceeded with exception', exception)
                # Randomized exponential backoff, capped at max_delay.
                to_sleep = random.uniform(0, delay * 2)
                time.sleep(to_sleep / _MILLIS_PER_SECOND)
                delay = min(delay * delay_mult, max_delay_millis)
                if has_timeout_settings:
                    now = time.time()
                    timeout = min(timeout * timeout_mult, max_timeout,
                                  deadline - now)
        raise exc
    return inner
Creates a function equivalent to a_func but that retries on certain exceptions .
59,534
def create_error(msg, cause=None):
    """Create a GaxError, or InvalidArgumentError for INVALID_ARGUMENT causes."""
    name = config.NAME_STATUS_CODES.get(config.exc_to_code(cause))
    if name == 'INVALID_ARGUMENT':
        return InvalidArgumentError(msg, cause=cause)
    return GaxError(msg, cause=cause)
Creates a GaxError or subclass .
59,535
def get_operation(self, name, options=None):
    """Get the latest state of a long-running operation (for polling)."""
    req = operations_pb2.GetOperationRequest(name=name)
    return self._get_operation(req, options)
Gets the latest state of a long - running operation . Clients can use this method to poll the operation result at intervals as recommended by the API service .
59,536
def cancel_operation(self, name, options=None):
    """Start best-effort asynchronous cancellation of an operation.

    Success is not guaranteed; poll get_operation to learn the outcome.
    """
    req = operations_pb2.CancelOperationRequest(name=name)
    self._cancel_operation(req, options)
Starts asynchronous cancellation on a long - running operation . The server makes a best effort to cancel the operation but success is not guaranteed . If the server doesn t support this method it returns google . rpc . Code . UNIMPLEMENTED . Clients can use Operations . GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation . On successful cancellation the operation is not deleted ; instead it becomes an operation with an Operation . error value with a google . rpc . Status . code of 1 corresponding to Code . CANCELLED .
59,537
def delete_operation(self, name, options=None):
    """Delete a long-running operation record (does not cancel it)."""
    req = operations_pb2.DeleteOperationRequest(name=name)
    self._delete_operation(req, options)
Deletes a long - running operation . This method indicates that the client is no longer interested in the operation result . It does not cancel the operation . If the server doesn t support this method it returns google . rpc . Code . UNIMPLEMENTED .
59,538
def check_oneof(**kwargs):
    """Raise ValueError when more than one keyword argument is not None."""
    if not kwargs:
        return None
    provided = [val for val in kwargs.values() if val is not None]
    if len(provided) > 1:
        field_list = ', '.join(sorted(kwargs.keys()))
        raise ValueError(
            'Only one of {fields} should be set.'.format(fields=field_list))
Raise ValueError if more than one keyword argument is not none .
59,539
def _bundleable ( desc ) : def inner ( a_func , settings , request , ** kwargs ) : if not settings . bundler : return a_func ( request , ** kwargs ) the_id = bundling . compute_bundle_id ( request , desc . request_discriminator_fields ) return settings . bundler . schedule ( a_func , the_id , desc , request , kwargs ) return inner
Creates a function that transforms an API call into a bundling call .
59,540
def _page_streamable(page_descriptor):
    """Create a transform that performs page streaming.

    Returns a flattened resource iterator when settings.flatten_pages is
    set, otherwise the page iterator itself.
    """
    def inner(a_func, settings, request, **kwargs):
        pages = gax.PageIterator(a_func, page_descriptor,
                                 settings.page_token, request, **kwargs)
        if settings.flatten_pages:
            return gax.ResourceIterator(pages)
        return pages
    return inner
Creates a function that yields an iterable to performs page - streaming .
59,541
def construct_settings(
        service_name, client_config, config_override, retry_names,
        bundle_descriptors=None, page_descriptors=None,
        metrics_headers=(), kwargs=None):
    """Construct a dictionary mapping method names to _CallSettings.

    Merges per-method timeout, bundling and retry configuration from
    ``client_config`` with any ``config_override``, attaching the
    x-goog-api-client metrics header to every call's kwargs.
    """
    defaults = {}
    bundle_descriptors = bundle_descriptors or {}
    page_descriptors = page_descriptors or {}
    kwargs = kwargs or {}
    # Strip any caller-supplied x-goog-api-client header; ours wins.
    if 'metadata' in kwargs:
        kwargs['metadata'] = [
            value for value in kwargs['metadata']
            if value[0].lower() != 'x-goog-api-client']
    kwargs.setdefault('metadata', [])
    kwargs['metadata'].append(
        ('x-goog-api-client',
         metrics.stringify(metrics.fill(metrics_headers))))
    try:
        service_config = client_config['interfaces'][service_name]
    except KeyError:
        raise KeyError(
            'Client configuration not found for service: {}'.format(
                service_name))
    overrides = config_override.get('interfaces', {}).get(service_name, {})
    # NOTE(review): .get('methods') has no default; a config lacking the
    # 'methods' key would raise TypeError here -- confirm intended.
    for method in service_config.get('methods'):
        method_config = service_config['methods'][method]
        overriding_method = overrides.get('methods', {}).get(method, {})
        snake_name = _upper_camel_to_lower_under(method)
        # Overrides take precedence when they specify a timeout.
        if overriding_method and overriding_method.get('timeout_millis'):
            timeout = overriding_method['timeout_millis']
        else:
            timeout = method_config['timeout_millis']
        timeout /= _MILLIS_PER_SECOND
        bundle_descriptor = bundle_descriptors.get(snake_name)
        bundling_config = method_config.get('bundling', None)
        if overriding_method and 'bundling' in overriding_method:
            bundling_config = overriding_method['bundling']
        bundler = _construct_bundling(bundling_config, bundle_descriptor)
        # Base retry settings merged under any override retry settings.
        retry_options = _merge_retry_options(
            _construct_retry(method_config, service_config['retry_codes'],
                             service_config['retry_params'], retry_names),
            _construct_retry(overriding_method, overrides.get('retry_codes'),
                             overrides.get('retry_params'), retry_names))
        defaults[snake_name] = gax._CallSettings(
            timeout=timeout, retry=retry_options,
            page_descriptor=page_descriptors.get(snake_name),
            bundler=bundler, bundle_descriptor=bundle_descriptor,
            kwargs=kwargs)
    return defaults
Constructs a dictionary mapping method names to _CallSettings .
59,542
def _catch_errors ( a_func , to_catch ) : def inner ( * args , ** kwargs ) : try : return a_func ( * args , ** kwargs ) except tuple ( to_catch ) as exception : utils . raise_with_traceback ( gax . errors . create_error ( 'RPC failed' , cause = exception ) ) return inner
Updates a_func to wrap exceptions with GaxError
59,543
def create_api_call(func, settings):
    """Convert an rpc call into an API call governed by the settings.

    The returned callable merges per-call options over the method defaults,
    wraps the raw rpc with retry (or a plain timeout) plus GaxError
    translation, and dispatches through a paging / bundling / plain caller
    chosen from the settings.
    """
    def base_caller(api_call, _, *args):
        # Plain pass-through when neither paging nor bundling applies.
        return api_call(*args)

    def inner(request, options=None):
        this_options = _merge_options_metadata(options, settings)
        this_settings = settings.merge(this_options)
        if this_settings.retry and this_settings.retry.retry_codes:
            api_call = gax.retry.retryable(
                func, this_settings.retry, **this_settings.kwargs)
        else:
            api_call = gax.retry.add_timeout_arg(
                func, this_settings.timeout, **this_settings.kwargs)
        api_call = _catch_errors(api_call, gax.config.API_ERRORS)
        # api_caller is bound below, after inner is defined (closure).
        return api_caller(api_call, this_settings, request)

    if settings.page_descriptor:
        if settings.bundler and settings.bundle_descriptor:
            raise ValueError('The API call has incompatible settings: '
                             'bundling and page streaming')
        api_caller = _page_streamable(settings.page_descriptor)
    elif settings.bundler and settings.bundle_descriptor:
        api_caller = _bundleable(settings.bundle_descriptor)
    else:
        api_caller = base_caller
    return inner
Converts an rpc call into an API call governed by the settings .
59,544
def get(pb_or_dict, key, default=_SENTINEL):
    """Retrieve *key* (possibly dotted for nesting) from a Message or Mapping.

    Raises KeyError when the key is absent and no default was supplied,
    and TypeError for unsupported container types.

    Fix: the original TypeError message contained a '%s' placeholder that
    was never substituted with the key (compare the sibling ``set``).
    """
    key, subkey = _resolve_subkeys(key)
    if isinstance(pb_or_dict, Message):
        answer = getattr(pb_or_dict, key, default)
    elif isinstance(pb_or_dict, collections.Mapping):
        answer = pb_or_dict.get(key, default)
    else:
        raise TypeError('Tried to fetch a key %s on an invalid object; '
                        'expected a dict or protobuf message.' % key)
    if answer is _SENTINEL:
        raise KeyError(key)
    # Recurse into nested containers for dotted keys.
    if subkey and answer is not default:
        return get(answer, subkey, default=default)
    return answer
Retrieve the given key off of the object .
59,545
def set(pb_or_dict, key, value):
    """Set *key* (possibly dotted) on a protobuf Message or mutable mapping.

    Dotted keys recurse into nested containers; sequence, mapping and
    Message values are copied element-by-element into protobuf fields.
    """
    if not isinstance(pb_or_dict, (collections.MutableMapping, Message)):
        raise TypeError('Tried to set a key %s on an invalid object; '
                        'expected a dict or protobuf message.' % key)
    key, subkey = _resolve_subkeys(key)
    # Dotted key: ensure the intermediate container exists, then recurse.
    if subkey is not None:
        if isinstance(pb_or_dict, collections.MutableMapping):
            pb_or_dict.setdefault(key, {})
        set(get(pb_or_dict, key), subkey, value)
        return
    if isinstance(pb_or_dict, collections.MutableMapping):
        pb_or_dict[key] = value
    elif isinstance(value, (collections.MutableSequence, tuple)):
        # Repeated protobuf field: clear, then re-populate item by item.
        while getattr(pb_or_dict, key):
            getattr(pb_or_dict, key).pop()
        for item in value:
            if isinstance(item, collections.Mapping):
                getattr(pb_or_dict, key).add(**item)
            else:
                getattr(pb_or_dict, key).extend([item])
    elif isinstance(value, collections.Mapping):
        for item_key, item_value in value.items():
            set(getattr(pb_or_dict, key), item_key, item_value)
    elif isinstance(value, Message):
        for item_key, item_value in value.ListFields():
            set(getattr(pb_or_dict, key), item_key.name, item_value)
    else:
        setattr(pb_or_dict, key, value)
Set the given key on the object .
59,546
def setdefault(pb_or_dict, key, value):
    """Set *key* to *value* only when the current value is falsy or absent."""
    current = get(pb_or_dict, key, default=None)
    if not current:
        set(pb_or_dict, key, value)
Set the key on the object to the value if the current value is falsy .
59,547
def _resolve_subkeys ( key , separator = '.' ) : subkey = None if separator in key : index = key . index ( separator ) subkey = key [ index + 1 : ] key = key [ : index ] return key , subkey
Given a key which may actually be a nested key return the top level key and any nested subkeys as separate values .
59,548
def merge(self, options):
    """Return a new _CallSettings merged from this and a CallOptions object.

    Fields whose per-call value is OPTION_INHERIT fall back to the
    defaults; kwargs are shallow-merged; bundling is dropped entirely
    unless the caller opted in via ``is_bundling``.
    """
    if not options:
        # No per-call options: copy the defaults unchanged.
        return _CallSettings(
            timeout=self.timeout, retry=self.retry,
            page_descriptor=self.page_descriptor,
            page_token=self.page_token, bundler=self.bundler,
            bundle_descriptor=self.bundle_descriptor,
            kwargs=self.kwargs)
    else:
        if options.timeout == OPTION_INHERIT:
            timeout = self.timeout
        else:
            timeout = options.timeout
        if options.retry == OPTION_INHERIT:
            retry = self.retry
        else:
            retry = options.retry
        if options.page_token == OPTION_INHERIT:
            page_token = self.page_token
        else:
            page_token = options.page_token
        # Bundling only survives when the caller explicitly enables it.
        if options.is_bundling:
            bundler = self.bundler
        else:
            bundler = None
        if options.kwargs == OPTION_INHERIT:
            kwargs = self.kwargs
        else:
            # Per-call kwargs layered over a copy of the defaults.
            kwargs = self.kwargs.copy()
            kwargs.update(options.kwargs)
        return _CallSettings(
            timeout=timeout, retry=retry,
            page_descriptor=self.page_descriptor, page_token=page_token,
            bundler=bundler, bundle_descriptor=self.bundle_descriptor,
            kwargs=kwargs)
Returns new _CallSettings merged from this and a CallOptions object .
59,549
def cancel(self):
    """Request cancellation; return False if the operation already finished."""
    already_done = self.done()
    if not already_done:
        self._client.cancel_operation(self._operation.name)
    return not already_done
If last Operation s value of done is true returns false ; otherwise issues OperationsClient . cancel_operation and returns true .
59,550
def result(self, timeout=None):
    """Poll until the operation completes; return the unpacked response.

    Raises GaxError carrying the operation's error message on failure.
    """
    polled = self._poll(timeout)
    if polled.HasField('response'):
        return _from_any(self._result_type, self._operation.response)
    raise GaxError(self._operation.error.message)
Enters polling loop on OperationsClient . get_operation and once Operation . done is true then returns Operation . response if successful or throws GaxError if not successful .
59,551
def add_done_callback(self, fn):
    """Invoke fn with this future when done; queue it to a worker otherwise.

    Callbacks queued before completion are executed in insertion order by
    a lazily-started background process.
    """
    if self._operation.done:
        _try_callback(self, fn)
        return
    self._queue.put(dill.dumps(fn))
    if self._process is None:
        self._process = mp.Process(target=self._execute_tasks)
        self._process.start()
Enters a polling loop on OperationsClient . get_operation and once the operation is done or cancelled calls the function with this _OperationFuture . Added callables are called in the order that they were added .
59,552
def get_sql(self):
    """Generate the 'JOIN <table> ON <condition>' SQL for this clause."""
    table_sql = self.right_table.get_sql()
    condition = self.get_condition()
    return '{0} {1} ON {2}'.format(self.join_type, table_sql, condition)
Generates the JOIN sql for the join tables and join condition
59,553
def set_left_table(self, left_table=None):
    """Set this join's left table; defaults to the query's first table."""
    if not left_table:
        self.left_table = self.get_left_table()
    else:
        self.left_table = TableFactory(table=left_table, owner=self.owner)
Sets the left table for this join clause . If no table is specified the first table in the query will be used
59,554
def get_left_table(self):
    """Return the explicit left table, else the query's first table, else None."""
    if self.left_table:
        return self.left_table
    tables = self.owner.tables
    if len(tables):
        return tables[0]
Returns the left table if one was specified otherwise the first table in the query is returned
59,555
def get_all_related_objects(self, table):
    """Compat shim for related objects across Django 1.7 - 1.10 meta APIs.

    Uses the deprecated ``get_all_related_objects`` when available, else
    filters ``get_fields()`` for auto-created reverse relations.
    """
    meta = table.model._meta
    if hasattr(meta, 'get_all_related_objects'):
        return meta.get_all_related_objects()
    return [
        f for f in meta.get_fields()
        if (f.one_to_many or f.one_to_one) and f.auto_created and not f.concrete
    ]
Fix for django 1 . 10 to replace deprecated code . Keep support for django 1 . 7
59,556
def set_right_table(self, table):
    """Set the join's right table and infer its field prefix when possible.

    For two ModelTables, first looks for a reverse relation from the left
    model (using the accessor name, minus django's '_set' suffix), then
    for a forward FK/O2O field on the left model pointing at the right
    model.  No-op inference when the left table is not yet set.
    """
    self.right_table = table
    if self.left_table is None:
        return
    if type(self.left_table) is ModelTable and type(self.right_table) is ModelTable:
        # Reverse relations: right model referenced from the left model.
        for field in self.get_all_related_objects(self.left_table):
            related_model = field.model
            if hasattr(field, 'related_model'):
                related_model = field.related_model
            if related_model == self.right_table.model:
                if self.right_table.field_prefix is None:
                    self.right_table.field_prefix = field.get_accessor_name()
                    # Trim django's default '_set' accessor suffix.
                    if len(self.right_table.field_prefix) > 4 and self.right_table.field_prefix[-4:] == '_set':
                        self.right_table.field_prefix = self.right_table.field_prefix[:-4]
                return
        # Forward relations: FK/O2O declared on the left model itself.
        for field in self.left_table.model._meta.fields:
            if (field.get_internal_type() == 'OneToOneField' or
                    field.get_internal_type() == 'ForeignKey'):
                if field.remote_field.model == self.right_table.model:
                    if self.right_table.field_prefix is None:
                        self.right_table.field_prefix = field.name
                    return
Sets the right table for this join clause and try to automatically set the condition if one isn t specified
59,557
def get_condition(self):
    """Determine the ON condition for this join's SQL.

    Returns the explicit condition if set; otherwise, for two ModelTables,
    infers the FK relationship (reverse first, then forward) and builds a
    'right.col = left.col' equality.  Returns None when nothing matches.

    Fix: the original guard tested ``type(self.right_table) is ModelTable``
    TWICE; per the sibling ``set_right_table``, it should test the left
    and right tables.
    """
    if self.condition:
        return self.condition
    if type(self.left_table) is ModelTable and type(self.right_table) is ModelTable:
        # Reverse relation: FK on the right model pointing at the left.
        for field in self.get_all_related_objects(self.right_table):
            related_model = field.model
            if hasattr(field, 'related_model'):
                related_model = field.related_model
            if related_model == self.left_table.model:
                table_join_field = field.field.column
                condition = '{0}.{1} = {2}.{3}'.format(
                    self.right_table.get_identifier(),
                    self.right_table.model._meta.pk.name,
                    self.left_table.get_identifier(),
                    table_join_field,
                )
                return condition
        # Forward relation: FK/O2O declared on the right model itself.
        for field in self.right_table.model._meta.fields:
            if (field.get_internal_type() == 'OneToOneField' or
                    field.get_internal_type() == 'ForeignKey'):
                if field.remote_field.model == self.left_table.model:
                    table_join_field = field.column
                    condition = '{0}.{1} = {2}.{3}'.format(
                        self.right_table.get_identifier(),
                        table_join_field,
                        self.left_table.get_identifier(),
                        self.left_table.model._meta.pk.name)
                    return condition
    return None
Determines the condition to be used in the condition part of the join sql .
59,558
def get_sql(self):
    """Build the WHERE clause; resets the named-arg state on each call."""
    self.arg_index = 0
    self.args = {}
    if not len(self.wheres):
        return ''
    return 'WHERE {0} '.format(self.build_where_part(self.wheres))
Builds and returns the WHERE portion of the sql
59,559
def get_condition_value(self, operator, value):
    """Wrap *value* with SQL wildcards for LIKE-style operators."""
    templates = {
        'contains': '%{0}%',
        'icontains': '%{0}%',
        'startswith': '{0}%',
    }
    template = templates.get(operator)
    if template is not None:
        return template.format(value)
    return value
Gets the condition value based on the operator and value
59,560
def set_arg(self, value):
    """Store *value* under a generated arg name; return that name.

    The name combines the prefix with an auto-incrementing index.
    """
    name = '%sA%s' % (self.arg_prefix, self.arg_index)
    self.args[name] = value
    self.arg_index += 1
    return name
Set the query param in self . args based on the prefix and arg index and auto increment the arg_index
59,561
def get_name(self, use_alias=True):
    """
    Gets the sort expression for this sorter: the field reference followed
    by ``ASC`` or ``DESC``.

    :param use_alias: when True use the field's identifier (alias-aware);
        otherwise use its raw select sql
    :rtype: str
    """
    direction = 'DESC' if self.desc else 'ASC'
    if use_alias:
        reference = self.field.get_identifier()
    else:
        reference = self.field.get_select_sql()
    return '{0} {1}'.format(reference, direction)
Gets the name to reference the sorted field
59,562
def get_sql(self):
    """
    Generates the sql used for the limit clause of a Query.

    Emits ``LIMIT n `` and/or ``OFFSET n `` only for positive values;
    returns an empty string when neither applies.

    :rtype: str
    """
    parts = []
    if self.limit and self.limit > 0:
        parts.append('LIMIT {0} '.format(self.limit))
    if self.offset and self.offset > 0:
        parts.append('OFFSET {0} '.format(self.offset))
    return ''.join(parts)
Generates the sql used for the limit clause of a Query
59,563
def init_defaults(self):
    """
    Sets the default values for this instance, resetting all query state.
    """
    # Generated sql and naming
    self.sql = ''
    self.table_prefix = ''

    # Source tables and joins
    self.tables = []
    self.with_tables = []
    self.joins = []

    # Filtering, grouping, ordering, limiting
    self._where = Where()
    self.groups = []
    self.sorters = []
    self._limit = None
    self.is_inner = False

    # Select modifiers
    self._distinct = False
    self.distinct_ons = []

    # Bulk insert/update state
    self.field_names = []
    self.field_names_pk = None
    self.values = []
Sets the default values for this instance
59,564
def from_table(self, table=None, fields='*', schema=None, **kwargs):
    """
    Adds a Table and any optional fields to the list of tables this
    query is selecting from.

    :param table: the table name, model, or inner query to select from
    :param fields: fields to select from the table (default ``'*'``)
    :param schema: optional schema name
    :return: self, for chaining
    """
    new_table = TableFactory(
        table=table,
        fields=fields,
        schema=schema,
        owner=self,
        **kwargs
    )
    self.tables.append(new_table)
    return self
Adds a Table and any optional fields to the list of tables this query is selecting from .
59,565
def insert_into(self, table=None, field_names=None, values=None, **kwargs):
    """
    Bulk inserts a list of values into a table.

    :param table: the target table
    :param field_names: names of the columns being inserted
    :param values: list of value rows to insert
    :return: self, for chaining
    """
    self.tables.append(TableFactory(table=table, **kwargs))
    self.field_names = field_names
    self.values = values
    return self
Bulk inserts a list of values into a table
59,566
def update_table(self, table=None, field_names=None, values=None, pk=None, **kwargs):
    """
    Bulk updates rows in a table.

    :param table: the target table
    :param field_names: names of the columns being updated
    :param values: list of value rows
    :param pk: name of the primary-key field used to match rows

    NOTE(review): unlike ``insert_into`` this does not return ``self``;
    kept as-is for backward compatibility.
    """
    self.tables.append(TableFactory(table=table, **kwargs))
    self.field_names = field_names
    self.values = values
    self.field_names_pk = pk
Bulk updates rows in a table
59,567
def join(self, right_table=None, fields=None, condition=None, join_type='JOIN',
         schema=None, left_table=None, extract_fields=True, prefix_fields=False,
         field_prefix=None, allow_duplicates=False):
    """
    Joins a table to another table based on a condition and adds fields from
    the joined table to the returned fields.

    :param right_table: the table being joined in
    :param fields: fields to select from the joined table
    :param condition: explicit join condition; when None it is derived later
        by the Join object (see Join.get_condition)
    :param join_type: sql join keyword, e.g. 'JOIN' or 'LEFT JOIN'
    :param schema: optional schema name
    :param left_table: the table being joined to; defaults inside Join
    :param extract_fields: whether to pull individual fields out of the join
    :param prefix_fields: whether joined fields get a prefix on their alias
    :param field_prefix: explicit prefix for joined field aliases
    :param allow_duplicates: when False, a join identical (same left and
        right table identifiers) to an existing one is silently skipped
    :return: self, for chaining
    """
    new_join_item = Join(
        left_table=left_table,
        right_table=right_table,
        fields=fields,
        condition=condition,
        join_type=join_type,
        schema=schema,
        owner=self,
        extract_fields=extract_fields,
        prefix_fields=prefix_fields,
        field_prefix=field_prefix,
    )

    # Skip exact-duplicate joins (same right AND left table identifiers)
    # unless duplicates are explicitly allowed.
    if allow_duplicates is False:
        for join_item in self.joins:
            if join_item.right_table.get_identifier() == new_join_item.right_table.get_identifier():
                if join_item.left_table.get_identifier() == new_join_item.left_table.get_identifier():
                    return self

    self.joins.append(new_join_item)
    return self
Joins a table to another table based on a condition and adds fields from the joined table to the returned fields .
59,568
def join_left(self, right_table=None, fields=None, condition=None,
              join_type='LEFT JOIN', schema=None, left_table=None,
              extract_fields=True, prefix_fields=False, field_prefix=None,
              allow_duplicates=False):
    """
    Wrapper for ``self.join`` with a default join type of ``LEFT JOIN``.
    All other parameters are forwarded unchanged.

    :return: whatever ``self.join`` returns (self, for chaining)
    """
    join_kwargs = dict(
        right_table=right_table,
        fields=fields,
        condition=condition,
        join_type=join_type,
        schema=schema,
        left_table=left_table,
        extract_fields=extract_fields,
        prefix_fields=prefix_fields,
        field_prefix=field_prefix,
        allow_duplicates=allow_duplicates,
    )
    return self.join(**join_kwargs)
Wrapper for self . join with a default join of LEFT JOIN
59,569
def where(self, q=None, where_type='AND', **kwargs):
    """
    Adds a where condition as a Q object to the query's Where instance.
    Keyword arguments are each converted into their own Q object.

    :param q: an optional Q object to add
    :param where_type: 'AND' or 'OR' combinator for the added conditions
    :return: self, for chaining
    """
    if q is not None:
        self._where.wheres.add(q, where_type)
    # Each kwarg becomes its own Q condition; iterating an empty dict is a no-op.
    for key, value in kwargs.items():
        self._where.wheres.add(Q(**{key: value}), where_type)
    return self
Adds a where condition as a Q object to the query's Where instance.
59,570
def group_by(self, field=None, table=None, allow_duplicates=False):
    """
    Adds a group by clause to the query by adding a Group instance to the
    query's groups list. A duplicate (same field identifier) is silently
    skipped unless ``allow_duplicates`` is True.

    :return: self, for chaining
    """
    group = Group(field=field, table=table)
    if not allow_duplicates:
        new_identifier = group.field.get_identifier()
        for existing in self.groups:
            if existing.field.get_identifier() == new_identifier:
                return self
    self.groups.append(group)
    return self
Adds a group by clause to the query by adding a Group instance to the query's groups list.
59,571
def order_by(self, field=None, table=None, desc=False):
    """
    Adds an order by clause to the query by adding a Sorter instance to
    the query's sorters list.

    :param field: the field to sort on
    :param table: optional table the field belongs to
    :param desc: sort descending when True
    :return: self, for chaining
    """
    sorter = Sorter(field=field, table=table, desc=desc)
    self.sorters.append(sorter)
    return self
Adds an order by clause to the query by adding a Sorter instance to the query's sorters list.
59,572
def check_name_collisions(self):
    """
    Checks if there are any tables referenced by the same identifier and
    updates the auto_alias accordingly. This is called when generating the
    sql for a query and should only be called internally.
    """
    table_index = 0
    table_names = {}
    for table in self.tables + self.with_tables:
        # Candidate alias: query-level prefix + positional T<n> marker
        table_prefix = 'T{0}'.format(table_index)
        auto_alias = '{0}{1}'.format(self.table_prefix, table_prefix)
        identifier = table.get_identifier()
        # Only assign the auto alias when the identifier is missing or
        # already taken by an earlier table.
        if identifier is None or identifier in table_names:
            table.auto_alias = auto_alias
        table_names[identifier] = True
        # Inner queries get their args and table prefix namespaced so their
        # generated parameter names cannot collide with the outer query's.
        if type(table) is QueryTable:
            table.query.prefix_args(auto_alias)
            table.query.table_prefix = auto_alias
        table_index += 1
Checks if there are any tables referenced by the same identifier and updated the auto_alias accordingly . This is called when generating the sql for a query and should only be called internally .
59,573
def get_sql(self, debug=False, use_cache=True):
    """
    Generates the sql for this query and returns the sql as a string.

    :param debug: when True, return a human-readable formatted version
        via ``format_sql`` instead of the executable sql
    :param use_cache: accepted for API compatibility; not consulted in
        this implementation
    :rtype: str
    """
    # Resolve duplicate table identifiers before building any clause
    self.check_name_collisions()

    if debug:
        return self.format_sql()

    # Clause order matters: WITH, SELECT, FROM, JOIN, WHERE, GROUP BY,
    # ORDER BY, LIMIT — each builder returns '' when it does not apply.
    sql = ''
    sql += self.build_withs()
    sql += self.build_select_fields()
    sql += self.build_from_table()
    sql += self.build_joins()
    sql += self.build_where()
    sql += self.build_groups()
    sql += self.build_order_by()
    sql += self.build_limit()

    self.sql = sql.strip()
    return self.sql
Generates the sql for this query and returns the sql as a string .
59,574
def get_update_sql(self, rows):
    """
    Returns SQL UPDATE for rows ``rows``, using a
    ``UPDATE ... FROM (VALUES ...) AS new_values`` statement.

    The first configured field name is treated as the primary key used to
    match rows; the remaining field names are the columns being updated.

    :param rows: list of value tuples; each row's first element is the pk
    :return: (sql, sql_args) tuple ready for cursor.execute
    :raises Exception: if fewer than 2 columns are present
    """
    field_names = self.get_field_names()
    pk = field_names[0]
    update_field_names = field_names[1:]

    num_columns = len(rows[0])
    if num_columns < 2:
        raise Exception('At least 2 fields must be passed to get_update_sql')

    # For each non-pk column, detect whether EVERY row holds None; such
    # columns are set to literal NULL instead of new_values.<col> (casting
    # an all-None VALUES column would otherwise be ambiguous).
    all_null_indices = [
        all(row[index] is None for row in rows)
        for index in range(1, num_columns)
    ]

    field_names_sql = '({0})'.format(', '.join(field_names))

    # Build one parenthesized placeholder group per row, flattening all
    # values into a single positional arg list.
    row_values = []
    sql_args = []
    for row in rows:
        placeholders = []
        for value in row:
            sql_args.append(value)
            placeholders.append('%s')
        row_values.append('({0})'.format(', '.join(placeholders)))
    row_values_sql = ', '.join(row_values)

    set_field_list = [
        '{0} = NULL'.format(field_name)
        if all_null_indices[idx]
        else '{0} = new_values.{0}'.format(field_name)
        for idx, field_name in enumerate(update_field_names)
    ]
    set_field_list_sql = ', '.join(set_field_list)

    self.sql = 'UPDATE {0} SET {1} FROM (VALUES {2}) AS new_values {3} WHERE {0}.{4} = new_values.{4}'.format(
        self.tables[0].get_identifier(),
        set_field_list_sql,
        row_values_sql,
        field_names_sql,
        pk
    )
    return self.sql, sql_args
Returns SQL UPDATE for rows rows
59,575
def format_sql(self):
    """
    Builds the sql in a format that is easy for humans to read and debug:
    one clause keyword per line with its items indented beneath it.

    NOTE: only SELECT, FROM, ORDER BY and LIMIT/OFFSET are formatted here;
    other clauses are not included in the debug output.

    :rtype: str
    """
    sql = ''

    # SELECT: strip the keyword, split the field list, re-emit indented
    select_segment = self.build_select_fields()
    select_segment = select_segment.replace('SELECT ', '', 1)
    fields = [field.strip() for field in select_segment.split(',')]
    sql += 'SELECT\n\t{0}\n'.format(',\n\t'.join(fields))

    # FROM: same treatment for the table list
    from_segment = self.build_from_table()
    from_segment = from_segment.replace('FROM ', '', 1)
    tables = [table.strip() for table in from_segment.split(',')]
    sql += 'FROM\n\t{0}\n'.format(',\n\t'.join(tables))

    # ORDER BY: only when present
    order_by_segment = self.build_order_by()
    if len(order_by_segment):
        order_by_segment = order_by_segment.replace('ORDER BY ', '', 1)
        sorters = [sorter.strip() for sorter in order_by_segment.split(',')]
        sql += 'ORDER BY\n\t{0}\n'.format(',\n\t'.join(sorters))

    # LIMIT / OFFSET: indent each keyword's value; OFFSET handling differs
    # depending on whether LIMIT precedes it in the same segment
    limit_segment = self.build_limit()
    if len(limit_segment):
        if 'LIMIT' in limit_segment:
            limit_segment = limit_segment.replace('LIMIT ', 'LIMIT\n\t', 1)
            if 'OFFSET' in limit_segment:
                limit_segment = limit_segment.replace('OFFSET ', '\nOFFSET\n\t', 1)
        elif 'OFFSET' in limit_segment:
            limit_segment = limit_segment.replace('OFFSET ', 'OFFSET\n\t', 1)
        sql += limit_segment

    return sql
Builds the sql in a format that is easy for humans to read and debug
59,576
def build_select_fields(self):
    """
    Generates the sql for the SELECT portion of the query, combining the
    fields of all FROM tables and all joined tables.

    :rtype: str
    """
    field_parts = []
    for table in self.tables:
        field_parts.extend(table.get_field_sql())
    for join_item in self.joins:
        field_parts.extend(join_item.right_table.get_field_sql())
    return 'SELECT {0}{1} '.format(self.get_distinct_sql(), ', '.join(field_parts))
Generates the sql for the SELECT portion of the query
59,577
def build_from_table(self):
    """
    Generates the sql for the FROM portion of the query. Tables whose
    sql renders empty are skipped.

    :rtype: str
    """
    table_parts = [
        table_sql
        for table_sql in (table.get_sql() for table in self.tables)
        if len(table_sql)
    ]
    return 'FROM {0} '.format(', '.join(table_parts))
Generates the sql for the FROM portion of the query
59,578
def build_joins(self):
    """
    Generates the sql for the JOIN portion of the query, concatenating
    each join's sql with a single space. Returns '' when there are no
    joins.

    :rtype: str
    """
    join_parts = [join_item.get_sql() for join_item in self.joins]
    if not join_parts:
        return ''
    return '{0} '.format(' '.join(join_parts))
Generates the sql for the JOIN portion of the query
59,579
def build_groups(self):
    """
    Generates the sql for the GROUP BY portion of the query, or '' when
    no groups are configured.

    :rtype: str
    """
    if not len(self.groups):
        return ''
    group_names = ', '.join(group.get_name() for group in self.groups)
    return 'GROUP BY {0} '.format(group_names)
Generates the sql for the GROUP BY portion of the query
59,580
def build_order_by(self, use_alias=True):
    """
    Generates the sql for the ORDER BY portion of the query, or '' when
    no sorters are configured.

    :param use_alias: forwarded to each sorter's ``get_name``
    :rtype: str
    """
    if not len(self.sorters):
        return ''
    sort_names = ', '.join(
        sorter.get_name(use_alias=use_alias) for sorter in self.sorters
    )
    return 'ORDER BY {0} '.format(sort_names)
Generates the sql for the ORDER BY portion of the query
59,581
def find_table(self, table):
    """
    Finds a table by name or alias. The FROM tables and JOIN tables are
    included in the search.

    :param table: table name, alias, or model to search for
    :return: the matching table object, or None if not found
    """
    # Normalize the lookup value into a table object so identifiers compare
    target = TableFactory(table)
    identifier = target.get_identifier()
    join_tables = [join_item.right_table for join_item in self.joins]
    for candidate in self.tables + join_tables:
        if candidate.get_identifier() == identifier:
            return candidate
    return None
Finds a table by name or alias . The FROM tables and JOIN tables are included in the search .
59,582
def wrap(self, alias=None):
    """
    Wraps the query by selecting all fields from itself: this query becomes
    an inner query of a new outer query, and ``self`` is mutated in place
    to BE that outer query (via ``__dict__.update``), so existing references
    keep working.

    :param alias: optional alias for the inner query table
    :return: self (now the outer query), for chaining
    """
    field_names = self.get_field_names()
    # Build the outer query selecting FROM a deep copy of this query
    query = Query(self.connection).from_table(deepcopy(self), alias=alias)
    # Morph self into the outer query in place
    self.__dict__.update(query.__dict__)
    # Re-apply the original field names on the (single) inner table
    self.tables[0].set_fields(field_names)
    field_names = self.get_field_names()
    return self
Wraps the query by selecting all fields from itself
59,583
def copy(self):
    """
    Deeply copies everything in the query object except the connection
    object, which is shared between the original and the copy.

    The connection is detached before ``deepcopy`` because db connections
    are typically not deep-copyable.

    :return: a deep copy of this query sharing the same connection
    """
    connection = self.connection
    del self.connection
    try:
        copied_query = deepcopy(self)
    finally:
        # FIX: restore the attribute even if deepcopy raises, so ``self``
        # is never left without its connection.
        self.connection = connection
    copied_query.connection = connection
    return copied_query
Deeply copies everything in the query object except the connection object is shared
59,584
def get_args(self):
    """
    Gets the args for the query which will be escaped when being executed
    by the db. All inner queries are inspected and their args are combined
    with this query's args.

    :rtype: dict
    """
    # Merge args from every inner (sub)query table, FROM and WITH alike
    for table in self.tables + self.with_tables:
        if type(table) is QueryTable:
            self._where.args.update(table.query.get_args())
    return self._where.args
Gets the args for the query, which will be escaped when being executed by the db. All inner queries are inspected and their args are combined with this query's args.
59,585
def explain(self, sql=None, sql_args=None):
    """
    Runs EXPLAIN on this query.

    :param sql: optional raw sql to explain; defaults to this query's sql
        (in which case this query's args are used too)
    :param sql_args: args for a caller-supplied ``sql``; defaults to {}
    :return: the EXPLAIN result rows as a list of dictionaries
    """
    cursor = self.get_cursor()
    if sql is None:
        sql = self.get_sql()
        sql_args = self.get_args()
    elif sql_args is None:
        sql_args = {}
    cursor.execute('EXPLAIN {0}'.format(sql), sql_args)
    rows = self._fetch_all_as_dict(cursor)
    return rows
Runs EXPLAIN on this query
59,586
def select(self, return_models=False, nest=False, bypass_safe_limit=False, sql=None, sql_args=None):
    """
    Executes the SELECT statement and returns the rows as a list of
    dictionaries or a list of model instances.

    :param return_models: build model instances from the rows (implies nest)
    :param nest: turn ``a__b`` flat keys into nested dicts
    :param bypass_safe_limit: skip the global safe-limit guard
    :param sql: optional raw sql to execute instead of this query's sql
    :param sql_args: args for the supplied sql
    """
    # Guard against accidentally huge result sets unless bypassed
    if bypass_safe_limit is False:
        if Query.enable_safe_limit:
            if self.count() > Query.safe_limit:
                self.limit(Query.safe_limit)

    if sql is None:
        sql = self.get_sql()
    if sql_args is None:
        sql_args = self.get_args()

    cursor = self.get_cursor()
    cursor.execute(sql, sql_args)
    rows = self._fetch_all_as_dict(cursor)

    if return_models:
        # Model hydration needs nested rows; map join prefixes to models
        nest = True
        model_map = {}
        for join_item in self.joins:
            model_map[join_item.right_table.field_prefix] = join_item.right_table.model

    if nest:
        # Expand flat 'parent__child' keys into nested dicts, then drop
        # the original flat keys
        for row in rows:
            _row = row.copy()
            for key, value in _row.items():
                set_value_for_keypath(row, key, value, True, '__')
                if '__' in key:
                    row.pop(key)

    if return_models:
        model_class = self.tables[0].model
        new_rows = []
        for row in rows:
            model = model_class()
            # First pass: plain attributes on the top-level model
            for key, value in row.items():
                if key not in model_map:
                    setattr(model, key, value)
            # Second pass: nested dicts become child model instances
            for key, value in row.items():
                if key in model_map:
                    child_model = model_map[key]()
                    for child_key, child_value in value.items():
                        setattr(child_model, child_key, child_value)
                    value = child_model
                    setattr(model, key, value)
            new_rows.append(model)
        rows = new_rows

    return rows
Executes the SELECT statement and returns the rows as a list of dictionaries or a list of model instances
59,587
def update(self, rows):
    """
    Updates records in the db using the bulk-update sql generated by
    ``get_update_sql``. A no-op when ``rows`` is empty.

    :param rows: list of value rows, first element of each being the pk
    """
    if not rows:
        return
    sql, sql_args = self.get_update_sql(rows)
    self.get_cursor().execute(sql, sql_args)
Updates records in the db
59,588
def get_auto_field_name(self, model_class):
    """
    If one of the model's fields is an AutoField, return its column name,
    otherwise return None.

    :param model_class: the Django model class to inspect
    :rtype: str or None
    """
    return next(
        (field.column for field in model_class._meta.fields
         if isinstance(field, AutoField)),
        None,
    )
If one of the unique_fields is the model s AutoField return the field name otherwise return None
59,589
def upsert(self, rows, unique_fields, update_fields, return_rows=False, return_models=False):
    """
    Performs an upsert with the set of models defined in rows. If the unique
    field which is meant to cause a conflict is an auto increment field, then
    the field should be excluded when its value is null. In this case an
    upsert will be performed followed by a plain insert for the rows whose
    auto field is null.

    :param rows: model instances to upsert
    :param unique_fields: fields that identify a conflict
    :param update_fields: fields to update on conflict
    :param return_rows: return the affected rows as dicts
    :param return_models: return the affected rows as model instances
    """
    if len(rows) == 0:
        return

    ModelClass = self.tables[0].model

    rows_with_null_auto_field_value = []
    auto_field_name = self.get_auto_field_name(ModelClass)
    # When the conflict field is the auto pk, split out rows that have no
    # pk yet — those must be plain inserts, not upserts.
    if auto_field_name in unique_fields:
        rows_with_null_auto_field_value = [row for row in rows if getattr(row, auto_field_name) is None]
        rows = [row for row in rows if getattr(row, auto_field_name) is not None]

    return_value = []

    # Phase 1: true upsert for rows that have a pk value
    if rows:
        sql, sql_args = self.get_upsert_sql(
            rows,
            unique_fields,
            update_fields,
            auto_field_name=auto_field_name,
            return_rows=return_rows or return_models
        )
        cursor = self.get_cursor()
        cursor.execute(sql, sql_args)
        if return_rows or return_models:
            return_value.extend(self._fetch_all_as_dict(cursor))

    # Phase 2: insert-only for rows whose auto pk is null
    if rows_with_null_auto_field_value:
        sql, sql_args = self.get_upsert_sql(
            rows_with_null_auto_field_value,
            unique_fields,
            update_fields,
            auto_field_name=auto_field_name,
            only_insert=True,
            return_rows=return_rows or return_models
        )
        cursor = self.get_cursor()
        cursor.execute(sql, sql_args)
        if return_rows or return_models:
            return_value.extend(self._fetch_all_as_dict(cursor))

    if return_models:
        ModelClass = self.tables[0].model
        model_objects = [ModelClass(**row_dict) for row_dict in return_value]
        # Mark instances as persisted so Django treats them as saved
        for model_object in model_objects:
            model_object._state.adding = False
            model_object._state.db = 'default'
        return_value = model_objects

    return return_value
Performs an upsert with the set of models defined in rows . If the unique field which is meant to cause a conflict is an auto increment field then the field should be excluded when its value is null . In this case an upsert will be performed followed by a bulk_create
59,590
def get_count_query(self):
    """
    Copies the query object and alters the field list and order by to do
    a more efficient count: all selected fields are replaced by a single
    COUNT(*) and sorting is dropped.

    :return: the modified copy of this query
    :raises Exception: if the query has no tables
    """
    query_copy = self.copy()
    if not query_copy.tables:
        raise Exception('No tables specified to do a count')
    for table in query_copy.tables:
        table.fields[:] = []
    query_copy.tables[0].add_field(CountField('*'))
    query_copy.sorters[:] = []
    return query_copy
Copies the query object and alters the field list and order by to do a more efficient count
59,591
def count(self, field='*'):
    """
    Returns a COUNT of the query by wrapping the query and performing a
    COUNT aggregate of the specified field.

    :param field: kept for API compatibility; the count query itself
        always counts '*'
    :rtype: int
    """
    rows = self.get_count_query().select(bypass_safe_limit=True)
    first_row = rows[0]
    return next(iter(first_row.values()))
Returns a COUNT of the query by wrapping the query and performing a COUNT aggregate of the specified field
59,592
def _fetch_all_as_dict(self, cursor):
    """
    Iterates over the result set and converts each row to a dictionary
    keyed by the cursor's column names.

    :param cursor: a DB-API cursor that has already executed a query
    :rtype: list of dict
    """
    column_names = [col[0] for col in cursor.description]
    return [dict(zip(column_names, row)) for row in cursor.fetchall()]
Iterates over the result set and converts each row to a dictionary
59,593
def get_sql(self, debug=False, use_cache=True):
    """
    Generates the sql for this query window and returns the sql as a
    string, wrapped in ``OVER (...)``.

    :param debug: accepted for API compatibility; not consulted here
    :param use_cache: accepted for API compatibility; not consulted here
    :rtype: str
    """
    inner_sql = ''.join([
        self.build_partition_by_fields(),
        # Window ORDER BY must use raw select sql, not aliases
        self.build_order_by(use_alias=False),
        self.build_limit(),
    ]).strip()
    self.sql = 'OVER ({0})'.format(inner_sql)
    return self.sql
Generates the sql for this query window and returns the sql as a string .
59,594
def value_for_keypath(dict, keypath):
    """
    Returns the value of a keypath in a dictionary if the keypath exists,
    or None if the keypath does not exist. An empty keypath returns the
    dictionary itself.

    NOTE(review): the parameter name shadows the ``dict`` builtin; kept
    unchanged for API compatibility.

    :param dict: the dictionary to traverse
    :param keypath: dot-delimited path, e.g. ``'a.b.c'``
    """
    if not len(keypath):
        return dict
    current = dict
    for key in keypath.split('.'):
        if key not in current:
            return None
        current = current[key]
    return current
Returns the value of a keypath in a dictionary if the keypath exists or None if the keypath does not exist .
59,595
def set_value_for_keypath(item, keypath, value, create_if_needed=False, delimeter='.'):
    """
    Sets the value for a keypath in a dictionary if the keypath exists.
    This modifies the original dictionary.

    :param item: the dictionary to modify
    :param keypath: delimiter-separated path of keys
    :param value: value to store at the keypath
    :param create_if_needed: create missing intermediate dicts when True
    :param delimeter: path separator (spelling kept for API compatibility)
    :return: ``item`` on success, or None if the path could not be resolved
    """
    if not len(keypath):
        return None

    keys = keypath.split(delimeter)
    if len(keys) > 1:
        # Descend one level and recurse on the remainder of the path
        head = keys[0]
        if create_if_needed:
            item[head] = item.get(head, {})
        if head not in item:
            return None
        recursed = set_value_for_keypath(
            item[head],
            delimeter.join(keys[1:]),
            value,
            create_if_needed=create_if_needed,
            delimeter=delimeter,
        )
        return item if recursed else None

    # Single (leaf) key
    if create_if_needed:
        item[keypath] = item.get(keypath, {})
    if keypath not in item:
        return None
    item[keypath] = value
    return item
Sets the value for a keypath in a dictionary if the keypath exists . This modifies the original dictionary .
59,596
def get_alias(self):
    """
    Gets the alias for the field, or the auto_alias if one is set. When the
    field's table prefixes its fields, the alias (or the bare field name if
    no alias exists) is prefixed with the table's field prefix. If there
    isn't any kind of alias, None is returned.

    :rtype: str or None
    """
    chosen = None
    if self.alias:
        chosen = self.alias
    elif self.auto_alias:
        chosen = self.auto_alias

    if self.table and self.table.prefix_fields:
        prefix = self.table.get_field_prefix()
        base = chosen if chosen else self.name
        chosen = '{0}__{1}'.format(prefix, base)

    return chosen
Gets the alias for the field, or the auto_alias if one is set. If there isn't any kind of alias, None is returned.
59,597
def get_select_sql(self):
    """
    Gets the SELECT field portion for the field, without the alias. If
    the field has a table, the reference is qualified as ``table.field``.

    :rtype: str
    """
    if self.table:
        return '{0}.{1}'.format(self.table.get_identifier(), self.name)
    return '{0}'.format(self.name)
Gets the SELECT field portion for the field without the alias . If the field has a table it will be included here like table . field
59,598
def set_table(self, table):
    """
    Setter for the table of this field. Also sets the inner field's table
    when the inner field does not yet have one.
    """
    super(MultiField, self).set_table(table)
    # Propagate to the wrapped inner field only if it is still unassigned
    if self.field and self.field.table is None:
        self.field.set_table(self.table)
Setter for the table of this field. Also sets the inner field's table.
59,599
def add_to_table(self, field, alias, add_group=False):
    """
    Adds this field to the field's table under the given alias, and
    optionally adds a matching GROUP BY on the alias via the table's
    owning query.

    :param field: the field (or expression) to add
    :param alias: alias to register the field under
    :param add_group: when True, also group by the alias
    """
    self.table.add_field({alias: field})
    if add_group:
        self.table.owner.group_by(alias)
Adds this field to the field s table and optionally group by it