idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
50,900 | def get_frame_info ( tb , context_lines = 7 , simple = False ) : lineno = tb . tb_lineno function = tb . tb_frame . f_code . co_name variables = tb . tb_frame . f_locals files = { } if simple : fn = tb . tb_frame . f_code . co_filename else : fn = tb . tb_frame . f_globals . get ( '__file__' ) if not fn : fn = os . path . realpath ( inspect . getsourcefile ( tb ) or inspect . getfile ( tb ) ) if fn [ - 4 : ] in ( '.pyc' , '.pyo' ) : fn = fn [ : - 1 ] loader = None if not os . path . exists ( fn ) : loader = tb . tb_frame . f_globals . get ( '__loader__' ) while not loader and tb . tb_next : tb = tb . tb_next loader = tb . tb_frame . f_globals . get ( '__loader__' ) source = '' pre_context , post_context = [ ] , [ ] context_line = raw_context_line = context_lineno = None try : if loader : source = loader . get_source ( fn ) else : if not fn in files : source = open ( fn ) . read ( ) files [ fn ] = source else : source = files [ fn ] except : pass else : try : raw_context_line = source . splitlines ( ) [ lineno - 1 ] . strip ( ) except IndexError : pass if not simple : parsed_source = highlight_python ( source ) lbound = max ( 0 , lineno - context_lines - 1 ) ubound = lineno + context_lines try : context_line = parsed_source [ lineno - 1 ] pre_context = parsed_source [ lbound : lineno - 1 ] post_context = parsed_source [ lineno : ubound ] except IndexError as e : pass context_lineno = lbound if isinstance ( fn , unicode ) : fn = fn . encode ( 'utf-8' ) return { 'tb' : tb , 'filename' : fn , 'basename' : os . path . basename ( fn ) , 'loader' : loader , 'function' : function , 'lineno' : lineno , 'vars' : variables , 'pre_context' : pre_context , 'context_line' : context_line , 'raw_context_line' : raw_context_line , 'post_context' : post_context , 'context_lineno' : context_lineno , 'source' : source } | Return a dict of information about a given traceback . |
50,901 | def get_html_output ( self ) : def html_splitlines ( lines ) : open_tag_re = re . compile ( r'<(\w+)(\s.*)?[^/]?>' ) close_tag_re = re . compile ( r'</(\w+)>' ) open_tags = [ ] for line in lines : for tag in open_tags : line = tag . group ( 0 ) + line open_tags = [ ] for tag in open_tag_re . finditer ( line ) : open_tags . append ( tag ) open_tags . reverse ( ) for ctag in close_tag_re . finditer ( line ) : for otag in open_tags : if otag . group ( 1 ) == ctag . group ( 1 ) : open_tags . remove ( otag ) break for tag in open_tags : line += '</%s>' % tag . group ( 1 ) yield line if self . error : return escape ( self . raw ) . splitlines ( ) return list ( html_splitlines ( self . out . getvalue ( ) . splitlines ( ) ) ) | Return line generator . |
50,902 | def get_columns ( model = None , fields = None , meta = None ) : if model : M = get_model ( model ) else : M = None if fields is not None : f = fields if M : if meta and hasattr ( M , meta ) : m = getattr ( model , meta ) if hasattr ( m , 'fields' ) : f = m . fields else : f = M . _fields_list else : f = M . _fields_list columns = [ ] for x in f : if isinstance ( x , str ) : field_name = x elif isinstance ( x , dict ) : field_name = x [ 'name' ] else : raise UliwebError ( "Field definition is not right, it should be just like str or {'name':xxx}" ) if '.' in field_name : model_name , field_name = field_name . split ( '.' ) M = get_model ( model_name ) if not M : raise UliwebError ( "Model can't be empty, because field name not has `model.` prefix" ) if field_name in M . c : columns . append ( M . c [ field_name ] ) return columns | Get model columns list |
50,903 | def get_field ( name , model = None ) : if '.' in name : m , name = name . split ( '.' ) model = get_model ( m ) if model : return getattr ( model , name , None ) | get model field according to name the name can be like model . column |
50,904 | def get_column ( name , model = None ) : if '.' in name : m , name = name . split ( '.' ) model = get_model ( m ) if model : return model . c . get ( name ) | get table column according to name the name can be like model . column |
50,905 | def _process_file ( self , obj , fobj , field ) : from uliweb import settings paths = [ ] upload_to = self . upload_to or self . _get_upload_path ( field , 'upload_to' , obj ) if upload_to : self . fileserving . to_path = upload_to upload_to_sub = self . upload_to_sub or self . _get_upload_path ( field , 'upload_to_sub' , obj ) if upload_to_sub : paths . append ( upload_to_sub ) paths . append ( fobj [ 'filename' ] ) return self . fileserving . save_file ( os . path . join ( * paths ) , fobj [ 'file' ] , replace = self . file_replace , convert = self . file_convert ) | obj is record object fobj is data field is FileField instance |
50,906 | def count ( self , query ) : if self . manual : return self . total if isinstance ( query , Select ) : q = query . with_only_columns ( [ func . count ( ) ] ) . order_by ( None ) . limit ( None ) . offset ( None ) return do_ ( q ) . scalar ( ) return query . count ( ) | If query is Select object this function will try to get count of select |
50,907 | def get_data ( self , query , fields_convert_map , encoding = 'utf-8' , auto_convert = True , include_hidden = False , header = None ) : fields_convert_map = fields_convert_map or { } d = self . fields_convert_map . copy ( ) d . update ( fields_convert_map ) if isinstance ( query , Select ) : query = do_ ( query ) for record in query : self . _cal_sum ( record ) row = [ ] record = self . _get_record ( record ) if self . before_record_render : self . before_record_render ( record ) if isinstance ( record , orm . Model ) : model = record . __class__ else : model = None for i , x in enumerate ( self . table_info [ 'fields_list' ] ) : field = get_field ( x [ 'name' ] , model ) if not field : field = { 'name' : x [ 'name' ] } else : field = { 'name' : x [ 'name' ] , 'prop' : field } if not include_hidden and x . get ( 'hidden' ) : continue if isinstance ( record , orm . Model ) : v = make_view_field ( field , record , fields_convert_map = d , auto_convert = auto_convert ) else : v = make_view_field ( field , record , fields_convert_map = d , auto_convert = auto_convert , value = record [ x [ 'name' ] ] ) value = v [ 'display' ] row . append ( value ) if header : ret = dict ( zip ( header , row ) ) else : ret = row yield ret total = self . _get_sum ( ) if total : row = [ ] for x in total : v = x if isinstance ( x , str ) : v = safe_unicode ( x , encoding ) row . append ( v ) if header : ret = dict ( zip ( header , row ) ) else : ret = row yield ret | If convert = True will convert field value |
50,908 | def objects ( self , json_result = False ) : self . rows_num = 0 query = self . query ( ) if not isinstance ( query , ( orm . Result , list , dict ) ) : query = do_ ( query ) for record in query : self . rows_num += 1 r = self . object ( record , json_result ) self . _cal_sum ( record ) yield r total = self . _render_sum ( True ) if total : yield total | Return a generator of all processed data it just like render but it ll not return a table or json format data but just data . And the data will be processed by fields_convert_map if passed . |
50,909 | def query_all ( self ) : return self . query_model ( self . model , self . condition , order_by = self . order_by , group_by = self . group_by , having = self . having ) | Query all records without limit and offset . |
50,910 | def copy ( self ) : missing = object ( ) result = object . __new__ ( self . __class__ ) for name in self . __slots__ : val = getattr ( self , name , missing ) if val is not missing : setattr ( result , name , val ) return result | Create a flat copy of the dict . |
50,911 | def list ( self ) : before , after = self . filename_template . split ( '%s' , 1 ) filename_re = re . compile ( r'%s(.{5,})%s$' % ( re . escape ( before ) , re . escape ( after ) ) ) result = [ ] for filename in os . listdir ( self . path ) : if filename . endswith ( _fs_transaction_suffix ) : continue match = filename_re . match ( filename ) if match is not None : result . append ( match . group ( 1 ) ) return result | Lists all sessions in the store . |
50,912 | def to_timezone ( dt , tzinfo = None ) : if not dt : return dt tz = pick_timezone ( tzinfo , __timezone__ ) if not tz : return dt dttz = getattr ( dt , 'tzinfo' , None ) if not dttz : return dt . replace ( tzinfo = tz ) else : return dt . astimezone ( tz ) | Convert a datetime to timezone |
50,913 | def to_date ( dt , tzinfo = None , format = None ) : d = to_datetime ( dt , tzinfo , format ) if not d : return d return date ( d . year , d . month , d . day ) | Convert a datetime to date with tzinfo |
50,914 | def to_time ( dt , tzinfo = None , format = None ) : d = to_datetime ( dt , tzinfo , format ) if not d : return d return time_ ( d . hour , d . minute , d . second , d . microsecond , tzinfo = d . tzinfo ) | Convert a datetime to time with tzinfo |
50,915 | def to_datetime ( dt , tzinfo = None , format = None ) : if not dt : return dt tz = pick_timezone ( tzinfo , __timezone__ ) if isinstance ( dt , ( str , unicode ) ) : if not format : formats = DEFAULT_DATETIME_INPUT_FORMATS else : formats = list ( format ) d = None for fmt in formats : try : d = datetime . strptime ( dt , fmt ) except ValueError : continue if not d : return None d = d . replace ( tzinfo = tz ) else : d = datetime ( getattr ( dt , 'year' , 1970 ) , getattr ( dt , 'month' , 1 ) , getattr ( dt , 'day' , 1 ) , getattr ( dt , 'hour' , 0 ) , getattr ( dt , 'minute' , 0 ) , getattr ( dt , 'second' , 0 ) , getattr ( dt , 'microsecond' , 0 ) ) if not getattr ( dt , 'tzinfo' , None ) : d = d . replace ( tzinfo = tz ) else : d = d . replace ( tzinfo = dt . tzinfo ) return to_timezone ( d , tzinfo ) | Convert a date or time to datetime with tzinfo |
50,916 | def parse_time ( t ) : if isinstance ( t , ( str , unicode ) ) : b = re_time . match ( t ) if b : v , unit = int ( b . group ( 1 ) ) , b . group ( 2 ) if unit == 's' : return v * 1000 elif unit == 'm' : return v * 60 * 1000 elif unit == 'h' : return v * 60 * 60 * 1000 else : return v else : raise TimeFormatError ( t ) elif isinstance ( t , ( int , long ) ) : return t else : raise TimeFormatError ( t ) | Parse string time format to microsecond |
50,917 | def process_exception ( self , request , e ) : if isinstance ( e , RedirectException ) : response = e . get_response ( ) self . process_response ( request , response ) | Still process session data when specially Exception |
50,918 | def jsonp ( data , ** json_kwargs ) : from uliweb import request if 'jsonp' in json_kwargs : cb = json_kwargs . pop ( 'jsonp' ) else : cb = 'callback' begin = str ( request . GET . get ( cb ) ) if not begin : raise BadRequest ( "Can't found %s parameter in request's query_string" % cb ) if not r_callback . match ( begin ) : raise BadRequest ( "The callback name is not right, it can be alphabetic, number and underscore only" ) if callable ( data ) : @ wraps ( data ) def f ( * arg , ** kwargs ) : ret = data ( * arg , ** kwargs ) return Response ( begin + '(' + json_dumps ( ret ) + ');' , ** json_kwargs ) return f else : return Response ( begin + '(' + json_dumps ( data ) + ');' , ** json_kwargs ) | jsonp is callback key name |
50,919 | def get_url_adapter ( _domain_name ) : from werkzeug . _compat import wsgi_decoding_dance domain = application . domains . get ( _domain_name , { } ) server_name = None if domain . get ( 'domain' , '' ) : server_name = domain [ 'domain' ] try : env = { } environ = request . environ env [ 'url_scheme' ] = environ [ 'wsgi.url_scheme' ] env [ 'default_method' ] = environ [ 'REQUEST_METHOD' ] def _get_wsgi_string ( name ) : val = environ . get ( name ) if val is not None : return wsgi_decoding_dance ( val , "utf-8" ) env [ 'script_name' ] = _get_wsgi_string ( 'SCRIPT_NAME' ) env [ 'path_info' ] = _get_wsgi_string ( 'PATH_INFO' ) env [ 'query_args' ] = _get_wsgi_string ( 'QUERY_STRING' ) except : env = { } adapter = url_map . bind ( server_name , ** env ) else : try : env = request . environ except : env = { 'HTTP_ACCEPT' : 'text/html,application/xhtml+xml,application/xml;' 'q=0.9,*/*;q=0.8' , 'HTTP_ACCEPT_CHARSET' : 'ISO-8859-1,utf-8;q=0.7,*;q=0.3' , 'HTTP_ACCEPT_ENCODING' : 'gzip,deflate,sdch' , 'HTTP_ACCEPT_LANGUAGE' : 'uk,en-US;q=0.8,en;q=0.6' , 'HTTP_CACHE_CONTROL' : 'max-age=0' , 'HTTP_CONNECTION' : 'keep-alive' , 'HTTP_USER_AGENT' : 'Mozilla/5.0 (X11; Linux i686)' , 'REMOTE_ADDR' : '127.0.0.1' , 'REQUEST_METHOD' : 'GET' , 'REQUEST_URI' : '/' , 'SCRIPT_NAME' : '' , 'SERVER_NAME' : 'localhost' , 'SERVER_PORT' : '8080' , 'SERVER_PROTOCOL' : 'HTTP/1.1' , 'wsgi.errors' : None , 'wsgi.file_wrapper' : None , 'wsgi.multiprocess' : False , 'wsgi.multithread' : False , 'wsgi.run_once' : False , 'wsgi.url_scheme' : 'http' , 'wsgi.version' : ( 1 , 0 ) , } adapter = url_map . bind_to_environ ( env ) return adapter | Fetch a domain url_adapter object and bind it to according domain |
50,920 | def get_app_dir ( app ) : path = __app_dirs__ . get ( app ) if path is not None : return path else : p = app . split ( '.' ) try : path = pkg . resource_filename ( p [ 0 ] , '' ) except ImportError as e : log . error ( "Can't import app %s" % app ) log . exception ( e ) path = '' if len ( p ) > 1 : path = os . path . join ( path , * p [ 1 : ] ) __app_dirs__ [ app ] = path return path | Get an app s directory |
50,921 | def get_file ( self , filename , dir = 'static' ) : if os . path . exists ( filename ) : return filename dirs = self . apps if dir : fname = os . path . join ( dir , filename ) else : fname = filename for d in reversed ( dirs ) : path = pkg . resource_filename ( d , fname ) if os . path . exists ( path ) : return path return None | get_file will search from apps directory |
50,922 | def get_template_dirs ( self ) : def if_not_empty ( dir ) : if not os . path . exists ( dir ) : return for root , dirs , files in os . walk ( dir ) : if dirs : return True for f in files : if f != 'readme.txt' : return True template_dirs = [ os . path . join ( self . project_dir , x ) for x in settings . GLOBAL . TEMPLATE_DIRS or [ ] ] taglibs_dirs = [ ] for p in reversed ( self . apps ) : app_path = get_app_dir ( p ) path = os . path . join ( app_path , 'templates' ) if if_not_empty ( path ) : template_dirs . append ( path ) path = os . path . join ( app_path , 'taglibs' ) if if_not_empty ( path ) : taglibs_dirs . append ( path ) Dispatcher . template_dirs = template_dirs Dispatcher . taglibs_dirs = taglibs_dirs | Get templates directory from apps but in reversed order so the same named template file will be overrided by latter defined app |
50,923 | def get_lock ( key , value = None , expiry_time = 60 ) : from uliweb . utils . common import get_uuid redis = get_redis ( ) value = value or get_uuid ( ) return redis . set ( key , value , ex = expiry_time , nx = True ) | Get a distribute lock |
50,924 | def set_lock ( key , value = None , expiry_time = 60 ) : from uliweb . utils . common import get_uuid redis = get_redis ( ) value = value or get_uuid ( ) return redis . set ( key , value , ex = expiry_time , xx = True ) | Force to set a distribute lock |
50,925 | def after_init_apps ( sender ) : from uliweb import settings from uliweb . utils . common import log check = settings . get_var ( 'REDIS/check_version' ) if check : client = get_redis ( ) try : info = client . info ( ) except Exception as e : log . exception ( e ) log . error ( 'Redis is not started!' ) return redis_version = info [ 'redis_version' ] version = tuple ( map ( int , redis_version . split ( '.' ) ) ) op = re_compare_op . search ( check ) if op : _op = op . group ( ) _v = check [ op . end ( ) + 1 : ] . strip ( ) else : _op = '=' _v = check nv = tuple ( map ( int , _v . split ( '.' ) ) ) if _op == '=' : flag = version [ : len ( nv ) ] == nv elif _op == '>=' : flag = version >= nv elif _op == '>' : flag = version > nv elif _op == '<=' : flag = version <= nv elif _op == '<' : flag = version < nv else : log . error ( "Can't support operator %s when check redis version" % _op ) if not flag : log . error ( "Redis version %s is not matched what you want %s" % ( redis_version , _v ) ) | Check redis version |
50,926 | def _make_text_block ( name , content , content_type = None ) : if content_type == 'xhtml' : return u'<%s type="xhtml"><div xmlns="%s">%s</div></%s>\n' % ( name , XHTML_NAMESPACE , content , name ) if not content_type : return u'<%s>%s</%s>\n' % ( name , escape ( content ) , name ) return u'<%s type="%s">%s</%s>\n' % ( name , content_type , escape ( content ) , name ) | Helper function for the builder that creates an XML text block . |
50,927 | def _style_range ( self , cell , cell_range , border = None , fill = None , font = None , alignment = None ) : from openpyxl . styles import Border , Side top = left = right = bottom = Side ( border_style = 'thin' , color = self . border_color ) def border_add ( border , top = None , right = None , left = None , bottom = None ) : top = top or border . top left = left or border . left right = right or border . right bottom = bottom or border . bottom return Border ( top = top , left = left , right = right , bottom = bottom ) cell . alignment = alignment cell . fill = fill rows = list ( self . sheet [ cell_range ] ) for cell in rows [ 0 ] : cell . border = border_add ( cell . border , top = top ) for cell in rows [ - 1 ] : cell . border = border_add ( cell . border , bottom = bottom ) for row in rows : l = row [ 0 ] r = row [ - 1 ] l . border = border_add ( l . border , left = left ) r . border = border_add ( r . border , right = right ) | Apply styles to a range of cells as if they were a single cell . |
50,928 | def url_unquote ( string , charset = 'utf-8' , errors = 'replace' , unsafe = '' ) : rv = _unquote_to_bytes ( string , unsafe ) if charset is not None : rv = rv . decode ( charset , errors ) return rv | URL decode a single string with a given encoding . If the charset is set to None no unicode decoding is performed and raw bytes are returned . |
50,929 | def decode_netloc ( self ) : rv = _decode_idna ( self . host or '' ) if ':' in rv : rv = '[%s]' % rv port = self . port if port is not None : rv = '%s:%d' % ( rv , port ) auth = ':' . join ( filter ( None , [ _url_unquote_legacy ( self . raw_username or '' , '/:%@' ) , _url_unquote_legacy ( self . raw_password or '' , '/:%@' ) , ] ) ) if auth : rv = '%s@%s' % ( auth , rv ) return rv | Decodes the netloc part into a string . |
50,930 | def decode ( self , charset = 'utf-8' , errors = 'replace' ) : return URL ( self . scheme . decode ( 'ascii' ) , self . decode_netloc ( ) , self . path . decode ( charset , errors ) , self . query . decode ( charset , errors ) , self . fragment . decode ( charset , errors ) ) | Decodes the URL to a tuple made out of strings . The charset is only being used for the path query and fragment . |
50,931 | def _mixed_join ( iterable , sentinel ) : iterator = iter ( iterable ) first_item = next ( iterator , sentinel ) if isinstance ( first_item , bytes ) : return first_item + b'' . join ( iterator ) return first_item + u'' . join ( iterator ) | concatenate any string type in an intelligent way . |
50,932 | def _buf_append ( self , string ) : if not self . _buf : self . _buf = string else : self . _buf += string | Replace string directly without appending to an empty string avoiding type issues . |
50,933 | def quote_etag ( etag , weak = False ) : if '"' in etag : raise ValueError ( 'invalid etag' ) etag = '"%s"' % etag if weak : etag = 'w/' + etag return etag | Quote an etag . |
50,934 | def parse_etags ( value ) : if not value : return ETags ( ) strong = [ ] weak = [ ] end = len ( value ) pos = 0 while pos < end : match = _etag_re . match ( value , pos ) if match is None : break is_weak , quoted , raw = match . groups ( ) if raw == '*' : return ETags ( star_tag = True ) elif quoted : raw = quoted if is_weak : weak . append ( raw ) else : strong . append ( raw ) pos = match . end ( ) return ETags ( strong , weak ) | Parse an etag header . |
50,935 | def wait_pid ( pid , timeout = None , callback = None ) : def check_timeout ( delay ) : if timeout is not None : if time . time ( ) >= stop_at : if callback : callback ( pid ) else : raise TimeoutExpired time . sleep ( delay ) return min ( delay * 2 , 0.04 ) if timeout is not None : waitcall = lambda : os . waitpid ( pid , os . WNOHANG ) stop_at = time . time ( ) + timeout else : waitcall = lambda : os . waitpid ( pid , 0 ) delay = 0.0001 while 1 : try : retpid , status = waitcall ( ) except OSError as err : if err . errno == errno . EINTR : delay = check_timeout ( delay ) continue elif err . errno == errno . ECHILD : while 1 : if pid_exists ( pid ) : delay = check_timeout ( delay ) else : return else : raise else : if retpid == 0 : delay = check_timeout ( delay ) continue if os . WIFSIGNALED ( status ) : return os . WTERMSIG ( status ) elif os . WIFEXITED ( status ) : return os . WEXITSTATUS ( status ) else : raise RuntimeError ( "unknown process exit status" ) | Wait for process with pid pid to terminate and return its exit status code as an integer . |
50,936 | def get_filename ( self , filename , filesystem = False , convert = False , subpath = '' ) : from uliweb . utils . common import safe_unicode s = settings . GLOBAL if convert : _p , _f = os . path . split ( filename ) _filename = os . path . join ( _p , self . filename_convert ( _f ) ) else : _filename = filename nfile = safe_unicode ( _filename , s . HTMLPAGE_ENCODING ) if subpath : paths = [ application_path ( self . to_path ) , subpath , nfile ] else : paths = [ application_path ( self . to_path ) , nfile ] f = os . path . normpath ( os . path . join ( * paths ) ) . replace ( '\\' , '/' ) if filesystem : return files . encode_filename ( f , to_encoding = s . FILESYSTEM_ENCODING ) return f | Get the filename according to self . to_path and if filesystem is False then return unicode filename otherwise return filesystem encoded filename |
50,937 | def download ( self , filename , action = 'download' , x_filename = '' , x_sendfile = None , real_filename = '' ) : from uliweb import request from uliweb . utils . common import safe_str from uliweb . utils . filedown import filedown s = settings . GLOBAL action = request . GET . get ( 'action' , action ) if not real_filename : real_filename = self . get_filename ( filename , True , convert = False ) else : real_filename = files . encode_filename ( real_filename , to_encoding = s . FILESYSTEM_ENCODING ) if not x_filename : x_filename = safe_str ( filename , s . FILESYSTEM_ENCODING ) if self . x_file_prefix : x_filename = os . path . normpath ( os . path . join ( self . x_file_prefix , x_filename ) ) . replace ( '\\' , '/' ) xsend_flag = bool ( self . x_sendfile ) if x_sendfile is None else x_sendfile return filedown ( request . environ , filename , action = action , x_sendfile = xsend_flag , x_header_name = self . x_header_name , x_filename = x_filename , real_filename = real_filename ) | action will be download inline and if the request . GET has action then the action will be replaced by it . |
50,938 | def logout ( ) : from uliweb import request delete_user_session ( ) request . session . delete ( ) request . user = None return True | Remove the authenticated user s ID from the request . |
50,939 | def get ( self , key ) : if isinstance ( key , unicode ) : key = key . encode ( 'utf-8' ) v = self . client . get ( key ) if v is None : raise KeyError ( "Cache key [%s] not found" % key ) else : return v | because memcached does not provide a function to check if a key is existed so here is a heck way if the value is None then raise Exception |
50,940 | def get_commands ( mod ) : import inspect import types commands = { } def check ( c ) : return ( inspect . isclass ( c ) and issubclass ( c , Command ) and c is not Command and not issubclass ( c , CommandManager ) ) for name in dir ( mod ) : c = getattr ( mod , name ) if check ( c ) : commands [ c . name ] = c return commands | Find commands from a module |
50,941 | def usage ( self , subcommand ) : if len ( self . option_list ) > 0 : usage = '%%prog %s [options] %s' % ( subcommand , self . args ) else : usage = '%%prog %s %s' % ( subcommand , self . args ) if self . help : return '%s\n\n%s' % ( usage , self . help ) else : return usage | Return a brief description of how to use this command by default from the attribute self . help . |
50,942 | def show_table ( name , table , i , total ) : return '[%d/%d, %s] %s' % ( i + 1 , total , table . __appname__ , name ) | Display table info name is tablename table is table object i is current Index total is total of tables |
50,943 | def get_template ( self , name ) : filename = path . join ( self . search_path , * [ p for p in name . split ( '/' ) if p and p [ 0 ] != '.' ] ) if not path . exists ( filename ) : raise TemplateNotFound ( name ) return Template . from_file ( filename , self . encoding ) | Get a template from a given name . |
50,944 | def render_to_string ( self , * args , ** kwargs ) : try : template_name , args = args [ 0 ] , args [ 1 : ] except IndexError : raise TypeError ( 'name of template required' ) return self . get_template ( template_name ) . render ( * args , ** kwargs ) | Load and render a template into a unicode string . |
50,945 | def get_template ( self , template_name ) : try : return self . loader . load ( template_name , encoding = self . encoding ) except self . not_found_exception , e : raise TemplateNotFound ( template_name ) | Get the template which is at the given name |
50,946 | def render_to_string ( self , template_name , context = None ) : context = context or { } tmpl = self . get_template ( template_name ) return tmpl . generate ( ** context ) . render ( self . output_type , encoding = None ) | Load and render a template into an unicode string |
50,947 | def process_permission_roles ( perm , v ) : if isinstance ( v , ( tuple , list ) ) : roles = v else : roles = [ v ] for r in roles : if isinstance ( r , ( tuple , list ) ) : role_name , role_props = r else : role_name , role_props = r , '' role = Role . get ( Role . c . name == role_name ) if not role : raise Exception , 'Role [%s] not found.' % r rel = Rel . get ( ( Rel . c . role == role . id ) & ( Rel . c . permission == perm . id ) ) if not rel : rel = Rel ( role = role , permission = perm , props = role_props ) msg = 'Add Relation(Permision=%s, Role=%s)...' % ( name , role_name ) else : rel . update ( props = role_props ) msg = 'Update Relation(Permision=%s, Role=%s)...' % ( name , role_name ) flag = rel . save ( ) if flag : print msg | v is roles |
50,948 | def generate_adapter ( adapter , name = 'url_for' , map_name = 'url_map' ) : values = { u'server_name' : dumps ( adapter . server_name ) , u'script_name' : dumps ( adapter . script_name ) , u'subdomain' : dumps ( adapter . subdomain ) , u'url_scheme' : dumps ( adapter . url_scheme ) , u'name' : name , u'map_name' : map_name } return u % values | Generates the url building function for a map . |
50,949 | def js_to_url_function ( converter ) : if hasattr ( converter , 'js_to_url_function' ) : data = converter . js_to_url_function ( ) else : for cls in getmro ( type ( converter ) ) : if cls in js_to_url_functions : data = js_to_url_functions [ cls ] ( converter ) break else : return 'encodeURIComponent' return '(function(value) { %s })' % data | Get the JavaScript converter function from a rule . |
50,950 | def _warn_if_string ( iterable ) : if isinstance ( iterable , string_types ) : from warnings import warn warn ( Warning ( 'response iterable was set to a string. This appears ' 'to work but means that the server will send the ' 'data to the client char, by char. This is almost ' 'never intended behavior, use response.data to assign ' 'strings to the response object.' ) , stacklevel = 2 ) | Helper for the response objects to check if the iterable returned to the WSGI server is not a string . |
50,951 | def _get_file_stream ( self , total_content_length , content_type , filename = None , content_length = None ) : return default_stream_factory ( total_content_length , content_type , filename , content_length ) | Called to get a stream for the file upload . |
50,952 | def close ( self ) : files = self . __dict__ . get ( 'files' ) for key , value in iter_multi_items ( files or ( ) ) : value . close ( ) | Closes associated resources of this request object . This closes all file handles explicitly . You can also use the request object in a with statement with will automatically close it . |
50,953 | def get_data ( self , as_text = False ) : self . _ensure_sequence ( ) rv = b'' . join ( self . iter_encoded ( ) ) if as_text : rv = rv . decode ( self . charset ) return rv | The string representation of the request body . Whenever you call this property the request iterable is encoded and flattened . This can lead to unwanted behavior if you stream big data . |
50,954 | def _ensure_sequence ( self , mutable = False ) : if self . is_sequence : if mutable and not isinstance ( self . response , list ) : self . response = list ( self . response ) return if self . direct_passthrough : raise RuntimeError ( 'Attempted implicit sequence conversion ' 'but the response object is in direct ' 'passthrough mode.' ) if not self . implicit_sequence_conversion : raise RuntimeError ( 'The response object required the iterable ' 'to be a sequence, but the implicit ' 'conversion was disabled. Call ' 'make_sequence() yourself.' ) self . make_sequence ( ) | This method can be called by methods that need a sequence . If mutable is true it will also ensure that the response sequence is a standard Python list . |
50,955 | def delete_cookie ( self , key , path = '/' , domain = None ) : self . set_cookie ( key , expires = 0 , max_age = 0 , path = path , domain = domain ) | Delete a cookie . Fails silently if key doesn t exist . |
50,956 | def freeze ( self ) : self . response = list ( self . iter_encoded ( ) ) self . headers [ 'Content-Length' ] = str ( sum ( map ( len , self . response ) ) ) | Call this method if you want to make your response object ready for being pickled . This buffers the generator if there is one . It will also set the Content - Length header to the length of the body . |
50,957 | def get_app_iter ( self , environ ) : status = self . status_code if environ [ 'REQUEST_METHOD' ] == 'HEAD' or 100 <= status < 200 or status in ( 204 , 304 ) : iterable = ( ) elif self . direct_passthrough : if __debug__ : _warn_if_string ( self . response ) return self . response else : iterable = self . iter_encoded ( ) return ClosingIterator ( iterable , self . close ) | Returns the application iterator for the given environ . Depending on the request method and the current status code the return value might be an empty response rather than the one from the response . |
50,958 | def www_authenticate ( self ) : def on_update ( www_auth ) : if not www_auth and 'www-authenticate' in self . headers : del self . headers [ 'www-authenticate' ] elif www_auth : self . headers [ 'WWW-Authenticate' ] = www_auth . to_header ( ) header = self . headers . get ( 'www-authenticate' ) return parse_www_authenticate_header ( header , on_update ) | The WWW - Authenticate header in a parsed form . |
50,959 | def make_alias_redirect_url ( self , path , endpoint , values , method , query_args ) : url = self . build ( endpoint , values , method , append_unknown = False , force_external = True ) if query_args : url += '?' + self . encode_query_args ( query_args ) assert url != path , 'detected invalid alias setting. No canonical ' 'URL found' return url | Internally called to make an alias redirect URL . |
50,960 | def from_file ( cls , file , charset = 'utf-8' , errors = 'strict' , unicode_mode = True ) : close = False f = file if isinstance ( file , basestring ) : f = open ( file , 'r' ) close = True try : data = _decode_unicode ( f . read ( ) , charset , errors ) finally : if close : f . close ( ) return cls ( data , getattr ( f , 'name' , '<template>' ) , charset , errors , unicode_mode ) | Load a template from a file . |
50,961 | def safe_str_cmp ( a , b ) : if _builtin_safe_str_cmp is not None : return _builtin_safe_str_cmp ( a , b ) if len ( a ) != len ( b ) : return False rv = 0 if isinstance ( a , bytes ) and isinstance ( b , bytes ) and not PY2 : for x , y in izip ( a , b ) : rv |= x ^ y else : for x , y in izip ( a , b ) : rv |= ord ( x ) ^ ord ( y ) return rv == 0 | This function compares strings in somewhat constant time . This requires that the length of at least one string is known in advance . |
50,962 | def gen_salt ( length ) : if length <= 0 : raise ValueError ( 'requested salt of length <= 0' ) return '' . join ( _sys_rng . choice ( SALT_CHARS ) for _ in range_type ( length ) ) | Generate a random string of SALT_CHARS with specified length . |
50,963 | def html ( self , data = '' , py = True ) : if py : value = self . to_html ( data ) else : value = data if self . static : return str ( '<span class="value">%s</span>' % safe_str ( value ) ) else : if self . hidden : build = Hidden else : build = self . build self . _get_http_attrs ( ) return str ( build ( name = self . name , value = value , id = self . id , ** self . html_attrs ) ) | Convert data to html value format . |
50,964 | def validate ( self , data , all_data = None ) : all_data = all_data or { } if hasattr ( data , 'stream' ) : data . file = data . stream if hasattr ( data , 'file' ) : if data . file : v = data . filename else : raise Exception , 'Unsupport type %s' % type ( data ) else : v = data msg = TEST_NOT_EMPTY ( ) ( v ) if self . required : if msg : return False , msg else : if msg : return True , self . default try : if isinstance ( data , list ) : v = [ ] for i in data : v . append ( self . to_python ( i ) ) data = v else : data = self . to_python ( data ) except : return False , unicode ( ERR_CONVERT ) % ( data , self . __class__ . __name__ ) for v in self . get_validators ( ) : msg = v ( data , all_data ) if msg : return False , msg return True , data | if rule in kwargs then validate extra rules |
50,965 | def cache_property ( key , empty , type ) : return property ( lambda x : x . _get_cache_value ( key , empty , type ) , lambda x , v : x . _set_cache_value ( key , v , type ) , lambda x : x . _del_cache_value ( key ) , 'accessor for %r' % key ) | Return a new property object for a cache header . Useful if you want to add support for a cache extension in a subclass . |
50,966 | def set ( self , start , stop , length = None , units = 'bytes' ) : assert is_byte_range_valid ( start , stop , length ) , 'Bad range provided' self . _units = units self . _start = start self . _stop = stop self . _length = length if self . on_update is not None : self . on_update ( self ) | Simple method to update the ranges . |
50,967 | def qop ( self ) : def on_update ( header_set ) : if not header_set and 'qop' in self : del self [ 'qop' ] elif header_set : self [ 'qop' ] = header_set . to_header ( ) return parse_set_header ( self . get ( 'qop' ) , on_update ) | Indicates what quality of protection the client has applied to the message for HTTP digest auth . |
50,968 | def set_basic ( self , realm = 'authentication required' ) : dict . clear ( self ) dict . update ( self , { '__auth_type__' : 'basic' , 'realm' : realm } ) if self . on_update : self . on_update ( self ) | Clear the auth info and enable basic auth . |
50,969 | def get_connection ( connection = '' , engine_name = None , connection_type = 'long' , ** args ) : engine_name = engine_name or __default_engine__ if '://' in connection : d = { 'connection_string' : connection , 'connection_args' : args , 'connection_type' : connection_type , } return engine_manager . add ( engine_name , d ) . engine else : connection = connection or __default_engine__ if connection in engine_manager : return engine_manager [ connection ] . engine else : raise Error ( "Can't find engine %s" % connection ) | Creating an NamedEngine or just return existed engine instance |
50,970 | def get_metadata ( engine_name = None ) : dispatch . get ( None , 'load_models' ) engine = engine_manager [ engine_name ] for tablename , m in engine . models . items ( ) : get_model ( tablename , engine_name , signal = False ) if hasattr ( m , '__dynamic__' ) and getattr ( m , '__dynamic__' ) : m . table . __mapping_only__ = True return engine . metadata | Get metadata used for alembic . It will import all tables |
50,971 | def get_session ( ec = None , create = True ) : ec = ec or __default_engine__ if isinstance ( ec , ( str , unicode ) ) : session = engine_manager [ ec ] . session ( create = True ) elif isinstance ( ec , Session ) : session = ec else : raise Error ( "Connection %r should be existed engine name or Session object" % ec ) return session | ec - engine_name or connection |
50,972 | def rawsql ( query , ec = None ) : if isinstance ( query , Result ) : query = query . get_query ( ) ec = ec or __default_engine__ if isinstance ( ec , ( str , unicode ) ) : engine = engine_manager [ ec ] dialect = engine . engine . dialect else : dialect = ec . dialect if isinstance ( query , ( str , unicode ) ) : return query compiler = query . _compiler ( dialect ) class LiteralCompiler ( compiler . __class__ ) : def visit_bindparam ( self , bindparam , within_columns_clause = False , literal_binds = False , ** kwargs ) : return super ( LiteralCompiler , self ) . render_literal_bindparam ( bindparam , within_columns_clause = within_columns_clause , literal_binds = literal_binds , ** kwargs ) def render_literal_value ( self , value , type_ ) : return repr_value ( value ) compiler = LiteralCompiler ( dialect , query ) return str ( compiler . process ( query ) ) . replace ( '\n' , '' ) | ec could be engine name or engine instance |
50,973 | def get_engine_name ( ec = None ) : ec = ec or __default_engine__ if isinstance ( ec , ( str , unicode ) ) : return ec elif isinstance ( ec , Session ) : return ec . engine_name else : raise Error ( "Parameter ec should be an engine_name or Session object, but %r found" % ec ) | Get the name of an engine or session |
50,974 | def CommitAll ( close = None ) : if close : warnings . simplefilter ( 'default' ) warnings . warn ( "close parameter will not need at all." , DeprecationWarning ) for k , v in engine_manager . items ( ) : session = v . session ( create = False ) if session : session . commit ( ) | Commit all transactions according Local . conn |
50,975 | def RollbackAll ( close = None ) : if close : warnings . simplefilter ( 'default' ) warnings . warn ( "close parameter will not need at all." , DeprecationWarning ) for k , v in engine_manager . items ( ) : session = v . session ( create = False ) if session : session . rollback ( ) | Rollback all transactions according Local . conn |
50,976 | def set_model ( model , tablename = None , created = None , appname = None , model_path = None ) : if isinstance ( model , type ) and issubclass ( model , Model ) : tablename = model . _alias or model . tablename tablename = tablename . lower ( ) d = __models__ . setdefault ( tablename , { } ) engines = d . get ( 'config' , { } ) . pop ( 'engines' , [ 'default' ] ) if isinstance ( engines , ( str , unicode ) ) : engines = [ engines ] d [ 'engines' ] = engines item = { } if created is not None : item [ 'created' ] = created else : item [ 'created' ] = None if isinstance ( model , ( str , unicode ) ) : if model_path is None : model_path = model else : model_path = model_path if not appname : appname = model . rsplit ( '.' , 2 ) [ 0 ] model = None else : appname = model . __module__ . rsplit ( '.' , 1 ) [ 0 ] if model_path is None : model_path = model . __module__ + '.' + model . __name__ else : model_path = '' model . __engines__ = engines item [ 'model' ] = model item [ 'model_path' ] = model_path item [ 'appname' ] = appname d [ 'model_path' ] = model_path d [ 'appname' ] = appname for name in engines : if not isinstance ( name , ( str , unicode ) ) : raise BadValueError ( 'Engine name should be string type, but %r found' % name ) engine_manager [ name ] . models [ tablename ] = item . copy ( ) | Register an model and tablename to a global variable . model could be a string format i . e . uliweb . contrib . auth . models . User |
50,977 | def create_model ( modelname , fields , indexes = None , basemodel = None , ** props ) : assert not props or isinstance ( props , dict ) assert not indexes or isinstance ( indexes , list ) props = SortedDict ( props or { } ) props [ '__dynamic__' ] = True props [ '__config__' ] = False for p in fields : kwargs = p . copy ( ) name = kwargs . pop ( 'name' ) _type = kwargs . pop ( 'type' ) for k in kwargs . keys ( ) : if k . startswith ( '_' ) : kwargs . pop ( k , None ) field_type = get_field_type ( _type ) prop = field_type ( ** kwargs ) props [ name ] = prop if basemodel : model = import_attr ( basemodel ) else : model = Model cls = type ( str ( modelname . title ( ) ) , ( model , ) , props ) tablename = props . get ( '__tablename__' , modelname ) set_model ( cls , tablename , appname = __name__ , model_path = '' ) get_model ( modelname , signal = False , reload = True ) indexes = indexes or [ ] for x in indexes : kwargs = x . copy ( ) name = kwargs . pop ( 'name' ) fields = kwargs . pop ( 'fields' ) for k in kwargs . keys ( ) : if k . startswith ( '_' ) : kwargs . pop ( k , None ) if not isinstance ( fields , ( list , tuple ) ) : raise ValueError ( "Index value format is not right, the value is %r" % indexes ) props = [ ] for y in fields : props . append ( cls . c [ y ] ) Index ( name , * props , ** kwargs ) return cls | Create model dynamically |
50,978 | def reflect_table_model ( table , mapping = None , without_id = False , engine_name = 'default' ) : table = reflect_table ( table , engine_name ) mapping = mapping or { } meta = reflect_table_data ( table ) code = [ 'class {}(Model):' . format ( table . name . title ( ) ) ] code . append ( ' . format ( table . name ) ) if sa_version >= '1.2' and table . comment : code . append ( ' __verbose_name__ = {}\n' . format ( dumps ( table . comment , bool_int = False ) ) ) if 'id' not in meta [ 'columns' ] and without_id : code . append ( ' __without_id__ = True\n' ) for k , v in meta [ 'columns' ] . items ( ) : kw = v [ 1 ] . items ( ) x_v = mapping . get ( v [ 0 ] ) kwargs = ', ' . join ( [ v [ 0 ] ] + [ '{0}={1}' . format ( x , dumps ( y , bool_int = False ) ) for x , y in kw ] ) if x_v : type_class = ' ,type_class={}' . format ( x_v ) else : type_class = '' txt = " " * 4 + "{0} = Field({1}{2})" . format ( k , kwargs , type_class ) code . append ( txt ) if meta [ 'indexes' ] : code . append ( ) for index in meta [ 'indexes' ] : buf = [ ] buf . append ( index [ 'name' ] ) for c in index [ 'columns' ] : buf . append ( 'cls.c.{}' . format ( c ) ) if index [ 'unique' ] : buf . append ( 'unique=True' ) code . append ( ' ' * 8 + 'Index({})' . format ( ', ' . join ( buf ) ) ) return '\n' . join ( code ) | Write table to Model class |
50,979 | def SelfReferenceProperty ( label = None , collection_name = None , ** attrs ) : if 'reference_class' in attrs : raise ConfigurationError ( 'Do not provide reference_class to self-reference.' ) return ReferenceProperty ( _SELF_REFERENCE , label , collection_name , ** attrs ) | Create a self reference . |
50,980 | def session ( self , create = True ) : if hasattr ( self . local , 'session' ) : return self . local . session else : if create : s = Session ( self . name ) self . local . session = s return s | Used to created default session |
50,981 | def get_parameters ( self ) : d = { } for k in [ 'label' , 'verbose_name' , 'required' , 'hint' , 'placeholder' , 'choices' , 'default' , 'validators' , 'max_length' ] : d [ k ] = getattr ( self , k ) return d | Get common attributes and it will be used for Model . relationship clone process |
50,982 | def validate ( self , value ) : if value == '' : if self . kwargs . get ( 'nullable' , __nullable__ ) : value = None else : value = 0 if not isinstance ( value , Model ) : return super ( ReferenceProperty , self ) . validate ( value ) if not value . is_saved ( ) : raise BadValueError ( '%s instance must be saved before it can be stored as a ' 'reference' % self . reference_class . __class__ . __name__ ) if not isinstance ( value , self . reference_class ) : raise KindError ( 'Property %s must be an instance of %s' % ( self . name , self . reference_class . __class__ . __name__ ) ) return value | Validate reference . |
50,983 | def get_fields ( self ) : columns = self . columns model = self . model fields = [ ] for col in columns : if isinstance ( col , ( str , unicode ) ) : v = col . split ( '.' ) if len ( v ) > 1 : field = get_model ( v [ 0 ] , engine_name = self . model . get_engine_name ( ) , signal = False ) . properties ( v [ 1 ] ) else : field = model . properties [ col ] elif isinstance ( col , Column ) : field = get_model ( col . table . name , engine_name = self . model . get_engine_name ( ) , signal = False ) . properties [ col . name ] else : field = col fields . append ( field ) return fields | get property instance according self . columns |
50,984 | def count ( self ) : if self . _group_by or self . _join or self . distinct_field : return self . do_ ( self . get_query ( ) . limit ( None ) . order_by ( None ) . offset ( None ) . alias ( ) . count ( ) ) . scalar ( ) else : return self . do_ ( self . get_query ( ) . with_only_columns ( [ func . count ( ) ] ) . limit ( None ) . order_by ( None ) . offset ( None ) ) . scalar ( ) | If result is True then the count will process result set if result is False then only use condition to count |
50,985 | def update ( self , ** kwargs ) : if self . condition is not None : self . result = self . do_ ( self . model . table . update ( ) . where ( self . condition ) . values ( ** kwargs ) ) else : self . result = self . do_ ( self . model . table . update ( ) . values ( ** kwargs ) ) return self . result | Execute update table set field = field + 1 like statement |
50,986 | def save_file ( self , filename , encoding = 'utf8' , headers = None , convertors = None , display = True , ** kwargs ) : global save_file convertors = convertors or { } headers = headers or [ ] fields = self . get_fields ( ) _header = [ ] for i , column in enumerate ( fields ) : if column . name not in convertors : if display : def f ( value , data ) : return column . get_display_value ( value ) convertors [ column . name ] = f flag = False for j in headers : if not isinstance ( j , dict ) : raise ValueError ( "Header should be a list of dict, but {} type found" . format ( type ( j ) ) ) if j [ 'name' ] == column . name : _header . append ( j ) flag = True break if not flag : d = { 'name' : column . name } if display : d [ 'title' ] = column . verbose_name or column . name else : d [ 'title' ] = column . name _header . append ( d ) return save_file ( self . run ( ) , filename , encoding = encoding , headers = _header , convertors = convertors , ** kwargs ) | save result to a csv file . display = True will convert value according choices value |
50,987 | def all ( self , cache = False ) : if cache : return [ get_object ( self . modelb , obj_id , cache = True , use_local = True ) for obj_id in self . keys ( True ) ] else : return self | can use cache to return objects |
50,988 | def update ( self , * objs ) : keys = self . keys ( ) new_keys = get_objs_columns ( objs , self . realfieldb ) modified = False for v in new_keys : if v in keys : keys . remove ( v ) else : d = { self . fielda : self . valuea , self . fieldb : v } if self . before_save : self . before_save ( d ) if self . through_model : obj = self . through_model ( ** d ) obj . save ( ) else : self . do_ ( self . table . insert ( ) . values ( ** d ) ) modified = True if keys : self . clear ( * keys ) modified = True setattr ( self . instance , self . store_key , new_keys ) return modified | Update the third relationship table but not the ModelA or ModelB |
50,989 | def with_relation ( self , relation_name = None ) : if not relation_name : relation_name = 'relation' if hasattr ( self . modelb , relation_name ) : raise Error ( "The attribute name %s has already existed in Model %s!" % ( relation_name , self . modelb . __name__ ) ) if not self . through_model : raise Error ( "Only with through style in ManyToMany supports with_relation function of Model %s!" % self . modelb . __name__ ) self . with_relation_name = relation_name return self | if relation is not None when fetch manytomany result also fetch relation record and saved them to manytomany object and named them as relation . If relation_name is not given then default value is relation |
50,990 | def in_ ( self , * objs ) : if not objs : return self . table . c [ self . fielda ] != self . table . c [ self . fielda ] else : keys = get_objs_columns ( objs , self . reference_fieldname ) sub_query = select ( [ self . table . c [ self . fielda ] ] , ( self . table . c [ self . fieldb ] == self . reference_class . c [ self . reference_fieldname ] ) & ( self . table . c [ self . fieldb ] . in_ ( keys ) ) ) condition = self . model_class . c [ self . reversed_fieldname ] . in_ ( sub_query ) return condition | Create a condition |
50,991 | def join_in ( self , * objs ) : if not objs : return self . table . c [ self . fielda ] != self . table . c [ self . fielda ] else : keys = get_objs_columns ( objs , self . reference_fieldname ) return ( self . table . c [ self . fielda ] == self . model_class . c [ self . reversed_fieldname ] ) & ( self . table . c [ self . fieldb ] . in_ ( keys ) ) | Create a join condition connect A and C |
50,992 | def join_right_in ( self , * objs ) : if not objs : return self . table . c [ self . fielda ] != self . table . c [ self . fielda ] else : keys = get_objs_columns ( objs , self . reference_fieldname ) return ( self . table . c [ self . fieldb ] == self . reference_class . c [ self . reference_fieldname ] ) & ( self . table . c [ self . fielda ] . in_ ( keys ) ) | Create a join condition connect B and C |
50,993 | def _get_data ( self , fields = None , compare = True ) : fields = fields or [ ] if self . _key is None or self . _key == '' or self . _key == 0 : d = { } for k , v in self . properties . items ( ) : if fields and k not in fields : continue if v . property_type == 'compound' : continue if v . sequence : continue if not isinstance ( v , ManyToMany ) : x = v . get_value_for_datastore ( self ) if isinstance ( x , Model ) : x = x . _key elif x is None or ( k == self . _primary_field and not x ) : if isinstance ( v , DateTimeProperty ) and v . auto_now_add : x = v . now ( ) elif ( v . auto_add or ( not v . auto and not v . auto_add ) ) : x = v . default_value ( ) else : x = v . get_value_for_datastore ( self , cached = True ) if x is not None and not x is Lazy : d [ k ] = x else : d = { } d [ self . _primary_field ] = self . _key for k , v in self . properties . items ( ) : if fields and k not in fields : continue if v . property_type == 'compound' : continue t = self . _old_values . get ( k , None ) if not isinstance ( v , ManyToMany ) : x = v . get_value_for_datastore ( self ) if isinstance ( x , Model ) : x = x . _key else : x = v . get_value_for_datastore ( self , cached = True ) if not x is Lazy : if ( compare and t != self . field_str ( x ) ) or not compare : d [ k ] = x return d | Get the changed property it ll be used to save the object If compare is False then it ll include all data not only changed property |
50,994 | def create_sql ( self , insert = False , version = False , version_fieldname = None , fields = None , ec = None , compare = False ) : version_fieldname = version_fieldname or 'version' if not self . _key or insert : d = self . _get_data ( fields , compare = compare ) if d : return rawsql ( self . table . insert ( ) . values ( ** d ) , ec or self . get_engine_name ( ) ) + ';' else : d = self . _get_data ( fields , compare = compare ) _key = d . pop ( self . _primary_field ) if d : _cond = self . table . c [ self . _primary_field ] == self . _key if version : version_field = self . table . c . get ( version_fieldname ) if version_field is None : raise KindError ( "version_fieldname %s is not existed in Model %s" % ( version_fieldname , self . __class__ . __name__ ) ) _version_value = getattr ( self , version_fieldname , 0 ) d [ version_fieldname ] = _version_value + 1 _cond = ( version_field == _version_value ) & _cond return rawsql ( self . table . update ( _cond ) . values ( ** d ) , ec or self . get_engine_name ( ) ) + ';' return '' | Create sql statement do not process manytomany |
50,995 | def get_collection_name ( cls , from_class_name , collection_name = None , prefix = None ) : if not collection_name : collection_name = prefix + '_set' if hasattr ( cls , collection_name ) : collection_name = prefix + '_set_' + str ( cls . _collection_set_id ) cls . _collection_set_id += 1 else : if collection_name in cls . _collection_names : if cls . _collection_names . get ( collection_name ) != from_class_name : raise DuplicatePropertyError ( "Model %s already has collection property %s" % ( cls . __name__ , collection_name ) ) if collection_name in cls . properties : raise DuplicatePropertyError ( "Model %s already has property %s" % ( cls . __name__ , collection_name ) ) return collection_name | Get reference collection_name if the collection_name is None then make sure the collection_name is not conflict but if the collection_name is not None then check if the collection_name is already exists if existed then raise Exception . |
50,996 | def _use ( cls , ec ) : ConnectModel = type ( cls . __name__ , ( cls , ) , { } ) ConnectModel . tablename = cls . tablename ConnectModel . _base_class = cls if isinstance ( ec , ( str , unicode ) ) : ConnectModel . _engine_name = ec elif isinstance ( ec , Session ) : ConnectModel . _engine_name = ec . engine_name ConnectModel . _connection = ec return ConnectModel | underly implement of use |
50,997 | def use ( cls , ec ) : if isinstance ( ec , ( str , unicode ) ) : m = get_model ( cls . _alias , ec , signal = False ) else : m = cls . _use ( ec ) return m | use will duplicate a new Model class and bind ec ec is Engine name or Session object |
50,998 | def get_tree ( cls , * condition , ** kwargs ) : parent_field = kwargs . pop ( 'parent_field' , 'parent' ) parent = kwargs . pop ( 'parent' , None ) parent_order_by = kwargs . pop ( 'parent_order_by' , None ) current = kwargs . pop ( 'current' , None ) order_by = kwargs . pop ( 'order_by' , None ) id_field = kwargs . pop ( 'id_field' , 'id' ) mode = kwargs . pop ( 'mode' , 'wide' ) if mode not in ( 'wide' , 'deep' ) : raise Exception ( "mode parameter should be 'wide' or 'deep', but '{}' found." . format ( mode ) ) def _f ( parent ) : query = cls . filter ( cls . c [ parent_field ] == parent , * condition ) if order_by is not None : query . order_by ( order_by ) for row in query : if mode == 'wide' : yield row for _row in _f ( getattr ( row , id_field ) ) : yield _row if mode == 'deep' : yield row if current : query = cls . filter ( cls . c [ id_field ] == current ) else : if is_condition ( parent ) : query = cls . filter ( parent ) else : query = cls . filter ( cls . c [ parent_field ] == parent ) if parent_order_by is not None : query . order_by ( parent_order_by ) for row in query : if mode == 'wide' : yield row for r in _f ( getattr ( row , id_field ) ) : yield r if mode == 'deep' : yield row | parent is root parent value default is None current is current value condition is extra condition for select root records mode is search method value is wide or deep |
50,999 | def refresh ( self , fields = None , ** kwargs ) : cond = self . c [ self . _primary_field ] == self . _key query = self . filter ( cond , ** kwargs ) if not fields : fields = list ( self . table . c ) v = query . values_one ( * fields ) if not v : raise NotFound ( 'Instance <{0}:{1}> can not be found' . format ( self . tablename , self . _key ) ) d = self . _data_prepare ( v . items ( ) ) self . update ( ** d ) self . set_saved ( ) | Re get the instance of current id |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.