idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
11,000 | def set ( self , key , value , * , flags = None ) : self . append ( { "Verb" : "set" , "Key" : key , "Value" : encode_value ( value , flags , base64 = True ) . decode ( "utf-8" ) , "Flags" : flags } ) return self | Sets the Key to the given Value |
11,001 | def cas ( self , key , value , * , flags = None , index ) : self . append ( { "Verb" : "cas" , "Key" : key , "Value" : encode_value ( value , flags , base64 = True ) . decode ( "utf-8" ) , "Flags" : flags , "Index" : extract_attr ( index , keys = [ "ModifyIndex" , "Index" ] ) } ) return self | Sets the Key to the given Value with check - and - set semantics |
11,002 | def lock ( self , key , value , * , flags = None , session ) : self . append ( { "Verb" : "lock" , "Key" : key , "Value" : encode_value ( value , flags , base64 = True ) . decode ( "utf-8" ) , "Flags" : flags , "Session" : extract_attr ( session , keys = [ "ID" ] ) } ) return self | Locks the Key with the given Session |
11,003 | def check_index ( self , key , * , index ) : self . append ( { "Verb" : "check-index" , "Key" : key , "Index" : extract_attr ( index , keys = [ "ModifyIndex" , "Index" ] ) } ) return self | Fails the transaction if Key does not have a modify index equal to Index |
11,004 | def check_session ( self , key , * , session = None ) : self . append ( { "Verb" : "check-session" , "Key" : key , "Session" : extract_attr ( session , keys = [ "ID" ] ) } ) return self | Fails the transaction if Key is not currently locked by Session |
11,005 | async def execute ( self , dc = None , token = None ) : token_id = extract_attr ( token , keys = [ "ID" ] ) try : response = await self . _api . put ( "/v1/txn" , data = self . operations , params = { "dc" : dc , "token" : token_id } ) except ConflictError as error : errors = { elt [ "OpIndex" ] : elt for elt in error . value [ "Errors" ] } operations = [ op [ "KV" ] for op in self . operations ] meta = error . meta raise TransactionError ( errors , operations , meta ) from error except Exception as error : raise error else : self . operations [ : ] = [ ] results = [ ] for _ in response . body [ "Results" ] : data = _ [ "KV" ] if data [ "Value" ] is not None : data [ "Value" ] = decode_value ( data [ "Value" ] , data [ "Flags" ] ) results . append ( data ) return results | Execute stored operations |
11,006 | def write ( self , script ) : if self . _closed : raise IOError ( 'tried to write to closed connection' ) script = script . strip ( ) if script : assert self . _parentout is not None self . _parentout . write ( script ) self . _parentout . write ( '\n' ) | Send a script to FORM . |
11,007 | def flush ( self ) : if self . _closed : raise IOError ( 'tried to flush closed connection' ) assert self . _parentout is not None self . _parentout . flush ( ) | Flush the channel to FORM . |
11,008 | def compile ( self ) : sql = '' sql += 'DELETE FROM ' + self . dialect . quote_table ( self . _table ) if self . _where : sql += ' WHERE ' + self . compile_condition ( self . _where ) if self . _order_by : sql += ' ' + self . compile_order_by ( self . _order_by ) if self . _limit : sql += ' LIMIT ' + self . _limit return sql | Compiles the delete sql statement |
11,009 | def clear ( self ) : WhereQuery . clear ( self ) self . _table = None self . _parameters = [ ] self . _sql = None | Clear and reset to orignal state |
11,010 | def startLoading ( self ) : if self . _loading : return False tree = self . treeWidget ( ) if not tree : return self . _loading = True self . setText ( 0 , '' ) lbl = QtGui . QLabel ( self . treeWidget ( ) ) lbl . setMovie ( XLoaderWidget . getMovie ( ) ) lbl . setAlignment ( QtCore . Qt . AlignCenter ) tree . setItemWidget ( self , 0 , lbl ) try : tree . loadStarted . emit ( self ) except AttributeError : pass return True | Updates this item to mark the item as loading . This will create a QLabel with the loading ajax spinner to indicate that progress is occurring . |
11,011 | def emitCurrentChanged ( self ) : if not self . signalsBlocked ( ) : self . currentIndexChanged . emit ( self . currentIndex ( ) ) self . currentUrlChanged . emit ( self . currentUrl ( ) ) self . canGoBackChanged . emit ( self . canGoBack ( ) ) self . canGoForwardChanged . emit ( self . canGoForward ( ) ) | Emits the current index changed signal provided signals are not blocked . |
11,012 | def goHome ( self ) : if not self . canGoBack ( ) : return '' if self . homeUrl ( ) : self . push ( self . homeUrl ( ) ) self . _blockStack = True self . _index = 0 self . emitCurrentChanged ( ) self . _blockStack = False return self . currentUrl ( ) | Goes to the home url . If there is no home url specifically set then \ this will go to the first url in the history . Otherwise it will \ look to see if the home url is in the stack and go to that level if \ the home url is not found then it will be pushed to the top of the \ stack using the push method . |
11,013 | def cli ( conf ) : config = init_config ( conf ) nas_id = config . get ( 'DEFAULT' , 'nas_id' ) nas_addr = config . get ( 'DEFAULT' , 'nas_addr' ) secret = config . get ( 'DEFAULT' , 'radius_secret' ) radius_addr = config . get ( 'DEFAULT' , 'radius_addr' ) radius_auth_port = config . getint ( 'DEFAULT' , 'radius_auth_port' ) radius_timeout = config . getint ( 'DEFAULT' , 'radius_timeout' ) client_config_dir = config . get ( 'DEFAULT' , 'client_config_dir' ) username = os . environ . get ( 'username' ) req = { 'User-Name' : username } req [ 'CHAP-Challenge' ] = get_challenge ( ) req [ 'CHAP-Password-Plaintext' ] = os . environ . get ( 'password' ) req [ "NAS-IP-Address" ] = nas_addr req [ "NAS-Port-Id" ] = '0/0/0:0.0' req [ "NAS-Port" ] = 0 req [ "Service-Type" ] = "Login-User" req [ "NAS-Identifier" ] = nas_id req [ "Called-Station-Id" ] = '00:00:00:00:00:00' req [ "Calling-Station-Id" ] = '00:00:00:00:00:00' def shutdown ( exitcode = 0 ) : reactor . addSystemEventTrigger ( 'after' , 'shutdown' , os . _exit , exitcode ) reactor . stop ( ) def onresp ( r ) : if r . code == packet . AccessAccept : try : ccdattrs = [ ] userip = get_radius_addr_attr ( r , 8 ) if userip : ccdattrs . append ( 'ifconfig-push {0} 255.255.255.0' . format ( userip ) ) with open ( os . path . join ( client_config_dir , username ) , 'wb' ) as ccdfs : ccdfs . write ( '\n' . join ( ccdattrs ) ) except : traceback . print_exc ( ) shutdown ( 0 ) else : shutdown ( 1 ) def onerr ( e ) : log . err ( e ) shutdown ( 1 ) d = client . send_auth ( str ( secret ) , get_dictionary ( ) , radius_addr , authport = radius_auth_port , debug = True , ** req ) d . addCallbacks ( onresp , onerr ) reactor . callLater ( radius_timeout , shutdown , 1 ) reactor . run ( ) | OpenVPN user_pass_verify method |
11,014 | async def create ( self , session , * , dc = None ) : response = await self . _api . put ( "/v1/session/create" , data = session , params = { "dc" : dc } ) return response . body | Creates a new session |
11,015 | async def destroy ( self , session , * , dc = None ) : session_id = extract_attr ( session , keys = [ "ID" ] ) response = await self . _api . put ( "/v1/session/destroy" , session_id , params = { "dc" : dc } ) return response . body is True | Destroys a given session |
11,016 | async def info ( self , session , * , dc = None , watch = None , consistency = None ) : session_id = extract_attr ( session , keys = [ "ID" ] ) response = await self . _api . get ( "/v1/session/info" , session_id , watch = watch , consistency = consistency , params = { "dc" : dc } ) try : result = response . body [ 0 ] except IndexError : meta = extract_meta ( response . headers ) raise NotFound ( "No session for %r" % session_id , meta = meta ) return consul ( result , meta = extract_meta ( response . headers ) ) | Queries a given session |
11,017 | async def renew ( self , session , * , dc = None ) : session_id = extract_attr ( session , keys = [ "ID" ] ) response = await self . _api . put ( "/v1/session/renew" , session_id , params = { "dc" : dc } ) try : result = response . body [ 0 ] except IndexError : meta = extract_meta ( response . headers ) raise NotFound ( "No session for %r" % session_id , meta = meta ) return consul ( result , meta = extract_meta ( response . headers ) ) | Renews a TTL - based session |
11,018 | def _lazy_re_compile ( regex , flags = 0 ) : def _compile ( ) : if isinstance ( regex , str ) : return re . compile ( regex , flags ) else : assert not flags , "flags must be empty if regex is passed pre-compiled" return regex return SimpleLazyObject ( _compile ) | Lazily compile a regex with flags . |
11,019 | def deconstructible ( * args , path = None ) : def decorator ( klass ) : def __new__ ( cls , * args , ** kwargs ) : obj = super ( klass , cls ) . __new__ ( cls ) obj . _constructor_args = ( args , kwargs ) return obj klass . __new__ = staticmethod ( __new__ ) return klass if not args : return decorator return decorator ( * args ) | Class decorator that allows the decorated class to be serialized by the migrations subsystem . |
11,020 | def clear ( self ) : self . _searchEdit . blockSignals ( True ) self . _searchEdit . setText ( '' ) self . _searchEdit . blockSignals ( False ) | Clears the text from the search edit . |
11,021 | def clearAdvancedActions ( self ) : self . _advancedMap . clear ( ) margins = list ( self . getContentsMargins ( ) ) margins [ 2 ] = 0 self . setContentsMargins ( * margins ) | Clears out the advanced action map . |
11,022 | def rebuildButtons ( self ) : for btn in self . findChildren ( XAdvancedButton ) : btn . close ( ) btn . setParent ( None ) btn . deleteLater ( ) for standard , advanced in self . _advancedMap . items ( ) : rect = self . actionGeometry ( standard ) btn = XAdvancedButton ( self ) btn . setFixedWidth ( 22 ) btn . setFixedHeight ( rect . height ( ) ) btn . setDefaultAction ( advanced ) btn . setAutoRaise ( True ) btn . move ( rect . right ( ) + 1 , rect . top ( ) ) btn . show ( ) if btn . icon ( ) . isNull ( ) : btn . setIcon ( QIcon ( resources . find ( 'img/advanced.png' ) ) ) btn . clicked . connect ( self . acceptAdvanced ) | Rebuilds the buttons for the advanced actions . |
11,023 | def rebuild ( self ) : self . _buildData . clear ( ) self . _dateGrid . clear ( ) self . _dateTimeGrid . clear ( ) curr_min = self . _minimumDate curr_max = self . _maximumDate self . _maximumDate = QDate ( ) self . _minimumDate = QDate ( ) self . markForRebuild ( False ) if ( self . currentMode ( ) == XCalendarScene . Mode . Month ) : self . rebuildMonth ( ) elif ( self . currentMode ( ) in ( XCalendarScene . Mode . Week , XCalendarScene . Mode . Day ) ) : self . rebuildDays ( ) items = sorted ( self . items ( ) ) for item in items : item . setPos ( 0 , 0 ) item . hide ( ) for item in items : if ( isinstance ( item , XCalendarItem ) ) : item . rebuild ( ) if ( curr_min != self . _minimumDate or curr_max != self . _maximumDate ) : parent = self . parent ( ) if ( parent and not parent . signalsBlocked ( ) ) : parent . dateRangeChanged . emit ( self . _minimumDate , self . _maximumDate ) | Rebuilds the information for this scene . |
11,024 | def set ( self , time ) : self . _time = time self . _pb . sec = int ( self . _time ) self . _pb . nsec = int ( ( self . _time - self . _pb . sec ) * 10 ** 9 ) | Sets time in seconds since Epoch |
11,025 | def compile ( self , db ) : sql = self . expression if self . alias : sql += ( ' AS ' + db . quote_column ( self . alias ) ) return sql | Building the sql expression |
11,026 | async def register ( self , check , * , token = None ) : token_id = extract_attr ( token , keys = [ "ID" ] ) params = { "token" : token_id } response = await self . _api . put ( "/v1/agent/check/register" , params = params , data = check ) return response . status == 200 | Registers a new local check |
11,027 | async def deregister ( self , check ) : check_id = extract_attr ( check , keys = [ "CheckID" , "ID" ] ) response = await self . _api . get ( "/v1/agent/check/deregister" , check_id ) return response . status == 200 | Deregisters a local check |
11,028 | async def mark ( self , check , status , * , note = None ) : check_id = extract_attr ( check , keys = [ "CheckID" , "ID" ] ) data = { "Status" : status , "Output" : note } response = await self . _api . put ( "/v1/agent/check/update" , check_id , data = data ) return response . status == 200 | Marks a local check as passing warning or critical |
11,029 | def find_by_username ( self , username ) : data = ( db . select ( self . table ) . select ( 'username' , 'email' , 'real_name' , 'password' , 'bio' , 'status' , 'role' , 'uid' ) . condition ( 'username' , username ) . execute ( ) ) if data : return self . load ( data [ 0 ] , self . model ) | Return user by username if find in database otherwise None |
11,030 | def search ( self , ** kw ) : q = db . select ( self . table ) . condition ( 'status' , 'active' ) for k , v in kw : q . condition ( k , v ) data = q . execute ( ) users = [ ] for user in data : users . append ( self . load ( user , self . model ) ) return users | Find the users match the condition in kw |
11,031 | def paginate ( self , page = 1 , perpage = 10 , category = None ) : q = db . select ( self . table ) . fields ( 'title' , 'slug' , 'description' , 'html' , 'css' , 'js' , 'category' , 'status' , 'comments' , 'author' , 'created' , 'pid' ) if category : q . condition ( 'category' , category ) results = ( q . limit ( perpage ) . offset ( ( page - 1 ) * perpage ) . order_by ( 'created' , 'DESC' ) . execute ( ) ) return [ self . load ( data , self . model ) for data in results ] | Paginate the posts |
11,032 | def clear ( self ) : self . setCurrentLayer ( None ) self . _layers = [ ] self . _cache . clear ( ) super ( XNodeScene , self ) . clear ( ) | Clears the current scene of all the items and layers . |
11,033 | def rebuild ( self ) : rect = self . sceneRect ( ) x = rect . left ( ) y = rect . top ( ) w = rect . width ( ) h = rect . height ( ) cx = x + ( w / 2 ) cy = y + ( h / 2 ) self . _centerLines = [ QLine ( cx , rect . top ( ) , cx , rect . bottom ( ) ) , QLine ( rect . left ( ) , cy , rect . right ( ) , cy ) ] delta = self . cellHeight ( ) minor_lines = [ ] major_lines = [ ] count = 1 while delta < ( h / 2 ) : pos_line = QLine ( x , cy + delta , x + w , cy + delta ) neg_line = QLine ( x , cy - delta , x + w , cy - delta ) if count == 10 : major_lines . append ( pos_line ) major_lines . append ( neg_line ) count = 1 else : minor_lines . append ( pos_line ) minor_lines . append ( neg_line ) delta += self . cellHeight ( ) count += 1 delta = self . cellWidth ( ) count = 1 while delta < ( w / 2 ) : pos_line = QLine ( cx + delta , y , cx + delta , y + h ) neg_line = QLine ( cx - delta , y , cx - delta , y + h ) if count == 10 : major_lines . append ( pos_line ) major_lines . append ( neg_line ) count = 1 else : minor_lines . append ( pos_line ) minor_lines . append ( neg_line ) delta += self . cellWidth ( ) count += 1 self . _majorLines = major_lines self . _minorLines = minor_lines self . setDirty ( False ) | Rebuilds the grid lines based on the current settings and \ scene width . This method is triggered automatically and \ shouldn t need to be manually called . |
11,034 | def selectAll ( self ) : currLayer = self . _currentLayer for item in self . items ( ) : layer = item . layer ( ) if ( layer == currLayer or not layer ) : item . setSelected ( True ) | Selects all the items in the scene . |
11,035 | def selectInvert ( self ) : currLayer = self . _currentLayer for item in self . items ( ) : layer = item . layer ( ) if ( layer == currLayer or not layer ) : item . setSelected ( not item . isSelected ( ) ) | Inverts the currently selected items in the scene . |
11,036 | def selectNone ( self ) : currLayer = self . _currentLayer for item in self . items ( ) : layer = item . layer ( ) if ( layer == currLayer or not layer ) : item . setSelected ( False ) | Deselects all the items in the scene . |
11,037 | def setViewMode ( self , state = True ) : if self . _viewMode == state : return self . _viewMode = state if state : self . _mainView . setDragMode ( self . _mainView . ScrollHandDrag ) else : self . _mainView . setDragMode ( self . _mainView . RubberBandDrag ) self . emitViewModeChanged ( ) | Starts the view mode for moving around the scene . |
11,038 | def updateIsolated ( self , force = False ) : if ( not ( self . isolationMode ( ) or force ) ) : return if ( not self . isolationMode ( ) ) : for node in self . nodes ( ) : node . setIsolateHidden ( False ) return selected_nodes = self . selectedNodes ( ) isolated_nodes = set ( selected_nodes ) connections = self . connections ( ) for connection in connections : in_node = connection . inputNode ( ) out_node = connection . outputNode ( ) if ( in_node in selected_nodes or out_node in selected_nodes ) : isolated_nodes . add ( in_node ) isolated_nodes . add ( out_node ) for node in self . nodes ( ) : node . setIsolateHidden ( not node in isolated_nodes ) | Updates the visible state of nodes based on whether or not they are isolated . |
11,039 | def merge ( constraints ) : operators = defaultdict ( set ) for constraint in constraints : operators [ constraint . operator ] . add ( constraint . version ) if gt in operators : gt_ver = sorted ( operators [ gt ] ) [ - 1 ] else : gt_ver = None if ge in operators : ge_ver = sorted ( operators [ ge ] ) [ - 1 ] else : ge_ver = None if lt in operators : lt_ver = sorted ( operators [ lt ] ) [ 0 ] else : lt_ver = None if le in operators : le_ver = sorted ( operators [ le ] ) [ 0 ] else : le_ver = None l_constraint = None if le_ver : if lt_ver : le_constraint = Constraint ( le , le_ver ) lt_constraint = Constraint ( lt , lt_ver ) if le_ver < lt_ver : l_constraint = le_constraint l_less_restrictive_c = lt_constraint else : l_constraint = lt_constraint l_less_restrictive_c = le_constraint LOGGER . debug ( 'Removed constraint %s because it is less ' 'restrictive than %s' , l_less_restrictive_c , l_constraint ) else : l_constraint = Constraint ( le , le_ver ) elif lt_ver : l_constraint = Constraint ( lt , lt_ver ) g_constraint = None if ge_ver : if gt_ver : gt_constraint = Constraint ( gt , gt_ver ) ge_constraint = Constraint ( ge , ge_ver ) if ge_ver <= gt_ver : g_constraint = gt_constraint g_less_restrictive_c = ge_constraint else : g_constraint = ge_constraint g_less_restrictive_c = gt_constraint LOGGER . debug ( 'Removed constraint %s because it is less ' 'restrictive than %s' , g_less_restrictive_c , g_constraint ) else : g_constraint = Constraint ( ge , ge_ver ) elif gt_ver : g_constraint = Constraint ( gt , gt_ver ) if g_constraint and l_constraint : if g_constraint . version == l_constraint . version : if g_constraint . operator == ge and l_constraint . operator == le : operators [ eq ] . add ( g_constraint . version ) LOGGER . debug ( 'Merged constraints: %s and %s into ==%s' , l_constraint , g_constraint , g_constraint . 
version ) l_constraint , g_constraint = None , None else : raise ExclusiveConstraints ( g_constraint , [ l_constraint ] ) elif g_constraint . version > l_constraint . version : raise ExclusiveConstraints ( g_constraint , [ l_constraint ] ) ne_constraints = [ Constraint ( ne , v ) for v in operators [ ne ] ] eq_constraints = [ Constraint ( eq , v ) for v in operators [ eq ] ] if eq_constraints : eq_constraint = eq_constraints . pop ( ) if g_constraint or l_constraint or ne_constraints or eq_constraints : conflict_list = [ c for c in ( g_constraint , l_constraint ) if c ] conflict_list . extend ( ne_constraints ) conflict_list . extend ( eq_constraints ) raise ExclusiveConstraints ( eq_constraint , conflict_list ) return [ eq_constraint ] else : constraints = ne_constraints + [ g_constraint , l_constraint ] return [ c for c in constraints if c ] | Merge constraints . |
11,040 | def match ( self , version ) : return all ( constraint . match ( version ) for constraint in self . constraints ) | Match version with this collection of constraints . |
11,041 | def set_memcached_backend ( self , config ) : config [ 'BACKEND' ] = 'django_pylibmc.memcached.PyLibMCCache' if is_importable ( config [ 'BACKEND' ] ) : return if config . get ( 'BINARY' ) and is_importable ( 'django_bmemcached' ) : config [ 'BACKEND' ] = 'django_bmemcached.memcached.BMemcached' return if not any ( [ config . get ( key ) for key in ( 'BINARY' , 'USERNAME' , 'PASSWORD' ) ] ) : if is_importable ( 'pylibmc' ) : config [ 'BACKEND' ] = 'django.core.cache.backends.memcached.PyLibMCCache' elif is_importable ( 'memcached' ) : config [ 'BACKEND' ] = 'django.core.cache.backends.memcached.MemcachedCache' | Select the most suitable Memcached backend based on the config and on what s installed |
11,042 | def addEntry ( self ) : joiner = self . joiner ( ) curr_joiner = self . _containerWidget . currentJoiner ( ) if joiner != curr_joiner : if not self . _last : self . updateJoin ( ) return self . _containerWidget . setCurrentJoiner ( joiner ) self . _containerWidget . addEntry ( entry = self ) | This will either add a new widget or switch the joiner based on the state of the entry |
11,043 | def assignPlugin ( self ) : self . uiOperatorDDL . blockSignals ( True ) self . uiOperatorDDL . clear ( ) plugin = self . currentPlugin ( ) if plugin : flags = 0 if not self . queryWidget ( ) . showReferencePlugins ( ) : flags |= plugin . Flags . ReferenceRequired self . uiOperatorDDL . addItems ( plugin . operators ( ignore = flags ) ) self . uiOperatorDDL . blockSignals ( False ) self . assignEditor ( ) | Assigns an editor based on the current column for this schema . |
11,044 | def assignEditor ( self ) : plugin = self . currentPlugin ( ) column = self . currentColumn ( ) value = self . currentValue ( ) if not plugin : self . setEditor ( None ) return self . setUpdatesEnabled ( False ) self . blockSignals ( True ) op = self . uiOperatorDDL . currentText ( ) self . setEditor ( plugin . createEditor ( self , column , op , value ) ) self . setUpdatesEnabled ( True ) self . blockSignals ( False ) | Assigns the editor for this entry based on the plugin . |
11,045 | def refreshButtons ( self ) : last = self . _last first = self . _first joiner = self . _containerWidget . currentJoiner ( ) if first : self . uiJoinSBTN . setActionTexts ( [ 'AND' , 'OR' ] ) elif joiner == QueryCompound . Op . And : self . uiJoinSBTN . setActionTexts ( [ 'AND' ] ) else : self . uiJoinSBTN . setActionTexts ( [ 'OR' ] ) if last : self . uiJoinSBTN . setCurrentAction ( None ) else : act = self . uiJoinSBTN . findAction ( QueryCompound . Op [ joiner ] . upper ( ) ) self . uiJoinSBTN . setCurrentAction ( act ) enable = QueryCompound . typecheck ( self . _query ) or self . isChecked ( ) self . uiEnterBTN . setEnabled ( enable ) | Refreshes the buttons for building this sql query . |
11,046 | def updateJoin ( self ) : text = self . uiJoinSBTN . currentAction ( ) . text ( ) if text == 'AND' : joiner = QueryCompound . Op . And else : joiner = QueryCompound . Op . Or self . _containerWidget . setCurrentJoiner ( self . joiner ( ) ) | Updates the joining method used by the system . |
11,047 | def is_chinese ( name ) : if not name : return False for ch in name : ordch = ord ( ch ) if not ( 0x3400 <= ordch <= 0x9fff ) and not ( 0x20000 <= ordch <= 0x2ceaf ) and not ( 0xf900 <= ordch <= ordch ) and not ( 0x2f800 <= ordch <= 0x2fa1f ) : return False return True | Check if a symbol is a Chinese character . |
11,048 | def pinyin ( char , variant = 'mandarin' , sep = ' ' , out = 'tones' ) : if len ( char ) > 1 : return sep . join ( [ pinyin ( c , variant = variant , sep = sep , out = out ) for c in char ] ) if not is_chinese ( char ) : return char if char in _cd . GBK : char = gbk2big5 ( char ) out_char = _cd . UNIHAN . get ( char , { variant : '?({0}' . format ( char ) } ) . get ( variant , '!({0})' . format ( char ) ) if out != 'tones' : out_char = '' . join ( [ tone_converter . get ( x , x ) for x in out_char ] ) return out_char | Retrieve Pinyin of a character . |
11,049 | def parse_baxter ( reading ) : initial = '' medial = '' final = '' tone = '' inienv = True medienv = False finenv = False tonenv = False inichars = "pbmrtdnkgnsyhzl'x" chars = list ( reading ) for char in chars : if char in 'jw' and not finenv : inienv , medienv , finenv , tonenv = False , True , False , False elif char not in inichars or finenv : if char in 'XH' : inienv , medienv , finenv , tonenv = False , False , False , True else : inienv , medienv , finenv , tonenv = False , False , True , False if inienv : initial += char if medienv : medial += char if finenv : final += char if tonenv : tone += char if not tone and final [ - 1 ] in 'ptk' : tone = 'R' elif not tone : tone = 'P' if 'j' not in medial and 'y' in initial : medial += 'j' if final [ 0 ] in 'u' and 'w' not in medial : medial = 'w' + medial return initial , medial , final , tone | Parse a Baxter string and render it with all its contents namely initial medial final and tone . |
11,050 | def chars2gloss ( chars ) : out = [ ] chars = gbk2big5 ( chars ) for char in chars : tmp = [ ] if char in _cd . TLS : for entry in _cd . TLS [ char ] : baxter = _cd . TLS [ char ] [ entry ] [ 'UNIHAN_GLOSS' ] if baxter != '?' : tmp += [ baxter ] out += [ ',' . join ( tmp ) ] return out | Get the TLS basic gloss for a characters . |
11,051 | def baxter2ipa ( mch , segmented = False ) : out = mch if out [ - 1 ] in 'ptk' : out += 'R' elif out [ - 1 ] not in 'XHP' : out += 'P' for s , t in _cd . GY [ 'ipa' ] : out = out . replace ( s , t ) if segmented : return parse_chinese_morphemes ( out ) return out | Very simple aber convient - enough conversion from baxter MCH to IPA MCH . this is also more or less already implemented in MiddleChinese |
11,052 | def gbk2big5 ( chars ) : out = '' for char in chars : if char in _cd . GBK : out += _cd . BIG5 [ _cd . GBK . index ( char ) ] else : out += char return out | Convert from gbk format to big5 representation of chars . |
11,053 | def big52gbk ( chars ) : out = '' for char in chars : if char in _cd . BIG5 : out += _cd . GBK [ _cd . BIG5 . index ( char ) ] else : out += char return out | Convert from long chars to short chars . |
11,054 | def add_transform ( self , key , xslt ) : self . _remove_converter ( key ) self . _xsltLibrary [ key ] = xslt self . _add_converter ( key ) | Add or update a transform . |
11,055 | def _refresh_converters ( self ) : self . _converters . clear ( ) return reduce ( lambda a , b : a and b , [ self . _add_converter ( k ) for k in list ( self . _xsltLibrary . keys ( ) ) ] , True ) | Refresh all of the converters in the py4j library |
11,056 | def transform ( self , key , xml , ** kwargs ) : if key in self . _xsltLibrary and self . gateway_connected ( ) and key in self . _converters : return self . _converters [ key ] . transform ( xml , self . _parms ( ** kwargs ) ) return None | Transform the supplied XML using the transform identified by key |
11,057 | async def items ( self , * , dc = None , watch = None , consistency = None ) : response = await self . _api . get ( "/v1/query" , params = { "dc" : dc } ) return response . body | Provides a listing of all prepared queries |
11,058 | async def create ( self , query , * , dc = None ) : if "Token" in query : query [ "Token" ] = extract_attr ( query [ "Token" ] , keys = [ "ID" ] ) response = await self . _api . post ( "/v1/query" , params = { "dc" : dc } , data = query ) return response . body | Creates a new prepared query |
11,059 | async def update ( self , query , * , dc = None ) : query_id = extract_attr ( query , keys = [ "ID" ] ) response = await self . _api . put ( "/v1/query" , query_id , params = { "dc" : dc } , data = query ) return response . status == 200 | Updates existing prepared query |
11,060 | async def delete ( self , query , * , dc = None ) : query_id = extract_attr ( query , keys = [ "ID" ] ) response = await self . _api . delete ( "/v1/query" , query_id , params = { "dc" : dc } ) return response . status == 200 | Delete existing prepared query |
11,061 | async def execute ( self , query , * , dc = None , near = None , limit = None , consistency = None ) : query_id = extract_attr ( query , keys = [ "ID" ] ) response = await self . _api . get ( "/v1/query/%s/execute" % query_id , params = { "dc" : dc , "near" : near , "limit" : limit } , consistency = consistency ) return response . body | Executes a prepared query |
11,062 | def load ( self ) : if self . _loaded : return rset = self . recordSet ( ) QApplication . setOverrideCursor ( Qt . WaitCursor ) self . loadRecords ( rset ) QApplication . restoreOverrideCursor ( ) | Loads the records from the query set linked with this item . |
11,063 | def _tabulate ( rows , headers , spacing = 5 ) : if len ( rows ) == 0 : return "None\n" assert len ( rows [ 0 ] ) == len ( headers ) count = len ( rows [ 0 ] ) widths = [ 0 for _ in range ( count ) ] rows = [ headers ] + rows for row in rows : for index , field in enumerate ( row ) : if len ( str ( field ) ) > widths [ index ] : widths [ index ] = len ( str ( field ) ) output = "" for row in rows : for index , field in enumerate ( row ) : field = str ( field ) output += field + ( widths [ index ] - len ( field ) + spacing ) * " " output += "\n" return output | Prepare simple table with spacing based on content |
11,064 | def add_item ( self , item ) : if not ( isinstance ( item . name , basestring ) and isinstance ( item . description , basestring ) ) : raise TypeError ( "Name and description should be strings, are of type {} and {}" . format ( type ( item . name ) , type ( item . description ) ) ) if not ( isinstance ( item . flag_type , FlagType ) ) : raise TypeError ( "Flag type should be of type FlagType, is of {}" . format ( type ( item . flag_type ) ) ) if item . name not in self . _flags : if item . default is not None : if item . default is not False : item . description = item . description + " (default: %(default)s)" self . _flags [ item . name ] = item else : self . _flags [ item . name ] = item | Add single command line flag |
11,065 | def add_multiple ( self , flags ) : if not isinstance ( flags , list ) : raise TypeError ( "Expected list of flags, got object of type{}" . format ( type ( flags ) ) ) for flag in flags : if isinstance ( flag , Flag ) : self . add_item ( flag ) elif isinstance ( flag , tuple ) : try : item = Flag ( * flag ) self . add_item ( item ) except TypeError as e : raise TypeError ( "Invalid arguments to initialize a flag definition, expect ({0} [, {1}]) but got {3}" . format ( ", " . join ( Flag . REQUIRED_FIELDS ) , ", " . join ( Flag . OPTIONAL_FIELDS ) , flag ) ) | Add multiple command line flags |
11,066 | def gotoNext ( self ) : scene = self . scene ( ) date = scene . currentDate ( ) if ( scene . currentMode ( ) == scene . Mode . Day ) : scene . setCurrentDate ( date . addDays ( 1 ) ) elif ( scene . currentMode ( ) == scene . Mode . Week ) : scene . setCurrentDate ( date . addDays ( 7 ) ) elif ( scene . currentMode ( ) == scene . Mode . Month ) : scene . setCurrentDate ( date . addMonths ( 1 ) ) | Goes to the next date based on the current mode and date . |
11,067 | def zoomExtents ( self ) : rect = self . scene ( ) . visibleItemsBoundingRect ( ) vrect = self . viewportRect ( ) if rect . width ( ) : changed = False scene_rect = self . scene ( ) . sceneRect ( ) if scene_rect . width ( ) < rect . width ( ) : scene_rect . setWidth ( rect . width ( ) + 150 ) scene_rect . setX ( - scene_rect . width ( ) / 2.0 ) changed = True if scene_rect . height ( ) < rect . height ( ) : scene_rect . setHeight ( rect . height ( ) + 150 ) scene_rect . setY ( - scene_rect . height ( ) / 2.0 ) changed = True if changed : self . scene ( ) . setSceneRect ( scene_rect ) self . fitInView ( rect , Qt . KeepAspectRatio ) if not self . signalsBlocked ( ) : self . zoomAmountChanged . emit ( self . zoomAmount ( ) ) | Fits all the nodes in the view . |
11,068 | def zipdir ( path , ziph , ** kwargs ) : str_arcroot = "" for k , v in kwargs . items ( ) : if k == 'arcroot' : str_arcroot = v for root , dirs , files in os . walk ( path ) : for file in files : str_arcfile = os . path . join ( root , file ) if len ( str_arcroot ) : str_arcname = str_arcroot . split ( '/' ) [ - 1 ] + str_arcfile . split ( str_arcroot ) [ 1 ] else : str_arcname = str_arcfile try : ziph . write ( str_arcfile , arcname = str_arcname ) except : print ( "Skipping %s" % str_arcfile ) | Zip up a directory . |
def zip_process(**kwargs):
    """Zip or unzip a local path.

    Keyword Args:
        path (str): local file/directory to zip, or extraction target.
        action (str): 'zip' (default) or anything else for unzip.
        payloadFile (str): zip file to read when unzipping; when zipping,
            a fresh UUID-named zip is created instead.
        arcroot (str): forwarded to zipdir() to shorten archive names.

    Returns:
        dict: status record including the processed file name, mode,
        size, and timestamp.
    """
    str_localPath = ""
    str_zipFileName = ""
    str_action = "zip"
    str_arcroot = ""
    for k, v in kwargs.items():
        if k == 'path':
            str_localPath = v
        if k == 'action':
            str_action = v
        if k == 'payloadFile':
            str_zipFileName = v
        if k == 'arcroot':
            str_arcroot = v
    if str_action == 'zip':
        str_mode = 'w'
        # Zipping always writes to a fresh UUID-named archive.
        str_zipFileName = '%s.zip' % uuid.uuid4()
    else:
        str_mode = 'r'
    try:
        ziphandler = zipfile.ZipFile(str_zipFileName, str_mode, zipfile.ZIP_DEFLATED)
        if str_mode == 'w':
            if os.path.isdir(str_localPath):
                zipdir(str_localPath, ziphandler, arcroot=str_arcroot)
            else:
                if len(str_arcroot):
                    str_arcname = str_arcroot.split('/')[-1] + str_localPath.split(str_arcroot)[1]
                else:
                    str_arcname = str_localPath
                try:
                    ziphandler.write(str_localPath, arcname=str_arcname)
                except:
                    # Source file missing/unreadable: clean up the partial
                    # archive and report failure early.
                    # NOTE(review): bare except — consider narrowing to OSError.
                    ziphandler.close()
                    os.remove(str_zipFileName)
                    return {'msg': json.dumps({"msg": "No file or directory found for '%s'" % str_localPath}),
                            'status': False}
        if str_mode == 'r':
            ziphandler.extractall(str_localPath)
        ziphandler.close()
        str_msg = '%s operation successful' % str_action
        b_status = True
    except:
        # NOTE(review): bare except — any failure (including a bad zip path)
        # is reported only through the status record.
        str_msg = '%s operation failed' % str_action
        b_status = False
    return {'msg': str_msg,
            'fileProcessed': str_zipFileName,
            'status': b_status,
            'path': str_localPath,
            'zipmode': str_mode,
            'filesize': "{:,}".format(os.stat(str_zipFileName).st_size),
            'timestamp': '%s' % datetime.datetime.now()}
def base64_process(**kwargs):
    """Base64-encode or -decode a payload to a file.

    Keyword Args:
        action (str): 'encode' (default) or 'decode'.
        payloadBytes: raw data to process when no input file is given.
        payloadFile (str): file whose bytes are encoded.
        saveToFile (str): destination file for the result.

    Returns:
        dict: status record, or ``None`` for an unrecognized action.
    """
    str_fileToSave = kwargs.get('saveToFile', "")
    str_fileToRead = kwargs.get('payloadFile', "")
    str_action = kwargs.get('action', "encode")
    data = kwargs.get('payloadBytes', None)
    if str_action == "encode":
        if len(str_fileToRead):
            # FIX: the explicit f.close() inside each with-block was
            # redundant — the context manager already closes the file.
            with open(str_fileToRead, 'rb') as f:
                data = f.read()
        data_b64 = base64.b64encode(data)
        with open(str_fileToSave, 'wb') as f:
            f.write(data_b64)
        return {'msg': 'Encode successful',
                'fileProcessed': str_fileToSave,
                'status': True}
    if str_action == "decode":
        # Extra '===' padding lets b64decode tolerate payloads whose
        # padding was stripped in transit (assumes str payloads).
        bytes_decoded = base64.b64decode(data + "===")
        with open(str_fileToSave, 'wb') as f:
            f.write(bytes_decoded)
        return {'msg': 'Decode successful',
                'fileProcessed': str_fileToSave,
                'status': True}
def storage_resolveBasedOnKey(self, *args, **kwargs):
    """Ask the remote service for the storage location bound to a key.

    Keyword Args:
        key (str): lookup key forwarded in the internalctl message.

    Returns:
        dict: ``{'status': ..., 'path': ...}`` taken from the remote reply.
    """
    global Gd_internalvar
    d_msg = {
        'action': 'internalctl',
        'meta': {
            'var': 'key2address',
            'compute': '<key>'
        }
    }
    str_key = kwargs.get('key', "")
    d_msg['meta']['key'] = str_key
    d_ret = self.pullPath_core(d_msg=d_msg)
    # BUG FIX: the original returned the undefined name `str_internalLocation`
    # (a guaranteed NameError) with a hard-coded False status, discarding
    # d_ret entirely. Report the remote call's result instead.
    # NOTE(review): assumes pullPath_core's reply carries 'status' and 'path'
    # keys — confirm against the service contract.
    return {
        'status': d_ret.get('status', False),
        'path': d_ret.get('path', "")
    }
def remoteLocation_resolveSimple(self, d_remote):
    """Return the remote location from *d_remote*.

    Either a 'path' or a 'key' entry marks the record as resolved; when
    both are present the 'key' value wins because it is examined last.
    """
    resolved_path = ""
    found = False
    for field in ('path', 'key'):
        if field in d_remote:
            resolved_path = d_remote[field]
            found = True
    return {'status': found, 'path': resolved_path}
def remoteLocation_resolve(self, d_remote):
    """Resolve the remote path from a remote JSON record.

    A literal 'path' entry is used directly; a 'key' entry is resolved
    through the storage service and, when successful, overrides the path.
    """
    b_ok = False
    str_path = ""
    if 'path' in d_remote.keys():
        str_path = d_remote['path']
        b_ok = True
    if 'key' in d_remote.keys():
        resolved = self.storage_resolveBasedOnKey(key=d_remote['key'])
        if resolved['status']:
            b_ok = True
            str_path = resolved['path']
    return {'status': b_ok, 'path': str_path}
def path_localLocationCheck(self, d_msg, **kwargs):
    """Check whether a path exists on the local filesystem.

    For pull-type operations the *parent* directory of the target is
    checked (and optionally created/recreated via 'createDir'); for
    other operations the full path itself is checked.

    Returns:
        dict: a 'check' record with dir/isfile/isdir flags, the overall
        'status', and a timestamp.
    """
    b_pull = False
    d_meta = d_msg['meta']
    if 'do' in d_meta:
        if d_meta['do'] == 'pull':
            b_pull = True
    # 'to' takes precedence over 'local' as the target descriptor.
    if 'local' in d_meta:
        d_local = d_meta['local']
    if 'to' in d_meta:
        d_local = d_meta['to']
    str_localPathFull = d_local['path']
    str_localPath, str_unpack = os.path.split(str_localPathFull)
    str_msg = ''
    str_checkedDir = str_localPathFull
    b_isFile = os.path.isfile(str_localPathFull)
    b_isDir = os.path.isdir(str_localPathFull)
    b_exists = os.path.exists(str_localPathFull)
    if 'pull' in d_msg['action'] or b_pull:
        # Pulls check the parent directory, since the target itself is
        # about to be created.
        str_checkedDir = str_localPath
        b_isFile = os.path.isfile(str_localPath)
        b_isDir = os.path.isdir(str_localPath)
        b_exists = os.path.exists(str_localPath)
        if 'createDir' in d_local.keys():
            if d_local['createDir']:
                if os.path.isdir(str_localPathFull):
                    # NOTE(review): destructive — an existing target dir is
                    # removed before recreation; confirm callers expect this.
                    self.dp.qprint('Removing local path %s...' % str_localPathFull)
                    shutil.rmtree(str_localPathFull)
                    str_msg = 'Removed existing local path... '
                self.dp.qprint('Creating empty local path %s...' % str_localPathFull)
                os.makedirs(str_localPathFull)
                b_exists = True
                str_msg += 'Created new local path'
            else:
                str_msg = 'local path already exists!'
                # Without createDir, an existing directory target only counts
                # as usable when writeInExisting allows it.
                if 'writeInExisting' in d_local.keys():
                    if not d_local['writeInExisting']:
                        if b_isDir:
                            b_exists = False
                else:
                    if b_isDir:
                        b_exists = False
    d_ret = {'action': d_msg['action'],
             'dir': str_checkedDir,
             'status': b_exists,
             'isfile': b_isFile,
             'isdir': b_isDir,
             'msg': str_msg}
    return {'check': d_ret,
            'status': d_ret['status'],
            'timestamp': '%s' % datetime.datetime.now()}
def find_executable(name):
    """Return the path of an executable file, appending '.exe' on
    Windows/OS2 platforms before searching."""
    windows_like = sys.platform.startswith('win') or os.name.startswith('os2')
    if windows_like:
        name = name + '.exe'
    return find_file(name, deep=True)
def readCorpus(location):
    """Return the contents of a .txt file, or of every file in a
    directory, concatenated into one string.

    Args:
        location (str): a path containing '.txt' (read as one file) or a
            directory whose files are all read and concatenated.
    """
    print("Reading corpus from file(s)...")
    if '.txt' in location:
        with open(location) as fp:
            return fp.read()
    # PERF FIX: collect pieces and join once instead of repeated `+=`
    # string concatenation, which is quadratic over many files.
    # (Also drops the unused nFiles local.)
    pieces = []
    for f in tqdm(listdir(location)):
        with open(location + "/" + f) as fp:
            pieces.append(fp.read())
    return ''.join(pieces)
def validate(data):
    """Validate an incoming message payload.

    Requires a non-empty 'text' string; an optional 'markdown' must be a
    bool; optional 'attachments' must be a list/tuple whose items each
    carry 'text' or 'title'.

    Raises:
        ValueError: on the first violated constraint.

    Returns:
        bool: True when the payload is valid.
    """
    text = data.get('text')
    if not isinstance(text, _string_types) or len(text) == 0:
        raise ValueError('text field is required and should not be empty')
    # IDIOM FIX: isinstance() instead of `type(...) is bool`; behavior is
    # identical because bool cannot be subclassed.
    if 'markdown' in data and not isinstance(data['markdown'], bool):
        raise ValueError('markdown field should be bool')
    if 'attachments' in data:
        if not isinstance(data['attachments'], (list, tuple)):
            raise ValueError('attachments field should be list or tuple')
        for attachment in data['attachments']:
            if 'text' not in attachment and 'title' not in attachment:
                raise ValueError('text or title is required in attachment')
    return True
def send(url, data):
    """Validate *data* and POST it as JSON to the incoming-webhook *url*."""
    validate(data)
    response = requests.post(url, json=data)
    return response
def switch_or_run(cmd, venv_name=None):
    """Switch the shell into the named venv, or run *cmd* inside it.

    With a command, delegates to _run. Otherwise writes shell-source
    lines (activation, env vars, optional autojump cd) to the extra
    source file and exits with EVAL_EXIT_CODE so the wrapping shell
    function evaluates them.
    """
    if cmd:
        return _run(venv_name, cmd)
    inenv = InenvManager()
    if not os.getenv(INENV_ENV_VAR):
        # Not running under the shell activator: warn and bail out.
        activator_warn(inenv)
        return
    else:
        venv = inenv.get_prepped_venv(venv_name)
        inenv.clear_extra_source_file()
        inenv.write_extra_source_file("source {}".format(venv.activate_shell_file))
        inenv.write_extra_source_file(override_envars_and_deactivate(inenv.get_envvars(venv_name)))
        if autojump_enabled():
            directory = inenv.guess_contents_dir(venv_name)
            inenv.write_extra_source_file('cd {}'.format(directory))
            click.secho("Jumping to {}".format(directory), fg='green')
        # Signal the shell wrapper to eval the generated source file.
        sys.exit(EVAL_EXIT_CODE)
def rm(venv_name):
    """Remove the named virtualenv's directory after confirmation.

    Raises:
        click.Abort: when the user declines the confirmation prompt.
    """
    inenv = InenvManager()
    venv = inenv.get_venv(venv_name)
    # BUG FIX: the confirmation result was ignored, so the venv directory
    # was deleted even when the user answered "no". abort=True stops the
    # command on rejection.
    click.confirm("Delete dir {}".format(venv.path), abort=True)
    shutil.rmtree(venv.path)
def root(venv_name):
    """Print the root directory of a registered virtualenv."""
    manager = InenvManager()
    manager.get_venv(venv_name)
    registered = manager.registered_venvs[venv_name]
    click.secho(registered['root'])
def init(venv_name):
    """Build and prepare the named virtualenv, warning if the shell
    activator is not installed."""
    manager = InenvManager()
    manager.get_prepped_venv(venv_name, skip_cached=False)
    if not os.getenv(INENV_ENV_VAR):
        activator_warn(manager)
    click.secho("Your venv is ready. Enjoy!", fg='green')
def autojump():
    """Toggle the autojump (cd-into-project-on-activate) behavior and
    report the new state."""
    was_enabled = autojump_enabled()
    toggle_autojump()
    if was_enabled:
        click.secho("Autojump disabled", fg='red')
    else:
        click.secho("Autojump enabled", fg='green')
def clear(self):
    """Reset every cached ruler metric to its unset default."""
    for attr in ('_minimum', '_maximum', '_step',
                 '_notches', '_format', '_formatter'):
        setattr(self, attr, None)
    self._padEnd = 0
    self._padStart = 0
def keyPressEvent(self, event):
    """Emit queryEntered when Enter/Return is pressed, then defer to the
    base-class handler."""
    pressed = event.key()
    if pressed in (Qt.Key_Return, Qt.Key_Enter):
        self.queryEntered.emit(self.query())
    super(XOrbQuickFilterWidget, self).keyPressEvent(event)
def rebuild(self):
    """Rebuild the filter editor widgets from the filter format string.

    Without a table type the raw format is shown as a label. Otherwise the
    layout is torn down and one row of label/editor pairs is built per
    format line, registering each editor in self._plugins as
    (column, operator, plugin, editor) tuples.
    """
    table = self.tableType()
    form = nativestring(self.filterFormat())
    if not table and form:
        # No table bound: just display the raw format text.
        if self.layout().count() == 0:
            self.layout().addWidget(QLabel(form, self))
        else:
            self.layout().itemAt(0).widget().setText(form)
        return
    elif not form:
        return
    # Tear down all existing child widgets before rebuilding.
    for child in self.findChildren(QWidget):
        child.close()
        child.setParent(None)
        child.deleteLater()
    self.setUpdatesEnabled(False)
    schema = table.schema()
    vlayout = self.layout()
    for i in range(vlayout.count()):
        vlayout.takeAt(0)
    self._plugins = []
    for line in form.split('\n'):
        row = QHBoxLayout()
        row.setContentsMargins(0, 0, 0, 0)
        row.setSpacing(0)
        # Each match is (label text, "column[:operator]") from the format.
        for label, lookup in FORMAT_SPLITTER.findall(line):
            lbl = QLabel(label, self)
            row.addWidget(lbl)
            opts = lookup.split(':')
            if len(opts) == 1:
                # Default operator when none is given in the format.
                opts.append('is')
            column = schema.column(opts[0])
            if not column:
                continue
            plugin = self.pluginFactory().plugin(column)
            if not plugin:
                continue
            editor = plugin.createEditor(self, column, opts[1], None)
            if editor:
                editor.setObjectName(opts[0])
                row.addWidget(editor)
            self._plugins.append((opts[0], opts[1], plugin, editor))
        row.addStretch(1)
        vlayout.addLayout(row)
    self.setUpdatesEnabled(True)
    self.adjustSize()
def showMenu(self, point):
    """Show the context menu for this filter widget and, when 'Edit quick
    filter...' is chosen, open the format editor dialog."""
    menu = QMenu(self)
    edit_action = menu.addAction('Edit quick filter...')
    chosen = menu.exec_(self.mapToGlobal(point))
    if chosen == edit_action:
        text, accepted = XTextEdit.getText(self.window(),
                                           'Edit Format',
                                           'Format:',
                                           self.filterFormat(),
                                           wrapped=False)
        if accepted:
            self.setFilterFormat(text)
def set_nonblock(fd):
    """Switch file descriptor *fd* into non-blocking mode by OR-ing
    O_NONBLOCK into its current status flags."""
    current_flags = fcntl.fcntl(fd, fcntl.F_GETFL)
    fcntl.fcntl(fd, fcntl.F_SETFL, current_flags | os.O_NONBLOCK)
def read(self):
    """Return any buffered data followed by everything currently readable
    from the raw stream, clearing the buffer."""
    fresh = self._raw.read()
    combined = self._buf + fresh
    # Clear only after the raw read succeeds, so the buffer survives a
    # read error (same ordering as before).
    self._buf = ''
    return combined
def on_open(self, ws):
    """Websocket on_open handler: spawn a background thread that pings
    the server every keep_alive_interval seconds."""
    def _pinger(interval):
        # Loops forever; lives for the duration of the connection process.
        while True:
            time.sleep(interval)
            self.ping()
    start_new_thread(_pinger, (self.keep_alive_interval,))
def on_message(self, ws, message):
    """Websocket on_message handler: decode the JSON payload and enqueue
    it as an RTMMessage, recording an error on bad JSON."""
    try:
        payload = json.loads(message)
    except Exception:
        self._set_error(message, "decode message failed")
    else:
        self._inbox.put(RTMMessage(payload))
def send(self, message):
    """Send an RTMMessage over the websocket, assigning a fresh call_id
    when the message does not already carry one. Call only after the
    loop has started."""
    needs_id = "call_id" not in message
    if needs_id:
        message["call_id"] = self.gen_call_id()
    payload = message.to_json()
    self._ws.send(payload)
def get_message(self, block=False, timeout=None):
    """Pop and return the next RTMMessage from the inbox, or None when
    the queue is empty (or the get fails)."""
    try:
        return self._inbox.get(block=block, timeout=timeout)
    except Exception:
        return None
def get_error(self, block=False, timeout=None):
    """Pop and return the next error record from self._errors, or None
    when the queue is empty (or the get fails)."""
    try:
        return self._errors.get(block=block, timeout=timeout)
    except Exception:
        return None
def createEditor(self, parent, column, operator, value):
    """Create an editor widget for an enum column, making it checkable
    for multi-select 'contains' style operators."""
    widget = super(EnumPlugin, self).createEditor(parent, column, operator, value)
    widget.setEnum(column.enum())
    if operator in ('contains', 'does not contain'):
        widget.setCheckable(True)
    widget.setCurrentValue(value)
    return widget
def setupQuery(self, query, op, editor):
    """Apply the editor's selected record to the query; abort with False
    when an editor is present but nothing is selected."""
    if editor is not None and editor.currentRecord() is None:
        return False
    return super(ForeignKeyPlugin, self).setupQuery(query, op, editor)
def color_gen(colormap='viridis', key=None, n=15):
    """Endless color generator for Bokeh plots.

    *colormap* is either the name of a bokeh palette attribute (dicts are
    indexed by *key* or their first key; callables are invoked with *n*)
    or an explicit list/tuple of hex color strings. Yields colors forever.
    """
    if colormap in dir(bpal):
        resolved = getattr(bpal, colormap)
        if isinstance(resolved, dict):
            chosen = key if key is not None else list(resolved.keys())[0]
            colors = resolved[chosen]
        elif callable(resolved):
            colors = resolved(n)
        else:
            raise TypeError("pallette must be a bokeh palette name or a sequence of color hex values.")
    elif isinstance(colormap, (list, tuple)):
        colors = colormap
    else:
        raise TypeError("pallette must be a bokeh palette name or a sequence of color hex values.")
    yield from itertools.cycle(colors)
def filters(filter_directory=None, update=False, fmt='table', **kwargs):
    """Return the available filters as a table (default) or dict.

    The list is cached in a pickle ('filter_list.p') inside
    *filter_directory*; update=True rebuilds it from the filter files.
    When the cache is empty, a single rebuild is attempted before giving
    up.
    """
    if filter_directory is None:
        filter_directory = resource_filename('svo_filters', 'data/filters/')
    p_path = os.path.join(filter_directory, 'filter_list.p')
    updated = False
    # Make sure the cache file exists so the read below cannot fail.
    if not os.path.isfile(p_path):
        os.system('touch {}'.format(p_path))
    if update:
        print('Loading filters into table...')
        files = glob(filter_directory + '*')
        files = [f for f in files if not f.endswith('.p')]
        bands = [os.path.basename(b) for b in files]
        tables = []
        for band in bands:
            band = band.replace('.txt', '')
            filt = Filter(band, **kwargs)
            filt.Band = band
            info = filt.info(True)
            # Coerce numeric-looking strings to float, keep the rest as text.
            vals = [float(i) if i.replace('.', '').replace('-', '').replace('+', '').isnumeric() else i for i in info['Values']]
            dtypes = np.array([type(i) for i in vals])
            table = at.Table(np.array([vals]), names=info['Attributes'], dtype=dtypes)
            tables.append(table)
            del filt, info, table
        with open(p_path, 'wb') as file:
            pickle.dump(at.vstack(tables), file)
    data = {}
    if os.path.isfile(p_path):
        # NOTE(review): pickle.load on a shared cache file — only safe if
        # the directory is trusted; confirm before exposing externally.
        with open(p_path, 'rb') as file:
            data = pickle.load(file)
    if data:
        if fmt == 'dict':
            # Keyed by the first column; astropy Quantity cells are unwrapped
            # to their plain values.
            data = {r[0]: {k: r[k].value if hasattr(r[k], 'unit') else r[k] for k in data.keys()[1:]} for r in data}
        else:
            data.add_index('Band')
        return data
    else:
        # Empty cache: rebuild once, then report failure.
        # NOTE(review): `updated` is a local, so the recursion guard never
        # actually sees True, and the rebuilt data is not returned — confirm
        # intended behavior.
        if not updated:
            updated = True
            filters(update=True)
        else:
            print('No filters found in', filter_directory)
def rebin_spec(spec, wavnew, oversamp=100, plot=False):
    """Rebin a (wavelength, flux) spectrum onto the *wavnew* grid while
    preserving total flux.

    The spectrum is linearly oversampled by *oversamp*, then the
    oversampled flux is summed inside bins centered between consecutive
    new wavelengths (end bins padded by the largest grid spacing).
    """
    wave, flux = spec
    npts = len(wave)
    coarse_idx = np.arange(npts, dtype=float)
    fine_idx = np.arange((npts - 1.) * oversamp + 1., dtype=float) / oversamp
    # Oversampled wavelength grid and matching flux, scaled so the sums
    # below conserve total flux.
    fine_wave = np.interp(fine_idx, coarse_idx, wave)
    fine_flux = np.interp(fine_wave, wave, flux) / oversamp
    pad = np.diff(wavnew).max()
    edges = np.sort(np.concatenate(([wavnew[0] - pad],
                                    .5 * (wavnew[1::] + wavnew[0:-1]),
                                    [wavnew[-1] + pad])))
    nbins = len(edges) - 1
    rebinned = np.zeros(nbins)
    bounds = [[fine_wave.searchsorted(edges[ii], side='left'),
               fine_wave.searchsorted(edges[ii + 1], side='left')]
              for ii in range(nbins)]
    for ii in range(nbins):
        rebinned[ii] = np.sum(fine_flux[bounds[ii][0]:bounds[ii][1]])
    return rebinned
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.