idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
13,100
def add_callback(self, fn, *args, **kwargs):
    """Register *fn* (with extra arguments) to run when this IOCB is
    complete; if completion has already happened, trigger() is invoked
    immediately so the new callback still fires.
    """
    if _debug: IOCB._debug("add_callback(%d) %r %r %r", self.ioID, fn, args, kwargs)

    # remember the callback with its arguments
    entry = (fn, args, kwargs)
    self.ioCallback.append(entry)

    # already finished?  notify right away
    if self.ioComplete.isSet():
        self.trigger()
Pass a function to be called when IO is complete .
13,101
def wait(self, *args, **kwargs):
    """Block until the completion event is set, forwarding any
    arguments (e.g. a timeout) to the event's wait().
    """
    if _debug: IOCB._debug("wait(%d) %r %r", self.ioID, args, kwargs)
    completion_event = self.ioComplete
    return completion_event.wait(*args, **kwargs)
Wait for the completion event to be set .
13,102
def complete(self, msg):
    """Finish this IOCB successfully with *msg* as the response.

    When a controller is bound, completion is delegated to it (it owns
    queue/state bookkeeping); otherwise the state is set directly and
    waiters/callbacks are released via trigger().
    """
    if _debug: IOCB._debug("complete(%d) %r", self.ioID, msg)

    if self.ioController:
        # pass to the controller
        self.ioController.complete_io(self, msg)
    else:
        # just fill in the data and notify waiters
        self.ioState = COMPLETED
        self.ioResponse = msg
        self.trigger()
Called to complete a transaction usually when ProcessIO has shipped the IOCB off to some other thread or function .
13,103
def abort(self, err):
    """Abort this IOCB with *err* as the error.

    Delegates to the bound controller when present; otherwise marks the
    IOCB ABORTED (only if it has not already completed) and releases
    waiters via trigger().
    """
    if _debug: IOCB._debug("abort(%d) %r", self.ioID, err)

    if self.ioController:
        # pass to the controller
        self.ioController.abort_io(self, err)
    elif self.ioState < COMPLETED:
        # already completed transactions are left untouched
        self.ioState = ABORTED
        self.ioError = err
        self.trigger()
Called by a client to abort a transaction .
13,104
def set_timeout(self, delay, err=TimeoutError):
    """Schedule this IOCB to be aborted with *err* after *delay*.

    A pre-existing timeout task is suspended and re-installed, so
    calling this again resets the timer.
    """
    if _debug: IOCB._debug("set_timeout(%d) %r err=%r", self.ioID, delay, err)

    if self.ioTimeout:
        # pause the running timer before rescheduling
        self.ioTimeout.suspend_task()
    else:
        # build a task that calls self.abort(err) when it fires
        self.ioTimeout = FunctionTask(self.abort, err)

    # (re)schedule it
    self.ioTimeout.install_task(delay=delay)
Called to set a transaction timer .
13,105
def chain_callback(self, iocb):
    """Callback when this iocb completes: propagate the result to the
    chained IOCB.

    decode() copies/transforms the outcome into the chained IOCB; any
    decoding exception aborts it instead.  The chain and controller
    references are cleared before trigger() so the chained IOCB
    completes locally rather than through a controller.
    """
    if _debug: IOChainMixIn._debug("chain_callback %r", iocb)

    # nothing chained, nothing to do
    if not self.ioChain:
        return
    iocb = self.ioChain

    try:
        if _debug: IOChainMixIn._debug(" - decoding")
        self.decode()
        if _debug: IOChainMixIn._debug(" - decode complete")
    except:
        # a decoding failure aborts the chained request
        err = sys.exc_info()[1]
        if _debug: IOChainMixIn._exception(" - decoding exception: %r", err)
        iocb.ioState = ABORTED
        iocb.ioError = err

    # break the chain and complete the chained IOCB locally
    self.ioChain = None
    iocb.ioController = None
    iocb.trigger()
Callback when this iocb completes .
13,106
def abort_io(self, iocb, err):
    """Forward an abort from the chained IOCB downstream."""
    if _debug: IOChainMixIn._debug("abort_io %r %r", iocb, err)

    # only the IOCB we are chained to may abort through us
    if iocb is not self.ioChain:
        raise RuntimeError("broken chain")

    self.abort(err)
Forward the abort downstream .
13,107
def decode(self):
    """Transfer this IOCB's outcome to the chained IOCB.

    The default implementation copies the response or error verbatim;
    subclasses override this to transform the response.  Raises
    RuntimeError if the IOCB has not finished yet.
    """
    if _debug: IOChainMixIn._debug("decode")
    iocb = self.ioChain

    if self.ioState == COMPLETED:
        if _debug: IOChainMixIn._debug(" - completed: %r", self.ioResponse)
        iocb.ioState = COMPLETED
        iocb.ioResponse = self.ioResponse
    elif self.ioState == ABORTED:
        if _debug: IOChainMixIn._debug(" - aborted: %r", self.ioError)
        iocb.ioState = ABORTED
        iocb.ioError = self.ioError
    else:
        # decode() only makes sense on a finished IOCB
        raise RuntimeError("invalid state: %d" % (self.ioState,))
Hook to transform the response called when this IOCB is completed .
13,108
def add(self, iocb):
    """Add an IOCB (or a nested group) to this group and re-arm the
    group's pending state.
    """
    if _debug: IOGroup._debug("add %r", iocb)

    # track the member and mark the group as not-yet-complete
    self.ioMembers.append(iocb)
    self.ioState = PENDING
    self.ioComplete.clear()

    # watch for the member finishing
    iocb.add_callback(self.group_callback)
Add an IOCB to the group; other groups may also be added.
13,109
def group_callback(self, iocb):
    """Callback when a member IOCB completes.

    Scans the members; once every member's completion event is set,
    the group itself becomes COMPLETED and its waiters are released.
    """
    if _debug: IOGroup._debug("group_callback %r", iocb)

    # look for a member that is still pending
    for iocb in self.ioMembers:
        if not iocb.ioComplete.isSet():
            if _debug: IOGroup._debug(" - waiting for child: %r", iocb)
            break
    else:
        # for/else: loop finished without break - all members done
        if _debug: IOGroup._debug(" - all children complete")
        self.ioState = COMPLETED
        self.trigger()
Callback when a child iocb completes .
13,110
def abort(self, err):
    """Abort every member transaction with *err*, then release anyone
    waiting on the group.
    """
    if _debug: IOGroup._debug("abort %r", err)

    # record the group-level failure first
    self.ioState = ABORTED
    self.ioError = err

    # cascade the abort down to each member
    for member in self.ioMembers:
        member.abort(err)

    self.trigger()
Called by a client to abort all of the member transactions . When the last pending member is aborted the group callback function will be called .
13,111
def put(self, iocb):
    """Insert *iocb* into the queue in priority order (FIFO within the
    same priority) and signal the not-empty event.

    Returns True if the queue was empty before the insert.  Raises
    RuntimeError if the IOCB is not PENDING.
    """
    if _debug: IOQueue._debug("put %r", iocb)

    # requests must be pending before they are queued
    if iocb.ioState != PENDING:
        raise RuntimeError("invalid state transition")

    # remember whether the queue was empty before this insert
    wasempty = not self.notempty.isSet()

    # bisecting on (priority + 1,) lands after all existing items of
    # the same priority, preserving FIFO order within a priority
    priority = iocb.ioPriority
    item = (priority, iocb)
    self.queue.insert(bisect_left(self.queue, (priority + 1,)), item)

    # back-reference so the IOCB can find (and leave) its queue
    iocb.ioQueue = self

    self.notempty.set()
    return wasempty
Add an IOCB to a queue . This is usually called by the function that filters requests and passes them out to the correct processing thread .
13,112
def get(self, block=1, delay=None):
    """Pop the highest-priority IOCB from the queue.

    With a false *block* an empty queue returns None immediately; with
    a *delay* the wait is bounded and None is returned on timeout;
    otherwise blocks until something is queued.
    """
    if _debug: IOQueue._debug("get block=%r delay=%r", block, delay)

    # non-blocking call on an empty queue
    if not block and not self.notempty.isSet():
        if _debug: IOQueue._debug(" - not blocking and empty")
        return None

    # wait for an item, with an optional timeout
    if delay:
        self.notempty.wait(delay)
        if not self.notempty.isSet():
            return None
    else:
        self.notempty.wait()

    # extract the first (highest priority) element
    priority, iocb = self.queue[0]
    del self.queue[0]
    iocb.ioQueue = None

    # clear the event when the queue drains
    qlen = len(self.queue)
    if not qlen:
        self.notempty.clear()

    return iocb
Get a request from a queue, optionally blocking until a request is available.
13,113
def abort(self, err):
    """Abort every IOCB still waiting in the queue with *err* and
    leave the queue empty.

    NOTE(review): the except ValueError guard looks vestigial - none
    of the calls in the try body obviously raise it; confirm before
    removing.
    """
    if _debug: IOQueue._debug("abort %r", err)

    try:
        # detach and abort each queued request
        for iocb in self.queue:
            iocb.ioQueue = None
            iocb.abort(err)

        # flush the queue and reset the not-empty event
        self.queue = []
        self.notempty.clear()
    except ValueError:
        pass
Abort all of the control blocks in the queue .
13,114
def abort(self, err):
    """Abort all pending requests on this controller's queue.

    Each queued IOCB is drained (non-blocking) and aborted directly;
    a request already in flight is not touched here, hence the
    "busy after aborts" debug note when the controller is not idle.
    """
    if _debug: IOQController._debug("abort %r", err)

    if (self.state == CTRL_IDLE):
        if _debug: IOQController._debug(" - idle")
        return

    while True:
        # non-blocking get; None means the queue is drained
        iocb = self.ioQueue.get(block=0)
        if not iocb:
            break
        if _debug: IOQController._debug(" - iocb: %r", iocb)

        # abort directly - the IOCB is no longer on the queue
        iocb.ioState = ABORTED
        iocb.ioError = err
        iocb.trigger()

    if (self.state != CTRL_IDLE):
        if _debug: IOQController._debug(" - busy after aborts")
Abort all pending requests .
13,115
def connect(self, addr):
    """Open a router-to-router connection to the peer at *addr* and
    return the new connection state.
    """
    if _debug: RouterToRouterService._debug("connect %r", addr)

    # build and register the connection state
    conn = ConnectionState(addr)
    conn.service = self
    conn.pendingNPDU = []
    self.multiplexer.connections[addr] = conn

    # ask the peer router to establish the service
    request = ServiceRequest(ROUTER_TO_ROUTER_SERVICE_ID)
    request.pduDestination = addr
    self.service_request(request)

    return conn
Initiate a connection request to the peer router .
13,116
def process_npdu(self, npdu):
    """Encode an NPDU from the network service access point and ship
    it to the proxy over this adapter's connection.

    Local broadcasts are wrapped as ServerToProxyBroadcastNPDU;
    everything else as a unicast keeping the original destination.
    """
    if _debug: ProxyServiceNetworkAdapter._debug("process_npdu %r", npdu)

    # encode the NPDU into a raw PDU
    pdu = PDU()
    npdu.encode(pdu)
    if _debug: ProxyServiceNetworkAdapter._debug(" - pdu: %r", pdu)

    # pick the wrapper based on the destination address type
    if pdu.pduDestination.addrType == Address.localBroadcastAddr:
        xpdu = ServerToProxyBroadcastNPDU(pdu)
    else:
        xpdu = ServerToProxyUnicastNPDU(pdu.pduDestination, pdu)

    # send it down the connection this adapter serves
    xpdu.pduDestination = self.conn.address
    self.conn.service.service_request(xpdu)
Encode NPDUs from the network service access point and send them to the proxy.
13,117
def service_confirmation(self, bslpdu):
    """Receive a packet forwarded by the proxy and pass it upstream to
    the network service access point.

    Broadcast wrappers get a LocalBroadcast destination restored; the
    payload is then re-decoded into an NPDU.
    """
    if _debug: ProxyServiceNetworkAdapter._debug("service_confirmation %r", bslpdu)

    # rebuild the raw PDU, restoring the original source address
    pdu = NPDU(bslpdu.pduData)
    pdu.pduSource = bslpdu.bslciAddress
    if isinstance(bslpdu, ProxyToServerBroadcastNPDU):
        pdu.pduDestination = LocalBroadcast()
    if _debug: ProxyServiceNetworkAdapter._debug(" - pdu: %r", pdu)

    # decode it back into an NPDU
    npdu = NPDU()
    npdu.decode(pdu)
    if _debug: ProxyServiceNetworkAdapter._debug(" - npdu: %r", npdu)

    # hand it to the network service access point
    self.adapterSAP.process_npdu(self, npdu)
Receive packets forwarded by the proxy and send them upstream to the network service access point .
13,118
def service_confirmation(self, conn, bslpdu):
    """Redirect a packet forwarded by the proxy to the proxy network
    adapter bound to this connection.
    """
    if _debug: ProxyServerService._debug("service_confirmation %r %r", conn, bslpdu)

    adapter = getattr(conn, 'proxyAdapter', None)
    if not adapter:
        raise RuntimeError("service confirmation received but no adapter for it")

    adapter.service_confirmation(bslpdu)
Receive packets forwarded by the proxy and redirect them to the proxy network adapter .
13,119
def get_spell_damage(self, amount: int) -> int:
    """Return the damage *amount* will deal after adding this
    character's SPELLPOWER bonus and applying the controller's
    SPELLPOWER_DOUBLE doublings.
    """
    boosted = amount + self.spellpower
    # each point of spellpower_double doubles the damage
    return boosted << self.controller.spellpower_double
Returns the amount of damage \ a amount will do taking SPELLPOWER and SPELLPOWER_DOUBLE into account .
13,120
def can_pay_cost(self, card):
    """Return whether the player can pay *card*'s resource cost:
    health for spells when spells_cost_health is set, mana otherwise.
    """
    if self.spells_cost_health and card.type == CardType.SPELL:
        # paying with health: the hero must survive the payment
        return self.hero.health > card.cost
    return self.mana >= card.cost
Returns whether the player can pay the resource cost of a card .
13,121
def pay_cost(self, source, amount: int) -> int:
    """Pay *amount* for *source* and return what was actually charged.

    Spells under spells_cost_health are paid in health (queued as a
    Hit on the hero).  Otherwise temporary mana is consumed first and
    only the remainder is charged against regular mana.
    """
    if self.spells_cost_health and source.type == CardType.SPELL:
        self.log("%s spells cost %i health", self, amount)
        self.game.queue_actions(self, [Hit(self.hero, amount)])
        return amount

    if self.temp_mana:
        # temporary mana soaks up part (or all) of the cost
        spent_temp = min(self.temp_mana, amount)
        amount -= spent_temp
        self.temp_mana -= spent_temp

    self.log("%s pays %i mana", self, amount)
    self.used_mana += amount
    return amount
Make player pay \ a amount mana . Returns how much mana is spent after temporary mana adjustments .
13,122
def summon(self, card):
    """Put *card* (an entity, or a card id string) into the PLAY zone
    via a cheat action and return the entity.
    """
    if isinstance(card, str):
        # resolve an id into an actual card, created directly in play
        card = self.card(card, zone=Zone.PLAY)
    self.game.cheat_action(self, [Summon(self, card)])
    return card
Puts \ a card in the PLAY zone
13,123
def get_damage(self, amount: int, target) -> int:
    """Hook to adjust the damage this source deals to *target*; the
    base implementation only zeroes damage against immune targets.
    """
    if not target.immune:
        return amount
    self.log("%r is immune to %s for %i damage", target, self, amount)
    return 0
Override to modify the damage dealt to a target from the given amount .
13,124
def then(self, *args):
    """Return a copy of this action whose callback is *args* - an
    action queue run with this action's arguments when it triggers.
    """
    clone = self.__class__(*self._args, **self._kwargs)
    clone.callback = args
    clone.times = self.times
    return clone
Create a callback containing an action queue called upon the action s trigger with the action s arguments available .
13,125
def random_draft(card_class: CardClass, exclude=None):
    """Return a deck of Deck.MAX_CARDS random collectible card ids for
    *card_class* (class cards plus neutrals; heroes excluded).

    exclude: optional iterable of card ids never to draft.

    Fix: the default was a shared mutable list (``exclude=[]``), a
    classic Python pitfall - it is one object reused across calls.  A
    ``None`` sentinel keeps the interface backward-compatible.
    """
    from . import cards
    from .deck import Deck

    if exclude is None:
        exclude = []

    deck = []
    collection = []
    for card in cards.db.keys():
        if card in exclude:
            continue
        cls = cards.db[card]
        if not cls.collectible:
            continue
        if cls.type == CardType.HERO:
            # heroes are collectible, but not deckable
            continue
        if cls.card_class and cls.card_class not in [card_class, CardClass.NEUTRAL]:
            continue
        collection.append(cls)

    while len(deck) < Deck.MAX_CARDS:
        card = random.choice(collection)
        # respect per-card copy limits (e.g. one copy for legendaries)
        if deck.count(card.id) < card.max_count_in_deck:
            deck.append(card.id)

    return deck
Return a deck of 30 random cards for the \ a card_class
13,126
def get_script_definition(id):
    """Find and return the script definition for card *id*, scanning
    the card-set modules in order; None if no module defines one.
    """
    for set_name in CARD_SETS:
        module = import_module("fireplace.cards.%s" % (set_name))
        if hasattr(module, id):
            return getattr(module, id)
Find and return the script definition for card \ a id
13,127
def check_for_end_game(self):
    """Check whether any player is currently losing and, if so, end
    the game.

    Concede/disconnect counts as losing.  If both players are losing
    the result is a tie; otherwise losers become LOST and everyone
    else WON.  Ending the game marks it COMPLETE and steps through
    FINAL_WRAPUP and FINAL_GAMEOVER.
    """
    gameover = False

    for player in self.players:
        # conceded or disconnected players are treated as losing
        if player.playstate in (PlayState.CONCEDED, PlayState.DISCONNECTED):
            player.playstate = PlayState.LOSING
        if player.playstate == PlayState.LOSING:
            gameover = True

    if gameover:
        if self.players[0].playstate == self.players[1].playstate:
            # both losing - it's a draw
            for player in self.players:
                player.playstate = PlayState.TIED
        else:
            for player in self.players:
                if player.playstate == PlayState.LOSING:
                    player.playstate = PlayState.LOST
                else:
                    player.playstate = PlayState.WON

        # wrap the game up
        self.state = State.COMPLETE
        self.manager.step(self.next_step, Step.FINAL_WRAPUP)
        self.manager.step(self.next_step, Step.FINAL_GAMEOVER)
        self.manager.step(self.next_step)
Check if one or more player is currently losing . End the game if they are .
13,128
def queue_actions(self, source, actions, event_args=None):
    """Run *actions* from *source* with *event_args* temporarily bound
    as the source's event arguments; returns the trigger results.
    """
    source.event_args = event_args
    results = self.trigger_actions(source, actions)
    # restore the source to a clean state
    source.event_args = None
    return results
Queue a list of \ a actions for processing from \ a source . Triggers an aura refresh afterwards .
13,129
def trigger_actions(self, source, actions):
    """Perform *actions* from *source*.  EventListener instances are
    registered (one-shot) instead of triggered; spells register on
    their controller, other sources on themselves.  Prefer
    queue_actions over calling this directly.
    """
    results = []
    for action in actions:
        if not isinstance(action, EventListener):
            results.append(action.trigger(source))
            continue
        # event listeners get registered, not executed
        self.log("Registering event listener %r on %r", action, self)
        action.once = True
        if source.type == CardType.SPELL:
            owner = source.controller
        else:
            owner = source
        owner._events.append(action)
    return results
Performs a list of actions from source . This should seldom be called directly - use queue_actions instead .
13,130
def evaluate(self, source):
    """Evaluate the condition against the board state seen from
    *source* (negated when _neg is set) and return the matching
    if/else action branch, if one is set.
    """
    outcome = self.check(source)
    if self._neg:
        outcome = not outcome
    branch = self._if if outcome else self._else
    if branch:
        return branch
Evaluates the board state from source and returns an iterable of Actions as a result .
13,131
def trigger(self, source):
    """Evaluate against the board state from *source* and fire any
    resulting actions (a bare action is wrapped in a 1-tuple).
    """
    actions = self.evaluate(source)
    if not actions:
        return
    if not hasattr(actions, "__iter__"):
        actions = (actions, )
    source.game.trigger_actions(source, actions)
Triggers all actions meant to trigger on the board state from source .
13,132
def copy(self, source, entity):
    """Return a fresh copy of *entity*, created for *source*'s
    controller.
    """
    log.info("Creating a copy of %r", entity)
    controller = source.controller
    return controller.card(entity.id, source)
Return a copy of \ a entity
13,133
def find_cards(self, source=None, **filters):
    """Generate the card pool matching the given filters (falling back
    to this picker's stored filters), resolving any LazyValue filter
    against *source* first.
    """
    chosen = filters if filters else self.filters
    resolved = chosen.copy()
    for key, value in resolved.items():
        if isinstance(value, LazyValue):
            resolved[key] = value.evaluate(source)
    from .. import cards
    return cards.filter(**resolved)
Generate a card pool with all cards matching specified filters
13,134
def evaluate(self, source, cards=None) -> str:
    """Pick self.count cards without replacement from a single
    combined pool, weighting each filtered set against the total.
    """
    from ..utils import weighted_card_choice
    if cards:
        # explicit pool: a single set with unit weight
        self.weights = [1]
        card_sets = [list(cards)]
    elif not self.weightedfilters:
        self.weights = [1]
        card_sets = [self.find_cards(source)]
    else:
        # merge the shared filters into each weighted filter set
        combined = [{**wf, **self.filters} for wf in self.weightedfilters]
        card_sets = [self.find_cards(source, **f) for f in combined]
    return weighted_card_choice(source, self.weights, card_sets, self.count)
This picks from a single combined card pool without replacement weighting each filtered set of cards against the total
13,135
def powered_up(self):
    """True when all of the card's powered-up conditions hold (False
    when it has none).
    """
    checks = self.data.scripts.powered_up
    if not checks:
        return False
    return all(script.check(self) for script in checks)
Returns True if the card is powered up.
13,136
def play(self, target=None, index=None, choose=None):
    """Queue a Play action on the card.

    target: entity to target (required/validated per the card's
    requirements).  index: board position.  choose: card id for
    choose-one cards.  Raises InvalidAction on any illegal play;
    returns self so calls can be chained.
    """
    if choose:
        if self.must_choose_one:
            # resolve the chosen option; its targeting rules apply
            choose = card = self.choose_cards.filter(id=choose)[0]
            self.log("%r: choosing %r", self, choose)
        else:
            raise InvalidAction("%r cannot be played with choice %r" % (self, choose))
    else:
        if self.must_choose_one:
            raise InvalidAction("%r requires a choice (one of %r)" % (self, self.choose_cards))
        card = self

    if not self.is_playable():
        raise InvalidAction("%r isn't playable." % (self))

    if card.requires_target():
        if not target:
            raise InvalidAction("%r requires a target to play." % (self))
        elif target not in self.play_targets:
            raise InvalidAction("%r is not a valid target for %r." % (target, self))
    elif target:
        # harmless, but points at a caller bug
        self.logger.warning("%r does not require a target, ignoring target %r", self, target)

    self.game.play_card(self, target, index, choose)
    return self
Queue a Play action on the card .
13,137
def morph(self, into):
    """Morph this card into *into* via a cheat action."""
    morph_action = actions.Morph(self, into)
    return self.game.cheat_action(self, [morph_action])
Morph the card into another card
13,138
def shuffle_into_deck(self):
    """Shuffle this card into its controller's deck via a cheat
    action.
    """
    shuffle = actions.Shuffle(self.controller, self)
    return self.game.cheat_action(self, [shuffle])
Shuffle the card into the controller s deck
13,139
def battlecry_requires_target(self):
    """True if playing this card requires a battlecry target: combo
    targeting while a combo is active, or any targeting prerequisite.
    """
    if self.has_combo and self.controller.combo:
        if PlayReq.REQ_TARGET_FOR_COMBO in self.requirements:
            return True
    return any(req in self.requirements for req in TARGETING_PREREQUISITES)
True if the play action of the card requires a target
13,140
def requires_target(self):
    """True if the card currently requires a target.

    Checks the PlayReq requirements in order: combo targeting (only
    while the combo is active), the TARGET_IF_AVAILABLE variants
    (which require a target only when one exists and the extra
    condition - dragon in hand, minimum friendly minions/secrets -
    holds), and finally the unconditional TARGET_TO_PLAY.
    """
    if self.has_combo and PlayReq.REQ_TARGET_FOR_COMBO in self.requirements:
        if self.controller.combo:
            return True
    if PlayReq.REQ_TARGET_IF_AVAILABLE in self.requirements:
        return bool(self.play_targets)
    if PlayReq.REQ_TARGET_IF_AVAILABLE_AND_DRAGON_IN_HAND in self.requirements:
        if self.controller.hand.filter(race=Race.DRAGON):
            return bool(self.play_targets)
    req = self.requirements.get(PlayReq.REQ_TARGET_IF_AVAILABLE_AND_MINIMUM_FRIENDLY_MINIONS)
    if req is not None:
        if len(self.controller.field) >= req:
            return bool(self.play_targets)
    req = self.requirements.get(PlayReq.REQ_TARGET_IF_AVAILABLE_AND_MINIMUM_FRIENDLY_SECRETS)
    if req is not None:
        if len(self.controller.secrets) >= req:
            return bool(self.play_targets)
    return PlayReq.REQ_TARGET_TO_PLAY in self.requirements
True if the card currently requires a target
13,141
def merge(id, card, cardscript=None):
    """Find the XML card data and the script definition for card *id*,
    then return the merged card.

    The script class (if any) becomes a base of card.scripts.  Script
    hooks are normalized so later code can iterate them blindly:
    action hooks become tuples/lists (callables are left alone), event
    hooks become lists, and the nested Hand scripts get the same
    treatment.  Choose-one options, extra tags and the implicit
    POISONOUS event are copied in.
    """
    if card is None:
        card = cardxml.CardXML(id)
    if cardscript is None:
        cardscript = get_script_definition(id)

    # graft the script class (when present) onto a fresh scripts type
    if cardscript:
        card.scripts = type(id, (cardscript, ), {})
    else:
        card.scripts = type(id, (), {})

    scriptnames = (
        "activate", "combo", "deathrattle", "draw", "inspire", "play",
        "enrage", "update", "powered_up"
    )

    # normalize action hooks: missing -> [], bare action -> 1-tuple
    for script in scriptnames:
        actions = getattr(card.scripts, script, None)
        if actions is None:
            setattr(card.scripts, script, [])
        elif not callable(actions):
            if not hasattr(actions, "__iter__"):
                setattr(card.scripts, script, (actions, ))

    # normalize event hooks: missing -> [], bare event -> [event]
    for script in ("events", "secret"):
        events = getattr(card.scripts, script, None)
        if events is None:
            setattr(card.scripts, script, [])
        elif not hasattr(events, "__iter__"):
            setattr(card.scripts, script, [events])

    if not hasattr(card.scripts, "cost_mod"):
        card.scripts.cost_mod = None

    # in-hand scripts live on a nested Hand class
    if not hasattr(card.scripts, "Hand"):
        card.scripts.Hand = type("Hand", (), {})
    if not hasattr(card.scripts.Hand, "events"):
        card.scripts.Hand.events = []
    if not hasattr(card.scripts.Hand.events, "__iter__"):
        card.scripts.Hand.events = [card.scripts.Hand.events]
    if not hasattr(card.scripts.Hand, "update"):
        card.scripts.Hand.update = ()
    if not hasattr(card.scripts.Hand.update, "__iter__"):
        card.scripts.Hand.update = (card.scripts.Hand.update, )

    # copy choose-one options and any extra tags from the script
    if hasattr(cardscript, "choose"):
        card.choose_cards = cardscript.choose[:]
    else:
        card.choose_cards = []
    if hasattr(cardscript, "tags"):
        for tag, value in cardscript.tags.items():
            card.tags[tag] = value

    # poisonous cards get the shared POISONOUS event implicitly
    if card.poisonous:
        card.scripts.events.append(POISONOUS)

    return card
Find the xmlcard and the card definition of \ a id Then return a merged class of the two
13,142
def load_css(css_url=None, version='5.2.0'):
    """Load Dropzone's CSS: an explicit *css_url* wins, otherwise the
    bundled static file (DROPZONE_SERVE_LOCAL) or the jsDelivr CDN at
    *version*.
    """
    css_filename = 'dropzone.min.css'
    serve_local = current_app.config['DROPZONE_SERVE_LOCAL']

    if serve_local:
        href = url_for('dropzone.static', filename=css_filename)
        css = '<link rel="stylesheet" href="%s" type="text/css">\n' % href
    else:
        css = '<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/dropzone@%s/dist/min/%s"' \
              ' type="text/css">\n' % (version, css_filename)

    # an explicit URL overrides both options above
    if css_url:
        css = '<link rel="stylesheet" href="%s" type="text/css">\n' % css_url
    return Markup(css)
Load Dropzone s css resources with given version .
13,143
def load_js(js_url=None, version='5.2.0'):
    """Load Dropzone's JS: an explicit *js_url* wins, otherwise the
    bundled static file (DROPZONE_SERVE_LOCAL) or the jsDelivr CDN at
    *version*.
    """
    js_filename = 'dropzone.min.js'
    serve_local = current_app.config['DROPZONE_SERVE_LOCAL']

    if serve_local:
        src = url_for('dropzone.static', filename=js_filename)
        js = '<script src="%s"></script>\n' % src
    else:
        js = '<script src="https://cdn.jsdelivr.net/npm/dropzone@%s/dist/%s"></script>\n' \
             % (version, js_filename)

    # an explicit URL overrides both options above
    if js_url:
        js = '<script src="%s"></script>\n' % js_url
    return Markup(js)
Load Dropzone s js resources with given version .
13,144
def main():
    """Render the analytic rate expressions from
    chempy.kinetics.integrated (and their time-derivatives at t0=0)
    as PNG files, one pair per function in ``funcs``.
    """
    # shared symbols; negative=False lets sympy simplify more freely
    t, kf, t0, major, minor, prod, beta = sympy.symbols(
        't k_f t0 Y Z X beta', negative=False)

    for f in funcs:
        args = [t, kf, prod, major, minor]
        # the reversible variants also take the backward rate kf/beta
        if f in (pseudo_rev, binary_rev):
            args.insert(2, kf/beta)
        expr = f(*args, backend='sympy')

        # render the expression itself
        with open(f.__name__ + '.png', 'wb') as ofh:
            sympy.printing.preview(expr, output='png', filename='out.png',
                                   viewer='BytesIO', outputbuffer=ofh)

        # and its derivative with respect to t, evaluated at t0 = 0
        with open(f.__name__ + '_diff.png', 'wb') as ofh:
            sympy.printing.preview(expr.diff(t).subs({t0: 0}).simplify(),
                                   output='png', filename='out.png',
                                   viewer='BytesIO', outputbuffer=ofh)
This example demonstrates how to generate pretty equations from the analytic expressions found in chempy . kinetics . integrated .
13,145
def dissolved(self, concs):
    """Return a copy of *concs* with precipitate species redissolved.

    For each reaction involving precipitates, the precipitate amount
    is converted back according to the reaction's net stoichiometry.
    """
    new_concs = concs.copy()
    for r in self.rxns:
        if r.has_precipitates(self.substances):
            net_stoich = np.asarray(r.net_stoich(self.substances))
            s_net, s_stoich, s_idx = r.precipitate_stoich(self.substances)
            # undo the reaction by the extent implied by the
            # precipitate concentration (note: updated new_concs is
            # used on each iteration, so reaction order matters)
            new_concs -= new_concs[s_idx]/s_stoich * net_stoich
    return new_concs
Return dissolved concentrations
13,146
def lg_solubility_ratio(electrolytes, gas, units=None, warn=True):
    """Return the log10 value of the solubility ratio of *gas* in the
    given electrolyte composition (Schumpe 1993 parameters).

    electrolytes maps ion names to concentrations; *units*, when
    given, supplies molar.  A warning is issued for fluoride data.
    """
    M = 1 if units is None else units.molar
    if warn and 'F-' in electrolytes:
        warnings.warn("In Schumpe 1993: data for fluoride uncertain.")
    gas_term = p_gas_rM[gas]/M
    return sum((gas_term + p_ion_rM[ion]/M)*conc
               for ion, conc in electrolytes.items())
Returns the log10 value of the solubilty ratio
13,147
def Henry_H_at_T(T, H, Tderiv, T0=None, units=None, backend=None):
    """Evaluate Henry's constant at temperature *T* via the relation
    H * exp(Tderiv * (1/T - 1/T0)).

    T0 defaults to 298.15 (Kelvin when *units* is given); *backend*
    selects the math backend used for exp.
    """
    be = get_backend(backend)
    K = 1 if units is None else units.Kelvin
    if T0 is None:
        T0 = 298.15*K
    return H*be.exp(Tderiv*(1/T - 1/T0))
Evaluate Henry s constant H at temperature T
13,148
def mass_from_composition(composition):
    """Calculate molecular mass from a {atomic number: count} mapping.

    Key 0 denotes net charge: each positive charge subtracts one
    electron mass (5.489e-4 u).
    """
    total = 0.0
    for atomic_number, count in composition.items():
        if atomic_number == 0:
            # charge: remove one electron mass per unit charge
            total -= count*5.489e-4
        else:
            total += count*relative_atomic_masses[atomic_number - 1]
    return total
Calculates molecular mass from atomic weights
13,149
def water_self_diffusion_coefficient(T=None, units=None, warn=True, err_mult=None):
    """Temperature-dependent self-diffusion coefficient of water.

    T defaults to 298.15 K.  When *units* is given, T is expected in
    Kelvin and the result carries m**2/s.  err_mult, when given, is a
    2-sequence of multipliers applied to the uncertainties of the fit
    parameters D0 and TS.  Outside 0-100 degC a warning is emitted
    (suppress with warn=False).
    """
    if units is None:
        K = 1
        m = 1
        s = 1
    else:
        K = units.Kelvin
        m = units.meter
        s = units.second
    if T is None:
        T = 298.15*K
    # fit parameters with units attached
    _D0 = D0 * m**2 * s**-1
    _TS = TS * K
    if err_mult is not None:
        # shift each parameter by a multiple of its uncertainty
        _dD0 = dD0 * m**2 * s**-1
        _dTS = dTS * K
        _D0 += err_mult[0]*_dD0
        _TS += err_mult[1]*_dTS
    if warn and (_any(T < low_t_bound*K) or _any(T > high_t_bound*K)):
        warnings.warn("Temperature is outside range (0-100 degC)")
    # power-law fit: D0 * (T/TS - 1)**gamma
    return _D0*((T/_TS) - 1)**gamma
Temperature - dependent self - diffusion coefficient of water .
13,150
def rsys2graph(rsys, fname, output_dir=None, prog=None, save=False, **kwargs):
    """Convenience function: call rsys2dot, write the dot file and
    render the graph (graphviz, or dot2tex for .tex output).

    output_dir defaults to a fresh temporary directory.  save=False
    removes a created temporary directory afterwards; save=True keeps
    it; any other value is treated as a path to copy the rendered
    file to.  Returns the path of the rendered output.  Raises
    RuntimeError when the render command exits non-zero.
    """
    lines = rsys2dot(rsys, **kwargs)
    created_tempdir = False
    try:
        if output_dir is None:
            output_dir = tempfile.mkdtemp()
            created_tempdir = True
        basename, ext = os.path.splitext(os.path.basename(fname))
        outpath = os.path.join(output_dir, fname)
        dotpath = os.path.join(output_dir, basename + '.dot')
        with open(dotpath, 'wt') as ofh:
            ofh.writelines(lines)
        if ext == '.tex':
            cmds = [prog or 'dot2tex']
        else:
            # let dot infer the output format from the file extension
            cmds = [prog or 'dot', '-T' + outpath.split('.')[-1]]
        p = subprocess.Popen(cmds + [dotpath, '-o', outpath])
        retcode = p.wait()
        if retcode:
            fmtstr = "{}\n returned with exit status {}"
            raise RuntimeError(fmtstr.format(' '.join(cmds), retcode))
        return outpath
    finally:
        if save is True or save == 'True':
            pass
        else:
            if save is False or save == 'False':
                if created_tempdir:
                    shutil.rmtree(output_dir)
            else:
                # NOTE(review): if an exception fired before outpath was
                # assigned, this copy raises NameError - confirm intent
                shutil.copy(outpath, save)
Convenience function to call rsys2dot and write output to file and render the graph
13,151
def check_permission_safety(path):
    """Return True when the file at *path* is owned by the current
    user and has no group/other permission bits set - i.e. safe to use
    as a state file.
    """
    info = os.stat(path)
    group_or_other = info.st_mode & (stat.S_IRWXG | stat.S_IRWXO)
    return group_or_other == 0 and info.st_uid == os.getuid()
Check if the file at the given path is safe to use as a state file .
13,152
def get_private_key(key_path, password_path=None):
    """Open a raw-hex or JSON keystore private key file and return the
    key as a hex string.

    Refuses key or password files readable by group/other.  For
    keystore files the password comes from *password_path* or an
    interactive prompt.  Returns None on any failure (missing file,
    bad permissions, wrong password/format).
    """
    assert key_path, key_path
    if not os.path.exists(key_path):
        log.fatal('%s: no such file', key_path)
        return None
    if not check_permission_safety(key_path):
        log.fatal('Private key file %s must be readable only by its owner.', key_path)
        return None
    if password_path and not check_permission_safety(password_path):
        log.fatal('Password file %s must be readable only by its owner.', password_path)
        return None
    with open(key_path) as keyfile:
        private_key = keyfile.readline().strip()
        if is_hex(private_key) and len(decode_hex(private_key)) == 32:
            # 32 raw bytes in hex - accepted, but keystore is preferred
            log.warning('Private key in raw format. Consider switching to JSON-encoded')
        else:
            # not raw hex: treat the whole file as a JSON keystore
            keyfile.seek(0)
            try:
                json_data = json.load(keyfile)
                if password_path:
                    with open(password_path) as password_file:
                        password = password_file.readline().strip()
                else:
                    password = getpass.getpass('Enter the private key password: ')
                # pbkdf2 key derivation expects the password as bytes
                if json_data['crypto']['kdf'] == 'pbkdf2':
                    password = password.encode()
                private_key = encode_hex(decode_keyfile_json(json_data, password))
            except ValueError:
                log.fatal('Invalid private key format or password!')
                return None
    return private_key
Open a JSON - encoded private key and return it
13,153
def deploy_token_contract(
        self,
        token_supply: int,
        token_decimals: int,
        token_name: str,
        token_symbol: str,
        token_type: str = 'CustomToken',
):
    """Deploy a token contract and return a mapping of
    {token_type: checksummed deployed address}.
    """
    receipt = self.deploy(
        contract_name=token_type,
        args=[token_supply, token_decimals, token_name, token_symbol],
    )
    address = receipt['contractAddress']
    # sanity: the receipt must carry a valid address
    assert address and is_address(address)
    return {token_type: to_checksum_address(address)}
Deploy a token contract .
13,154
def _deploy_and_remember(
        self,
        contract_name: str,
        arguments: List,
        deployed_contracts: 'DeployedContracts',
) -> Contract:
    """Deploy *contract_name* with *arguments*, record the deployment
    data in *deployed_contracts*, and return a bound web3 contract
    proxy at the deployed address.
    """
    receipt = self.deploy(contract_name, arguments)
    deployed_contracts['contracts'][contract_name] = _deployed_data_from_receipt(
        receipt=receipt,
        constructor_arguments=arguments,
    )
    return self.web3.eth.contract(
        abi=self.contract_manager.get_contract_abi(contract_name),
        address=deployed_contracts['contracts'][contract_name]['address'],
    )
Deploys contract_name with arguments and store the result in deployed_contracts .
13,155
def register_token_network(
        self,
        token_registry_abi: Dict,
        token_registry_address: str,
        token_address: str,
        channel_participant_deposit_limit: Optional[int],
        token_network_deposit_limit: Optional[int],
):
    """Register a token with a TokenNetworkRegistry contract,
    dispatching to the with/without-deposit-limits variant according
    to the contracts version.
    """
    if contracts_version_expects_deposit_limits(self.contracts_version):
        handler = self._register_token_network_with_limits
    else:
        handler = self._register_token_network_without_limits
    return handler(
        token_registry_abi,
        token_registry_address,
        token_address,
        channel_participant_deposit_limit,
        token_network_deposit_limit,
    )
Register token with a TokenNetworkRegistry contract .
13,156
def _register_token_network_without_limits(
        self,
        token_registry_abi: Dict,
        token_registry_address: str,
        token_address: str,
        channel_participant_deposit_limit: Optional[int],
        token_network_deposit_limit: Optional[int],
):
    """Register a token with a TokenNetworkRegistry contract for
    contracts versions without deposit limits.

    The two limit arguments must be falsy - they exist only so this
    helper shares a signature with the with-limits variant.  Returns
    the checksummed TokenNetwork address.  Raises ValueError when a
    limit is supplied, RuntimeError on an on-chain version mismatch.
    """
    if channel_participant_deposit_limit:
        raise ValueError(
            'contracts_version below 0.9.0 does not expect '
            'channel_participant_deposit_limit',
        )
    if token_network_deposit_limit:
        raise ValueError(
            'contracts_version below 0.9.0 does not expect token_network_deposit_limit',
        )
    token_network_registry = self.web3.eth.contract(
        abi=token_registry_abi,
        address=token_registry_address,
    )

    # guard against registering through a registry built from a
    # different contracts version than our deployment data
    version_from_onchain = token_network_registry.functions.contract_version().call()
    if version_from_onchain != self.contract_manager.version_string:
        raise RuntimeError(
            f'got {version_from_onchain} from the chain, expected '
            f'{self.contract_manager.version_string} in the deployment data',
        )

    command = token_network_registry.functions.createERC20TokenNetwork(
        token_address,
    )
    self.transact(command)

    # look up the freshly created TokenNetwork address
    token_network_address = token_network_registry.functions.token_to_token_networks(
        token_address,
    ).call()
    token_network_address = to_checksum_address(token_network_address)
    LOG.debug(f'TokenNetwork address: {token_network_address}')
    return token_network_address
Register token with a TokenNetworkRegistry contract
13,157
def deploy_service_contracts(
        self,
        token_address: str,
        user_deposit_whole_balance_limit: int,
):
    """Deploy the 3rd party service contracts (ServiceRegistry,
    UserDeposit, MonitoringService, OneToN), wire UserDeposit to the
    latter two via init(), and return the DeployedContracts record.
    """
    chain_id = int(self.web3.version.network)
    deployed_contracts: DeployedContracts = {
        'contracts_version': self.contract_version_string(),
        'chain_id': chain_id,
        'contracts': {},
    }
    self._deploy_and_remember(CONTRACT_SERVICE_REGISTRY, [token_address], deployed_contracts)
    user_deposit = self._deploy_and_remember(
        contract_name=CONTRACT_USER_DEPOSIT,
        arguments=[token_address, user_deposit_whole_balance_limit],
        deployed_contracts=deployed_contracts,
    )
    # MonitoringService depends on the two contracts deployed above
    monitoring_service_constructor_args = [
        token_address,
        deployed_contracts['contracts'][CONTRACT_SERVICE_REGISTRY]['address'],
        deployed_contracts['contracts'][CONTRACT_USER_DEPOSIT]['address'],
    ]
    msc = self._deploy_and_remember(
        contract_name=CONTRACT_MONITORING_SERVICE,
        arguments=monitoring_service_constructor_args,
        deployed_contracts=deployed_contracts,
    )
    one_to_n = self._deploy_and_remember(
        contract_name=CONTRACT_ONE_TO_N,
        arguments=[user_deposit.address, chain_id],
        deployed_contracts=deployed_contracts,
    )
    # tell UserDeposit which contracts may claim from deposits
    LOG.debug(
        'Calling UserDeposit.init() with '
        f'msc_address={msc.address} '
        f'one_to_n_address={one_to_n.address}',
    )
    self.transact(user_deposit.functions.init(
        _msc_address=msc.address,
        _one_to_n_address=one_to_n.address,
    ))
    return deployed_contracts
Deploy 3rd party service contracts
13,158
def private_key_to_address(private_key: Union[str, bytes]) -> ChecksumAddress:
    """Convert a private key (hex string or raw bytes) to its Ethereum
    address.
    """
    if isinstance(private_key, bytes):
        key_bytes = private_key
    else:
        key_bytes = to_bytes(hexstr=private_key)
    return public_key_to_address(PrivateKey(key_bytes).public_key)
Converts a private key to an Ethereum address .
13,159
def public_key_to_address(public_key: Union[PublicKey, bytes]) -> ChecksumAddress:
    """Convert a public key (PublicKey object or uncompressed bytes)
    to its Ethereum address.
    """
    if isinstance(public_key, PublicKey):
        public_key = public_key.format(compressed=False)
    assert isinstance(public_key, bytes)
    # drop the 0x04 prefix byte, hash, keep the low 20 bytes
    digest = sha3(public_key[1:])
    return to_checksum_address(digest[-20:])
Converts a public key to an Ethereum address .
13,160
def _handle_waited_log ( self , event : dict ) : txn_hash = event [ 'transactionHash' ] event_name = event [ 'event' ] assert event_name in self . event_waiting assert txn_hash in self . event_waiting [ event_name ] self . event_count [ event_name ] [ txn_hash ] += 1 event_entry = self . event_waiting [ event_name ] [ txn_hash ] if event_entry . count == self . event_count [ event_name ] [ txn_hash ] : self . event_waiting [ event_name ] . pop ( txn_hash ) if event_entry . callback : event_entry . callback ( event )
A subroutine of handle_log Increment self . event_count forget about waiting and call the callback if any .
13,161
def assert_event(self, txn_hash, event_name, args, timeout=5):
    """Register a check that `event_name` fires for `txn_hash` with exactly
    `args`, then wait up to `timeout` seconds for it."""
    def check_args(event):
        assert event['args'] == args, f'{event["args"]} == {args}'

    self.add(txn_hash=txn_hash, event_name=event_name, callback=check_args)
    self.check(timeout=timeout)
Assert that event_name is emitted with the args
13,162
def join_sources(source_module: DeploymentModule, contract_name: str):
    """Concatenate a contract and all of its Solidity imports into one file.

    Runs the join-contracts.py helper (with the compiler's import
    remapping) and returns the joined source text.
    """
    output_path = Path(__file__).parent.joinpath('joined.sol')
    import_map = {module: str(path) for module, path in contracts_source_path().items()}
    source_file = contracts_source_path_of_deployment_module(
        source_module,
    ).joinpath(contract_name + '.sol')
    command = [
        './utils/join-contracts.py',
        '--import-map',
        json.dumps(import_map),
        str(source_file),
        str(output_path),
    ]
    working_dir = Path(__file__).parent.parent
    try:
        subprocess.check_call(command, cwd=working_dir)
    except subprocess.CalledProcessError as ex:
        print(f'cd {str(working_dir)}; {subprocess.list2cmdline(command)} failed.')
        raise ex
    return output_path.read_text()
Use join - contracts . py to concatenate all imported Solidity files .
13,163
def etherscan_verify_contract(
        chain_id: int,
        apikey: str,
        source_module: DeploymentModule,
        contract_name: str,
):
    """Submit a contract's joined Solidity source to Etherscan for verification.

    Posts the source, then polls the returned GUID until Etherscan reports
    success or failure.

    :param chain_id: EIP155 chain id of the Ethereum chain
    :param apikey: key for calling Etherscan API
    :param source_module: a module name to look up contracts_version()
    :param contract_name: 'TokenNetworkRegistry', 'SecretRegistry' etc.
    :raises FileNotFoundError: when no deployment file exists for the chain/module
    :raises ValueError: when submission or verification fails
    :raises TimeoutError: when Etherscan keeps answering "pending"
    """
    etherscan_api = api_of_chain_id[chain_id]
    deployment_info = get_contracts_deployment_info(
        chain_id=chain_id,
        module=source_module,
    )
    if deployment_info is None:
        raise FileNotFoundError(
            f'Deployment file not found for chain_id={chain_id} and module={source_module}',
        )
    contract_manager = ContractManager(contracts_precompiled_path())
    data = post_data_for_etherscan_verification(
        apikey=apikey,
        deployment_info=deployment_info['contracts'][contract_name],
        source=join_sources(source_module=source_module, contract_name=contract_name),
        contract_name=contract_name,
        metadata=json.loads(contract_manager.contracts[contract_name]['metadata']),
        constructor_args=get_constructor_args(
            deployment_info=deployment_info,
            contract_name=contract_name,
            contract_manager=contract_manager,
        ),
    )
    response = requests.post(etherscan_api, data=data)
    content = json.loads(response.content.decode())
    print(content)
    print(f'Status: {content["status"]}; {content["message"]} ; GUID = {content["result"]}')

    etherscan_url = etherscan_api.replace('api-', '').replace('api', '')
    etherscan_url += '/verifyContract2?a=' + data['contractaddress']
    # BUG fixed: the guide string was truncated to a dangling `f` in the
    # source, which would raise NameError at runtime. Reconstructed here.
    manual_submission_guide = (
        f'Usually a manual submission to Etherscan works.\n'
        f'Visit {etherscan_url} and submit the joined Solidity source manually.'
    )

    if content['status'] != '1':
        if content['result'] == 'Contract source code already verified':
            return
        else:
            raise ValueError(
                'Etherscan submission failed for an unknown reason\n' +
                manual_submission_guide,
            )

    # Poll the verification status until it settles.
    guid = content['result']
    status = '0'
    retries = 10
    while status == '0' and retries > 0:
        retries -= 1
        r = guid_status(etherscan_api=etherscan_api, guid=guid)
        status = r['status']
        if r['result'] == 'Fail - Unable to verify':
            raise ValueError(manual_submission_guide)
        if r['result'] == 'Pass - Verified':
            return
        print('Retrying...')
        sleep(5)
    raise TimeoutError(manual_submission_guide)
Calls Etherscan API for verifying the Solidity source of a contract .
13,164
def error_removed_option(message: str):
    """Build a click callback that rejects a removed CLI option.

    The returned callback raises click.NoSuchOption (with `message`
    appended) whenever the option was actually supplied.
    """
    def callback(_, param, value):
        if value is None:
            return None
        raise click.NoSuchOption(
            f'--{param.name.replace("_", "-")} is no longer a valid option. ' + message,
        )

    return callback
Takes a message and returns a callback that raises NoSuchOption
13,165
def common_options(func):
    """Decorator bundling the CLI options shared by deployment commands."""
    shared_options = [
        click.option(
            '--private-key',
            required=True,
            help='Path to a private key store.',
        ),
        click.option(
            '--rpc-provider',
            default='http://127.0.0.1:8545',
            help='Address of the Ethereum RPC provider',
        ),
        click.option(
            '--wait',
            default=300,
            help='Max tx wait time in s.',
        ),
        click.option(
            '--gas-price',
            default=5,
            type=int,
            help='Gas price to use in gwei',
        ),
        click.option(
            '--gas-limit',
            default=5_500_000,
        ),
        click.option(
            '--contracts-version',
            default=None,
            help='Contracts version to verify. Current version will be used by default.',
        ),
    ]

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)

    # Apply bottom-up, matching the original stacked-decorator order.
    for option in reversed(shared_options):
        wrapper = option(wrapper)
    return wrapper
A decorator that combines commonly appearing
13,166
def contracts_source_path_with_stem(stem):
    """Return the Solidity compiler's directory remapping rooted at `stem`."""
    return {
        name: _BASE.joinpath(stem, name)
        for name in ('lib', 'raiden', 'test', 'services')
    }
The directory remapping given to the Solidity compiler .
13,167
def compile_contracts(self, target_path: Path) -> ContractManager:
    """Compile all sources and persist them as JSON at `target_path`.

    Returns a ContractManager backed by the freshly written file.
    Raises ContractSourceManagerCompilationError when checksumming failed.
    """
    self.checksum_contracts()
    if self.overall_checksum is None:
        raise ContractSourceManagerCompilationError('Checksumming failed.')
    compiled = self._compile_all_contracts()

    target_path.parent.mkdir(parents=True, exist_ok=True)
    payload = dict(
        contracts=compiled,
        contracts_checksums=self.contracts_checksums,
        overall_checksum=self.overall_checksum,
        contracts_version=None,
    )
    with target_path.open(mode='w') as target_file:
        target_file.write(json.dumps(payload, sort_keys=True, indent=4))
    return ContractManager(target_path)
Store compiled contracts JSON at target_path .
13,168
def verify_precompiled_checksums(self, precompiled_path: Path) -> None:
    """Compare this source tree's checksums against a precompiled file.

    Raises ContractSourceManagerVerificationError on any per-contract or
    overall mismatch.
    """
    contracts_precompiled = ContractManager(precompiled_path)
    assert self.contracts_checksums is not None
    for contract, checksum in self.contracts_checksums.items():
        assert contracts_precompiled.contracts_checksums is not None
        if contract not in contracts_precompiled.contracts_checksums:
            raise ContractSourceManagerVerificationError(
                f'No checksum for {contract}',
            )
        precompiled_checksum = contracts_precompiled.contracts_checksums[contract]
        if precompiled_checksum != checksum:
            raise ContractSourceManagerVerificationError(
                f'checksum of {contract} does not match {precompiled_checksum} != {checksum}',
            )
    if self.overall_checksum != contracts_precompiled.overall_checksum:
        raise ContractSourceManagerVerificationError(
            f'overall checksum does not match '
            f'{self.overall_checksum} != {contracts_precompiled.overall_checksum}',
        )
Compare source code checksums with those from a precompiled file .
13,169
def checksum_contracts(self) -> None:
    """Compute and remember a sha256 per .sol source plus an overall checksum.

    The overall checksum hashes the per-file digests joined with ':' in
    sorted filename order, so it is stable across directory iteration order.
    """
    checksums: Dict[str, str] = {}
    for source_dir in self.contracts_source_dirs.values():
        for source_file in source_dir.glob('*.sol'):
            checksums[source_file.name] = hashlib.sha256(source_file.read_bytes()).hexdigest()

    combined = ':'.join(checksums[name] for name in sorted(checksums))
    self.overall_checksum = hashlib.sha256(combined.encode()).hexdigest()
    self.contracts_checksums = checksums
Remember the checksum of each source and the overall checksum .
13,170
def _hash_pair(first: bytes, second: bytes) -> bytes:
    """Hash two nodes in lexicographic order.

    A None on either side (odd node count in a layer) passes the other
    value through unchanged.
    """
    if first is None:
        return second
    if second is None:
        return first
    low, high = (first, second) if first <= second else (second, first)
    return keccak(low + high)
Computes the hash of the items in lexicographic order
13,171
def compute_merkle_tree(items: Iterable[bytes]) -> MerkleTree:
    """Build the layered merkle tree over `items` (each a 32-byte hash).

    Raises ValueError for non-hash items or duplicates.
    """
    if not all(isinstance(item, bytes) and len(item) == 32 for item in items):
        raise ValueError('Not all items are hashes')

    leaves = sorted(items)
    if not leaves:
        return MerkleTree(layers=[[EMPTY_MERKLE_ROOT]])
    if len(set(leaves)) != len(leaves):
        raise ValueError('The leaves items must not contain duplicate items')

    layers = [leaves]
    while len(layers[-1]) > 1:
        # Pair neighbours; zip_longest pads an odd layer with None.
        pairs = iter(layers[-1])
        layers.append([_hash_pair(a, b) for a, b in zip_longest(pairs, pairs)])
    return MerkleTree(layers=layers)
Calculates the merkle root for a given list of items
13,172
def get_merkle_root(merkle_tree: MerkleTree) -> bytes:
    """Return the root (sole element of the top layer) of `merkle_tree`."""
    layers = merkle_tree.layers
    assert layers, 'the merkle tree layers are empty'
    top_layer = layers[-1]
    assert top_layer, 'the root layer is empty'
    return top_layer[0]
Returns the root element of the merkle tree .
13,173
def contracts_version_expects_deposit_limits(contracts_version: Optional[str]) -> bool:
    """Tell whether this version's TokenNetworkRegistry takes deposit limits.

    None means the current development version, which always does.
    """
    if contracts_version == '0.3._':
        return False
    if contracts_version is None:
        return True
    # Limits were introduced in 0.9.0.
    return compare(contracts_version, '0.9.0') > -1
Answers whether the TokenNetworkRegistry of the given contracts_version needs deposit limits
13,174
def _verify_deployed_contract(
        self,
        deployment_data: DeployedContracts,
        contract_name: str,
) -> 'Tuple[Contract, list]':
    """Verify one contract's deployment info against the chain.

    Checks, in order: on-chain runtime bytecode vs. the compiled artifact,
    the transaction receipt (block number, gas used, created address) vs.
    the recorded deployment data, and the on-chain contract_version().

    Note: despite the original `-> Contract` annotation, this returns a
    pair of (contract instance, recorded constructor arguments).
    """
    contracts = deployment_data['contracts']
    contract_address = contracts[contract_name]['address']
    contract_instance = self.web3.eth.contract(
        abi=self.contract_manager.get_contract_abi(contract_name),
        address=contract_address,
    )
    # 1) runtime bytecode on chain must match the compiled artifact
    blockchain_bytecode = self.web3.eth.getCode(contract_address).hex()
    compiled_bytecode = self.contract_manager.get_runtime_hexcode(contract_name)
    assert blockchain_bytecode == compiled_bytecode
    print(
        f'{contract_name} at {contract_address} '
        f'matches the compiled data from contracts.json',
    )
    # 2) receipt of the recorded deployment transaction must agree
    receipt = self.web3.eth.getTransactionReceipt(
        contracts[contract_name]['transaction_hash'],
    )
    assert receipt['blockNumber'] == contracts[contract_name]['block_number'], (
        f'We have block_number {contracts[contract_name]["block_number"]} in the deployment '
        f'info, but {receipt["blockNumber"]} in the transaction receipt from web3.'
    )
    assert receipt['gasUsed'] == contracts[contract_name]['gas_cost'], (
        f'We have gasUsed {contracts[contract_name]["gas_cost"]} in the deployment info, '
        f'but {receipt["gasUsed"]} in the transaction receipt from web3.'
    )
    assert receipt['contractAddress'] == contracts[contract_name]['address'], (
        f'We have contractAddress {contracts[contract_name]["address"]} in the deployment info'
        f' but {receipt["contractAddress"]} in the transaction receipt from web3.'
    )
    # 3) the deployed contract must report the expected version
    version = contract_instance.functions.contract_version().call()
    assert version == deployment_data['contracts_version'], \
        f'got {version} expected {deployment_data["contracts_version"]}.' \
        f'contract_manager has contracts_version {self.contract_manager.contracts_version}'
    return contract_instance, contracts[contract_name]['constructor_arguments']
Verify deployment info against the chain
13,175
def contracts_data_path(version: Optional[str] = None):
    """Return the deployment-data directory for `version` (current if None)."""
    subdir = 'data' if version is None else f'data_{version}'
    return _BASE.joinpath(subdir)
Returns the deployment data directory for a version .
13,176
def contracts_precompiled_path(version: Optional[str] = None) -> Path:
    """Return the path of the contracts.json holding ABI and bytecode."""
    return contracts_data_path(version).joinpath('contracts.json')
Returns the path of JSON file where the bytecode can be found .
13,177
def contracts_deployed_path(
        chain_id: int,
        version: Optional[str] = None,
        services: bool = False,
):
    """Return the path of the deployment-data JSON for one chain.

    `services=True` selects the service-contracts deployment file.
    Unknown chain ids map to the 'private_net' file.
    """
    data_path = contracts_data_path(version)
    chain_name = ID_TO_NETWORKNAME.get(chain_id, 'private_net')
    return data_path.joinpath(f'deployment_{"services_" if services else ""}{chain_name}.json')
Returns the path of the deployment data JSON file.
13,178
def merge_deployment_data(dict1: DeployedContracts, dict2: DeployedContracts) -> DeployedContracts:
    """Merge two deployment JSON structures into one.

    An empty side yields the other unchanged. Contract sets must be
    disjoint, and chain_id / contracts_version must agree.
    """
    if not dict1:
        return dict2
    if not dict2:
        return dict1
    merged = deepcopy(dict1['contracts'])
    assert not merged.keys() & dict2['contracts'].keys()
    merged.update(dict2['contracts'])
    assert dict2['chain_id'] == dict1['chain_id']
    assert dict2['contracts_version'] == dict1['contracts_version']
    return {
        'contracts': merged,
        'chain_id': dict1['chain_id'],
        'contracts_version': dict1['contracts_version'],
    }
Take contents of two deployment JSON files and merge them
13,179
def get_contracts_deployment_info(
        chain_id: int,
        version: Optional[str] = None,
        module: DeploymentModule = DeploymentModule.ALL,
) -> Optional[DeployedContracts]:
    """Read the deployment data for `chain_id`/`version`.

    :param module: selects RAIDEN, SERVICES or ALL deployment files
    :return: merged deployment data, or None when no file was found
    :raises ValueError: for an unknown module, or when SERVICES data is
        requested from a version that provides none
    """
    if module not in DeploymentModule:
        raise ValueError(f'Unknown module {module} given to get_contracts_deployment_info()')

    def module_chosen(to_be_added: DeploymentModule):
        return module == to_be_added or module == DeploymentModule.ALL

    files: List[Path] = []
    if module_chosen(DeploymentModule.RAIDEN):
        files.append(contracts_deployed_path(
            chain_id=chain_id,
            version=version,
            services=False,
        ))
    if module == DeploymentModule.SERVICES and not version_provides_services(version):
        raise ValueError(
            f'SERVICES module queried for version {version}, but {version} '
            'does not provide service contracts.',
        )
    if module_chosen(DeploymentModule.SERVICES) and version_provides_services(version):
        files.append(contracts_deployed_path(
            chain_id=chain_id,
            version=version,
            services=True,
        ))

    deployment_data: DeployedContracts = {}
    for deployment_file in files:
        deployment_data = merge_deployment_data(
            deployment_data,
            _load_json_from_path(deployment_file),
        )
    # An empty dict means nothing was found; normalize it to None
    # (previously done via a mutate-then-return dance).
    return deployment_data or None
Reads the deployment data . Returns None if the file is not found .
13,180
def get_contract(self, contract_name: str) -> Dict:
    """Return the full compiled artifact (ABI, BIN, ...) for `contract_name`."""
    assert self.contracts, 'ContractManager should have contracts compiled'
    compiled_contracts = self.contracts
    return compiled_contracts[contract_name]
Return ABI BIN of the given contract .
13,181
def get_contract_abi(self, contract_name: str) -> Dict:
    """Return only the ABI of `contract_name`."""
    assert self.contracts, 'ContractManager should have contracts compiled'
    artifact = self.contracts[contract_name]
    return artifact['abi']
Returns the ABI for a given contract .
13,182
def get_event_abi(self, contract_name: str, event_name: str) -> Dict:
    """Return the ABI entry for one event of one contract."""
    # Imported lazily so web3 is only required when events are looked up.
    from web3.utils.contracts import find_matching_event_abi

    assert self.contracts, 'ContractManager should have contracts compiled'
    return find_matching_event_abi(
        abi=self.get_contract_abi(contract_name),
        event_name=event_name,
    )
Returns the ABI for a given event .
13,183
def check_base_filename(self, record):
    """Return True when the log file should be rebuilt (rolled over).

    Rollover is needed when the time-based suffix no longer matches the
    current time, or when the active suffixed file has disappeared.
    `record` is unused but kept for handler-interface compatibility.
    (Previously returned 1/0; booleans are drop-in compatible.)
    """
    current_suffix = time.strftime(self.suffix, time.localtime())
    if self.suffix_time != current_suffix:
        return True
    if not os.path.exists(self.baseFilename + '.' + self.suffix_time):
        return True
    return False
Determine whether a new log file should be built (i.e. whether rollover should occur).
13,184
def build_base_filename(self):
    """Rebuild the log filename for rollover.

    The old time-stamp suffix is stripped from baseFilename and a fresh
    one (per self.suffix) is appended; the stream is closed first and
    reopened afterwards unless delayed opening is configured.
    """
    if self.stream:
        self.stream.close()
        self.stream = None
    # remove the old suffix, if any
    if self.suffix_time != "":
        index = self.baseFilename.find("." + self.suffix_time)
        if index == -1:
            # fall back to the last dot when the recorded suffix is gone
            index = self.baseFilename.rfind(".")
        self.baseFilename = self.baseFilename[:index]
    # append the new time stamp
    current_time_tuple = time.localtime()
    self.suffix_time = time.strftime(self.suffix, current_time_tuple)
    self.baseFilename = self.baseFilename + "." + self.suffix_time
    # always append to the (possibly pre-existing) file for this period
    self.mode = 'a'
    if not self.delay:
        self.stream = self._open()
Build the new log filename: the old time stamp is removed from the filename and a fresh time stamp is appended.
13,185
def _cycle_proceedings(self):
    """Find the next approvals and clone them for cycling, if any exist.

    Clones every not-yet-cloned, non-pending next approval as a fresh
    PENDING TransitionApproval (copying its permissions and groups),
    then marks the originals as cloned.

    :return: True when at least one approval was processed, else False
    """
    # only approvals that were decided on and not cloned before
    next_approvals = self._get_next_approvals().exclude(status=PENDING).exclude(cloned=True)
    for ta in next_approvals:
        # get_or_create keeps this idempotent across repeated cycles
        clone_transition_approval, c = TransitionApproval.objects.get_or_create(
            source_state=ta.source_state,
            destination_state=ta.destination_state,
            content_type=ta.content_type,
            object_id=ta.object_id,
            field_name=ta.field_name,
            skip=ta.skip,
            priority=ta.priority,
            enabled=ta.enabled,
            status=PENDING,
            meta=ta.meta
        )
        if c:
            # copy authorization only onto freshly created clones
            clone_transition_approval.permissions.add(*ta.permissions.all())
            clone_transition_approval.groups.add(*ta.groups.all())
    next_approvals.update(cloned=True)
    return True if next_approvals.count() else False
Finds the next approvals and clones them for cycling, if any exist.
13,186
def process_subscription_data(post_data):
    """Flatten the web-push subscription payload into our model's shape.

    Pops "subscription" (merging its nested "keys" dict into it) and
    "browser" out of `post_data`. Mutates `post_data`.
    """
    subscription = post_data.pop("subscription", {})
    subscription.update(subscription.pop("keys", {}))
    subscription["browser"] = post_data.pop("browser")
    return subscription
Process the subscription data according to out model
13,187
def save_avatar(strategy, details, user=None, *args, **kwargs):
    """social-auth pipeline step: store an avatar URL on the user.

    Picks a provider-specific thumbnail from the auth response (Facebook,
    Twitter, Google OAuth2) and falls back to Gravatar for anything else;
    persists the change only when the URL actually differs.
    """
    if not user:
        return
    backend_name = kwargs['backend'].__class__.__name__.lower()
    response = kwargs.get('response', {})
    social_thumb = None
    if 'facebook' in backend_name:
        if 'id' in response:
            social_thumb = (
                'http://graph.facebook.com/{0}/picture?type=normal'
            ).format(response['id'])
    elif 'twitter' in backend_name and response.get('profile_image_url'):
        social_thumb = response['profile_image_url']
    elif 'googleoauth2' in backend_name and response.get('image', {}).get('url'):
        social_thumb = response['image']['url'].split('?')[0]
    else:
        # Gravatar fallback keyed on the lowercased email.
        email_hash = hashlib.md5(user.email.lower().encode('utf8')).hexdigest()
        social_thumb = 'http://www.gravatar.com/avatar/' + email_hash + '?size=100'
    if social_thumb and user.social_thumb != social_thumb:
        user.social_thumb = social_thumb
        strategy.storage.user.changed(user)
Get user avatar from social provider .
13,188
def default_image_loader(filename, flags, **kwargs):
    """Fallback image loader.

    Instead of loading pixels, returns a callable that simply echoes the
    filename together with the rect and flags it is called with.
    """
    # NOTE: the inner `flags` parameter deliberately shadows the outer one;
    # callers get back the per-call flags, not the loader-level flags.
    def echo(rect=None, flags=None):
        return filename, rect, flags

    return echo
This default image loader just returns filename rect and any flags
13,189
def decode_gid(raw_gid):
    """Split a raw TMX GID into the plain GID and its transform flags.

    The top bits of a raw GID encode horizontal/vertical flips and
    rotation; they are masked off the returned gid.
    """
    flipped_x = bool(raw_gid & GID_TRANS_FLIPX)
    flipped_y = bool(raw_gid & GID_TRANS_FLIPY)
    rotated = bool(raw_gid & GID_TRANS_ROT)
    gid = raw_gid & ~(GID_TRANS_FLIPX | GID_TRANS_FLIPY | GID_TRANS_ROT)
    return gid, TileFlags(flipped_x, flipped_y, rotated)
Decode a GID from TMX data
13,190
def convert_to_bool(text):
    """Convert common spellings of true/false to a bool.

    Accepts anything int() understands ("1" -> True, "0" -> False) plus
    the words true/yes/false/no in any case.

    :raises ValueError: for anything unrecognized
    """
    try:
        # Numeric values and numeric strings go through int().
        return bool(int(text))
    except (TypeError, ValueError):
        # Narrowed from a bare except; other exception types now propagate.
        pass
    text = str(text).lower()
    if text == "true":
        return True
    if text == "yes":
        return True
    if text == "false":
        return False
    if text == "no":
        return False
    # Previously raised a bare ValueError with no message.
    raise ValueError('cannot convert "{0}" to a boolean'.format(text))
Convert a few common variations of true and false to boolean
13,191
def parse_properties(node):
    """Parse a Tiled xml node's <properties> children into a plain dict.

    Values carrying a `type` attribute are cast with the matching builtin
    (int, float, str, ...); unknown type names fall back to the raw string.

    NOTE(review): type="bool" resolves to the builtin bool, so
    bool("false") == True here — presumably convert_to_bool was intended;
    TODO confirm before changing behavior.
    """
    d = dict()
    for child in node.findall('properties'):
        for subnode in child.findall('property'):
            cls = None
            try:
                if "type" in subnode.keys():
                    module = importlib.import_module('builtins')
                    cls = getattr(module, subnode.get("type"))
            except AttributeError:
                # BUG fixed: the message used a broken placeholder
                # ("Type [}") and never interpolated the type name.
                logger.info(
                    "Type [{0}] Not a built-in type. Defaulting to string-cast.".format(
                        subnode.get("type"),
                    ),
                )
            d[subnode.get('name')] = cls(subnode.get('value')) if cls is not None else subnode.get('value')
    return d
Parse a Tiled xml node and return a dict that represents a tiled property
13,192
def _set_properties(self, node):
    """Populate self.properties (and plain attributes) from xml data.

    Raises ValueError when reserved or duplicate property names are used
    and duplicates are not explicitly allowed.
    """
    self._cast_and_set_attributes_from_node_items(node.items())
    properties = parse_properties(node)
    names_invalid = (
        not self.allow_duplicate_names
        and self._contains_invalid_property_name(properties.items())
    )
    if names_invalid:
        self._log_property_error_message()
        raise ValueError(
            "Reserved names and duplicate names are not allowed. Please rename your property inside the .tmx-file")
    self.properties = properties
Create dict containing Tiled object attributes from xml data
13,193
def parse_xml(self, node):
    """Parse the whole map from an ElementTree xml node.

    Loads layers (tile, image, object) and tilesets, then propagates
    tile properties onto gid-bearing objects and applies the y-axis
    inversion. Returns self for chaining.
    """
    self._set_properties(node)
    self.background_color = node.get('backgroundcolor', self.background_color)

    # load sublayers in order
    for subnode in node.findall('layer'):
        self.add_layer(TiledTileLayer(self, subnode))
    for subnode in node.findall('imagelayer'):
        self.add_layer(TiledImageLayer(self, subnode))
    for subnode in node.findall('objectgroup'):
        self.add_layer(TiledObjectGroup(self, subnode))
    for subnode in node.findall('tileset'):
        self.add_tileset(TiledTileset(self, subnode))

    # objects that reference a tile gid inherit that tile's properties
    for o in [o for o in self.objects if o.gid]:
        p = self.get_tile_properties_by_gid(o.gid)
        if p:
            for key in p:
                o.properties.setdefault(key, p[key])
        # tile objects are anchored differently when the y axis is inverted
        if self.invert_y:
            o.y -= o.height

    self.reload_images()
    return self
Parse a map from ElementTree xml node
13,194
def reload_images(self):
    """Load all map images from disk into self.images, indexed by gid.

    Covers three image sources: tileset sheets (cut into per-tile rects),
    image layers, and tiles that declare their own `source` image.
    """
    self.images = [None] * self.maxgid

    # tileset sheets: cut each sheet into tile-sized rects
    for ts in self.tilesets:
        if ts.source is None:
            continue
        path = os.path.join(os.path.dirname(self.filename), ts.source)
        colorkey = getattr(ts, 'trans', None)
        loader = self.image_loader(path, colorkey, tileset=ts)
        # iterate tile positions row-major, honoring margin and spacing
        p = product(
            range(ts.margin, ts.height + ts.margin - ts.tileheight + 1, ts.tileheight + ts.spacing),
            range(ts.margin, ts.width + ts.margin - ts.tilewidth + 1, ts.tilewidth + ts.spacing),
        )
        for real_gid, (y, x) in enumerate(p, ts.firstgid):
            rect = (x, y, ts.tilewidth, ts.tileheight)
            gids = self.map_gid(real_gid)
            # unused tiles are only loaded when requested (or load_all_tiles)
            if gids is None:
                if self.load_all_tiles or real_gid in self.optional_gids:
                    gids = [self.register_gid(real_gid, flags=0)]
            if gids:
                for gid, flags in gids:
                    self.images[gid] = loader(rect, flags)

    # image layers: one whole image per layer, appended with a fresh gid
    for layer in (i for i in self.layers if isinstance(i, TiledImageLayer)):
        source = getattr(layer, 'source', None)
        if source:
            colorkey = getattr(layer, 'trans', None)
            real_gid = len(self.images)
            gid = self.register_gid(real_gid)
            layer.gid = gid
            path = os.path.join(os.path.dirname(self.filename), source)
            loader = self.image_loader(path, colorkey)
            image = loader()
            self.images.append(image)

    # tiles with an individual image file override their sheet slot
    for real_gid, props in self.tile_properties.items():
        source = props.get('source', None)
        if source:
            colorkey = props.get('trans', None)
            path = os.path.join(os.path.dirname(self.filename), source)
            loader = self.image_loader(path, colorkey)
            image = loader()
            self.images[real_gid] = image
Load the map images from disk
13,195
def get_tile_locations_by_gid(self, gid):
    """Yield (x, y, layer_index) for every tile in a visible tile layer
    whose gid matches `gid`."""
    for layer_index in self.visible_tile_layers:
        for x, y, tile_gid in self.layers[layer_index].iter_data():
            if tile_gid == gid:
                yield x, y, layer_index
Search map for tile locations by the GID
13,196
def get_tile_properties_by_layer(self, layer):
    """Yield (gid, properties) for each distinct gid used in `layer`.

    Gids without registered properties are silently skipped.
    Raises ValueError when `layer` is not a positive integer index.
    """
    try:
        assert (int(layer) >= 0)
        layer = int(layer)
    except (TypeError, AssertionError):
        msg = "Layer must be a positive integer. Got {0} instead."
        logger.debug(msg.format(type(layer)))
        raise ValueError

    coords = product(range(self.width), range(self.height))
    gids_in_layer = set(self.layers[layer].data[y][x] for x, y in coords)
    for gid in gids_in_layer:
        if gid in self.tile_properties:
            yield gid, self.tile_properties[gid]
Get the tile properties of each GID in layer
13,197
def add_tileset(self, tileset):
    """Add a tileset to the map.

    :param tileset: must be a TiledTileset instance
    """
    assert (isinstance(tileset, TiledTileset))
    self.tilesets.append(tileset)
Add a tileset to the map
13,198
def get_layer_by_name(self, name):
    """Return the layer registered under `name`.

    Raises ValueError (after logging) when no such layer exists.
    """
    try:
        return self.layernames[name]
    except KeyError:
        logger.debug('Layer "{0}" not found.'.format(name))
        raise ValueError
Return a layer by name
13,199
def get_object_by_name(self, name):
    """Return the first map object named `name`; ValueError when absent."""
    match = next((obj for obj in self.objects if obj.name == name), None)
    if match is None:
        raise ValueError
    return match
Find an object