idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
51,200 | def reset ( self ) : self . _will_reset ( ) if self . _has_backup : self . _restore ( ) else : _LIB . Reset ( self . _env ) self . _did_reset ( ) self . done = False return self . screen | Reset the state of the environment and returns an initial observation . |
51,201 | def step ( self , action ) : if self . done : raise ValueError ( 'cannot step in a done environment! call `reset`' ) self . controllers [ 0 ] [ : ] = action _LIB . Step ( self . _env ) reward = self . _get_reward ( ) self . done = self . _get_done ( ) info = self . _get_info ( ) self . _did_step ( self . done ) if reward < self . reward_range [ 0 ] : reward = self . reward_range [ 0 ] elif reward > self . reward_range [ 1 ] : reward = self . reward_range [ 1 ] return self . screen , reward , self . done , info | Run one frame of the NES and return the relevant observation data . |
51,202 | def close ( self ) : if self . _env is None : raise ValueError ( 'env has already been closed.' ) _LIB . Close ( self . _env ) self . _env = None if self . viewer is not None : self . viewer . close ( ) | Close the environment . |
51,203 | def render ( self , mode = 'human' ) : if mode == 'human' : if self . viewer is None : from . _image_viewer import ImageViewer if self . spec is None : caption = self . _rom_path . split ( '/' ) [ - 1 ] else : caption = self . spec . id self . viewer = ImageViewer ( caption = caption , height = SCREEN_HEIGHT , width = SCREEN_WIDTH , ) self . viewer . show ( self . screen ) elif mode == 'rgb_array' : return self . screen else : render_modes = [ repr ( x ) for x in self . metadata [ 'render.modes' ] ] msg = 'valid render modes are: {}' . format ( ', ' . join ( render_modes ) ) raise NotImplementedError ( msg ) | Render the environment . |
51,204 | def _get_args ( ) : parser = argparse . ArgumentParser ( description = __doc__ ) parser . add_argument ( '--rom' , '-r' , type = str , help = 'The path to the ROM to play.' , required = True , ) parser . add_argument ( '--mode' , '-m' , type = str , default = 'human' , choices = [ 'human' , 'random' ] , help = 'The execution mode for the emulation.' , ) parser . add_argument ( '--steps' , '-s' , type = int , default = 500 , help = 'The number of random steps to take.' , ) return parser . parse_args ( ) | Parse arguments from the command line and return them . |
51,205 | def main ( ) : args = _get_args ( ) env = NESEnv ( args . rom ) if args . mode == 'human' : play_human ( env ) else : play_random ( env , args . steps ) | The main entry point for the command line interface . |
51,206 | def play_random ( env , steps ) : try : done = True progress = tqdm ( range ( steps ) ) for _ in progress : if done : _ = env . reset ( ) action = env . action_space . sample ( ) _ , reward , done , info = env . step ( action ) progress . set_postfix ( reward = reward , info = info ) env . render ( ) except KeyboardInterrupt : pass env . close ( ) | Play the environment making uniformly random decisions . |
51,207 | def markowitz_portfolio ( cov_mat , exp_rets , target_ret , allow_short = False , market_neutral = False ) : if not isinstance ( cov_mat , pd . DataFrame ) : raise ValueError ( "Covariance matrix is not a DataFrame" ) if not isinstance ( exp_rets , pd . Series ) : raise ValueError ( "Expected returns is not a Series" ) if not isinstance ( target_ret , float ) : raise ValueError ( "Target return is not a float" ) if not cov_mat . index . equals ( exp_rets . index ) : raise ValueError ( "Indices do not match" ) if market_neutral and not allow_short : warnings . warn ( "A market neutral portfolio implies shorting" ) allow_short = True n = len ( cov_mat ) P = opt . matrix ( cov_mat . values ) q = opt . matrix ( 0.0 , ( n , 1 ) ) if not allow_short : G = opt . matrix ( np . vstack ( ( - exp_rets . values , - np . identity ( n ) ) ) ) h = opt . matrix ( np . vstack ( ( - target_ret , + np . zeros ( ( n , 1 ) ) ) ) ) else : G = opt . matrix ( - exp_rets . values ) . T h = opt . matrix ( - target_ret ) A = opt . matrix ( 1.0 , ( 1 , n ) ) if not market_neutral : b = opt . matrix ( 1.0 ) else : b = opt . matrix ( 0.0 ) optsolvers . options [ 'show_progress' ] = False sol = optsolvers . qp ( P , q , G , h , A , b ) if sol [ 'status' ] != 'optimal' : warnings . warn ( "Convergence problem" ) weights = pd . Series ( sol [ 'x' ] , index = cov_mat . index ) return weights | Computes a Markowitz portfolio . |
51,208 | def min_var_portfolio ( cov_mat , allow_short = False ) : if not isinstance ( cov_mat , pd . DataFrame ) : raise ValueError ( "Covariance matrix is not a DataFrame" ) n = len ( cov_mat ) P = opt . matrix ( cov_mat . values ) q = opt . matrix ( 0.0 , ( n , 1 ) ) if not allow_short : G = opt . matrix ( - np . identity ( n ) ) h = opt . matrix ( 0.0 , ( n , 1 ) ) else : G = None h = None A = opt . matrix ( 1.0 , ( 1 , n ) ) b = opt . matrix ( 1.0 ) optsolvers . options [ 'show_progress' ] = False sol = optsolvers . qp ( P , q , G , h , A , b ) if sol [ 'status' ] != 'optimal' : warnings . warn ( "Convergence problem" ) weights = pd . Series ( sol [ 'x' ] , index = cov_mat . index ) return weights | Computes the minimum variance portfolio . |
51,209 | def print_portfolio_info ( returns , avg_rets , weights ) : ret = ( weights * avg_rets ) . sum ( ) std = ( weights * returns ) . sum ( 1 ) . std ( ) sharpe = ret / std print ( "Optimal weights:\n{}\n" . format ( weights ) ) print ( "Expected return: {}" . format ( ret ) ) print ( "Expected variance: {}" . format ( std ** 2 ) ) print ( "Expected Sharpe: {}" . format ( sharpe ) ) | Print information on expected portfolio performance . |
51,210 | def main ( self ) : for m in self . methods : if m . name in [ 'Main' , 'main' ] : return m if len ( self . methods ) : return self . methods [ 0 ] return None | Return the default method in this module . |
51,211 | def orderered_methods ( self ) : oms = [ ] self . methods . reverse ( ) if self . main : oms = [ self . main ] for m in self . methods : if m == self . main : continue oms . append ( m ) return oms | An ordered list of methods |
51,212 | def write_methods ( self ) : b_array = bytearray ( ) for key , vm_token in self . all_vm_tokens . items ( ) : b_array . append ( vm_token . out_op ) if vm_token . data is not None and vm_token . vm_op != VMOp . NOP : b_array = b_array + vm_token . data return b_array | Write all methods in the current module to a byte string . |
51,213 | def link_methods ( self ) : from . . compiler import Compiler for method in self . methods : method . prepare ( ) self . all_vm_tokens = OrderedDict ( ) address = 0 for method in self . orderered_methods : if not method . is_interop : method . address = address for key , vmtoken in method . vm_tokens . items ( ) : self . all_vm_tokens [ address ] = vmtoken address += 1 if vmtoken . data is not None and vmtoken . vm_op != VMOp . NOP : address += len ( vmtoken . data ) vmtoken . addr = vmtoken . addr + method . address for key , vmtoken in self . all_vm_tokens . items ( ) : if vmtoken . src_method is not None : target_method = self . method_by_name ( vmtoken . target_method ) if target_method : jump_len = target_method . address - vmtoken . addr param_ret_counts = bytearray ( ) if Compiler . instance ( ) . nep8 : param_ret_counts = vmtoken . data [ 0 : 2 ] jump_len -= 2 if jump_len > - 32767 and jump_len < 32767 : vmtoken . data = param_ret_counts + jump_len . to_bytes ( 2 , 'little' , signed = True ) else : vmtoken . data = param_ret_counts + jump_len . to_bytes ( 4 , 'little' , signed = True ) else : raise Exception ( "Target method %s not found" % vmtoken . target_method ) | Perform linkage of addresses between methods . |
51,214 | def export_debug ( self , output_path ) : file_hash = hashlib . md5 ( open ( output_path , 'rb' ) . read ( ) ) . hexdigest ( ) avm_name = os . path . splitext ( os . path . basename ( output_path ) ) [ 0 ] json_data = self . generate_debug_json ( avm_name , file_hash ) mapfilename = output_path . replace ( '.avm' , '.debug.json' ) with open ( mapfilename , 'w+' ) as out_file : out_file . write ( json_data ) | this method is used to generate a debug map for NEO debugger |
51,215 | def load_and_save ( path , output_path = None , use_nep8 = True ) : compiler = Compiler . load ( os . path . abspath ( path ) , use_nep8 = use_nep8 ) data = compiler . write ( ) if output_path is None : fullpath = os . path . realpath ( path ) path , filename = os . path . split ( fullpath ) newfilename = filename . replace ( '.py' , '.avm' ) output_path = '%s/%s' % ( path , newfilename ) Compiler . write_file ( data , output_path ) compiler . entry_module . export_debug ( output_path ) return data | Call load_and_save to load a Python file to be compiled to the . avm format and save the result . By default the resultant . avm file is saved along side the source file . |
51,216 | def load ( path , use_nep8 = True ) : Compiler . __instance = None compiler = Compiler . instance ( ) compiler . nep8 = use_nep8 compiler . entry_module = Module ( path ) return compiler | Call load to load a Python file to be compiled but not to write to . avm |
51,217 | def add_record_references ( self , app_id , record_id , field_id , target_record_ids ) : self . _swimlane . request ( 'post' , 'app/{0}/record/{1}/add-references' . format ( app_id , record_id ) , json = { 'fieldId' : field_id , 'targetRecordIds' : target_record_ids } ) | Bulk operation to directly add record references without making any additional requests |
51,218 | def add_comment ( self , app_id , record_id , field_id , message ) : self . _swimlane . request ( 'post' , 'app/{0}/record/{1}/{2}/comment' . format ( app_id , record_id , field_id ) , json = { 'message' : message , 'createdDate' : pendulum . now ( ) . to_rfc3339_string ( ) } ) | Directly add a comment to a record without retrieving the app or record first |
51,219 | def _evaluate ( self ) : retrieved_records = SortedDict ( ) for record_id , record in six . iteritems ( self . _elements ) : if record is self . _field . _unset : try : record = self . target_app . records . get ( id = record_id ) except SwimlaneHTTP400Error : logger . debug ( "Received 400 response retrieving record '{}', ignoring assumed orphaned record" ) continue retrieved_records [ record_id ] = record self . _elements = retrieved_records return self . _elements . values ( ) | Scan for orphaned records and retrieve any records that have not already been grabbed |
51,220 | def add ( self , record ) : self . _field . validate_value ( record ) self . _elements [ record . id ] = record self . _sync_field ( ) | Add a reference to the provided record |
51,221 | def remove ( self , record ) : self . _field . validate_value ( record ) del self . _elements [ record . id ] self . _sync_field ( ) | Remove a reference to the provided record |
51,222 | def target_app ( self ) : if self . __target_app is None : self . __target_app = self . _swimlane . apps . get ( id = self . __target_app_id ) return self . __target_app | Defer target app retrieval until requested |
51,223 | def validate_value ( self , value ) : if value not in ( None , self . _unset ) : super ( ReferenceField , self ) . validate_value ( value ) if value . app != self . target_app : raise ValidationError ( self . record , "Reference field '{}' has target app '{}', cannot reference record '{}' from app '{}'" . format ( self . name , self . target_app , value , value . app ) ) | Validate provided record is a part of the appropriate target app for the field |
51,224 | def set_swimlane ( self , value ) : if not self . multiselect : if value and not isinstance ( value , list ) : value = [ value ] value = value or [ ] records = SortedDict ( ) for record_id in value : records [ record_id ] = self . _unset return super ( ReferenceField , self ) . set_swimlane ( records ) | Store record ids in separate location for later use but ignore initial value |
51,225 | def set_python ( self , value ) : if not self . multiselect : if value and not isinstance ( value , list ) : value = [ value ] value = value or [ ] records = SortedDict ( ) for record in value : self . validate_value ( record ) records [ record . id ] = record return_value = self . _set ( records ) self . record . _raw [ 'values' ] [ self . id ] = self . get_swimlane ( ) return return_value | Expect list of record instances convert to a SortedDict for internal representation |
51,226 | def get_swimlane ( self ) : value = super ( ReferenceField , self ) . get_swimlane ( ) if value : ids = list ( value . keys ( ) ) if self . multiselect : return ids return ids [ 0 ] return None | Return list of record ids |
51,227 | def get_python ( self ) : cursor = super ( ReferenceField , self ) . get_python ( ) if self . multiselect : return cursor else : try : return cursor [ 0 ] except IndexError : return None | Return cursor if multi - select direct value if single - select |
51,228 | def get ( self , key , value ) : if key == 'id' : response = self . _swimlane . request ( 'get' , 'app/{}' . format ( value ) ) if response . status_code == 204 : raise ValueError ( 'No app with id "{}"' . format ( value ) ) return App ( self . _swimlane , response . json ( ) ) else : for app in self . list ( ) : if value and value == app . name : return app raise ValueError ( 'No app with name "{}"' . format ( value ) ) | Get single app by one of id or name |
51,229 | def list ( self ) : response = self . _swimlane . request ( 'get' , 'app' ) return [ App ( self . _swimlane , item ) for item in response . json ( ) ] | Retrieve list of all apps |
51,230 | def users ( self ) : if self . __users is None : self . __users = GroupUsersCursor ( swimlane = self . _swimlane , user_ids = self . __user_ids ) return self . __users | Returns a GroupUsersCursor with list of User instances for this Group |
51,231 | def _evaluate ( self ) : if self . _elements : for element in self . _elements : yield element else : for user_id in self . __user_ids : element = self . _swimlane . users . get ( id = user_id ) self . _elements . append ( element ) yield element | Lazily retrieve and build User instances from returned data |
51,232 | def _user_raw_from_login_content ( login_content ) : matching_keys = [ 'displayName' , 'lastLogin' , 'active' , 'name' , 'isMe' , 'lastPasswordChangedDate' , 'passwordResetRequired' , 'groups' , 'roles' , 'email' , 'isAdmin' , 'createdDate' , 'modifiedDate' , 'createdByUser' , 'modifiedByUser' , 'userName' , 'id' , 'disabled' ] raw_data = { '$type' : User . _type , } for key in matching_keys : if key in login_content : raw_data [ key ] = login_content [ key ] return raw_data | Returns a User instance with appropriate raw data parsed from login response content |
51,233 | def __verify_server_version ( self ) : if compare_versions ( '.' . join ( [ _lib_major_version , _lib_minor_version ] ) , self . product_version ) > 0 : logger . warning ( 'Client version {} connecting to server with newer minor release {}.' . format ( _lib_full_version , self . product_version ) ) if compare_versions ( _lib_major_version , self . product_version ) != 0 : raise InvalidSwimlaneProductVersion ( self , '{}.0' . format ( _lib_major_version ) , '{}.0' . format ( str ( int ( _lib_major_version ) + 1 ) ) ) | Verify connected to supported server product version |
51,234 | def settings ( self ) : if not self . __settings : self . __settings = self . request ( 'get' , 'settings' ) . json ( ) return self . __settings | Retrieve and cache settings from server |
51,235 | def product_version ( self ) : version_separator = '+' if version_separator in self . version : return self . version . split ( version_separator ) [ 0 ] return self . version . split ( '-' ) [ 0 ] | Swimlane product version |
51,236 | def build_number ( self ) : version_separator = '+' if version_separator in self . version : return self . version . split ( version_separator ) [ 2 ] return self . version . split ( '-' ) [ 1 ] | Swimlane build number |
51,237 | def authenticate ( self ) : self . _swimlane . _session . auth = None resp = self . _swimlane . request ( 'post' , 'user/login' , json = { 'userName' : self . _username , 'password' : self . _password } , ) self . _swimlane . _session . auth = self json_content = resp . json ( ) token = json_content . pop ( 'token' , None ) token_data = jwt . decode ( token , verify = False ) token_expiration = pendulum . from_timestamp ( token_data [ 'exp' ] ) headers = { 'Authorization' : 'Bearer {}' . format ( token ) } user = User ( self . _swimlane , _user_raw_from_login_content ( json_content ) ) self . _login_headers = headers self . user = user self . _token_expiration = token_expiration | Send login request and update User instance login headers and token expiration |
51,238 | def _evaluate ( self ) : if self . _elements : for element in self . _elements : yield element else : for page in itertools . count ( ) : raw_elements = self . _retrieve_raw_elements ( page ) for raw_element in raw_elements : element = self . _parse_raw_element ( raw_element ) self . _elements . append ( element ) yield element if self . __limit and len ( self . _elements ) >= self . __limit : break if any ( [ len ( raw_elements ) < self . page_size , ( self . __limit and len ( self . _elements ) >= self . __limit ) ] ) : break | Lazily retrieve and paginate report results and build Record instances from returned data |
51,239 | def _validate_user ( self , user ) : if self . _show_all_users : return if user . id in self . _allowed_user_ids : return user_member_group_ids = set ( [ g [ 'id' ] for g in user . _raw [ 'groups' ] ] ) if user_member_group_ids & self . _allowed_member_ids : return raise ValidationError ( self . record , 'User `{}` is not a valid selection for field `{}`' . format ( user , self . name ) ) | Validate a User instance against allowed user IDs or membership in a group |
51,240 | def _validate_group ( self , group ) : if self . _show_all_groups : return if group . id in self . _allowed_group_ids : return for parent_group_id in self . _allowed_subgroup_ids : parent_group = self . _swimlane . groups . get ( id = parent_group_id ) parent_group_child_ids = set ( [ g [ 'id' ] for g in parent_group . _raw [ 'groups' ] ] ) if group . id in parent_group_child_ids : return raise ValidationError ( self . record , 'Group `{}` is not a valid selection for field `{}`' . format ( group , self . name ) ) | Validate a Group instance against allowed group IDs or subgroup of a parent group |
51,241 | def cast_to_python ( self , value ) : if value is not None : value = UserGroup ( self . _swimlane , value ) return value | Convert JSON definition to UserGroup object |
51,242 | def cursor ( self ) : if self . _cursor is None : self . _cursor = self . cursor_class ( self , self . get_initial_elements ( ) ) return self . _cursor | Cache and return cursor_class instance |
51,243 | def comment ( self , message ) : message = str ( message ) sw_repr = { '$type' : 'Core.Models.Record.Comments, Core' , 'createdByUser' : self . _record . _swimlane . user . as_usergroup_selection ( ) , 'createdDate' : pendulum . now ( ) . to_rfc3339_string ( ) , 'message' : message } comment = Comment ( self . _swimlane , sw_repr ) self . _elements . append ( comment ) self . _record . _raw [ 'comments' ] . setdefault ( self . _field . id , [ ] ) self . _record . _raw [ 'comments' ] [ self . _field . id ] . append ( comment . _raw ) return comment | Add new comment to record comment field |
51,244 | def get_recursive_subclasses ( cls ) : return cls . __subclasses__ ( ) + [ g for s in cls . __subclasses__ ( ) for g in get_recursive_subclasses ( s ) ] | Return list of all subclasses for a class including subclasses of direct subclasses |
51,245 | def import_submodules ( package ) : if isinstance ( package , str ) : package = importlib . import_module ( package ) results = { } for _ , full_name , is_pkg in pkgutil . walk_packages ( package . __path__ , package . __name__ + '.' ) : results [ full_name ] = importlib . import_module ( full_name ) if is_pkg : results . update ( import_submodules ( full_name ) ) return results | Return list of imported module instances from beneath root_package |
51,246 | def one_of_keyword_only ( * valid_keywords ) : def decorator ( func ) : @ functools . wraps ( func ) def wrapper ( * args , ** kwargs ) : sentinel = object ( ) values = { } for key in valid_keywords : kwarg_value = kwargs . pop ( key , sentinel ) if kwarg_value is not sentinel : values [ key ] = kwarg_value if kwargs : raise TypeError ( 'Unexpected arguments: {}' . format ( kwargs ) ) if not values : raise TypeError ( 'Must provide one of {} as keyword argument' . format ( ', ' . join ( valid_keywords ) ) ) if len ( values ) > 1 : raise TypeError ( 'Must provide only one of {} as keyword argument. Received {}' . format ( ', ' . join ( valid_keywords ) , values ) ) return func ( * ( args + values . popitem ( ) ) ) return wrapper return decorator | Decorator to help make one - and - only - one keyword - only argument functions more reusable |
51,247 | def get_python ( self ) : value = super ( DatetimeField , self ) . get_python ( ) if value is not None : if self . input_type == self . _type_time : value = value . time ( ) if self . input_type == self . _type_date : value = value . date ( ) return value | Coerce to best date type representation for the field subtype |
51,248 | def cast_to_swimlane ( self , value ) : if value is None : return value if self . input_type == self . _type_interval : return value . in_seconds ( ) * 1000 return self . format_datetime ( value ) | Return datetimes formatted as expected by API and timespans as millisecond epochs |
51,249 | def for_json ( self ) : value = super ( DatetimeField , self ) . for_json ( ) if isinstance ( value , pendulum . Interval ) : return value . in_seconds ( ) * 1000 if isinstance ( value , datetime ) : return self . format_datetime ( value ) if isinstance ( value , pendulum . Time ) : return str ( value ) if isinstance ( value , pendulum . Date ) : return value . to_date_string ( ) | Return date ISO8601 string formats for datetime date and time values milliseconds for intervals |
51,250 | def report_factory ( app , report_name , ** kwargs ) : created = pendulum . now ( ) . to_rfc3339_string ( ) user_model = app . _swimlane . user . as_usergroup_selection ( ) return Report ( app , { "$type" : Report . _type , "groupBys" : [ ] , "aggregates" : [ ] , "applicationIds" : [ app . id ] , "columns" : [ ] , "sorts" : { "$type" : "System.Collections.Generic.Dictionary`2" "[[System.String, mscorlib]," "[Core.Models.Search.SortTypes, Core]], mscorlib" , } , "filters" : [ ] , "defaultSearchReport" : False , "allowed" : [ ] , "permissions" : { "$type" : "Core.Models.Security.PermissionMatrix, Core" } , "createdDate" : created , "modifiedDate" : created , "createdByUser" : user_model , "modifiedByUser" : user_model , "id" : None , "name" : report_name , "disabled" : False , "keywords" : "" } , ** kwargs ) | Report instance factory populating boilerplate raw data |
51,251 | def filter ( self , field_name , operand , value ) : if operand not in self . _FILTER_OPERANDS : raise ValueError ( 'Operand must be one of {}' . format ( ', ' . join ( self . _FILTER_OPERANDS ) ) ) record_stub = record_factory ( self . _app ) field = record_stub . get_field ( field_name ) self . _raw [ 'filters' ] . append ( { "fieldId" : field . id , "filterType" : operand , "value" : field . get_report ( value ) } ) | Adds a filter to report |
51,252 | def list ( self ) : raw_reports = self . _swimlane . request ( 'get' , "reports?appId={}" . format ( self . _app . id ) ) . json ( ) return [ Report ( self . _app , raw_report ) for raw_report in raw_reports if raw_report [ '$type' ] == Report . _type ] | Retrieve all reports for parent app |
51,253 | def get ( self , report_id ) : return Report ( self . _app , self . _swimlane . request ( 'get' , "reports/{0}" . format ( report_id ) ) . json ( ) ) | Retrieve report by ID |
51,254 | def get ( self , key , value ) : if key == 'id' : response = self . _swimlane . request ( 'get' , 'groups/{}' . format ( value ) ) return Group ( self . _swimlane , response . json ( ) ) else : response = self . _swimlane . request ( 'get' , 'groups/lookup?name={}' . format ( value ) ) matched_groups = response . json ( ) for group_data in matched_groups : if group_data . get ( 'name' ) == value : return Group ( self . _swimlane , group_data ) raise ValueError ( 'Unable to find group with name "{}"' . format ( value ) ) | Retrieve single group record by id or name |
51,255 | def get ( self , arg , value ) : if arg == 'id' : response = self . _swimlane . request ( 'get' , 'user/{}' . format ( value ) ) try : user_data = response . json ( ) except ValueError : raise ValueError ( 'Unable to find user with ID "{}"' . format ( value ) ) return User ( self . _swimlane , user_data ) else : response = self . _swimlane . request ( 'get' , 'user/search?query={}' . format ( quote_plus ( value ) ) ) matched_users = response . json ( ) target_matches = [ ] for user_data in matched_users : user_display_name = user_data . get ( 'displayName' ) if user_display_name == value : target_matches . append ( user_data ) if not target_matches : raise ValueError ( 'Unable to find user with display name "{}"' . format ( value ) ) if len ( target_matches ) > 1 : raise ValueError ( 'Multiple users returned with display name "{}". Matching user IDs: {}' . format ( value , ', ' . join ( [ '"{}"' . format ( r [ 'id' ] ) for r in target_matches ] ) ) ) return User ( self . _swimlane , target_matches [ 0 ] ) | Retrieve single user record by id or username |
51,256 | def _evaluate ( self ) : if not self . __retrieved : self . _elements = self . _retrieve_revisions ( ) self . __retrieved = True return super ( RevisionCursor , self ) . _evaluate ( ) | Lazily retrieves caches and returns the list of record _revisions |
51,257 | def _retrieve_revisions ( self ) : response = self . _swimlane . request ( 'get' , 'history' , params = { 'type' : 'Records' , 'id' : self . _record . id } ) raw_revisions = response . json ( ) return [ Revision ( self . _record , raw ) for raw in raw_revisions ] | Retrieve and populate Revision instances from history API endpoint |
51,258 | def validate_value ( self , value ) : super ( ValuesListField , self ) . validate_value ( value ) if value is not None : if value not in self . selection_to_id_map : raise ValidationError ( self . record , 'Field "{}" invalid value "{}". Valid options: {}' . format ( self . name , value , ', ' . join ( self . selection_to_id_map . keys ( ) ) ) ) | Validate provided value is one of the valid options |
51,259 | def cast_to_report ( self , value ) : value = super ( ValuesListField , self ) . cast_to_report ( value ) if value : return value [ 'id' ] | Report format uses only the value s id |
51,260 | def validate_filters_or_records ( filters_or_records ) : if not filters_or_records : raise ValueError ( 'Must provide at least one filter tuples or Records' ) if not isinstance ( filters_or_records [ 0 ] , ( Record , tuple ) ) : raise ValueError ( 'Cannot provide both filter tuples and Records' ) _type = type ( filters_or_records [ 0 ] ) for item in filters_or_records : if not isinstance ( item , _type ) : raise ValueError ( "Expected filter tuple or Record, received {0}" . format ( item ) ) return _type | Validation for filters_or_records variable from bulk_modify and bulk_delete |
51,261 | def get ( self , key , value ) : if key == 'id' : response = self . _swimlane . request ( 'get' , "app/{0}/record/{1}" . format ( self . _app . id , value ) ) return Record ( self . _app , response . json ( ) ) if key == 'tracking_id' : response = self . _swimlane . request ( 'get' , "app/{0}/record/tracking/{1}" . format ( self . _app . id , value ) ) return Record ( self . _app , response . json ( ) ) | Get a single record by id |
51,262 | def search ( self , * filters , ** kwargs ) : report = self . _app . reports . build ( 'search-' + random_string ( 8 ) , keywords = kwargs . pop ( 'keywords' , [ ] ) , limit = kwargs . pop ( 'limit' , Report . default_limit ) ) for filter_tuples in filters : report . filter ( * filter_tuples ) return list ( report ) | Shortcut to generate a new temporary search report using provided filters and return the resulting records |
51,263 | def create ( self , ** fields ) : new_record = record_factory ( self . _app , fields ) new_record . save ( ) return new_record | Create and return a new record in associated app and return the newly created Record instance |
51,264 | def bulk_create ( self , * records ) : if not records : raise TypeError ( 'Must provide at least one record' ) if any ( not isinstance ( r , dict ) for r in records ) : raise TypeError ( 'New records must be provided as dicts' ) new_records = [ ] for record_data in records : record = record_factory ( self . _app , record_data ) record . validate ( ) new_records . append ( record ) self . _swimlane . request ( 'post' , 'app/{}/record/batch' . format ( self . _app . id ) , json = [ r . _raw for r in new_records ] ) | Create and validate multiple records in associated app |
51,265 | def _validate_list ( self , target ) : min_items = self . _field . field_definition . get ( 'minItems' ) max_items = self . _field . field_definition . get ( 'maxItems' ) if min_items is not None : if len ( target ) < min_items : raise ValidationError ( self . _record , "Field '{}' must have a minimum of {} item(s)" . format ( self . _field . name , min_items ) ) if max_items is not None : if len ( target ) > max_items : raise ValidationError ( self . _record , "Field '{}' can only have a maximum of {} item(s)" . format ( self . _field . name , max_items ) ) for item in target : self . _validate_item ( item ) | Validate a list against field validation rules |
51,266 | def set_swimlane ( self , value ) : value = value or [ ] self . _initial_value_to_ids_map = defaultdict ( list ) for item in value : self . _initial_value_to_ids_map [ item [ 'value' ] ] . append ( item [ 'id' ] ) return super ( ListField , self ) . set_swimlane ( [ d [ 'value' ] for d in value ] ) | Convert from list of dicts with values to list of values |
51,267 | def set_python ( self , value ) : if not isinstance ( value , ( list , type ( None ) ) ) : raise ValidationError ( self . record , "Field '{}' must be set to a list, not '{}'" . format ( self . name , value . __class__ ) ) value = value or [ ] self . cursor . _validate_list ( value ) return super ( ListField , self ) . set_python ( value ) | Validate using cursor for consistency between direct set of values vs modification of cursor values |
51,268 | def cast_to_swimlane ( self , value ) : value = super ( ListField , self ) . cast_to_swimlane ( value ) if not value : return None value_ids = deepcopy ( self . _initial_value_to_ids_map ) return [ self . _build_list_item ( item , value_ids [ item ] . pop ( 0 ) if value_ids [ item ] else None ) for item in value ] | Restore swimlane format attempting to keep initial IDs for any previously existing values |
51,269 | def select ( self , element ) : self . _field . validate_value ( element ) self . _elements . add ( element ) self . _sync_field ( ) | Add an element to the set of selected elements Proxy to internal set . add and sync field |
51,270 | def get_python ( self ) : if self . multiselect : return super ( MultiSelectField , self ) . get_python ( ) return self . _get ( ) | Only return cursor instance if configured for multiselect |
51,271 | def get_swimlane ( self ) : if self . multiselect : value = self . _get ( ) children = [ ] if value : for child in value : children . append ( self . cast_to_swimlane ( child ) ) return children return None return super ( MultiSelectField , self ) . get_swimlane ( ) | Handle multi - select and single - select modes |
51,272 | def set_python ( self , value ) : if self . multiselect : value = value or [ ] elements = [ ] for element in value : self . validate_value ( element ) elements . append ( element ) value = elements else : self . validate_value ( value ) self . _set ( value ) | Override to remove key from raw data when empty to work with server 2 . 16 + validation |
51,273 | def set_swimlane ( self , value ) : if self . multiselect : value = value or [ ] children = [ ] for child in value : children . append ( self . cast_to_python ( child ) ) return self . _set ( children ) return super ( MultiSelectField , self ) . set_swimlane ( value ) | Cast all multi - select elements to correct internal type like single - select mode |
51,274 | def for_json ( self ) : if self . multiselect : return super ( MultiSelectField , self ) . for_json ( ) value = self . get_python ( ) if hasattr ( value , 'for_json' ) : return value . for_json ( ) return value | Handle multi - select vs single - select |
51,275 | def record_factory ( app , fields = None ) : record = Record ( app , { '$type' : Record . _type , 'isNew' : True , 'applicationId' : app . id , 'comments' : { '$type' : 'System.Collections.Generic.Dictionary`2[[System.String, mscorlib],[System.Collections.Generic.List`1[[Core.Models.Record.Comments, Core]], mscorlib]], mscorlib' } , 'values' : { '$type' : 'System.Collections.Generic.Dictionary`2[[System.String, mscorlib],[System.Object, mscorlib]], mscorlib' } } ) fields = fields or { } for name , value in six . iteritems ( fields ) : record [ name ] = value copy_raw = copy . copy ( record . _raw ) values_dict = { } for key , value in six . iteritems ( copy_raw [ 'values' ] ) : if value is not None : values_dict [ key ] = value record . _raw [ 'values' ] = values_dict return record | Return a temporary Record instance to be used for field validation and value parsing |
51,276 | def set_python ( self , value ) : if value is not None and not isinstance ( value , self . supported_types ) or isinstance ( value , int ) : value = str ( value ) return super ( TextField , self ) . set_python ( value ) | Set field internal value from the python representation of field value |
51,277 | def compare_versions ( version_a , version_b , zerofill = False ) : a_sections = list ( ( int ( match ) for match in re . findall ( r'\d+' , version_a ) ) ) b_sections = list ( ( int ( match ) for match in re . findall ( r'\d+' , version_b ) ) ) if zerofill : max_sections = max ( [ len ( a_sections ) , len ( b_sections ) ] ) a_sections += [ 0 for _ in range ( max ( max_sections - len ( a_sections ) , 0 ) ) ] b_sections += [ 0 for _ in range ( max ( max_sections - len ( b_sections ) , 0 ) ) ] else : min_sections = min ( [ len ( a_sections ) , len ( b_sections ) ] ) a_sections = a_sections [ : min_sections ] b_sections = b_sections [ : min_sections ] return ( b_sections > a_sections ) - ( b_sections < a_sections ) | Return direction of version relative to provided version sections |
51,278 | def requires_swimlane_version ( min_version = None , max_version = None ) : if min_version is None and max_version is None : raise ValueError ( 'Must provide either min_version, max_version, or both' ) if min_version and max_version and compare_versions ( min_version , max_version ) < 0 : raise ValueError ( 'min_version must be <= max_version ({}, {})' . format ( min_version , max_version ) ) def decorator ( func ) : @ functools . wraps ( func ) def wrapper ( self , * args , ** kwargs ) : swimlane = self . _swimlane if min_version and compare_versions ( min_version , swimlane . build_version , True ) < 0 : raise InvalidSwimlaneBuildVersion ( swimlane , min_version , max_version ) if max_version and compare_versions ( swimlane . build_version , max_version , True ) < 0 : raise InvalidSwimlaneBuildVersion ( swimlane , min_version , max_version ) return func ( self , * args , ** kwargs ) return wrapper return decorator | Decorator for SwimlaneResolver methods verifying Swimlane server build version is within a given inclusive range |
51,279 | def get_report ( self , value ) : if self . multiselect : value = value or [ ] children = [ ] for child in value : children . append ( self . cast_to_report ( child ) ) return children return self . cast_to_report ( value ) | Return provided field Python value formatted for use in report filter |
51,280 | def get_bulk_modify ( self , value ) : if self . multiselect : value = value or [ ] return [ self . cast_to_bulk_modify ( child ) for child in value ] return self . cast_to_bulk_modify ( value ) | Return value in format for bulk modify |
51,281 | def validate_value ( self , value ) : if self . readonly : raise ValidationError ( self . record , "Cannot set readonly field '{}'" . format ( self . name ) ) if value not in ( None , self . _unset ) : if self . supported_types and not isinstance ( value , tuple ( self . supported_types ) ) : raise ValidationError ( self . record , "Field '{}' expects one of {}, got '{}' instead" . format ( self . name , ', ' . join ( [ repr ( t . __name__ ) for t in self . supported_types ] ) , type ( value ) . __name__ ) ) | Validate value is an acceptable type during set_python operation |
51,282 | def _set ( self , value ) : self . _value = value self . record . _raw [ 'values' ] [ self . id ] = self . get_swimlane ( ) | Default setter used for both representations unless overridden |
51,283 | def resolve_field_class ( field_definition ) : try : return _FIELD_TYPE_MAP [ field_definition [ '$type' ] ] except KeyError as error : error . message = 'No field available to handle Swimlane $type "{}"' . format ( field_definition ) raise | Return field class most fitting of provided Swimlane field definition |
51,284 | def get_cache_index_key ( resource ) : if isinstance ( resource , APIResource ) : attr , attr_value = list ( resource . get_cache_index_keys ( ) . items ( ) ) [ 0 ] key = ( type ( resource ) , attr , attr_value ) else : key = tuple ( resource ) if len ( key ) != 3 : raise TypeError ( 'Cache key must be tuple of (class, key, value), got `{!r}` instead' . format ( key ) ) if not issubclass ( key [ 0 ] , APIResource ) : raise TypeError ( 'First value of cache key must be a subclass of APIResource, got `{!r}` instead' . format ( key [ 0 ] ) ) return key | Return a usable cache lookup key for an already initialized resource |
51,285 | def check_cache ( resource_type ) : def decorator ( func ) : @ functools . wraps ( func ) def wrapper ( * args , ** kwargs ) : try : adapter = args [ 0 ] key , val = list ( kwargs . items ( ) ) [ 0 ] except IndexError : logger . warning ( "Couldn't generate full index key, skipping cache" ) else : index_key = ( resource_type , key , val ) try : cached_record = adapter . _swimlane . resources_cache [ index_key ] except KeyError : logger . debug ( 'Cache miss: `{!r}`' . format ( index_key ) ) else : logger . debug ( 'Cache hit: `{!r}`' . format ( cached_record ) ) return cached_record return func ( * args , ** kwargs ) return wrapper return decorator | Decorator for adapter methods to check cache for resource before normally sending requests to retrieve data |
51,286 | def cache ( self , resource ) : if not isinstance ( resource , APIResource ) : raise TypeError ( 'Cannot cache `{!r}`, can only cache APIResource instances' . format ( resource ) ) if self . __cache_max_size == 0 : return try : cache_internal_key = resource . get_cache_internal_key ( ) cache_index_keys = resource . get_cache_index_keys ( ) . items ( ) except NotImplementedError : logger . warning ( 'Not caching `{!r}`, resource did not provide all necessary cache details' . format ( resource ) ) else : resource_type = type ( resource ) for key , value in cache_index_keys : self . __cache_index_key_map [ ( resource_type , key , value ) ] = cache_internal_key self . __caches [ resource_type ] [ cache_internal_key ] = resource logger . debug ( 'Cached `{!r}`' . format ( resource ) ) | Insert a resource instance into appropriate resource cache |
51,287 | def clear ( self , * resource_types ) : resource_types = resource_types or tuple ( self . __caches . keys ( ) ) for cls in resource_types : self . __caches [ cls ] . clear ( ) del self . __caches [ cls ] | Clear cache for each provided APIResource class or all resources if no classes are provided |
51,288 | def _set ( self , value ) : super ( AttachmentsField , self ) . _set ( value ) self . _cursor = None | Override setter allow clearing cursor |
51,289 | def verify_otp ( request ) : ctx = { } if request . method == "POST" : verification_code = request . POST . get ( 'verification_code' ) if verification_code is None : ctx [ 'error_message' ] = "Missing verification code." else : otp_ = UserOTP . objects . get ( user = request . user ) totp_ = totp . TOTP ( otp_ . secret_key ) is_verified = totp_ . verify ( verification_code ) if is_verified : request . session [ 'verfied_otp' ] = True response = redirect ( request . POST . get ( "next" , settings . LOGIN_REDIRECT_URL ) ) return update_rmb_cookie ( request , response ) ctx [ 'error_message' ] = "Your code is expired or invalid." ctx [ 'next' ] = request . GET . get ( 'next' , settings . LOGIN_REDIRECT_URL ) return render ( request , 'django_mfa/login_verify.html' , ctx , status = 400 ) | Verify a OTP request |
51,290 | def at ( self , for_time , counter_offset = 0 ) : if not isinstance ( for_time , datetime . datetime ) : for_time = datetime . datetime . fromtimestamp ( int ( for_time ) ) return self . generate_otp ( self . timecode ( for_time ) + counter_offset ) | Accepts either a Unix timestamp integer or a Time object . Time objects will be adjusted to UTC automatically |
51,291 | def verify ( self , otp , for_time = None , valid_window = 0 ) : if for_time is None : for_time = datetime . datetime . now ( ) if valid_window : for i in range ( - valid_window , valid_window + 1 ) : if utils . strings_equal ( str ( otp ) , str ( self . at ( for_time , i ) ) ) : return True return False return utils . strings_equal ( str ( otp ) , str ( self . at ( for_time ) ) ) | Verifies the OTP passed in against the current time OTP |
51,292 | def provisioning_uri ( self , name , issuer_name = None ) : return utils . build_uri ( self . secret , name , issuer_name = issuer_name ) | Returns the provisioning URI for the OTP This can then be encoded in a QR Code and used to provision the Google Authenticator app |
51,293 | def build_uri ( secret , name , initial_count = None , issuer_name = None ) : is_initial_count_present = ( initial_count is not None ) otp_type = 'hotp' if is_initial_count_present else 'totp' base = 'otpauth://%s/' % otp_type if issuer_name : issuer_name = quote ( issuer_name ) base += '%s:' % issuer_name uri = '%(base)s%(name)s?secret=%(secret)s' % { 'name' : quote ( name , safe = '@' ) , 'secret' : secret , 'base' : base , } if is_initial_count_present : uri += '&counter=%s' % initial_count if issuer_name : uri += '&issuer=%s' % issuer_name return uri | Returns the provisioning URI for the OTP ; works for either TOTP or HOTP . |
51,294 | def strings_equal ( s1 , s2 ) : try : s1 = unicodedata . normalize ( 'NFKC' , str ( s1 ) ) s2 = unicodedata . normalize ( 'NFKC' , str ( s2 ) ) except : s1 = unicodedata . normalize ( 'NFKC' , unicode ( s1 ) ) s2 = unicodedata . normalize ( 'NFKC' , unicode ( s2 ) ) return compare_digest ( s1 , s2 ) | Timing - attack resistant string comparison . |
51,295 | def GetPythonLibraryDirectoryPath ( ) : path = sysconfig . get_python_lib ( True ) _ , _ , path = path . rpartition ( sysconfig . PREFIX ) if path . startswith ( os . sep ) : path = path [ 1 : ] return path | Retrieves the Python library directory path . |
51,296 | def run ( self ) : compiler = new_compiler ( compiler = self . compiler ) if compiler . compiler_type == "msvc" : self . define = [ ( "UNICODE" , "" ) , ] else : command = "sh configure --disable-shared-libs" output = self . _RunCommand ( command ) print_line = False for line in output . split ( "\n" ) : line = line . rstrip ( ) if line == "configure:" : print_line = True if print_line : print ( line ) self . define = [ ( "HAVE_CONFIG_H" , "" ) , ( "LOCALEDIR" , "\"/usr/share/locale\"" ) , ] build_ext . run ( self ) | Runs the build extension . |
51,297 | def _ReadConfigureAc ( self ) : file_object = open ( "configure.ac" , "rb" ) if not file_object : raise IOError ( "Unable to open: configure.ac" ) found_ac_init = False found_library_name = False for line in file_object . readlines ( ) : line = line . strip ( ) if found_library_name : library_version = line [ 1 : - 2 ] if sys . version_info [ 0 ] >= 3 : library_version = library_version . decode ( "ascii" ) self . library_version = library_version break elif found_ac_init : library_name = line [ 1 : - 2 ] if sys . version_info [ 0 ] >= 3 : library_name = library_name . decode ( "ascii" ) self . library_name = library_name found_library_name = True elif line . startswith ( b"AC_INIT" ) : found_ac_init = True file_object . close ( ) if not self . library_name or not self . library_version : raise RuntimeError ( "Unable to find library name and version in: configure.ac" ) | Reads configure . ac to initialize the project information . |
51,298 | def _ReadMakefileAm ( self ) : if not self . library_name : raise RuntimeError ( "Missing library name" ) file_object = open ( "Makefile.am" , "rb" ) if not file_object : raise IOError ( "Unable to open: Makefile.am" ) found_subdirs = False for line in file_object . readlines ( ) : line = line . strip ( ) if found_subdirs : library_name , _ , _ = line . partition ( b" " ) if sys . version_info [ 0 ] >= 3 : library_name = library_name . decode ( "ascii" ) self . include_directories . append ( library_name ) if library_name . startswith ( "lib" ) : self . library_names . append ( library_name ) if library_name == self . library_name : break elif line . startswith ( b"SUBDIRS" ) : found_subdirs = True file_object . close ( ) if not self . include_directories or not self . library_names : raise RuntimeError ( "Unable to find include directories and library names in: " "Makefile.am" ) | Reads Makefile . am to initialize the project information . |
51,299 | def babel_compile ( source , ** kwargs ) : presets = kwargs . get ( 'presets' ) if not presets : kwargs [ 'presets' ] = [ "es2015" ] with open ( BABEL_COMPILER , 'rb' ) as babel_js : return evaljs ( ( babel_js . read ( ) . decode ( 'utf-8' ) , 'var bres, res;' 'bres = Babel.transform(dukpy.es6code, dukpy.babel_options);' , 'res = {map: bres.map, code: bres.code};' ) , es6code = source , babel_options = kwargs ) | Compiles the given source from ES6 to ES5 using Babeljs |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.