idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
50,200 | def username ( self ) : if len ( self . _inp_username . value . strip ( ) ) == 0 : if not self . hostname is None : config = parse_sshconfig ( self . hostname ) if 'user' in config : return config [ 'user' ] else : return None else : return self . _inp_username . value | Looking for username in user s input and config file |
50,201 | def load ( cls , rr , pk ) : if pk is None : return None d = rr . table ( cls . table ) . get ( pk ) . run ( ) if d is None : return None doc = cls ( rr , d ) return doc | Retrieves a document from the database by primary key . |
50,202 | def table_ensure ( cls , rr ) : dbs = rr . db_list ( ) . run ( ) if not rr . dbname in dbs : logging . info ( 'creating rethinkdb database %s' , repr ( rr . dbname ) ) rr . db_create ( rr . dbname ) . run ( ) tables = rr . table_list ( ) . run ( ) if not cls . table in tables : logging . info ( 'creating rethinkdb table %s in database %s' , repr ( cls . table ) , repr ( rr . dbname ) ) cls . table_create ( rr ) | Creates the table if it doesn t exist . |
50,203 | def pk_field ( self ) : if not self . _pk : try : pk = self . rr . db ( 'rethinkdb' ) . table ( 'table_config' ) . filter ( { 'db' : self . rr . dbname , 'name' : self . table } ) . get_field ( 'primary_key' ) [ 0 ] . run ( ) self . _pk = pk except Exception as e : raise Exception ( 'problem determining primary key for table %s.%s: %s' , self . rr . dbname , self . table , e ) return self . _pk | Name of the primary key field as retrieved from rethinkdb table metadata id by default . Should not be overridden . Override table_create if you want to use a nonstandard field as the primary key . |
50,204 | def save ( self ) : should_insert = False try : self [ self . pk_field ] if self . _updates : updates = { field : r . literal ( self . _updates [ field ] ) for field in self . _updates } query = self . rr . table ( self . table ) . get ( self . pk_value ) . update ( updates ) result = query . run ( ) if result [ 'skipped' ] : should_insert = True elif result [ 'errors' ] or result [ 'deleted' ] : raise Exception ( 'unexpected result %s from rethinkdb query %s' % ( result , query ) ) if not should_insert and self . _deletes : query = self . rr . table ( self . table ) . get ( self . pk_value ) . replace ( r . row . without ( self . _deletes ) ) result = query . run ( ) if result [ 'errors' ] : should_insert = True elif result [ 'replaced' ] != 1 : raise Exception ( 'unexpected result %s from rethinkdb query %s' % ( result , query ) ) except KeyError : should_insert = True if should_insert : query = self . rr . table ( self . table ) . insert ( self ) result = query . run ( ) if result [ 'inserted' ] != 1 : raise Exception ( 'unexpected result %s from rethinkdb query %s' % ( result , query ) ) if 'generated_keys' in result : dict . __setitem__ ( self , self . pk_field , result [ 'generated_keys' ] [ 0 ] ) self . _clear_updates ( ) | Persist changes to rethinkdb . Updates only the fields that have changed . Performs insert rather than update if the document has no primary key or if the primary key is absent from the database . |
50,205 | def refresh ( self ) : d = self . rr . table ( self . table ) . get ( self . pk_value ) . run ( ) if d is None : raise KeyError for k in d : dict . __setitem__ ( self , k , watch ( d [ k ] , callback = self . _updated , field = k ) ) | Refresh the document from the database . |
50,206 | def parse ( self , xml_file , view_name = None ) -> XmlNode : self . _setup_parser ( ) try : self . _view_name = view_name self . _parser . ParseFile ( xml_file ) except ExpatError as error : raise XmlError ( errors . messages [ error . code ] , ViewInfo ( view_name , error . lineno ) ) root = self . _root self . _reset ( ) return root | Parses xml file with xml_path and returns XmlNode |
50,207 | def apply_connectivity_changes ( self , request ) : if request is None or request == "" : raise Exception ( self . __class__ . __name__ , "request is None or empty" ) holder = JsonRequestDeserializer ( jsonpickle . decode ( request ) ) if not holder or not hasattr ( holder , "driverRequest" ) : raise Exception ( self . __class__ . __name__ , "Deserialized request is None or empty" ) driver_response = DriverResponse ( ) add_vlan_thread_list = [ ] remove_vlan_thread_list = [ ] driver_response_root = DriverResponseRoot ( ) for action in holder . driverRequest . actions : self . _logger . info ( "Action: " , action . __dict__ ) self . _validate_request_action ( action ) action_id = action . actionId full_name = action . actionTarget . fullName port_mode = action . connectionParams . mode . lower ( ) if action . type == "setVlan" : qnq = False ctag = "" for attribute in action . connectionParams . vlanServiceAttributes : if attribute . attributeName . lower ( ) == "qnq" and attribute . attributeValue . lower ( ) == "true" : qnq = True if attribute . attributeName . lower ( ) == "ctag" : ctag = attribute . attributeValue for vlan_id in self . _get_vlan_list ( action . connectionParams . vlanId ) : add_vlan_thread = Thread ( target = self . add_vlan , name = action_id , args = ( vlan_id , full_name , port_mode , qnq , ctag ) ) add_vlan_thread_list . append ( add_vlan_thread ) elif action . type == "removeVlan" : for vlan_id in self . _get_vlan_list ( action . connectionParams . vlanId ) : remove_vlan_thread = Thread ( target = self . remove_vlan , name = action_id , args = ( vlan_id , full_name , port_mode , ) ) remove_vlan_thread_list . append ( remove_vlan_thread ) else : self . _logger . warning ( "Undefined action type determined '{}': {}" . format ( action . type , action . __dict__ ) ) continue for thread in remove_vlan_thread_list : thread . start ( ) for thread in remove_vlan_thread_list : thread . join ( ) for thread in add_vlan_thread_list : thread . 
start ( ) for thread in add_vlan_thread_list : thread . join ( ) request_result = [ ] for action in holder . driverRequest . actions : result_statuses , message = zip ( * self . result . get ( action . actionId ) ) if all ( result_statuses ) : action_result = ConnectivitySuccessResponse ( action , "Add Vlan {vlan} configuration successfully completed" . format ( vlan = action . connectionParams . vlanId ) ) else : message_details = "\n\t" . join ( message ) action_result = ConnectivityErrorResponse ( action , "Add Vlan {vlan} configuration failed." "\nAdd Vlan configuration details:\n{message_details}" . format ( vlan = action . connectionParams . vlanId , message_details = message_details ) ) request_result . append ( action_result ) driver_response . actionResults = request_result driver_response_root . driverResponse = driver_response return serialize_to_json ( driver_response_root ) | Handle apply connectivity changes request json trigger add or remove vlan methods get response from them and create json response |
50,208 | def _validate_request_action ( self , action ) : is_fail = False fail_attribute = "" for class_attribute in self . APPLY_CONNECTIVITY_CHANGES_ACTION_REQUIRED_ATTRIBUTE_LIST : if type ( class_attribute ) is tuple : if not hasattr ( action , class_attribute [ 0 ] ) : is_fail = True fail_attribute = class_attribute [ 0 ] if not hasattr ( getattr ( action , class_attribute [ 0 ] ) , class_attribute [ 1 ] ) : is_fail = True fail_attribute = class_attribute [ 1 ] else : if not hasattr ( action , class_attribute ) : is_fail = True fail_attribute = class_attribute if is_fail : raise Exception ( self . __class__ . __name__ , "Mandatory field {0} is missing in ApplyConnectivityChanges request json" . format ( fail_attribute ) ) | Validate action from the request json according to APPLY_CONNECTIVITY_CHANGES_ACTION_REQUIRED_ATTRIBUTE_LIST |
50,209 | def _get_vlan_list ( self , vlan_str ) : result = set ( ) for splitted_vlan in vlan_str . split ( "," ) : if "-" not in splitted_vlan : if validate_vlan_number ( splitted_vlan ) : result . add ( int ( splitted_vlan ) ) else : raise Exception ( self . __class__ . __name__ , "Wrong VLAN number detected {}" . format ( splitted_vlan ) ) else : if self . IS_VLAN_RANGE_SUPPORTED : if validate_vlan_range ( splitted_vlan ) : result . add ( splitted_vlan ) else : raise Exception ( self . __class__ . __name__ , "Wrong VLANs range detected {}" . format ( vlan_str ) ) else : start , end = map ( int , splitted_vlan . split ( "-" ) ) if validate_vlan_number ( start ) and validate_vlan_number ( end ) : if start > end : start , end = end , start for vlan in range ( start , end + 1 ) : result . add ( vlan ) else : raise Exception ( self . __class__ . __name__ , "Wrong VLANs range detected {}" . format ( vlan_str ) ) return map ( str , list ( result ) ) | Get VLAN list from input string |
50,210 | def render_view ( view_name , ** args ) : try : root_xml = get_view_root ( view_name ) return render ( root_xml , ** args ) except CoreError as error : error . add_view_info ( ViewInfo ( view_name , None ) ) raise except : info = exc_info ( ) error = ViewError ( 'Unknown error occured during rendering' , ViewInfo ( view_name , None ) ) error . add_cause ( info [ 1 ] ) raise error from info [ 1 ] | Process view and return root Node |
50,211 | def get_view_root ( view_name : str ) -> XmlNode : try : path = join ( deps . views_folder , '{0}.{1}' . format ( view_name , deps . view_ext ) ) parser = Parser ( ) if path not in _XML_CACHE : with open ( path , 'rb' ) as xml_file : _XML_CACHE [ path ] = parser . parse ( xml_file , view_name ) return _XML_CACHE [ path ] except FileNotFoundError as error : error = ViewError ( 'View is not found' ) error . add_info ( 'View name' , view_name ) error . add_info ( 'Path' , path ) raise error except CoreError as error : error . add_view_info ( ViewInfo ( view_name , None ) ) raise except : info = exc_info ( ) error = ViewError ( 'Unknown error occured during parsing xml' , ViewInfo ( view_name , None ) ) error . add_cause ( info [ 1 ] ) raise error from info [ 1 ] | Parses xml file and return root XmlNode |
50,212 | def import_global ( node : Node , key : str , path : Any ) : node . node_globals [ key ] = import_path ( path ) | Import passed module class function full name and stores it to node s globals |
50,213 | def inject_global ( node : Node , global_key : str , inject_key : Any ) : value = get_current_scope ( ) . container . get ( inject_key ) set_global ( node , global_key , value ) | Resolves passed dependency and stores it to node s globals |
50,214 | def set_global ( node : Node , key : str , value : Any ) : node . node_globals [ key ] = value | Adds passed value to node s globals |
50,215 | def call ( node : Node , key : str , value : Any ) : value = _to_list ( value ) if not value or not isinstance ( value [ - 1 ] , dict ) : value . append ( { } ) args = value [ 0 : - 1 ] kwargs = value [ - 1 ] node . __dict__ [ key ] ( * args , ** kwargs ) | Calls node or node instance method |
50,216 | def iter_annotations ( self ) : section_map = defaultdict ( list ) for rule in self . app . url_map . iter_rules ( ) : if rule . endpoint == 'static' : continue view_function = self . app . view_functions . get ( rule . endpoint ) if view_function is None : continue view_class = getattr ( view_function , 'view_class' , None ) if view_class is None : continue annotations = [ ] for method_name in HTTP_METHODS : method = getattr ( view_class , method_name , None ) if not method : continue annotation = ResourceAnnotation ( method , method_name , method . _doctor_title ) annotations . append ( annotation ) if annotations : heading = self . _get_annotation_heading ( view_class , str ( rule ) ) section_map [ heading ] . append ( ( rule , view_class , annotations ) ) for heading in sorted ( section_map . keys ( ) ) : for item in section_map [ heading ] : rule , view_class , annotations = item yield ( heading , rule , view_class , annotations ) | Yield a tuple for each Flask handler containing annotated methods . |
50,217 | def request ( self , rule , view_class , annotation ) : headers = self . _get_headers ( rule , annotation ) example_values = self . _get_example_values ( rule , annotation ) if annotation . http_method . upper ( ) in ( 'DELETE' , 'GET' ) : for key , value in list ( example_values . items ( ) ) : if isinstance ( value , ( dict , list ) ) : example_values [ key ] = json . dumps ( value ) _ , path = rule . build ( example_values , append_unknown = True ) if annotation . http_method . upper ( ) not in ( 'DELETE' , 'GET' ) : parsed_path = parse . urlparse ( path ) path = parsed_path . path params = example_values else : params = { } method_name = annotation . http_method . lower ( ) method = getattr ( self . test_client , method_name ) if method_name in ( 'post' , 'put' ) : response = method ( path , data = json . dumps ( params ) , headers = headers , content_type = 'application/json' ) else : response = method ( path , data = params , headers = headers ) return { 'url' : '/' . join ( [ self . url_prefix , path . lstrip ( '/' ) ] ) , 'method' : annotation . http_method . upper ( ) , 'params' : params , 'response' : response . data , } | Make a request against the app . |
50,218 | def release ( self , key : str , callback : Callable [ [ Any , Any ] , None ] ) : try : self . _callbacks [ key ] . remove ( callback ) except ( KeyError , ValueError ) : pass | Releases callback from key changes |
50,219 | def inherit ( self , parent ) : if self . _parent == parent : return if self . _parent : self . _parent . release_all ( self . _parent_changed ) self_values = { key : self . _container [ key ] for key in self . _own_keys } self . _container = { ** parent . to_dictionary ( ) , ** self_values } self . _parent = parent self . _parent . observe_all ( self . _parent_changed ) | Inherit passed dictionary |
50,220 | def observe_all ( self , callback : Callable [ [ str , Any , Any ] , None ] ) : self . _all_callbacks . append ( callback ) | Subscribes to all keys changes |
50,221 | def release_all ( self , callback : Callable [ [ str , Any , Any ] , None ] ) : self . _all_callbacks . remove ( callback ) | Releases callback from all keys changes |
50,222 | def remove_key ( self , key ) : try : self . _own_keys . discard ( key ) if self . _parent and self . _parent . has_key ( key ) : self . _container [ key ] = self . _parent [ key ] else : del self . _container [ key ] except KeyError : pass | Remove own key value |
50,223 | def add_rule ( self , binding_type : str , rule : BindingRule ) : if binding_type not in self . _rules : self . _rules [ binding_type ] = [ ] self . _rules [ binding_type ] . insert ( 0 , rule ) | Adds new rule |
50,224 | def find_rule ( self , binding_type : str , ** args ) : try : rules = self . _rules [ binding_type ] return next ( rule for rule in rules if rule . suitable ( ** args ) ) except ( KeyError , StopIteration ) : return None | Finds rule by binding type and args |
50,225 | def apply ( self , binding_type , ** args ) : rule = self . find_rule ( binding_type , ** args ) if rule is None : error = BindingError ( 'Binding rule is not found' ) error . add_info ( 'Binding type' , binding_type ) error . add_info ( 'args' , args ) raise error binding = rule . apply ( ** args ) if binding : args [ 'node' ] . add_binding ( rule . apply ( ** args ) ) | Returns apply function |
50,226 | def get_expression_target ( expression : Expression , expr_vars : InheritedDict ) -> BindingTarget : root = expression . get_object_tree ( ) if len ( root . children ) != 1 or not PROPERTY_EXPRESSION_REGEX . fullmatch ( expression . code ) : error = BindingError ( 'Expression should be property expression' ) error . add_info ( 'Expression' , expression . code ) raise error if root . children [ 0 ] . children : return PropertyExpressionTarget ( expression , expr_vars ) return GlobalValueExpressionTarget ( expression , expr_vars ) | Factory method to create expression target |
50,227 | def on_change ( self , value ) : self . _modifier ( self . inst , self . prop , value ) | Calls modifier on instance with passed value |
50,228 | def destroy ( self ) : self . _observable . release ( self . _key , self . _callback ) self . _observable = None self . _key = None self . _callback = None | Unsubscribes callback from observable |
50,229 | def args ( ) : parser = argparse . ArgumentParser ( description = 'Run the Furious Examples.' ) parser . add_argument ( '--gae-sdk-path' , metavar = 'S' , dest = "gae_lib_path" , default = "/usr/local/google_appengine" , help = 'path to the GAE SDK' ) parser . add_argument ( 'url' , metavar = 'U' , default = "" , nargs = 1 , help = "the endpoint to run" ) return parser . parse_args ( ) | Add and parse the arguments for the script . |
50,230 | def setup ( options ) : sys . path . insert ( 0 , options . gae_lib_path ) from dev_appserver import fix_sys_path fix_sys_path ( ) | Grabs the gae_lib_path from the options and inserts it into the first index of the sys . path . Then calls GAE s fix_sys_path to get all the proper GAE paths included . |
50,231 | def run ( options ) : from google . appengine . tools import appengine_rpc from google . appengine . tools import appcfg source = 'furious' user_agent = appcfg . GetUserAgent ( ) server = appengine_rpc . HttpRpcServer ( 'localhost:8080' , lambda : ( 'test@example.com' , 'password' ) , user_agent , source , secure = False ) url = "/" if options . url : url += options . url [ 0 ] server . _DevAppServerAuthenticate ( ) server . Send ( url , content_type = "text/html; charset=utf-8" , payload = None ) | Run the passed in url of the example using GAE s rpc runner . |
50,232 | def has ( self , querypart_name , value = None ) : querypart = self . _queryparts . get ( querypart_name ) if not querypart : return False if not querypart . is_set : return False if value : return querypart . has ( value ) return True | Returns True if querypart_name with value is set . |
50,233 | def execute ( self , parameters : dict = None ) : try : parameters = { } if parameters is None else parameters return eval ( self . _compiled_code , parameters , { } ) except : info = exc_info ( ) error = CompilationError ( 'Error occurred in expression execution' , self . code ) error . add_cause ( info [ 1 ] ) raise error from info [ 1 ] | Executes expression with passed parameters and returns result |
50,234 | def aborting_function ( ) : import random logging . info ( 'In aborting_function' ) if random . random ( ) < .5 : from furious . errors import AbortAndRestart logging . info ( 'Getting ready to restart' ) raise AbortAndRestart ( ) logging . info ( 'No longer restarting' ) | There is a 50% chance that this function will AbortAndRestart or complete successfully . |
50,235 | def strip_ansi ( state ) : stu_res = _strip_ansi ( state . student_result ) return state . to_child ( student_result = stu_res ) | Remove ANSI escape codes from student result . |
50,236 | def has_code ( state , text , incorrect_msg = "The checker expected to find `{{text}}` in your command." , fixed = False ) : stu_code = state . student_code res = text in stu_code if fixed else re . search ( text , stu_code ) if not res : _msg = state . build_message ( incorrect_msg , fmt_kwargs = { 'text' : text } ) state . do_test ( _msg ) return state | Check whether the student code contains text . |
50,237 | def has_output ( state , text , incorrect_msg = "The checker expected to find {{'' if fixed else 'the pattern '}}`{{text}}` in the output of your command." , fixed = False , strip_ansi = True ) : stu_output = state . student_result if strip_ansi : stu_output = _strip_ansi ( stu_output ) res = text in stu_output if fixed else re . search ( text , stu_output ) if not res : _msg = state . build_message ( incorrect_msg , fmt_kwargs = { 'text' : text , 'fixed' : fixed } ) state . do_test ( _msg ) return state | Check whether student output contains specific text . |
50,238 | def has_cwd ( state , dir , incorrect_msg = "Your current working directory should be `{{dir}}`. Use `cd {{dir}}` to navigate there." ) : expr = "[[ $PWD == '{}' ]]" . format ( dir ) _msg = state . build_message ( incorrect_msg , fmt_kwargs = { 'dir' : dir } ) has_expr_exit_code ( state , expr , output = "0" , incorrect_msg = _msg ) return state | Check whether the student is in the expected directory . |
50,239 | def heartbeat ( self , status_info ) : for field in 'role' , 'ttl' , 'load' : if not field in status_info : raise Exception ( 'status_info is missing required field %s' , repr ( field ) ) val = status_info [ 'ttl' ] if not ( isinstance ( val , float ) or isinstance ( val , int ) ) or val <= 0 : raise Exception ( 'ttl must be a number > 0' ) updated_status_info = dict ( status_info ) updated_status_info [ 'last_heartbeat' ] = r . now ( ) if not 'first_heartbeat' in updated_status_info : updated_status_info [ 'first_heartbeat' ] = updated_status_info [ 'last_heartbeat' ] if not 'host' in updated_status_info : updated_status_info [ 'host' ] = socket . gethostname ( ) if not 'pid' in updated_status_info : updated_status_info [ 'pid' ] = os . getpid ( ) try : result = self . rr . table ( self . table ) . insert ( updated_status_info , conflict = 'replace' , return_changes = True ) . run ( ) return result [ 'changes' ] [ 0 ] [ 'new_val' ] except : self . logger . error ( 'error updating service registry' , exc_info = True ) return status_info | Update service status indicating up - ness . |
50,240 | def unregister ( self , id ) : result = self . rr . table ( self . table ) . get ( id ) . delete ( ) . run ( ) if result != { 'deleted' : 1 , 'errors' : 0 , 'inserted' : 0 , 'replaced' : 0 , 'skipped' : 0 , 'unchanged' : 0 } : self . logger . warn ( 'unexpected result attempting to delete id=%s from ' 'rethinkdb services table: %s' , id , result ) | Remove the service with id id from the service registry . |
50,241 | def unique_service ( self , role , candidate = None ) : now = doublethink . utcnow ( ) if candidate is not None : candidate [ 'id' ] = role if not 'ttl' in candidate : raise Exception ( "candidate is missing required field 'ttl'" ) val = candidate [ 'ttl' ] if not ( isinstance ( val , float ) or isinstance ( val , int ) ) or val <= 0 : raise Exception ( "'ttl' must be a number > 0" ) candidate [ 'first_heartbeat' ] = now candidate [ 'last_heartbeat' ] = now if not 'host' in candidate : candidate [ 'host' ] = socket . gethostname ( ) if not 'pid' in candidate : candidate [ 'pid' ] = os . getpid ( ) result = self . rr . table ( self . table , read_mode = 'majority' ) . get ( role ) . replace ( lambda row : r . branch ( r . branch ( row , row [ 'last_heartbeat' ] > now - row [ 'ttl' ] , False ) , row , candidate ) , return_changes = 'always' ) . run ( ) new_val = result [ 'changes' ] [ 0 ] [ 'new_val' ] if all ( [ new_val . get ( k ) == candidate [ k ] for k in candidate if k not in ( 'first_heartbeat' , 'last_heartbeat' ) ] ) : del candidate [ 'first_heartbeat' ] self . rr . table ( self . table ) . get ( role ) . update ( candidate ) . run ( ) results = list ( self . rr . table ( self . table , read_mode = 'majority' ) . get_all ( role ) . filter ( lambda row : row [ 'last_heartbeat' ] > now - row [ 'ttl' ] ) . run ( ) ) if results : return results [ 0 ] else : return None | Retrieve a unique service possibly setting or heartbeating it first . |
50,242 | def healthy_services ( self , role = None ) : try : query = self . rr . table ( self . table ) if role : query = query . get_all ( role , index = 'role' ) query = query . filter ( lambda svc : r . now ( ) . sub ( svc [ "last_heartbeat" ] ) < svc [ "ttl" ] ) . order_by ( "load" ) result = query . run ( ) return result except r . ReqlNonExistenceError : return [ ] | Look up healthy services in the registry . |
50,243 | def new ( batch_size = None , ** options ) : if batch_size : new_context = AutoContext ( batch_size = batch_size , ** options ) else : new_context = Context ( ** options ) _local . get_local_context ( ) . registry . append ( new_context ) return new_context | Get a new furious context and add it to the registry . If a batch size is specified use an AutoContext which inserts tasks in batches as they are added to the context . |
50,244 | def get_current_async ( ) : local_context = _local . get_local_context ( ) if local_context . _executing_async : return local_context . _executing_async [ - 1 ] raise errors . NotInContextError ( 'Not in an _ExecutionContext.' ) | Return a reference to the currently executing Async job object or None if not in an Async job . |
50,245 | def get_current_context ( ) : local_context = _local . get_local_context ( ) if local_context . registry : return local_context . registry [ - 1 ] raise errors . NotInContextError ( 'Not in a Context.' ) | Return a reference to the current Context object . |
50,246 | def purge_stale_services ( argv = None ) : argv = argv or sys . argv arg_parser = argparse . ArgumentParser ( prog = os . path . basename ( argv [ 0 ] ) , description = ( 'doublethink-purge-stale-services: utility to periodically ' 'purge stale entries from the "services" table.' ) ) arg_parser . add_argument ( "-d" , "--rethinkdb-db" , required = True , dest = "database" , help = "A RethinkDB database containing a 'services' table" ) arg_parser . add_argument ( "-s" , "--rethinkdb-servers" , metavar = "SERVERS" , dest = "servers" , default = 'localhost' , help = "rethinkdb servers, e.g. db0.foo.org,db0.foo.org:38015,db1.foo.org" ) arg_parser . add_argument ( '-v' , '--verbose' , dest = 'log_level' , action = 'store_const' , default = logging . INFO , const = logging . DEBUG , help = ( 'verbose logging' ) ) args = arg_parser . parse_args ( argv [ 1 : ] ) logging . basicConfig ( stream = sys . stdout , level = args . log_level , format = ( '%(asctime)s %(process)d %(levelname)s %(threadName)s ' '%(name)s.%(funcName)s(%(filename)s:%(lineno)d) %(message)s' ) ) args . servers = [ srv . strip ( ) for srv in args . servers . split ( "," ) ] rethinker = doublethink . Rethinker ( servers = args . servers , db = args . database ) registry = doublethink . services . ServiceRegistry ( rethinker ) registry . purge_stale_services ( ) return 0 | Command - line utility to periodically purge stale entries from the services table . |
50,247 | def _insert_tasks ( tasks , queue , transactional = False , retry_transient_errors = True , retry_delay = RETRY_SLEEP_SECS ) : from google . appengine . api import taskqueue if not tasks : return 0 try : taskqueue . Queue ( name = queue ) . add ( tasks , transactional = transactional ) return len ( tasks ) except ( taskqueue . BadTaskStateError , taskqueue . TaskAlreadyExistsError , taskqueue . TombstonedTaskError ) : if len ( tasks ) <= 1 : return 0 reinsert = _tasks_to_reinsert ( tasks , transactional ) count = len ( reinsert ) inserted = len ( tasks ) - count inserted += _insert_tasks ( reinsert [ : count / 2 ] , queue , transactional , retry_transient_errors , retry_delay ) inserted += _insert_tasks ( reinsert [ count / 2 : ] , queue , transactional , retry_transient_errors , retry_delay ) return inserted except taskqueue . TransientError : if transactional or not retry_transient_errors : raise reinsert = _tasks_to_reinsert ( tasks , transactional ) time . sleep ( retry_delay ) taskqueue . Queue ( name = queue ) . add ( reinsert , transactional = transactional ) return len ( tasks ) | Insert a batch of tasks into the specified queue . If an error occurs during insertion split the batch and retry until they are successfully inserted . Return the number of successfully inserted tasks . |
50,248 | def _tasks_to_reinsert ( tasks , transactional ) : if transactional : return tasks return [ task for task in tasks if not task . was_enqueued ] | Return a list containing the tasks that should be reinserted based on the was_enqueued property and whether the insert is transactional or not . |
50,249 | def _task_batcher ( tasks , batch_size = None ) : from itertools import izip_longest if not batch_size : batch_size = DEFAULT_TASK_BATCH_SIZE batch_size = min ( batch_size , 100 ) args = [ iter ( tasks ) ] * batch_size return ( [ task for task in group if task ] for group in izip_longest ( * args ) ) | Batches large task lists into groups of 100 so that they can all be inserted . |
50,250 | def _handle_tasks_insert ( self , batch_size = None ) : if self . _tasks_inserted : raise errors . ContextAlreadyStartedError ( "This Context has already had its tasks inserted." ) task_map = self . _get_tasks_by_queue ( ) callbacks = self . _options . get ( 'callbacks' ) if self . _persistence_engine and callbacks : self . persist ( ) retry_transient = self . _options . get ( 'retry_transient_errors' , True ) retry_delay = self . _options . get ( 'retry_delay' , RETRY_SLEEP_SECS ) for queue , tasks in task_map . iteritems ( ) : for batch in _task_batcher ( tasks , batch_size = batch_size ) : inserted = self . _insert_tasks ( batch , queue = queue , retry_transient_errors = retry_transient , retry_delay = retry_delay ) if isinstance ( inserted , ( int , long ) ) : self . _insert_success_count += inserted self . _insert_failed_count += len ( batch ) - inserted | Convert all Async s into tasks then insert them into queues . |
50,251 | def set_event_handler ( self , event , handler ) : self . _prepare_persistence_engine ( ) callbacks = self . _options . get ( 'callbacks' , { } ) callbacks [ event ] = handler self . _options [ 'callbacks' ] = callbacks | Add an Async to be run on event . |
50,252 | def exec_event_handler ( self , event , transactional = False ) : callbacks = self . _options . get ( 'callbacks' , { } ) handler = callbacks . get ( event ) if not handler : raise Exception ( 'Handler not defined!!!' ) handler . start ( transactional = transactional ) | Execute the Async set to be run on event . |
50,253 | def load ( cls , context_id , persistence_engine = None ) : if not persistence_engine : from furious . config import get_default_persistence_engine persistence_engine = get_default_persistence_engine ( ) if not persistence_engine : raise RuntimeError ( 'Specify a valid persistence_engine to load the context.' ) return persistence_engine . load_context ( context_id ) | Load and instantiate a Context from the persistence_engine . |
50,254 | def to_dict ( self ) : import copy options = copy . deepcopy ( self . _options ) if self . _insert_tasks : options [ 'insert_tasks' ] = reference_to_path ( self . _insert_tasks ) if self . _persistence_engine : options [ 'persistence_engine' ] = reference_to_path ( self . _persistence_engine ) options . update ( { '_tasks_inserted' : self . _tasks_inserted , } ) callbacks = self . _options . get ( 'callbacks' ) if callbacks : options [ 'callbacks' ] = encode_callbacks ( callbacks ) return options | Return this Context as a dict suitable for json encoding . |
50,255 | def from_dict ( cls , context_options_dict ) : import copy context_options = copy . deepcopy ( context_options_dict ) tasks_inserted = context_options . pop ( '_tasks_inserted' , False ) insert_tasks = context_options . pop ( 'insert_tasks' , None ) if insert_tasks : context_options [ 'insert_tasks' ] = path_to_reference ( insert_tasks ) persistence_engine = context_options . pop ( 'persistence_engine' , None ) if persistence_engine : context_options [ 'persistence_engine' ] = path_to_reference ( persistence_engine ) callbacks = context_options . pop ( 'callbacks' , None ) if callbacks : context_options [ 'callbacks' ] = decode_callbacks ( callbacks ) context = cls ( ** context_options ) context . _tasks_inserted = tasks_inserted return context | Return a context job from a dict output by Context . to_dict . |
50,256 | def result ( self ) : if not self . _result : if not self . _persistence_engine : return None self . _result = self . _persistence_engine . get_context_result ( self ) return self . _result | Return the context result object pulled from the persistence_engine if it has been set . |
50,257 | def insert_tasks_ignore_duplicate_names ( tasks , queue , * args , ** kwargs ) : from google . appengine . api import taskqueue try : inserted = _insert_tasks ( tasks , queue , * args , ** kwargs ) return inserted except taskqueue . DuplicateTaskNameError : reinsert = _tasks_to_reinsert ( tasks , transactional = False ) count = len ( reinsert ) inserted = len ( tasks ) - count for task in reinsert : inserted += _insert_tasks ( [ task ] , queue , * args , ** kwargs ) return inserted | Insert a batch of tasks into a specific queue . If a DuplicateTaskNameError is raised loop through the tasks and insert the remaining ignoring and logging the duplicate tasks . |
50,258 | def build_and_start ( query , directory ) : Async ( target = grep , args = [ query , directory ] ) . start ( ) | This function will create and then start a new Async task with the default callbacks argument defined in the decorator . |
50,259 | def grep_file ( query , item ) : return [ '%s: %s' % ( item , line ) for line in open ( item ) if re . search ( query , line ) ] | This function performs the actual grep on a given file . |
50,260 | def grep ( query , directory ) : dir_contents = os . listdir ( directory ) results = [ ] for item in dir_contents : path = os . path . join ( directory , item ) if os . path . isdir ( path ) : build_and_start ( query , path ) else : if item . endswith ( '.py' ) : results . extend ( grep_file ( query , path ) ) return results | This function will search through the directory structure of the application and for each directory it finds it launches an Async task to run itself . For each . py file it finds it actually greps the file and then returns the found output . |
50,261 | def find_furious_yaml ( config_file = __file__ ) : checked = set ( ) result = _find_furious_yaml ( os . path . dirname ( config_file ) , checked ) if not result : result = _find_furious_yaml ( os . getcwd ( ) , checked ) return result | Traverse directory trees to find a furious . yaml file |
50,262 | def _find_furious_yaml ( start , checked ) : directory = start while directory not in checked : checked . add ( directory ) for fs_yaml_name in FURIOUS_YAML_NAMES : yaml_path = os . path . join ( directory , fs_yaml_name ) if os . path . exists ( yaml_path ) : return yaml_path directory = os . path . dirname ( directory ) return None | Traverse the directory tree identified by start until a directory already in checked is encountered or the path of furious . yaml is found . |
50,263 | def run_code ( node : Code , parent_node : Node = None , node_globals : InheritedDict = None , ** args ) : if not node . xml_node . text : return code = node . xml_node . text try : globs = node_globals . to_dictionary ( ) exec ( dedent ( code ) , globs ) definitions = [ ( key , value ) for key , value in globs . items ( ) if key != '__builtins__' and not node_globals . has_key ( key ) ] for key , value in definitions : parent_node . node_globals [ key ] = value except SyntaxError as err : error = _get_compilation_error ( code , 'Invalid syntax' , err , err . lineno ) raise error from err except : info = exc_info ( ) cause = info [ 1 ] line_number = extract_tb ( info [ 2 ] ) [ - 1 ] [ 1 ] error = _get_compilation_error ( code , 'Code execution is failed' , cause , line_number ) raise error from cause | Executes node content as python module and adds its definitions to globals |
50,264 | def merge ( left , right ) : merged = { } left_keys = frozenset ( left ) right_keys = frozenset ( right ) for key in left_keys - right_keys : merged [ key ] = left [ key ] for key in right_keys - left_keys : merged [ key ] = right [ key ] for key in left_keys & right_keys : left_value = left [ key ] right_value = right [ key ] if ( isinstance ( left_value , Mapping ) and isinstance ( right_value , Mapping ) ) : merged [ key ] = merge ( left_value , right_value ) else : merged [ key ] = right_value return merged | Merge two mappings objects together combining overlapping Mappings and favoring right - values |
50,265 | def pelt ( cost , length , pen = None ) : if pen is None : pen = np . log ( length ) F = np . zeros ( length + 1 ) R = np . array ( [ 0 ] , dtype = np . int ) candidates = np . zeros ( length + 1 , dtype = np . int ) F [ 0 ] = - pen for tstar in range ( 2 , length + 1 ) : cpt_cands = R seg_costs = np . zeros ( len ( cpt_cands ) ) for i in range ( 0 , len ( cpt_cands ) ) : seg_costs [ i ] = cost ( cpt_cands [ i ] , tstar ) F_cost = F [ cpt_cands ] + seg_costs F [ tstar ] , tau = find_min ( F_cost , pen ) candidates [ tstar ] = cpt_cands [ tau ] ineq_prune = [ val < F [ tstar ] for val in F_cost ] R = [ cpt_cands [ j ] for j , val in enumerate ( ineq_prune ) if val ] R . append ( tstar - 1 ) R = np . array ( R , dtype = np . int ) last = candidates [ - 1 ] changepoints = [ last ] while last > 0 : last = candidates [ last ] changepoints . append ( last ) return sorted ( changepoints ) | PELT algorithm to compute changepoints in time series |
50,266 | def normal_mean ( data , variance ) : if not isinstance ( data , np . ndarray ) : data = np . array ( data ) i_variance_2 = 1 / ( variance ** 2 ) cmm = [ 0.0 ] cmm . extend ( np . cumsum ( data ) ) cmm2 = [ 0.0 ] cmm2 . extend ( np . cumsum ( np . abs ( data ) ) ) def cost ( start , end ) : cmm2_diff = cmm2 [ end ] - cmm2 [ start ] cmm_diff = pow ( cmm [ end ] - cmm [ start ] , 2 ) i_diff = end - start diff = cmm2_diff - cmm_diff return ( diff / i_diff ) * i_variance_2 return cost | Creates a segment cost function for a time series with a Normal distribution with changing mean |
50,267 | def normal_var ( data , mean ) : if not isinstance ( data , np . ndarray ) : data = np . array ( data ) cumm = [ 0.0 ] cumm . extend ( np . cumsum ( np . power ( np . abs ( data - mean ) , 2 ) ) ) def cost ( s , t ) : dist = float ( t - s ) diff = cumm [ t ] - cumm [ s ] return dist * np . log ( diff / dist ) return cost | Creates a segment cost function for a time series with a Normal distribution with changing variance |
50,268 | def normal_meanvar ( data ) : data = np . hstack ( ( [ 0.0 ] , np . array ( data ) ) ) cumm = np . cumsum ( data ) cumm_sq = np . cumsum ( [ val ** 2 for val in data ] ) def cost ( s , t ) : ts_i = 1.0 / ( t - s ) mu = ( cumm [ t ] - cumm [ s ] ) * ts_i sig = ( cumm_sq [ t ] - cumm_sq [ s ] ) * ts_i - mu ** 2 sig_i = 1.0 / sig return ( t - s ) * np . log ( sig ) + ( cumm_sq [ t ] - cumm_sq [ s ] ) * sig_i - 2 * ( cumm [ t ] - cumm [ s ] ) * mu * sig_i + ( ( t - s ) * mu ** 2 ) * sig_i return cost | Creates a segment cost function for a time series with a Normal distribution with changing mean and variance |
50,269 | def poisson ( data ) : data = np . hstack ( ( [ 0.0 ] , np . array ( data ) ) ) cumm = np . cumsum ( data ) def cost ( s , t ) : diff = cumm [ t ] - cumm [ s ] if diff == 0 : return - 2 * diff * ( - np . log ( t - s ) - 1 ) else : return - 2 * diff * ( np . log ( diff ) - np . log ( t - s ) - 1 ) return cost | Creates a segment cost function for a time series with a Poisson distribution with changing mean
50,270 | def exponential ( data ) : data = np . hstack ( ( [ 0.0 ] , np . array ( data ) ) ) cumm = np . cumsum ( data ) def cost ( s , t ) : return - 1 * ( t - s ) * ( np . log ( t - s ) - np . log ( cumm [ t ] - cumm [ s ] ) ) return cost | Creates a segment cost function for a time series with an exponential distribution with changing mean
50,271 | def _get_layer_min_max ( self , oid_field_name ) : query_args = self . _build_query_args ( { 'f' : 'json' , 'outFields' : '' , 'outStatistics' : json . dumps ( [ dict ( statisticType = 'min' , onStatisticField = oid_field_name , outStatisticFieldName = 'THE_MIN' ) , dict ( statisticType = 'max' , onStatisticField = oid_field_name , outStatisticFieldName = 'THE_MAX' ) , ] , separators = ( ',' , ':' ) ) } ) headers = self . _build_headers ( ) url = self . _build_url ( '/query' ) response = self . _request ( 'GET' , url , params = query_args , headers = headers ) metadata = self . _handle_esri_errors ( response , "Could not retrieve min/max oid values" ) min_max_values = metadata [ 'features' ] [ 0 ] [ 'attributes' ] . values ( ) min_value = min ( min_max_values ) max_value = max ( min_max_values ) query_args = self . _build_query_args ( { 'f' : 'json' , 'outFields' : '*' , 'outStatistics' : json . dumps ( [ dict ( statisticType = 'min' , onStatisticField = oid_field_name , outStatisticFieldName = 'THE_MIN' ) , dict ( statisticType = 'max' , onStatisticField = oid_field_name , outStatisticFieldName = 'THE_MAX' ) , ] , separators = ( ',' , ':' ) ) } ) query_args = self . _build_query_args ( { 'where' : '{} = {} OR {} = {}' . format ( oid_field_name , min_value , oid_field_name , max_value ) , 'returnIdsOnly' : 'true' , 'f' : 'json' , } ) headers = self . _build_headers ( ) url = self . _build_url ( '/query' ) response = self . _request ( 'GET' , url , params = query_args , headers = headers ) oid_data = self . _handle_esri_errors ( response , "Could not check min/max values" ) if not oid_data or not oid_data . get ( 'objectIds' ) or min_value not in oid_data [ 'objectIds' ] or max_value not in oid_data [ 'objectIds' ] : raise EsriDownloadError ( 'Server returned invalid min/max' ) return ( min_value , max_value ) | Find the min and max values for the OID field . |
50,272 | def make_url ( * args , ** kwargs ) : base = "/" . join ( args ) if kwargs : return "%s?%s" % ( base , urlencode ( kwargs ) ) else : return base | Makes a URL from component parts |
50,273 | def _querystring ( self ) : kw = { } for key in self . KNOWN_QUERY_OPTIONS : val = getattr ( self , key ) if val is not None : kw [ key ] = val return kw | Get additional keyword arguments |
50,274 | def check_dataset_format ( ds_format ) : if ds_format . lower ( ) not in DATASET_FORMATS . keys ( ) : raise ValueError ( "dataset_format is expected to be one of %s. '%s' is not valid" % ( ", " . join ( DATASET_FORMATS . keys ( ) ) , ds_format ) ) | Ensure dataset format is XML or CSV |
50,275 | def dataset_format_to_extension ( ds_format ) : try : return DATASET_FORMATS [ ds_format ] except KeyError : raise ValueError ( "dataset_format is expected to be one of %s. '%s' is not valid" % ( ", " . join ( DATASET_FORMATS . keys ( ) ) , ds_format ) ) | Get the preferred Dataset format extension |
50,276 | def get_data ( ctx , study , environment , subject ) : cfg = GetDataConfigurableDataset ( GET_DATA_DATASET , study , environment , subject , params = dict ( IncludeIDs = 0 , IncludeValues = 0 ) ) if ctx . obj [ 'VERBOSE' ] : click . echo ( 'Getting data list' ) client = ctx . obj [ 'RWS' ] resp = client . send_request ( cfg ) if client . last_result . status_code != 200 : click . echo ( client . last_result . text ) return xml_pretty_print ( resp ) | Call rwscmd_getdata custom dataset to retrieve currently enterable empty fields |
50,277 | def rws_call ( ctx , method , default_attr = None ) : try : response = ctx . obj [ 'RWS' ] . send_request ( method ) if ctx . obj [ 'RAW' ] : result = ctx . obj [ 'RWS' ] . last_result . text elif default_attr is not None : result = "" for item in response : result = result + item . __dict__ [ default_attr ] + "\n" else : result = ctx . obj [ 'RWS' ] . last_result . text if ctx . obj [ 'OUTPUT' ] : ctx . obj [ 'OUTPUT' ] . write ( result . encode ( 'utf-8' ) ) else : click . echo ( result ) except RWSException as e : click . echo ( str ( e ) ) | Make request to RWS |
50,278 | def post ( ctx , odm ) : try : ctx . obj [ 'RWS' ] . send_request ( PostDataRequest ( odm . read ( ) ) ) if ctx . obj [ 'RAW' ] : click . echo ( ctx . obj [ 'RWS' ] . last_result . text ) except RWSException as e : click . echo ( e . message ) | Post ODM clinical data |
50,279 | def direct ( ctx , path ) : try : url = make_url ( ctx . obj [ 'RWS' ] . base_url , path ) resp = requests . get ( url , auth = HTTPBasicAuth ( ctx . obj [ 'USERNAME' ] , ctx . obj [ 'PASSWORD' ] ) ) click . echo ( resp . text ) except RWSException as e : click . echo ( e . message ) except requests . exceptions . HTTPError as e : click . echo ( e . message ) | Make direct call to RWS bypassing rwslib |
50,280 | def autofill ( ctx , steps , metadata , fixed , study , environment , subject ) : if metadata is not None : odm_metadata = metadata . read ( ) meta_v = etree . fromstring ( odm_metadata ) . find ( './/' + E_ODM . METADATA_VERSION . value ) . get ( A_ODM . OID . value ) else : odm_metadata = None meta_v = None fixed_values = { } if fixed is not None : for f in fixed : oid , value = f . decode ( ) . split ( ',' ) fixed_values [ oid ] = value if ctx . obj [ 'VERBOSE' ] : click . echo ( 'Fixing {} to value: {}' . format ( oid , value ) ) try : for n in range ( 0 , steps ) : if ctx . obj [ 'VERBOSE' ] : click . echo ( 'Step {}' . format ( str ( n + 1 ) ) ) subject_data = get_data ( ctx , study , environment , subject ) subject_data_odm = etree . fromstring ( subject_data ) if subject_data_odm . find ( './/' + E_ODM . CLINICAL_DATA . value ) is None : if ctx . obj [ 'VERBOSE' ] : click . echo ( 'No data found' ) break subject_meta_v = subject_data_odm . find ( './/' + E_ODM . CLINICAL_DATA . value ) . get ( A_ODM . METADATA_VERSION_OID . value ) if subject_meta_v is None : if ctx . obj [ 'VERBOSE' ] : click . echo ( 'Subject not found' ) break if meta_v != subject_meta_v : if ctx . obj [ 'VERBOSE' ] : click . echo ( 'Getting metadata version {}' . format ( subject_meta_v ) ) ctx . obj [ 'RWS' ] . send_request ( StudyVersionRequest ( study , subject_meta_v ) ) odm_metadata = ctx . obj [ 'RWS' ] . last_result . text meta_v = subject_meta_v if ctx . obj [ 'VERBOSE' ] : click . echo ( 'Generating data' ) scr = Scramble ( odm_metadata ) odm = scr . fill_empty ( fixed_values , subject_data ) if etree . fromstring ( odm ) . find ( './/' + E_ODM . ITEM_DATA . value ) is None : if ctx . obj [ 'VERBOSE' ] : click . echo ( 'No data to send' ) break ctx . obj [ 'RWS' ] . send_request ( PostDataRequest ( odm ) ) if ctx . obj [ 'RAW' ] : click . echo ( ctx . obj [ 'RWS' ] . last_result . text ) except RWSException as e : click . echo ( e . rws_error ) except requests . 
exceptions . HTTPError as e : click . echo ( e . strerror ) | Request enterable data for a subject generate data values and post back to Rave . Requires rwscmd_getdata configurable dataset to be installed on the Rave URL . |
50,281 | def check_action_type ( self , value ) : if value is not None : if not isinstance ( value , ActionType ) : raise AttributeError ( "Invalid check action %s" % value ) self . _check_action_type = value | Set the value for the CheckActionType validating input |
50,282 | def annotation_id ( self , value ) : if value in [ None , '' ] or str ( value ) . strip ( ) == '' : raise AttributeError ( "Invalid ID value supplied" ) self . _id = value | Set ID for Annotation |
50,283 | def sponsor_or_site ( self , value ) : if value not in Comment . VALID_SPONSOR_OR_SITE_RESPONSES : raise AttributeError ( "%s sponsor_or_site value of %s is not valid" % ( self . __class__ . __name__ , value ) ) self . _sponsor_or_site = value | Set Originator with validation of input |
50,284 | def status ( self , value ) : if value is not None : if not isinstance ( value , QueryStatusType ) : raise AttributeError ( "%s action type is invalid in mdsol:Query." % ( value , ) ) self . _status = value | Set Query Status |
50,285 | def getroot ( self ) : builder = ET . TreeBuilder ( ) self . build ( builder ) return builder . close ( ) | Build XML object return the root |
50,286 | def build ( self , builder ) : params = dict ( ODMVersion = "1.3" , FileType = self . filetype , CreationDateTime = self . creationdatetime , Originator = self . originator , FileOID = self . fileoid , xmlns = "http://www.cdisc.org/ns/odm/v1.3" , ) if self . granularity_type : params [ 'Granularity' ] = self . granularity_type . value if self . source_system : params [ 'SourceSystem' ] = self . source_system if self . source_system_version : params [ 'SourceSystemVersion' ] = self . source_system_version params [ 'xmlns:mdsol' ] = "http://www.mdsol.com/ns/odm/metadata" if self . description : params [ 'Description' ] = self . description builder . start ( "ODM" , params ) if self . study is not None : self . study . build ( builder ) if self . clinical_data : for clinical_data in self . clinical_data : clinical_data . build ( builder ) if self . admindata is not None : self . admindata . build ( builder ) builder . end ( "ODM" ) return builder . close ( ) | Build XML object return the root this is a copy for consistency and testing |
50,287 | def add_attribute ( self , attribute , value ) : class_name = self . __class__ . __name__ if class_name . startswith ( 'ItemData' ) : class_name = 'ItemData' if attribute not in MODMExtensionRegistry [ class_name ] . value : raise ValueError ( "Can't add {} to {}" . format ( attribute , self . __class__ . __name__ ) ) self . attributes . append ( MODMAttribute ( attribute , value ) ) | Add an attribute to the current instance |
50,288 | def mixin_params ( self , params ) : if not isinstance ( params , ( dict , ) ) : raise AttributeError ( "Cannot mixin to object of type {}" . format ( type ( params ) ) ) for attribute in self . attributes : params . update ( { attribute . tag : attribute . value } ) | Merge in the MdsolAttribute for the passed parameter |
50,289 | def last_update_time ( self , value ) : if isinstance ( value , ( datetime . datetime , ) ) : self . _last_update_time = value else : raise ValueError ( "Expect last_update_time to be a datetime" ) | Setter for the last_update_time attribute |
50,290 | def fromElement ( cls , elem ) : e_global_variables = elem . find ( ODM_NS + "GlobalVariables" ) studyname = e_global_variables . find ( ODM_NS + "StudyName" ) . text protocolname = e_global_variables . find ( ODM_NS + "ProtocolName" ) . text self = cls ( oid = elem . get ( "OID" ) , projecttype = elem . get ( MEDI_NS + "ProjectType" , "Project" ) , studyname = studyname , protocolname = protocolname , environment = getEnvironmentFromNameAndProtocol ( studyname , protocolname ) , ) return self | Read properties from an XML Element to build a StudyList Item |
50,291 | def fromElement ( cls , elem ) : self = cls ( ) self . oid = elem . get ( "OID" ) self . name = elem . get ( "Name" ) return self | Read properties from a MetaDataVersion element |
50,292 | def fromElement ( cls , elem ) : self = cls ( ) self . studyoid = elem . get ( "StudyOID" ) self . metadataversionoid = elem . get ( "MetaDataVersionOID" ) e_subjectdata = elem . findall ( ODM_NS + "SubjectData" ) [ 0 ] self . subjectkey = e_subjectdata . get ( "SubjectKey" ) e_siteref = e_subjectdata . findall ( ODM_NS + "SiteRef" ) [ 0 ] self . locationoid = e_siteref . get ( "LocationOID" ) e_links = e_subjectdata . findall ( MEDI_NS + "Link" ) for e_link in e_links : self . links . append ( e_link . get ( XLINK_NS + "href" ) ) decodes = { "yes" : True , "no" : False , "" : None } for prop in RWSSubjectListItem . STATUS_PROPERTIES : val = e_subjectdata . get ( MEDI_NS + prop , "" ) . lower ( ) setattr ( self , prop . lower ( ) , decodes . get ( val , val ) ) self . active = decodes [ e_subjectdata . get ( MEDI_NS + "SubjectActive" , "yes" ) . lower ( ) ] self . deleted = decodes [ e_subjectdata . get ( MEDI_NS + "Deleted" , "no" ) . lower ( ) ] return self | Read properties from an XML Element |
50,293 | def make_element ( builder , tag , content ) : builder . start ( tag , { } ) builder . data ( content ) builder . end ( tag ) | Make an element with this tag and text content |
50,294 | def set_single_attribute ( self , other , trigger_klass , property_name ) : if isinstance ( other , trigger_klass ) : if not hasattr ( self , property_name ) : raise AttributeError ( "%s has no property %s" % ( self . __class__ . __name__ , property_name ) ) if getattr ( self , property_name ) is None : setattr ( self , property_name , other ) else : raise ValueError ( '%s already has a %s element set.' % ( self . __class__ . __name__ , other . __class__ . __name__ , ) ) | Used to set guard the setting of an attribute which is singular and can t be set twice |
50,295 | def set_list_attribute ( self , other , trigger_klass , property_name ) : if isinstance ( other , trigger_klass ) : if not hasattr ( self , property_name ) : raise AttributeError ( "%s has no property %s" % ( self . __class__ . __name__ , property_name ) ) val = getattr ( self , property_name , [ ] ) if other in val : raise ValueError ( "%s already exists in %s" % ( other . __class__ . __name__ , self . __class__ . __name__ ) ) else : val . append ( other ) setattr ( self , property_name , val ) | Used to set guard the setting of a list attribute ensuring the same element is not added twice . |
50,296 | def get_next_start_id ( self ) : link = self . rws_connection . last_result . links . get ( "next" , None ) if link : link = link [ 'url' ] p = urlparse ( link ) start_id = int ( parse_qs ( p . query ) [ 'startid' ] [ 0 ] ) return start_id return None | If link for next result set has been passed extract it and get the next set start id |
50,297 | def example_clinical_data ( study_name , environment ) : odm = ODM ( "test system" ) ( ClinicalData ( "Mediflex" , "DEV" ) ( SubjectData ( "MDSOL" , "IJS TEST4" , transaction_type = "Insert" ) ( StudyEventData ( "SUBJECT" ) ( FormData ( "EN" , transaction_type = "Update" ) ( ItemGroupData ( ) ( ItemData ( "SUBJINIT" , "AAA" ) ( AuditRecord ( edit_point = AuditRecord . EDIT_DATA_MANAGEMENT , used_imputation_method = False , identifier = 'X2011' , include_file_oid = False ) ( UserRef ( "isparks" ) , LocationRef ( "MDSOL" ) , ReasonForChange ( "Data Entry Error" ) , DateTimeStamp ( datetime ( 2015 , 9 , 11 , 10 , 15 , 22 , 80 ) ) ) , MdsolQuery ( value = "Subject initials should be 2 chars only." , recipient = "Site from System" , status = QueryStatusType . Open ) ) , ItemData ( "SUBJID" , '001' ) ) ) ) ) ) ) return odm | Test demonstrating building clinical data |
50,298 | def getCSVReader ( data , reader_type = csv . DictReader ) : f = StringIO ( data [ : - 4 ] ) return reader_type ( f ) | Take a Rave CSV output ending with a line with just EOF on it and return a DictReader |
50,299 | def _setDatasets ( self , metadata ) : cols = list ( self . getCSVReader ( metadata ) ) cols . sort ( key = lambda x : [ x [ 'viewname' ] , int ( x [ 'ordinal' ] ) ] ) for key , grp in groupby ( cols , key = lambda x : x [ "viewname" ] ) : self . datasets [ key ] = list ( grp ) | Extract dataset definitions from CSV metadata |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.