idx
int64 0
63k
| question
stringlengths 61
4.03k
| target
stringlengths 6
1.23k
|
|---|---|---|
9,300
|
async def reset(request: web.Request) -> web.Response:
    """Execute a reset of the requested parts of the user configuration.

    Body keys: tipProbe, labwareCalibration, bootScripts. Returns 400 on
    an unknown key, 200 with an empty body on success.
    """
    body = await request.json()
    valid, bad_key = _check_reset(body)
    if not valid:
        return web.json_response(
            {'message': '{} is not a valid reset option'.format(bad_key)},
            status=400)
    log.info("Reset requested for {}".format(', '.join(body.keys())))
    if body.get('tipProbe'):
        config = rc.load()
        if ff.use_protocol_api_v2():
            config = config._replace(
                instrument_offset=rc.build_fallback_instrument_offset({}))
        else:
            config.tip_length.clear()
        rc.save_robot_settings(config)
    if body.get('labwareCalibration'):
        if ff.use_protocol_api_v2():
            labware.clear_calibrations()
        else:
            db.reset()
    if body.get('bootScripts'):
        if IS_ROBOT:
            if os.path.exists('/data/boot.d'):
                shutil.rmtree('/data/boot.d')
        else:
            log.debug('Not on pi, not removing /data/boot.d')
    return web.json_response({}, status=200)
|
Execute a reset of the requested parts of the user configuration .
|
9,301
|
async def available_resets(request: web.Request) -> web.Response:
    """Indicate which parts of the user configuration can be reset."""
    options = _settings_reset_options
    return web.json_response({'options': options}, status=200)
|
Indicate what parts of the user configuration are available for reset .
|
9,302
|
async def connections(
        for_type: Optional[CONNECTION_TYPES] = None) -> List[Dict[str, str]]:
    """Return the list of configured nmcli connections.

    If for_type is given, only connections of that type are returned.
    """
    fields = ['name', 'type', 'active']
    res, _ = await _call(['-t', '-f', ','.join(fields), 'connection', 'show'])
    found = _dict_from_terse_tabular(
        fields, res,
        transformers={'type': lambda s: s.split('-')[-1],
                      'active': lambda s: s.lower() == 'yes'})
    if for_type is None:
        return found
    return [conn for conn in found if conn['type'] == for_type.value]
|
Return the list of configured connections .
|
9,303
|
async def connection_exists(ssid: str) -> Optional[str]:
    """Return the name of an existing connection for *ssid*, or None."""
    known = await connections()
    wireless = (c['name'] for c in known if c['type'] == 'wireless')
    for name in wireless:
        out, _ = await _call(['-t', '-f', '802-11-wireless.ssid',
                              '-m', 'tabular', 'connection', 'show', name])
        if out == ssid:
            return name
    return None
|
If there is already a connection for this ssid return the name of the connection ; if there is not return None .
|
9,304
|
async def _trim_old_connections(
        new_name: str, con_type: CONNECTION_TYPES) -> Tuple[bool, str]:
    """Delete every connection of *con_type* except *new_name*.

    Returns (all removals succeeded, ';'-joined failure messages).
    """
    existing = await connections(for_type=con_type)
    stale = [c['name'] for c in existing if c['name'] != new_name]
    all_ok = True
    failures = []
    for name in stale:
        removed_ok, remove_res = await remove(name=name)
        all_ok = all_ok and removed_ok
        if removed_ok:
            log.debug("Removed old wifi connection {}".format(name))
        else:
            log.warning("Could not remove wifi connection {}: {}".format(
                name, remove_res))
            failures.append(remove_res)
    return all_ok, ';'.join(failures)
|
Delete all connections of con_type but the one specified .
|
9,305
|
def _add_eap_args(eap_args: Dict[str, str]) -> List[str]:
    """Build nmcli `con add` options for WPA-EAP (mostly the 802-1x group)."""
    eap_type = EAP_TYPES.by_qualified_name(eap_args['eapType'])
    args = ['wifi-sec.key-mgmt', 'wpa-eap',
            '802-1x.eap', eap_type.outer.value.name]
    if eap_type.inner:
        args += ['802-1x.phase2-autheap', eap_type.inner.value.name]
    for spec in eap_type.args():
        key = spec['name']
        if key not in eap_args:
            continue
        if spec['type'] == 'file':
            # file-type values must be visible on the host filesystem
            _make_host_symlink_if_necessary()
            val = 'file://' + _rewrite_key_path_to_host_path(eap_args[key])
        else:
            val = eap_args[key]
        args += ['802-1x.' + spec['nmName'], val]
    return args
|
Add configuration options suitable for an nmcli con add command for WPA - EAP configuration . These options are mostly in the 802 - 1x group .
|
9,306
|
def _build_con_add_cmd(ssid: str,
                       security_type: SECURITY_TYPES,
                       psk: Optional[str],
                       hidden: bool,
                       eap_args: Optional[Dict[str, Any]]) -> List[str]:
    """Build the nmcli `connection add` argument list for a new network.

    Raises ValueError if the security type's required argument is missing.
    """
    cmd = ['connection', 'add', 'save', 'yes', 'autoconnect', 'yes',
           'ifname', 'wlan0', 'type', 'wifi',
           'con-name', ssid, 'wifi.ssid', ssid]
    if hidden:
        cmd += ['wifi.hidden', 'true']
    if security_type == SECURITY_TYPES.WPA_PSK:
        cmd += ['wifi-sec.key-mgmt', security_type.value]
        if psk is None:
            raise ValueError('wpa-psk security type requires psk')
        cmd += ['wifi-sec.psk', psk]
    elif security_type == SECURITY_TYPES.WPA_EAP:
        if eap_args is None:
            raise ValueError('wpa-eap security type requires eap_args')
        cmd += _add_eap_args(eap_args)
    elif security_type == SECURITY_TYPES.NONE:
        pass
    else:
        raise ValueError("Bad security_type {}".format(security_type))
    return cmd
|
Build the nmcli connection add command to configure the new network .
|
9,307
|
async def iface_info(
        which_iface: NETWORK_IFACES) -> Dict[str, Optional[str]]:
    """Query nmcli for an interface's basic network configuration.

    Returns a dict with ipAddress, macAddress, gatewayAddress, state and
    type; values are None where nmcli reported no value.
    Raises ValueError on unparseable nmcli output.
    """
    _DEV_INFO_LINE_RE = re.compile(r'([\w.]+)(?:\[\d+])?:(.*)')
    _IFACE_STATE_RE = re.compile(r'\d+ \((.+)\)')
    info: Dict[str, Optional[str]] = {
        'ipAddress': None,
        'macAddress': None,
        'gatewayAddress': None,
        'state': None,
        'type': None,
    }
    fields = ['GENERAL.HWADDR', 'IP4.ADDRESS', 'IP4.GATEWAY',
              'GENERAL.TYPE', 'GENERAL.STATE']
    res, err = await _call(['--mode', 'multiline', '--escape', 'no',
                            '--terse', '--fields', ','.join(fields),
                            'dev', 'show', which_iface.value])
    field_map = {}
    for line in res.split('\n'):
        match = _DEV_INFO_LINE_RE.fullmatch(line)
        if match is None:
            raise ValueError(
                "Bad nmcli result; out: {}; err: {}".format(res, err))
        key, val = match.groups()
        # nmcli prints '--' to mean "no value"
        field_map[key] = None if val == '--' else val
    info['macAddress'] = field_map.get('GENERAL.HWADDR')
    info['ipAddress'] = field_map.get('IP4.ADDRESS')
    info['gatewayAddress'] = field_map.get('IP4.GATEWAY')
    info['type'] = field_map.get('GENERAL.TYPE')
    state_val = field_map.get('GENERAL.STATE')
    if state_val:
        state_match = _IFACE_STATE_RE.fullmatch(state_val)
        if state_match:
            info['state'] = state_match.group(1)
    return info
|
Get the basic network configuration of an interface .
|
9,308
|
def sanitize_args(cmd: List[str]) -> List[str]:
    """Return a copy of *cmd* with password values replaced by '****'.

    An element is masked when the element immediately preceding it contains
    'wifi-sec.psk' or 'password' (case-insensitive), which is how nmcli
    passes secrets (option name followed by value).

    :param cmd: the command argument list
    :return: a new list, passwords masked
    """
    # Fix: the original defined this predicate inside the loop body, rebuilding
    # the function object on every iteration; hoist it out.
    def _is_password(cmdstr: str) -> bool:
        return 'wifi-sec.psk' in cmdstr or 'password' in cmdstr.lower()

    sanitized = []
    for idx, fieldname in enumerate(cmd):
        if idx > 0 and _is_password(cmd[idx - 1]):
            sanitized.append('****')
        else:
            sanitized.append(fieldname)
    return sanitized
|
Filter the command so that it no longer contains passwords
|
9,309
|
def _dict_from_terse_tabular ( names : List [ str ] , inp : str , transformers : Dict [ str , Callable [ [ str ] , Any ] ] = { } ) -> List [ Dict [ str , Any ] ] : res = [ ] for n in names : if n not in transformers : transformers [ n ] = lambda s : s for line in inp . split ( '\n' ) : if len ( line ) < 3 : continue fields = line . split ( ':' ) res . append ( dict ( [ ( elem [ 0 ] , transformers [ elem [ 0 ] ] ( elem [ 1 ] ) ) for elem in zip ( names , fields ) ] ) ) return res
|
Parse NMCLI terse tabular output into a list of Python dict .
|
9,310
|
def reset(self):
    """Reset pipette state: no tip, no placeables, zero volume, and
    cleared tip tracking."""
    self.current_volume = 0
    self.previous_placeable = None
    self.placeables = []
    self.tip_attached = False
    self.reset_tip_tracking()
|
Resets the state of this pipette removing associated placeables setting current volume to zero and resetting tip tracking
|
9,311
|
def _associate_placeable(self, location):
    """Remember the placeable at *location* as the most recent one,
    appending it to the history unless it is already the latest entry."""
    if not location:
        return
    placeable, _ = unpack_location(location)
    self.previous_placeable = placeable
    if not self.placeables or placeable != self.placeables[-1]:
        self.placeables.append(placeable)
|
Saves a reference to a placeable
|
9,312
|
def retract(self, safety_margin=10):
    """Move the pipette's mount up and away from the deck (fast home)."""
    self.previous_placeable = None
    poses = self.instrument_mover.fast_home(self.robot.poses, safety_margin)
    self.robot.poses = poses
    return self
|
Move the pipette s mount upwards and away from the deck
|
9,313
|
def blow_out(self, location=None):
    """Push out any remaining liquid by driving the plunger to the
    calibrated 'blow_out' position; warns (but proceeds) without a tip."""
    if not self.tip_attached:
        log.warning("Cannot 'blow out' without a tip attached.")
    self.move_to(location)
    self.instrument_actuator.set_active_current(self._plunger_current)
    self.robot.poses = self.instrument_actuator.move(
        self.robot.poses,
        x=self._get_plunger_position('blow_out'))
    self.current_volume = 0
    return self
|
Force any remaining liquid to dispense by moving this pipette s plunger to the calibrated blow_out position
|
9,314
|
def return_tip(self, home_after=True):
    """Drop the current tip back into its originating tip rack position;
    warns (but proceeds) if no tip is attached or tracked."""
    if not self.tip_attached:
        log.warning("Cannot return tip without tip attached.")
    if not self.current_tip():
        self.robot.add_warning(
            'Pipette has no tip to return, dropping in place')
    self.drop_tip(self.current_tip(), home_after=home_after)
    return self
|
Drop the pipette's current tip into its originating tip rack
|
9,315
|
# Pick up a tip so the pipette can run liquid-handling commands.
# Flow (source below is flattened; left byte-identical on purpose):
#   1. warn (but continue) if a tip is already attached;
#   2. resolve the target tip: explicit location, else get_next_tip();
#   3. default presses/increment from the pipette's _pick_up_* settings;
#   4. inner _pick_up_tip closure: zero the plunger at 'bottom', then press
#      onto the tip `presses` times, each press `increment` deeper, with
#      pick-up current/speed pushed around each press; then _add_tip and
#      fast_home the mount by _pick_up_distance;
#   5. the closure call is wrapped in do_publish 'before'/'after' events.
# NOTE(review): the flattened text cannot show where the inner closure's
# suite ends, nor whether the repeated _shake_off_tips(location) call is
# inside the 'needs-pickup-shake' quirk branch (a deliberate double shake)
# — confirm against the indented original before refactoring.
def pick_up_tip ( self , location = None , presses = None , increment = None ) : if self . tip_attached : log . warning ( "There is already a tip attached to this pipette." ) if not location : location = self . get_next_tip ( ) self . current_tip ( None ) if location : placeable , _ = unpack_location ( location ) self . current_tip ( placeable ) presses = ( self . _pick_up_presses if not helpers . is_number ( presses ) else presses ) increment = ( self . _pick_up_increment if not helpers . is_number ( increment ) else increment ) def _pick_up_tip ( self , location , presses , increment ) : self . instrument_actuator . set_active_current ( self . _plunger_current ) self . robot . poses = self . instrument_actuator . move ( self . robot . poses , x = self . _get_plunger_position ( 'bottom' ) ) self . current_volume = 0 self . move_to ( self . current_tip ( ) . top ( 0 ) ) for i in range ( int ( presses ) ) : self . instrument_mover . push_speed ( ) self . instrument_mover . push_active_current ( ) self . instrument_mover . set_active_current ( self . _pick_up_current ) self . instrument_mover . set_speed ( self . _pick_up_speed ) dist = ( - 1 * self . _pick_up_distance ) + ( - 1 * increment * i ) self . move_to ( self . current_tip ( ) . top ( dist ) , strategy = 'direct' ) self . instrument_mover . pop_active_current ( ) self . instrument_mover . pop_speed ( ) self . move_to ( self . current_tip ( ) . top ( 0 ) , strategy = 'direct' ) self . _add_tip ( length = self . _tip_length ) if 'needs-pickup-shake' in self . quirks : self . _shake_off_tips ( location ) self . _shake_off_tips ( location ) self . previous_placeable = None self . robot . poses = self . instrument_mover . fast_home ( self . robot . poses , self . _pick_up_distance ) return self do_publish ( self . broker , commands . pick_up_tip , self . 
pick_up_tip , 'before' , None , None , self , location , presses , increment ) _pick_up_tip ( self , location = location , presses = presses , increment = increment ) do_publish ( self . broker , commands . pick_up_tip , self . pick_up_tip , 'after' , self , None , self , location , presses , increment ) return self
|
Pick up a tip for the Pipette to run liquid - handling commands with
|
9,316
|
# Drop the pipette's current tip.
# Flow (source below is flattened; left byte-identical on purpose):
#   1. warn (but continue) if no tip is attached;
#   2. default the location to the trash container;
#   3. for a Placeable, adjust the drop point: half a tip-length above a
#      tip 'rack', a model_offset-shifted top for a 'trash', plain top
#      otherwise;
#   4. inner _drop_tip closure: move to location, plunge to 'bottom', then
#      to 'drop_tip' at the dedicated drop-tip current/speed, shake, home
#      if requested, zero the volume, and clear tip tracking/_remove_tip;
#   5. the closure call is wrapped in do_publish 'before'/'after' events.
# NOTE(review): the end of the inner closure's suite is ambiguous in this
# flattened form — confirm against the indented original before refactoring.
def drop_tip ( self , location = None , home_after = True ) : if not self . tip_attached : log . warning ( "Cannot drop tip without a tip attached." ) if not location and self . trash_container : location = self . trash_container if isinstance ( location , Placeable ) : if 'rack' in location . get_parent ( ) . get_type ( ) : half_tip_length = self . _tip_length / 2 location = location . top ( - half_tip_length ) elif 'trash' in location . get_parent ( ) . get_type ( ) : loc , coords = location . top ( ) location = ( loc , coords + ( 0 , self . model_offset [ 1 ] , 0 ) ) else : location = location . top ( ) def _drop_tip ( location , instrument = self ) : if location : self . move_to ( location ) pos_bottom = self . _get_plunger_position ( 'bottom' ) pos_drop_tip = self . _get_plunger_position ( 'drop_tip' ) self . instrument_actuator . set_active_current ( self . _plunger_current ) self . robot . poses = self . instrument_actuator . move ( self . robot . poses , x = pos_bottom ) self . instrument_actuator . set_active_current ( self . _drop_tip_current ) self . instrument_actuator . push_speed ( ) self . instrument_actuator . set_speed ( self . _drop_tip_speed ) self . robot . poses = self . instrument_actuator . move ( self . robot . poses , x = pos_drop_tip ) self . instrument_actuator . pop_speed ( ) self . _shake_off_tips ( location ) if home_after : self . _home_after_drop_tip ( ) self . current_volume = 0 self . current_tip ( None ) self . _remove_tip ( length = self . _tip_length ) do_publish ( self . broker , commands . drop_tip , self . drop_tip , 'before' , None , None , self , location ) _drop_tip ( location ) do_publish ( self . broker , commands . drop_tip , self . drop_tip , 'after' , self , None , self , location ) return self
|
Drop the pipette s current tip
|
9,317
|
def home(self):
    """Home the pipette's plunger axis and mount, publishing before/after
    command events around the motion."""
    def _home(mount):
        # zero out state, then home the actuator and the mount mover
        self.current_volume = 0
        self.instrument_actuator.set_active_current(self._plunger_current)
        self.robot.poses = self.instrument_actuator.home(self.robot.poses)
        self.robot.poses = self.instrument_mover.home(self.robot.poses)
        self.previous_placeable = None

    do_publish(self.broker, commands.home, _home,
               'before', None, None, self.mount)
    _home(self.mount)
    do_publish(self.broker, commands.home, _home,
               'after', self, None, self.mount)
    return self
|
Home the pipette s plunger axis during a protocol run
|
9,318
|
def calibrate_plunger(self, top=None, bottom=None,
                      blow_out=None, drop_tip=None):
    """Set calibration values for the plunger positions; only the
    positions passed (non-None) are updated. Returns self."""
    candidates = {'top': top, 'bottom': bottom,
                  'blow_out': blow_out, 'drop_tip': drop_tip}
    for key, value in candidates.items():
        if value is not None:
            self.plunger_positions[key] = value
    return self
|
Set calibration values for the pipette plunger .
|
9,319
|
def _get_plunger_position(self, position):
    """Return the calibrated coordinate of a plunger *position*.

    Raises RuntimeError if the position name is unknown or the stored
    value is not a number (i.e. not yet calibrated).
    """
    try:
        value = self.plunger_positions[position]
        if not helpers.is_number(value):
            raise RuntimeError(
                'Plunger position "{}" not yet calibrated'.format(position))
        return value
    except KeyError:
        raise RuntimeError(
            'Plunger position "{}" does not exist'.format(position))
|
Returns the calibrated coordinate of a given plunger position
|
9,320
|
def engage(self, height):
    """Move the magnet to *height* mm above home.

    Raises ValueError when height is outside [0, MAX_ENGAGE_HEIGHT].
    """
    if height > MAX_ENGAGE_HEIGHT or height < 0:
        raise ValueError('Invalid engage height. Should be 0 to {}'.format(
            MAX_ENGAGE_HEIGHT))
    self._driver.move(height)
    self._engaged = True
|
Move the magnet to a specific height in mm from home position
|
9,321
|
def break_down_travel(p1, target, increment=5, mode='absolute'):
    """Break a travel from p1 toward target into pieces of *increment*.

    In 'absolute' mode returns intermediate positions; in 'relative' mode
    (where *target* is itself the heading) returns the step vectors.
    """
    heading = target if mode == 'relative' else target - p1
    length = heading.length()
    whole_steps = length / increment
    remainder = length % increment
    vector_step = Vector(0, 0, 0)
    if whole_steps > 0:
        vector_step = heading / whole_steps
    vector_remainder = vector_step * (remainder / increment)
    pieces = []
    if mode == 'absolute':
        cursor = p1
        for _ in range(int(whole_steps)):
            cursor = cursor + vector_step
            pieces.append(cursor)
        pieces.append(cursor + vector_remainder)
    else:
        pieces.extend(vector_step for _ in range(int(whole_steps)))
        pieces.append(vector_remainder)
    return pieces
|
given two points p1 and target this returns a list of incremental positions or relative steps
|
9,322
|
def _expand_for_carryover ( max_vol , plan , ** kwargs ) : max_vol = float ( max_vol ) carryover = kwargs . get ( 'carryover' , True ) if not carryover : return plan new_transfer_plan = [ ] for p in plan : source = p [ 'aspirate' ] [ 'location' ] target = p [ 'dispense' ] [ 'location' ] volume = float ( p [ 'aspirate' ] [ 'volume' ] ) while volume > max_vol * 2 : new_transfer_plan . append ( { 'aspirate' : { 'location' : source , 'volume' : max_vol } , 'dispense' : { 'location' : target , 'volume' : max_vol } } ) volume -= max_vol if volume > max_vol : volume /= 2 new_transfer_plan . append ( { 'aspirate' : { 'location' : source , 'volume' : float ( volume ) } , 'dispense' : { 'location' : target , 'volume' : float ( volume ) } } ) new_transfer_plan . append ( { 'aspirate' : { 'location' : source , 'volume' : float ( volume ) } , 'dispense' : { 'location' : target , 'volume' : float ( volume ) } } ) return new_transfer_plan
|
Divide volumes larger than maximum volume into separate transfers
|
9,323
|
def _compress_for_repeater(max_vol, plan, **kwargs):
    """Compress the transfer plan for 'distribute'/'consolidate' modes;
    any other mode returns the plan unchanged."""
    mode = kwargs.get('mode', 'transfer')
    handlers = {
        'distribute': _compress_for_distribute,
        'consolidate': _compress_for_consolidate,
    }
    handler = handlers.get(mode)
    if handler is None:
        return plan
    return handler(float(max_vol), plan, **kwargs)
|
Reduce size of transfer plan if mode is distribute or consolidate
|
9,324
|
def _compress_for_distribute ( max_vol , plan , ** kwargs ) : source = None new_source = None a_vol = 0 temp_dispenses = [ ] new_transfer_plan = [ ] disposal_vol = kwargs . get ( 'disposal_vol' , 0 ) max_vol = max_vol - disposal_vol def _append_dispenses ( ) : nonlocal a_vol , temp_dispenses , new_transfer_plan , source if not temp_dispenses : return added_volume = 0 if len ( temp_dispenses ) > 1 : added_volume = disposal_vol new_transfer_plan . append ( { 'aspirate' : { 'location' : source , 'volume' : a_vol + added_volume } } ) for d in temp_dispenses : new_transfer_plan . append ( { 'dispense' : { 'location' : d [ 'location' ] , 'volume' : d [ 'volume' ] } } ) a_vol = 0 temp_dispenses = [ ] for p in plan : this_vol = p [ 'aspirate' ] [ 'volume' ] new_source = p [ 'aspirate' ] [ 'location' ] if ( new_source is not source ) or ( this_vol + a_vol > max_vol ) : _append_dispenses ( ) source = new_source a_vol += this_vol temp_dispenses . append ( p [ 'dispense' ] ) _append_dispenses ( ) return new_transfer_plan
|
Combines as many dispenses as can fit within the maximum volume
|
9,325
|
def _compress_for_consolidate ( max_vol , plan , ** kwargs ) : target = None new_target = None d_vol = 0 temp_aspirates = [ ] new_transfer_plan = [ ] def _append_aspirates ( ) : nonlocal d_vol , temp_aspirates , new_transfer_plan , target if not temp_aspirates : return for a in temp_aspirates : new_transfer_plan . append ( { 'aspirate' : { 'location' : a [ 'location' ] , 'volume' : a [ 'volume' ] } } ) new_transfer_plan . append ( { 'dispense' : { 'location' : target , 'volume' : d_vol } } ) d_vol = 0 temp_aspirates = [ ] for i , p in enumerate ( plan ) : this_vol = p [ 'aspirate' ] [ 'volume' ] new_target = p [ 'dispense' ] [ 'location' ] if ( new_target is not target ) or ( this_vol + d_vol > max_vol ) : _append_aspirates ( ) target = new_target d_vol += this_vol temp_aspirates . append ( p [ 'aspirate' ] ) _append_aspirates ( ) return new_transfer_plan
|
Combines as many aspirates as can fit within the maximum volume
|
9,326
|
def _migrate0to1(previous: Mapping[str, Any]) -> SettingsMap:
    """Migrate to version 1 of the feature-flags file: replace old IDs
    with new IDs and collapse anything that is not exactly True to None."""
    migrated: SettingsMap = {}
    for setting in settings:
        # a flag counts as enabled if either its new or its old ID was True
        enabled = (previous.get(setting.id) is True
                   or previous.get(setting.old_id) is True)
        migrated[setting.id] = True if enabled else None
    return migrated
|
Migrate to version 1 of the feature flags file . Replaces old IDs with new IDs and sets any False values to None
|
9,327
|
def _migrate(data: Mapping[str, Any]) -> SettingsData:
    """Run any pending migrations on the JSON file data.

    Returns (migrated settings dict, version migrated to).
    """
    migrated = dict(data)
    version = migrated.pop('_version', 0)
    target_version = len(_MIGRATIONS)
    pending = _MIGRATIONS[version:]
    if len(pending) > 0:
        log.info("Migrating advanced settings from version {} to {}"
                 .format(version, target_version))
    for migration in pending:
        migrated = migration(migrated)
    return migrated, target_version
|
Check the version integer of the JSON file data a run any necessary migrations to get us to the latest file format . Returns dictionary of settings and version migrated to
|
9,328
|
def _ensure_values(data: Mapping[str, Any]) -> Tuple[Dict[str, Any], bool]:
    """Fill in defaults for missing or mistyped config keys.

    Returns (ensured dict, whether the file should be rewritten).
    """
    ensured = {}
    needs_write = False
    for keyname, typekind, default in REQUIRED_DATA:
        if keyname not in data:
            LOG.debug(f"Defaulted config value {keyname} to {default}")
            ensured[keyname] = default
            needs_write = True
        elif not isinstance(data[keyname], typekind):
            LOG.warning(
                f"Config value {keyname} was {type(data[keyname])} not"
                f" {typekind}, defaulted to {default}")
            ensured[keyname] = default
            needs_write = True
        else:
            ensured[keyname] = data[keyname]
    return ensured, needs_write
|
Make sure we have appropriate keys and say if we should write
|
9,329
|
def load_from_path(path: str) -> Config:
    """Load a Config from *path*, ensuring its structure.

    Missing keys are defaulted (and written back); signature checking is
    disabled when the configured signing cert is absent.
    """
    raw = _ensure_load(path) or {}
    values, should_write = _ensure_values(raw)
    values.update({'path': path})
    config = Config(**values)
    if config.signature_required and not os.path.exists(
            config.update_cert_path):
        LOG.warning(
            f"No signing cert is present in {config.update_cert_path}, "
            "code signature checking disabled")
        config = config._replace(signature_required=False)
        config = config._replace(update_cert_path=DEFAULT_CERT_PATH)
    if should_write:
        save_to_path(path, config)
    return config
|
Load a config from a file and ensure its structure . Writes a default if necessary
|
9,330
|
def _get_path(args_path: Optional[str]) -> str:
    """Pick the config path: CLI argument first, then the environment
    variable, then the default path."""
    env_path = os.getenv(PATH_ENVIRONMENT_VARIABLE)
    for path, source in ((args_path, 'arg'), (env_path, 'env')):
        if path:
            LOG.debug(f"config.load: using config path {path} from {source}")
            return path
        LOG.debug(f"config.load: skipping {source} (path None)")
    return DEFAULT_PATH
|
Find the valid path from args then env then default
|
9,331
|
def set_stage(self, stage: Stages):
    """Record a new update-session stage (must be a member of Stages)."""
    assert stage in Stages
    # log the transition before overwriting the old stage
    LOG.info(f'Update session: stage {self._stage.name}->{stage.name}')
    self._stage = stage
|
Convenience method to set the stage and lookup message
|
9,332
|
def set_error(self, error_shortmsg: str, error_longmsg: str):
    """Record an error (short and long message) and move to the ERROR stage."""
    LOG.error(f"Update session: error in stage {self._stage.name}: "
              f"{error_shortmsg}: {error_longmsg}")
    self._error = Value(error_shortmsg, error_longmsg)
    self.set_stage(Stages.ERROR)
|
Set the stage to error and add a message
|
9,333
|
def message(self) -> str:
    """The human-readable message for the current stage, or for the
    recorded error when in the error state."""
    if not self.is_error:
        return self._stage.value.human
    assert self._error
    return self._error.human
|
The human readable message of the current stage
|
9,334
|
def descendants(state, obj, level=0):
    """Return a flattened DFS of the subtree under *obj* as a list of
    (descendant, depth) tuples; children come from state[obj].children."""
    flat = []
    for child in state[obj].children:
        flat.append((child, level))
        flat.extend(descendants(state, child, level + 1))
    return flat
|
Returns a flattened list of tuples from a DFS traversal of the subtree under the object, each containing a descendant object and its depth
|
9,335
|
def gantry_axes(cls) -> Tuple['Axis', 'Axis', 'Axis', 'Axis']:
    """The axes tied to the gantry, which require the deck calibration
    transform: X, Y and both mount Z axes (Z, A)."""
    return cls.X, cls.Y, cls.Z, cls.A
|
The axes which are tied to the gantry and require the deck calibration transform
|
9,336
|
# Update config and the pose tree with the instrument's x/y offsets and tip
# length, from the delta between the configured probe center and
# measured_center; persists the updated config and returns robot.config.
# Steps (source below is flattened; left byte-identical on purpose):
#   dx,dy,dz = measured_center - config.tip_probe.center; the mount/type
#   instrument offset is shifted by (-dx, -dy) with z forced to 0.0; the
#   instrument's tip length grows by dz; the config is saved via
#   robot_configs.save_robot_settings and the pose tree re-based by
#   (-dx, -dy, 0).
# NOTE(review): the annotation promises Tuple[Point, float] but the code
# returns robot.config — confirm which is intended before changing either.
def update_instrument_config ( instrument , measured_center ) -> Tuple [ Point , float ] : from copy import deepcopy from opentrons . trackers . pose_tracker import update robot = instrument . robot config = robot . config instrument_offset = deepcopy ( config . instrument_offset ) dx , dy , dz = array ( measured_center ) - config . tip_probe . center log . debug ( "This is measured probe center dx {}" . format ( Point ( dx , dy , dz ) ) ) old_x , old_y , _ = instrument_offset [ instrument . mount ] [ instrument . type ] instrument_offset [ instrument . mount ] [ instrument . type ] = ( old_x - dx , old_y - dy , 0.0 ) tip_length = deepcopy ( config . tip_length ) tip_length [ instrument . name ] = tip_length [ instrument . name ] + dz config = config . _replace ( instrument_offset = instrument_offset ) . _replace ( tip_length = tip_length ) robot . config = config log . debug ( "Updating config for {} instrument" . format ( instrument . mount ) ) robot_configs . save_robot_settings ( config ) new_coordinates = change_base ( robot . poses , src = instrument , dst = instrument . instrument_mover ) - Point ( dx , dy , 0.0 ) robot . poses = update ( robot . poses , instrument , new_coordinates ) return robot . config
|
Update config and pose tree with instrument s x and y offsets and tip length based on delta between probe center and measured_center persist updated config and return it
|
9,337
|
def read_pipette_id(self, mount) -> Optional[str]:
    """Read the attached pipette's unique ID string.

    Returns a fixed dummy ID when simulating, and None if the raw bytes
    cannot be decoded.
    """
    if self.simulating:
        return '1234567890'
    try:
        return self._read_from_pipette(GCODES['READ_INSTRUMENT_ID'], mount)
    except UnicodeDecodeError:
        log.exception("Failed to decode pipette ID string:")
        return None
|
Reads in an attached pipette s ID The ID is unique to this pipette and is a string of unknown length
|
9,338
|
def read_pipette_model(self, mount) -> Optional[str]:
    """Read the attached pipette's MODEL string; None when simulating.

    Normalizes the raw value: no '_v' suffix gets '_v1' appended, and a
    '_v13' suffix is rewritten to '_v1.3'.
    """
    if self.simulating:
        return None
    res = self._read_from_pipette(GCODES['READ_INSTRUMENT_MODEL'], mount)
    if res and '_v' not in res:
        res = res + '_v1'
    elif res and '_v13' in res:
        res = res.replace('_v13', '_v1.3')
    return res
|
Reads an attached pipette s MODEL The MODEL is a unique string for this model of pipette
|
9,339
|
def get_fw_version(self):
    """Return Smoothieware's parsed build version, or 'Virtual Smoothie'
    when simulating."""
    if self.simulating:
        return 'Virtual Smoothie'
    raw = self._send_command('version')
    # take the first comma field, then everything after the last ':'
    parsed = raw.split(',')[0].split(':')[-1].strip()
    return parsed.replace('NOMSD', '')
|
Queries Smoothieware for its build version and returns the parsed response.
|
9,340
|
def position(self):
    """Cached target position keyed by upper-cased axis name.

    Returns self._position rather than querying the hardware, since
    movement and home commands are blocking and assumed to land where
    commanded.
    """
    return {axis.upper(): value for axis, value in self._position.items()}
|
Instead of sending M114 . 2 we are storing target values in self . _position since movement and home commands are blocking and assumed to go the correct place .
|
9,341
|
def set_active_current(self, settings):
    """Update the per-axis active (moving) amperage settings and apply
    them immediately for axes that are currently active and whose
    amperage actually changed."""
    self._active_current_settings['now'].update(settings)
    to_apply = {
        axis: amperage
        for axis, amperage in self._active_current_settings['now'].items()
        if self._active_axes.get(axis) is True
        and self.current[axis] != amperage
    }
    if to_apply:
        self._save_current(to_apply, axes_active=True)
|
Sets the amperage of each motor for when it is activated by driver . Values are initialized from the robot_config . high_current values and can then be changed through this method by other parts of the API .
|
9,342
|
def set_dwelling_current(self, settings):
    """Update the per-axis dwelling (idle) amperage settings and apply
    them immediately for axes that are currently inactive and whose
    amperage actually changed."""
    self._dwelling_current_settings['now'].update(settings)
    to_apply = {
        axis: amps
        for axis, amps in self._dwelling_current_settings['now'].items()
        if self._active_axes.get(axis) is False
        and self.current[axis] != amps
    }
    if to_apply:
        self._save_current(to_apply, axes_active=False)
|
Sets the amperage of each motor for when it is dwelling . Values are initialized from the robot_config . log_current values and can then be changed through this method by other parts of the API .
|
9,343
|
def _generate_current_command(self):
    """Build the gcode string that applies this driver's axis currents,
    followed by a short dwell for the new currents to take effect."""
    values = ['{}{}'.format(axis, value)
              for axis, value in sorted(self.current.items())]
    current_cmd = '{} {}'.format(GCODES['SET_CURRENT'], ' '.join(values))
    command = '{currents} {code}P{seconds}'.format(
        currents=current_cmd,
        code=GCODES['DWELL'],
        seconds=CURRENT_CHANGE_DELAY)
    log.debug("_generate_current_command: {}".format(command))
    return command
|
Returns a constructed GCode string that contains this driver s axis - current settings plus a small delay to wait for those settings to take effect .
|
9,344
|
def disengage_axis(self, axes):
    """Disable the stepper drivers for the given axes and mark them
    disengaged; Smoothieware re-engages motors automatically on the next
    home or move command."""
    requested = ''.join(set(axes.upper()) & set(AXES))
    if not requested:
        return
    log.debug("disengage_axis: {}".format(requested))
    self._send_command(GCODES['DISENGAGE_MOTOR'] + requested)
    for axis in requested:
        self.engaged_axes[axis] = False
|
Disable the stepper - motor - driver s 36v output to motor This is a safe GCODE to send to Smoothieware as it will automatically re - engage the motor if it receives a home or move command
|
9,345
|
def dwell_axes(self, axes):
    """Drop any of the requested axes that are currently active to their
    dwelling (low) current."""
    eligible = ''.join(set(axes) & set(AXES) - set(DISABLE_AXES))
    dwelling = {
        ax: self._dwelling_current_settings['now'][ax]
        for ax in eligible
        if self._active_axes[ax] is True
    }
    if dwelling:
        self._save_current(dwelling, axes_active=False)
|
Sets motors to low current for when they are not moving .
|
9,346
|
def _read_from_pipette ( self , gcode , mount ) -> Optional [ str ] : allowed_mounts = { 'left' : 'L' , 'right' : 'R' } mount = allowed_mounts . get ( mount ) if not mount : raise ValueError ( 'Unexpected mount: {}' . format ( mount ) ) try : self . disengage_axis ( 'BC' ) self . delay ( CURRENT_CHANGE_DELAY ) res = self . _send_command ( gcode + mount ) if res : res = _parse_instrument_data ( res ) assert mount in res return _byte_array_to_ascii_string ( res [ mount ] ) except ( ParseError , AssertionError , SmoothieError ) : pass return None
|
Read from an attached pipette s internal memory . The gcode used determines which portion of memory is read and returned .
|
9,347
|
def _write_to_pipette ( self , gcode , mount , data_string ) : allowed_mounts = { 'left' : 'L' , 'right' : 'R' } mount = allowed_mounts . get ( mount ) if not mount : raise ValueError ( 'Unexpected mount: {}' . format ( mount ) ) if not isinstance ( data_string , str ) : raise ValueError ( 'Expected {0}, not {1}' . format ( str , type ( data_string ) ) ) self . disengage_axis ( 'BC' ) self . delay ( CURRENT_CHANGE_DELAY ) byte_string = _byte_array_to_hex_string ( bytearray ( data_string . encode ( ) ) ) command = gcode + mount + byte_string log . debug ( "_write_to_pipette: {}" . format ( command ) ) self . _send_command ( command )
|
Write to an attached pipette s internal memory . The gcode used determines which portion of memory is written to .
|
9,348
|
def move ( self , target , home_flagged_axes = False ) : from numpy import isclose self . run_flag . wait ( ) def valid_movement ( coords , axis ) : return not ( ( axis in DISABLE_AXES ) or ( coords is None ) or isclose ( coords , self . position [ axis ] ) ) def create_coords_list ( coords_dict ) : return [ axis + str ( round ( coords , GCODE_ROUNDING_PRECISION ) ) for axis , coords in sorted ( coords_dict . items ( ) ) if valid_movement ( coords , axis ) ] backlash_target = target . copy ( ) backlash_target . update ( { axis : value + PLUNGER_BACKLASH_MM for axis , value in sorted ( target . items ( ) ) if axis in 'BC' and self . position [ axis ] < value } ) target_coords = create_coords_list ( target ) backlash_coords = create_coords_list ( backlash_target ) if target_coords : non_moving_axes = '' . join ( [ ax for ax in AXES if ax not in target . keys ( ) ] ) self . dwell_axes ( non_moving_axes ) self . activate_axes ( target . keys ( ) ) command = self . _generate_current_command ( ) if backlash_coords != target_coords : command += ' ' + GCODES [ 'MOVE' ] + '' . join ( backlash_coords ) command += ' ' + GCODES [ 'MOVE' ] + '' . join ( target_coords ) try : for axis in target . keys ( ) : self . engaged_axes [ axis ] = True if home_flagged_axes : self . home_flagged_axes ( '' . join ( list ( target . keys ( ) ) ) ) log . debug ( "move: {}" . format ( command ) ) self . _send_command ( command , timeout = DEFAULT_MOVEMENT_TIMEOUT ) finally : plunger_axis_moved = '' . join ( set ( 'BC' ) & set ( target . keys ( ) ) ) if plunger_axis_moved : self . dwell_axes ( plunger_axis_moved ) self . _set_saved_current ( ) self . _update_position ( target )
|
Move to the target Smoothieware coordinate along any of the size axes XYZABC .
|
9,349
|
def fast_home ( self , axis , safety_margin ) : destination = { ax : self . homed_position . get ( ax ) - abs ( safety_margin ) for ax in axis . upper ( ) } try : self . move ( destination ) except SmoothieError : pass disabled = '' . join ( [ ax for ax in AXES if ax not in axis . upper ( ) ] ) return self . home ( axis = axis , disabled = disabled )
|
home after a controlled motor stall
|
9,350
|
def unstick_axes ( self , axes , distance = None , speed = None ) : for ax in axes : if ax not in AXES : raise ValueError ( 'Unknown axes: {}' . format ( axes ) ) if distance is None : distance = UNSTICK_DISTANCE if speed is None : speed = UNSTICK_SPEED self . push_active_current ( ) self . set_active_current ( { ax : self . _config . high_current [ ax ] for ax in axes } ) self . push_axis_max_speed ( ) self . set_axis_max_speed ( { ax : speed for ax in axes } ) state_of_switches = { ax : False for ax in AXES } if not self . simulating : state_of_switches = self . switch_state homing_axes = '' . join ( [ ax for ax in axes if state_of_switches [ ax ] ] ) moving_axes = { ax : self . position [ ax ] - distance for ax in axes if ( not state_of_switches [ ax ] ) and ( ax not in homing_axes ) } try : if moving_axes : self . move ( moving_axes ) if homing_axes : self . home ( homing_axes ) finally : self . pop_active_current ( ) self . pop_axis_max_speed ( )
|
The plunger axes on OT2 can build up static friction over time and when it s cold . To get over this the robot can move that plunger at normal current and a very slow speed to increase the torque removing the static friction
|
9,351
|
def kill ( self ) : log . debug ( "kill" ) self . _smoothie_hard_halt ( ) self . _reset_from_error ( ) self . _setup ( )
|
In order to terminate Smoothie motion immediately ( including interrupting a command in progress we set the reset pin low and then back to high then call _setup method to send the RESET_FROM_ERROR Smoothie code to return Smoothie to a normal waiting state and reset any other state needed for the driver .
|
9,352
|
def home_flagged_axes ( self , axes_string ) : axes_that_need_to_home = [ axis for axis , already_homed in self . homed_flags . items ( ) if ( not already_homed ) and ( axis in axes_string ) ] if axes_that_need_to_home : axes_string = '' . join ( axes_that_need_to_home ) self . home ( axes_string )
|
Given a list of axes to check this method will home each axis if Smoothieware s internal flag sets it as needing to be homed
|
9,353
|
async def update_firmware ( self , filename : str , loop : asyncio . AbstractEventLoop = None , explicit_modeset : bool = True ) -> str : if self . simulating : return 'Did nothing (simulating)' smoothie_update . _ensure_programmer_executable ( ) if not self . is_connected ( ) : self . _connect_to_port ( ) port = self . _connection . port if explicit_modeset : self . _smoothie_programming_mode ( ) self . _connection . close ( ) update_cmd = 'lpc21isp -wipe -donotstart {0} {1} {2} 12000' . format ( filename , port , self . _config . serial_speed ) kwargs : Dict [ str , Any ] = { 'stdout' : asyncio . subprocess . PIPE } if loop : kwargs [ 'loop' ] = loop proc = await asyncio . create_subprocess_shell ( update_cmd , ** kwargs ) rd : bytes = await proc . stdout . read ( ) res = rd . decode ( ) . strip ( ) await proc . communicate ( ) self . _connection . open ( ) self . _smoothie_reset ( ) self . _setup ( ) return res
|
Program the smoothie board with a given hex file .
|
9,354
|
def load_new_labware ( container_name ) : defn = new_labware . load_definition_by_name ( container_name ) labware_id = defn [ 'otId' ] saved_offset = _look_up_offsets ( labware_id ) container = Container ( ) log . info ( f"Container name {container_name}" ) container . properties [ 'type' ] = container_name container . properties [ 'otId' ] = labware_id format = defn [ 'parameters' ] [ 'format' ] container . _coordinates = Vector ( defn [ 'cornerOffsetFromSlot' ] ) for well_name in itertools . chain ( * defn [ 'ordering' ] ) : well_obj , well_pos = _load_new_well ( defn [ 'wells' ] [ well_name ] , saved_offset , format ) container . add ( well_obj , well_name , well_pos ) return container
|
Load a labware in the new schema into a placeable .
|
9,355
|
def to_yaml ( cls , config , compact = False , indent = 2 , level = 0 ) : lines = "" if isinstance ( config , ConfigTree ) : if len ( config ) > 0 : if level > 0 : lines += '\n' bet_lines = [ ] for key , item in config . items ( ) : bet_lines . append ( '{indent}{key}: {value}' . format ( indent = '' . rjust ( level * indent , ' ' ) , key = key . strip ( '"' ) , value = cls . to_yaml ( item , compact , indent , level + 1 ) ) ) lines += '\n' . join ( bet_lines ) elif isinstance ( config , list ) : config_list = [ line for line in config if line is not None ] if len ( config_list ) == 0 : lines += '[]' else : lines += '\n' bet_lines = [ ] for item in config_list : bet_lines . append ( '{indent}- {value}' . format ( indent = '' . rjust ( level * indent , ' ' ) , value = cls . to_yaml ( item , compact , indent , level + 1 ) ) ) lines += '\n' . join ( bet_lines ) elif isinstance ( config , basestring ) : lines = config . split ( '\n' ) if len ( lines ) == 1 : lines = config else : lines = '|\n' + '\n' . join ( [ line . rjust ( level * indent , ' ' ) for line in lines ] ) elif config is None or isinstance ( config , NoneValue ) : lines = 'null' elif config is True : lines = 'true' elif config is False : lines = 'false' else : lines = str ( config ) return lines
|
Convert HOCON input into a YAML output
|
9,356
|
def to_properties ( cls , config , compact = False , indent = 2 , key_stack = [ ] ) : def escape_value ( value ) : return value . replace ( '=' , '\\=' ) . replace ( '!' , '\\!' ) . replace ( '#' , '\\#' ) . replace ( '\n' , '\\\n' ) stripped_key_stack = [ key . strip ( '"' ) for key in key_stack ] lines = [ ] if isinstance ( config , ConfigTree ) : for key , item in config . items ( ) : if item is not None : lines . append ( cls . to_properties ( item , compact , indent , stripped_key_stack + [ key ] ) ) elif isinstance ( config , list ) : for index , item in enumerate ( config ) : if item is not None : lines . append ( cls . to_properties ( item , compact , indent , stripped_key_stack + [ str ( index ) ] ) ) elif isinstance ( config , basestring ) : lines . append ( '.' . join ( stripped_key_stack ) + ' = ' + escape_value ( config ) ) elif config is True : lines . append ( '.' . join ( stripped_key_stack ) + ' = true' ) elif config is False : lines . append ( '.' . join ( stripped_key_stack ) + ' = false' ) elif config is None or isinstance ( config , NoneValue ) : pass else : lines . append ( '.' . join ( stripped_key_stack ) + ' = ' + str ( config ) ) return '\n' . join ( [ line for line in lines if len ( line ) > 0 ] )
|
Convert HOCON input into a . properties output
|
9,357
|
def convert_from_file ( cls , input_file = None , output_file = None , output_format = 'json' , indent = 2 , compact = False ) : if input_file is None : content = sys . stdin . read ( ) config = ConfigFactory . parse_string ( content ) else : config = ConfigFactory . parse_file ( input_file ) res = cls . convert ( config , output_format , indent , compact ) if output_file is None : print ( res ) else : with open ( output_file , "w" ) as fd : fd . write ( res )
|
Convert to json properties or yaml
|
9,358
|
def postParse ( self , instring , loc , token_list ) : cleaned_token_list = [ token for tokens in ( token . tokens if isinstance ( token , ConfigInclude ) else [ token ] for token in token_list if token != '' ) for token in tokens ] config_list = ConfigList ( cleaned_token_list ) return [ config_list ]
|
Create a list from the tokens
|
9,359
|
def postParse ( self , instring , loc , token_list ) : config_tree = ConfigTree ( root = self . root ) for element in token_list : expanded_tokens = element . tokens if isinstance ( element , ConfigInclude ) else [ element ] for tokens in expanded_tokens : key = tokens [ 0 ] . strip ( ) operator = '=' if len ( tokens ) == 3 and tokens [ 1 ] . strip ( ) in [ ':' , '=' , '+=' ] : operator = tokens [ 1 ] . strip ( ) values = tokens [ 2 : ] elif len ( tokens ) == 2 : values = tokens [ 1 : ] else : raise ParseSyntaxException ( "Unknown tokens {tokens} received" . format ( tokens = tokens ) ) if len ( values ) == 0 : config_tree . put ( key , '' ) else : value = values [ 0 ] if isinstance ( value , list ) and operator == "+=" : value = ConfigValues ( [ ConfigSubstitution ( key , True , '' , False , loc ) , value ] , False , loc ) config_tree . put ( key , value , False ) elif isinstance ( value , unicode ) and operator == "+=" : value = ConfigValues ( [ ConfigSubstitution ( key , True , '' , True , loc ) , ' ' + value ] , True , loc ) config_tree . put ( key , value , False ) elif isinstance ( value , list ) : config_tree . put ( key , value , False ) else : existing_value = config_tree . get ( key , None ) if isinstance ( value , ConfigTree ) and not isinstance ( existing_value , list ) : config_tree . put ( key , value , True ) elif isinstance ( value , ConfigValues ) : conf_value = value value . parent = config_tree value . key = key if isinstance ( existing_value , list ) or isinstance ( existing_value , ConfigTree ) : config_tree . put ( key , conf_value , True ) else : config_tree . put ( key , conf_value , False ) else : config_tree . put ( key , value , False ) return config_tree
|
Create ConfigTree from tokens
|
9,360
|
def merge_configs ( a , b , copy_trees = False ) : for key , value in b . items ( ) : if key in a and isinstance ( a [ key ] , ConfigTree ) and isinstance ( b [ key ] , ConfigTree ) : if copy_trees : a [ key ] = a [ key ] . copy ( ) ConfigTree . merge_configs ( a [ key ] , b [ key ] , copy_trees = copy_trees ) else : if isinstance ( value , ConfigValues ) : value . parent = a value . key = key if key in a : value . overriden_value = a [ key ] a [ key ] = value if a . root : if b . root : a . history [ key ] = a . history . get ( key , [ ] ) + b . history . get ( key , [ value ] ) else : a . history [ key ] = a . history . get ( key , [ ] ) + [ value ] return a
|
Merge config b into a
|
9,361
|
def get ( self , key , default = UndefinedKey ) : return self . _get ( ConfigTree . parse_key ( key ) , 0 , default )
|
Get a value from the tree
|
9,362
|
def get_string ( self , key , default = UndefinedKey ) : value = self . get ( key , default ) if value is None : return None string_value = unicode ( value ) if isinstance ( value , bool ) : string_value = string_value . lower ( ) return string_value
|
Return string representation of value found at key
|
9,363
|
def pop ( self , key , default = UndefinedKey ) : if default != UndefinedKey and key not in self : return default value = self . get ( key , UndefinedKey ) lst = ConfigTree . parse_key ( key ) parent = self . KEY_SEP . join ( lst [ 0 : - 1 ] ) child = lst [ - 1 ] if parent : self . get ( parent ) . __delitem__ ( child ) else : self . __delitem__ ( child ) return value
|
Remove specified key and return the corresponding value . If key is not found default is returned if given otherwise ConfigMissingException is raised
|
9,364
|
def get_int ( self , key , default = UndefinedKey ) : value = self . get ( key , default ) try : return int ( value ) if value is not None else None except ( TypeError , ValueError ) : raise ConfigException ( u"{key} has type '{type}' rather than 'int'" . format ( key = key , type = type ( value ) . __name__ ) )
|
Return int representation of value found at key
|
9,365
|
def get_float ( self , key , default = UndefinedKey ) : value = self . get ( key , default ) try : return float ( value ) if value is not None else None except ( TypeError , ValueError ) : raise ConfigException ( u"{key} has type '{type}' rather than 'float'" . format ( key = key , type = type ( value ) . __name__ ) )
|
Return float representation of value found at key
|
9,366
|
def get_bool ( self , key , default = UndefinedKey ) : bool_conversions = { None : None , 'true' : True , 'yes' : True , 'on' : True , 'false' : False , 'no' : False , 'off' : False } string_value = self . get_string ( key , default ) if string_value is not None : string_value = string_value . lower ( ) try : return bool_conversions [ string_value ] except KeyError : raise ConfigException ( u"{key} does not translate to a Boolean value" . format ( key = key ) )
|
Return boolean representation of value found at key
|
9,367
|
def get_list ( self , key , default = UndefinedKey ) : value = self . get ( key , default ) if isinstance ( value , list ) : return value elif isinstance ( value , ConfigTree ) : lst = [ ] for k , v in sorted ( value . items ( ) , key = lambda kv : kv [ 0 ] ) : if re . match ( '^[1-9][0-9]*$|0' , k ) : lst . append ( v ) else : raise ConfigException ( u"{key} does not translate to a list" . format ( key = key ) ) return lst elif value is None : return None else : raise ConfigException ( u"{key} has type '{type}' rather than 'list'" . format ( key = key , type = type ( value ) . __name__ ) )
|
Return list representation of value found at key
|
9,368
|
def get_config ( self , key , default = UndefinedKey ) : value = self . get ( key , default ) if isinstance ( value , dict ) : return value elif value is None : return None else : raise ConfigException ( u"{key} has type '{type}' rather than 'config'" . format ( key = key , type = type ( value ) . __name__ ) )
|
Return tree config representation of value found at key
|
9,369
|
def as_plain_ordered_dict ( self ) : def plain_value ( v ) : if isinstance ( v , list ) : return [ plain_value ( e ) for e in v ] elif isinstance ( v , ConfigTree ) : return v . as_plain_ordered_dict ( ) else : if isinstance ( v , ConfigValues ) : raise ConfigException ( "The config tree contains unresolved elements" ) return v return OrderedDict ( ( key . strip ( '"' ) , plain_value ( value ) ) for key , value in self . items ( ) )
|
return a deep copy of this config as a plain OrderedDict
|
9,370
|
def labelLine ( line , x , label = None , align = True , ** kwargs ) : ax = line . axes xdata = line . get_xdata ( ) ydata = line . get_ydata ( ) order = np . argsort ( xdata ) xdata = xdata [ order ] ydata = ydata [ order ] if isinstance ( x , datetime ) : x = date2num ( x ) xmin , xmax = xdata [ 0 ] , xdata [ - 1 ] if ( x < xmin ) or ( x > xmax ) : raise Exception ( 'x label location is outside data range!' ) ip = 1 for i in range ( len ( xdata ) ) : if x < xdata [ i ] : ip = i break y = ydata [ ip - 1 ] + ( ydata [ ip ] - ydata [ ip - 1 ] ) * ( x - xdata [ ip - 1 ] ) / ( xdata [ ip ] - xdata [ ip - 1 ] ) if not label : label = line . get_label ( ) if align : dx = xdata [ ip ] - xdata [ ip - 1 ] dy = ydata [ ip ] - ydata [ ip - 1 ] ang = degrees ( atan2 ( dy , dx ) ) pt = np . array ( [ x , y ] ) . reshape ( ( 1 , 2 ) ) trans_angle = ax . transData . transform_angles ( np . array ( ( ang , ) ) , pt ) [ 0 ] else : trans_angle = 0 if 'color' not in kwargs : kwargs [ 'color' ] = line . get_color ( ) if ( 'horizontalalignment' not in kwargs ) and ( 'ha' not in kwargs ) : kwargs [ 'ha' ] = 'center' if ( 'verticalalignment' not in kwargs ) and ( 'va' not in kwargs ) : kwargs [ 'va' ] = 'center' if 'backgroundcolor' not in kwargs : kwargs [ 'backgroundcolor' ] = ax . get_facecolor ( ) if 'clip_on' not in kwargs : kwargs [ 'clip_on' ] = True if 'zorder' not in kwargs : kwargs [ 'zorder' ] = 2.5 ax . text ( x , y , label , rotation = trans_angle , ** kwargs )
|
Label a single matplotlib line at position x
|
9,371
|
def labelLines ( lines , align = True , xvals = None , ** kwargs ) : ax = lines [ 0 ] . axes labLines = [ ] labels = [ ] for line in lines : label = line . get_label ( ) if "_line" not in label : labLines . append ( line ) labels . append ( label ) if xvals is None : xvals = ax . get_xlim ( ) if type ( xvals ) == tuple : xmin , xmax = xvals xscale = ax . get_xscale ( ) if xscale == "log" : xvals = np . logspace ( np . log10 ( xmin ) , np . log10 ( xmax ) , len ( labLines ) + 2 ) [ 1 : - 1 ] else : xvals = np . linspace ( xmin , xmax , len ( labLines ) + 2 ) [ 1 : - 1 ] for line , x , label in zip ( labLines , xvals , labels ) : labelLine ( line , x , label , align , ** kwargs )
|
Label all lines with their respective legends .
|
9,372
|
def new_as_dict ( self , raw = True , vars = None ) : result = { } for section in self . sections ( ) : if section not in result : result [ section ] = { } for option in self . options ( section ) : value = self . get ( section , option , raw = raw , vars = vars ) try : value = cherrypy . lib . reprconf . unrepr ( value ) except Exception : x = sys . exc_info ( ) [ 1 ] msg = ( "Config error in section: %r, option: %r, " "value: %r. Config values must be valid Python." % ( section , option , value ) ) raise ValueError ( msg , x . __class__ . __name__ , x . args ) result [ section ] [ option ] = value return result
|
Convert an INI file to a dictionary
|
9,373
|
def auth ( self , username , password ) : if username not in self . users : return False elif self . users [ username ] [ self . pwd_attr ] == password : return True return False
|
Check authentication against the backend
|
9,374
|
def add_user ( self , attrs ) : username = attrs [ self . key ] if username in self . users : raise UserAlreadyExists ( username , self . backend_name ) self . users [ username ] = attrs self . users [ username ] [ 'groups' ] = set ( [ ] )
|
Add a user to the backend
|
9,375
|
def del_user ( self , username ) : self . _check_fix_users ( username ) try : del self . users [ username ] except Exception as e : raise UserDoesntExist ( username , self . backend_name )
|
Delete a user from the backend
|
9,376
|
def set_attrs ( self , username , attrs ) : self . _check_fix_users ( username ) for attr in attrs : self . users [ username ] [ attr ] = attrs [ attr ]
|
set a list of attributes for a given user
|
9,377
|
def add_to_groups ( self , username , groups ) : self . _check_fix_users ( username ) current_groups = self . users [ username ] [ 'groups' ] new_groups = current_groups | set ( groups ) self . users [ username ] [ 'groups' ] = new_groups
|
Add a user to a list of groups
|
9,378
|
def search ( self , searchstring ) : ret = { } for user in self . users : match = False for attr in self . search_attrs : if attr not in self . users [ user ] : pass elif re . search ( searchstring + '.*' , self . users [ user ] [ attr ] ) : match = True if match : ret [ user ] = self . users [ user ] return ret
|
Search backend for users
|
9,379
|
def get_user ( self , username ) : try : return self . users [ username ] except Exception as e : raise UserDoesntExist ( username , self . backend_name )
|
Get a user s attributes
|
9,380
|
def get_groups ( self , username ) : try : return self . users [ username ] [ 'groups' ] except Exception as e : raise UserDoesntExist ( username , self . backend_name )
|
Get a user s groups
|
9,381
|
def get_loglevel ( level ) : if level == 'debug' : return logging . DEBUG elif level == 'notice' : return logging . INFO elif level == 'info' : return logging . INFO elif level == 'warning' or level == 'warn' : return logging . WARNING elif level == 'error' or level == 'err' : return logging . ERROR elif level == 'critical' or level == 'crit' : return logging . CRITICAL elif level == 'alert' : return logging . CRITICAL elif level == 'emergency' or level == 'emerg' : return logging . CRITICAL else : return logging . INFO
|
return logging level object corresponding to a given level passed as a string
|
9,382
|
def _normalize_group_attrs ( self , attrs ) : for key in self . group_attrs_keys : if key not in attrs : raise MissingGroupAttr ( key ) if type ( attrs [ key ] ) is list and len ( attrs [ key ] ) == 1 : attrs [ key ] = attrs [ key ] [ 0 ] if type ( attrs [ key ] ) is list and len ( attrs [ key ] ) != 1 : raise MultivaluedGroupAttr ( key )
|
Normalize the attributes used to set groups If it s a list of one element it just become this element . It raises an error if the attribute doesn t exist or if it s multivaluated .
|
9,383
|
def _connect ( self ) : ldap_client = ldap . initialize ( self . uri ) ldap . set_option ( ldap . OPT_REFERRALS , 0 ) ldap . set_option ( ldap . OPT_TIMEOUT , self . timeout ) if self . starttls == 'on' : ldap . set_option ( ldap . OPT_X_TLS_DEMAND , True ) else : ldap . set_option ( ldap . OPT_X_TLS_DEMAND , False ) if self . ca and self . checkcert == 'on' : if os . path . isfile ( self . ca ) : ldap . set_option ( ldap . OPT_X_TLS_CACERTFILE , self . ca ) else : raise CaFileDontExist ( self . ca ) if self . checkcert == 'off' : ldap . set_option ( ldap . OPT_X_TLS_REQUIRE_CERT , ldap . OPT_X_TLS_NEVER ) ldap_client . set_option ( ldap . OPT_X_TLS_REQUIRE_CERT , ldap . OPT_X_TLS_NEVER ) else : ldap_client . set_option ( ldap . OPT_X_TLS_REQUIRE_CERT , ldap . OPT_X_TLS_DEMAND ) ldap . set_option ( ldap . OPT_X_TLS_REQUIRE_CERT , ldap . OPT_X_TLS_NEVER ) if self . starttls == 'on' : try : ldap_client . start_tls_s ( ) except Exception as e : self . _exception_handler ( e ) return ldap_client
|
Initialize an ldap client
|
9,384
|
def _bind ( self ) : ldap_client = self . _connect ( ) try : ldap_client . simple_bind_s ( self . binddn , self . bindpassword ) except Exception as e : ldap_client . unbind_s ( ) self . _exception_handler ( e ) return ldap_client
|
bind to the ldap with the technical account
|
9,385
|
def _get_user ( self , username , attrs = ALL_ATTRS ) : username = ldap . filter . escape_filter_chars ( username ) user_filter = self . user_filter_tmpl % { 'username' : self . _uni ( username ) } r = self . _search ( self . _byte_p2 ( user_filter ) , attrs , self . userdn ) if len ( r ) == 0 : return None if attrs == NO_ATTR : dn_entry = r [ 0 ] [ 0 ] else : dn_entry = r [ 0 ] return dn_entry
|
Get a user from the ldap
|
9,386
|
def auth ( self , username , password ) : binddn = self . _get_user ( self . _byte_p2 ( username ) , NO_ATTR ) if binddn is not None : ldap_client = self . _connect ( ) try : ldap_client . simple_bind_s ( self . _byte_p2 ( binddn ) , self . _byte_p2 ( password ) ) except ldap . INVALID_CREDENTIALS : ldap_client . unbind_s ( ) return False ldap_client . unbind_s ( ) return True else : return False
|
Authentication of a user
|
9,387
|
def add_user ( self , attrs ) : ldap_client = self . _bind ( ) attrs_srt = self . attrs_pretreatment ( attrs ) attrs_srt [ self . _byte_p2 ( 'objectClass' ) ] = self . objectclasses dn = self . _byte_p2 ( self . dn_user_attr ) + self . _byte_p2 ( '=' ) + self . _byte_p2 ( ldap . dn . escape_dn_chars ( attrs [ self . dn_user_attr ] ) ) + self . _byte_p2 ( ',' ) + self . _byte_p2 ( self . userdn ) ldif = modlist . addModlist ( attrs_srt ) try : ldap_client . add_s ( dn , ldif ) except ldap . ALREADY_EXISTS as e : raise UserAlreadyExists ( attrs [ self . key ] , self . backend_name ) except Exception as e : ldap_client . unbind_s ( ) self . _exception_handler ( e ) ldap_client . unbind_s ( )
|
add a user
|
9,388
|
def del_user ( self , username ) : ldap_client = self . _bind ( ) dn = self . _byte_p2 ( self . _get_user ( self . _byte_p2 ( username ) , NO_ATTR ) ) if dn is not None : ldap_client . delete_s ( dn ) else : ldap_client . unbind_s ( ) raise UserDoesntExist ( username , self . backend_name ) ldap_client . unbind_s ( )
|
delete a user
|
9,389
|
def set_attrs ( self , username , attrs ) : ldap_client = self . _bind ( ) tmp = self . _get_user ( self . _byte_p2 ( username ) , ALL_ATTRS ) if tmp is None : raise UserDoesntExist ( username , self . backend_name ) dn = self . _byte_p2 ( tmp [ 0 ] ) old_attrs = tmp [ 1 ] for attr in attrs : bcontent = self . _byte_p2 ( attrs [ attr ] ) battr = self . _byte_p2 ( attr ) new = { battr : self . _modlist ( self . _byte_p3 ( bcontent ) ) } if attr . lower ( ) == self . dn_user_attr . lower ( ) : ldap_client . rename_s ( dn , ldap . dn . dn2str ( [ [ ( battr , bcontent , 1 ) ] ] ) ) dn = ldap . dn . dn2str ( [ [ ( battr , bcontent , 1 ) ] ] + ldap . dn . str2dn ( dn ) [ 1 : ] ) else : if attr in old_attrs : if type ( old_attrs [ attr ] ) is list : tmp = [ ] for value in old_attrs [ attr ] : tmp . append ( self . _byte_p2 ( value ) ) bold_value = tmp else : bold_value = self . _modlist ( self . _byte_p3 ( old_attrs [ attr ] ) ) old = { battr : bold_value } else : old = { } ldif = modlist . modifyModlist ( old , new ) if ldif : try : ldap_client . modify_s ( dn , ldif ) except Exception as e : ldap_client . unbind_s ( ) self . _exception_handler ( e ) ldap_client . unbind_s ( )
|
set user attributes
|
9,390
|
def del_from_groups ( self , username , groups ) : ldap_client = self . _bind ( ) tmp = self . _get_user ( self . _byte_p2 ( username ) , ALL_ATTRS ) if tmp is None : raise UserDoesntExist ( username , self . backend_name ) dn = tmp [ 0 ] attrs = tmp [ 1 ] attrs [ 'dn' ] = dn self . _normalize_group_attrs ( attrs ) dn = self . _byte_p2 ( tmp [ 0 ] ) for group in groups : group = self . _byte_p2 ( group ) for attr in self . group_attrs : content = self . _byte_p2 ( self . group_attrs [ attr ] % attrs ) ldif = [ ( ldap . MOD_DELETE , attr , self . _byte_p3 ( content ) ) ] try : ldap_client . modify_s ( group , ldif ) except ldap . NO_SUCH_ATTRIBUTE as e : self . _logger ( severity = logging . INFO , msg = "%(backend)s: user '%(user)s'" " wasn't member of group '%(group)s'" " (attribute '%(attr)s')" % { 'user' : username , 'group' : self . _uni ( group ) , 'attr' : attr , 'backend' : self . backend_name } ) except Exception as e : ldap_client . unbind_s ( ) self . _exception_handler ( e ) ldap_client . unbind_s ( )
|
Delete user from groups
|
9,391
|
def get_user ( self , username ) : ret = { } tmp = self . _get_user ( self . _byte_p2 ( username ) , ALL_ATTRS ) if tmp is None : raise UserDoesntExist ( username , self . backend_name ) attrs_tmp = tmp [ 1 ] for attr in attrs_tmp : value_tmp = attrs_tmp [ attr ] if len ( value_tmp ) == 1 : ret [ attr ] = value_tmp [ 0 ] else : ret [ attr ] = value_tmp return ret
|
Gest a specific user
|
9,392
|
def get_groups ( self , username ) : username = ldap . filter . escape_filter_chars ( self . _byte_p2 ( username ) ) userdn = self . _get_user ( username , NO_ATTR ) searchfilter = self . group_filter_tmpl % { 'userdn' : userdn , 'username' : username } groups = self . _search ( searchfilter , NO_ATTR , self . groupdn ) ret = [ ] for entry in groups : ret . append ( self . _uni ( entry [ 0 ] ) ) return ret
|
Get all groups of a user
|
9,393
|
def _merge_groups ( self , backends_list ) : ret = { } for backends in backends_list : for b in backends : if b not in ret : ret [ b ] = set ( [ ] ) for group in backends [ b ] : ret [ b ] . add ( group ) for b in ret : ret [ b ] = list ( ret [ b ] ) ret [ b ] . sort ( ) return ret
|
merge a list backends_groups
|
9,394
|
def _is_parent ( self , roleid1 , roleid2 ) : role2 = copy . deepcopy ( self . flatten [ roleid2 ] ) role1 = copy . deepcopy ( self . flatten [ roleid1 ] ) if role1 == role2 : return False for b1 in role1 [ 'backends_groups' ] : if b1 not in role2 [ 'backends_groups' ] : return False for group in role1 [ 'backends_groups' ] [ b1 ] : if group not in role2 [ 'backends_groups' ] [ b1 ] : return False for b2 in role2 [ 'backends_groups' ] : if b2 not in role1 [ 'backends_groups' ] : return True for group in role2 [ 'backends_groups' ] [ b2 ] : if group not in role1 [ 'backends_groups' ] [ b2 ] : return True raise DumplicateRoleContent ( roleid1 , roleid2 )
|
Test if roleid1 is contained inside roleid2
|
9,395
|
def get_groups_to_remove ( self , current_roles , roles_to_remove ) : current_roles = set ( current_roles ) ret = { } roles_to_remove = set ( roles_to_remove ) tmp = set ( [ ] ) for r in roles_to_remove : for sr in self . _get_subroles ( r ) : if sr not in roles_to_remove and sr in current_roles : tmp . add ( sr ) roles_to_remove = roles_to_remove . union ( tmp ) roles = current_roles . difference ( set ( roles_to_remove ) ) groups_roles = self . _get_groups ( roles ) groups_roles_to_remove = self . _get_groups ( roles_to_remove ) for b in groups_roles_to_remove : if b in groups_roles : groups_roles_to_remove [ b ] = groups_roles_to_remove [ b ] . difference ( groups_roles [ b ] ) return groups_roles_to_remove
|
get groups to remove from list of roles to remove and current roles
|
9,396
|
def get_roles ( self , groups ) : roles = set ( [ ] ) parentroles = set ( [ ] ) notroles = set ( [ ] ) tmp = set ( [ ] ) usedgroups = { } unusedgroups = { } ret = { } for role in self . roles : if self . _check_member ( role , groups , notroles , tmp , parentroles , usedgroups ) : roles . add ( role ) for b in groups : for g in groups [ b ] : if b not in usedgroups or g not in usedgroups [ b ] : if b not in unusedgroups : unusedgroups [ b ] = set ( [ ] ) unusedgroups [ b ] . add ( g ) ret [ 'roles' ] = roles ret [ 'unusedgroups' ] = unusedgroups return ret
|
get list of roles and list of standalone groups
|
9,397
|
def get_display_name ( self , role ) : if role not in self . flatten : raise MissingRole ( role ) return self . flatten [ role ] [ 'display_name' ]
|
get the display name of a role
|
9,398
|
def get_groups ( self , roles ) : ret = { } for role in roles : if role not in self . flatten : raise MissingRole ( role ) for b in self . flatten [ role ] [ 'backends_groups' ] : if b not in ret : ret [ b ] = [ ] ret [ b ] = ret [ b ] + self . flatten [ role ] [ 'backends_groups' ] [ b ] return ret
|
get the list of groups from role
|
9,399
|
def is_admin ( self , roles ) : for r in roles : if r in self . admin_roles : return True return False
|
determine from a list of roles if is ldapcherry administrator
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.