idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
47,100 | def save ( params , filename , source ) : writer = wave . open ( filename , 'wb' ) writer . setnchannels ( 1 ) writer . setsampwidth ( 2 ) writer . setframerate ( params . sample_rate ) data_out = array . array ( 'h' ) for x in source : data_out . append ( int ( x * 32766 ) ) writer . writeframes ( data_out . tostring ( ) ) writer . close ( ) | Write a sequence of samples as a WAV file Currently a 16 bit mono file |
47,101 | def smallest ( self ) : heap = self . _heap v , k = heap [ 0 ] while k not in self or self [ k ] != v : heappop ( heap ) v , k = heap [ 0 ] return k | Return the item with the lowest priority . |
47,102 | def pop_smallest ( self ) : heap = self . _heap v , k = heappop ( heap ) while k not in self or self [ k ] != v : v , k = heappop ( heap ) del self [ k ] return k | Return the item with the lowest priority and remove it . |
47,103 | def get_lookups ( cls ) : class_lookups = [ parent . __dict__ . get ( 'class_lookups' , { } ) for parent in inspect . getmro ( cls ) ] return cls . merge_dicts ( class_lookups ) | Fetch all Lookups |
47,104 | def get_lookup ( self , lookup_name ) : from protean . core . repository import BaseLookup lookup = self . _get_lookup ( lookup_name ) if lookup is None or ( lookup is not None and not issubclass ( lookup , BaseLookup ) ) : raise NotImplementedError return lookup | Fetch Lookup by name |
47,105 | def register_lookup ( cls , lookup , lookup_name = None ) : if lookup_name is None : lookup_name = lookup . lookup_name if 'class_lookups' not in cls . __dict__ : cls . class_lookups = { } cls . class_lookups [ lookup_name ] = lookup cls . _clear_cached_lookups ( ) return lookup | Register a Lookup to a class |
47,106 | def _unregister_lookup ( cls , lookup , lookup_name = None ) : if lookup_name is None : lookup_name = lookup . lookup_name del cls . class_lookups [ lookup_name ] | Remove given lookup from cls lookups . For use in tests only as it s not thread - safe . |
47,107 | def deconstruct ( self ) : path = '%s.%s' % ( self . __class__ . __module__ , self . __class__ . __name__ ) args , kwargs = ( ) , { } if len ( self . children ) == 1 and not isinstance ( self . children [ 0 ] , Q ) : child = self . children [ 0 ] kwargs = { child [ 0 ] : child [ 1 ] } else : args = tuple ( self . children ) if self . connector != self . default : kwargs = { '_connector' : self . connector } if self . negated : kwargs [ '_negated' ] = True return path , args , kwargs | Deconstruct a Q Object |
47,108 | def set_context ( self , data ) : for key in data : setattr ( self . local_context , key , data [ key ] ) | Load Context with data |
47,109 | def _reset_values ( self , instance ) : self . value = None self . reference . value = None instance . __dict__ . pop ( self . field_name , None ) instance . __dict__ . pop ( self . reference . field_name , None ) self . reference . delete_cached_value ( instance ) | Reset all associated values and clean up dictionary items |
47,110 | def to_cls ( self ) : try : if isinstance ( self . _to_cls , str ) : self . _to_cls = fetch_entity_cls_from_registry ( self . _to_cls ) except AssertionError : pass return self . _to_cls | Property to retrieve to_cls as an entity when possible |
47,111 | def linked_attribute ( self ) : if isinstance ( self . to_cls , str ) : return 'id' else : return self . via or self . to_cls . meta_ . id_field . attribute_name | Choose the Linkage attribute between via and designated id_field of the target class |
47,112 | def _linked_attribute ( self , owner ) : return self . via or ( utils . inflection . underscore ( owner . __name__ ) + '_id' ) | Choose the Linkage attribute between via and own entity s id_field |
47,113 | def _fetch_objects ( self , key , value ) : return self . to_cls . query . filter ( ** { key : value } ) | Fetch Multiple linked objects |
47,114 | def _load_fields ( new_class , attrs ) : for attr_name , attr_obj in attrs . items ( ) : if isinstance ( attr_obj , ( Field , Reference ) ) : setattr ( new_class , attr_name , attr_obj ) new_class . meta_ . declared_fields [ attr_name ] = attr_obj | Load field items into Class . |
47,115 | def _set_up_reference_fields ( new_class ) : if new_class . meta_ . declared_fields : for _ , field in new_class . meta_ . declared_fields . items ( ) : if isinstance ( field , Reference ) : shadow_field_name , shadow_field = field . get_shadow_field ( ) setattr ( new_class , shadow_field_name , shadow_field ) shadow_field . __set_name__ ( new_class , shadow_field_name ) | Walk through relation fields and setup shadow attributes |
47,116 | def _set_id_field ( new_class ) : if new_class . meta_ . declared_fields : try : new_class . meta_ . id_field = next ( field for _ , field in new_class . meta_ . declared_fields . items ( ) if field . identifier ) except StopIteration : new_class . _create_id_field ( ) | Lookup the id field for this entity and assign |
47,117 | def _create_id_field ( new_class ) : id_field = Auto ( identifier = True ) setattr ( new_class , 'id' , id_field ) id_field . __set_name__ ( new_class , 'id' ) new_class . meta_ . declared_fields [ 'id' ] = id_field new_class . meta_ . id_field = id_field | Create and return a default ID field that is Auto generated |
47,118 | def _load_attributes ( new_class ) : for field_name , field_obj in new_class . meta_ . declared_fields . items ( ) : new_class . meta_ . attributes [ field_obj . get_attribute_name ( ) ] = field_obj | Load list of attributes from declared fields |
47,119 | def unique_fields ( self ) : return [ ( field_name , field_obj ) for field_name , field_obj in self . declared_fields . items ( ) if field_obj . unique ] | Return the unique fields for this entity |
47,120 | def _update_data ( self , * data_dict , ** kwargs ) : self . errors = { } for data in data_dict : if not isinstance ( data , dict ) : raise AssertionError ( f'Positional argument "{data}" passed must be a dict.' f'This argument serves as a template for loading common ' f'values.' ) for field_name , val in data . items ( ) : setattr ( self , field_name , val ) for field_name , val in kwargs . items ( ) : setattr ( self , field_name , val ) if self . errors : raise ValidationError ( self . errors ) | A private method to process and update entity values correctly . |
47,121 | def to_dict ( self ) : return { field_name : getattr ( self , field_name , None ) for field_name in self . meta_ . declared_fields } | Return entity data as a dictionary |
47,122 | def clone ( self ) : clone_copy = copy . deepcopy ( self ) clone_copy . state_ = EntityState ( ) return clone_copy | Deepclone the entity but reset state |
47,123 | def get ( cls , identifier : Any ) -> 'Entity' : logger . debug ( f'Lookup `{cls.__name__}` object with identifier {identifier}' ) filters = { cls . meta_ . id_field . field_name : identifier } results = cls . query . filter ( ** filters ) . limit ( 1 ) . all ( ) if not results : raise ObjectNotFoundError ( f'`{cls.__name__}` object with identifier {identifier} ' f'does not exist.' ) return results . first | Get a specific Record from the Repository |
47,124 | def reload ( self ) -> None : if not self . state_ . is_persisted or self . state_ . is_changed : raise InvalidStateError ( f'`{self.__class__.__name__}` object is in invalid state' ) identifier = getattr ( self , self . meta_ . id_field . field_name ) logger . debug ( f'Lookup `{self.__class__.__name__}` object with ' f'identifier {self.meta_.id_field}' ) db_value = self . get ( identifier ) self . _update_data ( db_value . to_dict ( ) ) | Reload Entity from the repository |
47,125 | def find_by ( cls , ** kwargs ) -> 'Entity' : logger . debug ( f'Lookup `{cls.__name__}` object with values ' f'{kwargs}' ) results = cls . query . filter ( ** kwargs ) . limit ( 1 ) . all ( ) if not results : raise ObjectNotFoundError ( f'`{cls.__name__}` object with values {[item for item in kwargs.items()]} ' f'does not exist.' ) return results . first | Find a specific entity record that matches one or more criteria . |
47,126 | def exists ( cls , excludes_ , ** filters ) : results = cls . query . filter ( ** filters ) . exclude ( ** excludes_ ) return bool ( results ) | Return True if objects matching the provided filters and excludes exist if not return false . |
47,127 | def create ( cls , * args , ** kwargs ) -> 'Entity' : logger . debug ( f'Creating new `{cls.__name__}` object using data {kwargs}' ) model_cls = repo_factory . get_model ( cls ) repository = repo_factory . get_repository ( cls ) try : entity = cls ( * args , ** kwargs ) entity . _validate_unique ( ) entity . pre_save ( ) model_obj = repository . create ( model_cls . from_entity ( entity ) ) for field_name , field_obj in entity . meta_ . declared_fields . items ( ) : if isinstance ( field_obj , Auto ) : if isinstance ( model_obj , dict ) : field_val = model_obj [ field_name ] else : field_val = getattr ( model_obj , field_name ) setattr ( entity , field_name , field_val ) entity . state_ . mark_saved ( ) entity . post_save ( ) return entity except ValidationError : raise | Create a new record in the repository . |
47,128 | def save ( self ) : logger . debug ( f'Saving `{self.__class__.__name__}` object' ) model_cls = repo_factory . get_model ( self . __class__ ) repository = repo_factory . get_repository ( self . __class__ ) try : self . _validate_unique ( create = False ) self . pre_save ( ) model_obj = repository . create ( model_cls . from_entity ( self ) ) for field_name , field_obj in self . meta_ . declared_fields . items ( ) : if isinstance ( field_obj , Auto ) : if isinstance ( model_obj , dict ) : field_val = model_obj [ field_name ] else : field_val = getattr ( model_obj , field_name ) setattr ( self , field_name , field_val ) self . state_ . mark_saved ( ) self . post_save ( ) return self except Exception : raise | Save a new Entity into repository . |
47,129 | def update ( self , * data , ** kwargs ) -> 'Entity' : logger . debug ( f'Updating existing `{self.__class__.__name__}` object with id {self.id}' ) model_cls = repo_factory . get_model ( self . __class__ ) repository = repo_factory . get_repository ( self . __class__ ) try : self . _update_data ( * data , ** kwargs ) self . _validate_unique ( create = False ) self . pre_save ( ) repository . update ( model_cls . from_entity ( self ) ) self . state_ . mark_saved ( ) self . post_save ( ) return self except Exception : raise | Update a Record in the repository . |
47,130 | def _validate_unique ( self , create = True ) : filters , excludes = { } , { } for field_name , field_obj in self . meta_ . unique_fields : lookup_value = getattr ( self , field_name , None ) if lookup_value in Field . empty_values : continue if not create and field_obj . identifier : excludes [ field_name ] = lookup_value continue filters [ field_name ] = lookup_value for filter_key , lookup_value in filters . items ( ) : if self . exists ( excludes , ** { filter_key : lookup_value } ) : field_obj = self . meta_ . declared_fields [ filter_key ] field_obj . fail ( 'unique' , entity_name = self . __class__ . __name__ , field_name = filter_key ) | Validate the unique constraints for the entity |
47,131 | def delete ( self ) : model_cls = repo_factory . get_model ( self . __class__ ) repository = repo_factory . get_repository ( self . __class__ ) try : if not self . state_ . is_destroyed : repository . delete ( model_cls . from_entity ( self ) ) self . state_ . mark_destroyed ( ) return self except Exception : raise | Delete a Record from the Repository |
47,132 | def message ( self ) : return '\n' . join ( [ self . from_email , str ( self . to ) , self . subject , self . body ] ) | Convert the message to a mime compliant email string |
47,133 | def get_connection ( self , fail_silently = False ) : from protean . services . email import get_connection if not self . connection : self . connection = get_connection ( fail_silently = fail_silently ) return self . connection | Retrieve connection to send email |
47,134 | def prefix ( self , prefix ) : original_prefix = self . _prefix self . _prefix += prefix yield self self . _prefix = original_prefix | Adds a prefix to routes contained within . |
47,135 | def normalized_messages ( self , no_field_name = '_entity' ) : if isinstance ( self . messages , dict ) : return self . messages if not self . field_names : return { no_field_name : self . messages } return dict ( ( name , self . messages ) for name in self . field_names ) | Return all the error messages as a dictionary |
47,136 | def generate_random_string ( length = 6 ) : n = int ( length / 2 + 1 ) x = binascii . hexlify ( os . urandom ( n ) ) s = x [ : length ] return s . decode ( 'utf-8' ) | Returns a random string of a specified length . |
47,137 | def get_datetime ( epoch ) : t = time . gmtime ( epoch ) dt = datetime . datetime ( * t [ : 6 ] ) return dt | get datetime from an epoch timestamp |
47,138 | def xcode ( text , encoding = 'utf8' , mode = 'ignore' ) : return text . encode ( encoding , mode ) if isinstance ( text , str ) else text | Converts unicode encoding to str |
47,139 | def flatten_dict ( d , parent_key = '' , sep = '.' , ignore_under_prefixed = True , mark_value = True ) : items = { } for k in d : if ignore_under_prefixed and k . startswith ( '__' ) : continue v = d [ k ] if mark_value and k . startswith ( '_' ) and not k . startswith ( '__' ) : v = MarkValue ( repr ( v ) ) new_key = sep . join ( ( parent_key , k ) ) if parent_key else k if isinstance ( v , collections . MutableMapping ) : items . update ( flatten_dict ( v , new_key , sep = sep , ignore_under_prefixed = True , mark_value = True ) ) else : items [ new_key ] = v return items | Flattens a nested dictionary |
47,140 | def set_file_limits ( n ) : try : resource . setrlimit ( resource . RLIMIT_NOFILE , ( n , n ) ) return True except ValueError : return False | Set the limit on number of file descriptors that this process can open . |
47,141 | def load_object ( imp_path ) : module_name , obj_name = imp_path . split ( '.' , 1 ) module = __import__ ( module_name ) obj = attrgetter ( obj_name ) ( module ) return obj | Given a python import path load the object For dynamic imports in a program |
47,142 | def fail ( self , key , ** kwargs ) : try : msg = self . error_messages [ key ] except KeyError : class_name = self . __class__ . __name__ msg = MISSING_ERROR_MESSAGE . format ( class_name = class_name , key = key ) raise AssertionError ( msg ) if isinstance ( msg , str ) : msg = msg . format ( ** kwargs ) raise exceptions . ValidationError ( msg , self . field_name ) | A helper method that simply raises a ValidationError . |
47,143 | def _load ( self , value : Any ) : if value in self . empty_values : if self . default is not None : default = self . default value = default ( ) if callable ( default ) else default return value elif self . required : self . fail ( 'required' ) else : return None if self . choices : value_list = value if not isinstance ( value , ( list , tuple ) ) : value_list = [ value ] for v in value_list : if v not in self . choice_dict : self . fail ( 'invalid_choice' , value = v , choices = list ( self . choice_dict ) ) value = self . _cast_to_type ( value ) self . _run_validators ( value ) return value | Load the value for the field run validators and return the value . Subclasses can override this to provide custom load logic . |
47,144 | def _extract_lookup ( self , key ) : parts = key . split ( '__' ) op = 'exact' if len ( parts ) == 1 else parts [ 1 ] return parts [ 0 ] , self . get_lookup ( op ) | Extract lookup method based on key name format |
47,145 | def exec ( self , payload : Log2ReqsAddOnPayload ) -> TList [ Request ] : try : return Request . from_jsonf_to_list ( payload . file , encoding = self . config . encoding ) except TypeError as e : raise ValueError ( e ) | Transform from json to Request |
47,146 | def get_backend_expiry ( self , expiry = DEFAULT_EXPIRY ) : if expiry == DEFAULT_EXPIRY : expiry = self . default_expiry elif expiry == 0 : expiry = - 1 return None if expiry is None else time . time ( ) + expiry | Return the expiry value usable by this backend based upon the provided timeout . |
47,147 | def incr ( self , key , delta = 1 ) : value = self . get ( key ) if value is None : raise ValueError ( "Key '%s' not found" % key ) new_value = value + delta self . set ( key , new_value ) return new_value | Add delta to value in the cache . If the key does not exist raise a ValueError exception . |
47,148 | def _initialize_providers ( self ) : configured_providers = active_config . DATABASES provider_objects = { } if not isinstance ( configured_providers , dict ) or configured_providers == { } : raise ConfigurationError ( "'DATABASES' config must be a dict and at least one " "provider must be defined" ) if 'default' not in configured_providers : raise ConfigurationError ( "You must define a 'default' provider" ) for provider_name , conn_info in configured_providers . items ( ) : provider_full_path = conn_info [ 'PROVIDER' ] provider_module , provider_class = provider_full_path . rsplit ( '.' , maxsplit = 1 ) provider_cls = getattr ( importlib . import_module ( provider_module ) , provider_class ) provider_objects [ provider_name ] = provider_cls ( conn_info ) return provider_objects | Read config file and initialize providers |
47,149 | def get_provider ( self , provider_name = 'default' ) : try : if self . _providers is None : self . _providers = self . _initialize_providers ( ) return self . _providers [ provider_name ] except KeyError : raise AssertionError ( f'No Provider registered with name {provider_name}' ) | Fetch provider with the name specified in Configuration file |
47,150 | def get_connection ( self , provider_name = 'default' ) : try : return self . _providers [ provider_name ] . get_connection ( ) except KeyError : raise AssertionError ( f'No Provider registered with name {provider_name}' ) | Fetch connection from Provider |
47,151 | def update_defaults ( self , ext_config ) : for setting in dir ( ext_config ) : if setting . isupper ( ) and not hasattr ( self , setting ) : setattr ( self , setting , getattr ( ext_config , setting ) ) | Update the default settings for an extension from an object |
47,152 | def fetch_entity_cls_from_registry ( entity ) : if isinstance ( entity , str ) : try : return repo_factory . get_entity ( entity ) except AssertionError : raise else : return entity | Util Method to fetch an Entity class from an entity s name |
47,153 | def _find_entity_in_records_by_class_name ( self , entity_name ) : records = { key : value for ( key , value ) in self . _registry . items ( ) if value . name == entity_name } if len ( records ) > 1 : raise ConfigurationError ( f'Entity with name {entity_name} has been registered twice. ' f'Please use fully qualified Entity name to specify the exact Entity.' ) elif len ( records ) == 1 : return next ( iter ( records . values ( ) ) ) else : raise AssertionError ( f'No Entity registered with name {entity_name}' ) | Fetch by Entity Name in values |
47,154 | def _get_entity_by_class ( self , entity_cls ) : entity_qualname = fully_qualified_name ( entity_cls ) if entity_qualname in self . _registry : return self . _registry [ entity_qualname ] else : return self . _find_entity_in_records_by_class_name ( entity_cls . __name__ ) | Fetch Entity record with Entity class details |
47,155 | def _get_entity_by_name ( self , entity_name ) : if entity_name in self . _registry : return self . _registry [ entity_name ] else : return self . _find_entity_in_records_by_class_name ( entity_name ) | Fetch Entity record with an Entity name |
47,156 | def _validate_entity_cls ( self , entity_cls ) : from protean . core . entity import Entity if not issubclass ( entity_cls , Entity ) : raise AssertionError ( f'Entity {entity_cls.__name__} must be subclass of `Entity`' ) if entity_cls . meta_ . abstract is True : raise NotSupportedError ( f'{entity_cls.__name__} class has been marked abstract' f' and cannot be instantiated' ) | Validate that Entity is a valid class |
47,157 | def get_model ( self , entity_cls ) : entity_record = self . _get_entity_by_class ( entity_cls ) model_cls = None if entity_record . model_cls : model_cls = entity_record . model_cls else : provider = self . get_provider ( entity_record . provider_name ) baked_model_cls = provider . get_model ( entity_record . entity_cls ) new_entity_record = entity_record . _replace ( model_cls = baked_model_cls ) self . _registry [ entity_record . qualname ] = new_entity_record model_cls = baked_model_cls return model_cls | Retrieve Model class connected to Entity |
47,158 | def get_repository ( self , entity_cls ) : entity_record = self . _get_entity_by_class ( entity_cls ) provider = self . get_provider ( entity_record . provider_name ) return provider . get_repository ( entity_record . entity_cls ) | Retrieve a Repository for the Model with a live connection |
47,159 | async def set_heater_values ( heater_data , heater ) : heater . current_temp = heater_data . get ( 'currentTemp' ) heater . device_status = heater_data . get ( 'deviceStatus' ) heater . available = heater . device_status == 0 heater . name = heater_data . get ( 'deviceName' ) heater . fan_status = heater_data . get ( 'fanStatus' ) heater . is_holiday = heater_data . get ( 'isHoliday' ) if heater . room is None : heater . can_change_temp = heater_data . get ( 'canChangeTemp' ) if heater . independent_device or heater . is_holiday == 1 : heater . set_temp = heater_data . get ( 'holidayTemp' ) elif heater . room is not None : if heater . room . current_mode == 1 : heater . set_temp = heater . room . comfort_temp elif heater . room . current_mode == 2 : heater . set_temp = heater . room . sleep_temp elif heater . room . current_mode == 3 : heater . set_temp = heater . room . away_temp heater . power_status = heater_data . get ( 'powerStatus' ) heater . tibber_control = heater_data . get ( 'tibberControl' ) heater . open_window = heater_data . get ( 'open_window' , heater_data . get ( 'open' ) ) heater . is_heating = heater_data . get ( 'heatStatus' , heater_data . get ( 'heaterFlag' ) ) try : heater . sub_domain = int ( float ( heater_data . get ( 'subDomain' , heater_data . get ( 'subDomainId' , heater . sub_domain ) ) ) ) except ValueError : pass | Set heater values from heater data |
47,160 | async def connect ( self , retry = 2 ) : url = API_ENDPOINT_1 + 'login' headers = { "Content-Type" : "application/x-zc-object" , "Connection" : "Keep-Alive" , "X-Zc-Major-Domain" : "seanywell" , "X-Zc-Msg-Name" : "millService" , "X-Zc-Sub-Domain" : "milltype" , "X-Zc-Seq-Id" : "1" , "X-Zc-Version" : "1" , } payload = { "account" : self . _username , "password" : self . _password } try : with async_timeout . timeout ( self . _timeout ) : resp = await self . websession . post ( url , data = json . dumps ( payload ) , headers = headers ) except ( asyncio . TimeoutError , aiohttp . ClientError ) : if retry < 1 : _LOGGER . error ( "Error connecting to Mill" , exc_info = True ) return False return await self . connect ( retry - 1 ) result = await resp . text ( ) if '"errorCode":3504' in result : _LOGGER . error ( 'Wrong password' ) return False if '"errorCode":3501' in result : _LOGGER . error ( 'Account does not exist' ) return False data = json . loads ( result ) token = data . get ( 'token' ) if token is None : _LOGGER . error ( 'No token' ) return False user_id = data . get ( 'userId' ) if user_id is None : _LOGGER . error ( 'No user id' ) return False self . _token = token self . _user_id = user_id return True | Connect to Mill . |
47,161 | async def set_room_temperatures_by_name ( self , room_name , sleep_temp = None , comfort_temp = None , away_temp = None ) : if sleep_temp is None and comfort_temp is None and away_temp is None : return for room_id , _room in self . rooms . items ( ) : if _room . name == room_name : await self . set_room_temperatures ( room_id , sleep_temp , comfort_temp , away_temp ) return _LOGGER . error ( "Could not find a room with name %s" , room_name ) | Set room temps by name . |
47,162 | async def set_room_temperatures ( self , room_id , sleep_temp = None , comfort_temp = None , away_temp = None ) : if sleep_temp is None and comfort_temp is None and away_temp is None : return room = self . rooms . get ( room_id ) if room is None : _LOGGER . error ( "No such device" ) return room . sleep_temp = sleep_temp if sleep_temp else room . sleep_temp room . away_temp = away_temp if away_temp else room . away_temp room . comfort_temp = comfort_temp if comfort_temp else room . comfort_temp payload = { "roomId" : room_id , "sleepTemp" : room . sleep_temp , "comfortTemp" : room . comfort_temp , "awayTemp" : room . away_temp , "homeType" : 0 } await self . request ( "changeRoomModeTempInfo" , payload ) self . rooms [ room_id ] = room | Set room temps . |
47,163 | async def throttle_update_heaters ( self ) : if ( self . _throttle_time is not None and dt . datetime . now ( ) - self . _throttle_time < MIN_TIME_BETWEEN_UPDATES ) : return self . _throttle_time = dt . datetime . now ( ) await self . update_heaters ( ) | Throttle update device . |
47,164 | async def throttle_update_all_heaters ( self ) : if ( self . _throttle_all_time is not None and dt . datetime . now ( ) - self . _throttle_all_time < MIN_TIME_BETWEEN_UPDATES ) : return self . _throttle_all_time = dt . datetime . now ( ) await self . find_all_heaters ( ) | Throttle update all devices and rooms . |
47,165 | async def set_heater_temp ( self , device_id , set_temp ) : payload = { "homeType" : 0 , "timeZoneNum" : "+02:00" , "deviceId" : device_id , "value" : int ( set_temp ) , "key" : "holidayTemp" } await self . request ( "changeDeviceInfo" , payload ) | Set heater temp . |
47,166 | def _clone ( self ) : clone = self . __class__ ( self . _entity_cls , criteria = self . _criteria , offset = self . _offset , limit = self . _limit , order_by = self . _order_by ) return clone | Return a copy of the current QuerySet . |
47,167 | def _add_q ( self , q_object ) : self . _criteria = self . _criteria . _combine ( q_object , q_object . connector ) | Add a Q - object to the current filter . |
47,168 | def limit ( self , limit ) : clone = self . _clone ( ) if isinstance ( limit , int ) : clone . _limit = limit return clone | Limit number of records |
47,169 | def offset ( self , offset ) : clone = self . _clone ( ) if isinstance ( offset , int ) : clone . _offset = offset return clone | Fetch results after offset value |
47,170 | def order_by ( self , order_by : Union [ set , str ] ) : clone = self . _clone ( ) if isinstance ( order_by , str ) : order_by = { order_by } clone . _order_by = clone . _order_by . union ( order_by ) return clone | Update order_by setting for filter set |
47,171 | def all ( self ) : logger . debug ( f'Query `{self.__class__.__name__}` objects with filters {self}' ) self . _result_cache = None model_cls = repo_factory . get_model ( self . _entity_cls ) repository = repo_factory . get_repository ( self . _entity_cls ) order_by = self . _entity_cls . meta_ . order_by if not self . _order_by else self . _order_by results = repository . filter ( self . _criteria , self . _offset , self . _limit , order_by ) entity_items = [ ] for item in results . items : entity = model_cls . to_entity ( item ) entity . state_ . mark_retrieved ( ) entity_items . append ( entity ) results . items = entity_items self . _result_cache = results return results | Primary method to fetch data based on filters |
47,172 | def raw ( self , query : Any , data : Any = None ) : logger . debug ( f'Query `{self.__class__.__name__}` objects with raw query {query}' ) self . _result_cache = None model_cls = repo_factory . get_model ( self . _entity_cls ) repository = repo_factory . get_repository ( self . _entity_cls ) try : results = repository . raw ( query , data ) entity_items = [ ] for item in results . items : entity = model_cls . to_entity ( item ) entity . state_ . mark_retrieved ( ) entity_items . append ( entity ) results . items = entity_items self . _result_cache = results except Exception : raise return results | Runs raw query directly on the database and returns Entity objects |
47,173 | def delete ( self ) : deleted_item_count = 0 try : items = self . all ( ) for item in items : item . delete ( ) deleted_item_count += 1 except Exception : raise return deleted_item_count | Deletes matching objects from the Repository |
47,174 | def delete_all ( self , * args , ** kwargs ) : deleted_item_count = 0 repository = repo_factory . get_repository ( self . _entity_cls ) try : deleted_item_count = repository . delete_all ( self . _criteria ) except Exception : raise return deleted_item_count | Deletes objects that match a set of conditions supplied . |
47,175 | def total ( self ) : if self . _result_cache : return self . _result_cache . total return self . all ( ) . total | Return the total number of records |
47,176 | def items ( self ) : if self . _result_cache : return self . _result_cache . items return self . all ( ) . items | Return result values |
47,177 | def first ( self ) : if self . _result_cache : return self . _result_cache . first return self . all ( ) . first | Return the first result |
47,178 | def has_next ( self ) : if self . _result_cache : return self . _result_cache . has_next return self . all ( ) . has_next | Return True if there are more values present |
47,179 | def has_prev ( self ) : if self . _result_cache : return self . _result_cache . has_prev return self . all ( ) . has_prev | Return True if there are previous values present |
47,180 | def value ( self ) : if isinstance ( self . code , Status ) : code = self . code . value else : code = self . code return { 'code' : code , 'errors' : self . errors } | Utility method to retrieve Response Object information |
47,181 | def build_response ( cls , code = Status . SYSTEM_ERROR , errors = None ) : errors = [ errors ] if not isinstance ( errors , list ) else errors return cls ( code , errors ) | Utility method to build a new Resource Error object . Can be used to build all kinds of error messages . |
47,182 | def build_from_invalid_request ( cls , invalid_request_object ) : errors = [ { err [ 'parameter' ] : err [ 'message' ] } for err in invalid_request_object . errors ] return cls . build_response ( Status . UNPROCESSABLE_ENTITY , errors ) | Utility method to build a new Error object from parameters . Typically used to build HTTP 422 error response . |
47,183 | def build_not_found ( cls , errors = None ) : errors = [ errors ] if not isinstance ( errors , list ) else errors return cls ( Status . NOT_FOUND , errors ) | Utility method to build a HTTP 404 Resource Error response |
47,184 | def build_system_error ( cls , errors = None ) : errors = [ errors ] if not isinstance ( errors , list ) else errors return cls ( Status . SYSTEM_ERROR , errors ) | Utility method to build a HTTP 500 System Error response |
47,185 | def build_parameters_error ( cls , errors = None ) : errors = [ errors ] if not isinstance ( errors , list ) else errors return cls ( Status . PARAMETERS_ERROR , errors ) | Utility method to build a HTTP 400 Parameter Error response |
47,186 | def build_unprocessable_error ( cls , errors = None ) : errors = [ errors ] if not isinstance ( errors , list ) else errors return cls ( Status . UNPROCESSABLE_ENTITY , errors ) | Utility method to build a HTTP 422 Parameter Error object |
47,187 | def oauth_flow ( s , oauth_url , username = None , password = None , sandbox = False ) : r = s . get ( oauth_url ) if r . status_code >= 300 : raise RuntimeError ( r . text ) params = urlparse . parse_qs ( urlparse . urlparse ( r . url ) . query ) data = { "un" : username , "width" : 2560 , "height" : 1440 , "hasRememberUn" : True , "startURL" : params [ 'startURL' ] , "loginURL" : "" , "loginType" : 6 , "useSecure" : True , "local" : "" , "lt" : "OAUTH" , "qs" : "r=https%3A%2F%2Flocalhost%3A8443%2Fsalesforce%2F21" , "locale" : "" , "oauth_token" : "" , "oauth_callback" : "" , "login" : "" , "serverid" : "" , "display" : "popup" , "username" : username , "pw" : password , "Login" : "" } base = "https://login.salesforce.com" if not sandbox else "https://test.salesforce.com" r2 = s . post ( base , data ) m = re . search ( "window.location.href\s*='(.[^']+)'" , r2 . text ) assert m is not None , "Couldn't find location.href expression in page %s (Username or password is wrong)" % r2 . url u3 = "https://" + urlparse . urlparse ( r2 . url ) . hostname + m . group ( 1 ) r3 = s . get ( u3 ) m = re . search ( "window.location.href\s*='(.[^']+)'" , r3 . text ) assert m is not None , "Couldn't find location.href expression in page %s:\n%s" % ( r3 . url , r3 . text ) return m . group ( 1 ) | s should be a requests session |
47,188 | def make_crossroad_router ( source , drain = False ) : sink_observer = None def on_sink_subscribe ( observer ) : nonlocal sink_observer sink_observer = observer def dispose ( ) : nonlocal sink_observer sink_observer = None return dispose def route_crossroad ( request ) : def on_response_subscribe ( observer ) : def on_next_source ( i ) : if type ( i ) is cyclotron . Drain : observer . on_completed ( ) else : observer . on_next ( i ) source_disposable = source . subscribe ( on_next = on_next_source , on_error = lambda e : observer . on_error ( e ) , on_completed = lambda : observer . on_completed ( ) ) def on_next_request ( i ) : if sink_observer is not None : sink_observer . on_next ( i ) def on_request_completed ( ) : if sink_observer is not None : if drain is True : sink_observer . on_next ( cyclotron . Drain ( ) ) else : sink_observer . on_completed ( ) request_disposable = request . subscribe ( on_next = on_next_request , on_error = observer . on_error , on_completed = on_request_completed ) def dispose ( ) : source_disposable . dispose ( ) request_disposable . dispose ( ) return dispose return Observable . create ( on_response_subscribe ) return Observable . create ( on_sink_subscribe ) , route_crossroad | legacy crossroad implementation . deprecated |
47,189 | def make_error_router ( ) : sink_observer = None def on_subscribe ( observer ) : nonlocal sink_observer sink_observer = observer def dispose ( ) : nonlocal sink_observer sink_observer = None return dispose def route_error ( obs , convert ) : def catch_error ( e ) : sink_observer . on_next ( convert ( e ) ) return Observable . empty ( ) return obs . catch_exception ( catch_error ) def catch_or_flat_map ( source , error_map , source_map = lambda i : i ) : return source . flat_map ( lambda i : route_error ( source_map ( i ) , error_map ) ) return Observable . create ( on_subscribe ) , catch_or_flat_map | Creates an error router |
47,190 | def wipe_db ( self ) : logger . warning ( "Wiping the whole database" ) self . client . drop_database ( self . db_name ) logger . debug ( "Database wiped" ) | Wipe the whole database |
47,191 | def check_indexes ( self ) : for collection_name in INDEXES : existing_indexes = self . indexes ( collection_name ) indexes = INDEXES [ collection_name ] for index in indexes : index_name = index . document . get ( 'name' ) if not index_name in existing_indexes : logger . warning ( "Index {0} missing. Run command `loqusdb index`" . format ( index_name ) ) return logger . info ( "All indexes exists" ) | Check if the indexes exists |
47,192 | def ensure_indexes ( self ) : for collection_name in INDEXES : existing_indexes = self . indexes ( collection_name ) indexes = INDEXES [ collection_name ] for index in indexes : index_name = index . document . get ( 'name' ) if index_name in existing_indexes : logger . debug ( "Index exists: %s" % index_name ) self . db [ collection_name ] . drop_index ( index_name ) logger . info ( "creating indexes for collection {0}: {1}" . format ( collection_name , ', ' . join ( [ index . document . get ( 'name' ) for index in indexes ] ) , ) ) self . db [ collection_name ] . create_indexes ( indexes ) | Update the indexes |
47,193 | def _create_m_objective ( w , X ) : clusters , cells = w . shape genes = X . shape [ 0 ] w_sum = w . sum ( 1 ) def objective ( m ) : m = m . reshape ( ( X . shape [ 0 ] , w . shape [ 0 ] ) ) d = m . dot ( w ) + eps temp = X / d w2 = w . dot ( temp . T ) deriv = w_sum - w2 . T return np . sum ( d - X * np . log ( d ) ) / genes , deriv . flatten ( ) / genes return objective | Creates an objective function and its derivative for M given W and X |
47,194 | def initialize_from_assignments ( assignments , k , max_assign_weight = 0.75 ) : cells = len ( assignments ) init_W = np . zeros ( ( k , cells ) ) for i , a in enumerate ( assignments ) : init_W [ a , i ] = max_assign_weight for a2 in range ( k ) : if a2 != a : init_W [ a2 , i ] = ( 1 - max_assign_weight ) / ( k - 1 ) return init_W / init_W . sum ( 0 ) | Creates a weight initialization matrix from Poisson clustering assignments . |
47,195 | def initialize_means ( data , clusters , k ) : init_w = np . zeros ( ( data . shape [ 0 ] , k ) ) if sparse . issparse ( data ) : for i in range ( k ) : if data [ : , clusters == i ] . shape [ 1 ] == 0 : point = np . random . randint ( 0 , data . shape [ 1 ] ) init_w [ : , i ] = data [ : , point ] . toarray ( ) . flatten ( ) else : init_w [ : , i ] = np . array ( data [ : , clusters == i ] . mean ( 1 ) ) . flatten ( ) + eps else : for i in range ( k ) : if data [ : , clusters == i ] . shape [ 1 ] == 0 : point = np . random . randint ( 0 , data . shape [ 1 ] ) init_w [ : , i ] = data [ : , point ] . flatten ( ) else : init_w [ : , i ] = data [ : , clusters == i ] . mean ( 1 ) + eps return init_w | Initializes the M matrix given the data and a set of cluster labels . Cluster centers are set to the mean of each cluster . |
47,196 | def initialize_weights_nn ( data , means , lognorm = True ) : genes , cells = data . shape k = means . shape [ 1 ] if lognorm : data = log1p ( cell_normalize ( data ) ) for i in range ( cells ) : for j in range ( k ) : pass | Initializes the weights with a nearest - neighbor approach using the means . |
47,197 | def initialize_means_weights ( data , clusters , init_means = None , init_weights = None , initialization = 'tsvd' , max_assign_weight = 0.75 ) : genes , cells = data . shape if init_means is None : if init_weights is not None : if len ( init_weights . shape ) == 1 : means = initialize_means ( data , init_weights , clusters ) else : means = initialize_means ( data , init_weights . argmax ( 0 ) , clusters , max_assign_weight = max_assign_weight ) elif initialization == 'cluster' : assignments , means = poisson_cluster ( data , clusters ) if init_weights is None : init_weights = initialize_from_assignments ( assignments , clusters , max_assign_weight = max_assign_weight ) elif initialization == 'kmpp' : means , assignments = kmeans_pp ( data , clusters ) elif initialization == 'km' : km = KMeans ( clusters ) assignments = km . fit_predict ( log1p ( cell_normalize ( data ) ) . T ) init_weights = initialize_from_assignments ( assignments , clusters , max_assign_weight ) means = initialize_means ( data , assignments , clusters ) elif initialization == 'tsvd' : n_components = min ( 50 , genes - 1 ) km = KMeans ( clusters ) U , Sigma , VT = randomized_svd ( log1p ( cell_normalize ( data ) ) . T , n_components ) data_reduced = U * Sigma assignments = km . fit_predict ( data_reduced ) init_weights = initialize_from_assignments ( assignments , clusters , max_assign_weight ) means = initialize_means ( data , assignments , clusters ) elif initialization == 'random' or initialization == 'rand' : selected_cells = np . random . choice ( range ( cells ) , size = clusters , replace = False ) means = data [ : , selected_cells ] if sparse . issparse ( means ) : means = means . toarray ( ) else : means = init_means . copy ( ) means = means . astype ( float ) if init_weights is None : if init_means is not None : if initialization == 'cluster' : assignments , means = poisson_cluster ( data , clusters , init = init_means , max_iters = 1 ) w_init = initialize_from_assignments ( assignments , clusters , max_assign_weight ) elif initialization == 'km' : km = KMeans ( clusters , init = log1p ( init_means . T ) , max_iter = 1 ) assignments = km . fit_predict ( log1p ( cell_normalize ( data ) ) . T ) w_init = initialize_from_assignments ( assignments , clusters , max_assign_weight ) else : w_init = np . random . random ( ( clusters , cells ) ) w_init = w_init / w_init . sum ( 0 ) else : w_init = np . random . random ( ( clusters , cells ) ) w_init = w_init / w_init . sum ( 0 ) else : if len ( init_weights . shape ) == 1 : init_weights = initialize_from_assignments ( init_weights , clusters , max_assign_weight ) w_init = init_weights . copy ( ) return means , w_init | Generates initial means and weights for state estimation .
47,198 | def update_m ( data , old_M , old_W , selected_genes , disp = False , inner_max_iters = 100 , parallel = True , threads = 4 , write_progress_file = None , tol = 0.0 , regularization = 0.0 , ** kwargs ) : genes , cells = data . shape k = old_M . shape [ 1 ] non_selected_genes = [ x for x in range ( genes ) if x not in set ( selected_genes ) ] new_M = np . zeros ( ( genes , k ) ) new_M [ selected_genes , : ] = old_M if disp : print ( 'computing initial guess for M by data*W.T' ) new_M_non_selected = data [ non_selected_genes , : ] * sparse . csc_matrix ( old_W . T ) new_M [ non_selected_genes , : ] = new_M_non_selected . toarray ( ) X = data . astype ( float ) XT = X . T is_sparse = False if sparse . issparse ( X ) : is_sparse = True update_fn = sparse_nolips_update_w X = sparse . csc_matrix ( X ) XT = sparse . csc_matrix ( XT ) if parallel : update_fn = parallel_sparse_nolips_update_w Xsum = np . asarray ( X . sum ( 0 ) ) . flatten ( ) Xsum_m = np . asarray ( X . sum ( 1 ) ) . flatten ( ) method = 'NoLips' objective_fn = _call_sparse_obj else : objective_fn = objective update_fn = nolips_update_w Xsum = X . sum ( 0 ) Xsum_m = X . sum ( 1 ) if method == 'NoLips' : is_sparse = True X = sparse . csc_matrix ( X ) XT = sparse . csc_matrix ( XT ) update_fn = sparse_nolips_update_w if parallel : update_fn = parallel_sparse_nolips_update_w objective_fn = _call_sparse_obj if disp : print ( 'starting estimating M' ) new_M = _estimate_w ( XT , new_M . T , old_W . T , Xsum_m , update_fn , objective_fn , is_sparse , parallel , threads , method , tol , disp , inner_max_iters , 'M' , regularization ) if write_progress_file is not None : progress = open ( write_progress_file , 'w' ) progress . write ( '0' ) progress . close ( ) return new_M . T | This returns a new M matrix that contains all genes given an M that was created from running state estimation with a subset of genes . |
47,199 | def setup ( hass , config ) : from pyvesync . vesync import VeSync conf = config [ DOMAIN ] manager = VeSync ( conf . get ( CONF_USERNAME ) , conf . get ( CONF_PASSWORD ) , time_zone = conf . get ( CONF_TIME_ZONE ) ) if not manager . login ( ) : _LOGGER . error ( "Unable to login to VeSync" ) return manager . update ( ) hass . data [ DOMAIN ] = { 'manager' : manager } discovery . load_platform ( hass , 'switch' , DOMAIN , { } , config ) return True | Set up the VeSync component . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.