idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
47,600 | def add_func ( self , transmute_func , transmute_context ) : swagger_path = transmute_func . get_swagger_path ( transmute_context ) for p in transmute_func . paths : self . add_path ( p , swagger_path ) | add a transmute function s swagger definition to the spec |
47,601 | def add_path ( self , path , path_item ) : if path not in self . _swagger : self . _swagger [ path ] = path_item else : for method , definition in path_item . items ( ) : if definition is not None : setattr ( self . _swagger [ path ] , method , definition ) | for a given path add the path items . |
47,602 | def swagger_definition ( self , base_path = None , ** kwargs ) : return Swagger ( { "info" : Info ( { key : kwargs . get ( key , self . DEFAULT_INFO . get ( key ) ) for key in Info . fields . keys ( ) if key in kwargs or key in self . DEFAULT_INFO } ) , "paths" : self . paths , "swagger" : "2.0" , "basePath" : base_path , } ) . to_primitive ( ) | return a valid swagger spec with the values passed . |
47,603 | def _call_spy ( self , * args , ** kwargs ) : if self . _spy_side_effect : if self . _spy_side_effect_args or self . _spy_side_effect_kwargs : self . _spy_side_effect ( * self . _spy_side_effect_args , ** self . _spy_side_effect_kwargs ) else : self . _spy_side_effect ( * args , ** kwargs ) return_value = self . _stub . call_orig ( * args , ** kwargs ) if self . _spy_return : self . _spy_return ( return_value ) return return_value | Wrapper to call the spied - on function . Operates similar to Expectation . test . |
47,604 | def side_effect ( self , func , * args , ** kwargs ) : self . _spy_side_effect = func self . _spy_side_effect_args = args self . _spy_side_effect_kwargs = kwargs return self | Wrap side effects for spies . |
47,605 | def guess_rank ( M_E ) : n , m = M_E . shape epsilon = np . count_nonzero ( M_E ) / np . sqrt ( m * n ) _ , S0 , _ = svds_descending ( M_E , min ( 100 , max ( M_E . shape ) - 1 ) ) S0 = np . diag ( S0 ) S1 = S0 [ : - 1 ] - S0 [ 1 : ] S1_ = S1 / np . mean ( S1 [ - 10 : ] ) r1 = 0 lam = 0.05 cost = [ None ] * len ( S1_ ) while r1 <= 0 : for idx in range ( len ( S1_ ) ) : cost [ idx ] = lam * max ( S1_ [ idx : ] ) + idx i2 = np . argmin ( cost ) r1 = np . max ( i2 ) lam += 0.05 cost = [ None ] * ( len ( S0 ) - 1 ) for idx in range ( len ( S0 ) - 1 ) : cost [ idx ] = ( S0 [ idx + 1 ] + np . sqrt ( idx * epsilon ) * S0 [ 0 ] / epsilon ) / S0 [ idx ] i2 = np . argmin ( cost ) r2 = np . max ( i2 + 1 ) r = max ( [ r1 , r2 ] ) return r | Guess the rank of the incomplete matrix |
47,606 | def F_t ( X , Y , S , M_E , E , m0 , rho ) : r = X . shape [ 1 ] out1 = ( ( ( np . dot ( np . dot ( X , S ) , Y . T ) - M_E ) * E ) ** 2 ) . sum ( ) / 2 out2 = rho * G ( Y , m0 , r ) out3 = rho * G ( X , m0 , r ) return out1 + out2 + out3 | Compute the distortion |
47,607 | def getoptS ( X , Y , M_E , E ) : n , r = X . shape C = np . dot ( np . dot ( X . T , M_E ) , Y ) C = C . flatten ( ) A = np . zeros ( ( r * r , r * r ) ) for i in range ( r ) : for j in range ( r ) : ind = j * r + i temp = np . dot ( np . dot ( X . T , np . dot ( X [ : , i , None ] , Y [ : , j , None ] . T ) * E ) , Y ) A [ : , ind ] = temp . flatten ( ) S = np . linalg . solve ( A , C ) return np . reshape ( S , ( r , r ) ) . T | Find Sopt given X Y |
47,608 | def getoptT ( X , W , Y , Z , S , M_E , E , m0 , rho ) : iter_max = 20 norm2WZ = np . linalg . norm ( W , ord = 'fro' ) ** 2 + np . linalg . norm ( Z , ord = 'fro' ) ** 2 f = np . zeros ( iter_max + 1 ) f [ 0 ] = F_t ( X , Y , S , M_E , E , m0 , rho ) t = - 1e-1 for i in range ( iter_max ) : f [ i + 1 ] = F_t ( X + t * W , Y + t * Z , S , M_E , E , m0 , rho ) if f [ i + 1 ] - f [ 0 ] <= 0.5 * t * norm2WZ : return t t /= 2 return t | Perform line search |
47,609 | def MDS ( D , dim , method = 'simple' , theta = False ) : N = D . shape [ 0 ] if method == 'simple' : d1 = D [ 0 , : ] buf_ = np . broadcast_to ( d1 , D . shape ) + np . broadcast_to ( d1 [ : , np . newaxis ] , D . shape ) np . subtract ( D , buf_ , out = buf_ ) G = buf_ elif method == 'advanced' : s1T = np . zeros_like ( D ) s1T [ 0 , : ] = 1 np . subtract ( np . identity ( N ) , s1T , out = s1T ) G = np . dot ( np . dot ( s1T . T , D ) , s1T ) elif method == 'geometric' : J = np . identity ( N ) + np . full ( ( N , N ) , - 1.0 / float ( N ) ) G = np . dot ( np . dot ( J , D ) , J ) else : print ( 'Unknown method {} in MDS' . format ( method ) ) G *= - 0.5 factor , u = eigendecomp ( G , dim ) if ( theta ) : return theta_from_eigendecomp ( factor , u ) else : return x_from_eigendecomp ( factor , u , dim ) | recover points from euclidean distance matrix using classic MDS algorithm . |
47,610 | def superMDS ( X0 , N , d , ** kwargs ) : Om = kwargs . get ( 'Om' , None ) dm = kwargs . get ( 'dm' , None ) if Om is not None and dm is not None : KE = kwargs . get ( 'KE' , None ) if KE is not None : print ( 'superMDS: KE and Om, dm given. Continuing with Om, dm' ) factor , u = eigendecomp ( Om , d ) uhat = u [ : , : d ] lambdahat = np . diag ( factor [ : d ] ) diag_dm = np . diag ( dm ) Vhat = np . dot ( diag_dm , np . dot ( uhat , lambdahat ) ) elif Om is None or dm is None : KE = kwargs . get ( 'KE' , None ) if KE is None : raise NameError ( 'Either KE or Om and dm have to be given.' ) factor , u = eigendecomp ( KE , d ) lambda_ = np . diag ( factor ) Vhat = np . dot ( u , lambda_ ) [ : , : d ] C_inv = - np . eye ( N ) C_inv [ 0 , 0 ] = 1.0 C_inv [ : , 0 ] = 1.0 b = np . zeros ( ( C_inv . shape [ 1 ] , d ) ) b [ 0 , : ] = X0 b [ 1 : , : ] = Vhat [ : N - 1 , : ] Xhat = np . dot ( C_inv , b ) return Xhat , Vhat | Find the set of points from an edge kernel . |
47,611 | def iterativeEMDS ( X0 , N , d , C , b , max_it = 10 , print_out = False , ** kwargs ) : from pylocus . basics import mse , projection KE = kwargs . get ( 'KE' , None ) KE_projected = KE . copy ( ) d = len ( X0 ) for i in range ( max_it ) : KE_projected , cost , __ = projection ( KE_projected , C , b ) rank = np . linalg . matrix_rank ( KE_projected ) Xhat_KE , Vhat_KE = superMDS ( X0 , N , d , KE = KE_projected ) KE_projected = Vhat_KE . dot ( Vhat_KE . T ) error = mse ( C . dot ( KE_projected ) , b ) if ( print_out ) : print ( 'cost={:2.2e},error={:2.2e}, rank={}' . format ( cost , error , rank ) ) if cost < 1e-20 and error < 1e-20 and rank == d : if ( print_out ) : print ( 'converged after {} iterations' . format ( i ) ) return Xhat_KE , Vhat_KE print ( 'iterativeMDS did not converge!' ) return None , None | Find the set of points from an edge kernel with geometric constraints using iterative projection |
47,612 | def relaxedEMDS ( X0 , N , d , C , b , KE , print_out = False , lamda = 10 ) : E = C . shape [ 1 ] X = Variable ( ( E , E ) , PSD = True ) constraints = [ C [ i , : ] * X == b [ i ] for i in range ( C . shape [ 0 ] ) ] obj = Minimize ( trace ( X ) + lamda * norm ( KE - X ) ) prob = Problem ( obj , constraints ) try : total_cost = prob . solve ( solver = 'CVXOPT' , verbose = print_out ) except : try : print ( 'CVXOPT with default cholesky failed. Trying kktsolver...' ) total_cost = prob . solve ( solver = 'CVXOPT' , verbose = print_out , kktsolver = "robust" ) except : try : print ( 'CVXOPT with robust kktsovler failed. Trying SCS...' ) total_cost = prob . solve ( solver = 'SCS' , verbose = print_out ) except : print ( 'SCS and CVXOPT solver with default and kktsolver failed .' ) if print_out : print ( 'status:' , prob . status ) Xhat_KE , Vhat_KE = superMDS ( X0 , N , d , KE = X . value ) return Xhat_KE , Vhat_KE | Find the set of points from an edge kernel with geometric constraints using convex rank relaxation . |
47,613 | def signedMDS ( cdm , W = None ) : N = cdm . shape [ 0 ] D_sym = ( cdm - cdm . T ) D_sym /= 2 if W is None : x_est = np . mean ( D_sym , axis = 1 ) x_est -= np . min ( x_est ) return x_est W_sub = W [ 1 : , 1 : ] sum_W = np . sum ( W [ 1 : , : ] , axis = 1 ) A = np . eye ( N - 1 ) - W_sub . astype ( np . int ) / 1. / sum_W [ : , None ] d = ( np . sum ( D_sym [ 1 : , : ] * W [ 1 : , : ] , axis = 1 ) / 1. / sum_W ) x_est = np . linalg . lstsq ( A , d ) [ 0 ] x_est = np . r_ [ [ 0 ] , x_est ] x_est -= np . min ( x_est ) return x_est , A , np . linalg . pinv ( A ) | Find the set of points from a cdm . |
47,614 | def _stub_attr ( obj , attr_name ) : from . mock import Mock is_property = False if not inspect . isclass ( obj ) and not inspect . ismodule ( obj ) : attr = getattr ( obj . __class__ , attr_name , None ) if isinstance ( attr , property ) : is_property = True if not is_property : attr = getattr ( obj , attr_name ) if isinstance ( attr , Stub ) : return attr if isinstance ( attr , Mock ) : return stub ( attr . __call__ ) if isinstance ( attr , property ) : return StubProperty ( obj , attr_name ) if inspect . ismodule ( obj ) and isinstance ( attr , ( types . FunctionType , types . BuiltinFunctionType , types . BuiltinMethodType ) ) : return StubFunction ( obj , attr_name ) if inspect . isclass ( obj ) and isinstance ( attr , types . FunctionType ) : return StubUnboundMethod ( obj , attr_name ) if isinstance ( attr , types . MethodType ) : if getattr ( attr , 'im_self' , None ) is None : if hasattr ( attr , '__self__' ) : if attr . __self__ is not None : return StubMethod ( obj , attr_name ) if sys . version_info . major == 2 : return StubUnboundMethod ( attr ) else : return StubMethod ( obj , attr_name ) if isinstance ( attr , ( types . BuiltinFunctionType , types . BuiltinMethodType ) ) : return StubFunction ( obj , attr_name ) if type ( attr ) . __name__ == 'method-wrapper' : return StubMethodWrapper ( attr ) if type ( attr ) . __name__ == 'wrapper_descriptor' : return StubWrapperDescriptor ( obj , attr_name ) raise UnsupportedStub ( "can't stub %s(%s) of %s" , attr_name , type ( attr ) , obj ) | Stub an attribute of an object . Will return an existing stub if there already is one . |
47,615 | def _stub_obj ( obj ) : from . mock import Mock if isinstance ( obj , Stub ) : return obj if isinstance ( obj , Mock ) : return stub ( obj . __call__ ) if hasattr ( types , 'TypeType' ) and isinstance ( obj , types . TypeType ) : return StubNew ( obj ) elif hasattr ( __builtins__ , 'type' ) and isinstance ( obj , __builtins__ . type ) : return StubNew ( obj ) elif inspect . isclass ( obj ) : return StubNew ( obj ) if isinstance ( obj , types . MethodType ) : if getattr ( obj , 'im_self' , None ) is None : if hasattr ( obj , '__self__' ) : if obj . __self__ is not None : return StubMethod ( obj ) if sys . version_info . major == 2 : return StubUnboundMethod ( obj ) else : return StubMethod ( obj ) if type ( obj ) . __name__ == 'method-wrapper' : return StubMethodWrapper ( obj ) if type ( obj ) . __name__ == 'wrapper_descriptor' : raise UnsupportedStub ( "must call stub(obj,'%s') for slot wrapper on %s" , obj . __name__ , obj . __objclass__ . __name__ ) prop = obj if isinstance ( getattr ( obj , '__self__' , None ) , property ) : obj = prop . __self__ if isinstance ( obj , property ) : klass , attr = None , None for ref in gc . get_referrers ( obj ) : if klass and attr : break if isinstance ( ref , dict ) and ref . get ( 'prop' , None ) is obj : klass = getattr ( ref . get ( '__dict__' , None ) , '__objclass__' , None ) for name , val in getattr ( klass , '__dict__' , { } ) . items ( ) : if val is obj : attr = name break elif isinstance ( ref , type ) : for name in dir ( ref ) : if getattr ( ref , name ) == obj : klass = ref attr = name break if klass and attr : rval = stub ( klass , attr ) if prop != obj : return stub ( rval , prop . __name__ ) return rval if isinstance ( obj , ( types . FunctionType , types . BuiltinFunctionType , types . BuiltinMethodType ) ) and hasattr ( obj , '__module__' ) : return StubFunction ( obj ) raise UnsupportedStub ( "can't stub %s" , obj ) | Stub an object directly . |
47,616 | def unmet_expectations ( self ) : unmet = [ ] for exp in self . _expectations : if not exp . closed ( with_counts = True ) : unmet . append ( ExpectationNotSatisfied ( exp ) ) return unmet | Assert that all expectations on the stub have been met . |
47,617 | def teardown ( self ) : if not self . _torn : self . _expectations = [ ] self . _torn = True self . _teardown ( ) | Clean up all expectations and restore the original attribute of the mocked object . |
47,618 | def expect ( self ) : exp = Expectation ( self ) self . _expectations . append ( exp ) return exp | Add an expectation to this stub . Return the expectation . |
47,619 | def spy ( self ) : spy = Spy ( self ) self . _expectations . append ( spy ) return spy | Add a spy to this stub . Return the spy . |
47,620 | def _teardown ( self ) : if hasattr ( self . _obj , '__self__' ) and inspect . isclass ( self . _obj . __self__ ) and self . _obj . __self__ is self . _instance : setattr ( self . _instance , self . _attr , classmethod ( self . _obj . __func__ ) ) elif hasattr ( self . _obj , 'im_self' ) and inspect . isclass ( self . _obj . im_self ) and self . _obj . im_self is self . _instance : setattr ( self . _instance , self . _attr , classmethod ( self . _obj . im_func ) ) else : setattr ( self . _instance , self . _attr , self . _obj ) | Put the original method back in place . This will also handle the special case when it putting back a class method . |
47,621 | def _teardown ( self ) : if not self . _was_object_method : setattr ( self . _instance , self . _attr , self . _obj ) else : delattr ( self . _instance , self . _attr ) | Replace the original method . |
47,622 | def _teardown ( self ) : setattr ( self . _instance , self . _attr , staticmethod ( self . _new ) ) StubNew . _cache . pop ( self . _type ) | Overload so that we can clear out the cache after a test run . |
47,623 | def _setup ( self ) : self . log . info ( "Adding reader to prepare to receive." ) self . loop . add_reader ( self . dev . fd , self . read ) self . log . info ( "Flushing the RFXtrx buffer." ) self . flushSerialInput ( ) self . log . info ( "Writing the reset packet to the RFXtrx. (blocking)" ) yield from self . sendRESET ( ) self . log . info ( "Wating 0.4s" ) yield from asyncio . sleep ( 0.4 ) self . log . info ( "Write the status packet (blocking)" ) yield from self . sendSTATUS ( ) self . log . info ( "Adding mode packet to the write queue (blocking)" ) yield from self . sendMODE ( ) | Performs the RFXtrx initialisation protocol in a Future . |
47,624 | def do_callback ( self , pkt ) : callback , parser = self . get_callback_parser ( pkt ) if asyncio . iscoroutinefunction ( callback ) : self . loop . call_soon_threadsafe ( self . _do_async_callback , callback , parser ) else : self . loop . call_soon ( callback , parser ) | Add the callback to the event loop we use call soon because we just want it to be called at some point but don t care when particularly . |
47,625 | def read ( self ) : data = self . dev . read ( ) if len ( data ) == 0 : self . log . warning ( "READ : Nothing received" ) return if data == b'\x00' : self . log . warning ( "READ : Empty packet (Got \\x00)" ) return pkt = bytearray ( data ) data = self . dev . read ( pkt [ 0 ] ) pkt . extend ( bytearray ( data ) ) self . log . info ( "READ : %s" % self . format_packet ( pkt ) ) self . do_callback ( pkt ) return pkt | We have been called to read! As a consumer continue to read for the length of the packet and then pass to the callback . |
47,626 | def delete ( self , url , params = None ) : response = self . http . delete ( url , params = params , ** self . requests_params ) return self . process ( response ) | Executes an HTTP DELETE request for the given URL . |
47,627 | def get ( self , url , data = None ) : response = self . http . get ( url , headers = self . headers , params = data , ** self . requests_params ) return self . process ( response ) | Executes an HTTP GET request for the given URL . |
47,628 | def post ( self , url , body = None ) : response = self . http . post ( url , headers = self . headers , data = body , ** self . requests_params ) return self . process ( response ) | Executes an HTTP POST request for the given URL . |
47,629 | def put ( self , url , data = None , body = None ) : response = self . http . put ( url , headers = self . headers , data = body , params = data , ** self . requests_params ) return self . process ( response ) | Executes an HTTP PUT request for the given URL . |
47,630 | def process ( self , response ) : try : code = response . status_code if code == 204 : body = None elif code == 402 : body = { "message" : "Payment Required" , "status" : "error" } else : body = response . json ( ) return Response ( code , body , response . content , response ) except ValueError : raise ZencoderResponseError ( response , response . content ) | Returns HTTP backend agnostic Response data . |
47,631 | def create ( self , email , tos = 1 , options = None ) : data = { 'email' : email , 'terms_of_service' : str ( tos ) } if options : data . update ( options ) return self . post ( self . base_url , body = json . dumps ( data ) ) | Creates an account with Zencoder no API Key necessary . |
47,632 | def list ( self , page = 1 , per_page = 50 ) : data = { "page" : page , "per_page" : per_page } return self . get ( self . base_url , data = data ) | Lists Jobs . |
47,633 | def resubmit ( self , job_id ) : url = self . base_url + '/%s/resubmit' % str ( job_id ) return self . put ( url ) | Resubmits the given job_id . |
47,634 | def cancel ( self , job_id ) : if self . version == 'v1' : verb = self . get else : verb = self . put url = self . base_url + '/%s/cancel' % str ( job_id ) return verb ( url ) | Cancels the given job_id . |
47,635 | def minutes ( self , start_date = None , end_date = None , grouping = None ) : data = self . __format ( start_date , end_date ) url = self . base_url + '/minutes' return self . get ( url , data = data ) | Gets a detailed Report of encoded minutes and billable minutes for a date range . |
47,636 | def route ( app_or_blueprint , context = default_context , ** kwargs ) : def decorator ( fn ) : fn = describe ( ** kwargs ) ( fn ) transmute_func = TransmuteFunction ( fn ) routes , handler = create_routes_and_handler ( transmute_func , context ) for r in routes : if not hasattr ( app_or_blueprint , SWAGGER_ATTR_NAME ) : setattr ( app_or_blueprint , SWAGGER_ATTR_NAME , SwaggerSpec ( ) ) swagger_obj = getattr ( app_or_blueprint , SWAGGER_ATTR_NAME ) swagger_obj . add_func ( transmute_func , context ) app_or_blueprint . route ( r , methods = transmute_func . methods ) ( handler ) return handler return decorator | attach a transmute route . |
47,637 | def stub ( self , obj , attr = None ) : s = stub ( obj , attr ) if s not in self . _stubs : self . _stubs . append ( s ) return s | Stub an object . If attr is not None will attempt to stub that attribute on the object . Only required for modules and other rare cases where we can t determine the binding from the object . |
47,638 | def mock ( self , obj = None , attr = None , ** kwargs ) : rval = Mock ( ** kwargs ) if obj is not None and attr is not None : rval . _object = obj rval . _attr = attr if hasattr ( obj , attr ) : orig = getattr ( obj , attr ) self . _mocks . append ( ( obj , attr , orig ) ) setattr ( obj , attr , rval ) else : self . _mocks . append ( ( obj , attr ) ) setattr ( obj , attr , rval ) return rval | Return a mock object . |
47,639 | def from_validation_exception ( cls , exception , ** kwargs ) : errors = [ ] def flatten ( error , path = "" ) : if isinstance ( error , halogen . exceptions . ValidationError ) : if not path . endswith ( "/" ) : path += "/" if error . attr is not None : path += error . attr elif error . index is not None : path += six . text_type ( error . index ) for e in error . errors : flatten ( e , path ) else : message = error if isinstance ( error , Exception ) : try : message = error . message except AttributeError : message = six . text_type ( error ) errors . append ( Error ( message = message , path = path ) ) flatten ( exception ) message = kwargs . pop ( "message" , "Validation error." ) return cls ( message = message , errors = sorted ( errors , key = lambda error : error . path or "" ) , ** kwargs ) | Create an error from validation exception . |
47,640 | def add_transmute_route ( self , * args ) : if len ( args ) == 1 : fn = args [ 0 ] elif len ( args ) == 3 : methods , paths , fn = args fn = describe ( methods = methods , paths = paths ) ( fn ) else : raise ValueError ( "expected one or three arguments for add_transmute_route!" ) add_route ( self . _app , fn , context = self . _transmute_context ) | two formats are accepted for transmute routes . One allows for a more traditional aiohttp syntax while the other allows for a flask - like variant . |
47,641 | def can_handle ( self , cls ) : f = self . _cattrs_converter . _structure_func . dispatch ( cls ) return f != self . _cattrs_converter . _structure_default | this will theoretically be compatible with everything as cattrs can handle many basic types as well . |
47,642 | def load ( self , model , value ) : try : return self . _cattrs_converter . structure ( value , model ) except ( ValueError , TypeError ) as e : raise SerializationException ( str ( e ) ) | Converts unstructured data into structured data recursively . |
47,643 | def detect_encoding ( fp , default = None ) : init_pos = fp . tell ( ) try : sample = fp . read ( current_app . config . get ( 'PREVIEWER_CHARDET_BYTES' , 1024 ) ) result = cchardet . detect ( sample ) threshold = current_app . config . get ( 'PREVIEWER_CHARDET_CONFIDENCE' , 0.9 ) if result . get ( 'confidence' , 0 ) > threshold : return result . get ( 'encoding' , default ) else : return default except Exception : current_app . logger . warning ( 'Encoding detection failed.' , exc_info = True ) return default finally : fp . seek ( init_pos ) | Detect the cahracter encoding of a file . |
47,644 | def deserialize ( self , value , ** kwargs ) : for validator in self . validators : validator . validate ( value , ** kwargs ) return value | Deserialization of value . |
47,645 | def is_type ( value ) : if isinstance ( value , type ) : return issubclass ( value , Type ) return isinstance ( value , Type ) | Determine if value is an instance or subclass of the class Type . |
47,646 | def serialize ( self , value , ** kwargs ) : return [ self . item_type . serialize ( val , ** kwargs ) for val in value ] | Serialize every item of the list . |
47,647 | def deserialize ( self , value , ** kwargs ) : if self . allow_scalar and not isinstance ( value , ( list , tuple ) ) : value = [ value ] value = super ( List , self ) . deserialize ( value ) result = [ ] errors = [ ] for index , val in enumerate ( value ) : try : result . append ( self . item_type . deserialize ( val , ** kwargs ) ) except ValidationError as exc : exc . index = index errors . append ( exc ) if errors : raise ValidationError ( errors ) return result | Deserialize every item of the list . |
47,648 | def format_as_utc ( self , value ) : if isinstance ( value , datetime . datetime ) : if value . tzinfo is not None : value = value . astimezone ( pytz . UTC ) value = value . replace ( microsecond = 0 ) return value . isoformat ( ) . replace ( '+00:00' , 'Z' ) | Format UTC times . |
47,649 | def amount_object_to_dict ( self , amount ) : currency , amount = ( amount . as_quantized ( digits = 2 ) . as_tuple ( ) if not isinstance ( amount , dict ) else ( amount [ "currency" ] , amount [ "amount" ] ) ) if currency not in self . currencies : raise ValueError ( self . err_unknown_currency . format ( currency = currency ) ) return { "amount" : str ( amount ) , "currency" : str ( currency ) , } | Return the dictionary representation of an Amount object . |
47,650 | def deserialize ( self , value , ** kwargs ) : if value is None : return None if isinstance ( value , six . string_types ) : currency = value [ : 3 ] amount = value [ 3 : ] elif isinstance ( value , dict ) : if set ( value . keys ( ) ) != set ( ( "currency" , "amount" ) ) : raise ValueError ( "Amount object has to have currency and amount fields." ) amount = value [ "amount" ] currency = value [ "currency" ] else : raise ValueError ( "Value cannot be parsed to Amount." ) if currency not in self . currencies : raise ValueError ( self . err_unknown_currency . format ( currency = currency ) ) try : amount = decimal . Decimal ( amount ) . normalize ( ) except decimal . InvalidOperation : raise ValueError ( u"'{amount}' cannot be parsed to decimal." . format ( amount = amount ) ) if amount . as_tuple ( ) . exponent < - 2 : raise ValueError ( u"'{amount}' has more than 2 decimal places." . format ( amount = amount ) ) value = self . amount_class ( currency = currency , amount = amount ) return super ( Amount , self ) . deserialize ( value ) | Deserialize the amount . |
47,651 | def add_route ( app , fn , context = default_context ) : transmute_func = TransmuteFunction ( fn , args_not_from_request = [ "request" ] ) handler = create_handler ( transmute_func , context = context ) get_swagger_spec ( app ) . add_func ( transmute_func , context ) for p in transmute_func . paths : aiohttp_path = _convert_to_aiohttp_path ( p ) resource = app . router . add_resource ( aiohttp_path ) for method in transmute_func . methods : resource . add_route ( method , handler ) | a decorator that adds a transmute route to the application . |
47,652 | def preview ( pid , record , template = None , ** kwargs ) : fileobj = current_previewer . record_file_factory ( pid , record , request . view_args . get ( 'filename' , request . args . get ( 'filename' , type = str ) ) ) if not fileobj : abort ( 404 ) try : file_previewer = fileobj [ 'previewer' ] except KeyError : file_previewer = None fileobj = PreviewFile ( pid , record , fileobj ) for plugin in current_previewer . iter_previewers ( previewers = [ file_previewer ] if file_previewer else None ) : if plugin . can_preview ( fileobj ) : try : return plugin . preview ( fileobj ) except Exception : current_app . logger . warning ( ( 'Preview failed for {key}, in {pid_type}:{pid_value}' . format ( key = fileobj . file . key , pid_type = fileobj . pid . pid_type , pid_value = fileobj . pid . pid_value ) ) , exc_info = True ) return default . preview ( fileobj ) | Preview file for given record . |
47,653 | def fixtures ( ) : temp_path = os . path . join ( os . path . dirname ( __file__ ) , 'temp' ) demo_files_path = os . path . join ( os . path . dirname ( __file__ ) , 'demo_files' ) loc = Location ( name = 'local' , uri = temp_path , default = True ) db . session . add ( loc ) db . session . commit ( ) demo_files = ( 'markdown.md' , 'csvfile.csv' , 'zipfile.zip' , 'jsonfile.json' , 'xmlfile.xml' , 'notebook.ipynb' , 'jpgfile.jpg' , 'pngfile.png' , ) rec_uuid = uuid4 ( ) provider = RecordIdProvider . create ( object_type = 'rec' , object_uuid = rec_uuid ) data = { 'pid_value' : provider . pid . pid_value , } record = Record . create ( data , id_ = rec_uuid ) bucket = Bucket . create ( ) RecordsBuckets . create ( record = record . model , bucket = bucket ) for f in demo_files : with open ( os . path . join ( demo_files_path , f ) , 'rb' ) as fp : record . files [ f ] = fp record . files . flush ( ) record . commit ( ) db . session . commit ( ) | Command for working with test data . |
47,654 | def render ( file ) : with file . open ( ) as fp : encoding = detect_encoding ( fp , default = 'utf-8' ) result = mistune . markdown ( fp . read ( ) . decode ( encoding ) ) return result | Render HTML from Markdown file content . |
47,655 | def validate ( self , value ) : try : length = len ( value ) except TypeError : length = 0 if self . min_length is not None : min_length = self . min_length ( ) if callable ( self . min_length ) else self . min_length if length < min_length : raise exceptions . ValidationError ( self . min_err . format ( min_length ) ) if self . max_length is not None : max_length = self . max_length ( ) if callable ( self . max_length ) else self . max_length if length > max_length : raise exceptions . ValidationError ( self . max_err . format ( max_length ) ) | Validate the length of a list . |
47,656 | def __get_calls_from_parser ( proxy_parser , real_parser ) : __parsers [ proxy_parser ] = real_parser for method , safe , args , kwargs , proxy_subparser in proxy_parser . calls : args = ( __proxy_to_real_parser ( v ) for v in args ) kwargs = { k : __proxy_to_real_parser ( v ) for k , v in kwargs . items ( ) } real_subparser = getattr ( real_parser , method ) ( * args , ** kwargs ) if real_subparser is not None : __get_calls_from_parser ( proxy_subparser , real_subparser ) | This actually executes the calls registered in the ProxyArgumentParser . |
47,657 | def __proxy_to_real_parser ( value ) : if isinstance ( value , ProxyArgumentParser ) : return __parsers [ value ] elif any ( isinstance ( value , t ) for t in [ list , tuple ] ) : new_value = [ ] for subvalue in iter ( value ) : new_value . append ( __proxy_to_real_parser ( subvalue ) ) return new_value return value | This recursively converts ProxyArgumentParser instances to actual parsers . |
47,658 | def mse ( x , xhat ) : buf_ = x - xhat np . square ( buf_ , out = buf_ ) sum_ = np . sum ( buf_ ) sum_ /= x . size return sum_ | Calcualte mse between vector or matrix x and xhat |
47,659 | def low_rank_approximation ( A , r ) : try : u , s , v = np . linalg . svd ( A , full_matrices = False ) except np . linalg . LinAlgError as e : print ( 'Matrix:' , A ) print ( 'Matrix rank:' , np . linalg . matrix_rank ( A ) ) raise Ar = np . zeros ( ( len ( u ) , len ( v ) ) , dtype = u . dtype ) buf_ = np . empty_like ( Ar ) sc_vec_ = np . empty ( ( v . shape [ 1 ] , ) , dtype = v . dtype ) for i in range ( r ) : np . multiply ( v [ i ] , s [ i ] , out = sc_vec_ ) np . outer ( u [ : , i ] , sc_vec_ , out = buf_ ) Ar += buf_ return Ar | Returns approximation of A of rank r in least - squares sense . |
47,660 | def eigendecomp ( G , d ) : N = G . shape [ 0 ] lamda , u = np . linalg . eig ( G ) lamda = np . real ( lamda ) indices = np . argsort ( lamda ) [ : : - 1 ] lamda_sorted = lamda [ indices ] assert ( lamda_sorted [ : d ] > - 1e-10 ) . all ( ) , "{} not all positive!" . format ( lamda_sorted [ : d ] ) u = u [ : , indices ] factor = np . empty ( ( N , ) , dtype = lamda . dtype ) np . sqrt ( lamda_sorted [ : d ] , out = factor [ 0 : d ] ) factor [ d : ] = 0.0 return factor , np . real ( u ) | Computes sorted eigendecomposition of G . |
47,661 | def projection ( x , A , b ) : A_pseudoinv = pseudo_inverse ( A ) tmp_ = A . dot ( x ) tmp_ -= b x_hat = A_pseudoinv . dot ( tmp_ ) np . subtract ( x , x_hat , out = x_hat ) cost = mse ( x_hat , x ) A . dot ( x_hat , out = tmp_ ) constraints_error = mse ( tmp_ , b ) return x_hat , cost , constraints_error | Returns the vector xhat closest to x in 2 - norm satisfying A . xhat = b . |
47,662 | def build_comparators ( * values_or_types ) : comparators = [ ] for item in values_or_types : if isinstance ( item , Comparator ) : comparators . append ( item ) elif isinstance ( item , type ) : comparators . append ( Any ( IsA ( item ) , Is ( item ) ) ) else : comparators . append ( Equals ( item ) ) return comparators | All of the comparators that can be used for arguments . |
47,663 | def changelog ( build ) : build . packages . install ( "gitchangelog" ) changelog_text = subprocess . check_output ( [ "gitchangelog" , "HEAD...v0.2.9" ] ) with open ( os . path . join ( build . root , "CHANGELOG" ) , "wb+" ) as fh : fh . write ( changelog_text ) | create a changelog |
47,664 | def args ( self , * args , ** kwargs ) : self . _any_args = False self . _arguments_rule . set_args ( * args , ** kwargs ) return self | Creates a ArgumentsExpectationRule and adds it to the expectation |
47,665 | def return_value ( self ) : if self . _raises : if inspect . isclass ( self . _raises ) : raise self . _raises ( ) else : raise self . _raises else : if isinstance ( self . _returns , tuple ) : return tuple ( [ x . value if isinstance ( x , Variable ) else x for x in self . _returns ] ) return self . _returns . value if isinstance ( self . _returns , Variable ) else self . _returns | Returns the value for this expectation or raises the proper exception . |
def match(self, *args, **kwargs):
    """Check whether the given call arguments match this expectation."""
    # A wildcard expectation accepts any call signature.
    any_args = self._any_args
    return any_args or self._arguments_rule.validate(*args, **kwargs)
def record_file_factory(self):
    """Load the default record file factory.

    Prefers the factory from invenio-records-files when that package is
    installed; otherwise falls back to a no-op factory.
    """
    def fallback(pid, record, filename):
        # Without invenio-records-files there is nothing to resolve.
        return None

    try:
        get_distribution('invenio-records-files')
        from invenio_records_files.utils import record_file_factory
        default = record_file_factory
    except DistributionNotFound:
        default = fallback
    # NOTE(review): 'PREVIEWER_RECORD_FILE_FACOTRY' looks like a typo of
    # FACTORY, but it is a runtime config key used elsewhere — keep as-is.
    return load_or_import_from_config(
        'PREVIEWER_RECORD_FILE_FACOTRY', app=self.app, default=default,
    )
def register_previewer(self, name, previewer):
    """Register a previewer in the system.

    :param name: unique name for the previewer.
    :param previewer: previewer object; if it exposes
        ``previewable_extensions``, those are merged into the known set.
    :raises AssertionError: if a previewer with the same name is already
        registered.
    """
    # The original wrapped this assert in `if name in self.previewers:`,
    # which made the guard redundant — the assert condition is the guard.
    assert name not in self.previewers, \
        "Previewer with same name already registered"
    self.previewers[name] = previewer
    if hasattr(previewer, 'previewable_extensions'):
        self._previewable_extensions |= set(previewer.previewable_extensions)
def iter_previewers(self, previewers=None):
    """Yield registered previewers in preference order.

    Falls back to the PREVIEWER_PREFERENCE application config when no
    explicit list is given; unknown names are skipped.
    """
    # Load entry points lazily, exactly once.
    if self.entry_point_group is not None:
        self.load_entry_point_group(self.entry_point_group)
        self.entry_point_group = None
    if not previewers:
        previewers = self.app.config.get('PREVIEWER_PREFERENCE', [])
    for name in previewers:
        if name in self.previewers:
            yield self.previewers[name]
def add_codes(err_cls):
    """Add error codes to string messages via class attribute names.

    Returns an object whose attribute access looks the message up on
    ``err_cls`` and prefixes it with ``[CODE]``.
    """
    class ErrorsWithCodes(object):
        def __getattribute__(self, code):
            message = getattr(err_cls, code)
            return '[{code}] {msg}'.format(code=code, msg=message)

    return ErrorsWithCodes()
def is_package(name):
    """Check if a string maps to a package installed via pip.

    Comparison is case-insensitive and treats '-' and '_' as equivalent.
    """
    target = name.lower()
    installed = pkg_resources.working_set.by_key.keys()
    return any(
        package.lower().replace('-', '_') == target
        for package in installed
    )
def read_json(location):
    """Open and load JSON from a file path."""
    path = ensure_path(location)
    with path.open('r', encoding='utf8') as handle:
        return ujson.load(handle)
def print_table(data, title=None):
    """Print data in a simple left-aligned table format.

    ``data`` may be a dict or an iterable of (label, value) rows.
    """
    rows = list(data.items()) if isinstance(data, dict) else data
    # Column count is taken from the first row.
    row_template = ' {:<15}' * len(rows[0])
    table = '\n'.join(
        row_template.format(label, unicode_(value)) for label, value in rows
    )
    if title:
        # Bright-yellow ANSI escape for the title.
        print('\n \033[93m{}\033[0m'.format(title))
    print('\n{}\n'.format(table))
47,674 | def _wrap ( text , wrap_max = 80 , indent = 4 ) : indent = indent * ' ' wrap_width = wrap_max - len ( indent ) if isinstance ( text , Path ) : text = path2str ( text ) return textwrap . fill ( text , width = wrap_width , initial_indent = indent , subsequent_indent = indent , break_long_words = False , break_on_hyphens = False ) | Wrap text at given width using textwrap module . |
def normalize_string_keys(old):
    """Return a copy of the dict with byte-string keys decoded to unicode."""
    return {
        key.decode('utf8') if isinstance(key, bytes_) else key: value
        for key, value in old.items()
    }
def locale_escape(string, errors='replace'):
    """Mangle non-supported characters for savages with ascii terminals.

    Round-trips the string through the terminal's preferred encoding so
    that unrepresentable characters are replaced (per ``errors``).
    """
    encoding = locale.getpreferredencoding()
    # Decode with the *same* encoding used to encode. The original decoded
    # as 'utf8', which corrupts or raises on non-UTF-8 locales (e.g. cp1252).
    return string.encode(encoding, errors).decode(encoding, errors)
def parse_cmu(cmufh):
    """Parse an open file handle as a CMU pronouncing dictionary file.

    Returns a list of (word, phones) tuples; comment lines (starting
    with ';') are skipped and "(2)"-style alternate suffixes stripped.
    """
    pronunciations = []
    for raw_line in cmufh:
        line = raw_line.strip().decode('utf-8')
        if line.startswith(';'):
            continue
        word, phones = line.split(" ", 1)
        # Drop the alternate-pronunciation suffix, e.g. "WORD(2)" -> "word".
        pronunciations.append((word.split('(', 1)[0].lower(), phones))
    return pronunciations
def init_cmu(filehandle=None):
    """Initialize the module's pronunciation data (idempotent)."""
    global pronunciations, lookup, rhyme_lookup
    if pronunciations is not None:
        return  # already initialized
    if filehandle is None:
        filehandle = cmudict.dict_stream()
    pronunciations = parse_cmu(filehandle)
    filehandle.close()
    # word -> list of phone strings (words can have several pronunciations)
    lookup = collections.defaultdict(list)
    for word, phones in pronunciations:
        lookup[word].append(phones)
    # rhyming part -> list of words sharing it
    rhyme_lookup = collections.defaultdict(list)
    for word, phones in pronunciations:
        part = rhyming_part(phones)
        if part is not None:
            rhyme_lookup[part].append(word)
def rhyming_part(phones):
    """Get the rhyming part of a string of CMUdict phones.

    The rhyming part starts at the last phone carrying primary or
    secondary stress (marker '1' or '2'); when no such phone is found
    past the first position, the whole string is returned.
    """
    parts = phones.split()
    for idx in reversed(range(1, len(parts))):
        if parts[idx][-1] in '12':
            return ' '.join(parts[idx:])
    return phones
def search(pattern):
    """Get words whose pronunciation matches a regular expression."""
    init_cmu()
    # Anchor the pattern at phone boundaries.
    regexp = re.compile(r"\b" + pattern + r"\b")
    matches = []
    for word, phones in pronunciations:
        if regexp.search(phones):
            matches.append(word)
    return matches
def search_stresses(pattern):
    """Get words whose stress pattern matches a regular expression."""
    init_cmu()
    regexp = re.compile(pattern)
    matches = []
    for word, phones in pronunciations:
        if regexp.search(stresses(phones)):
            matches.append(word)
    return matches
def rhymes(word):
    """Get a sorted, de-duplicated list of words rhyming with ``word``."""
    phones = phones_for_word(word)
    if not phones:
        return []
    combined = []
    # A word may have several pronunciations; collect rhymes for each.
    for pronunciation in phones:
        part = rhyming_part(pronunciation)
        combined.extend(
            w for w in rhyme_lookup.get(part, []) if w != word
        )
    return sorted(set(combined))
def set_ds9(self, level="PREF"):
    """Apply the default display settings to the ds9 window."""
    self.set_zoom()
    settings = config.read("DS9." + level)
    for key, value in settings.items():
        # Config keys use '_' where the ds9 command syntax uses spaces.
        command = key.replace("_", " ")
        self.ds9.set("{} {}".format(command, value))
def cfht_megacam_tap_query(ra_deg=180.0, dec_deg=0.0, width=1, height=1, date=None):
    """Do a query of the CADC Megacam table.

    Queries the CADC TAP service for CFHT/MegaPrime observations whose
    footprint intersects a circle centred at (ra_deg, dec_deg), optionally
    restricted to exposures overlapping ``date`` +/- 0.5 day.

    :param ra_deg: centre RA in degrees (ICRS).
    :param dec_deg: centre Dec in degrees (ICRS).
    :param width: search box width in degrees.
    :param height: search box height in degrees.
    :param date: optional UTC date; converted to MJD for the time filter.
    :return: astropy Table of matching observations.
    """
    # Circle radius covering the requested box, capped at 90 degrees.
    radius = min(90, max(width, height) / 2.0)
    query = ("SELECT "
             "COORD1(CENTROID(Plane.position_bounds)) AS RAJ2000,"
             "COORD2(CENTROID(Plane.position_bounds)) AS DEJ2000,"
             "target_name "
             "FROM "
             "caom2.Observation as o "
             "JOIN caom2.Plane as Plane on o.obsID=Plane.obsID "
             "WHERE o.collection = 'CFHT' "
             "AND o.instrument_name = 'MegaPrime' "
             "AND INTERSECTS( CIRCLE('ICRS', %f, %f, %f), Plane.position_bounds ) = 1")
    query = query % (ra_deg, dec_deg, radius)
    if date is not None:
        mjd = Time(date, scale='utc').mjd
        # Overlap test: exposure starts before mjd+0.5 and ends after mjd-0.5.
        query += " AND Plane.time_bounds_lower <= {} AND {} <= Plane.time_bounds_upper ".format(mjd + 0.5, mjd - 0.5)
    data = {"QUERY": query, "REQUEST": "doQuery", "LANG": "ADQL", "FORMAT": "votable"}
    url = "http://www.cadc.hia.nrc.gc.ca/tap/sync"
    # Suppress VOTable parsing warnings from the service response.
    warnings.simplefilter('ignore')
    # NOTE(review): ``.content`` is bytes; feeding it to StringIO suggests
    # this was written for Python 2 — on Python 3 BytesIO would be expected.
    # Confirm which StringIO is imported before changing.
    ff = StringIO(requests.get(url, params=data).content)
    ff.seek(0)
    table = votable.parse(ff).get_first_table().to_table()
    assert isinstance(table, Table)
    return table
def validate(self, size):
    """Ensure the dimension size matches the number of bands in the scale.

    :param size: length of the dimension being validated.
    :raises ValueError: if ``size`` differs from ``len(self.scale)``.
    """
    # Build the message directly instead of the original
    # ``msg.format(**locals())`` trick, which was fragile and opaque.
    if size != len(self.scale):
        raise ValueError(
            'scale and array size must match, but were scale: {0}, '
            'array size: {1}'.format(self.scale.n_bands, size))
def resampled(chunksize_bytes=DEFAULT_CHUNK_SIZE, resample_to=SR44100(), store_resampled=False):
    """Create a basic processing pipeline that can resample all incoming
    audio to a normalized sampling rate for downstream processing, and
    store a convenient compressed version for playback.

    :param chunksize_bytes: byte-stream chunk size for reading the source.
    :param resample_to: target sampling rate for the resampled feature.
    :param store_resampled: whether to persist the resampled audio.
    :return: the generated pipeline class.
    """
    # NOTE(review): ``SR44100()`` is evaluated once at definition time and
    # shared across calls — confirm the sample-rate object is immutable.
    class Resampled(BaseModel):
        # Source audio metadata, persisted as JSON.
        meta = JSONFeature(MetaData, store=True, encoder=AudioMetaDataEncoder)
        # Raw byte stream read in fixed-size chunks; not persisted.
        raw = ByteStreamFeature(ByteStream, chunksize=chunksize_bytes, needs=meta, store=False)
        # Compressed copy for playback.
        ogg = OggVorbisFeature(OggVorbis, needs=raw, store=True)
        # Decoded PCM samples; transient.
        pcm = AudioSamplesFeature(AudioStream, needs=raw, store=False)
        # Samples resampled to the normalized rate.
        resampled = AudioSamplesFeature(Resampler, needs=pcm, samplerate=resample_to, store=store_resampled)

    return Resampled
def audio_graph(chunksize_bytes=DEFAULT_CHUNK_SIZE, resample_to=SR44100(), store_fft=False):
    """Produce a base class suitable as a starting point for many audio
    processing pipelines.

    Resamples all audio to a common sampling rate, computes a bark-band
    spectrogram from overlapping STFT frames, and compresses the audio
    to ogg vorbis for compact storage.

    :param chunksize_bytes: byte-stream chunk size for reading the source.
    :param resample_to: target sampling rate for the pipeline.
    :param store_fft: whether to persist the raw FFT frames.
    :return: the generated pipeline class.
    """
    # Analysis band: 20 Hz up to the Nyquist frequency of the target rate.
    band = FrequencyBand(20, resample_to.nyquist)

    class AudioGraph(BaseModel):
        # Source audio metadata, persisted as JSON.
        meta = JSONFeature(MetaData, store=True, encoder=AudioMetaDataEncoder)
        raw = ByteStreamFeature(ByteStream, chunksize=chunksize_bytes, needs=meta, store=False)
        # Compressed copy for playback.
        ogg = OggVorbisFeature(OggVorbis, needs=raw, store=True)
        pcm = AudioSamplesFeature(AudioStream, needs=raw, store=False)
        resampled = AudioSamplesFeature(Resampler, needs=pcm, samplerate=resample_to, store=False)
        # Half-lapped analysis windows with an ogg-vorbis windowing function.
        windowed = ArrayWithUnitsFeature(SlidingWindow, needs=resampled, wscheme=HalfLapped(), wfunc=OggVorbisWindowingFunc(), store=False)
        dct = ArrayWithUnitsFeature(DCT, needs=windowed, store=True)
        fft = ArrayWithUnitsFeature(FFT, needs=windowed, store=store_fft)
        # Perceptual features derived from the FFT frames.
        bark = ArrayWithUnitsFeature(BarkBands, needs=fft, frequency_band=band, store=True)
        centroid = ArrayWithUnitsFeature(SpectralCentroid, needs=bark, store=True)
        chroma = ArrayWithUnitsFeature(Chroma, needs=fft, frequency_band=band, store=True)
        bfcc = ArrayWithUnitsFeature(BFCC, needs=fft, store=True)

    return AudioGraph
def str_to_timedelta(value: str, fmt: str = None) -> Optional[datetime.timedelta]:
    """Convert string value to timedelta instance according to the given format.

    :param value: string to parse.
    :param fmt: a format string, a list/tuple of format strings, or None
        to try the default formats.
    :return: the parsed timedelta, or None when nothing matched and no
        explicit format was given.
    :raises ValueError: if ``value`` is not a str, or if an explicit
        ``fmt`` was given and nothing matched.
    """
    def timedelta_kwargs(data: DictStrInt) -> DictStrInt:
        # Fold all captured units into a single 'seconds' entry.
        seconds = data.get('seconds', 0)
        # NOTE(review): 'day_hours' uses .get (not .pop), so the key stays
        # in ``data`` and would be passed to timedelta() — confirm the
        # regex group names never include 'day_hours'.
        seconds += data.get('day_hours', 0) * 3600
        seconds += data.pop('hour_minutes', 0) * 60
        seconds += data.pop('minute_seconds', 0)
        seconds += data.pop('week_days', 0) * SECONDS_PER_DAY
        seconds += data.pop('weeks', 0) * SECONDS_PER_WEEK
        data.update({'seconds': seconds})
        return data

    if not isinstance(value, str):
        raise ValueError('Value should be a "str" instance. You use {0}.'.format(type(value)))
    # Remember the caller-supplied format to decide whether to raise later.
    user_fmt = fmt
    if isinstance(fmt, (list, tuple)):
        formats = list(fmt)
    elif fmt is None:
        formats = [TIMEDELTA_FORMAT, 'F', 'f']
    else:
        formats = [fmt]
    # Localized label alternations substituted into the unit patterns.
    locale_data = {
        'days_label': '({0}|{1})'.format('day', 'days'),
        'short_days_label': 'd',
        'short_week_days_label': 'd',
        'short_weeks_label': 'w',
        'week_days_label': '({0}|{1})'.format('day', 'days'),
        'weeks_label': '({0}|{1})'.format('week', 'weeks'),
    }
    # Translate each format string into an anchored regular expression:
    # known format characters expand to their unit pattern, anything else
    # is matched literally.
    regexps = []
    for item in formats:
        processed = r'^'
        for part in item:
            if part in TIMEDELTA_FORMATS:
                part = TIMEDELTA_FORMATS[part][1] % locale_data
            else:
                part = re.escape(part)
            processed += part
        processed += r'$'
        regexps.append(processed)
    # First matching format wins.
    for regexp in regexps:
        timedelta_re = re.compile(regexp)
        matched = timedelta_re.match(value)
        if matched:
            # NOTE: ``value`` here shadows the function argument inside the
            # comprehension; each group is coerced to int (missing -> 0).
            data = {key: to_int(value) or 0 for key, value in matched.groupdict().items()}
            return datetime.timedelta(**timedelta_kwargs(data))
    if user_fmt:
        raise ValueError('Cannot convert {0!r} to timedelta instance, using {1!r} format.'.format(value, user_fmt))
    return None
def timedelta_average(*values: datetime.timedelta) -> datetime.timedelta:
    r"""Compute the arithmetic mean of a collection of timedeltas.

    Accepts either varargs or a single list/tuple of timedeltas.
    """
    if isinstance(values[0], (list, tuple)):
        values = values[0]
    total = sum(values, datetime.timedelta())
    # Floor division keeps the result an exact timedelta.
    return total // len(values)
def timedelta_div(first: datetime.timedelta, second: datetime.timedelta) -> Optional[float]:
    """Implement division for timedelta instances.

    Returns the ratio of whole seconds, or None when the divisor is a
    zero-length timedelta.
    """
    denominator = timedelta_seconds(second)
    if not denominator:
        # Avoid ZeroDivisionError for empty divisors.
        return None
    return timedelta_seconds(first) / denominator
def timedelta_seconds(value: datetime.timedelta) -> int:
    """Return the full number of whole seconds in a timedelta.

    Microseconds are ignored.
    """
    return value.days * SECONDS_PER_DAY + value.seconds
def timedelta_to_str(value: datetime.timedelta, fmt: str = None) -> str:
    """Display the timedelta formatted according to the given string.

    :param value: the timedelta to render.
    :param fmt: format string; characters found in TIMEDELTA_FORMATS are
        expanded, all others are copied literally. Defaults to
        TIMEDELTA_FORMAT.
    :raises ValueError: if ``value`` is not a timedelta.
    """
    if not isinstance(value, datetime.timedelta):
        raise ValueError('Value should be a "datetime.timedelta" instance. You use {0}.'.format(type(value)))
    # Decompose the timedelta into every unit the format templates may
    # reference by name (captured below via locals()).
    days = value.days
    microseconds = value.microseconds
    seconds = timedelta_seconds(value)
    hours = seconds // 3600
    minutes = seconds // 60
    weeks = days // 7
    # Remainders within the next-larger unit.
    day_hours = hours - days * 24
    hour_minutes = minutes - hours * 60
    minute_seconds = seconds - minutes * 60
    week_days = days - weeks * 7
    # Singular/plural labels ("% 10 == 1" pluralization rule).
    days_label = 'day' if days % 10 == 1 else 'days'
    short_days_label = 'd'
    short_week_days_label = 'd'
    short_weeks_label = 'w'
    week_days_label = 'day' if week_days % 10 == 1 else 'days'
    weeks_label = 'week' if weeks % 10 == 1 else 'weeks'
    # Snapshot of all the names above, used for %-style substitution.
    data = locals()
    fmt = fmt or TIMEDELTA_FORMAT
    processed = ''
    for part in fmt:
        if part in TIMEDELTA_FORMATS:
            # 'f'/'F' are "full" templates, 'r'/'R' are "repr" templates;
            # both drop components that are zero.
            is_full_part = part in ('f', 'F')
            is_repr_part = part in ('r', 'R')
            part = TIMEDELTA_FORMATS[part][0]
            if is_full_part or is_repr_part:
                if is_repr_part and not days:
                    part = part.replace('%(days)d', '')
                    part = part.replace('%(days_label)s,', '')
                    part = part.replace('%(short_days_label)s', '')
                if is_full_part and not minute_seconds:
                    part = part.replace(':%(minute_seconds)02d', '')
                if is_full_part and not weeks:
                    part = part.replace('%(weeks)d', '')
                    part = part.replace('%(short_weeks_label)s', '')
                    part = part.replace('%(weeks_label)s,', '')
                if is_full_part and not week_days:
                    part = part.replace('%(week_days)d', '')
                    part = part.replace('%(short_week_days_label)s', '')
                    part = part.replace('%(week_days_label)s,', '')
                # Collapse whitespace left behind by removed components.
                part = part.strip()
                part = ' '.join(part.split())
        processed += part
    return processed % data
def send_command(self, cmd, params=None, raw=False):
    """Send a CGI command to the foscam camera and parse the response.

    :param cmd: CGI command name.
    :param params: optional dict of query parameters.
    :param raw: when True, return the raw response body without XML parsing.
    :return: (code, params) tuple — FOSCAM_SUCCESS with data on success,
        ERROR_FOSCAM_UNAVAILABLE/None when the request or parse fails.
    """
    paramstr = ''
    if params:
        paramstr = urlencode(params)
        paramstr = '&' + paramstr if paramstr else ''
    # Credentials are passed in the query string (camera CGI convention).
    cmdurl = 'http://%s/cgi-bin/CGIProxy.fcgi?usr=%s&pwd=%s&cmd=%s%s' % (
        self.url, self.usr, self.pwd, cmd, paramstr,
    )
    if self.ssl and ssl_enabled:
        cmdurl = cmdurl.replace('http:', 'https:')
    if self.verbose:
        print('Send Foscam command: %s' % cmdurl)
    try:
        raw_string = ''
        if self.ssl and ssl_enabled:
            # Camera firmware only speaks TLSv1.
            gcontext = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
            raw_string = urlopen(cmdurl, context=gcontext, timeout=5).read()
        else:
            raw_string = urlopen(cmdurl, timeout=5).read()
        if raw:
            if self.verbose:
                print('Returning raw Foscam response: len=%d' % len(raw_string))
            return FOSCAM_SUCCESS, raw_string
        root = ET.fromstring(raw_string)
    # NOTE(review): bare except swallows everything (including programming
    # errors), and concatenating ``raw_string`` (bytes) with a str in the
    # verbose print can itself raise — consider narrowing to URLError /
    # ET.ParseError.
    except:
        if self.verbose:
            print('Foscam exception: ' + raw_string)
        return ERROR_FOSCAM_UNAVAILABLE, None
    code = ERROR_FOSCAM_UNKNOWN
    params = OrderedDict()
    # <result> carries the status code; every other tag (except the
    # CGI_Result wrapper) is a URL-quoted response field.
    for child in root.iter():
        if child.tag == 'result':
            code = int(child.text)
        elif child.tag != 'CGI_Result':
            params[child.tag] = unquote(child.text)
    if self.verbose:
        print('Received Foscam response: %s, %s' % (code, params))
    return code, params
def set_port_info(self, webport, mediaport, httpsport, onvifport, callback=None):
    """Set the http, media, https and onvif ports of the camera."""
    return self.execute_command(
        'setPortInfo',
        {
            'webPort': webport,
            'mediaPort': mediaport,
            'httpsPort': httpsport,
            'onvifPort': onvifport,
        },
        callback=callback,
    )
def set_wifi_setting(self, ssid, psk, isenable, isusewifi, nettype,
                     encryptype, authmode, keyformat, defaultkey,
                     key1='', key2='', key3='', key4='',
                     key1len=64, key2len=64, key3len=64, key4len=64,
                     callback=None):
    """Set the wifi config. Camera will not connect to the AP unless you
    eject the network cable."""
    settings = {
        'isEnable': isenable,
        'isUseWifi': isusewifi,
        'ssid': ssid,
        'netType': nettype,
        'encryptType': encryptype,
        'psk': psk,
        'authMode': authmode,
        'keyFormat': keyformat,
        'defaultKey': defaultkey,
        # WEP key slots and their lengths.
        'key1': key1,
        'key2': key2,
        'key3': key3,
        'key4': key4,
        'key1Len': key1len,
        'key2Len': key2len,
        'key3Len': key3len,
        'key4Len': key4len,
    }
    return self.execute_command('setWifiSetting', settings, callback=callback)
def set_upnp_config(self, isenable, callback=None):
    """Enable or disable UPnP on the camera."""
    return self.execute_command(
        'setUPnPConfig', {'isEnable': isenable}, callback=callback)
def set_ddns_config(self, isenable, hostname, ddnsserver, user, password,
                    callback=None):
    """Set the DDNS config on the camera."""
    settings = {
        'isEnable': isenable,
        'hostName': hostname,
        'ddnsServer': ddnsserver,
        'user': user,
        'password': password,
    }
    return self.execute_command('setDDNSConfig', settings, callback=callback)
def set_sub_stream_format(self, format, callback=None):
    """Set the stream format of the sub stream."""
    # NOTE: ``format`` shadows the builtin but is kept for API compatibility.
    return self.execute_command(
        'setSubStreamFormat', {'format': format}, callback=callback)
def set_main_video_stream_type(self, streamtype, callback=None):
    """Set the stream type of the main stream."""
    return self.execute_command(
        'setMainVideoStreamType', {'streamType': streamtype},
        callback=callback)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.