idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
62,000
def best_diff(img1, img2, opts):
    """Find the relative offset of two images that minimizes their difference.

    Slides the smaller image over the larger along each axis whose sizes
    differ, scores each candidate diff with diff_badness(), and keeps the
    best one.

    Returns a tuple (diff_image, ((x1, y1), (x2, y2))) where the positions
    are the offsets applied to img1 and img2 respectively.
    """
    w1, h1 = img1.size
    w2, h2 = img2.size
    # Only the overlapping region is compared.
    w, h = min(w1, w2), min(h1, h2)
    best = None
    # Strict upper bound on any badness score, so the very first candidate
    # always becomes the initial best (best_pos is therefore always bound
    # by the time we return).
    best_value = 255 * w * h + 1
    # One candidate offset per pixel of size mismatch, per axis.
    xr = abs(w1 - w2) + 1
    yr = abs(h1 - h2) + 1
    p = Progress(xr * yr, timeout=opts.timeout)
    for x in range(xr):
        # Shift whichever image is wider; the other stays at 0.
        if w1 > w2:
            x1, x2 = x, 0
        else:
            x1, x2 = 0, x
        for y in range(yr):
            # Likewise shift whichever image is taller.
            if h1 > h2:
                y1, y2 = y, 0
            else:
                y1, y2 = 0, y
            p.next()
            this = diff(img1, img2, (x1, y1), (x2, y2))
            this_value = diff_badness(this)
            if this_value < best_value:
                best = this
                best_value = this_value
                best_pos = (x1, y1), (x2, y2)
    return best, best_pos
Find the best alignment of two images that minimizes the differences .
62,001
def simple_highlight(img1, img2, opts):
    """Align the two images, then build an opacity mask of the differences
    for each of them.

    Returns (mask1, mask2), or (None, None) if the user interrupts the
    alignment search.
    """
    try:
        delta, ((x1, y1), (x2, y2)) = best_diff(img1, img2, opts)
    except KeyboardInterrupt:
        return None, None
    # Thicken the difference regions, then convert to the requested opacity.
    delta = delta.filter(ImageFilter.MaxFilter(9))
    delta = tweak_diff(delta, opts.opacity)
    mask1 = Image.new('L', img1.size, 0xff)
    mask2 = Image.new('L', img2.size, 0xff)
    mask1.paste(delta, (x1, y1))
    mask2.paste(delta, (x2, y2))
    return mask1, mask2
Try to align the two images to minimize pixel differences .
62,002
def slow_highlight(img1, img2, opts):
    """Find similar areas between two images of different sizes.

    Both images are padded onto equally-sized canvases; one canvas is then
    slid over the other through every offset combination, and the darkest
    (i.e. most similar) per-pixel difference over all offsets is kept.

    Returns (mask1, mask2) opacity masks cropped back to each image's own
    size, or (None, None) if interrupted.
    """
    w1, h1 = img1.size
    w2, h2 = img2.size
    W, H = max(w1, w2), max(h1, h2)
    pimg1 = Image.new('RGB', (W, H), opts.bgcolor)
    pimg2 = Image.new('RGB', (W, H), opts.bgcolor)
    pimg1.paste(img1, (0, 0))
    pimg2.paste(img2, (0, 0))
    # Running minimum of the difference over all offsets; start fully white.
    diff = Image.new('L', (W, H), 255)
    xr = abs(w1 - w2) + 1
    yr = abs(h1 - h2) + 1
    try:
        p = Progress(xr * yr, timeout=opts.timeout)
        for x in range(xr):
            for y in range(yr):
                p.next()
                this = ImageChops.difference(pimg1, pimg2).convert('L')
                this = this.filter(ImageFilter.MaxFilter(7))
                # Keep the smallest difference seen so far at each pixel.
                diff = ImageChops.darker(diff, this)
                # Step the taller image down one pixel.
                if h1 > h2:
                    pimg2 = ImageChops.offset(pimg2, 0, 1)
                else:
                    pimg1 = ImageChops.offset(pimg1, 0, 1)
            # Undo the vertical sweep before stepping horizontally.
            if h1 > h2:
                pimg2 = ImageChops.offset(pimg2, 0, -yr)
            else:
                pimg1 = ImageChops.offset(pimg1, 0, -yr)
            # Step the wider image right one pixel.
            if w1 > w2:
                pimg2 = ImageChops.offset(pimg2, 1, 0)
            else:
                pimg1 = ImageChops.offset(pimg1, 1, 0)
    except KeyboardInterrupt:
        return None, None
    diff = diff.filter(ImageFilter.MaxFilter(5))
    diff1 = diff.crop((0, 0, w1, h1))
    diff2 = diff.crop((0, 0, w2, h2))
    mask1 = tweak_diff(diff1, opts.opacity)
    mask2 = tweak_diff(diff2, opts.opacity)
    return mask1, mask2
Try to find similar areas between two images .
62,003
def SOAPAction(self, Action, responseElement, params="", recursive=False):
    """Issue an HNAP SOAP action against the device and return the text of
    *responseElement* from the reply, or None on failure.

    On a connection error the cached credentials are dropped and the call
    retries itself exactly once (recursive=True marks the retry).
    """
    # (Re)authenticate lazily; cached credentials are reused when present.
    if self.authenticated is None:
        self.authenticated = self.auth()
    auth = self.authenticated
    # Non-legacy firmware requires a fresh login per request, so the cache
    # is invalidated immediately after use.
    if not self.use_legacy_protocol:
        self.authenticated = None
    if auth is None:
        return None
    payload = self.requestBody(Action, params)
    time_stamp = str(round(time.time() / 1e6))
    action_url = '"http://purenetworks.com/HNAP1/{}"'.format(Action)
    # HNAP_AUTH: HMAC of timestamp+action URL keyed by the private key,
    # followed by the timestamp itself.
    AUTHKey = hmac.new(auth[0].encode(), (time_stamp + action_url).encode()).hexdigest().upper() + " " + time_stamp
    headers = {'Content-Type': '"text/xml; charset=utf-8"',
               'SOAPAction': '"http://purenetworks.com/HNAP1/{}"'.format(Action),
               'HNAP_AUTH': '{}'.format(AUTHKey),
               'Cookie': 'uid={}'.format(auth[1])}
    try:
        response = urlopen(Request(self.url, payload.encode(), headers))
    except (HTTPError, URLError):
        # Drop credentials and retry once; give up if the retry also fails.
        self.authenticated = None
        if not recursive:
            return_value = self.SOAPAction(Action, responseElement, params, True)
        if recursive or return_value is None:
            _LOGGER.warning("Failed to open url to {}".format(self.ip))
            self._error_report = True
            return None
        else:
            return return_value
    xmlData = response.read().decode()
    root = ET.fromstring(xmlData)
    try:
        value = root.find('.//{http://purenetworks.com/HNAP1/}%s' % (responseElement)).text
    except AttributeError:
        # find() returned None: the element is absent from the reply.
        _LOGGER.warning("Unable to find %s in response." % responseElement)
        return None
    if value is None and self._error_report is False:
        _LOGGER.warning("Could not find %s in response." % responseElement)
        self._error_report = True
        return None
    self._error_report = False
    return value
Generate the SOAP action call .
62,004
def fetchMyCgi(self):
    """Fetch statistics from the device's my_cgi.cgi endpoint.

    Returns a dict of "Name: value" pairs parsed from the response lines,
    or None when the device is unreachable.
    """
    try:
        response = urlopen(Request('http://{}/my_cgi.cgi'.format(self.ip),
                                   b'request=create_chklst'))
    except (HTTPError, URLError):
        _LOGGER.warning("Failed to open url to {}".format(self.ip))
        self._error_report = True
        return None
    result = {}
    for line in response.readlines():
        # Split on the FIRST colon only, so values that themselves contain
        # colons (e.g. times) are kept intact; the original split(':') threw
        # away everything after the second colon and crashed (IndexError) on
        # lines without one. Decode each line once instead of twice.
        name, sep, value = line.decode().partition(':')
        if sep:
            result[name.strip()] = value.strip()
    return result
Fetch device statistics from the my_cgi.cgi endpoint.
62,005
def current_consumption(self):
    """Return the current power draw in watts (float), or 'N/A' when the
    reading is unavailable or unparsable."""
    res = 'N/A'
    if self.use_legacy_protocol:
        try:
            res = self.fetchMyCgi()['Meter Watt']
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are no longer swallowed; any fetch/parse failure means 'N/A'.
            return 'N/A'
    else:
        try:
            res = self.SOAPAction('GetCurrentPowerConsumption',
                                  'CurrentConsumption',
                                  self.moduleParameters("2"))
        except Exception:
            return 'N/A'
    if res is None:
        return 'N/A'
    try:
        res = float(res)
    except ValueError:
        _LOGGER.error("Failed to retrieve current power consumption from SmartPlug")
    return res
Get the current power consumption in Watt .
62,006
def total_consumption(self):
    """Return the device-lifetime power consumption (as the string the
    device reports), or 'N/A' when unavailable."""
    if self.use_legacy_protocol:
        # Lifetime statistics are not exposed by the legacy protocol.
        return 'N/A'
    res = 'N/A'
    try:
        res = self.SOAPAction("GetPMWarningThreshold", "TotalConsumption",
                              self.moduleParameters("2"))
    except Exception:
        # Narrowed from a bare `except:`; see current_consumption().
        return 'N/A'
    if res is None:
        return 'N/A'
    try:
        # Validation only — the string form is returned unchanged, matching
        # the original behavior callers may rely on.
        float(res)
    except ValueError:
        _LOGGER.error("Failed to retrieve total power consumption from SmartPlug")
    return res
Get the total power consumption over the device's lifetime.
62,007
def state(self, value):
    """Set the socket state; *value* must compare (case-insensitively) to
    ON or OFF, otherwise TypeError is raised."""
    requested = value.upper()
    if requested == ON:
        flag = "true"
    elif requested == OFF:
        flag = "false"
    else:
        raise TypeError("State %s is not valid." % str(value))
    return self.SOAPAction('SetSocketSettings', 'SetSocketSettingsResult',
                           self.controlParameters("1", flag))
Set device state .
62,008
def auth(self):
    """Authenticate against the device's HNAP SOAP interface.

    Performs the two-step HNAP login: request a challenge/cookie/public key,
    derive the private key and login password via HMAC, then log in.

    Returns (PrivateKey, Cookie) on success, None on any failure.
    """
    payload = self.initial_auth_payload()
    headers = {'Content-Type': '"text/xml; charset=utf-8"',
               'SOAPAction': '"http://purenetworks.com/HNAP1/Login"'}
    try:
        response = urlopen(Request(self.url, payload, headers))
    except URLError:
        if self._error_report is False:
            _LOGGER.warning('Unable to open a connection to dlink switch {}'.format(self.ip))
            self._error_report = True
        return None
    xmlData = response.read().decode()
    root = ET.fromstring(xmlData)
    # Step 1 results: challenge string, session cookie and public key.
    ChallengeResponse = root.find('.//{http://purenetworks.com/HNAP1/}Challenge')
    CookieResponse = root.find('.//{http://purenetworks.com/HNAP1/}Cookie')
    PublickeyResponse = root.find('.//{http://purenetworks.com/HNAP1/}PublicKey')
    if (ChallengeResponse == None or CookieResponse == None or PublickeyResponse == None) and self._error_report is False:
        _LOGGER.warning("Failed to receive initial authentication from smartplug.")
        self._error_report = True
        return None
    if self._error_report is True:
        return None
    Challenge = ChallengeResponse.text
    Cookie = CookieResponse.text
    Publickey = PublickeyResponse.text
    # Derive the key material per the HNAP login scheme: the private key is
    # HMAC(publickey+password, challenge); the login password is
    # HMAC(privatekey, challenge).
    PrivateKey = hmac.new((Publickey + self.password).encode(), (Challenge).encode()).hexdigest().upper()
    login_pwd = hmac.new(PrivateKey.encode(), Challenge.encode()).hexdigest().upper()
    response_payload = self.auth_payload(login_pwd)
    # Step 2: present the derived password together with the session cookie.
    headers = {'Content-Type': '"text/xml; charset=utf-8"',
               'SOAPAction': '"http://purenetworks.com/HNAP1/Login"',
               'HNAP_AUTH': '"{}"'.format(PrivateKey),
               'Cookie': 'uid={}'.format(Cookie)}
    response = urlopen(Request(self.url, response_payload, headers))
    xmlData = response.read().decode()
    root = ET.fromstring(xmlData)
    login_status = root.find('.//{http://purenetworks.com/HNAP1/}LoginResult').text.lower()
    if login_status != "success" and self._error_report is False:
        _LOGGER.error("Failed to authenticate with SmartPlug {}".format(self.ip))
        self._error_report = True
        return None
    self._error_report = False
    return (PrivateKey, Cookie)
Authenticate using the SOAP interface .
62,009
def get_known_read_position(fp, buffered=True):
    """Return a file position that is known to have been read and handled.

    Assumes streaming processing over a (possibly buffered) file: up to one
    buffer's worth of data past the handled point may already have been
    pulled from the OS, so that margin is subtracted.
    """
    margin = io.DEFAULT_BUFFER_SIZE if buffered else 0
    pos = fp.tell() - margin
    return pos if pos > 0 else 0
Return a position in a file which is known to be read & handled . It assumes a buffered file and streaming processing .
62,010
def recover(gzfile, last_good_position):
    """Skip to the next possibly decompressable part of a gzip file.

    Returns a new GzipFile positioned at that part, or None when no
    candidate restart point exists after *last_good_position*.
    """
    pos = get_recover_position(gzfile, last_good_position=last_good_position)
    if pos == -1:
        return None
    raw = gzfile.fileobj
    raw.seek(pos)
    return gzip.GzipFile(fileobj=raw, mode='r')
Skip to the next possibly decompressable part of a gzip file . Return a new GzipFile object if such part is found or None if it is not found .
62,011
def maybe_gzip_open(path, *args, **kwargs):
    """Open *path* with gzip.open when it ends in .gz/.gzip, plain open
    otherwise; extra arguments are forwarded to the chosen opener."""
    path = path_to_str(path)
    opener = gzip.open if path.endswith(('.gz', '.gzip')) else open
    return opener(path, *args, **kwargs)
Open file with either open or gzip . open depending on file extension .
62,012
def path_to_str(path):
    """Convert a pathlib.Path to str; return every other object as-is."""
    try:
        from pathlib import Path as _PathType
    except ImportError:
        # No pathlib on this interpreter, so *path* cannot be a Path.
        return path
    return str(path) if isinstance(path, _PathType) else path
Convert pathlib . Path objects to str ; return other objects as - is .
62,013
def calculate_signature(key, data, timestamp=None):
    """Calculate the HMAC-SHA256 signature for the given request data.

    The signed message is the timestamp (current time when omitted)
    followed by every key/value pair of *data* concatenated in sorted
    key order.
    """
    if timestamp is None:
        timestamp = int(time.time())
    message = str(timestamp) + ''.join(
        "%s%s" % (k, v) for k, v in sorted(data.items()))
    # Encode explicitly: on Python 3 hmac requires bytes — the original
    # passed str objects, which raises TypeError.
    return hmac.HMAC(str(key).encode('utf-8'),
                     message.encode('utf-8'),
                     hashlib.sha256).hexdigest()
Calculates the signature for the given request data .
62,014
def authenticate(self):
    """Abort the request with a 401 challenge asking the client to
    (re)authenticate; the realm flags invalid_token when credentials were
    actually supplied."""
    supplied = (request.headers.get('Authorization')
                or request.args.get('access_token'))
    if supplied:
        realm = 'Bearer realm="%s", error="invalid_token"' % __package__
    else:
        realm = 'Bearer realm="%s"' % __package__
    resp = Response(None, 401, {'WWW-Authenticate': realm})
    abort(401, description='Please provide proper credentials', response=resp)
Indicate to the client that it needs to authenticate via a 401 .
62,015
def check_token(self, token, allowed_roles, resource, method):
    """Verify an OAuth 2 bearer token against the audiences configured for
    *resource* (falling back to the global JWT audiences)."""
    audiences = config.DOMAIN[resource].get('audiences', config.JWT_AUDIENCES)
    return self._perform_verification(token, audiences, allowed_roles)
This function is called when a token is sent through the access_token parameter or the Authorization header, as specified in the OAuth 2 specification.
62,016
def requires_token(self, audiences=None, allowed_roles=None):
    """Decorator protecting view functions with bearer-token authentication.

    The token is taken from the access_token query parameter, falling back
    to the Authorization header; verification failure aborts with 401.
    """
    def requires_token_wrapper(f):
        @wraps(f)
        def decorated(*args, **kwargs):
            token = request.args.get('access_token')
            if token is None:
                token = request.headers.get('Authorization', '').partition(' ')[2]
            if not self._perform_verification(token, audiences, allowed_roles):
                abort(401)
            return f(*args, **kwargs)
        return decorated
    return requires_token_wrapper
Decorator for functions that will be protected with token authentication .
62,017
def load_library(self, libname):
    """Locate *libname* among the candidate paths and load the first one
    that exists; raises ImportError when none does."""
    for candidate in self.getpaths(libname):
        if os.path.exists(candidate):
            return self.load(candidate)
    raise ImportError("%s not found." % libname)
Given the name of a library load it .
62,018
def getpaths(self, libname):
    """Yield candidate filesystem paths where *libname* might be found.

    An absolute name is yielded as-is; otherwise platform-specific
    candidates come first, then whatever ctypes' search locates.
    """
    if os.path.isabs(libname):
        yield libname
        return
    for candidate in self.getplatformpaths(libname):
        yield candidate
    found = ctypes.util.find_library(libname)
    if found:
        yield found
Return a list of paths where the library might be found .
62,019
def to_json(content, indent=None):
    """Serialize a Python object as JSON, routing Django QuerySets through
    Django's JSON serializer."""
    if isinstance(content, QuerySet):
        json_serializer = serializers.get_serializer('json')()
        return json_serializer.serialize(content, ensure_ascii=False,
                                         indent=indent)
    try:
        return json.dumps(content, cls=DecimalEncoder,
                          ensure_ascii=False, indent=indent)
    except TypeError:
        # DecimalEncoder could not handle the payload; retry with defaults.
        return json.dumps(content, ensure_ascii=False, indent=indent)
Serializes a python object as JSON
62,020
def to_html(data):
    """Serialize a Python object as an HTML page of pretty-printed JSON,
    syntax-highlighted when pygments is available."""
    base_html_template = Template()
    code = to_json(data, indent=4)
    if PYGMENTS_INSTALLED:
        context = Context({'body': highlight(code, JSONLexer(), HtmlFormatter()),
                           'style': HtmlFormatter().get_style_defs('.highlight')})
    else:
        context = Context({'body': code})
    return base_html_template.render(context)
Serializes a python object as HTML
62,021
def to_text(data):
    """Serialize a Python object as plain text: pretty-printed JSON when
    possible, the object itself otherwise."""
    try:
        return to_json(data, indent=4)
    except Exception:
        # Fixed Python-2-only `except Exception, e` syntax (a SyntaxError on
        # Python 3); the bound exception was never used.
        return data
Serializes a python object as plain text
62,022
def auth_required(secret_key_func):
    """Allow access when the request carries a valid signature OR the user
    is actively logged in; *secret_key_func* resolves the signing key."""
    def actual_decorator(obj):
        def check(request, *args, **kwargs):
            secret_key = secret_key_func(request, *args, **kwargs)
            return (validate_signature(request, secret_key)
                    or request.user.is_authenticated())
        return wrap_object(obj, request_passes_test(check))
    return actual_decorator
Requires that the user be authenticated either by a signature or by being actively logged in .
62,023
def login_required(obj):
    """Restrict the decorated view (function or class) to logged-in users."""
    def check(request, *args, **kwargs):
        return request.user.is_authenticated()
    return wrap_object(obj, request_passes_test(check))
Requires that the user be logged in in order to gain access to the resource at the specified URI.
62,024
def admin_required(obj):
    """Restrict the decorated view to users flagged as superusers."""
    def check(request, *args, **kwargs):
        return request.user.is_superuser
    return wrap_object(obj, request_passes_test(check))
Requires that the user be logged AND be set as a superuser
62,025
def signature_required(secret_key_func):
    """Restrict access to requests carrying a valid signature, using
    *secret_key_func* to resolve the signing key per request."""
    def actual_decorator(obj):
        def check(request, *args, **kwargs):
            secret_key = secret_key_func(request, *args, **kwargs)
            return validate_signature(request, secret_key)
        return wrap_object(obj, request_passes_test(check))
    return actual_decorator
Requires that the request contain a valid signature to gain access to a specified resource .
62,026
def validate_signature(request, secret_key):
    """Validate the signature attached to *request* against *secret_key*.

    The request parameters must include 'sig' (the signature) and 't'
    (a unix timestamp); the timestamp must be within five minutes of the
    server clock, and the signature must match the remaining parameters.
    """
    data = request.GET.copy()
    if request.method != 'GET':
        message_body = getattr(request, request.method, {})
        data.update(message_body)
    if data.get('sig', False):
        sig = data['sig']
        del data['sig']
    else:
        return False
    if data.get('t', False):
        timestamp = int(data.get('t', False))
        del data['t']
    else:
        return False
    local_time = datetime.utcnow()
    remote_time = datetime.utcfromtimestamp(timestamp)
    if local_time > remote_time:
        delta = local_time - remote_time
    else:
        delta = remote_time - local_time
    # BUG FIX: the original tested delta.seconds, which ignores the .days
    # component — a timestamp off by an exact number of days (plus under
    # five minutes) validated. total_seconds() accounts for the whole delta.
    if delta.total_seconds() > 5 * 60:
        return False
    return sig == calculate_signature(secret_key, data, timestamp)
Validates the signature associated with the given request .
62,027
def exists(self, session_key):
    """Return True when a session with *session_key* is stored in DynamoDB."""
    response = self.table.get_item(Key={'session_key': session_key},
                                   ConsistentRead=ALWAYS_CONSISTENT)
    return 'Item' in response
Checks to see if a session currently exists in DynamoDB .
62,028
def save(self, must_create=False):
    """Save the current session data to DynamoDB.

    With must_create=True a fresh session key is generated and the write is
    made conditional on the key not existing yet; a conditional-check
    failure is surfaced as CreateError.
    """
    if must_create:
        # Force a brand-new key instead of reusing the current one.
        self._session_key = None
    self._get_or_create_session_key()
    update_kwargs = {
        'Key': {'session_key': self.session_key},
    }
    # 'data' is a DynamoDB reserved word, hence the #data name alias.
    attribute_names = {'#data': 'data'}
    attribute_values = {':data': self.encode(self._get_session(no_load=must_create))}
    set_updates = ['#data = :data']
    if must_create:
        # Fail the write if another session already owns this key, and
        # stamp the creation time.
        update_kwargs['ConditionExpression'] = DynamoConditionAttr('session_key').not_exists()
        attribute_values[':created'] = int(time.time())
        set_updates.append('created = :created')
    update_kwargs['UpdateExpression'] = 'SET ' + ','.join(set_updates)
    update_kwargs['ExpressionAttributeValues'] = attribute_values
    update_kwargs['ExpressionAttributeNames'] = attribute_names
    try:
        self.table.update_item(**update_kwargs)
    except ClientError as e:
        error_code = e.response['Error']['Code']
        if error_code == 'ConditionalCheckFailedException':
            # Key collision under must_create — Django expects CreateError.
            raise CreateError
        raise
Saves the current session data to the database .
62,029
def delete(self, session_key=None):
    """Delete the session named by *session_key*, or the current session
    when no key is given (a no-op if there is no current key)."""
    key = session_key
    if key is None:
        if self.session_key is None:
            return
        key = self.session_key
    self.table.delete_item(Key={'session_key': key})
Deletes the current session or the one specified in session_key .
62,030
def flush(self):
    """Wipe the current session's data from the store and regenerate its key."""
    self.clear()
    self.delete(self.session_key)
    self.create()
Removes the current session data from the database and regenerates the key .
62,031
def wrap_object(obj, decorator):
    """Apply *decorator* to a plain function, or to every HTTP handler
    method of a class-based view; raises TypeError for anything else."""
    actual_decorator = method_decorator(decorator)
    if inspect.isfunction(obj):
        wrapped = actual_decorator(obj)
        update_wrapper(wrapped, obj, assigned=available_attrs(obj))
        return wrapped
    if inspect.isclass(obj):
        for method_name in obj.http_method_names:
            if not hasattr(obj, method_name):
                continue
            method = getattr(obj, method_name)
            wrapped = actual_decorator(method)
            update_wrapper(wrapped, method, assigned=available_attrs(method))
            setattr(obj, method_name, wrapped)
        return obj
    raise TypeError("received an object of type '{0}' expected 'function' or 'classobj'.".format(type(obj)))
Decorates the given object with the decorator function .
62,032
def get_package_versions(self, name):
    """Return the sorted list of known versions for package *name*
    (empty when the package is unknown)."""
    package_data = self._packages.get(name)
    if not package_data:
        return []
    return sort_versions(list(package_data.get('versions', [])))
Gives all the compatible package canonical name
62,033
def get_conf_path(filename=None):
    """Return the configuration directory (~/.condamanager, created on
    demand), or the absolute path of *filename* inside it."""
    conf_dir = osp.join(get_home_dir(), '.condamanager')
    if not osp.isdir(conf_dir):
        os.mkdir(conf_dir)
    return conf_dir if filename is None else osp.join(conf_dir, filename)
Return absolute path for configuration file with specified filename .
62,034
def sort_versions(versions=(), reverse=False, sep=u'.'):
    """Sort a list of version-number strings numerically.

    Each version string is split on *sep* and each component is decomposed
    into (numeric head, alphabetic middle, numeric tail) so that strings
    like 'rc1' or 'dev' sort correctly relative to plain numbers: alphabetic
    parts are mapped to negative numbers (later in the alphabet → closer to
    zero), placing e.g. 'alpha'/'dev'/'rc' releases before the final one.
    """
    if versions == []:
        return []
    digits = u'0123456789'

    def toint(x):
        # Best-effort int conversion; non-numeric parts pass through.
        try:
            n = int(x)
        except:
            n = x
        return n
    versions = list(versions)
    new_versions, alpha, sizes = [], set(), set()
    for item in versions:
        it = item.split(sep)
        temp = []
        for i in it:
            x = toint(i)
            if not isinstance(x, int):
                # Mixed component like '1rc2': peel leading digits (head),
                # trailing digits (tail) and the alphabetic core (middle).
                x = u(x)
                middle = x.lstrip(digits).rstrip(digits)
                tail = toint(x.lstrip(digits).replace(middle, u''))
                head = toint(x.rstrip(digits).replace(middle, u''))
                middle = toint(middle)
                res = [head, middle, tail]
                while u'' in res:
                    res.remove(u'')
                for r in res:
                    if is_unicode(r):
                        alpha.add(r)
            else:
                res = [x]
            temp += res
        sizes.add(len(temp))
        new_versions.append(temp)
    # Map every alphabetic fragment to a negative rank so that any
    # pre-release tag sorts before a purely numeric component (0+).
    replace_dic = {}
    alpha = sorted(alpha, reverse=True)
    if len(alpha):
        replace_dic = dict(zip(alpha, list(range(-1, -(len(alpha) + 1), -1))))
    nmax = max(sizes)
    for i in range(len(new_versions)):
        item = []
        for z in new_versions[i]:
            if z in replace_dic:
                item.append(replace_dic[z])
            else:
                item.append(z)
        # Zero-pad to a common length so tuples compare positionally, and
        # carry the original string along as the last element.
        nzeros = nmax - len(item)
        item += [0] * nzeros
        item += [versions[i]]
        new_versions[i] = item
    new_versions = sorted(new_versions, reverse=reverse)
    return [n[-1] for n in new_versions]
Sort a list of version number strings . This function ensures that the package sorting based on number name is performed correctly when including alpha dev rc1 etc ...
62,035
def write_file(fname_parts, content):
    """Write *content* to the file at os.path.join(*fname_parts), creating
    any missing parent directories first.

    Directory-creation failures are ignored (the directory usually already
    exists); a genuine problem surfaces from open() below.
    """
    fname_parts = [str(part) for part in fname_parts]
    if len(fname_parts) > 1:
        try:
            os.makedirs(os.path.join(*fname_parts[:-1]))
        except OSError:
            pass
    # Context manager ensures the handle is closed even if write() raises
    # (the original leaked the handle on error).
    with open(os.path.join(*fname_parts), "w") as fhandle:
        fhandle.write(content)
write a file and create all needed directories
62,036
def remove_filter_function(self, name):
    """Remove the filter function registered under *name*, if any, and
    re-run filtering."""
    # Membership test directly on the dict; the original's `.keys()` call
    # was redundant.
    if name in self._filter_functions:
        del self._filter_functions[name]
        self.invalidateFilter()
Removes the filter function associated with name if it exists .
62,037
def get_data_files():
    """Return setup() data_files appropriate for the current platform."""
    if sys.platform.startswith('linux'):
        if PY3:
            return [('share/applications', ['scripts/condamanager3.desktop']),
                    ('share/pixmaps', ['img_src/condamanager3.png'])]
        return [('share/applications', ['scripts/condamanager.desktop']),
                ('share/pixmaps', ['img_src/condamanager.png'])]
    if os.name == 'nt':
        return [('scripts', ['img_src/conda-manager.ico'])]
    return []
Return data_files in a platform dependent manner
62,038
def encode(text, orig_coding):
    """Encode *text*, preferring (in order) a UTF-8 BOM marker, a coding
    declared in the text, the original coding, ASCII, then UTF-8.

    Returns (encoded_bytes, coding_used); raises RuntimeError when a coding
    declared in the text itself cannot encode it.
    """
    if orig_coding == 'utf-8-bom':
        return BOM_UTF8 + text.encode("utf-8"), 'utf-8-bom'
    coding = get_coding(text)
    if coding:
        try:
            return text.encode(coding), coding
        except (UnicodeError, LookupError):
            raise RuntimeError("Incorrect encoding (%s)" % coding)
    # BUG FIX 1: the original condition `A and B or C` evaluated C even when
    # orig_coding was None, crashing on .endswith; parenthesize correctly.
    if orig_coding and (orig_coding.endswith('-default') or
                        orig_coding.endswith('-guessed')):
        # BUG FIX 2: the second replace() re-read orig_coding, discarding
        # the '-default' strip; chain the replacements instead.
        coding = orig_coding.replace("-default", "")
        coding = coding.replace("-guessed", "")
        try:
            return text.encode(coding), coding
        except (UnicodeError, LookupError):
            pass
    try:
        return text.encode('ascii'), 'ascii'
    except UnicodeError:
        pass
    return text.encode('utf-8'), 'utf-8'
Function to encode a text .
62,039
def qapplication(translate=True, test_time=3):
    """Return the QApplication instance, creating it when none exists yet.

    When the TEST_CI environment variable is set, the application quits
    itself automatically after *test_time* seconds.
    """
    app = QApplication.instance()
    if app is None:
        app = QApplication(['Conda-Manager'])
        app.setApplicationName('Conda-Manager')
    if translate:
        install_translator(app)
    if os.environ.get('TEST_CI', None) is not None:
        shutdown_timer = QTimer(app)
        shutdown_timer.timeout.connect(app.quit)
        shutdown_timer.start(test_time * 1000)
    return app
Return the QApplication instance, creating it if it doesn't already exist.
62,040
def get_aes_mode(mode):
    """Return pycrypto's AES mode constant for *mode* (e.g. 'cbc' -> MODE_CBC).

    Raises ValueError when the installed pycrypto/pycryptodome does not
    expose the mode. (Narrowed from a generic Exception; existing
    `except Exception` callers still catch it.)
    """
    aes_mode_attr = "MODE_{}".format(mode.upper())
    try:
        return getattr(AES, aes_mode_attr)
    except AttributeError:
        raise ValueError(
            "Pycrypto/pycryptodome does not seem to support {}. ".format(aes_mode_attr) +
            "If you use pycrypto, you need a version >= 2.7a1 (or a special branch).")
Return pycrypto's AES mode constant; raise an exception if the mode is not supported.
62,041
def process_proxy_servers(proxy_settings):
    """Split conda's proxy configuration into the pieces the proxy factory
    needs (scheme/host/port/credentials plus the original full string)."""
    proxy_settings_dic = {}
    for key, proxy in proxy_settings.items():
        matches = [m.groupdict() for m in PROXY_RE.finditer(proxy)]
        if not matches:
            continue
        proxy_config = matches[0]
        host_port = proxy_config.pop('host_port')
        if ':' in host_port:
            host, port = host_port.split(':')
        else:
            host, port = host_port, None
        proxy_config['host'] = host
        proxy_config['port'] = int(port) if port else None
        proxy_config['full'] = proxy
        proxy_settings_dic[key] = proxy_config
    return proxy_settings_dic
Split the proxy conda configuration to be used by the proxy factory .
62,042
def proxy_servers(self):
    """Return the available proxy servers: environment variables first,
    overridden by the condarc 'proxy_servers' section. Empty when no rc
    loader is configured."""
    servers = {}
    if self._load_rc_func is None:
        return servers
    http_proxy = os.environ.get('HTTP_PROXY')
    https_proxy = os.environ.get('HTTPS_PROXY')
    if http_proxy:
        servers['http'] = http_proxy
    if https_proxy:
        servers['https'] = https_proxy
    servers.update(self._load_rc_func().get('proxy_servers', {}))
    return servers
Return the proxy servers available .
62,043
def _create_proxy(proxy_setting):
    """Build a QNetworkProxy from a settings dict as produced by
    process_proxy_servers."""
    proxy = QNetworkProxy()
    proxy_scheme = proxy_setting['scheme']
    proxy_host = proxy_setting['host']
    proxy_port = proxy_setting['port']
    proxy_username = proxy_setting['username']
    proxy_password = proxy_setting['password']
    proxy.setType(QNetworkProxy.HttpProxy)
    # NOTE(review): this guard is always truthy ('scheme://host' is never an
    # empty string), so the host is effectively set unconditionally —
    # preserved from the original.
    if '{0}://{1}'.format(proxy_scheme, proxy_host):
        proxy.setHostName(proxy_host)
    if proxy_port:
        proxy.setPort(proxy_port)
    if proxy_username:
        proxy.setUser(proxy_username)
    if proxy_password:
        proxy.setPassword(proxy_password)
    return proxy
Create a Network proxy for the given proxy settings .
62,044
def _request_finished(self, reply):
    """Qt callback fired when a network request completes.

    Two phases per download: the reply to the initial HEAD request decides
    (via content-length vs. existing file size) whether to issue the real
    GET; the reply to the GET hands the payload to _save().
    """
    url = to_text_string(reply.url().toEncoded(), encoding='utf-8')
    if url in self._paths:
        path = self._paths[url]
    if url in self._workers:
        worker = self._workers[url]
    if url in self._head_requests:
        # Phase 1: HEAD reply.
        error = reply.error()
        if error:
            logger.error(str(('Head Reply Error:', error)))
            worker.sig_download_finished.emit(url, path)
            worker.sig_finished.emit(worker, path, error)
            return
        self._head_requests.pop(url)
        start_download = not bool(error)
        # Collect headers case-insensitively to read content-length.
        header_pairs = reply.rawHeaderPairs()
        headers = {}
        for hp in header_pairs:
            headers[to_text_string(hp[0]).lower()] = to_text_string(hp[1])
        total_size = int(headers.get('content-length', 0))
        if os.path.isfile(path):
            # Skip the transfer when the local copy already has the
            # advertised size.
            file_size = os.path.getsize(path)
            start_download = file_size != total_size
        if start_download:
            # Phase 2 kick-off: issue the actual GET for the payload.
            qurl = QUrl(url)
            request = QNetworkRequest(qurl)
            self._get_requests[url] = request
            reply = self._manager.get(request)
            error = reply.error()
            if error:
                logger.error(str(('Reply Error:', error)))
            # Default argument pins `worker` now, avoiding the
            # late-binding-closure pitfall.
            reply.downloadProgress.connect(
                lambda r, t, w=worker: self._progress(r, t, w))
        else:
            # Local file is already complete; report success immediately.
            worker.finished = True
            worker.sig_download_finished.emit(url, path)
            worker.sig_finished.emit(worker, path, None)
    elif url in self._get_requests:
        # Phase 2: GET reply — persist the downloaded bytes.
        data = reply.readAll()
        self._save(url, path, data)
Callback for download once the request has finished .
62,045
def _save(self, url, path, data):
    """Write downloaded *data* to *path*, notify the worker's signals and
    drop all bookkeeping for *url*."""
    worker = self._workers[url]
    path = self._paths[url]
    if len(data):
        try:
            with open(path, 'wb') as handle:
                handle.write(data)
        except Exception:
            logger.error((url, path))
    worker.finished = True
    worker.sig_download_finished.emit(url, path)
    worker.sig_finished.emit(worker, path, None)
    self._get_requests.pop(url)
    self._workers.pop(url)
    self._paths.pop(url)
Save data of downloaded url in path .
62,046
def _progress ( bytes_received , bytes_total , worker ) : worker . sig_download_progress . emit ( worker . url , worker . path , bytes_received , bytes_total )
Return download progress .
62,047
def download(self, url, path):
    """Start (or reuse) an asynchronous download of *url* into *path* and
    return the DownloadWorker tracking it.

    A still-running worker for the same URL is returned as-is; a finished
    one is replaced by a fresh download (the original expressed this with
    an obfuscated `while ... return` that was just a conditional).
    """
    qurl = QUrl(url)
    url = to_text_string(qurl.toEncoded(), encoding='utf-8')
    logger.debug(str((url, path)))
    if url in self._workers:
        existing = self._workers[url]
        if not existing.finished:
            return existing
    worker = DownloadWorker(url, path)
    folder = os.path.dirname(os.path.abspath(path))
    if not os.path.isdir(folder):
        os.makedirs(folder)
    # HEAD first; _request_finished decides whether a GET is needed.
    request = QNetworkRequest(qurl)
    self._head_requests[url] = request
    self._paths[url] = path
    self._workers[url] = worker
    self._manager.head(request)
    self._timer.start()
    return worker
Download url and save data to path .
62,048
def _start ( self ) : if len ( self . _queue ) == 1 : thread = self . _queue . popleft ( ) thread . start ( ) self . _timer . start ( )
Start the next threaded worker in the queue .
62,049
def _create_worker(self, method, *args, **kwargs):
    """Create a RequestsDownloadWorker that runs *method* on its own QThread.

    Wires the worker/thread lifecycle signals, queues the thread and kicks
    the queue; returns the worker.
    """
    thread = QThread()
    worker = RequestsDownloadWorker(method, args, kwargs)
    worker.moveToThread(thread)
    # When the worker finishes, try to start the next queued thread.
    worker.sig_finished.connect(self._start)
    # Fan the manager-level signals out to this worker's signals.
    self._sig_download_finished.connect(worker.sig_download_finished)
    self._sig_download_progress.connect(worker.sig_download_progress)
    # Tear the thread down once the worker is done.
    worker.sig_finished.connect(thread.quit)
    thread.started.connect(worker.start)
    self._queue.append(thread)
    self._threads.append(thread)
    self._workers.append(worker)
    self._start()
    return worker
Create a new worker instance .
62,050
def _download(self, url, path=None, force=False):
    """Worker callback: download *url* to *path* (basename of the URL when
    omitted), emitting progress/finished signals.

    The transfer is skipped when the target file already exists with the
    advertised size, unless *force* is set. Returns the path, or None when
    the request fails.
    """
    if path is None:
        path = url.split('/')[-1]
    folder = os.path.dirname(os.path.abspath(path))
    if not os.path.isdir(folder):
        os.makedirs(folder)
    try:
        r = requests.get(url, stream=True, proxies=self.proxy_servers)
    except Exception as error:
        # BUG FIX: the original logged and fell through, then crashed with
        # an unbound `r`; bail out instead (debug print removed).
        logger.error(str(error))
        return None
    total_size = int(r.headers.get('Content-Length', 0))
    if os.path.isfile(path) and not force:
        if os.path.getsize(path) == total_size:
            self._sig_download_finished.emit(url, path)
            return path
    progress_size = 0
    with open(path, 'wb') as f:
        for chunk in r.iter_content(chunk_size=self._chunk_size):
            if chunk:
                f.write(chunk)
                progress_size += len(chunk)
                self._sig_download_progress.emit(url, path,
                                                 progress_size, total_size)
    self._sig_download_finished.emit(url, path)
    return path
Callback for download .
62,051
def _is_valid_url(self, url):
    """Worker callback: True when a HEAD request to *url* answers 200."""
    try:
        response = requests.head(url, proxies=self.proxy_servers)
        return response.status_code in [200]
    except Exception as error:
        logger.error(str(error))
        return False
Callback for is_valid_url .
62,052
def _is_valid_channel(self, channel, conda_url='https://conda.anaconda.org'):
    """Worker callback: True when *channel* serves a repodata.json for the
    current platform."""
    if channel.startswith(('https://', 'http://')):
        url = channel
    else:
        url = "{0}/{1}".format(conda_url, channel)
    # Trim a single trailing slash, as the original did.
    if url.endswith('/'):
        url = url[:-1]
    plat = self._conda_api.get_platform()
    repodata_url = "{0}/{1}/{2}".format(url, plat, 'repodata.json')
    try:
        response = requests.head(repodata_url, proxies=self.proxy_servers)
        return response.status_code in [200]
    except Exception as error:
        logger.error(str(error))
        return False
Callback for is_valid_channel .
62,053
def _is_valid_api_url(self, url):
    """Worker callback: True when *url* responds like an anaconda API root
    (a JSON document with "ok": 1)."""
    payload = {}
    try:
        response = requests.get(url, proxies=self.proxy_servers)
        content = to_text_string(response.content, encoding='utf-8')
        payload = json.loads(content)
    except Exception as error:
        logger.error(str(error))
    return payload.get('ok', 0) == 1
Callback for is_valid_api_url .
62,054
def download(self, url, path=None, force=False):
    """Queue a threaded download of *url* into *path* and return its worker."""
    logger.debug(str((url, path, force)))
    return self._create_worker(self._download, url, path=path, force=force)
Download file given by url and save it to path .
62,055
def terminate(self):
    """Ask every thread to quit and drop all worker/thread references."""
    for t in self._threads:
        t.quit()
    # BUG FIX: the original assigned `self._thread = []` (singular typo),
    # so the _threads list was never actually cleared.
    self._threads = []
    self._workers = []
Terminate all workers and threads .
62,056
def is_valid_url(self, url, non_blocking=True):
    """Check whether *url* is reachable; runs on a worker thread unless
    non_blocking is False."""
    logger.debug(str((url)))
    if not non_blocking:
        return self._is_valid_url(url)
    return self._create_worker(self._is_valid_url, url)
Check if url is valid .
62,057
def is_valid_api_url(self, url, non_blocking=True):
    """Check if anaconda api url is valid."""
    logger.debug(str((url)))
    if non_blocking:
        return self._create_worker(self._is_valid_api_url, url)
    return self._is_valid_api_url(url=url)
Check if anaconda api url is valid .
62,058
def is_valid_channel(self, channel,
                     conda_url='https://conda.anaconda.org',
                     non_blocking=True):
    """Check if a conda channel is valid."""
    logger.debug(str((channel, conda_url)))
    if non_blocking:
        return self._create_worker(self._is_valid_channel, channel,
                                   conda_url)
    return self._is_valid_channel(channel, conda_url=conda_url)
Check if a conda channel is valid .
62,059
def human_bytes(n):
    """Return the number of bytes n in more human readable form."""
    if n < 1024:
        return '%d B' % n
    kib = n / 1024
    if kib < 1024:
        return '%d KB' % round(kib)
    mib = kib / 1024
    if mib < 1024:
        return '%.1f MB' % mib
    return '%.2f GB' % (mib / 1024)
Return the number of bytes n in more human readable form .
62,060
def ready_print(worker, output, error):
    """Local test helper: print a running call count with the results."""
    global COUNTER
    COUNTER = COUNTER + 1
    print(COUNTER, output, error)
Local test helper .
62,061
def _clean ( self ) : if self . _workers : for w in self . _workers : if w . is_finished ( ) : self . _workers . remove ( w ) else : self . _current_worker = None self . _timer . stop ( )
Remove references of inactive workers periodically .
62,062
def _call_conda(self, extra_args, abspath=True, parse=False,
                callback=None):
    """Call conda with the list of extra arguments and return the worker."""
    if abspath:
        # Use the interpreter/script of the root prefix explicitly.
        if sys.platform == 'win32':
            python = join(self.ROOT_PREFIX, 'python.exe')
            conda = join(self.ROOT_PREFIX, 'Scripts',
                         'conda-script.py')
        else:
            python = join(self.ROOT_PREFIX, 'bin/python')
            conda = join(self.ROOT_PREFIX, 'bin/conda')
        command = [python, conda]
    else:
        # Rely on whatever conda is found on PATH.
        command = ['conda']

    command.extend(extra_args)

    worker = ProcessWorker(command, parse=parse, callback=callback)
    worker.sig_finished.connect(self._start)
    self._queue.append(worker)
    self._start()
    return worker
Call conda with the list of extra arguments and return the worker .
62,063
def _setup_install_commands_from_kwargs ( kwargs , keys = tuple ( ) ) : cmd_list = [ ] if kwargs . get ( 'override_channels' , False ) and 'channel' not in kwargs : raise TypeError ( 'conda search: override_channels requires channel' ) if 'env' in kwargs : cmd_list . extend ( [ '--name' , kwargs . pop ( 'env' ) ] ) if 'prefix' in kwargs : cmd_list . extend ( [ '--prefix' , kwargs . pop ( 'prefix' ) ] ) if 'channel' in kwargs : channel = kwargs . pop ( 'channel' ) if isinstance ( channel , str ) : cmd_list . extend ( [ '--channel' , channel ] ) else : cmd_list . append ( '--channel' ) cmd_list . extend ( channel ) for key in keys : if key in kwargs and kwargs [ key ] : cmd_list . append ( '--' + key . replace ( '_' , '-' ) ) return cmd_list
Setup install commands for conda .
62,064
def _get_conda_version ( stdout , stderr ) : pat = re . compile ( r'conda:?\s+(\d+\.\d\S+|unknown)' ) m = pat . match ( stderr . decode ( ) . strip ( ) ) if m is None : m = pat . match ( stdout . decode ( ) . strip ( ) ) if m is None : raise Exception ( 'output did not match: {0}' . format ( stderr ) ) return m . group ( 1 )
Callback for get_conda_version .
62,065
def get_envs(self, log=True):
    """Return environment list of absolute path to their prefixes."""
    if log:
        logger.debug('')

    envs_dir = os.sep.join([self.ROOT_PREFIX, 'envs'])
    candidates = [os.sep.join([envs_dir, entry])
                  for entry in os.listdir(envs_dir)]
    # Keep only directories that actually look like conda environments.
    return [env for env in candidates
            if os.path.isdir(env) and self.environment_exists(prefix=env)]
Return environment list of absolute path to their prefixes .
62,066
def get_prefix_envname(self, name, log=False):
    """Return full prefix path of environment defined by `name`."""
    prefix = self.ROOT_PREFIX if name == 'root' else None
    # A matching environment from the envs dirs takes precedence.
    for env_prefix in self.get_envs():
        if basename(env_prefix) == name:
            prefix = env_prefix
    return prefix
Return full prefix path of environment defined by name .
62,067
def linked(prefix):
    """Return set of canonical names of linked packages in prefix."""
    logger.debug(str(prefix))
    if not isdir(prefix):
        return set()
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        return set()
    # Each linked package has a <canonical-name>.json metadata file.
    return {fn[:-5] for fn in os.listdir(meta_dir)
            if fn.endswith('.json')}
Return set of canonical names of linked packages in prefix .
62,068
def info(self, abspath=True):
    """Return a dictionary with configuration information."""
    logger.debug(str(''))
    cmd_list = ['info', '--json']
    return self._call_and_parse(cmd_list, abspath=abspath)
Return a dictionary with configuration information .
62,069
def package_info(self, package, abspath=True):
    """Return a dictionary with package information."""
    cmd_list = ['info', package, '--json']
    return self._call_and_parse(cmd_list, abspath=abspath)
Return a dictionary with package information .
62,070
def search(self, regex=None, spec=None, **kwargs):
    """Search for packages."""
    cmd_list = ['search', '--json']

    if regex and spec:
        raise TypeError('conda search: only one of regex or spec allowed')

    if regex:
        cmd_list.append(regex)
    if spec:
        cmd_list.extend(['--spec', spec])
    if 'platform' in kwargs:
        cmd_list.extend(['--platform', kwargs.pop('platform')])

    cmd_list.extend(self._setup_install_commands_from_kwargs(
        kwargs,
        ('canonical', 'unknown', 'use_index_cache', 'outdated',
         'override_channels')))

    return self._call_and_parse(cmd_list,
                                abspath=kwargs.get('abspath', True))
Search for packages .
62,071
def create_from_yaml(self, name, yamlfile):
    """Create new environment using conda-env via a yaml specification file."""
    logger.debug(str((name, yamlfile)))
    return self._call_and_parse(['env', 'create', '-n', name, '-f',
                                 yamlfile, '--json'])
Create new environment using conda - env via a yaml specification file .
62,072
def create(self, name=None, prefix=None, pkgs=None, channels=None):
    """Create an environment with a specified set of packages."""
    logger.debug(str((prefix, pkgs, channels)))

    if (not pkgs or (not isinstance(pkgs, (list, tuple)) and
                     not is_text_string(pkgs))):
        raise TypeError('must specify a list of one or more packages to '
                        'install into new environment')

    cmd_list = ['create', '--yes', '--json', '--mkdir']
    if name:
        ref = name
        # Candidate locations where an env of this name could live.
        search = [os.path.join(d, name) for d in
                  self.info().communicate()[0]['envs_dirs']]
        cmd_list.extend(['--name', name])
    elif prefix:
        ref = prefix
        search = [prefix]
        cmd_list.extend(['--prefix', prefix])
    else:
        raise TypeError('must specify either an environment name or a '
                        'path for new environment')

    if any(os.path.exists(candidate) for candidate in search):
        raise CondaEnvExistsError('Conda environment {0} already '
                                  'exists'.format(ref))

    if isinstance(pkgs, (list, tuple)):
        cmd_list.extend(pkgs)
    elif is_text_string(pkgs):
        # A string is taken as a requirements file path.
        cmd_list.extend(['--file', pkgs])

    if channels:
        cmd_list.append('--override-channels')
        for channel in channels:
            cmd_list.append('--channel')
            cmd_list.append(channel)

    return self._call_and_parse(cmd_list)
Create an environment with a specified set of packages .
62,073
def parse_token_channel(self, channel, token):
    """Adapt a channel to include token of the logged user."""
    if (token and channel not in self.DEFAULT_CHANNELS and
            channel != 'defaults'):
        # Insert a `t/<token>` segment before the last path component.
        url_parts = channel.split('/')
        head = '/'.join(url_parts[:-1])
        return '{0}/{1}/{2}'.format(head, 't/{0}'.format(token),
                                    url_parts[-1])
    return channel
Adapt a channel to include token of the logged user .
62,074
def install(self, name=None, prefix=None, pkgs=None, dep=True,
            channels=None, token=None):
    """Install a set of packages into an environment by name or path."""
    logger.debug(str((prefix, pkgs, channels)))

    if not pkgs or not isinstance(pkgs, (list, tuple, str)):
        raise TypeError('must specify a list of one or more packages to '
                        'install into existing environment')

    cmd_list = ['install', '--yes', '--json', '--force-pscheck']
    if name:
        cmd_list.extend(['--name', name])
    elif prefix:
        cmd_list.extend(['--prefix', prefix])
    # Neither given: conda installs into the current/default env.

    if channels:
        cmd_list.append('--override-channels')
        for channel in channels:
            cmd_list.append('--channel')
            cmd_list.append(self.parse_token_channel(channel, token))

    if isinstance(pkgs, (list, tuple)):
        cmd_list.extend(pkgs)
    elif isinstance(pkgs, str):
        # A string is taken as a requirements file path.
        cmd_list.extend(['--file', pkgs])

    if not dep:
        cmd_list.extend(['--no-deps'])

    return self._call_and_parse(cmd_list)
Install a set of packages into an environment by name or path .
62,075
def remove_environment(self, name=None, path=None, **kwargs):
    """Remove an environment entirely (delegates to remove with all=True)."""
    return self.remove(name=name, path=path, all=True, **kwargs)
Remove an environment entirely .
62,076
def clone_environment(self, clone, name=None, prefix=None, **kwargs):
    """Clone the environment `clone` into `name` or `prefix`."""
    cmd_list = ['create', '--json']

    if (name and prefix) or not (name or prefix):
        raise TypeError("conda clone_environment: exactly one of `name` "
                        "or `path` required")

    if name:
        cmd_list.extend(['--name', name])
    if prefix:
        cmd_list.extend(['--prefix', prefix])
    cmd_list.extend(['--clone', clone])

    cmd_list.extend(self._setup_install_commands_from_kwargs(
        kwargs,
        ('dry_run', 'unknown', 'use_index_cache', 'use_local', 'no_pin',
         'force', 'all', 'channel', 'override_channels',
         'no_default_packages')))

    return self._call_and_parse(cmd_list,
                                abspath=kwargs.get('abspath', True))
Clone the environment clone into name or prefix .
62,077
def _setup_config_from_kwargs ( kwargs ) : cmd_list = [ '--json' , '--force' ] if 'file' in kwargs : cmd_list . extend ( [ '--file' , kwargs [ 'file' ] ] ) if 'system' in kwargs : cmd_list . append ( '--system' ) return cmd_list
Setup config commands for conda .
62,078
def config_add(self, key, value, **kwargs):
    """Add a value to a key in the conda config."""
    cmd_list = ['config', '--add', key, value]
    cmd_list.extend(self._setup_config_from_kwargs(kwargs))
    # The callback surfaces any warnings conda reported.
    return self._call_and_parse(cmd_list,
                                abspath=kwargs.get('abspath', True),
                                callback=lambda o, e: o.get('warnings', []))
Add a value to a key .
62,079
def dependencies(self, name=None, prefix=None, pkgs=None, channels=None,
                 dep=True):
    """Get dependency list for packages to be installed in an env."""
    if not pkgs or not isinstance(pkgs, (list, tuple)):
        raise TypeError('must specify a list of one or more packages to '
                        'install into existing environment')

    # --dry-run makes conda resolve without touching the environment.
    cmd_list = ['install', '--dry-run', '--json', '--force-pscheck']
    if not dep:
        cmd_list.extend(['--no-deps'])

    if name:
        cmd_list.extend(['--name', name])
    elif prefix:
        cmd_list.extend(['--prefix', prefix])

    cmd_list.extend(pkgs)

    if channels:
        cmd_list.append('--override-channels')
        for channel in channels:
            cmd_list.append('--channel')
            cmd_list.append(channel)

    return self._call_and_parse(cmd_list)
Get dependency list for packages to be installed in an env.
62,080
def environment_exists(self, name=None, prefix=None, abspath=True,
                       log=True):
    """Check if an environment exists by name or by prefix."""
    if log:
        logger.debug(str((name, prefix)))

    if name and prefix:
        raise TypeError("Exactly one of 'name' or 'prefix' is required.")

    if name:
        prefix = self.get_prefix_envname(name, log=log)
    if prefix is None:
        prefix = self.ROOT_PREFIX

    # A conda environment is identified by its conda-meta directory.
    return os.path.isdir(os.path.join(prefix, 'conda-meta'))
Check if an environment exists by name or by prefix .
62,081
def clear_lock(self, abspath=True):
    """Clean any conda lock in the system."""
    return self._call_and_parse(['clean', '--lock', '--json'],
                                abspath=abspath)
Clean any conda lock in the system .
62,082
def package_version(self, prefix=None, name=None, pkg=None, build=False):
    """Get installed package version in a given env."""
    package_versions = {}

    if name and prefix:
        raise TypeError("Exactly one of 'name' or 'prefix' is required.")
    if name:
        prefix = self.get_prefix_envname(name)

    if self.environment_exists(prefix=prefix):
        for package in self.linked(prefix):
            if pkg in package:
                n, v, b = self.split_canonical_name(package)
                # Optionally include the build string.
                package_versions[n] = ('{0}={1}'.format(v, b)
                                       if build else v)

    return package_versions.get(pkg)
Get installed package version in a given env .
62,083
def load_rc(self, path=None, system=False):
    """Load the conda configuration file.

    The user condarc wins unless `system` is True; returns {} when no
    file is found.
    """
    if os.path.isfile(self.user_rc_path) and not system:
        path = self.user_rc_path
    elif os.path.isfile(self.sys_rc_path):
        path = self.sys_rc_path

    if not path or not os.path.isfile(path):
        return {}

    with open(path) as f:
        # NOTE(review): yaml.load without an explicit Loader is unsafe
        # on untrusted files; kept as-is to preserve behavior —
        # consider yaml.safe_load.
        return yaml.load(f) or {}
Load the conda configuration file .
62,084
def get_condarc_channels(self, normalize=False,
                         conda_url='https://conda.anaconda.org',
                         channels=None):
    """Return all the channel urls defined in .condarc."""
    # The system condarc may override the hardcoded default channels.
    default_channels = self.load_rc(system=True).get(
        'default_channels', self.DEFAULT_CHANNELS)

    normalized_channels = []

    if channels is None:
        condarc = self.load_rc()
        channels = condarc.get('channels')
        if channels is None:
            channels = ['defaults']

    if normalize:
        template = '{0}/{1}' if conda_url[-1] != '/' else '{0}{1}'
        for channel in channels:
            if channel == 'defaults':
                normalized_channels += default_channels
            elif channel.startswith('http'):
                normalized_channels.append(channel)
            else:
                normalized_channels.append(
                    template.format(conda_url, channel))
        channels = normalized_channels

    return channels
Return all the channel urls defined in . condarc .
62,085
def _call_pip(self, name=None, prefix=None, extra_args=None,
              callback=None):
    """Call pip in QProcess worker."""
    command = self._pip_cmd(name=name, prefix=prefix)
    command.extend(extra_args)

    worker = ProcessWorker(command, pip=True, callback=callback)
    worker.sig_finished.connect(self._start)
    self._queue.append(worker)
    self._start()
    return worker
Call pip in QProcess worker .
62,086
def _pip_cmd ( self , name = None , prefix = None ) : if ( name and prefix ) or not ( name or prefix ) : raise TypeError ( "conda pip: exactly one of 'name' " "or 'prefix' " "required." ) if name and self . environment_exists ( name = name ) : prefix = self . get_prefix_envname ( name ) if sys . platform == 'win32' : python = join ( prefix , 'python.exe' ) pip = join ( prefix , 'pip.exe' ) else : python = join ( prefix , 'bin/python' ) pip = join ( prefix , 'bin/pip' ) cmd_list = [ python , pip ] return cmd_list
Get pip location based on environment name or prefix .
62,087
def pip_list(self, name=None, prefix=None, abspath=True):
    """Get list of pip installed packages."""
    if (name and prefix) or not (name or prefix):
        raise TypeError("conda pip: exactly one of 'name' " "or 'prefix' "
                        "required.")

    if name:
        prefix = self.get_prefix_envname(name)

    # NOTE(review): this hardcodes the posix 'bin' layout, unlike
    # _pip_cmd which special-cases win32 — confirm on Windows.
    pip_command = os.sep.join([prefix, 'bin', 'python'])
    cmd_list = [pip_command, PIP_LIST_SCRIPT]

    worker = ProcessWorker(cmd_list, pip=True, parse=True,
                           callback=self._pip_list,
                           extra_kwargs={'prefix': prefix})
    worker.sig_finished.connect(self._start)
    self._queue.append(worker)
    self._start()
    return worker
Get list of pip installed packages .
62,088
def _pip_list ( self , stdout , stderr , prefix = None ) : result = stdout linked = self . linked ( prefix ) pip_only = [ ] linked_names = [ self . split_canonical_name ( l ) [ 0 ] for l in linked ] for pkg in result : name = self . split_canonical_name ( pkg ) [ 0 ] if name not in linked_names : pip_only . append ( pkg ) return pip_only
Callback for pip_list .
62,089
def pip_remove(self, name=None, prefix=None, pkgs=None):
    """Remove a pip package in given environment by name or prefix."""
    logger.debug(str((prefix, pkgs)))

    if isinstance(pkgs, (list, tuple)):
        # NOTE(review): joining with spaces yields a single argv entry
        # ("pkg1 pkg2"); verify multi-package removal actually works.
        pkg = ' '.join(pkgs)
    else:
        pkg = pkgs

    return self._call_pip(name=name, prefix=prefix,
                          extra_args=['uninstall', '--yes', pkg])
Remove a pip package in given environment by name or prefix .
62,090
def pip_search(self, search_string=None):
    """Search for pip packages in PyPI matching `search_string`."""
    return self._call_pip(name='root',
                          extra_args=['search', search_string],
                          callback=self._pip_search)
Search for pip packages in PyPI matching search_string .
62,091
def _pip_search(stdout, stderr):
    """Callback for pip search: map package name -> description.

    Parses lines of the form "name - description".
    """
    result = {}
    for line in to_text_string(stdout).split('\n'):
        if ' - ' in line:
            parts = line.split(' - ')
            result[parts[0].strip()] = parts[1].strip()
    return result
Callback for pip search .
62,092
def _timer_update(self):
    """Add some moving points to the dependency resolution text."""
    self._timer_counter += 1

    # Rotate the dots buffer by one position.
    dot = self._timer_dots.pop(0)
    self._timer_dots = self._timer_dots + [dot]

    self._rows = [[_(u'Resolving dependencies') + dot, u'', u'', u'']]
    index = self.createIndex(0, 0)
    self.dataChanged.emit(index, index)

    # Stop refreshing after a fixed number of updates.
    if self._timer_counter > 150:
        self._timer.stop()
        self._timer_counter = 0
Add some moving points to the dependency resolution text .
62,093
def _create_worker(self, method, *args, **kwargs):
    """Create a worker for this client to be run in a separate thread."""
    thread = QThread()
    worker = ClientWorker(method, args, kwargs)
    worker.moveToThread(thread)
    # Requeue processing when the worker ends, and tear the thread down.
    worker.sig_finished.connect(self._start)
    worker.sig_finished.connect(thread.quit)
    thread.started.connect(worker.start)

    self._queue.append(thread)
    self._threads.append(thread)
    self._workers.append(worker)
    self._start()
    return worker
Create a worker for this client to be run in a separate thread .
62,094
def _load_repodata(filepaths, extra_data=None, metadata=None):
    """Load all the available packages information.

    Reads each repodata file in `filepaths` (optionally bz2 compressed),
    aggregates per-package version/size/app info, and returns a tuple
    (all_packages, all_apps) where all_apps is the subset of packages
    that carry a 'type' entry (i.e. applications).
    """
    extra_data = extra_data if extra_data else {}
    metadata = metadata if metadata else {}
    repodata = []
    for filepath in filepaths:
        compressed = filepath.endswith('.bz2')
        mode = 'rb' if filepath.endswith('.bz2') else 'r'
        if os.path.isfile(filepath):
            with open(filepath, mode) as f:
                raw_data = f.read()
            if compressed:
                data = bz2.decompress(raw_data)
            else:
                data = raw_data
            # Malformed repodata is logged and treated as empty.
            try:
                data = json.loads(to_text_string(data, 'UTF-8'))
            except Exception as error:
                logger.error(str(error))
                data = {}
            repodata.append(data)
    all_packages = {}
    for data in repodata:
        packages = data.get('packages', {})
        for canonical_name in packages:
            # Note: `data` is rebound here to the package record.
            data = packages[canonical_name]
            # canonical name is <name>-<version>-<build>.
            name, version, b = tuple(canonical_name.rsplit('-', 2))
            if name not in all_packages:
                all_packages[name] = {'versions': set(),
                                      'size': {},
                                      'type': {},
                                      'app_entry': {},
                                      'app_type': {},
                                      }
            elif name in metadata:
                # NOTE(review): because of the `elif`, metadata is only
                # merged for names seen more than once — confirm intent.
                temp_data = all_packages[name]
                temp_data['home'] = metadata[name].get('home', '')
                temp_data['license'] = metadata[name].get('license', '')
                temp_data['summary'] = metadata[name].get('summary', '')
                temp_data['latest_version'] = metadata[name].get('version')
                all_packages[name] = temp_data
            all_packages[name]['versions'].add(version)
            all_packages[name]['size'][version] = data.get('size', '')
            # Only store application-related fields when a type exists.
            if data.get('type'):
                all_packages[name]['type'][version] = data.get('type')
                all_packages[name]['app_entry'][version] = data.get('app_entry')
                all_packages[name]['app_type'][version] = data.get('app_type')
    all_apps = {}
    for name in all_packages:
        versions = sort_versions(list(all_packages[name]['versions']))
        all_packages[name]['versions'] = versions[:]
        for version in versions:
            has_type = all_packages[name].get('type')
            if has_type:
                all_apps[name] = all_packages[name].copy()
                # Restrict the app's versions to those that have a type.
                versions = all_apps[name]['versions'][:]
                types = all_apps[name]['type']
                app_versions = [v for v in versions if v in types]
                all_apps[name]['versions'] = app_versions
    return all_packages, all_apps
Load all the available packages information.
62,095
def login(self, username, password, application, application_url):
    """Login to anaconda cloud."""
    logger.debug(str((username, application, application_url)))
    return self._create_worker(self._anaconda_client_api.authenticate,
                               username, password, application,
                               application_url)
Login to anaconda cloud .
62,096
def logout(self):
    """Logout from anaconda cloud."""
    logger.debug('Logout')
    method = self._anaconda_client_api.remove_authentication
    return self._create_worker(method)
Logout from anaconda cloud .
62,097
def load_repodata(self, filepaths, extra_data=None, metadata=None):
    """Load all the available packages information for downloaded repodata."""
    logger.debug(str((filepaths)))
    return self._create_worker(self._load_repodata, filepaths,
                               extra_data=extra_data, metadata=metadata)
Load all the available packages information for downloaded repodata.
62,098
def prepare_model_data(self, packages, linked, pip=None,
                       private_packages=None):
    """Prepare downloaded package info along with pip packages info."""
    logger.debug('')
    return self._prepare_model_data(packages, linked, pip=pip,
                                    private_packages=private_packages)
Prepare downloaded package info along with pip packages info.
62,099
def set_domain(self, domain='https://api.anaconda.org'):
    """Reset current api domain."""
    logger.debug(str((domain)))

    config = binstar_client.utils.get_config()
    config['url'] = domain
    binstar_client.utils.set_config(config)

    # Rebuild the server api client against the new domain.
    self._anaconda_client_api = binstar_client.utils.get_server_api(
        token=None, log_level=logging.NOTSET)
    return self.user()
Reset current api domain .