idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
46,800 | def get_mmr_constants ( ) : Cnt = { 'ISOTOPE' : 'F18' , 'DCYCRR' : DCYCRR , 'ALPHA' : ALPHA , 'NRNG' : NRNG , 'NSRNG' : NSRNG , 'NCRS' : NCRS , 'NCRSR' : NCRSR , 'NBCKT' : 224 , 'NSANGLES' : A , 'NSBINS' : W , 'Naw' : - 1 , 'NSN11' : NSN11 , 'NSN1' : NSN1 , 'NSN64' : NSN64 , 'MRD' : MRD , 'SPN' : SPAN , 'TFOV2' : TFOV2 , 'RNG_STRT' : RNG_STRT , 'RNG_END' : RNG_END , 'SS_IMZ' : SS_IMZ , 'SS_IMY' : SS_IMY , 'SS_IMX' : SS_IMX , 'SS_VXZ' : SS_VXZ , 'SS_VXY' : SS_VXY , 'IS_VXZ' : IS_VXZ , 'SSE_IMZ' : SSE_IMZ , 'SSE_IMY' : SSE_IMY , 'SSE_IMX' : SSE_IMX , 'SSE_VXZ' : SSE_VXZ , 'SSE_VXY' : SSE_VXY , 'SZ_IMZ' : SZ_IMZ , 'SZ_IMY' : SZ_IMY , 'SZ_IMX' : SZ_IMX , 'SZ_VOXZ' : SZ_VOXZ , 'SZ_VOXY' : SZ_VOXY , 'SZ_VOXZi' : SZ_VOXZi , 'SO_IMZ' : SO_IMZ , 'SO_IMY' : SO_IMY , 'SO_IMX' : SO_IMX , 'SO_VXZ' : SO_VXZ , 'SO_VXY' : SO_VXY , 'SO_VXX' : SO_VXX , 'NSEG0' : SEG0 , 'RE' : RE , 'R' : R , 'SEG' : seg , 'MNRD' : minrd , 'MXRD' : maxrd , 'SCTRNG' : sct_irng , 'TGAP' : TGAP , 'OFFGAP' : OFFGAP , 'AXR' : AXR , 'R02' : R02 , 'LLD' : LLD , 'E511' : E511 , 'ER' : ER , 'COSUPSMX' : COSUPSMX , 'NCOS' : NCOS , 'COSSTP' : COSSTP , 'ICOSSTP' : ICOSSTP , 'ETHRLD' : ETHRLD , 'CLGHT' : CLGHT , 'CWND' : CWND , 'TOFBINN' : TOFBINN , 'TOFBINS' : TOFBINS , 'TOFBIND' : TOFBIND , 'ITOFBIND' : ITOFBIND , 'AFFINE' : np . array ( [ [ - 10 * SO_VXX , 0. , 0. , 5. * SO_IMX * SO_VXX ] , [ 0. , 10 * SO_VXY , 0. , - 5. * SO_IMY * SO_VXY ] , [ 0. , 0. , 10 * SO_VXZ , - 5. * SO_IMZ * SO_VXZ ] , [ 0. , 0. , 0. , 1. ] ] ) , 'IMSIZE' : np . array ( [ SO_IMZ , SO_IMY , SO_IMX ] ) , 'BTP' : 0 , 'BTPRT' : 1.0 , 'VERBOSE' : False , 'SCTSCLEM' : SCTSCLEM , 'SCTSCLMU' : SCTSCLMU , } Cnt = get_setup ( Cnt = Cnt ) return Cnt | Put all the constants together in a dictionary |
46,801 | def check_version ( Cnt , chcklst = [ 'RESPATH' , 'REGPATH' , 'DCM2NIIX' , 'HMUDIR' ] ) : output = { } for itm in chcklst : output [ itm ] = False if 'RESPATH' in chcklst and 'RESPATH' in Cnt : try : proc = Popen ( [ Cnt [ 'RESPATH' ] , '--version' ] , stdout = PIPE ) out = proc . stdout . read ( ) if reg_ver in out : output [ 'RESPATH' ] = True except OSError : print 'e> NiftyReg (reg_resample) either is NOT installed or is corrupt.' if 'REGPATH' in chcklst and 'REGPATH' in Cnt : try : proc = Popen ( [ Cnt [ 'REGPATH' ] , '--version' ] , stdout = PIPE ) out = proc . stdout . read ( ) if reg_ver in out : output [ 'REGPATH' ] = True except OSError : print 'e> NiftyReg (reg_aladin) either is NOT installed or is corrupt.' if 'DCM2NIIX' in chcklst and 'DCM2NIIX' in Cnt : try : proc = Popen ( [ Cnt [ 'DCM2NIIX' ] , '-h' ] , stdout = PIPE ) out = proc . stdout . read ( ) ver_str = re . search ( '(?<=dcm2niiX version v)\d{1,2}.\d{1,2}.\d*' , out ) if ver_str and dcm_ver in ver_str . group ( 0 ) : output [ 'DCM2NIIX' ] = True except OSError : print 'e> dcm2niix either is NOT installed or is corrupt.' if 'HMUDIR' in chcklst and 'HMUDIR' in Cnt : for hi in Cnt [ 'HMULIST' ] : if os . path . isfile ( os . path . join ( Cnt [ 'HMUDIR' ] , hi ) ) : output [ 'HMUDIR' ] = True else : output [ 'HMUDIR' ] = False break return output | Check version and existence of all third - party software and input data . Output a dictionary with bool type of the requested bits in chcklst |
46,802 | def update_resources ( Cnt ) : key_list = [ 'PATHTOOLS' , 'RESPATH' , 'REGPATH' , 'DCM2NIIX' , 'HMUDIR' ] path_resources = cs . path_niftypet_local ( ) resources_file = os . path . join ( path_resources , 'resources.py' ) if os . path . isfile ( resources_file ) : f = open ( resources_file , 'r' ) rsrc = f . read ( ) f . close ( ) i0 = rsrc . find ( '### start NiftyPET tools ###' ) i1 = rsrc . find ( '### end NiftyPET tools ###' ) pth_list = [ ] for k in key_list : if k in Cnt : pth_list . append ( '\'' + Cnt [ k ] . replace ( "\\" , "/" ) + '\'' ) else : pth_list . append ( '\'\'' ) strNew = '### start NiftyPET tools ###\n' for i in range ( len ( key_list ) ) : if pth_list [ i ] != '\'\'' : strNew += key_list [ i ] + ' = ' + pth_list [ i ] + '\n' rsrcNew = rsrc [ : i0 ] + strNew + rsrc [ i1 : ] f = open ( resources_file , 'w' ) f . write ( rsrcNew ) f . close ( ) return Cnt | Update resources . py with the paths to the new installed apps . |
46,803 | def loadSessions ( self , callback , bare_jid , device_ids ) : if self . is_async : self . __loadSessionsAsync ( callback , bare_jid , device_ids , { } ) else : return self . __loadSessionsSync ( bare_jid , device_ids ) | Return a dict containing the session for each device id . By default this method calls loadSession for each device id . |
46,804 | def loadTrusts ( self , callback , bare_jid , device_ids ) : if self . is_async : self . __loadTrustsAsync ( callback , bare_jid , device_ids , { } ) else : return self . __loadTrustsSync ( bare_jid , device_ids ) | Return a dict containing the trust status for each device id . By default this method calls loadTrust for each device id . |
46,805 | def parse ( cls , backend , ik , spk , spk_signature , otpks ) : ik = backend . decodePublicKey ( ik ) [ 0 ] spk [ "key" ] = backend . decodePublicKey ( spk [ "key" ] ) [ 0 ] otpks = list ( map ( lambda otpk : { "key" : backend . decodePublicKey ( otpk [ "key" ] ) [ 0 ] , "id" : otpk [ "id" ] } , otpks ) ) return cls ( ik , spk , spk_signature , otpks ) | Use this method when creating a bundle from data you retrieved directly from some PEP node . This method applies an additional decoding step to the public keys in the bundle . Pass the same structure as the constructor expects . |
46,806 | def serialize ( self , backend ) : return { "ik" : backend . encodePublicKey ( self . ik , "25519" ) , "spk" : { "id" : self . spk [ "id" ] , "key" : backend . encodePublicKey ( self . spk [ "key" ] , "25519" ) , } , "spk_signature" : self . spk_signature , "otpks" : list ( map ( lambda otpk : { "id" : otpk [ "id" ] , "key" : backend . encodePublicKey ( otpk [ "key" ] , "25519" ) } , self . otpks ) ) } | Use this method to prepare the data to be uploaded directly to some PEP node . This method applies an additional encoding step to the public keys in the bundle . The result is a dictionary with the keys ik spk spk_signature and otpks . The values are structured the same way as the inputs of the constructor . |
46,807 | def imfill ( immsk ) : for iz in range ( immsk . shape [ 0 ] ) : for iy in range ( immsk . shape [ 1 ] ) : ix0 = np . argmax ( immsk [ iz , iy , : ] > 0 ) ix1 = immsk . shape [ 2 ] - np . argmax ( immsk [ iz , iy , : : - 1 ] > 0 ) if ( ix1 - ix0 ) > immsk . shape [ 2 ] - 10 : continue immsk [ iz , iy , ix0 : ix1 ] = 1 return immsk | fill the empty patches of image mask immsk |
46,808 | def create_mask ( fnii , fimout = '' , outpath = '' , fill = 1 , dtype_fill = np . uint8 , thrsh = 0. , fwhm = 0. , ) : if outpath == '' and fimout != '' : opth = os . path . dirname ( fimout ) if opth == '' : opth = os . path . dirname ( fnii ) fimout = os . path . join ( opth , fimout ) elif outpath == '' : opth = os . path . dirname ( fnii ) else : opth = outpath if fimout == '' : fniis = os . path . split ( fnii ) fimout = os . path . join ( opth , fniis [ 1 ] . split ( '.nii' ) [ 0 ] + '_mask.nii.gz' ) niidct = imio . getnii ( fnii , output = 'all' ) im = niidct [ 'im' ] hdr = niidct [ 'hdr' ] if im . ndim > 3 : raise ValueError ( 'The masking function only accepts 3-D images.' ) if thrsh > 0. : smoim = ndi . filters . gaussian_filter ( im , imio . fwhm2sig ( fwhm , voxsize = abs ( hdr [ 'pixdim' ] [ 1 ] ) ) , mode = 'mirror' ) thrsh = thrsh * smoim . max ( ) immsk = np . int8 ( smoim > thrsh ) immsk = imfill ( immsk ) imo = fill * immsk . astype ( dtype_fill ) else : imo = fill * np . ones ( im . shape , dtype = dtype_fill ) imio . array2nii ( imo , niidct [ 'affine' ] , fimout , trnsp = ( niidct [ 'transpose' ] . index ( 0 ) , niidct [ 'transpose' ] . index ( 1 ) , niidct [ 'transpose' ] . index ( 2 ) ) , flip = niidct [ 'flip' ] ) return { 'fim' : fimout , 'im' : imo } | create mask over the whole image or over the threshold area |
46,809 | def gen_token ( ) : return '' . join ( random . choice ( string . ascii_uppercase + string . ascii_lowercase + string . digits ) for _ in range ( 32 ) ) | Generate a new Skybellpy token . |
46,810 | def _validate_setting ( setting , value ) : if setting not in CONST . ALL_SETTINGS : raise SkybellException ( ERROR . INVALID_SETTING , setting ) if setting == CONST . SETTINGS_DO_NOT_DISTURB : if value not in CONST . SETTINGS_DO_NOT_DISTURB_VALUES : raise SkybellException ( ERROR . INVALID_SETTING_VALUE , ( setting , value ) ) if setting == CONST . SETTINGS_OUTDOOR_CHIME : if value not in CONST . SETTINGS_OUTDOOR_CHIME_VALUES : raise SkybellException ( ERROR . INVALID_SETTING_VALUE , ( setting , value ) ) if setting == CONST . SETTINGS_MOTION_POLICY : if value not in CONST . SETTINGS_MOTION_POLICY_VALUES : raise SkybellException ( ERROR . INVALID_SETTING_VALUE , ( setting , value ) ) if setting == CONST . SETTINGS_MOTION_THRESHOLD : if value not in CONST . SETTINGS_MOTION_THRESHOLD_VALUES : raise SkybellException ( ERROR . INVALID_SETTING_VALUE , ( setting , value ) ) if setting == CONST . SETTINGS_VIDEO_PROFILE : if value not in CONST . SETTINGS_VIDEO_PROFILE_VALUES : raise SkybellException ( ERROR . INVALID_SETTING_VALUE , ( setting , value ) ) if setting in CONST . SETTINGS_LED_COLOR : if ( value < CONST . SETTINGS_LED_VALUES [ 0 ] or value > CONST . SETTINGS_LED_VALUES [ 1 ] ) : raise SkybellException ( ERROR . INVALID_SETTING_VALUE , ( setting , value ) ) if setting == CONST . SETTINGS_LED_INTENSITY : if not isinstance ( value , int ) : raise SkybellException ( ERROR . COLOR_INTENSITY_NOT_VALID , value ) if ( value < CONST . SETTINGS_LED_INTENSITY_VALUES [ 0 ] or value > CONST . SETTINGS_LED_INTENSITY_VALUES [ 1 ] ) : raise SkybellException ( ERROR . INVALID_SETTING_VALUE , ( setting , value ) ) | Validate the setting and value . |
46,811 | def update ( self , device_json = None , info_json = None , settings_json = None , avatar_json = None ) : if device_json : UTILS . update ( self . _device_json , device_json ) if avatar_json : UTILS . update ( self . _avatar_json , avatar_json ) if info_json : UTILS . update ( self . _info_json , info_json ) if settings_json : UTILS . update ( self . _settings_json , settings_json ) | Update the internal device json data . |
46,812 | def _update_activities ( self ) : self . _activities = self . _activities_request ( ) _LOGGER . debug ( "Device Activities Response: %s" , self . _activities ) if not self . _activities : self . _activities = [ ] elif not isinstance ( self . _activities , ( list , tuple ) ) : self . _activities = [ self . _activities ] self . _update_events ( ) | Update stored activities and update caches as required . |
46,813 | def _update_events ( self ) : events = self . _skybell . dev_cache ( self , CONST . EVENT ) or { } for activity in self . _activities : event = activity . get ( CONST . EVENT ) created_at = activity . get ( CONST . CREATED_AT ) old_event = events . get ( event ) if old_event and created_at < old_event . get ( CONST . CREATED_AT ) : continue else : events [ event ] = activity self . _skybell . update_dev_cache ( self , { CONST . EVENT : events } ) | Update our cached list of latest activity events . |
46,814 | def activities ( self , limit = 1 , event = None ) : activities = self . _activities or [ ] if event : activities = list ( filter ( lambda activity : activity [ CONST . EVENT ] == event , activities ) ) return activities [ : limit ] | Return device activity information . |
46,815 | def _set_setting ( self , settings ) : for key , value in settings . items ( ) : _validate_setting ( key , value ) try : self . _settings_request ( method = "patch" , json_data = settings ) self . update ( settings_json = settings ) except SkybellException as exc : _LOGGER . warning ( "Exception changing settings: %s" , settings ) _LOGGER . warning ( exc ) | Validate the settings and then send the PATCH request . |
46,816 | def location ( self ) : location = self . _device_json . get ( CONST . LOCATION , { } ) return ( location . get ( CONST . LOCATION_LAT , 0 ) , location . get ( CONST . LOCATION_LNG , 0 ) ) | Return lat and lng tuple . |
46,817 | def wifi_status ( self ) : return self . _info_json . get ( CONST . STATUS , { } ) . get ( CONST . WIFI_LINK ) | Get the wifi status . |
46,818 | def do_not_disturb ( self ) : return bool ( strtobool ( str ( self . _settings_json . get ( CONST . SETTINGS_DO_NOT_DISTURB ) ) ) ) | Get if do not disturb is enabled . |
46,819 | def do_not_disturb ( self , enabled ) : self . _set_setting ( { CONST . SETTINGS_DO_NOT_DISTURB : str ( enabled ) . lower ( ) } ) | Set do not disturb . |
46,820 | def motion_sensor ( self , enabled ) : if enabled is True : value = CONST . SETTINGS_MOTION_POLICY_ON elif enabled is False : value = CONST . SETTINGS_MOTION_POLICY_OFF else : raise SkybellException ( ERROR . INVALID_SETTING_VALUE , ( CONST . SETTINGS_MOTION_POLICY , enabled ) ) self . _set_setting ( { CONST . SETTINGS_MOTION_POLICY : value } ) | Set the motion sensor state . |
46,821 | def led_rgb ( self ) : return ( int ( self . _settings_json . get ( CONST . SETTINGS_LED_R ) ) , int ( self . _settings_json . get ( CONST . SETTINGS_LED_G ) ) , int ( self . _settings_json . get ( CONST . SETTINGS_LED_B ) ) ) | Get devices LED color . |
46,822 | def led_rgb ( self , color ) : if ( not isinstance ( color , ( list , tuple ) ) or not all ( isinstance ( item , int ) for item in color ) ) : raise SkybellException ( ERROR . COLOR_VALUE_NOT_VALID , color ) self . _set_setting ( { CONST . SETTINGS_LED_R : color [ 0 ] , CONST . SETTINGS_LED_G : color [ 1 ] , CONST . SETTINGS_LED_B : color [ 2 ] } ) | Set devices LED color . |
46,823 | def get ( self , filetype , ** kwargs ) : path = self . full ( filetype , ** kwargs ) if path : if self . _remote : self . download_url_to_path ( self . url ( filetype , ** kwargs ) , path ) else : print ( "There is no file with filetype=%r to access in the tree module loaded" % filetype ) | Returns file name downloading if remote access configured . |
46,824 | def download_url_to_path ( self , url , path , force = False ) : path_exists = isfile ( path ) if not path_exists or force : dir = dirname ( path ) if not exists ( dir ) : if self . verbose : print ( "CREATE %s" % dir ) makedirs ( dir ) try : u = urlopen ( url ) except HTTPError as e : u = None print ( "HTTP error code %r. Please check you ~/.netrc has the correct authorization" % e . code ) if u : with open ( path , 'wb' ) as file : meta = u . info ( ) meta_func = meta . getheaders if hasattr ( meta , 'getheaders' ) else meta . get_all meta_length = meta_func ( "Content-Length" ) file_size = None if meta_length : file_size = int ( meta_length [ 0 ] ) if self . verbose : print ( "Downloading: {0} Bytes: {1}" . format ( url , file_size ) ) file_size_dl = 0 block_sz = 8192 while True : buffer = u . read ( block_sz ) if not buffer : break file_size_dl += len ( buffer ) file . write ( buffer ) if self . verbose : if path_exists : print ( "OVERWRITING %s" % path ) else : print ( "CREATE %s" % path ) elif self . verbose : print ( "FOUND %s (already downloaded)" % path ) | Download a file from url via http and put it at path |
46,825 | def reset ( self ) : if self . stream : self . stream . reset ( ) if self . initial_stream : self . initial_stream . reset ( ) | Reset all streams |
46,826 | def add ( self , filetype , ** kwargs ) : location = self . location ( filetype , ** kwargs ) source = self . url ( filetype , sasdir = 'sas' if not self . public else '' , ** kwargs ) if 'full' not in kwargs : destination = self . full ( filetype , ** kwargs ) else : destination = kwargs . get ( 'full' ) if location and source and destination : self . initial_stream . append_task ( location = location , source = source , destination = destination ) else : print ( "There is no file with filetype=%r to access in the tree module loaded" % filetype ) | Adds a filepath into the list of tasks to download |
46,827 | def set_stream ( self ) : if not self . auth : raise AccessError ( "Please use the remote() method to set rsync authorization or use remote(public=True) for public data" ) elif not self . initial_stream . task : raise AccessError ( "No files to download." ) else : self . stream = self . get_stream ( ) self . stream . source = join ( self . remote_base , 'sas' ) if self . remote_base and not self . public else join ( self . remote_base , self . release ) if self . release else self . remote_base self . stream . destination = join ( self . base_dir , self . release ) if self . public and self . release else self . base_dir self . stream . cli . env = { 'RSYNC_PASSWORD' : self . auth . password } if self . auth . ready ( ) else None if self . stream . source and self . stream . destination : for task in self . initial_stream . task : self . set_stream_task ( task ) ntask = len ( self . stream . task ) if self . stream . stream_count > ntask : if self . verbose : print ( "SDSS_ACCESS> Reducing the number of streams from %r to %r, the number of download tasks." % ( self . stream . stream_count , ntask ) ) self . stream . stream_count = ntask self . stream . streamlet = self . stream . streamlet [ : ntask ] | Sets the download streams |
46,828 | def commit ( self , offset = None , limit = None , dryrun = False ) : self . stream . command = "rsync -avRK --files-from={path} {source} {destination}" self . stream . append_tasks_to_streamlets ( offset = offset , limit = limit ) self . stream . commit_streamlets ( ) self . stream . run_streamlets ( ) self . stream . reset_streamlet ( ) | Start the rsync download |
46,829 | def PrintRanges ( type , name , ranges ) : print "static const %s %s[] = {" % ( type , name , ) for lo , hi in ranges : print "\t{ %d, %d }," % ( lo , hi ) print "};" | Print the ranges as an array of type named name . |
46,830 | def PrintGroup ( name , codes ) : range16 = MakeRanges ( [ c for c in codes if c < 65536 ] ) range32 = MakeRanges ( [ c for c in codes if c >= 65536 ] ) global n16 global n32 n16 += len ( range16 ) n32 += len ( range32 ) ugroup = "{ \"%s\", +1" % ( name , ) if len ( range16 ) > 0 : PrintRanges ( "URange16" , name + "_range16" , range16 ) ugroup += ", %s_range16, %d" % ( name , len ( range16 ) ) else : ugroup += ", 0, 0" if len ( range32 ) > 0 : PrintRanges ( "URange32" , name + "_range32" , range32 ) ugroup += ", %s_range32, %d" % ( name , len ( range32 ) ) else : ugroup += ", 0, 0" ugroup += " }" return ugroup | Print the data structures for the group of codes . Return a UGroup literal for the group . |
46,831 | def _input_templates ( self ) : foo = self . _config . read ( [ self . _pathfile ] ) if len ( foo ) == 1 : for k , v in self . _config . items ( 'paths' ) : self . templates [ k ] = v else : raise ValueError ( "Could not read {0}!" . format ( self . _pathfile ) ) return | Read the path template file . |
46,832 | def lookup_keys ( self , name ) : assert name , 'Must specify a path name' assert name in self . templates . keys ( ) , '{0} must be defined in the path templates' . format ( name ) keys = list ( set ( re . findall ( r'{(.*?)}' , self . templates [ name ] ) ) ) skeys = self . _check_special_kwargs ( name ) keys . extend ( skeys ) keys = list ( set ( keys ) ) keys = [ k . split ( ':' ) [ 0 ] for k in keys ] return keys | Lookup the keyword arguments needed for a given path name |
46,833 | def _find_source ( method ) : source = inspect . findsource ( method ) is_method = inspect . ismethod ( method ) source_str = '\n' . join ( source [ 0 ] ) if is_method : pattern = r'def\s{0}\(self' . format ( method . __name__ ) start = re . search ( pattern , source_str ) if start : startpos = start . start ( ) endpos = source_str . find ( 'def ' , startpos + 1 ) code = source_str [ startpos : endpos ] else : code = None return code | find source code of a given method Find and extract the source code of a given method in a module . Uses inspect . findsource to get all source code and performs some selection magic to identify method source code . Doing it this way because inspect . getsource returns wrong method . |
46,834 | def extract ( self , name , example ) : if not pathlib : return None if isinstance ( example , pathlib . Path ) : example = str ( example ) assert isinstance ( example , six . string_types ) , 'example file must be a string' assert name in self . lookup_names ( ) , '{0} must be a valid template name' . format ( name ) template = self . templates [ name ] template = os . path . expandvars ( template ) if re . match ( '%spectrodir' , template ) : template = re . sub ( '%spectrodir' , os . environ [ 'BOSS_SPECTRO_REDUX' ] , template ) elif re . search ( '%platedir' , template ) : template = re . sub ( '%platedir' , '(.*)/{plateid:0>6}' , template ) elif re . search ( '%definitiondir' , template ) : template = re . sub ( '%definitiondir' , '{designid:0>6}' , template ) if re . search ( '%plateid6' , template ) : template = re . sub ( '%plateid6' , '{plateid:0>6}' , template ) haskwargs = re . search ( '[{}]' , template ) if not haskwargs : return None subtemp = template . replace ( '$' , '\\$' ) . replace ( '.' , '\\.' ) research = re . sub ( '{(.*?)}' , '(.*)' , subtemp ) pmatch = re . search ( research , template ) tmatch = re . search ( research , example ) path_dict = { } if tmatch : values = tmatch . groups ( 0 ) keys = pmatch . groups ( 0 ) assert len ( keys ) == len ( values ) , 'pattern and template matches must have same length' parts = zip ( keys , values ) for part in parts : value = part [ 1 ] if re . findall ( '{(.*?)}' , part [ 0 ] ) : keys = re . findall ( '{(.*?)}' , part [ 0 ] ) keys = [ k . split ( ':' ) [ 0 ] for k in keys ] if len ( keys ) > 1 : if keys [ 0 ] == 'dr' : drval = re . match ( '^DR[1-9][0-9]' , value ) . group ( 0 ) otherval = value . split ( drval ) [ - 1 ] pdict = { keys [ 0 ] : drval , keys [ 1 ] : otherval } elif keys [ 0 ] in [ 'rc' , 'br' , 'filter' , 'camrow' ] : pdict = { keys [ 0 ] : value [ 0 ] , keys [ 1 ] : value [ 1 : ] } else : raise ValueError ( 'This case has not yet been accounted for.' ) path_dict . 
update ( pdict ) else : path_dict [ keys [ 0 ] ] = value return path_dict | Extract keywords from an example path |
46,835 | def exists ( self , filetype , remote = None , ** kwargs ) : full = kwargs . get ( 'full' , None ) if not full : full = self . full ( filetype , ** kwargs ) if remote : url = self . url ( '' , full = full ) try : resp = requests . head ( url ) except Exception as e : raise AccessError ( 'Cannot check for remote file existence for {0}: {1}' . format ( url , e ) ) else : return resp . ok else : return os . path . isfile ( full ) | Checks if the given type of file exists locally |
46,836 | def expand ( self , filetype , ** kwargs ) : full = kwargs . get ( 'full' , None ) if not full : full = self . full ( filetype , ** kwargs ) files = glob ( full ) as_url = kwargs . get ( 'as_url' , None ) newfiles = [ self . url ( '' , full = full ) for full in files ] if as_url else files refine = kwargs . get ( 'refine' , None ) if refine : newfiles = self . refine ( newfiles , refine , ** kwargs ) return newfiles | Expand a wildcard path locally |
46,837 | def any ( self , filetype , ** kwargs ) : expanded_files = self . expand ( filetype , ** kwargs ) return any ( expanded_files ) | Checks if the local directory contains any of the type of file |
46,838 | def one ( self , filetype , ** kwargs ) : expanded_files = self . expand ( filetype , ** kwargs ) isany = self . any ( filetype , ** kwargs ) return choice ( expanded_files ) if isany else None | Returns random one of the given type of file |
46,839 | def random ( self , filetype , ** kwargs ) : expanded_files = self . expand ( filetype , ** kwargs ) isany = self . any ( filetype , ** kwargs ) if isany : num = kwargs . get ( 'num' , 1 ) assert num <= len ( expanded_files ) , 'Requested number must be larger the sample. Reduce your number.' return sample ( expanded_files , num ) else : return None | Returns random number of the given type of file |
46,840 | def refine ( self , filelist , regex , filterdir = 'out' , ** kwargs ) : assert filelist , 'Must provide a list of filenames to refine on' assert regex , 'Must provide a regular expression to refine the file list' r = re . compile ( regex ) assert filterdir in [ 'in' , 'out' ] , 'Filter direction must be either "in" or "out"' if filterdir == 'out' : subset = list ( filter ( lambda i : r . search ( i ) , filelist ) ) elif filterdir == 'in' : subset = list ( filter ( lambda i : not r . search ( i ) , filelist ) ) return subset | Returns a list of files filterd by a regular expression |
46,841 | def full ( self , filetype , ** kwargs ) : if 'full' in kwargs : return kwargs . get ( 'full' ) assert filetype in self . templates , ( 'No entry {0} found. Filetype must ' 'be one of the designated templates ' 'in the currently loaded tree' . format ( filetype ) ) template = self . templates [ filetype ] if template : keys = self . lookup_keys ( filetype ) keys = [ k . split ( ':' ) [ 0 ] for k in keys ] missing_keys = set ( keys ) - set ( kwargs . keys ( ) ) if missing_keys : raise KeyError ( 'Missing required keyword arguments: {0}' . format ( list ( missing_keys ) ) ) else : template = template . format ( ** kwargs ) if template : template = os . path . expandvars ( template ) template = self . _call_special_functions ( filetype , template , ** kwargs ) return template | Return the full name of a given type of file . |
46,842 | def _call_special_functions ( self , filetype , template , ** kwargs ) : functions = re . findall ( r"\%\w+" , template ) if not functions : return template for function in functions : try : method = getattr ( self , function [ 1 : ] ) except AttributeError : return None else : value = method ( filetype , ** kwargs ) template = re . sub ( function , value , template ) return template | Call the special functions found in a template path |
46,843 | def location ( self , filetype , base_dir = None , ** kwargs ) : full = kwargs . get ( 'full' , None ) if not full : full = self . full ( filetype , ** kwargs ) self . set_base_dir ( base_dir = base_dir ) location = full [ len ( self . base_dir ) : ] if full and full . startswith ( self . base_dir ) else None if location and '//' in location : location = location . replace ( '//' , '/' ) return location | Return the location of the relative sas path of a given type of file . |
46,844 | def url ( self , filetype , base_dir = None , sasdir = 'sas' , ** kwargs ) : location = self . location ( filetype , ** kwargs ) return join ( self . remote_base , sasdir , location ) if self . remote_base and location else None | Return the url of a given type of file . |
46,845 | def plateid6 ( self , filetype , ** kwargs ) : plateid = int ( kwargs [ 'plateid' ] ) if plateid < 10000 : return "{:0>6d}" . format ( plateid ) else : return "{:d}" . format ( plateid ) | Print plate ID accounting for 5 - 6 digit plate IDs . |
46,846 | def foreground_run ( self , command , test = False , logger = None , logall = False , message = None , outname = None , errname = None ) : if logger is not None : logger . debug ( command ) status = 0 out = '' err = '' if not test : if outname is None : outfile = TemporaryFile ( ) else : outfile = open ( outname , 'w+' ) if errname is None : errfile = TemporaryFile ( ) else : errfile = open ( errname , 'w+' ) proc = Popen ( split ( str ( command ) ) , stdout = outfile , stderr = errfile , env = self . env ) tstart = time ( ) while proc . poll ( ) is None : elapsed = time ( ) - tstart if elapsed > 500000 : message = "Process still running after more than 5 days!" proc . kill ( ) break tsleep = 10 ** ( int ( log10 ( elapsed ) ) - 1 ) if tsleep < 1 : tsleep = 1 sleep ( tsleep ) status = proc . returncode outfile . seek ( 0 ) out = outfile . read ( ) errfile . seek ( 0 ) err = errfile . read ( ) outfile . close ( ) errfile . close ( ) if logger is not None : if status == 0 and logall : if len ( out ) > 0 : logger . debug ( 'STDOUT = \n' + out ) if len ( err ) > 0 : logger . debug ( 'STDERR = \n' + err ) if status != 0 : logger . error ( 'status = {0}' . format ( status ) ) if len ( out ) > 0 : logger . error ( 'STDOUT = \n' + out ) if len ( err ) > 0 : logger . error ( 'STDERR = \n' + err ) if message is not None : logger . critical ( message ) exit ( status ) return ( status , out , err ) | A convenient wrapper to log and perform system calls . |
46,847 | def undo ( ui , repo , clname , ** opts ) : if repo [ None ] . branch ( ) != "default" : raise hg_util . Abort ( "cannot run hg undo outside default branch" ) err = clpatch_or_undo ( ui , repo , clname , opts , mode = "undo" ) if err : raise hg_util . Abort ( err ) | undo the effect of a CL Creates a new CL that undoes an earlier CL . After creating the CL opens the CL text for editing so that you can add the reason for the undo to the description . |
46,848 | def release_apply ( ui , repo , clname , ** opts ) : c = repo [ None ] if not releaseBranch : raise hg_util . Abort ( "no active release branches" ) if c . branch ( ) != releaseBranch : if c . modified ( ) or c . added ( ) or c . removed ( ) : raise hg_util . Abort ( "uncommitted local changes - cannot switch branches" ) err = hg_clean ( repo , releaseBranch ) if err : raise hg_util . Abort ( err ) try : err = clpatch_or_undo ( ui , repo , clname , opts , mode = "backport" ) if err : raise hg_util . Abort ( err ) except Exception , e : hg_clean ( repo , "default" ) raise e | apply a CL to the release branch |
46,849 | def download ( ui , repo , clname , ** opts ) : if codereview_disabled : raise hg_util . Abort ( codereview_disabled ) cl , vers , patch , err = DownloadCL ( ui , repo , clname ) if err != "" : return err ui . write ( cl . EditorText ( ) + "\n" ) ui . write ( patch + "\n" ) return | download a change from the code review server |
46,850 | def file ( ui , repo , clname , pat , * pats , ** opts ) : if codereview_disabled : raise hg_util . Abort ( codereview_disabled ) pats = tuple ( [ pat ] + list ( pats ) ) if not GoodCLName ( clname ) : return "invalid CL name " + clname dirty = { } cl , err = LoadCL ( ui , repo , clname , web = False ) if err != '' : return err if not cl . local : return "cannot change non-local CL " + clname files = ChangedFiles ( ui , repo , pats ) if opts [ "delete" ] : oldfiles = Intersect ( files , cl . files ) if oldfiles : if not ui . quiet : ui . status ( "# Removing files from CL. To undo:\n" ) ui . status ( "# cd %s\n" % ( repo . root ) ) for f in oldfiles : ui . status ( "# hg file %s %s\n" % ( cl . name , f ) ) cl . files = Sub ( cl . files , oldfiles ) cl . Flush ( ui , repo ) else : ui . status ( "no such files in CL" ) return if not files : return "no such modified files" files = Sub ( files , cl . files ) taken = Taken ( ui , repo ) warned = False for f in files : if f in taken : if not warned and not ui . quiet : ui . status ( "# Taking files from other CLs. To undo:\n" ) ui . status ( "# cd %s\n" % ( repo . root ) ) warned = True ocl = taken [ f ] if not ui . quiet : ui . status ( "# hg file %s %s\n" % ( ocl . name , f ) ) if ocl not in dirty : ocl . files = Sub ( ocl . files , files ) dirty [ ocl ] = True cl . files = Add ( cl . files , files ) dirty [ cl ] = True for d , _ in dirty . items ( ) : d . Flush ( ui , repo ) return | assign files to or remove files from a change list |
46,851 | def gofmt ( ui , repo , * pats , ** opts ) : if codereview_disabled : raise hg_util . Abort ( codereview_disabled ) files = ChangedExistingFiles ( ui , repo , pats , opts ) files = gofmt_required ( files ) if not files : ui . status ( "no modified go files\n" ) return cwd = os . getcwd ( ) files = [ RelativePath ( repo . root + '/' + f , cwd ) for f in files ] try : cmd = [ "gofmt" , "-l" ] if not opts [ "list" ] : cmd += [ "-w" ] if subprocess . call ( cmd + files ) != 0 : raise hg_util . Abort ( "gofmt did not exit cleanly" ) except hg_error . Abort , e : raise except : raise hg_util . Abort ( "gofmt: " + ExceptionDetail ( ) ) return | apply gofmt to modified files |
46,852 | def mail ( ui , repo , * pats , ** opts ) : if codereview_disabled : raise hg_util . Abort ( codereview_disabled ) cl , err = CommandLineCL ( ui , repo , pats , opts , op = "mail" , defaultcc = defaultcc ) if err != "" : raise hg_util . Abort ( err ) cl . Upload ( ui , repo , gofmt_just_warn = True ) if not cl . reviewer : if not defaultcc : raise hg_util . Abort ( "no reviewers listed in CL" ) cl . cc = Sub ( cl . cc , defaultcc ) cl . reviewer = defaultcc cl . Flush ( ui , repo ) if cl . files == [ ] : raise hg_util . Abort ( "no changed files, not sending mail" ) cl . Mail ( ui , repo ) | mail a change for review |
46,853 | def pending ( ui , repo , * pats , ** opts ) : if codereview_disabled : raise hg_util . Abort ( codereview_disabled ) quick = opts . get ( 'quick' , False ) short = opts . get ( 'short' , False ) m = LoadAllCL ( ui , repo , web = not quick and not short ) names = m . keys ( ) names . sort ( ) for name in names : cl = m [ name ] if short : ui . write ( name + "\t" + line1 ( cl . desc ) + "\n" ) else : ui . write ( cl . PendingText ( quick = quick ) + "\n" ) if short : return 0 files = DefaultFiles ( ui , repo , [ ] ) if len ( files ) > 0 : s = "Changed files not in any CL:\n" for f in files : s += "\t" + f + "\n" ui . write ( s ) | show pending changes |
46,854 | def sync ( ui , repo , ** opts ) : if codereview_disabled : raise hg_util . Abort ( codereview_disabled ) if not opts [ "local" ] : if hg_incoming ( ui , repo ) : err = hg_pull ( ui , repo , update = True ) else : err = hg_update ( ui , repo ) if err : return err sync_changes ( ui , repo ) | synchronize with remote repository |
46,855 | def upload ( ui , repo , name , ** opts ) : if codereview_disabled : raise hg_util . Abort ( codereview_disabled ) repo . ui . quiet = True cl , err = LoadCL ( ui , repo , name , web = True ) if err != "" : raise hg_util . Abort ( err ) if not cl . local : raise hg_util . Abort ( "cannot upload non-local change" ) cl . Upload ( ui , repo ) print "%s%s\n" % ( server_url_base , cl . name ) return 0 | upload diffs to the code review server |
46,856 | def MySend ( request_path , payload = None , content_type = "application/octet-stream" , timeout = None , force_auth = True , ** kwargs ) : try : return MySend1 ( request_path , payload , content_type , timeout , force_auth , ** kwargs ) except Exception , e : if type ( e ) != urllib2 . HTTPError or e . code != 500 : raise print >> sys . stderr , "Loading " + request_path + ": " + ExceptionDetail ( ) + "; trying again in 2 seconds." time . sleep ( 2 ) return MySend1 ( request_path , payload , content_type , timeout , force_auth , ** kwargs ) | Run MySend1 maybe twice because Rietveld is unreliable . |
46,857 | def GetEmail ( prompt ) : last_email_file_name = os . path . expanduser ( "~/.last_codereview_email_address" ) last_email = "" if os . path . exists ( last_email_file_name ) : try : last_email_file = open ( last_email_file_name , "r" ) last_email = last_email_file . readline ( ) . strip ( "\n" ) last_email_file . close ( ) prompt += " [%s]" % last_email except IOError , e : pass email = raw_input ( prompt + ": " ) . strip ( ) if email : try : last_email_file = open ( last_email_file_name , "w" ) last_email_file . write ( email ) last_email_file . close ( ) except IOError , e : pass else : email = last_email return email | Prompts the user for their email address and returns it . |
46,858 | def GetRpcServer ( options ) : rpc_server_class = HttpRpcServer def GetUserCredentials ( ) : global global_status st = global_status global_status = None email = options . email if email is None : email = GetEmail ( "Email (login for uploading to %s)" % options . server ) password = getpass . getpass ( "Password for %s: " % email ) global_status = st return ( email , password ) host = ( options . host or options . server ) . lower ( ) if host == "localhost" or host . startswith ( "localhost:" ) : email = options . email if email is None : email = "test@example.com" logging . info ( "Using debug user %s. Override with --email" % email ) server = rpc_server_class ( options . server , lambda : ( email , "password" ) , host_override = options . host , extra_headers = { "Cookie" : 'dev_appserver_login="%s:False"' % email } , save_cookies = options . save_cookies ) server . authenticated = True return server return rpc_server_class ( options . server , GetUserCredentials , host_override = options . host , save_cookies = options . save_cookies ) | Returns an instance of an AbstractRpcServer . |
46,859 | def RunShellWithReturnCode ( command , print_output = False , universal_newlines = True , env = os . environ ) : logging . info ( "Running %s" , command ) p = subprocess . Popen ( command , stdout = subprocess . PIPE , stderr = subprocess . PIPE , shell = use_shell , universal_newlines = universal_newlines , env = env ) if print_output : output_array = [ ] while True : line = p . stdout . readline ( ) if not line : break print line . strip ( "\n" ) output_array . append ( line ) output = "" . join ( output_array ) else : output = p . stdout . read ( ) p . wait ( ) errout = p . stderr . read ( ) if print_output and errout : print >> sys . stderr , errout p . stdout . close ( ) p . stderr . close ( ) return output , p . returncode | Executes a command and returns the output from stdout and the return code . |
46,860 | def SplitPatch ( data ) : patches = [ ] filename = None diff = [ ] for line in data . splitlines ( True ) : new_filename = None if line . startswith ( 'Index:' ) : unused , new_filename = line . split ( ':' , 1 ) new_filename = new_filename . strip ( ) elif line . startswith ( 'Property changes on:' ) : unused , temp_filename = line . split ( ':' , 1 ) temp_filename = to_slash ( temp_filename . strip ( ) ) if temp_filename != filename : new_filename = temp_filename if new_filename : if filename and diff : patches . append ( ( filename , '' . join ( diff ) ) ) filename = new_filename diff = [ line ] continue if diff is not None : diff . append ( line ) if filename and diff : patches . append ( ( filename , '' . join ( diff ) ) ) return patches | Splits a patch into separate pieces for each file . |
46,861 | def UploadSeparatePatches ( issue , rpc_server , patchset , data , options ) : patches = SplitPatch ( data ) rv = [ ] for patch in patches : set_status ( "uploading patch for " + patch [ 0 ] ) if len ( patch [ 1 ] ) > MAX_UPLOAD_SIZE : print ( "Not uploading the patch for " + patch [ 0 ] + " because the file is too large." ) continue form_fields = [ ( "filename" , patch [ 0 ] ) ] if not options . download_base : form_fields . append ( ( "content_upload" , "1" ) ) files = [ ( "data" , "data.diff" , patch [ 1 ] ) ] ctype , body = EncodeMultipartFormData ( form_fields , files ) url = "/%d/upload_patch/%d" % ( int ( issue ) , int ( patchset ) ) print "Uploading patch for " + patch [ 0 ] response_body = rpc_server . Send ( url , body , content_type = ctype ) lines = response_body . splitlines ( ) if not lines or lines [ 0 ] != "OK" : StatusUpdate ( " --> %s" % response_body ) sys . exit ( 1 ) rv . append ( [ lines [ 1 ] , patch [ 0 ] ] ) return rv | Uploads a separate patch for each file in the diff output .
46,862 | def _CreateRequest ( self , url , data = None ) : logging . debug ( "Creating request for: '%s' with payload:\n%s" , url , data ) req = urllib2 . Request ( url , data = data ) if self . host_override : req . add_header ( "Host" , self . host_override ) for key , value in self . extra_headers . iteritems ( ) : req . add_header ( key , value ) return req | Creates a new urllib request . |
46,863 | def _GetAuthToken ( self , email , password ) : account_type = "GOOGLE" if self . host . endswith ( ".google.com" ) and not force_google_account : account_type = "HOSTED" req = self . _CreateRequest ( url = "https://www.google.com/accounts/ClientLogin" , data = urllib . urlencode ( { "Email" : email , "Passwd" : password , "service" : "ah" , "source" : "rietveld-codereview-upload" , "accountType" : account_type , } ) , ) try : response = self . opener . open ( req ) response_body = response . read ( ) response_dict = dict ( x . split ( "=" ) for x in response_body . split ( "\n" ) if x ) return response_dict [ "Auth" ] except urllib2 . HTTPError , e : if e . code == 403 : body = e . read ( ) response_dict = dict ( x . split ( "=" , 1 ) for x in body . split ( "\n" ) if x ) raise ClientLoginError ( req . get_full_url ( ) , e . code , e . msg , e . headers , response_dict ) else : raise | Uses ClientLogin to authenticate the user returning an auth token . |
46,864 | def _GetAuthCookie ( self , auth_token ) : continue_location = "http://localhost/" args = { "continue" : continue_location , "auth" : auth_token } req = self . _CreateRequest ( "https://%s/_ah/login?%s" % ( self . host , urllib . urlencode ( args ) ) ) try : response = self . opener . open ( req ) except urllib2 . HTTPError , e : response = e if ( response . code != 302 or response . info ( ) [ "location" ] != continue_location ) : raise urllib2 . HTTPError ( req . get_full_url ( ) , response . code , response . msg , response . headers , response . fp ) self . authenticated = True | Fetches authentication cookies for an authentication token . |
46,865 | def _Authenticate ( self ) : for i in range ( 3 ) : credentials = self . auth_function ( ) try : auth_token = self . _GetAuthToken ( credentials [ 0 ] , credentials [ 1 ] ) except ClientLoginError , e : if e . msg == "BadAuthentication" : print >> sys . stderr , "Invalid username or password." continue if e . msg == "CaptchaRequired" : print >> sys . stderr , ( "Please go to\n" "https://www.google.com/accounts/DisplayUnlockCaptcha\n" "and verify you are a human. Then try again." ) break if e . msg == "NotVerified" : print >> sys . stderr , "Account not verified." break if e . msg == "TermsNotAgreed" : print >> sys . stderr , "User has not agreed to TOS." break if e . msg == "AccountDeleted" : print >> sys . stderr , "The user account has been deleted." break if e . msg == "AccountDisabled" : print >> sys . stderr , "The user account has been disabled." break if e . msg == "ServiceDisabled" : print >> sys . stderr , "The user's access to the service has been disabled." break if e . msg == "ServiceUnavailable" : print >> sys . stderr , "The service is not available; try again later." break raise self . _GetAuthCookie ( auth_token ) return | Authenticates the user . |
46,866 | def _Authenticate ( self ) : super ( HttpRpcServer , self ) . _Authenticate ( ) if self . save_cookies : StatusUpdate ( "Saving authentication cookies to %s" % self . cookie_file ) self . cookie_jar . save ( ) | Save the cookie jar after authentication . |
46,867 | def _GetOpener ( self ) : opener = urllib2 . OpenerDirector ( ) opener . add_handler ( urllib2 . ProxyHandler ( ) ) opener . add_handler ( urllib2 . UnknownHandler ( ) ) opener . add_handler ( urllib2 . HTTPHandler ( ) ) opener . add_handler ( urllib2 . HTTPDefaultErrorHandler ( ) ) opener . add_handler ( urllib2 . HTTPSHandler ( ) ) opener . add_handler ( urllib2 . HTTPErrorProcessor ( ) ) if self . save_cookies : self . cookie_file = os . path . expanduser ( "~/.codereview_upload_cookies_" + server ) self . cookie_jar = cookielib . MozillaCookieJar ( self . cookie_file ) if os . path . exists ( self . cookie_file ) : try : self . cookie_jar . load ( ) self . authenticated = True StatusUpdate ( "Loaded authentication cookies from %s" % self . cookie_file ) except ( cookielib . LoadError , IOError ) : pass else : fd = os . open ( self . cookie_file , os . O_CREAT , 0600 ) os . close ( fd ) os . chmod ( self . cookie_file , 0600 ) else : self . cookie_jar = cookielib . CookieJar ( ) opener . add_handler ( urllib2 . HTTPCookieProcessor ( self . cookie_jar ) ) return opener | Returns an OpenerDirector that supports cookies and ignores redirects . |
46,868 | def CheckForUnknownFiles ( self ) : unknown_files = self . GetUnknownFiles ( ) if unknown_files : print "The following files are not added to version control:" for line in unknown_files : print line prompt = "Are you sure to continue?(y/N) " answer = raw_input ( prompt ) . strip ( ) if answer != "y" : ErrorExit ( "User aborted" ) | Show an are you sure? prompt if there are unknown files . |
46,869 | def GetBaseFiles ( self , diff ) : files = { } for line in diff . splitlines ( True ) : if line . startswith ( 'Index:' ) or line . startswith ( 'Property changes on:' ) : unused , filename = line . split ( ':' , 1 ) filename = to_slash ( filename . strip ( ) ) files [ filename ] = self . GetBaseFile ( filename ) return files | Helper that calls GetBase file for each file in the patch . |
46,870 | def IsImage ( self , filename ) : mimetype = mimetypes . guess_type ( filename ) [ 0 ] if not mimetype : return False return mimetype . startswith ( "image/" ) | Returns true if the filename has an image extension . |
46,871 | def IsBinary ( self , filename ) : mimetype = mimetypes . guess_type ( filename ) [ 0 ] if not mimetype : return False if mimetype in TEXT_MIMETYPES : return False return not mimetype . startswith ( "text/" ) | Returns true if the guessed mimetype is not in the text group .
46,872 | def _GetRelPath ( self , filename ) : assert filename . startswith ( self . subdir ) , ( filename , self . subdir ) return filename [ len ( self . subdir ) : ] . lstrip ( r"\/" ) | Get relative path of a file according to the current directory given its logical path in the repo . |
46,873 | def GetUnknownFiles ( self ) : args = [ ] status = RunShell ( [ "hg" , "status" , "--rev" , self . base_rev , "-u" , "." ] , silent_ok = True ) unknown_files = [ ] for line in status . splitlines ( ) : st , fn = line . split ( " " , 1 ) if st == "?" : unknown_files . append ( fn ) return unknown_files | Return a list of files unknown to the VCS . |
46,874 | def _URange ( s ) : a = s . split ( ".." ) if len ( a ) == 1 : return [ _UInt ( a [ 0 ] ) ] if len ( a ) == 2 : lo = _UInt ( a [ 0 ] ) hi = _UInt ( a [ 1 ] ) if lo < hi : return range ( lo , hi + 1 ) raise InputError ( "invalid Unicode range %s" % ( s , ) ) | Converts string to Unicode range . |
46,875 | def _ParseContinue ( s ) : match = re . match ( "<(.*), (First|Last)>" , s ) if match is not None : return match . groups ( ) return ( s , None ) | Parses a Unicode continuation field . |
46,876 | def ReadUnicodeTable ( filename , nfields , doline ) : if nfields < 2 : raise InputError ( "invalid number of fields %d" % ( nfields , ) ) if type ( filename ) == str : if filename . startswith ( "http://" ) : fil = urllib2 . urlopen ( filename ) else : fil = open ( filename , "r" ) else : fil = filename first = None expect_last = None lineno = 0 for line in fil : lineno += 1 try : sharp = line . find ( "#" ) if sharp >= 0 : line = line [ : sharp ] line = line . strip ( ) if not line : continue fields = [ s . strip ( ) for s in line . split ( ";" ) ] if len ( fields ) != nfields : raise InputError ( "wrong number of fields %d %d - %s" % ( len ( fields ) , nfields , line ) ) codes = _URange ( fields [ 0 ] ) ( name , cont ) = _ParseContinue ( fields [ 1 ] ) if expect_last is not None : if ( len ( codes ) != 1 or codes [ 0 ] <= first or cont != "Last" or name != expect_last ) : raise InputError ( "expected Last line for %s" % ( expect_last , ) ) codes = range ( first , codes [ 0 ] + 1 ) first = None expect_last = None fields [ 0 ] = "%04X..%04X" % ( codes [ 0 ] , codes [ - 1 ] ) fields [ 1 ] = name elif cont == "First" : if len ( codes ) != 1 : raise InputError ( "bad First line: range given" ) expect_last = name first = codes [ 0 ] continue doline ( codes , fields ) except Exception , e : print "%s:%d: %s" % ( filename , lineno , e ) raise if expect_last is not None : raise InputError ( "expected Last line for %s; got EOF" % ( expect_last , ) ) | Generic Unicode table text file reader . |
46,877 | def CaseGroups ( unicode_dir = _UNICODE_DIR ) : togroup = { } def DoLine ( codes , fields ) : ( _ , foldtype , lower , _ ) = fields if foldtype not in ( "C" , "S" ) : return lower = _UInt ( lower ) togroup . setdefault ( lower , [ lower ] ) . extend ( codes ) ReadUnicodeTable ( unicode_dir + "/CaseFolding.txt" , 4 , DoLine ) groups = togroup . values ( ) for g in groups : g . sort ( ) groups . sort ( ) return togroup , groups | Returns list of Unicode code groups equivalent under case folding . |
46,878 | def Scripts ( unicode_dir = _UNICODE_DIR ) : scripts = { } def DoLine ( codes , fields ) : ( _ , name ) = fields scripts . setdefault ( name , [ ] ) . extend ( codes ) ReadUnicodeTable ( unicode_dir + "/Scripts.txt" , 2 , DoLine ) return scripts | Returns dict mapping script names to code lists . |
46,879 | def Categories ( unicode_dir = _UNICODE_DIR ) : categories = { } def DoLine ( codes , fields ) : category = fields [ 2 ] categories . setdefault ( category , [ ] ) . extend ( codes ) if len ( category ) > 1 : short = category [ 0 ] categories . setdefault ( short , [ ] ) . extend ( codes ) ReadUnicodeTable ( unicode_dir + "/UnicodeData.txt" , 15 , DoLine ) return categories | Returns dict mapping category names to code lists . |
46,880 | def _AddDelta ( a , delta ) : if type ( delta ) == int : return a + delta if delta == 'EvenOdd' : if a % 2 == 0 : return a + 1 else : return a - 1 if delta == 'OddEven' : if a % 2 == 1 : return a + 1 else : return a - 1 print >> sys . stderr , "Bad Delta: " , delta raise "Bad Delta" | Return a + delta handling EvenOdd and OddEven specially . |
46,881 | def unsubscribe ( self , callback_id ) : request = self . get_active_subscription ( callback_id ) request [ 'mode' ] = 'unsubscribe' self . subscribe_impl ( callback_id , ** request ) | Ask the hub to cancel the subscription for callback_id then delete it from the local database if successful . |
46,882 | def discover ( url , timeout = None ) : resp = get_content ( { 'REQUEST_TIMEOUT' : timeout } , url ) parser = LinkParser ( ) parser . hub_url = ( resp . links . get ( 'hub' ) or { } ) . get ( 'url' ) parser . topic_url = ( resp . links . get ( 'self' ) or { } ) . get ( 'url' ) try : parser . updated ( ) for chunk in resp . iter_content ( chunk_size = None , decode_unicode = True ) : parser . feed ( chunk ) parser . close ( ) except Finished : return { 'hub_url' : parser . hub_url , 'topic_url' : parser . topic_url } raise DiscoveryError ( "Could not find hub url in topic page" ) | Discover the hub url and topic url of a given url . Firstly by inspecting the page s headers secondarily by inspecting the content for link tags . |
46,883 | def open_tls_file ( file_path , mode , private = True ) : containing_dir = os . path . dirname ( file_path ) fh = None try : if 'w' in mode : os . chmod ( containing_dir , mode = 0o755 ) fh = open ( file_path , mode ) except OSError as e : if 'w' in mode : os . makedirs ( containing_dir , mode = 0o755 , exist_ok = True ) os . chmod ( containing_dir , mode = 0o755 ) fh = open ( file_path , 'w' ) else : raise yield fh mode = 0o600 if private else 0o644 os . chmod ( file_path , mode = mode ) fh . close ( ) | Context to ensure correct file permissions for certs and directories |
46,884 | def load ( self ) : private = self . is_private ( ) with open_tls_file ( self . file_path , 'r' , private = private ) as fh : if private : self . x509 = crypto . load_privatekey ( self . encoding , fh . read ( ) ) else : self . x509 = crypto . load_certificate ( self . encoding , fh . read ( ) ) return self . x509 | Load from a file and return an x509 object |
46,885 | def save ( self , x509 ) : self . x509 = x509 with open_tls_file ( self . file_path , 'w' , private = self . is_private ( ) ) as fh : fh . write ( str ( self ) ) | Persist this x509 object to disk |
46,886 | def _setup_tls_files ( self , files ) : for file_type in TLSFileType : if file_type . value in files : file_path = files [ file_type . value ] setattr ( self , file_type . value , TLSFile ( file_path , file_type = file_type ) ) | Initializes TLSFile objects with the paths given to this bundle
46,887 | def save_x509s ( self , x509s ) : for file_type in TLSFileType : if file_type . value in x509s : x509 = x509s [ file_type . value ] if file_type is not TLSFileType . CA : tlsfile = getattr ( self , file_type . value ) if tlsfile : tlsfile . save ( x509 ) | Saves the x509 objects to the paths known by this bundle |
46,888 | def to_record ( self ) : tf_list = [ getattr ( self , k , None ) for k in [ _ . value for _ in TLSFileType ] ] tf_list = filter ( lambda x : x , tf_list ) files = { tf . file_type . value : tf . file_path for tf in tf_list } self . record [ 'files' ] = files return self . record | Create a CertStore record from this TLSFileBundle |
46,889 | def from_record ( self , record ) : self . record = record self . _setup_tls_files ( self . record [ 'files' ] ) return self | Build a bundle from a CertStore record |
46,890 | def save ( self ) : with open ( self . store_file_path , 'w' ) as fh : fh . write ( json . dumps ( self . store , indent = 4 ) ) | Write the store dict to a file specified by store_file_path |
46,891 | def load ( self ) : with open ( self . store_file_path , 'r' ) as fh : self . store = json . loads ( fh . read ( ) ) | Read the store dict from file |
46,892 | def get_record ( self , common_name ) : try : record = self . store [ common_name ] return record except KeyError as e : raise CertNotFoundError ( "Unable to find record of {name}" . format ( name = common_name ) , errors = e ) | Return the record associated with this common name |
46,893 | def get_files ( self , common_name ) : record = self . get_record ( common_name ) return TLSFileBundle ( common_name ) . from_record ( record ) | Return a bundle of TLS files associated with a common name |
46,894 | def add_record ( self , common_name , serial = 0 , parent_ca = '' , signees = None , files = None , record = None , is_ca = False , overwrite = False ) : if not overwrite : try : self . get_record ( common_name ) raise CertExistsError ( "Certificate {name} already exists!" " Set overwrite=True to force add." . format ( name = common_name ) ) except CertNotFoundError : pass record = record or { 'serial' : serial , 'is_ca' : is_ca , 'parent_ca' : parent_ca , 'signees' : signees , 'files' : files , } self . store [ common_name ] = record self . save ( ) | Manually create a record of certs |
46,895 | def add_files ( self , common_name , x509s , files = None , parent_ca = '' , is_ca = False , signees = None , serial = 0 , overwrite = False ) : if common_name in self . store and not overwrite : raise CertExistsError ( "Certificate {name} already exists!" " Set overwrite=True to force add." . format ( name = common_name ) ) elif common_name in self . store and overwrite : record = self . get_record ( common_name ) serial = int ( record [ 'serial' ] ) record [ 'serial' ] = serial + 1 TLSFileBundle ( common_name ) . from_record ( record ) . save_x509s ( x509s ) else : file_base_tmpl = "{prefix}/{cn}/{cn}" file_base = file_base_tmpl . format ( prefix = self . containing_dir , cn = common_name ) try : ca_record = self . get_record ( parent_ca ) ca_file = ca_record [ 'files' ] [ 'cert' ] except CertNotFoundError : ca_file = '' files = files or { 'key' : file_base + '.key' , 'cert' : file_base + '.crt' , 'ca' : ca_file , } bundle = TLSFileBundle ( common_name , files = files , x509s = x509s , is_ca = is_ca , serial = serial , parent_ca = parent_ca , signees = signees ) self . store [ common_name ] = bundle . to_record ( ) self . save ( ) | Add a set of files comprising a certificate to Certipy
46,896 | def remove_sign_link ( self , ca_name , signee_name ) : ca_record = self . get_record ( ca_name ) signee_record = self . get_record ( signee_name ) signees = ca_record [ 'signees' ] or { } signees = Counter ( signees ) if signee_name in signees : signees [ signee_name ] = 0 ca_record [ 'signees' ] = signees signee_record [ 'parent_ca' ] = '' self . save ( ) | Removes signee_name to the signee list for ca_name |
46,897 | def update_record ( self , common_name , ** fields ) : record = self . get_record ( common_name ) if fields is not None : for field , value in fields : record [ field ] = value self . save ( ) return record | Update fields in an existing record |
46,898 | def remove_record ( self , common_name ) : bundle = self . get_files ( common_name ) num_signees = len ( Counter ( bundle . record [ 'signees' ] ) ) if bundle . is_ca ( ) and num_signees > 0 : raise CertificateAuthorityInUseError ( "Authority {name} has signed {x} certificates" . format ( name = common_name , x = num_signees ) ) try : ca_name = bundle . record [ 'parent_ca' ] ca_record = self . get_record ( ca_name ) self . remove_sign_link ( ca_name , common_name ) except CertNotFoundError : pass record_copy = dict ( self . store [ common_name ] ) del self . store [ common_name ] self . save ( ) return record_copy | Delete the record associated with this common name |
46,899 | def remove_files ( self , common_name , delete_dir = False ) : record = self . remove_record ( common_name ) if delete_dir : delete_dirs = [ ] if 'files' in record : key_containing_dir = os . path . dirname ( record [ 'files' ] [ 'key' ] ) delete_dirs . append ( key_containing_dir ) cert_containing_dir = os . path . dirname ( record [ 'files' ] [ 'cert' ] ) if key_containing_dir != cert_containing_dir : delete_dirs . append ( cert_containing_dir ) for d in delete_dirs : shutil . rmtree ( d ) return record | Delete files and record associated with this common name |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.