| idx (int64, 0–63k) | question (stringlengths 53–5.28k) | target (stringlengths 5–805) |
|---|---|---|
3,200
|
def pack(self, out: IO):
    out.write(self.access_flags.pack())
    out.write(pack('>HH', self._name_index, self._descriptor_index))
    self.attributes.pack(out)
|
Write the Field to the file-like object out.
|
3,201
|
def remove(self, field: Field):
    self._table = [fld for fld in self._table if fld is not field]
|
Removes a Field from the table by identity.
|
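A toy illustration of why removal by identity (`is`) differs from removal by equality; this `Field` stand-in is hypothetical and compares equal to everything:

```python
# Hypothetical stand-in: every instance compares equal to every other.
class Field:
    def __eq__(self, other):
        return True

a, b = Field(), Field()
table = [f for f in [a, b] if f is not a]  # drops only the exact object `a`
assert len(table) == 1 and table[0] is b   # equality-based removal could drop both
```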
3,202
|
def unpack(self, source: IO):
    field_count = unpack('>H', source.read(2))[0]
    for _ in repeat(None, field_count):
        field = Field(self._cf)
        field.unpack(source)
        self.append(field)
|
Read the FieldTable from the file-like object source.
|
3,203
|
def pack(self, out: IO):
    out.write(pack('>H', len(self)))
    for field in self._table:
        field.pack(out)
|
Write the FieldTable to the file-like object out.
|
3,204
|
def find(self, *, name: str = None, type_: str = None,
         f: Callable = None) -> Iterator[Field]:
    for field in self._table:
        if name is not None and field.name.value != name:
            continue
        descriptor = field.descriptor.value
        if type_ is not None and type_ != descriptor:
            continue
        if f is not None and not f(field):
            continue
        yield field
|
Iterates over the fields table, yielding each matching field. Calling without any arguments is equivalent to iterating over the table.
|
3,205
|
def is_valid_host(value):
    host_validators = validators.ipv4, validators.ipv6, validators.domain
    return any(f(value) for f in host_validators)
|
Check if given value is a valid host string.
|
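A minimal usage sketch, assuming the `validators` package referenced in the row above; the inputs are made-up examples:

```python
# is_valid_host accepts anything that passes the ipv4, ipv6, or domain validator.
assert is_valid_host('192.0.2.1')        # IPv4
assert is_valid_host('example.com')      # domain
assert not is_valid_host('not a host!')  # rejected by all three validators
```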
3,206
|
def is_valid_url(value):
    match = URL_REGEX.match(value)
    host_str = urlparse(value).hostname
    return match and is_valid_host(host_str)
|
Check if given value is a valid URL string.
|
3,207
|
def accepts_valid_host(func):
    @functools.wraps(func)
    def wrapper(obj, value, *args, **kwargs):
        if not is_valid_host(value):
            raise InvalidHostError
        return func(obj, value, *args, **kwargs)
    return wrapper
|
Return a wrapper that runs given method only for valid hosts.
|
3,208
|
def accepts_valid_urls(func):
    @functools.wraps(func)
    def wrapper(obj, urls, *args, **kwargs):
        invalid_urls = [u for u in urls if not is_valid_url(u)]
        if invalid_urls:
            msg_tpl = 'The values: {} are not valid URLs'
            msg = msg_tpl.format(','.join(invalid_urls))
            raise InvalidURLError(msg)
        return func(obj, urls, *args, **kwargs)
    return wrapper
|
Return a wrapper that runs given method only for valid URLs.
|
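A sketch of how the decorator above might be applied; the client class and its method are hypothetical, only the decorator contract comes from the row above:

```python
# The wrapper expects bound methods: (obj, urls, ...) -> validated before the call.
class Client:
    @accepts_valid_urls
    def scan(self, urls):
        return len(urls)

Client().scan(['https://example.com/'])   # passes validation
# Client().scan(['not a url'])            # would raise InvalidURLError
```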
3,209
|
def get(self, index):
    constant = self._pool[index]
    if not isinstance(constant, Constant):
        constant = _constant_types[constant[0]](self, index, *constant[1:])
        self._pool[index] = constant
    return constant
|
Returns the Constant at index, raising a KeyError if it does not exist.
|
3,210
|
def find(self, type_=None, f=None):
    for constant in self:
        if type_ is not None and not isinstance(constant, type_):
            continue
        if f is not None and not f(constant):
            continue
        yield constant
|
Iterates over the pool, yielding each matching Constant. Calling without any arguments is equivalent to iterating over the pool.
|
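A short usage sketch, assuming the jawa-style API these rows suggest (a loaded ClassFile `cf` whose UTF8 constants expose a `.value` string):

```python
from jawa.constants import UTF8

# cf is assumed to be an already-loaded ClassFile.
for const in cf.constants.find(type_=UTF8, f=lambda c: 'java/' in c.value):
    print(const.value)
```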
3,211
|
def pack(self, fout):
    write = fout.write
    write(pack('>H', self.raw_count))
    for constant in self:
        write(constant.pack())
|
Write the ConstantPool to the file-like object fout.
|
3,212
|
def checkout_and_create_branch(repo, name):
    local_branch = repo.branches[name] if name in repo.branches else None
    if not local_branch:
        if name in repo.remotes.origin.refs:
            msg = repo.git.checkout(name)
            _LOGGER.debug(msg)
            return
        local_branch = repo.create_head(name)
    local_branch.checkout()
|
Checkout branch. Create it if necessary.
|
3,213
|
def checkout_create_push_branch(repo, name):
    try:
        repo.git.checkout(name)
        _LOGGER.info("Checkout %s success", name)
    except GitCommandError:
        _LOGGER.info("Checkout %s was impossible (branch does not exist)."
                     " Creating it and push it.", name)
        checkout_and_create_branch(repo, name)
        repo.git.push('origin', name, set_upstream=True)
|
Checkout this branch. Create it if necessary and push it to origin.
|
3,214
|
def get_repo_hexsha(git_folder):
    repo = Repo(str(git_folder))
    if repo.bare:
        not_git_hexsha = "notgitrepo"
        _LOGGER.warning("Not a git repo, SHA1 used will be: %s", not_git_hexsha)
        return not_git_hexsha
    hexsha = repo.head.commit.hexsha
    _LOGGER.info("Found REST API repo SHA1: %s", hexsha)
    return hexsha
|
Get the SHA1 of the current repo.
|
3,215
|
def checkout_with_fetch(git_folder, refspec, repository="origin"):
    _LOGGER.info("Trying to fetch and checkout %s", refspec)
    repo = Repo(str(git_folder))
    repo.git.fetch(repository, refspec)
    repo.git.checkout("FETCH_HEAD")
    _LOGGER.info("Fetch and checkout success for %s", refspec)
|
Fetch the refspec and checkout FETCH_HEAD. Beware that you will be in detached HEAD mode.
|
3,216
|
def clone_to_path(https_authenticated_url, folder, branch_or_commit=None):
    _LOGGER.info("Cloning repo")
    repo = Repo.clone_from(https_authenticated_url, str(folder))
    if branch_or_commit:
        _LOGGER.info("Checkout branch_or_commit %s", branch_or_commit)
        repo.git.checkout(branch_or_commit)
    _LOGGER.info("Clone success")
|
Clone the given URL to the folder.
|
3,217
|
def get_files_in_commit(git_folder, commit_id="HEAD"):
    repo = Repo(str(git_folder))
    output = repo.git.diff("--name-only", commit_id + "^", commit_id)
    return output.splitlines()
|
List of files in HEAD commit.
|
3,218
|
def parse_values(self, query):
    values = {}
    for name, filt in self.filters.items():
        val = filt.parse_value(query)
        if val is None:
            continue
        values[name] = val
    return values
|
Extract values from query.
|
3,219
|
def filter_queryset(self, queryset):
    for name, filt in self.filters.items():
        val = self.values.get(name, None)
        if val is None:
            continue
        params = filt.filter_params(val)
        if not params:
            continue
        if isinstance(params, dict):
            queryset = queryset.filter(**params)
        if isinstance(params, QNode):
            queryset = queryset.filter(params)
    return queryset
|
Convert values to filtering params and apply to queryset.
|
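A hypothetical filter showing the two shapes `filter_params()` may return, which `filter_queryset` dispatches on (a plain keyword dict, or a Q-style `QNode`):

```python
# Hypothetical filter; names and lookups are illustrative only.
class StatusFilter:
    def parse_value(self, query):
        return query.get('status')        # None when absent -> filter skipped

    def filter_params(self, val):
        return {'status__iexact': val}    # dict -> queryset.filter(**params)
```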
3,220
|
def media_image_url(self):
    if self.is_nowplaying:
        base = self.server.construct_url(API_URL)
        try:
            image_id = self.session['NowPlayingItem']['ImageTags']['Thumb']
            image_type = 'Thumb'
        except KeyError:
            try:
                image_id = self.session['NowPlayingItem']['ImageTags']['Primary']
                image_type = 'Primary'
            except KeyError:
                return None
        url = '{0}/Items/{1}/Images/{2}?width=500&tag={3}&api_key={4}'.format(
            base, self.media_id, image_type, image_id, self.server.api_key)
        return url
    else:
        return None
|
Image URL of current playing media.
|
3,221
|
def state(self):
    if self.is_active:
        if 'NowPlayingItem' in self.session:
            if self.session['PlayState']['IsPaused']:
                return STATE_PAUSED
            else:
                return STATE_PLAYING
        else:
            return STATE_IDLE
    else:
        return STATE_OFF
|
Return current playstate of the device.
|
3,222
|
async def set_playstate(self, state, pos=0):
    url = '{}/Sessions/{}/Playing/{}'.format(
        self.server.construct_url(API_URL), self.session_id, state)
    params = {'api_key': self.server.api_key}
    if state == 'seek':
        params['SeekPositionTicks'] = int(pos * 10000000)
        params['static'] = 'true'
    _LOGGER.debug('Playstate URL: %s', url)
    post = await self.server.api_post(url, params)
    if post is None:
        _LOGGER.debug('Error sending command.')
    else:
        _LOGGER.debug('Post response: %s', post)
|
Send media commands to server.
|
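The seek conversion above multiplies seconds by 10,000,000 because Emby positions are expressed in ticks of 100 nanoseconds; a quick sanity check of the arithmetic:

```python
# 1 tick = 100 ns, so 1 second = 10_000_000 ticks.
pos_seconds = 90.5                                  # example: 1 m 30.5 s
assert int(pos_seconds * 10_000_000) == 905_000_000
```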
3,223
|
def start_shell(local_ns: Dict = None, banner: str = ''):
    if IPYTHON_SHELL_AVAILABLE:
        terminal = embed.InteractiveShellEmbed(user_ns={})
        terminal.mainloop(local_ns=local_ns)
    else:
        code.interact(banner=banner, local=local_ns)
|
Create and immediately drop into a Python shell.
|
3,224
|
def expand_constants(ins: Instruction, *, cf) -> Instruction:
    for i, operand in enumerate(ins.operands):
        if not isinstance(operand, Operand):
            continue
        if operand.op_type == OperandTypes.CONSTANT_INDEX:
            ins.operands[i] = cf.constants[operand.value]
    return ins
|
Replace CONSTANT_INDEX operands with the literal Constant object from the constant pool.
|
3,225
|
def simple_swap(ins: Instruction) -> Instruction:
    try:
        rule = ins.details['transform']['simple_swap']
    except KeyError:
        return ins
    replacement_ins = opcode_table[rule['op']]
    return Instruction(
        replacement_ins['mnemonic'],
        replacement_ins['op'],
        [Operand(replacement_ins['operands'][i][1], r)
         for i, r in enumerate(rule['operands'])],
        ins.pos
    )
|
Replaces one instruction with another based on the transform rules in the bytecode definitions. This can help simplify your code as it reduces the overall number of instructions. For example, aload_0 will become aload 0.
|
3,226
|
def find_request():
    frame = inspect.currentframe()
    request = None
    f = frame
    while not request and f:
        if 'request' in f.f_locals and isinstance(f.f_locals['request'], HttpRequest):
            request = f.f_locals['request']
        f = f.f_back
    del frame
    return request
|
Inspect the running environment for a request object. There should be one, but don't rely on it.
|
3,227
|
def error_view(template_dir=None):
    if not template_dir:
        template_dir = "Pylot/Error"
    template_page = "%s/index.html" % template_dir

    class Error(Pylot):
        @classmethod
        def register(cls, app, **kwargs):
            super(cls, cls).register(app, **kwargs)

            @app.errorhandler(400)
            def error_400(error):
                return cls.index(error, 400)

            @app.errorhandler(401)
            def error_401(error):
                return cls.index(error, 401)

            @app.errorhandler(403)
            def error_403(error):
                return cls.index(error, 403)

            @app.errorhandler(404)
            def error_404(error):
                return cls.index(error, 404)

            @app.errorhandler(500)
            def error_500(error):
                return cls.index(error, 500)

            @app.errorhandler(503)
            def error_503(error):
                return cls.index(error, 503)

        @classmethod
        def index(cls, error, code):
            cls.meta_(title="Error %s" % code)
            return cls.render(error=error, view_template=template_page), code

    return Error
|
Create the Error view. Must be instantiated.
|
3,228
|
def sign_s3_upload(self):
    AWS_ACCESS_KEY = self.config_('AWS_ACCESS_KEY_ID')
    AWS_SECRET_KEY = self.config_('AWS_SECRET_ACCESS_KEY')
    S3_BUCKET = self.config_('AWS_S3_BUCKET_NAME')
    object_name = request.args.get('s3_object_name')
    mime_type = request.args.get('s3_object_type')
    expires = long(time.time() + 10)
    amz_headers = "x-amz-acl:public-read"
    put_request = "PUT\n\n%s\n%d\n%s\n/%s/%s" % (
        mime_type, expires, amz_headers, S3_BUCKET, object_name)
    signature = base64.encodestring(
        hmac.new(AWS_SECRET_KEY, put_request, sha1).digest())
    signature = urllib.quote(urllib.quote_plus(signature.strip()))
    url = 'https://s3.amazonaws.com/%s/%s' % (S3_BUCKET, object_name)
    return jsonify({
        'signed_request': '%s?AWSAccessKeyId=%s&Expires=%d&Signature=%s' % (
            url, AWS_ACCESS_KEY, expires, signature),
        'url': url,
    })
|
Allow creating a signed object to upload to S3 via JS.
|
3,229
|
def add_new_devices_callback(self, callback):
    self._new_devices_callbacks.append(callback)
    _LOGGER.debug('Added new devices callback to %s', callback)
|
Register as callback for when new devices are added.
|
3,230
|
def add_stale_devices_callback(self, callback):
    self._stale_devices_callbacks.append(callback)
    _LOGGER.debug('Added stale devices callback to %s', callback)
|
Register as callback for when stale devices exist.
|
3,231
|
def add_update_callback(self, callback, device):
    self._update_callbacks.append([callback, device])
    _LOGGER.debug('Added update callback to %s on %s', callback, device)
|
Register as callback for when a matching device changes.
|
3,232
|
def remove_update_callback(self, callback, device):
    if [callback, device] in self._update_callbacks:
        self._update_callbacks.remove([callback, device])
        _LOGGER.debug('Removed update callback %s for %s', callback, device)
|
Remove a registered update callback.
|
3,233
|
def start(self):
    asyncio.ensure_future(self.register(), loop=self._event_loop)
    if self._own_loop:
        _LOGGER.info("Starting up our own event loop.")
        self._event_loop.run_forever()
        self._event_loop.close()
        _LOGGER.info("Connection shut down.")
|
Public method for initiating connectivity with the Emby server.
|
3,234
|
async def stop(self):
    self._shutdown = True
    if self.wsck:
        _LOGGER.info('Closing Emby server websocket.')
        await self.wsck.close()
        self.wsck = None
    if self._own_loop:
        _LOGGER.info("Shutting down Emby server loop...")
        self._event_loop.call_soon_threadsafe(self._event_loop.stop)
|
Async method for stopping connectivity with the Emby server.
|
3,235
|
async def register(self):
    url = '{}/Sessions'.format(self.construct_url(API_URL))
    params = {'api_key': self._api_key}
    reg = await self.api_request(url, params)
    if reg is None:
        self._registered = False
        _LOGGER.error('Unable to register emby client.')
    else:
        self._registered = True
        _LOGGER.info('Emby client registered!, Id: %s', self.unique_id)
        self._sessions = reg
        self.update_device_list(self._sessions)
        asyncio.ensure_future(self.socket_connection(), loop=self._event_loop)
|
Register library device id and get initial device list.
|
3,236
|
async def socket_connection(self):
    if not self._registered:
        _LOGGER.error('Client not registered, cannot start socket.')
        return
    url = '{}?DeviceID={}&api_key={}'.format(
        self.construct_url(SOCKET_URL), self._api_id, self._api_key)
    fail_count = 0
    while True:
        _LOGGER.debug('Attempting Socket Connection.')
        try:
            with async_timeout.timeout(DEFAULT_TIMEOUT, loop=self._event_loop):
                self.wsck = await self._api_session.ws_connect(url)
            try:
                msg = await self.wsck.send_str(
                    '{"MessageType":"SessionsStart", "Data": "0,1500"}')
            except Exception as err:
                _LOGGER.error('Failure setting session updates: %s', err)
                raise ValueError('Session updates error.')
            _LOGGER.debug('Socket Connected!')
            fail_count = 0
            while True:
                msg = await self.wsck.receive()
                if msg.type == aiohttp.WSMsgType.text:
                    self.process_msg(msg.data)
                elif msg.type == aiohttp.WSMsgType.closed:
                    raise ValueError('Websocket was closed.')
                elif msg.type == aiohttp.WSMsgType.error:
                    _LOGGER.debug('Websocket encountered an error: %s', msg)
                    raise ValueError('Websocket error.')
        except (aiohttp.ClientError, asyncio.TimeoutError,
                aiohttp.WSServerHandshakeError, ConnectionRefusedError,
                OSError, ValueError) as err:
            if not self._shutdown:
                fail_count += 1
                _LOGGER.debug('Websocket unintentionally closed.'
                              ' Trying reconnect in %ss. Error: %s',
                              (fail_count * 5) + 5, err)
                await asyncio.sleep(15, self._event_loop)
                continue
            else:
                break
|
Open websocket connection.
|
3,237
|
def process_msg(self, msg):
    jmsg = json.loads(msg)
    msgtype = jmsg['MessageType']
    msgdata = jmsg['Data']
    _LOGGER.debug('New websocket message received of type: %s', msgtype)
    if msgtype == 'Sessions':
        self._sessions = msgdata
        self.update_device_list(self._sessions)
|
Process messages from the event stream.
|
3,238
|
def update_device_list(self, sessions):
    if sessions is None:
        _LOGGER.error('Error updating Emby devices.')
        return
    new_devices = []
    active_devices = []
    dev_update = False
    for device in sessions:
        dev_name = '{}.{}'.format(device['DeviceId'], device['Client'])
        try:
            _LOGGER.debug('Session msg on %s of type: %s, themeflag: %s',
                          dev_name, device['NowPlayingItem']['Type'],
                          device['NowPlayingItem']['IsThemeMedia'])
        except KeyError:
            pass
        active_devices.append(dev_name)
        if dev_name not in self._devices and \
                device['DeviceId'] != str(self._api_id):
            _LOGGER.debug('New Emby DeviceID: %s. Adding to device list.',
                          dev_name)
            new = EmbyDevice(device, self)
            self._devices[dev_name] = new
            new_devices.append(new)
        elif device['DeviceId'] != str(self._api_id):
            if not self._devices[dev_name].is_active:
                dev_update = True
            do_update = self.update_check(self._devices[dev_name], device)
            self._devices[dev_name].update_data(device)
            self._devices[dev_name].set_active(True)
            if dev_update:
                self._do_new_devices_callback(0)
                dev_update = False
            if do_update:
                self._do_update_callback(dev_name)
    for dev_id in self._devices:
        if dev_id not in active_devices:
            if self._devices[dev_id].is_active:
                self._devices[dev_id].set_active(False)
                self._do_update_callback(dev_id)
                self._do_stale_devices_callback(dev_id)
    if new_devices:
        self._do_new_devices_callback(0)
|
Update device list.
|
3,239
|
def update_check(self, existing, new):
    old_state = existing.state
    if 'NowPlayingItem' in existing.session_raw:
        try:
            old_theme = existing.session_raw['NowPlayingItem']['IsThemeMedia']
        except KeyError:
            old_theme = False
    else:
        old_theme = False
    if 'NowPlayingItem' in new:
        if new['PlayState']['IsPaused']:
            new_state = STATE_PAUSED
        else:
            new_state = STATE_PLAYING
        try:
            new_theme = new['NowPlayingItem']['IsThemeMedia']
        except KeyError:
            new_theme = False
    else:
        new_state = STATE_IDLE
        new_theme = False
    if old_theme or new_theme:
        return False
    elif old_state == STATE_PLAYING or new_state == STATE_PLAYING:
        return True
    elif old_state != new_state:
        return True
    else:
        return False
|
Check device state to see if we need to fire the callback.
|
3,240
|
def main():
    parser = get_parser()
    args = parser.parse_args()
    ARCHIVE = args.archive_path
    archive = (not args.no_archive)
    os.environ['F2FORMAT_VERSION'] = args.python
    os.environ['F2FORMAT_ENCODING'] = args.encoding

    def find(root):
        flst = list()
        temp = os.listdir(root)
        for file in temp:
            path = os.path.join(root, file)
            if os.path.isdir(path):
                flst.extend(find(path))
            elif os.path.isfile(path):
                flst.append(path)
            elif os.path.islink(path):
                continue
        yield from flst

    def rename(path):
        stem, ext = os.path.splitext(path)
        name = '%s-%s%s' % (stem, uuid.uuid4(), ext)
        return os.path.join(ARCHIVE, name)

    if archive:
        os.makedirs(ARCHIVE, exist_ok=True)
    filelist = list()
    for path in sys.argv[1:]:
        if os.path.isfile(path):
            if archive:
                dest = rename(path)
                os.makedirs(os.path.dirname(dest), exist_ok=True)
                shutil.copy(path, dest)
            filelist.append(path)
        if os.path.isdir(path):
            if archive:
                shutil.copytree(path, rename(path))
            filelist.extend(find(path))

    def ispy(file):
        return (os.path.isfile(file) and
                (os.path.splitext(file)[1] in ('.py', '.pyw')))

    filelist = sorted(filter(ispy, filelist))
    if len(filelist) == 0:
        parser.error('argument PATH: no valid source file found')
    if mp is None or CPU_CNT <= 1:
        [f2format(filename) for filename in filelist]
    else:
        mp.Pool(processes=CPU_CNT).map(f2format, filelist)
|
Entry point for f2format.
|
3,241
|
def create(cls, this: str, super_: str = u'java/lang/Object') -> 'ClassFile':
    cf = ClassFile()
    cf.access_flags.acc_public = True
    cf.access_flags.acc_super = True
    cf.this = cf.constants.create_class(this)
    cf.super_ = cf.constants.create_class(super_)
    return cf
|
A utility which sets up reasonable defaults for a new public class.
|
3,242
|
def save(self, source: IO):
    write = source.write
    write(pack('>IHH', ClassFile.MAGIC, self.version.minor, self.version.major))
    self._constants.pack(source)
    write(self.access_flags.pack())
    write(pack(f'>HHH{len(self._interfaces)}H',
               self._this, self._super, len(self._interfaces),
               *self._interfaces))
    self.fields.pack(source)
    self.methods.pack(source)
    self.attributes.pack(source)
|
Saves the class to the file-like object source.
|
3,243
|
def _from_io(self, source: IO):
    read = source.read
    if unpack('>I', source.read(4))[0] != ClassFile.MAGIC:
        raise ValueError('invalid magic number')
    self.version = unpack('>HH', source.read(4))[::-1]
    self._constants.unpack(source)
    self.access_flags.unpack(read(2))
    self._this, self._super, interfaces_count = unpack('>HHH', read(6))
    self._interfaces = unpack(f'>{interfaces_count}H',
                              read(2 * interfaces_count))
    self.fields.unpack(source)
    self.methods.unpack(source)
    self.attributes.unpack(source)
|
Loads an existing JVM ClassFile from any file-like object.
|
3,244
|
def interfaces(self) -> Iterable[ConstantClass]:
    return [self._constants[idx] for idx in self._interfaces]
|
A list of direct superinterfaces of this class as indexes into the constant pool, in left-to-right order.
|
3,245
|
def bootstrap_methods(self) -> BootstrapMethod:
    bootstrap = self.attributes.find_one(name='BootstrapMethods')
    if bootstrap is None:
        bootstrap = self.attributes.create(ATTRIBUTE_CLASSES['BootstrapMethods'])
    return bootstrap.table
|
Returns the bootstrap methods table from the BootstrapMethods attribute, if one exists. If it does not, one will be created.
|
3,246
|
def attributes():
    attribute_classes = get_attribute_classes()
    for name, class_ in attribute_classes.items():
        click.echo(u'{name} - Added in: {ai} ({cv})'.format(
            name=click.style(name, fg='green'),
            ai=click.style(class_.ADDED_IN, fg='yellow'),
            cv=click.style(
                ClassVersion(*class_.MINIMUM_CLASS_VERSION).human,
                fg='yellow')
        ))
|
List enabled Attributes.
|
3,247
|
def ins(mnemonic):
    try:
        opcode = bytecode.opcode_table[mnemonic]
    except KeyError:
        click.secho(u'No definition found.', fg='red')
        return
    click.echo(u'{mnemonic} (0x{op})'.format(
        mnemonic=click.style(opcode['mnemonic'], fg='green', underline=True),
        op=click.style(format(opcode['op'], '02x'), fg='green')
    ))
    if opcode.get('desc'):
        click.secho('Description:', fg='yellow')
        click.echo(opcode['desc'])
    if opcode['can_be_wide']:
        click.echo(u'This instruction can be prefixed by the WIDE opcode.')
    if opcode.get('runtime'):
        click.secho('Possible runtime exceptions:', fg='yellow')
        for runtime_exception in opcode['runtime']:
            click.echo('- {runtime_exception}'.format(
                runtime_exception=click.style(runtime_exception, fg='red')))
    if opcode['operands']:
        click.secho(u'Operand Format:', fg='yellow')
        for operand_fmt, operand_type in opcode['operands']:
            click.echo(u'- {ty} as a {fmt}'.format(
                ty=click.style(operand_type.name, fg='yellow'),
                fmt=click.style(operand_fmt.name, fg='yellow')))
    elif opcode['op'] in (0xAB, 0xAA, 0xC4):
        click.secho(u'\nOperand Format:', fg='yellow')
        click.echo(u'This is a special-case opcode with variable operand parsing.')
|
Lookup instruction information.
|
3,248
|
def shell_command(class_path):
    loader = ClassLoader(*class_path)
    shell.start_shell(local_ns={
        'ClassFile': ClassFile,
        'loader': loader,
        'constants': importlib.import_module('jawa.constants'),
    })
|
Drop into a debugging shell.
|
3,249
|
def definition_to_json(source):
    try:
        import yaml
    except ImportError:
        click.echo('The pyyaml module could not be found and is required'
                   ' to use this command.', err=True)
        return
    y = yaml.load(source)
    for k, v in y.items():
        v.setdefault('operands', None)
        v.setdefault('can_be_wide', False)
        v.setdefault('transform', {})
        v['mnemonic'] = k
    click.echo(json.dumps(y, indent=4, sort_keys=True))
|
Convert a bytecode.yaml file into a prepared bytecode.json.
|
3,250
|
def dependencies(source):
    loader = ClassLoader(source, max_cache=-1)
    all_dependencies = set()
    for klass in loader.classes:
        new_dependencies = loader.dependencies(klass) - all_dependencies
        all_dependencies.update(new_dependencies)
        for new_dep in new_dependencies:
            click.echo(new_dep)
|
Output a list of all classes referenced by the given source.
|
3,251
|
def grep(source, regex, stop_on_first=False):
    loader = ClassLoader(source, max_cache=-1)
    r = re.compile(regex)

    def _matches(constant):
        return r.match(constant.value)

    for klass in loader.classes:
        it = loader.search_constant_pool(path=klass, type_=UTF8, f=_matches)
        if next(it, None):
            print(klass)
            if stop_on_first:
                break
|
Grep the constant pool of all classes in source.
|
3,252
|
def fetch(*args, **kwargs):
    data = kwargs.get('data', None)
    files = kwargs.get('files', {})
    if data and isinstance(data, (basestring, dict)) or files:
        return post(*args, **kwargs)
    return get(*args, **kwargs)
|
Fetch a URL.
|
3,253
|
def parse_url(url):
    try:
        url = unicode(url)
    except UnicodeDecodeError:
        pass
    if py3k:
        make_utf8 = lambda x: x
    else:
        make_utf8 = lambda x: isinstance(x, unicode) and x.encode('utf-8') or x
    if '://' in url:
        scheme, url = url.split('://', 1)
    else:
        scheme = 'http'
    url = 'http://' + url
    parsed = urlparse.urlsplit(url)
    r = ObjectDict()
    r['scheme'] = make_utf8(scheme)
    r['netloc'] = make_utf8(parsed.netloc)
    r['path'] = make_utf8(parsed.path)
    r['query'] = make_utf8(parsed.query)
    r['fragment'] = make_utf8(parsed.fragment)
    r['uri'] = make_utf8(parsed.path)
    if parsed.query:
        r['uri'] += '?' + make_utf8(parsed.query)
    r['username'] = make_utf8(parsed.username)
    r['password'] = make_utf8(parsed.password)
    host = make_utf8(parsed.hostname.encode('idna').decode('utf-8'))
    r['host'] = r['hostname'] = host
    try:
        r['port'] = parsed.port
    except ValueError:
        r['port'] = None
    if r['port']:
        r['http_host'] = '%s:%d' % (r['host'], r['port'])
    else:
        r['http_host'] = r['host']
    return r
|
Return a dictionary of the parsed URL.
|
3,254
|
def get_proxies_from_environ():
    proxies = {}
    http_proxy = os.getenv('http_proxy') or os.getenv('HTTP_PROXY')
    https_proxy = os.getenv('https_proxy') or os.getenv('HTTPS_PROXY')
    if http_proxy:
        proxies['http'] = http_proxy
    if https_proxy:
        proxies['https'] = https_proxy
    return proxies
|
Get proxies from os.environ.
|
3,255
|
def random_useragent(filename=True):
    import random
    default_ua = 'urlfetch/%s' % __version__
    if isinstance(filename, basestring):
        filenames = [filename]
    else:
        filenames = []
        if filename and UAFILE:
            filenames.append(UAFILE)
    for filename in filenames:
        try:
            st = os.stat(filename)
            if stat.S_ISREG(st.st_mode) and os.access(filename, os.R_OK):
                break
        except:
            pass
    else:
        return default_ua
    with open(filename, 'rb') as f:
        filesize = st.st_size
        pos = 0
        r = random.Random()
        for i in range(3):
            pos += r.randint(0, filesize)
            pos %= filesize
            f.seek(pos)
            f.readline()
            line = f.readline()
            if not line:
                if f.tell() == filesize:
                    f.seek(0)
                    line = f.readline()
            line = line.strip()
            if line and line[0] != '#':
                return line
    return default_ua
|
Returns a User-Agent string randomly from file.
|
3,256
|
def url_concat(url, args, keep_existing=True):
    if not args:
        return url
    if keep_existing:
        if url[-1] not in ('?', '&'):
            url += '&' if ('?' in url) else '?'
        return url + urlencode(args, 1)
    else:
        url, seq, query = url.partition('?')
        query = urlparse.parse_qs(query, True)
        query.update(args)
        return url + '?' + urlencode(query, 1)
|
Concatenate URL and argument dictionary.
|
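A sketch of both modes of the helper above; the URLs are made-up examples:

```python
# keep_existing=True simply appends the encoded args.
url_concat('http://example.com/q?a=1', {'b': 2})
# -> 'http://example.com/q?a=1&b=2'

# keep_existing=False re-parses the query string, so an existing key is replaced.
url_concat('http://example.com/q?a=1', {'a': 9}, keep_existing=False)
# -> 'http://example.com/q?a=9'
```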
3,257
|
def choose_boundary():
    global BOUNDARY_PREFIX
    if BOUNDARY_PREFIX is None:
        BOUNDARY_PREFIX = "urlfetch"
        try:
            uid = repr(os.getuid())
            BOUNDARY_PREFIX += "." + uid
        except AttributeError:
            pass
        try:
            pid = repr(os.getpid())
            BOUNDARY_PREFIX += "." + pid
        except AttributeError:
            pass
    return "%s.%s" % (BOUNDARY_PREFIX, uuid.uuid4().hex)
|
Generate a multipart boundary.
|
3,258
|
def encode_multipart(data, files):
    body = BytesIO()
    boundary = choose_boundary()
    part_boundary = b('--%s\r\n' % boundary)
    writer = codecs.lookup('utf-8')[3]
    if isinstance(data, dict):
        for name, values in data.items():
            if not isinstance(values, (list, tuple, set)):
                values = (values,)
            for value in values:
                body.write(part_boundary)
                writer(body).write('Content-Disposition: form-data; '
                                   'name="%s"\r\n' % name)
                body.write(b'Content-Type: text/plain\r\n\r\n')
                if isinstance(value, int):
                    value = str(value)
                if py3k and isinstance(value, str):
                    writer(body).write(value)
                else:
                    body.write(value)
                body.write(b'\r\n')
    for fieldname, f in files.items():
        if isinstance(f, tuple):
            filename, f = f
        elif hasattr(f, 'name'):
            filename = basename(f.name)
        else:
            filename = None
            raise UrlfetchException("file must has filename")
        if hasattr(f, 'read'):
            value = f.read()
        elif isinstance(f, basestring):
            value = f
        else:
            value = str(f)
        body.write(part_boundary)
        if filename:
            writer(body).write('Content-Disposition: form-data; name="%s"; '
                               'filename="%s"\r\n' % (fieldname, filename))
            body.write(b'Content-Type: application/octet-stream\r\n\r\n')
        else:
            writer(body).write('Content-Disposition: form-data; name="%s"'
                               '\r\n' % fieldname)
            body.write(b'Content-Type: text/plain\r\n\r\n')
        if py3k and isinstance(value, str):
            writer(body).write(value)
        else:
            body.write(value)
        body.write(b'\r\n')
    body.write(b('--' + boundary + '--\r\n'))
    content_type = 'multipart/form-data; boundary=%s' % boundary
    return content_type, body.getvalue()
|
Encode multipart.
|
3,259
|
def body(self):
    content = []
    length = 0
    for chunk in self:
        content.append(chunk)
        length += len(chunk)
        if self.length_limit and length > self.length_limit:
            self.close()
            raise ContentLimitExceeded("Content length is more than %d "
                                       "bytes" % self.length_limit)
    return b("").join(content)
|
Response body.
|
3,260
|
def json(self):
    try:
        return json.loads(self.text)
    except Exception as e:
        raise ContentDecodingError(e)
|
Load response body as JSON.
|
3,261
|
def headers(self):
    if py3k:
        return dict((k.lower(), v) for k, v in self.getheaders())
    else:
        return dict(self.getheaders())
|
Response headers.
|
3,262
|
def cookies(self):
    c = Cookie.SimpleCookie(self.getheader('set-cookie'))
    return dict((i.key, i.value) for i in c.values())
|
Cookies in dict.
|
3,263
|
def links(self):
    ret = []
    linkheader = self.getheader('link')
    if not linkheader:
        return ret
    for i in linkheader.split(','):
        try:
            url, params = i.split(';', 1)
        except ValueError:
            url, params = i, ''
        link = {}
        link['url'] = url.strip()
        for param in params.split(';'):
            try:
                k, v = param.split('=')
            except ValueError:
                break
            link[k.strip()] = v.strip()
        ret.append(link)
    return ret
|
Links parsed from HTTP Link header.
|
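A sample pagination header and roughly what the parser above yields; note the URL keeps its angle brackets and values keep their quotes, since the code only strips whitespace:

```python
linkheader = '<https://api.example.com/items?page=2>; rel="next"'
# -> [{'url': '<https://api.example.com/items?page=2>', 'rel': '"next"'}]
```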
3,264
|
def cookiestring(self, value):
    c = Cookie.SimpleCookie(value)
    sc = [(i.key, i.value) for i in c.values()]
    self.cookies = dict(sc)
|
Cookie string setter.
|
3,265
|
def request(self, *args, **kwargs):
    headers = self.headers.copy()
    if self.cookiestring:
        headers['Cookie'] = self.cookiestring
    headers.update(kwargs.get('headers', {}))
    kwargs['headers'] = headers
    r = request(*args, **kwargs)
    self.cookies.update(r.cookies)
    return r
|
Issue a request.
|
3,266
|
def f2format(filename):
    print('Now converting %r...' % filename)
    encoding = os.getenv('F2FORMAT_ENCODING', LOCALE_ENCODING)
    lineno = dict()
    content = list()
    with open(filename, 'r', encoding=encoding) as file:
        lineno[1] = 0
        for lnum, line in enumerate(file, start=1):
            content.append(line)
            lineno[lnum + 1] = lineno[lnum] + len(line)
    string = ''.join(content)
    text = convert(string, lineno)
    with open(filename, 'w', encoding=encoding) as file:
        file.write(text)
|
Wrapper that performs the conversion.
|
3,267
|
def exception_to_github(github_obj_to_comment, summary=""):
    context = ExceptionContext()
    try:
        yield context
    except Exception:
        if summary:
            summary = ": ({})".format(summary)
        error_type = "an unknown error"
        try:
            raise
        except CalledProcessError as err:
            error_type = "a Subprocess error"
            content = "Command: {}\n".format(err.cmd)
            content += "Finished with return code {}\n".format(err.returncode)
            if err.output:
                content += "and output:\n```shell\n{}\n```".format(err.output)
            else:
                content += "and no output"
        except Exception:
            content = "```python\n{}\n```".format(traceback.format_exc())
        response = "<details><summary>Encountered {}{}</summary><p>\n\n".format(
            error_type, summary)
        response += content
        response += "\n\n</p></details>"
        context.comment = create_comment(github_obj_to_comment, response)
|
If any exception is raised, log it as a comment on the given GitHub object.
|
3,268
|
def create_comment(github_object, body):
    try:
        return github_object.create_issue_comment(body)
    except AttributeError:
        return github_object.create_comment(body)
|
Create a comment, whether the object is a PR, a commit or an issue.
|
3,269
|
def get_full_sdk_id(gh_token, sdk_git_id):
    if '/' not in sdk_git_id:
        login = user_from_token(gh_token).login
        return '{}/{}'.format(login, sdk_git_id)
    return sdk_git_id
|
If the SDK git id is incomplete, try to complete it with the user login.
|
3,270
|
def sync_fork(gh_token, github_repo_id, repo, push=True):
    if not gh_token:
        _LOGGER.warning('Skipping the upstream repo sync, no token')
        return
    _LOGGER.info('Check if repo has to be sync with upstream')
    github_con = Github(gh_token)
    github_repo = github_con.get_repo(github_repo_id)
    if not github_repo.parent:
        _LOGGER.warning('This repo has no upstream')
        return
    upstream_url = 'https://github.com/{}.git'.format(github_repo.parent.full_name)
    upstream = repo.create_remote('upstream', url=upstream_url)
    upstream.fetch()
    active_branch_name = repo.active_branch.name
    if active_branch_name not in repo.remotes.upstream.refs:
        _LOGGER.info('Upstream has no branch %s to merge from', active_branch_name)
        return
    else:
        _LOGGER.info('Merge from upstream')
    msg = repo.git.rebase('upstream/{}'.format(repo.active_branch.name))
    _LOGGER.debug(msg)
    if push:
        msg = repo.git.push()
        _LOGGER.debug(msg)
|
Sync the current branch in this fork against the direct parent on GitHub.
|
3,271
|
def get_or_create_pull(github_repo, title, body, head, base, *,
                       none_if_no_commit=False):
    try:
        return github_repo.create_pull(title=title, body=body, head=head, base=base)
    except GithubException as err:
        err_message = err.data['errors'][0].get('message', '')
        if err.status == 422 and err_message.startswith('A pull request already exists'):
            _LOGGER.info('PR already exists, get this PR')
            return list(github_repo.get_pulls(head=head, base=base))[0]
        elif none_if_no_commit and err.status == 422 and \
                err_message.startswith('No commits between'):
            _LOGGER.info('No PR possible since head %s and base %s are the same',
                         head, base)
            return None
        else:
            _LOGGER.warning("Unable to create PR:\n%s", err.data)
            raise
    except Exception as err:
        response = traceback.format_exc()
        _LOGGER.warning("Unable to create PR:\n%s", response)
        raise
|
Try to create the PR. If the PR exists, try to find it instead. Raises otherwise.
|
3,272
|
def clone_to_path(gh_token, folder, sdk_git_id, branch_or_commit=None, *,
                  pr_number=None):
    _LOGGER.info("Clone SDK repository %s", sdk_git_id)
    url_parsing = urlsplit(sdk_git_id)
    sdk_git_id = url_parsing.path
    if sdk_git_id.startswith("/"):
        sdk_git_id = sdk_git_id[1:]
    credentials_part = ''
    if gh_token:
        login = user_from_token(gh_token).login
        credentials_part = '{user}:{token}@'.format(user=login, token=gh_token)
    else:
        _LOGGER.warning('Will clone the repo without writing credentials')
    https_authenticated_url = 'https://{credentials}github.com/{sdk_git_id}.git'.format(
        credentials=credentials_part, sdk_git_id=sdk_git_id)
    _git_clone_to_path(https_authenticated_url, folder)
    if pr_number:
        try:
            checkout_with_fetch(folder, "pull/{}/merge".format(pr_number))
            return
        except Exception:
            pass
        checkout_with_fetch(folder, "pull/{}/head".format(pr_number))
    if branch_or_commit:
        repo = Repo(str(folder))
        repo.git.checkout(branch_or_commit)
|
Clone the given repo_id to the folder.
|
3,273
|
def do_pr(gh_token, sdk_git_id, sdk_pr_target_repo_id, branch_name, base_branch, pr_body=""):
    "Do the PR"
    if not gh_token:
        _LOGGER.info('Skipping the PR, no token found')
        return None
    if not sdk_pr_target_repo_id:
        _LOGGER.info('Skipping the PR, no target repo id')
        return None
    github_con = Github(gh_token)
    sdk_pr_target_repo = github_con.get_repo(sdk_pr_target_repo_id)
    if '/' in sdk_git_id:
        sdk_git_owner = sdk_git_id.split('/')[0]
        _LOGGER.info("Do the PR from %s", sdk_git_owner)
        head_name = "{}:{}".format(sdk_git_owner, branch_name)
    else:
        head_name = branch_name
        sdk_git_repo = github_con.get_repo(sdk_git_id)
        sdk_git_owner = sdk_git_repo.owner.login
    try:
        github_pr = sdk_pr_target_repo.create_pull(
            title='Automatic PR from {}'.format(branch_name),
            body=pr_body,
            head=head_name,
            base=base_branch)
    except GithubException as err:
        if err.status == 422 and err.data['errors'][0].get(
                'message', '').startswith('A pull request already exists'):
            matching_pulls = sdk_pr_target_repo.get_pulls(
                base=base_branch, head=sdk_git_owner + ":" + head_name)
            matching_pull = matching_pulls[0]
            _LOGGER.info('PR already exists: %s', matching_pull.html_url)
            return matching_pull
        raise
    _LOGGER.info("Made PR %s", github_pr.html_url)
    return github_pr
|
Do the PR.
|
3,274
|
def remove_readonly(func, path, _):
    "Clear the readonly bit and reattempt the removal"
    os.chmod(path, stat.S_IWRITE)
    func(path)
|
Clear the readonly bit and reattempt the removal.
|
3,275
|
def manage_git_folder(gh_token, temp_dir, git_id, *, pr_number=None):
    _LOGGER.debug("Git ID %s", git_id)
    if Path(git_id).exists():
        yield git_id
        return
    split_git_id = git_id.split("@")
    branch = split_git_id[1] if len(split_git_id) > 1 else None
    clone_to_path(gh_token, temp_dir, split_git_id[0],
                  branch_or_commit=branch, pr_number=pr_number)
    try:
        yield temp_dir
    finally:
        _LOGGER.debug("Preclean Rest folder")
        shutil.rmtree(temp_dir, onerror=remove_readonly)
|
Context manager to avoid readonly problems while cleaning up the temp dir.
|
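A hypothetical usage sketch; it assumes the function is wrapped in `@contextlib.contextmanager`, as its yield-based body suggests, and the token and repo id are examples:

```python
import tempfile

temp_dir = tempfile.mkdtemp()
with manage_git_folder("<gh_token>", temp_dir, "Azure/azure-rest-api-specs@main") as folder:
    print("Working copy at", folder)
# On exit the temp dir is removed, with remove_readonly clearing read-only bits.
```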
3,276
|
def as_raw_link(self):
    if self.link_type == "raw":
        return self
    if self.link_type != "blob":
        raise ValueError("Cannot get a download link from a tree link")
    return self.__class__(
        self.gitid, "raw", self.branch_or_commit, self.path, self.token)
|
Returns a GithubLink to a raw content.
|
3,277
|
def create_comment(self, text):
    return DashboardComment.get_or_create(self._issue_or_pr, self._header, text)
|
Mimic issue API so we can use it everywhere. Return dashboard comment.
|
3,278
|
def get_or_create(cls, issue, header, text=None):
    for comment in get_comments(issue):
        try:
            if comment.body.splitlines()[0] == header:
                obj = cls(comment, header)
                break
        except IndexError:
            pass
    else:
        comment = create_comment(issue, header)
        obj = cls(comment, header)
    if text:
        obj.edit(text)
    return obj
|
Get or create the dashboard comment in this issue.
|
3,279
|
def disconnect(self, name, func, dispatch_uid=None):
    try:
        signal = self._registry[name]
    except KeyError:
        return
    signal.disconnect(func, dispatch_uid=dispatch_uid)
|
Disconnects a function from a hook.
|
3,280
|
def create_host(factories, value):
    data = [value]
    for func in factories:
        try:
            return func(value)
        except InvalidHostError as ex:
            data.append(str(ex))
    msg_tpl = ("Failed to create a host object for '{}', raising the following"
               " errors in the process:" + "\n".join(data))
    raise InvalidHostError(msg_tpl.format(value))
|
Use the factories to create a host object.
|
3,281
|
def is_subdomain(self, other):
    compared = other.value if hasattr(other, 'value') else other
    try:
        return self.value.is_subdomain(compared)
    except AttributeError:
        return False
|
Test if the object is a subdomain of the other.
|
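The `.value` objects here appear to follow dnspython's `Name` API, which provides `is_subdomain`; a rough sketch of the comparison this wrapper seems to delegate to, with example domains:

```python
import dns.name

child = dns.name.from_text('mail.example.com')
parent = dns.name.from_text('example.com')
assert child.is_subdomain(parent)
assert not parent.is_subdomain(child)
```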
3,282
|
def assemble(code):
    final = []
    for line in code:
        if isinstance(line, Label):
            final.append(line)
            continue
        mnemonic, operands = line[0], line[1:]
        operand_fmts = opcode_table[mnemonic]['operands']
        final_operands = []
        for i, operand in enumerate(operands):
            if isinstance(operand, Operand):
                final_operands.append(operand)
            elif isinstance(operand, Constant):
                final_operands.append(
                    Operand(OperandTypes.CONSTANT_INDEX, operand.index))
            elif isinstance(operand, dict):
                final_operands.append(operand)
            elif isinstance(operand, Label):
                final_operands.append(operand)
            else:
                final_operands.append(Operand(operand_fmts[i][1], operand))
        final.append(Instruction.create(mnemonic, final_operands))
    label_pcs = {}
    current_pc = 0
    for ins in final:
        if isinstance(ins, Label):
            label_pcs[ins.name] = current_pc
            continue
        current_pc += ins.size_on_disk(current_pc)
    current_pc = 0
    for ins in final:
        if isinstance(ins, Label):
            continue
        for i, operand in enumerate(ins.operands):
            if isinstance(operand, dict):
                for k, v in operand.items():
                    if isinstance(v, Label):
                        operand[k] = Operand(40, label_pcs[v.name] - current_pc)
            elif isinstance(operand, Label):
                ins.operands[i] = Operand(40, label_pcs[operand.name] - current_pc)
        current_pc += ins.size_on_disk(current_pc)
        yield ins
|
Assemble the given iterable of mnemonics, operands and labels.
|
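A sketch based on jawa's `assemble()` interface: plain tuples for instructions and `Label` objects for jump targets; the instruction list is an example and the surrounding method setup is omitted:

```python
from jawa.assemble import assemble, Label

code = [
    Label('start'),
    ('iload_0',),
    ('ifeq', Label('exit')),    # branch operands resolve to relative offsets
    ('goto', Label('start')),
    Label('exit'),
    ('return',),
]
instructions = list(assemble(code))
```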
3,283
|
def register(self, hook):
    assert callable(hook), "Hook must be a callable"
    assert issubclass(hook, HookBase), "The hook does not inherit from HookBase"
    self._registry.append(hook)
|
Register a hook.
|
3,284
|
def save(self, *args, **kwargs):
    return [(form, form.save(*args, **kwargs)) for form in self.instances]
|
Save all the forms.
|
3,285
|
def get_attribute_classes() -> Dict[str, Attribute]:
    attribute_children = pkgutil.iter_modules(
        importlib.import_module('jawa.attributes').__path__,
        prefix='jawa.attributes.')
    result = {}
    for _, name, _ in attribute_children:
        classes = inspect.getmembers(
            importlib.import_module(name),
            lambda c: (inspect.isclass(c) and issubclass(c, Attribute)
                       and c is not Attribute))
        for class_name, class_ in classes:
            attribute_name = getattr(class_, 'ATTRIBUTE_NAME', class_name[:-9])
            result[attribute_name] = class_
    return result
|
Lookup all builtin Attribute subclasses, load them, and return a dict.
|
3,286
|
def unpack(self, source: IO):
    count = unpack('>H', source.read(2))[0]
    for _ in repeat(None, count):
        name_index, length = unpack('>HI', source.read(6))
        info_blob = source.read(length)
        self._table.append((name_index, info_blob))
|
Read the AttributeTable from the file-like object source.
|
3,287
|
def pack(self, out: IO):
    out.write(pack('>H', len(self._table)))
    for attribute in self:
        info = attribute.pack()
        out.write(pack('>HI', attribute.name.index, len(info)))
        out.write(info)
|
Write the AttributeTable to the file-like object out.
|
3,288
|
def create(self, type_, *args, **kwargs) -> Any:
    attribute = type_(self, *args, **kwargs)
    self._table.append(attribute)
    return attribute
|
Creates a new attribute of type_, appending it to the attribute table and returning it.
|
3,289
|
def get_locations(self, url):
    if not is_valid_url(url):
        raise InvalidURLError('{} is not a valid URL'.format(url))
    try:
        response = self.session.head(url)
    except (ConnectionError, InvalidSchema, Timeout):
        raise StopIteration
    try:
        generator = self.session.resolve_redirects(response, response.request)
        for response in generator:
            yield response.url
    except InvalidURL:
        pass
    except (ConnectionError, InvalidSchema, Timeout) as error:
        last_url = response.headers['location']
        if isinstance(error, Timeout) or is_valid_url(last_url):
            yield last_url
|
Get valid location header values from responses.
|
3,290
|
def get_new_locations(self, urls):
    seen = set(urls)
    for i in urls:
        for k in self.get_locations(i):
            if k not in seen:
                seen.add(k)
                yield k
|
Get valid location header values for all given URLs.
|
3,291
|
def get_urls_and_locations(self, urls):
    location_generator = self.get_new_locations(urls)
    initial_cache = list(set(urls))
    return CachedIterable(location_generator, initial_cache)
|
Get URLs and their redirection addresses.
|
3,292
|
def _handle_get(self, request_data):
    der = base64.b64decode(request_data)
    ocsp_request = self._parse_ocsp_request(der)
    return self._build_http_response(ocsp_request)
|
An OCSP GET request contains the DER-in-base64 encoded OCSP request in the HTTP request URL.
|
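A round-trip sketch of the GET flow above: clients base64-encode the DER request into the URL path, and the handler decodes it back; the DER bytes here are a placeholder, not a real OCSPRequest:

```python
import base64

der_request = b'\x30\x03\x02\x01\x00'               # placeholder DER bytes
path_segment = base64.b64encode(der_request).decode()
assert base64.b64decode(path_segment) == der_request
```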
3,293
|
def _handle_post(self):
    der = request.body.read()
    ocsp_request = self._parse_ocsp_request(der)
    return self._build_http_response(ocsp_request)
|
An OCSP POST request contains the DER-encoded OCSP request in the HTTP request body.
|
3,294
|
def _build_ocsp_response(self, ocsp_request: OCSPRequest) -> OCSPResponse:
    tbs_request = ocsp_request['tbs_request']
    request_list = tbs_request['request_list']
    if len(request_list) != 1:
        logger.warning('Received OCSP request with multiple sub requests')
        raise NotImplementedError('Combined requests not yet supported')
    single_request = request_list[0]
    req_cert = single_request['req_cert']
    serial = req_cert['serial_number'].native
    try:
        certificate_status, revocation_date = self._validate(serial)
    except Exception as e:
        logger.exception('Could not determine certificate status: %s', e)
        return self._fail(ResponseStatus.internal_error)
    try:
        subject_cert_contents = self._cert_retrieve(serial)
    except Exception as e:
        logger.exception('Could not retrieve certificate with serial %s: %s',
                         serial, e)
        return self._fail(ResponseStatus.internal_error)
    try:
        subject_cert = asymmetric.load_certificate(
            subject_cert_contents.encode('utf8'))
    except Exception as e:
        logger.exception('Returned certificate with serial %s is invalid: %s',
                         serial, e)
        return self._fail(ResponseStatus.internal_error)
    builder = OCSPResponseBuilder(**{
        'response_status': ResponseStatus.successful.value,
        'certificate': subject_cert,
        'certificate_status': certificate_status.value,
        'revocation_date': revocation_date,
    })
    for extension in tbs_request['request_extensions']:
        extn_id = extension['extn_id'].native
        critical = extension['critical'].native
        value = extension['extn_value'].parsed
        unknown = False
        if extn_id == 'nonce':
            builder.nonce = value.native
        else:
            unknown = True
        if unknown is True and critical is True:
            logger.warning('Could not parse unknown critical extension: %r',
                           dict(extension.native))
            return self._fail(ResponseStatus.internal_error)
        elif unknown is True:
            logger.info('Ignored unknown non-critical extension: %r',
                        dict(extension.native))
    builder.certificate_issuer = self._issuer_cert
    builder.next_update = datetime.now(timezone.utc) + timedelta(
        days=self._next_update_days)
    return builder.build(self._responder_key, self._responder_cert)
|
Create and return an OCSP response from an OCSP request.
|
3,295
|
def hook_tag(context, name, *args, **kwargs):
    return format_html_join(
        sep="\n",
        format_string="{}",
        args_generator=((response,)
                        for response in hook(name, context, *args, **kwargs)))
|
Hook tag to call within templates.
|
3,296
|
def template_hook_collect(module, hook_name, *args, **kwargs):
    try:
        templatehook = getattr(module, hook_name)
    except AttributeError:
        return ""
    return format_html_join(
        sep="\n",
        format_string="{}",
        args_generator=((response,)
                        for response in templatehook(*args, **kwargs)))
|
Helper to include in your own templatetag for static TemplateHooks.
|
3,297
|
def _extract(self, source, *args, **kwargs):
    self._data = mbox_to_pandas(source)
    self._data['MessageID'] = pd.Series(range(0, len(self._data)))
|
Extracts data from mbox files. Mutates _data.
|
3,298
|
def build_from_issue_comment(gh_token, body):
    if body["action"] in ["created", "edited"]:
        github_con = Github(gh_token)
        repo = github_con.get_repo(body['repository']['full_name'])
        issue = repo.get_issue(body['issue']['number'])
        text = body['comment']['body']
        try:
            comment = issue.get_comment(body['comment']['id'])
        except UnknownObjectException:
            return None
        return WebhookMetadata(repo, issue, text, comment)
    return None
|
Create a WebhookMetadata from a comment added to an issue.
|
3,299
|
def build_from_issues(gh_token, body):
    if body["action"] in ["opened", "edited"]:
        github_con = Github(gh_token)
        repo = github_con.get_repo(body['repository']['full_name'])
        issue = repo.get_issue(body['issue']['number'])
        text = body['issue']['body']
        comment = issue
        return WebhookMetadata(repo, issue, text, comment)
    return None
|
Create a WebhookMetadata from an opened issue's text.
|