idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
50,200
def process_configs ( file_lookup , app_config_format , pipeline_config ) : app_configs = collections . defaultdict ( dict ) for env in ENVS : file_json = app_config_format . format ( env = env ) try : env_config = file_lookup . json ( filename = file_json ) app_configs [ env ] = apply_region_configs ( env_config ) except FileNotFoundError : LOG . critical ( 'Application configuration not available for %s.' , env ) continue try : app_configs [ 'pipeline' ] = file_lookup . json ( filename = pipeline_config ) except FileNotFoundError : LOG . warning ( 'Unable to process pipeline.json. Using defaults.' ) app_configs [ 'pipeline' ] = { 'env' : [ 'stage' , 'prod' ] } LOG . debug ( 'Application configs:\n%s' , app_configs ) return app_configs
Processes the configs from lookup sources .
50,201
def apply_region_configs ( env_config ) : new_config = env_config . copy ( ) for region in env_config . get ( 'regions' , REGIONS ) : if isinstance ( env_config . get ( 'regions' ) , dict ) : region_specific_config = env_config [ 'regions' ] [ region ] new_config [ region ] = dict ( DeepChainMap ( region_specific_config , env_config ) ) else : new_config [ region ] = env_config . copy ( ) LOG . debug ( 'Region Specific Config:\n%s' , new_config ) return new_config
Override default env configs with region specific configs and nest all values under a region
50,202
def create_iam_resources ( env = 'dev' , app = '' , ** _ ) : session = boto3 . session . Session ( profile_name = env ) client = session . client ( 'iam' ) app_properties = get_properties ( env = 'pipeline' ) generated = get_details ( env = env , app = app ) generated_iam = generated . iam ( ) app_details = collections . namedtuple ( 'AppDetails' , generated_iam . keys ( ) ) details = app_details ( ** generated_iam ) LOG . debug ( 'Application details: %s' , details ) deployment_type = app_properties [ 'type' ] role_trust_template = get_template ( 'infrastructure/iam/trust/{0}_role.json.j2' . format ( deployment_type ) , formats = generated ) resource_action ( client , action = 'create_role' , log_format = 'Created Role: %(RoleName)s' , RoleName = details . role , AssumeRolePolicyDocument = role_trust_template ) resource_action ( client , action = 'create_instance_profile' , log_format = 'Created Instance Profile: %(InstanceProfileName)s' , InstanceProfileName = details . profile ) attach_profile_to_role ( client , role_name = details . role , profile_name = details . profile ) iam_policy = construct_policy ( app = app , group = details . group , env = env , pipeline_settings = app_properties ) if iam_policy : resource_action ( client , action = 'put_role_policy' , log_format = 'Added IAM Policy: %(PolicyName)s' , RoleName = details . role , PolicyName = details . policy , PolicyDocument = iam_policy ) resource_action ( client , action = 'create_user' , log_format = 'Created User: %(UserName)s' , UserName = details . user ) resource_action ( client , action = 'create_group' , log_format = 'Created Group: %(GroupName)s' , GroupName = details . group ) resource_action ( client , action = 'add_user_to_group' , log_format = 'Added User to Group: %(UserName)s -> %(GroupName)s' , GroupName = details . group , UserName = details . user ) return True
Create the IAM Resources for the application .
50,203
def attach_profile_to_role ( client , role_name = 'forrest_unicorn_role' , profile_name = 'forrest_unicorn_profile' ) : current_instance_profiles = resource_action ( client , action = 'list_instance_profiles_for_role' , log_format = 'Found Instance Profiles for %(RoleName)s.' , RoleName = role_name ) [ 'InstanceProfiles' ] for profile in current_instance_profiles : if profile [ 'InstanceProfileName' ] == profile_name : LOG . info ( 'Found Instance Profile attached to Role: %s -> %s' , profile_name , role_name ) break else : for remove_profile in current_instance_profiles : resource_action ( client , action = 'remove_role_from_instance_profile' , log_format = 'Removed Instance Profile from Role: ' '%(InstanceProfileName)s -> %(RoleName)s' , InstanceProfileName = remove_profile [ 'InstanceProfileName' ] , RoleName = role_name ) resource_action ( client , action = 'add_role_to_instance_profile' , log_format = 'Added Instance Profile to Role: ' '%(InstanceProfileName)s -> %(RoleName)s' , InstanceProfileName = profile_name , RoleName = role_name ) return True
Attach an IAM Instance Profile _profile_name_ to Role _role_name_ .
50,204
def find_elb ( name = '' , env = '' , region = '' ) : LOG . info ( 'Find %s ELB in %s [%s].' , name , env , region ) url = '{0}/applications/{1}/loadBalancers' . format ( API_URL , name ) response = requests . get ( url , verify = GATE_CA_BUNDLE , cert = GATE_CLIENT_CERT ) assert response . ok elb_dns = None accounts = response . json ( ) for account in accounts : if account [ 'account' ] == env and account [ 'region' ] == region : elb_dns = account [ 'dnsname' ] break else : raise SpinnakerElbNotFound ( 'Elb for "{0}" in region {1} not found' . format ( name , region ) ) LOG . info ( 'Found: %s' , elb_dns ) return elb_dns
Get an application s AWS elb dns name .
50,205
def find_elb_dns_zone_id ( name = '' , env = 'dev' , region = 'us-east-1' ) : LOG . info ( 'Find %s ELB DNS Zone ID in %s [%s].' , name , env , region ) client = boto3 . Session ( profile_name = env ) . client ( 'elb' , region_name = region ) elbs = client . describe_load_balancers ( LoadBalancerNames = [ name ] ) return elbs [ 'LoadBalancerDescriptions' ] [ 0 ] [ 'CanonicalHostedZoneNameID' ]
Get an application s AWS elb dns zone id .
50,206
def get_version ( ) : version = 'Not installed.' try : version = pkg_resources . get_distribution ( __package__ ) . version except pkg_resources . DistributionNotFound : pass return version
Retrieve package version .
50,207
async def loop ( self ) : self . _running = True while self . _running : updates = await self . api_call ( "getUpdates" , offset = self . _offset + 1 , timeout = self . api_timeout ) self . _process_updates ( updates )
Return bot s main loop as coroutine . Use with asyncio .
50,208
def run ( self , debug = False , reload = None ) : loop = asyncio . get_event_loop ( ) logging . basicConfig ( level = logging . DEBUG if debug else logging . INFO ) if reload is None : reload = debug bot_loop = asyncio . ensure_future ( self . loop ( ) ) try : if reload : loop . run_until_complete ( run_with_reloader ( loop , bot_loop , self . stop ) ) else : loop . run_until_complete ( bot_loop ) except KeyboardInterrupt : logger . debug ( "User cancelled" ) bot_loop . cancel ( ) self . stop ( ) finally : if AIOHTTP_23 : loop . run_until_complete ( self . session . close ( ) ) logger . debug ( "Closing loop" ) loop . stop ( ) loop . close ( )
Convenience method for running bots in getUpdates mode
50,209
def run_webhook ( self , webhook_url , ** options ) : loop = asyncio . get_event_loop ( ) loop . run_until_complete ( self . set_webhook ( webhook_url , ** options ) ) if webhook_url : url = urlparse ( webhook_url ) app = self . create_webhook_app ( url . path , loop ) host = os . environ . get ( "HOST" , "0.0.0.0" ) port = int ( os . environ . get ( "PORT" , 0 ) ) or url . port if AIOHTTP_23 : app . on_cleanup . append ( lambda _ : self . session . close ( ) ) web . run_app ( app , host = host , port = port ) else : loop . run_until_complete ( self . session . close ( ) )
Convenience method for running bots in webhook mode
50,210
def command ( self , regexp ) : def decorator ( fn ) : self . add_command ( regexp , fn ) return fn return decorator
Register a new command
50,211
def inline ( self , callback ) : if callable ( callback ) : self . _default_inline = callback return callback elif isinstance ( callback , str ) : def decorator ( fn ) : self . add_inline ( callback , fn ) return fn return decorator else : raise TypeError ( "str expected {} given" . format ( type ( callback ) ) )
Set callback for inline queries
50,212
def callback ( self , callback ) : if callable ( callback ) : self . _default_callback = callback return callback elif isinstance ( callback , str ) : def decorator ( fn ) : self . add_callback ( callback , fn ) return fn return decorator else : raise TypeError ( "str expected {} given" . format ( type ( callback ) ) )
Set callback for callback queries
50,213
def handle ( self , msg_type ) : def wrap ( callback ) : self . _handlers [ msg_type ] = callback return callback return wrap
Set handler for specific message type
50,214
def api_call ( self , method , ** params ) : coro = self . _api_call ( method , ** params ) return asyncio . ensure_future ( coro )
Call Telegram API .
50,215
def send_message ( self , chat_id , text , ** options ) : return self . api_call ( "sendMessage" , chat_id = chat_id , text = text , ** options )
Send a text message to chat
50,216
def edit_message_text ( self , chat_id , message_id , text , ** options ) : return self . api_call ( "editMessageText" , chat_id = chat_id , message_id = message_id , text = text , ** options )
Edit a text message in a chat
50,217
def edit_message_reply_markup ( self , chat_id , message_id , reply_markup , ** options ) : return self . api_call ( "editMessageReplyMarkup" , chat_id = chat_id , message_id = message_id , reply_markup = reply_markup , ** options )
Edit a reply markup of message in a chat
50,218
def download_file ( self , file_path , range = None ) : headers = { "range" : range } if range else None url = "{0}/file/bot{1}/{2}" . format ( API_URL , self . api_token , file_path ) return self . session . get ( url , headers = headers , proxy = self . proxy , proxy_auth = self . proxy_auth )
Download a file from Telegram servers
50,219
def get_user_profile_photos ( self , user_id , ** options ) : return self . api_call ( "getUserProfilePhotos" , user_id = str ( user_id ) , ** options )
Get a list of profile pictures for a user
50,220
async def webhook_handle ( self , request ) : update = await request . json ( loads = self . json_deserialize ) self . _process_update ( update ) return web . Response ( )
aiohttp . web handle for processing web hooks
50,221
def create_webhook_app ( self , path , loop = None ) : app = web . Application ( loop = loop ) app . router . add_route ( "POST" , path , self . webhook_handle ) return app
Shorthand for creating aiohttp . web . Application with registered webhook handle
50,222
def send_text ( self , text , ** options ) : return self . bot . send_message ( self . id , text , ** options )
Send a text message to the chat .
50,223
def reply ( self , text , markup = None , parse_mode = None ) : if markup is None : markup = { } return self . send_text ( text , reply_to_message_id = self . message [ "message_id" ] , disable_web_page_preview = "true" , reply_markup = self . bot . json_serialize ( markup ) , parse_mode = parse_mode , )
Reply to the message this Chat object is based on .
50,224
def edit_text ( self , message_id , text , markup = None , parse_mode = None ) : if markup is None : markup = { } return self . bot . edit_message_text ( self . id , message_id , text , reply_markup = self . bot . json_serialize ( markup ) , parse_mode = parse_mode , )
Edit the message in this chat .
50,225
def edit_reply_markup ( self , message_id , markup ) : return self . bot . edit_message_reply_markup ( self . id , message_id , reply_markup = self . bot . json_serialize ( markup ) )
Edit only reply markup of the message in this chat .
50,226
def get_chat_member ( self , user_id ) : return self . bot . api_call ( "getChatMember" , chat_id = str ( self . id ) , user_id = str ( user_id ) )
Get information about a member of a chat .
50,227
def send_sticker ( self , sticker , ** options ) : return self . bot . api_call ( "sendSticker" , chat_id = str ( self . id ) , sticker = sticker , ** options )
Send a sticker to the chat .
50,228
def send_audio ( self , audio , ** options ) : return self . bot . api_call ( "sendAudio" , chat_id = str ( self . id ) , audio = audio , ** options )
Send an mp3 audio file to the chat .
50,229
def send_photo ( self , photo , caption = "" , ** options ) : return self . bot . api_call ( "sendPhoto" , chat_id = str ( self . id ) , photo = photo , caption = caption , ** options )
Send a photo to the chat .
50,230
def send_video ( self , video , caption = "" , ** options ) : return self . bot . api_call ( "sendVideo" , chat_id = str ( self . id ) , video = video , caption = caption , ** options )
Send an mp4 video file to the chat .
50,231
def send_document ( self , document , caption = "" , ** options ) : return self . bot . api_call ( "sendDocument" , chat_id = str ( self . id ) , document = document , caption = caption , ** options )
Send a general file .
50,232
def send_voice ( self , voice , ** options ) : return self . bot . api_call ( "sendVoice" , chat_id = str ( self . id ) , voice = voice , ** options )
Send an OPUS - encoded . ogg audio file .
50,233
def send_location ( self , latitude , longitude , ** options ) : return self . bot . api_call ( "sendLocation" , chat_id = self . id , latitude = latitude , longitude = longitude , ** options )
Send a point on the map .
50,234
def send_venue ( self , latitude , longitude , title , address , ** options ) : return self . bot . api_call ( "sendVenue" , chat_id = self . id , latitude = latitude , longitude = longitude , title = title , address = address , ** options )
Send information about a venue .
50,235
def send_contact ( self , phone_number , first_name , ** options ) : return self . bot . api_call ( "sendContact" , chat_id = self . id , phone_number = phone_number , first_name = first_name , ** options )
Send phone contacts .
50,236
def send_chat_action ( self , action ) : return self . bot . api_call ( "sendChatAction" , chat_id = self . id , action = action )
Send a chat action to tell the user that something is happening on the bot s side .
50,237
def send_media_group ( self , media : str , disable_notification : bool = False , reply_to_message_id : int = None , ** options ) : return self . bot . api_call ( "sendMediaGroup" , chat_id = str ( self . id ) , media = media , disable_notification = disable_notification , reply_to_message_id = reply_to_message_id , ** options )
Send a group of photos or videos as an album
50,238
def forward_message ( self , from_chat_id , message_id ) : return self . bot . api_call ( "forwardMessage" , chat_id = self . id , from_chat_id = from_chat_id , message_id = message_id , )
Forward a message from another chat to this chat .
50,239
def kick_chat_member ( self , user_id ) : return self . bot . api_call ( "kickChatMember" , chat_id = self . id , user_id = user_id )
Use this method to kick a user from a group or a supergroup . The bot must be an administrator in the group for this to work .
50,240
def unban_chat_member ( self , user_id ) : return self . bot . api_call ( "unbanChatMember" , chat_id = self . id , user_id = user_id )
Use this method to unban a previously kicked user in a supergroup . The bot must be an administrator in the group for this to work .
50,241
def delete_message ( self , message_id ) : return self . bot . api_call ( "deleteMessage" , chat_id = self . id , message_id = message_id )
Delete message from this chat
50,242
def from_message ( bot , message ) : chat = message [ "chat" ] return Chat ( bot , chat [ "id" ] , chat [ "type" ] , message )
Create a Chat object from a message .
50,243
def run ( self , timeout = POD_RUN_WAIT_TIMEOUT_SECONDS ) : if not isinstance ( timeout , int ) : raise SyntaxError ( "K8sCronJob.run() timeout: [ {} ] is invalid." ) if len ( self . active ) : raise CronJobAlreadyRunningException ( "K8sCronJob.run() failed: CronJob: [ {} ] " "has [ {} ] active Jobs currently." . format ( self . name , len ( self . active ) ) ) self . suspend = True self . update ( ) pod = self . pod if timeout : self . POD_RUN_WAIT_TIMEOUT_SECONDS = timeout try : pod . create ( ) start_time = time . time ( ) while pod . phase not in [ 'Succeeded' , 'Failed' ] : pod . get ( ) time . sleep ( 2 ) self . _check_timeout ( start_time ) except Exception as err : raise CronJobRunException ( "K8sCronJob.run() failed: {}" . format ( err ) ) finally : pod . delete ( ) self . suspend = False self . update ( )
Forces a K8sCronJob to run immediately .
50,244
def scale ( config = None , name = None , replicas = None ) : rc = K8sReplicationController ( config = config , name = name ) . get ( ) rc . desired_replicas = replicas rc . update ( ) rc . _wait_for_desired_replicas ( ) return rc
Scales the number of pods in the specified K8sReplicationController to the desired replica count .
50,245
def rolling_update ( config = None , name = None , image = None , container_name = None , rc_new = None ) : if name is None : raise SyntaxError ( 'K8sReplicationController: name: [ {0} ] cannot be None.' . format ( name ) ) if image is None and rc_new is None : raise SyntaxError ( "K8sReplicationController: please specify either 'image' or 'rc_new'" ) if container_name is not None and image is not None and rc_new is not None : raise SyntaxError ( 'K8sReplicationController: rc_new is mutually exclusive with an (container_name, image) pair.' ) return K8sReplicationController . _rolling_update_init ( config = config , name = name , image = image , container_name = container_name , rc_new = rc_new )
Performs a simple rolling update of a ReplicationController .
50,246
def restart ( self ) : rc_new = copy . deepcopy ( self ) return K8sReplicationController . rolling_update ( config = self . config , name = self . name , rc_new = rc_new )
Restart will force a rolling update of the current ReplicationController to the current revision . This essentially spawns a fresh copy of the RC and its pods . Useful when something is misbehaving .
50,247
def _has_local_storage ( self , pod = None ) : for vol in pod . volumes : if vol . emptyDir is not None : return True return False
Determines if a K8sPod has any local storage susceptible to be lost .
50,248
def rollback ( self , revision = None , annotations = None ) : rollback = DeploymentRollback ( ) rollback . name = self . name rollback_config = RollbackConfig ( ) if revision is not None : rollback_config . revision = revision else : current_revision = int ( self . get_annotation ( self . REVISION_ANNOTATION ) ) rev = max ( current_revision - 1 , 0 ) rollback_config . revision = rev rollback . rollback_to = rollback_config if annotations is not None : rollback . updated_annotations = annotations url = '{base}/{name}/rollback' . format ( base = self . base_url , name = self . name ) state = self . request ( method = 'POST' , url = url , data = rollback . serialize ( ) ) if not state . get ( 'success' ) : status = state . get ( 'status' , '' ) reason = state . get ( 'data' , dict ( ) ) . get ( 'message' , None ) message = 'K8sDeployment: ROLLBACK failed : HTTP {0} : {1}' . format ( status , reason ) raise BadRequestException ( message ) time . sleep ( 0.2 ) self . _wait_for_desired_replicas ( ) self . get ( ) return self
Performs a rollback of the Deployment .
50,249
def roll_dice ( ) : sums = 0 while True : roll = random . randint ( 1 , 6 ) sums += roll if ( input ( "Enter y or n to continue: " ) . upper ( ) ) == 'N' : print ( sums ) break
Roll a die .
50,250
def get_raw_file ( ) : with open ( "{0}/dividers.txt" . format ( os . path . abspath ( os . path . dirname ( __file__ ) ) ) , mode = "r" ) as file_handler : lines = file_handler . readlines ( ) lines [ 35 ] = str ( random . randint ( 0 , 999999999999 ) ) return lines
Get the raw divider file in a string array .
50,251
def reduce_to_unit ( divider ) : for unit_size in range ( 1 , len ( divider ) // 2 + 1 ) : length = len ( divider ) unit = divider [ : unit_size ] divider_item = divider [ : unit_size * ( length // unit_size ) ] if unit * ( length // unit_size ) == divider_item : return unit return divider
Reduce a repeating divider to the smallest repeating unit possible .
50,252
def splitter ( div , * args ) : retstr = "" if type ( div ) is int : div = theArray ( ) [ div ] if len ( args ) == 1 : return args [ 0 ] for s in args : retstr += s retstr += "\n" retstr += div retstr += "\n" return retstr
Split text with dividers easily .
50,253
def area4info ( ) : name = "area4" author = "https://github.com/RDIL" author_email = rdillib . get_email ( ) description = "Dividers in Python, the easy way!" return "{0}: {1}\n{2}: {3}\n{4}: {5}\n{6}: {7}" . format ( "Name:" , name , "Author:" , author , "Author Email:" , author_email , "Description:" , description )
Get some info about the package .
50,254
def make_div ( unit , length = 24 , start = '' , end = '' , literal_unit = False ) : if not literal_unit : unit = utils . reduce_to_unit ( unit ) repeats = ( length - len ( start + end ) ) // len ( unit ) return ( start + unit * repeats + end ) [ 0 : length ]
Generate and return a custom divider .
50,255
def localpath ( * args ) : plist = [ ROOT ] + list ( args ) return os . path . abspath ( pjoin ( * plist ) )
construct an absolute path from a list relative to the root pycapnp directory
50,256
def fetch_libcapnp ( savedir , url = None ) : is_preconfigured = False if url is None : url = libcapnp_url is_preconfigured = True dest = pjoin ( savedir , 'capnproto-c++' ) if os . path . exists ( dest ) : info ( "already have %s" % dest ) return fname = fetch_archive ( savedir , url , libcapnp ) tf = tarfile . open ( fname ) with_version = pjoin ( savedir , tf . firstmember . path ) tf . extractall ( savedir ) tf . close ( ) if is_preconfigured : shutil . move ( with_version , dest ) else : cpp_dir = os . path . join ( with_version , 'c++' ) conf = Popen ( [ 'autoreconf' , '-i' ] , cwd = cpp_dir ) returncode = conf . wait ( ) if returncode != 0 : raise RuntimeError ( 'Autoreconf failed. Make sure autotools are installed on your system.' ) shutil . move ( cpp_dir , dest )
download and extract libcapnp
50,257
def stage_platform_hpp ( capnproot ) : platform_hpp = pjoin ( capnproot , 'src' , 'platform.hpp' ) if os . path . exists ( platform_hpp ) : info ( "already have platform.hpp" ) return if os . name == 'nt' : platform_dir = pjoin ( capnproot , 'builds' , 'msvc' ) else : info ( "attempting ./configure to generate platform.hpp" ) p = Popen ( './configure' , cwd = capnproot , shell = True , stdout = PIPE , stderr = PIPE , ) o , e = p . communicate ( ) if p . returncode : warn ( "failed to configure libcapnp:\n%s" % e ) if sys . platform == 'darwin' : platform_dir = pjoin ( HERE , 'include_darwin' ) elif sys . platform . startswith ( 'freebsd' ) : platform_dir = pjoin ( HERE , 'include_freebsd' ) elif sys . platform . startswith ( 'linux-armv' ) : platform_dir = pjoin ( HERE , 'include_linux-armv' ) else : platform_dir = pjoin ( HERE , 'include_linux' ) else : return info ( "staging platform.hpp from: %s" % platform_dir ) shutil . copy ( pjoin ( platform_dir , 'platform.hpp' ) , platform_hpp )
stage platform . hpp into libcapnp sources
50,258
def _find_library ( lib , path ) : for d in path [ : : - 1 ] : real_lib = os . path . join ( d , lib ) if os . path . exists ( real_lib ) : return real_lib
Find a library
50,259
def get_output_error ( cmd ) : if not isinstance ( cmd , list ) : cmd = [ cmd ] logging . debug ( "Running: %s" , ' ' . join ( map ( quote , cmd ) ) ) try : result = Popen ( cmd , stdout = PIPE , stderr = PIPE ) except IOError as e : return - 1 , u ( '' ) , u ( 'Failed to run %r: %r' % ( cmd , e ) ) so , se = result . communicate ( ) so = so . decode ( 'utf8' , 'replace' ) se = se . decode ( 'utf8' , 'replace' ) return result . returncode , so , se
Return the exit status stdout stderr of a command
50,260
def load_config ( name , base = 'conf' ) : fname = pjoin ( base , name + '.json' ) if not os . path . exists ( fname ) : return { } try : with open ( fname ) as f : cfg = json . load ( f ) except Exception as e : warn ( "Couldn't load %s: %s" % ( fname , e ) ) cfg = { } return cfg
Load config dict from JSON
50,261
def save_config ( name , data , base = 'conf' ) : if not os . path . exists ( base ) : os . mkdir ( base ) fname = pjoin ( base , name + '.json' ) with open ( fname , 'w' ) as f : json . dump ( data , f , indent = 2 )
Save config dict to JSON
50,262
def get_eargs ( ) : settings = { } zmq = os . environ . get ( "ZMQ_PREFIX" , None ) if zmq is not None : debug ( "Found environ var ZMQ_PREFIX=%s" % zmq ) settings [ 'zmq_prefix' ] = zmq return settings
Look for options in environment vars
50,263
def cfg2dict ( cfg ) : d = { } for section in cfg . sections ( ) : d [ section ] = dict ( cfg . items ( section ) ) return d
turn a ConfigParser into a nested dict because ConfigParser objects are dumb .
50,264
def get_cfg_args ( ) : if not os . path . exists ( 'setup.cfg' ) : return { } cfg = ConfigParser ( ) cfg . read ( 'setup.cfg' ) cfg = cfg2dict ( cfg ) g = cfg . setdefault ( 'global' , { } ) for key in [ 'libzmq_extension' , 'bundle_libzmq_dylib' , 'no_libzmq_extension' , 'have_sys_un_h' , 'skip_check_zmq' , ] : if key in g : g [ key ] = eval ( g [ key ] ) cfg . update ( cfg . pop ( 'global' ) ) return cfg
Look for options in setup . cfg
50,265
def config_from_prefix ( prefix ) : settings = { } if prefix . lower ( ) in ( 'default' , 'auto' , '' ) : settings [ 'zmq_prefix' ] = '' settings [ 'libzmq_extension' ] = False settings [ 'no_libzmq_extension' ] = False elif prefix . lower ( ) in ( 'bundled' , 'extension' ) : settings [ 'zmq_prefix' ] = '' settings [ 'libzmq_extension' ] = True settings [ 'no_libzmq_extension' ] = False else : settings [ 'zmq_prefix' ] = prefix settings [ 'libzmq_extension' ] = False settings [ 'no_libzmq_extension' ] = True return settings
Get config from zmq prefix
50,266
def merge ( into , d ) : if isinstance ( into , dict ) : for key in d . keys ( ) : if key not in into : into [ key ] = d [ key ] else : into [ key ] = merge ( into [ key ] , d [ key ] ) return into elif isinstance ( into , list ) : return into + d else : return d
merge two containers : into is updated , d has priority
50,267
def discover_settings ( conf_base = None ) : settings = { 'zmq_prefix' : '' , 'libzmq_extension' : False , 'no_libzmq_extension' : False , 'skip_check_zmq' : False , 'build_ext' : { } , 'bdist_egg' : { } , } if sys . platform . startswith ( 'win' ) : settings [ 'have_sys_un_h' ] = False if conf_base : merge ( settings , load_config ( 'config' , conf_base ) ) merge ( settings , get_cfg_args ( ) ) merge ( settings , get_eargs ( ) ) return settings
Discover custom settings for ZMQ path
50,268
def call ( self , params , _context , ** kwargs ) : assert len ( params ) == self . paramCount return evaluate_impl ( self . body , params ) . then ( lambda value : setattr ( _context . results , 'value' , value ) )
Note that we re returning a Promise object here and bypassing the helper functionality that normally sets the results struct from the returned object . Instead we set _context . results directly inside of another promise
50,269
def detect_version ( basedir , compiler = None , ** compiler_attrs ) : if compiler is None : compiler = get_default_compiler ( ) cfile = pjoin ( basedir , 'vers.cpp' ) shutil . copy ( pjoin ( os . path . dirname ( __file__ ) , 'vers.cpp' ) , cfile ) if sys . platform . startswith ( 'linux' ) : cc = ccompiler . new_compiler ( compiler = compiler ) cc . output_dir = basedir if not cc . has_function ( 'timer_create' ) : if 'libraries' not in compiler_attrs : compiler_attrs [ 'libraries' ] = [ ] compiler_attrs [ 'libraries' ] . append ( 'rt' ) cc = get_compiler ( compiler = compiler , ** compiler_attrs ) efile = test_compilation ( cfile , compiler = cc ) patch_lib_paths ( efile , cc . library_dirs ) rc , so , se = get_output_error ( [ efile ] ) if rc : msg = "Error running version detection script:\n%s\n%s" % ( so , se ) logging . error ( msg ) raise IOError ( msg ) handlers = { 'vers' : lambda val : tuple ( int ( v ) for v in val . split ( '.' ) ) } props = { } for line in ( x for x in so . split ( '\n' ) if x ) : key , val = line . split ( ':' ) props [ key ] = handlers [ key ] ( val ) return props
Compile link & execute a test program in empty directory basedir .
50,270
def generate_file ( fname , ns_func , dest_dir = "." ) : with open ( pjoin ( root , 'buildutils' , 'templates' , '%s' % fname ) , 'r' ) as f : tpl = f . read ( ) out = tpl . format ( ** ns_func ( ) ) dest = pjoin ( dest_dir , fname ) info ( "generating %s from template" % dest ) with open ( dest , 'w' ) as f : f . write ( out )
generate a constants file from its template
50,271
def render_constants ( ) : generate_file ( "constant_enums.pxi" , cython_enums , pjoin ( root , 'zmq' , 'backend' , 'cython' ) ) generate_file ( "constants.pxi" , constants_pyx , pjoin ( root , 'zmq' , 'backend' , 'cython' ) ) generate_file ( "zmq_constants.h" , ifndefs , pjoin ( root , 'zmq' , 'utils' ) )
render generated constant files from templates
50,272
def from_time ( cls , source ) : return cls ( hours = source . hour , minutes = source . minute , seconds = source . second , milliseconds = source . microsecond // 1000 )
datetime . time - > SubRipTime corresponding to time object
50,273
def to_time ( self ) : return time ( self . hours , self . minutes , self . seconds , self . milliseconds * 1000 )
Convert SubRipTime instance into a pure datetime . time object
50,274
def export_pages ( root_page , export_unpublished = False ) : pages = Page . objects . descendant_of ( root_page , inclusive = True ) . order_by ( 'path' ) . specific ( ) if not export_unpublished : pages = pages . filter ( live = True ) page_data = [ ] exported_paths = set ( ) for ( i , page ) in enumerate ( pages ) : parent_path = page . path [ : - ( Page . steplen ) ] if i == 0 or ( parent_path in exported_paths ) : page_data . append ( { 'content' : json . loads ( page . to_json ( ) ) , 'model' : page . content_type . model , 'app_label' : page . content_type . app_label , } ) exported_paths . add ( page . path ) return { 'pages' : page_data }
Create a JSON definition of part of a site s page tree starting from root_page and descending into its descendants
50,275
def import_from_api ( request ) : if request . method == 'POST' : form = ImportFromAPIForm ( request . POST ) if form . is_valid ( ) : base_url = re . sub ( r'\/$' , '' , form . cleaned_data [ 'source_site_base_url' ] ) import_url = ( base_url + reverse ( 'wagtailimportexport:export' , args = [ form . cleaned_data [ 'source_page_id' ] ] ) ) r = requests . get ( import_url ) import_data = r . json ( ) parent_page = form . cleaned_data [ 'parent_page' ] try : page_count = import_pages ( import_data , parent_page ) except LookupError as e : messages . error ( request , _ ( "Import failed: %(reason)s" ) % { 'reason' : e } ) else : messages . success ( request , ungettext ( "%(count)s page imported." , "%(count)s pages imported." , page_count ) % { 'count' : page_count } ) return redirect ( 'wagtailadmin_explore' , parent_page . pk ) else : form = ImportFromAPIForm ( ) return render ( request , 'wagtailimportexport/import_from_api.html' , { 'form' : form , } )
Import a part of a source site s page tree via a direct API request from this Wagtail Admin to the source site
50,276
def import_from_file(request):
    """Import part of a source site's page tree from a JSON file that was
    exported to the user's filesystem by the source site's Wagtail Admin.

    On GET, renders an empty upload form.  On a valid POST, parses the
    uploaded JSON and creates the pages under the chosen parent page,
    reporting success or failure via Django messages.
    """
    if request.method != 'POST':
        return render(request, 'wagtailimportexport/import_from_file.html',
                      {'form': ImportFromFileForm()})

    form = ImportFromFileForm(request.POST, request.FILES)
    if form.is_valid():
        # utf-8-sig tolerates a leading BOM written by some editors/OSes.
        raw = form.cleaned_data['file'].read().decode('utf-8-sig')
        import_data = json.loads(raw)
        parent_page = form.cleaned_data['parent_page']
        try:
            page_count = import_pages(import_data, parent_page)
        except LookupError as e:
            messages.error(request, _("Import failed: %(reason)s") % {'reason': e})
        else:
            messages.success(request, ungettext(
                "%(count)s page imported.",
                "%(count)s pages imported.",
                page_count) % {'count': page_count})
            return redirect('wagtailadmin_explore', parent_page.pk)
    # Invalid form or import error: re-render with the bound form.
    return render(request, 'wagtailimportexport/import_from_file.html', {'form': form})
Import a part of a source site's page tree via an import of a JSON file exported to a user's filesystem from the source site's Wagtail Admin
50,277
def export_to_file(request):
    """Export part of this site's page tree (including unpublished pages)
    as a JSON file downloaded to the user's filesystem, for subsequent
    import into a destination site's Wagtail Admin.
    """
    if request.method != 'POST':
        return render(request, 'wagtailimportexport/export_to_file.html',
                      {'form': ExportForm()})

    form = ExportForm(request.POST)
    if form.is_valid():
        payload = export_pages(form.cleaned_data['root_page'], export_unpublished=True)
        response = JsonResponse(payload)
        # Force a download rather than inline display.
        response['Content-Disposition'] = 'attachment; filename="export.json"'
        return response
    return render(request, 'wagtailimportexport/export_to_file.html', {'form': form})
Export a part of this source site's page tree to a JSON file on this user's filesystem for subsequent import in a destination site's Wagtail Admin
50,278
def export(request, page_id, export_unpublished=False):
    """API endpoint of this source site: export the part of the page tree
    rooted at ``page_id`` as JSON.

    Unless ``export_unpublished`` is set, only a live root page is
    accepted; an unknown (or non-live) page yields an error payload.
    """
    # Only require liveness when unpublished pages are excluded.
    lookup = {'id': page_id} if export_unpublished else {'id': page_id, 'live': True}
    try:
        root_page = Page.objects.get(**lookup)
    except Page.DoesNotExist:
        return JsonResponse({'error': _('page not found')})
    payload = export_pages(root_page, export_unpublished=export_unpublished)
    return JsonResponse(payload)
API endpoint of this source site to export a part of the page tree rooted at page_id
50,279
def import_pages(import_data, parent_page):
    """Create pages from a JSON export (see ``export_pages``) under
    ``parent_page``.

    Works in two passes over ``import_data['pages']`` (which must list
    every page after its parent, as ``export_pages`` guarantees):

    1. Create bare ``Page`` rows to establish the tree under
       ``parent_page``, recording mappings from the original paths and
       ids to the newly created pages.
    2. Re-deserialize each record as its specific model, attach it to the
       base ``Page`` from pass 1, fix up inter-page references, and save.

    Returns the number of page records processed.  ``apps.get_model`` may
    raise ``LookupError`` for records whose model is not installed.
    """
    pages_by_original_path = {}
    pages_by_original_id = {}
    page_content_type = ContentType.objects.get_for_model(Page)
    # Pass 1: rebuild the tree structure out of generic Page instances.
    for (i, page_record) in enumerate(import_data['pages']):
        page = Page.from_serializable_data(page_record['content'])
        original_path = page.path
        original_id = page.id
        # Clear identity/tree fields so add_child() assigns fresh ones.
        page.id = None
        page.path = None
        page.depth = None
        page.numchild = 0
        page.url_path = None
        page.content_type = page_content_type
        if i == 0:
            # First record is the exported root: attach it to parent_page.
            parent_page.add_child(instance=page)
        else:
            # Locate the already-imported parent via the original
            # materialized path (path minus the last steplen characters).
            parent_path = original_path[:-(Page.steplen)]
            pages_by_original_path[parent_path].add_child(instance=page)
        pages_by_original_path[original_path] = page
        pages_by_original_id[original_id] = page
    # Pass 2: overlay the specific page content onto the created pages.
    for (i, page_record) in enumerate(import_data['pages']):
        model = apps.get_model(page_record['app_label'], page_record['model'])
        # FK checks are relaxed: referenced objects may not exist here.
        specific_page = model.from_serializable_data(page_record['content'], check_fks=False, strict_fks=False)
        base_page = pages_by_original_id[specific_page.id]
        specific_page.page_ptr = base_page
        # Copy the base page's fields (new id/path/etc.) over the
        # deserialized values.  NOTE(review): the order — page_ptr first,
        # then __dict__.update — appears deliberate; do not reorder.
        specific_page.__dict__.update(base_page.__dict__)
        specific_page.content_type = ContentType.objects.get_for_model(model)
        # Remap in-content page references from original ids to new pages.
        update_page_references(specific_page, pages_by_original_id)
        specific_page.save()
    return len(import_data['pages'])
Take a JSON export of part of a source site's page tree and create those pages under the parent page
50,280
def remove_all_filters(self):
    """Reset every tweet filter to its inactive state.

    Value filters (attitude, source) become None; boolean filters
    (question, link) become False.
    """
    self.attitude_filter = None
    self.source_filter = None
    self.question_filter = False
    self.link_filter = False
Removes all filters
50,281
def set_source_filter(self, source):
    """Restrict results to tweets posted via the given source client.

    Raises TwitterSearchException(1009) unless ``source`` is a string of
    at least two characters.
    """
    string_types = str if py3k else basestring
    if not isinstance(source, string_types) or len(source) < 2:
        raise TwitterSearchException(1009)
    self.source_filter = source
Only search for tweets entered via given source
50,282
def add_keyword(self, word, or_operator=False):
    """Append a keyword (string) or several keywords (list/tuple) to the
    current keyword list.

    Multi-word terms are wrapped in double quotes so they are matched as
    exact phrases.  With ``or_operator=True`` a list is joined into one
    ``a OR b`` term.  Raises TwitterSearchException(1000) for other types
    or strings shorter than two characters.
    """
    def quote(term):
        # Phrases need quoting to count as a single search term.
        return term if " " not in term else '"%s"' % term

    if isinstance(word, str if py3k else basestring) and len(word) >= 2:
        self.searchterms.append(quote(word))
    elif isinstance(word, (tuple, list)):
        quoted = [quote(i) for i in word]
        self.searchterms += [" OR ".join(quoted)] if or_operator else quoted
    else:
        raise TwitterSearchException(1000)
Adds a given string or list to the current keyword list
50,283
def set_keywords(self, words, or_operator=False):
    """Replace the keyword list with the given list or tuple.

    Multi-word terms are quoted to match as exact phrases.  With
    ``or_operator=True`` the whole list collapses into a single
    ``a OR b`` term.  Raises TwitterSearchException(1001) if ``words``
    is not a list or tuple.
    """
    if not isinstance(words, (tuple, list)):
        raise TwitterSearchException(1001)
    quoted = ['"%s"' % term if " " in term else term for term in words]
    self.searchterms = [" OR ".join(quoted)] if or_operator else quoted
Sets a given list as the new keyword list
50,284
def set_language(self, lang):
    """Set the ``lang`` query parameter so only tweets in the given
    ISO 639-1 language are fetched.

    Raises TwitterSearchException(1002) if ``lang`` is not a known
    ISO 639-1 code.
    """
    if lang not in self.iso_6391:
        raise TwitterSearchException(1002)
    self.arguments.update({'lang': '%s' % lang})
Sets the 'lang' parameter, used to fetch only tweets within a certain language
50,285
def set_callback(self, func):
    """Set the ``callback`` parameter; when supplied, the response uses
    the JSONP format with a callback of the given name.

    Raises TwitterSearchException(1006) unless ``func`` is a non-empty
    string.
    """
    valid = isinstance(func, str if py3k else basestring) and func
    if not valid:
        raise TwitterSearchException(1006)
    self.arguments.update({'callback': '%s' % func})
Sets the 'callback' parameter. If supplied, the response will use the JSONP format with a callback of the given name
50,286
def set_until(self, date):
    """Set the ``until`` parameter so only tweets generated before the
    given date are returned.

    Raises TwitterSearchException(1007) unless ``date`` is a
    ``datetime.date`` that is not in the future.
    """
    if not isinstance(date, datetime.date) or date > datetime.date.today():
        raise TwitterSearchException(1007)
    self.arguments.update({'until': '%s' % date.strftime('%Y-%m-%d')})
Sets the 'until' parameter, used to return only tweets generated before the given date
50,287
def set_proxy(self, proxy):
    """Set an HTTPS proxy used when querying the Twitter API.

    Raises TwitterSearchException(1009) if ``proxy`` is not a string.
    """
    if not isinstance(proxy, str if py3k else basestring):
        raise TwitterSearchException(1009)
    self.__proxy = proxy
Sets a HTTPS proxy to query the Twitter API
50,288
def get_minimal_id(self):
    """Return the smallest tweet ID in the current response, minus one.

    Suitable as a ``max_id`` value when paging backwards through results.
    Raises TwitterSearchException(1013) if no response is available yet.
    """
    if not self.__response:
        raise TwitterSearchException(1013)
    # Search responses nest tweets under 'statuses'; timeline responses
    # are a bare list of tweets.
    if self.__order_is_search:
        tweets = self.__response['content']['statuses']
    else:
        tweets = self.__response['content']
    return min(tweet['id'] for tweet in tweets) - 1
Returns the minimal tweet ID of the current response
50,289
def get_amount_of_tweets(self):
    """Return how many tweets the current response page contains.

    Raises TwitterSearchException(1013) if no response is available yet.
    """
    if not self.__response:
        raise TwitterSearchException(1013)
    content = self.__response['content']
    # Search responses wrap tweets in 'statuses'; timelines are bare lists.
    return len(content['statuses'] if self.__order_is_search else content)
Returns current amount of tweets available within this instance
50,290
def set_count(self, cnt):
    """Set the ``count`` parameter: the number of tweets returned per
    page.  Maximum (and API default) is 100.

    Raises TwitterSearchException(1004) for non-int or out-of-range
    values.
    """
    if not isinstance(cnt, int) or not 0 < cnt <= 100:
        raise TwitterSearchException(1004)
    self.arguments.update({'count': '%s' % cnt})
Sets the 'count' parameter, used to define the number of tweets to return per page. Maximum and default value is 100
50,291
def set_include_entities(self, include):
    """Set the ``include_entities`` parameter to include or exclude the
    entities node within the results.

    Raises TwitterSearchException(1008) if ``include`` is not a bool.
    """
    if not isinstance(include, bool):
        raise TwitterSearchException(1008)
    # The API expects the lowercase strings 'true'/'false'.
    self.arguments.update({'include_entities': str(include).lower()})
Sets the 'include_entities' parameter to either include or exclude the entities node within the results
50,292
def set_trim_user(self, trim):
    """Set the ``trim_user`` parameter.  When True, each tweet returned
    in a timeline includes a user object with only the author's
    numerical ID.

    Raises TwitterSearchException(1008) if ``trim`` is not a bool.
    """
    if not isinstance(trim, bool):
        raise TwitterSearchException(1008)
    # The API expects the lowercase strings 'true'/'false'.
    self.arguments.update({'trim_user': str(trim).lower()})
Sets the 'trim_user' parameter. When set to True, each tweet returned in a timeline will include a user object containing only the status author's numerical ID
50,293
def set_include_rts(self, rts):
    """Set the ``include_rts`` parameter.  When False, native retweets
    are stripped from the returned timeline.

    Raises TwitterSearchException(1008) if ``rts`` is not a bool.
    """
    if not isinstance(rts, bool):
        raise TwitterSearchException(1008)
    # The API expects the lowercase strings 'true'/'false'.
    self.arguments.update({'include_rts': str(rts).lower()})
Sets the 'include_rts' parameter. When set to False, any native retweets are stripped from the returned timeline
50,294
def set_exclude_replies(self, exclude):
    """Set the ``exclude_replies`` parameter, used to prevent replies
    from appearing in the returned timeline.

    Raises TwitterSearchException(1008) if ``exclude`` is not a bool.
    """
    if not isinstance(exclude, bool):
        raise TwitterSearchException(1008)
    # The API expects the lowercase strings 'true'/'false'.
    self.arguments.update({'exclude_replies': str(exclude).lower()})
Sets the 'exclude_replies' parameter, used to prevent replies from appearing in the returned timeline
50,295
def set_contributor_details(self, contdetails):
    """Set the ``contributor_details`` parameter.  When True, the
    contributors element of a status includes the contributor's
    screen_name in addition to the default user_id.

    Raises TwitterSearchException(1008) if ``contdetails`` is not a bool.
    """
    if not isinstance(contdetails, bool):
        raise TwitterSearchException(1008)
    # The API expects the lowercase strings 'true'/'false'.
    self.arguments.update({'contributor_details': str(contdetails).lower()})
Sets the 'contributor_details' parameter, used to enhance the contributors element of the status response to include the screen_name of the contributor. By default only the user_id of the contributor is included
50,296
def url(context, view, subdomain=UNSET, *args, **kwargs):
    """Resolve a URL in a template using subdomain-based URL resolution.

    When no ``subdomain`` argument is given, the subdomain is taken from
    the current request (if one is present in the template context).  An
    explicit empty string selects the "no subdomain" (bare domain)
    urlconf by normalizing to None.
    """
    if subdomain is UNSET:
        request = context.get('request')
        if request is not None:
            subdomain = getattr(request, 'subdomain', None)
        else:
            subdomain = None
    elif subdomain == '':
        # Fix: the original used "subdomain is ''" — identity comparison
        # with a string literal is implementation-dependent (and a
        # SyntaxWarning on modern CPython); equality is the correct test.
        subdomain = None
    return reverse(view, subdomain=subdomain, args=args, kwargs=kwargs)
Resolves a URL in a template using subdomain - based URL resolution .
50,297
def urljoin(domain, path=None, scheme=None):
    """Join a domain, path and scheme into a full URL.

    When ``scheme`` is None, ``settings.DEFAULT_URL_SCHEME`` is used,
    falling back to ``'http'``.
    """
    effective_scheme = scheme if scheme is not None else getattr(settings, 'DEFAULT_URL_SCHEME', 'http')
    components = (effective_scheme, domain, path or '', None, None, None)
    return urlunparse(components)
Joins a domain path and scheme part together returning a full URL .
50,298
def process_request(self, request):
    """Attach a ``subdomain`` attribute to the request.

    The subdomain is whatever precedes the site's configured domain in
    the request's Host header; it is None when the host is the bare
    domain, or when the host does not belong to the domain at all (in
    which case a warning is logged).
    """
    domain, host = map(lower, (self.get_domain_for_request(request), request.get_host()))
    # Optional "sub." prefix, the configured domain, optional ":port".
    pattern = r'^(?:(?P<subdomain>.*?)\.)?%s(?::.*)?$' % re.escape(domain)
    matches = re.match(pattern, host)
    if matches is None:
        request.subdomain = None
        logger.warning(
            'The host %s does not belong to the domain %s, '
            'unable to identify the subdomain for this request',
            request.get_host(), domain)
    else:
        request.subdomain = matches.group('subdomain')
Adds a subdomain attribute to the request parameter .
50,299
def process_request(self, request):
    """Set ``request.urlconf`` to the urlconf registered for the
    request's subdomain in ``settings.SUBDOMAIN_URLCONFS``, if any.

    Delegates to the parent middleware first so that
    ``request.subdomain`` is populated before the lookup.
    """
    super(SubdomainURLRoutingMiddleware, self).process_request(request)
    subdomain = getattr(request, 'subdomain', UNSET)
    if subdomain is UNSET:
        return
    urlconf = settings.SUBDOMAIN_URLCONFS.get(subdomain)
    if urlconf is not None:
        logger.debug("Using urlconf %s for subdomain: %s",
                     repr(urlconf), repr(subdomain))
        request.urlconf = urlconf
Sets the current request s urlconf attribute to the urlconf associated with the subdomain if it is listed in settings . SUBDOMAIN_URLCONFS .