idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
54,700
def read_input(self, input_cls, filename, **kwargs):
    """Read in input and do some minimal preformatting.

    input_cls -- the class to use to read the input
    filename  -- input filename
    """
    reader = input_cls()
    reader.read_input(filename)
    return reader.get_data()
Read in input and do some minimal preformatting input_cls - the class to use to read the input filename - input filename
54,701
def reformat_file ( self , input_file , input_format , output_format ) : if input_file is None or input_format is None : return None try : input_cls = self . find_input ( input_format ) input_inst = input_cls ( ) except TypeError : return None try : input_inst . read_input ( self . absolute_filepath ( input_file ) ) ex...
Reformat input data files to a format the tasks can use
54,702
def reformat_input ( self , ** kwargs ) : reformatted_input = { } needed_formats = [ ] for task_cls in self . tasks : needed_formats . append ( task_cls . data_format ) self . needed_formats = list ( set ( needed_formats ) ) for output_format in self . needed_formats : reformatted_input . update ( { output_format : { '...
Reformat input data
54,703
def _create_modulename ( cdef_sources , source , sys_version ) : key = '\x00' . join ( [ sys_version [ : 3 ] , source , cdef_sources ] ) key = key . encode ( 'utf-8' ) k1 = hex ( binascii . crc32 ( key [ 0 : : 2 ] ) & 0xffffffff ) k1 = k1 . lstrip ( '0x' ) . rstrip ( 'L' ) k2 = hex ( binascii . crc32 ( key [ 1 : : 2 ] ...
This is the same as CFFI's _create_modulename, except we don't include the CFFI version.
54,704
def is_authenticated_with_token ( self ) : server_login_response = post_log_in ( self , keyid = self . user_fingerprint , user_token_result = self . user_auth_token ) if not check_server_login_stage2_response ( server_login_response ) : raise GPGAuthStage2Exception ( "Login endpoint wrongly formatted" ) self . cookies ...
GPGAuth Stage 2
54,705
def save(self, obj, run_id):
    """Save a workflow.

    obj    -- instance of a workflow to save
    run_id -- unique id to give the run
    """
    save_id = self.generate_save_identifier(obj, run_id)
    self.store.save(obj, save_id)
Save a workflow obj - instance of a workflow to save run_id - unique id to give the run
54,706
def setup_tasks ( self , tasks ) : task_classes = [ ] for task in tasks : category , namespace , name = task . split ( "." ) try : cls = find_in_registry ( category = category , namespace = namespace , name = name ) [ 0 ] except TypeError : log . error ( "Could not find the task with category.namespace.name {0}" . form...
Find task classes from category . namespace . name strings tasks - list of strings
54,707
def initialize_workflow ( self , workflow ) : self . workflow = workflow ( ) self . workflow . tasks = self . tasks self . workflow . input_file = self . input_file self . workflow . input_format = self . input_format self . workflow . target_file = self . target_file self . workflow . target_format = self . target_for...
Create a workflow workflow - a workflow class
54,708
def reformat_filepath(self, config_file, filename):
    """Convert a path that is relative to the config file into an absolute one.

    Paths already starting with "/" are returned unchanged.
    """
    if filename.startswith("/"):
        return filename
    return self.config_file_format.format(config_file, filename)
Convert relative paths in config file to absolute
54,709
def item_lister ( command , _connection , page_size , page_number , sort_by , sort_order , item_class , result_set , ** kwargs ) : page = page_number while True : item_collection = _connection . get_list ( command , page_size = page_size , page_number = page , sort_by = sort_by , sort_order = sort_order , item_class = ...
A generator function for listing Video and Playlist objects .
54,710
def get_manifest ( self , asset_xml ) : manifest = '<?xml version="1.0" encoding="utf-8"?>' manifest += '<publisher-upload-manifest publisher-id="%s" ' % self . publisher_id manifest += 'preparer="%s" ' % self . preparer if self . report_success : manifest += 'report-success="TRUE">\n' for notify in self . notification...
Construct and return the xml manifest to deliver along with video file .
54,711
def _send_file(self, filename):
    """Send a file via FTP (passive mode) to the configured host.

    The remote name is the basename of *filename*.
    """
    ftp = ftplib.FTP(host=self.host)
    try:
        ftp.login(user=self.user, passwd=self.password)
        ftp.set_pasv(True)
        # open() instead of the Python-2-only file() builtin, and close the
        # local file handle even if the transfer fails.
        with open(filename, 'rb') as fp:
            ftp.storbinary("STOR %s" % os.path.basename(filename), fp)
    finally:
        # Original leaked the FTP connection; always close it.
        ftp.quit()
Sends a file via FTP .
54,712
def _post ( self , data , file_to_upload = None ) : params = { "JSONRPC" : simplejson . dumps ( data ) } req = None if file_to_upload : req = http_core . HttpRequest ( self . write_url ) req . method = 'POST' req . add_body_part ( "JSONRPC" , simplejson . dumps ( data ) , 'text/plain' ) upload = file ( file_to_upload ,...
Make the POST request .
54,713
def _get_response ( self , ** kwargs ) : url = self . read_url + "?output=JSON&token=%s" % self . read_token for key in kwargs : if key and kwargs [ key ] : val = kwargs [ key ] if isinstance ( val , ( list , tuple ) ) : val = "," . join ( val ) url += "&%s=%s" % ( key , val ) self . _api_url = url req = urllib2 . urlo...
Make the GET request .
54,714
def get_list ( self , command , item_class , page_size , page_number , sort_by , sort_order , ** kwargs ) : data = self . _get_response ( command = command , page_size = page_size , page_number = page_number , sort_by = sort_by , sort_order = sort_order , video_fields = None , get_item_count = "true" , ** kwargs ) retu...
Not intended to be called directly, but rather through the ItemResultSet object iterator.
54,715
def setup_formats(self):
    """Inspect this object's methods to record what it can convert from and to.

    Methods named ``from_<fmt>`` mark readable input formats; methods named
    ``to_<fmt>`` mark writable output formats.
    """
    for name in self.get_methods():
        if name.startswith("from_"):
            # Strip only the leading prefix; the original re.sub("from_", ...)
            # removed *every* occurrence of "from_" in the name.
            self.input_formats.append(name[len("from_"):])
        elif name.startswith("to_"):
            self.output_formats.append(name[len("to_"):])
Inspects its methods to see what it can convert from and to
54,716
def get_data(self, data_format):
    """Read the common format and convert it to the requested output format.

    data_format -- the format of the output data
    Raises Exception when *data_format* is not one of self.output_formats.
    """
    if data_format not in self.output_formats:
        message = "Output format {0} not available with this class. Available formats are {1}.".format(
            data_format, self.output_formats)
        raise Exception(message)
    converter = getattr(self, "to_" + data_format)
    return converter()
Reads the common format and converts to output data data_format - the format of the output data . See utils . input . dataformats
54,717
def from_csv(self, input_data):
    """Read csv-format input data and convert it to a list of dicts.

    The first row is treated as the header row; each later row becomes a
    dict mapping header -> cell value.
    """
    converted = []
    headers = None
    for row in input_data:
        if headers is None:
            headers = row
        else:
            converted.append({h: row[j] for (j, h) in enumerate(headers)})
    return converted
Reads csv format input data and converts to json .
54,718
def to_dataframe ( self ) : keys = self . data [ 0 ] . keys ( ) column_list = [ ] for k in keys : key_list = [ ] for i in xrange ( 0 , len ( self . data ) ) : key_list . append ( self . data [ i ] [ k ] ) column_list . append ( key_list ) df = DataFrame ( np . asarray ( column_list ) . transpose ( ) , columns = keys ) ...
Reads the common format self . data and writes out to a dataframe .
54,719
def check_extensions(extensions: Set[str], allow_multifile: bool = False):
    """Check that all extensions in the provided set are valid.

    Validates the container itself, then delegates per-extension
    validation to check_extension.
    """
    check_var(extensions, var_types=set, var_name='extensions')
    for candidate in extensions:
        check_extension(candidate, allow_multifile=allow_multifile)
Utility method to check that all extensions in the provided set are valid
54,720
def are_worth_chaining(parser, to_type: Type[S], converter: Converter[S, T]) -> bool:
    """Check if chaining this parser with the given converter to reach the
    given destination type would bring value. Returns True if so.
    """
    if not parser.can_chain:
        return False
    # A converter producing "any" is worth chaining unless the destination
    # type is itself "any".
    if is_any_type(converter.to_type) and not is_any_type(to_type):
        return True
    # Otherwise: pointless if the converter already produces a subtype.
    return not issubclass(to_type, converter.to_type)
Utility method to check if it makes sense to chain this parser with the given destination type and the given converter to create a parsing chain . Returns True if it brings value to chain them .
54,721
def _execute(self, logger: Logger, options: Dict[str, Dict[str, Any]]) -> T:
    """Perform the parsing (abstract hook).

    Implementing classes should do the parsing here, possibly using
    custom methods of ``self.parser``.

    logger  -- logger to use during parsing
    options -- nested options dictionary
    """
    pass
Implementing classes should perform the parsing here possibly using custom methods of self . parser .
54,722
def create_parsing_plan(self, desired_type: Type[T], filesystem_object: PersistedObject,
                        logger: Logger, options: Dict[str, Dict[str, Any]]) -> ParsingPlan[T]:
    """Create a parsing plan to parse *filesystem_object* into *desired_type*.

    Abstract hook: implementing classes may wish to support additional
    parameters via *options*.
    """
    pass
Creates a parsing plan to parse the given filesystem object into the given desired_type . Implementing classes may wish to support additional parameters .
54,723
def add(self, f_ipaddr, f_macaddr, f_hostname, f_netbios_name, f_engineer,
        f_asset_group, f_confirmed):
    """Add a t_hosts record by delegating to the sender's host_add."""
    return self.send.host_add(
        f_ipaddr,
        f_macaddr,
        f_hostname,
        f_netbios_name,
        f_engineer,
        f_asset_group,
        f_confirmed,
    )
Add a t_hosts record
54,724
def retrieve_data(self):
    """Retrieve historic data as a DataFrame, with zeros replaced by NaN."""
    frame = self.manager.get_historic_data(self.start.date(), self.end.date())
    # Zero values are treated as missing data.
    frame.replace(0, np.nan, inplace=True)
    return frame
Retrieves data as a DataFrame.
54,725
def get_min_risk ( self , weights , cov_matrix ) : def func ( weights ) : return np . matmul ( np . matmul ( weights . transpose ( ) , cov_matrix ) , weights ) def func_deriv ( weights ) : return ( np . matmul ( weights . transpose ( ) , cov_matrix . transpose ( ) ) + np . matmul ( weights . transpose ( ) , cov_matrix ...
Minimizes the variance of a portfolio .
54,726
def get_max_return ( self , weights , returns ) : def func ( weights ) : return np . dot ( weights , returns . values ) * - 1 constraints = ( { 'type' : 'eq' , 'fun' : lambda weights : ( weights . sum ( ) - 1 ) } ) solution = self . solve_minimize ( func , weights , constraints ) max_return = solution . fun * - 1 retur...
Maximizes the returns of a portfolio .
54,727
def efficient_frontier ( self , returns , cov_matrix , min_return , max_return , count ) : columns = [ coin for coin in self . SUPPORTED_COINS ] values = pd . DataFrame ( columns = columns ) weights = [ 1 / len ( self . SUPPORTED_COINS ) ] * len ( self . SUPPORTED_COINS ) def func ( weights ) : return np . matmul ( np ...
Returns a DataFrame of efficient portfolio allocations for count risk indices .
54,728
def solve_minimize ( self , func , weights , constraints , lower_bound = 0.0 , upper_bound = 1.0 , func_deriv = False ) : bounds = ( ( lower_bound , upper_bound ) , ) * len ( self . SUPPORTED_COINS ) return minimize ( fun = func , x0 = weights , jac = func_deriv , bounds = bounds , constraints = constraints , method = ...
Returns the solution to a minimization problem .
54,729
def allocate ( self ) : df = self . manager . get_historic_data ( ) [ self . SUPPORTED_COINS ] change_columns = [ ] for column in df : if column in self . SUPPORTED_COINS : change_column = '{}_change' . format ( column ) values = pd . Series ( ( df [ column ] . shift ( - 1 ) - df [ column ] ) / - df [ column ] . shift ...
Returns an efficient portfolio allocation for the given risk index .
54,730
def handle_default_options ( options ) : if options . settings : os . environ [ 'PERCEPT_SETTINGS_MODULE' ] = options . settings if options . pythonpath : options . pythonpath = os . path . abspath ( os . path . expanduser ( options . pythonpath ) ) up_one_path = os . path . abspath ( os . path . join ( options . pytho...
Pass in a Values instance from OptionParser . Handle settings and pythonpath options - Values from OptionParser
54,731
def create_parser(self, prog_name, subcommand):
    """Create an OptionParser for a subcommand.

    prog_name  -- name of the command
    subcommand -- name of the subcommand
    """
    return OptionParser(
        prog=prog_name,
        usage=self.usage(subcommand),
        option_list=self.option_list,
    )
Create an OptionParser prog_name - Name of a command subcommand - Name of a subcommand
54,732
def hook(name=None, *args, **kwargs):
    """Decorator registering the wrapped function as a hook.

    The hook is recorded on the function's ``hooks`` attribute as a
    (name-or-function-name, args, kwargs) triple.
    """
    def register(f):
        if not hasattr(f, "hooks"):
            f.hooks = []
        f.hooks.append((name or f.__name__, args, kwargs))
        return f
    return register
Decorator to register the function as a hook
54,733
def expose(rule, **options):
    """Decorator that adds an url rule to a function.

    NOTE(review): a list/tuple *rule* is extended onto f.urls as-is and
    *options* is ignored in that branch -- confirm callers pass pre-built
    (rule, options) pairs in lists.
    """
    def register(f):
        if not hasattr(f, "urls"):
            f.urls = []
        if isinstance(rule, (list, tuple)):
            f.urls.extend(rule)
        else:
            f.urls.append((rule, options))
        return f
    return register
Decorator to add an url rule to a function
54,734
def _create_unicode_map ( ) : unicode_map = { } for beta , uni in _map . BETACODE_MAP . items ( ) : norm = unicodedata . normalize ( 'NFC' , uni ) unicode_map [ norm ] = beta unicode_map [ uni ] = beta final_sigma_norm = unicodedata . normalize ( 'NFC' , _FINAL_LC_SIGMA ) unicode_map [ final_sigma_norm ] = 's' unicode_...
Create the inverse map from unicode to betacode .
54,735
def _create_conversion_trie ( strict ) : t = pygtrie . CharTrie ( ) for beta , uni in _map . BETACODE_MAP . items ( ) : if strict : t [ beta ] = uni else : diacritics = beta [ 1 : ] perms = itertools . permutations ( diacritics ) for perm in perms : perm_str = beta [ 0 ] + '' . join ( perm ) t [ perm_str . lower ( ) ] ...
Create the trie for betacode conversion .
54,736
def _find_max_beta_token_len():
    """Find the maximum length of a single betacode token in the map.

    Returns -1 when the map is empty.
    """
    longest = -1
    for beta in _map.BETACODE_MAP:
        longest = max(longest, len(beta))
    return longest
Finds the maximum length of a single betacode token .
54,737
def beta_to_uni ( text , strict = False ) : param_key = ( strict , ) try : t = _BETA_CONVERSION_TRIES [ param_key ] except KeyError : t = _create_conversion_trie ( * param_key ) _BETA_CONVERSION_TRIES [ param_key ] = t transform = [ ] idx = 0 possible_word_boundary = False while idx < len ( text ) : if possible_word_bo...
Converts the given text from betacode to unicode .
54,738
def uni_to_beta(text):
    """Convert unicode text to its betacode equivalent.

    Characters without an entry in _UNICODE_MAP pass through unchanged.
    """
    mapping = _UNICODE_MAP
    return ''.join(mapping.get(ch, ch) for ch in text)
Convert unicode text to a betacode equivalent .
54,739
def __calculate_order ( self , node_dict ) : if len ( node_dict . keys ( ) ) != len ( set ( node_dict . keys ( ) ) ) : raise DependencyTreeException ( "Duplicate Keys Exist in node dictionary!" ) valid_order = [ node for node , dependencies in node_dict . items ( ) if len ( dependencies ) == 0 ] remaining_nodes = [ nod...
Determine a valid ordering of the nodes in which a node is not called before all of its dependencies.
54,740
def warn_import_error ( type_of_obj_support : str , caught : ImportError ) : msg = StringIO ( ) msg . writelines ( 'Import Error while trying to add support for ' + type_of_obj_support + '. You may continue but ' 'the associated parsers and converters wont be available : \n' ) traceback . print_tb ( caught . __tracebac...
Utility method to print a warning message about failed import of some modules
54,741
def create_parser_options(lazy_mfcollection_parsing: bool = False) -> Dict[str, Dict[str, Any]]:
    """Create a default options structure carrying the lazy
    multifile-collection parsing flag.
    """
    collection_opts = {'lazy_parsing': lazy_mfcollection_parsing}
    return {MultifileCollectionParser.__name__: collection_opts}
Utility method to create a default options structure with the lazy parsing inside
54,742
def register_default_plugins ( root_parser : ParserRegistryWithConverters ) : try : from parsyfiles . plugins_base . support_for_primitive_types import get_default_primitive_parsers , get_default_primitive_converters root_parser . register_parsers ( get_default_primitive_parsers ( ) ) root_parser . register_converters ...
Utility method to register all default plugins on the given parser + converter registry
54,743
def parse_collection ( self , item_file_prefix : str , base_item_type : Type [ T ] , item_name_for_log : str = None , file_mapping_conf : FileMappingConfiguration = None , options : Dict [ str , Dict [ str , Any ] ] = None ) -> Dict [ str , T ] : item_name_for_log = item_name_for_log or '' check_var ( item_name_for_log...
Main method to parse a collection of items of type base_item_type .
54,744
def parse_item ( self , location : str , item_type : Type [ T ] , item_name_for_log : str = None , file_mapping_conf : FileMappingConfiguration = None , options : Dict [ str , Dict [ str , Any ] ] = None ) -> T : item_name_for_log = item_name_for_log or '' check_var ( item_name_for_log , var_types = str , var_name = 'i...
Main method to parse an item of type item_type
54,745
def _parse__item ( self , item_type : Type [ T ] , item_file_prefix : str , file_mapping_conf : FileMappingConfiguration = None , options : Dict [ str , Dict [ str , Any ] ] = None ) -> T : options = options or create_parser_options ( ) file_mapping_conf = file_mapping_conf or WrappedFileMappingConfiguration ( ) obj = ...
Common parsing steps to parse an item
54,746
def SpamsumDistance ( ssA , ssB ) : mA = re . match ( '^(\d+)[:](.*)$' , ssA ) mB = re . match ( '^(\d+)[:](.*)$' , ssB ) if mA == None or mB == None : raise "do not appear to be spamsum signatures" if mA . group ( 1 ) != mB . group ( 1 ) : return max ( [ len ( mA . group ( 2 ) ) , len ( mB . group ( 2 ) ) ] ) else : r...
returns the spamsum distance between ssA and ssB if they use a different block size assume maximum distance otherwise returns the LevDistance
54,747
def add_image(self, image_path, annotations):
    """Add an image and its bounding boxes to the current list of files.

    Each annotation dict is expanded into bounding_box_from_annotation.
    """
    self.image_paths.append(image_path)
    self.bounding_boxes.append(
        [bounding_box_from_annotation(**annotation) for annotation in annotations])
Adds an image and its bounding boxes to the current list of files
54,748
def save ( self , list_file ) : bob . io . base . create_directories_safe ( os . path . dirname ( list_file ) ) with open ( list_file , 'w' ) as f : for i in range ( len ( self . image_paths ) ) : f . write ( self . image_paths [ i ] ) for bbx in self . bounding_boxes [ i ] : f . write ( "\t[%f %f %f %f]" % ( bbx . top...
Saves the current list of annotations to the given file .
54,749
def _feature_file(self, parallel=None, index=None):
    """Return the name of an intermediate HDF5 file for storing features.

    When *index* is not given it defaults to 0, or to the SGE_TASK_ID
    environment variable when *parallel* is set and the variable exists.
    """
    if index is None:
        if parallel is None or "SGE_TASK_ID" not in os.environ:
            index = 0
        else:
            index = int(os.environ["SGE_TASK_ID"])
    return os.path.join(self.feature_directory, "Features_%02d.hdf5" % index)
Returns the name of an intermediate file for storing features .
54,750
def get ( self , param , default = EMPTY ) : if not self . has ( param ) : if default is not EMPTY : return default raise ParamNotFoundException ( "value for %s not found" % param ) context_dict = copy . deepcopy ( self . manifest . get_context_dict ( ) ) for k , v in self . raw_dict . items ( ) : context_dict [ "%s:%s...
Returns the param value, or the default if the param doesn't exist. If no default is given, an exception is raised instead.
54,751
def set(self, param, value):
    """Record *param* = *value* locally and persist it to the manifest."""
    section = self.feature_name
    self.raw_dict[param] = value
    self.manifest.set(section, param, value)
sets the param to the value provided
54,752
def remove(self, param):
    """Remove a parameter from the raw dict and the manifest, if present."""
    if not self.has(param):
        return
    del self.raw_dict[param]
    self.manifest.remove_option(self.feature_name, param)
Remove a parameter from the manifest
54,753
def set_if_empty(self, param, default):
    """Set *param* to *default* only when it is not already present."""
    if self.has(param):
        return
    self.set(param, default)
Set the parameter to the default if it doesn t exist
54,754
def to_dict(self):
    """Return the fully specialized context as a dict of strings."""
    return {key: str(self.get(key)) for key in self.raw_dict}
Returns the context fully specialized as a dictionary
54,755
def write_to_manifest(self):
    """Overwrite this feature's manifest section with the current values."""
    section = self.feature_name
    # Drop and recreate the section so stale keys do not survive.
    self.manifest.remove_section(section)
    self.manifest.add_section(section)
    for key, value in self.raw_dict.items():
        self.manifest.set(section, key, value)
Overwrites the section of the manifest with the featureconfig s value
54,756
def round_to_05 ( n , exp = None , mode = 's' ) : n = np . asarray ( n ) if exp is None : exp = np . floor ( np . log10 ( np . abs ( n ) ) ) ntmp = np . abs ( n ) / 10. ** exp if mode == 's' : n1 = ntmp s = 1. n2 = nret = np . floor ( ntmp ) else : n1 = nret = np . ceil ( ntmp ) s = - 1. n2 = ntmp return np . where ( n...
Round to the next 0 . 5 - value .
54,757
def convert_radian(coord, *variables):
    """Convert *coord* from radian to degree when any variable's units
    attribute says 'radian'; otherwise return it unchanged.
    """
    in_radian = any(v.attrs.get('units') == 'radian' for v in variables)
    if in_radian:
        return coord * 180. / np.pi
    return coord
Convert the given coordinate from radian to degree
54,758
def replace_coord ( self , i ) : da = next ( islice ( self . data_iterator , i , i + 1 ) ) name , coord = self . get_alternative_coord ( da , i ) other_coords = { key : da . coords [ key ] for key in set ( da . coords ) . difference ( da . dims ) } ret = da . rename ( { da . dims [ - 1 ] : name } ) . assign_coords ( **...
Replace the coordinate for the data array at the given position
54,759
def value2pickle(self):
    """Return the current edge color of each axis spine, keyed by name."""
    colors = {}
    for name, spine in self.ax.spines.items():
        colors[name] = spine.get_edgecolor()
    return colors
Return the current axis colors
54,760
def set_default_formatters(self, which=None):
    """Store the axis' current formatters as the defaults.

    which -- 'minor', 'major', or None to store both
    """
    if which in (None, 'minor'):
        self.default_formatters['minor'] = self.axis.get_minor_formatter()
    if which in (None, 'major'):
        self.default_formatters['major'] = self.axis.get_major_formatter()
Sets the default formatters that is used for updating to None
54,761
def plotted_data(self):
    """The data that is actually shown to the user.

    Arrays whose cycled value is None are filtered out.
    """
    shown = []
    for arr, val in zip(self.iter_data, cycle(slist(self.value))):
        if val is not None:
            shown.append(arr)
    return InteractiveList(shown)
The data that is shown to the user
54,762
def axis(self):
    """Axis of the colorbar holding the ticks.

    Will be overwritten during the update process.
    """
    location = self.axis_locations[self.position]
    return getattr(self.colorbar.ax, location + 'axis')
axis of the colorbar with the ticks . Will be overwritten during update process .
54,763
def default_formatters(self):
    """Default formatters of the colorbar axis, computed lazily."""
    if not self._default_formatters:
        # Populate the cache on first access.
        self.set_default_formatters()
    return self._default_formatters
Default locator of the axis of the colorbars
54,764
def get_xyz_2d ( self , xcoord , x , ycoord , y , u , v ) : xy = xcoord . values . ravel ( ) + 1j * ycoord . values . ravel ( ) dist = np . abs ( xy - ( x + 1j * y ) ) imin = np . nanargmin ( dist ) xy_min = xy [ imin ] return ( xy_min . real , xy_min . imag , u . values . ravel ( ) [ imin ] , v . values . ravel ( ) [ ...
Get closest x y and z for the given x and y in data for 2d coords
54,765
def hist2d ( self , da , ** kwargs ) : if self . value is None or self . value == 'counts' : normed = False else : normed = True y = da . values x = da . coords [ da . dims [ 0 ] ] . values counts , xedges , yedges = np . histogram2d ( x , y , normed = normed , ** kwargs ) if self . value == 'counts' : counts = counts ...
Make the two dimensional histogram
54,766
def _statsmodels_bivariate_kde ( self , x , y , bws , xsize , ysize , xyranges ) : import statsmodels . nonparametric . api as smnp for i , ( coord , bw ) in enumerate ( zip ( [ x , y ] , bws ) ) : if isinstance ( bw , six . string_types ) : bw_func = getattr ( smnp . bandwidths , "bw_" + bw ) bws [ i ] = bw_func ( coo...
Compute a bivariate kde using statsmodels . This function is mainly motivated through seaborn . distributions . _statsmodels_bivariate_kde
54,767
def append_diff_hist(diff, diff_hist=None):
    """Given a diff as generated by record_diff, append a diff record to
    the list of diff_hist records and return the list.

    diff_hist defaults to a fresh list; None replaces the original mutable
    default argument (``diff_hist=list()``) so state cannot leak between
    calls if the helper mutates its argument.
    """
    if diff_hist is None:
        diff_hist = list()
    diff, diff_hist = _norm_json_params(diff, diff_hist)
    if not diff_hist:
        diff_hist = list()
    diff_hist.append({'diff': diff, 'diff_date': now_field()})
    return diff_hist
Given a diff as generated by record_diff append a diff record to the list of diff_hist records .
54,768
def _find_video(self):
    """Lookup and populate this Video from its id or, failing that, its
    reference_id. Does nothing when neither is set or no data is returned.
    """
    if self.id:
        data = self.connection.get_item('find_video_by_id', video_id=self.id)
    elif self.reference_id:
        data = self.connection.get_item('find_video_by_reference_id',
                                        reference_id=self.reference_id)
    else:
        data = None
    if data:
        self._load(data)
Lookup and populate pybrightcove . video . Video object given a video id or reference_id .
54,769
def to_xml ( self ) : xml = '' for asset in self . assets : xml += '<asset filename="%s" ' % os . path . basename ( asset [ 'filename' ] ) xml += ' refid="%(refid)s"' % asset xml += ' size="%(size)s"' % asset xml += ' hash-code="%s"' % asset [ 'hash-code' ] xml += ' type="%(type)s"' % asset if asset . get ( 'encoding-r...
Converts object into an XML string .
54,770
def _load ( self , data ) : self . raw_data = data self . creation_date = _convert_tstamp ( data [ 'creationDate' ] ) self . economics = data [ 'economics' ] self . id = data [ 'id' ] self . last_modified_date = _convert_tstamp ( data [ 'lastModifiedDate' ] ) self . length = data [ 'length' ] self . link_text = data [ ...
Deserialize a dictionary of data into a pybrightcove . video . Video object .
54,771
def get_custom_metadata ( self ) : if self . id is not None : data = self . connection . get_item ( 'find_video_by_id' , video_id = self . id , video_fields = "customFields" ) for key in data . get ( "customFields" , { } ) . keys ( ) : val = data [ "customFields" ] . get ( key ) if val is not None : self . add_custom_m...
Fetches custom metadata for an already existing Video.
54,772
def add_custom_metadata(self, key, value, meta_type=None):
    """Add custom metadata to the Video.

    meta_type is required for the XML API.
    """
    entry = {'key': key, 'value': value, 'type': meta_type}
    self.metadata.append(entry)
Add custom metadata to the Video . meta_type is required for XML API .
54,773
def add_asset ( self , filename , asset_type , display_name , encoding_rate = None , frame_width = None , frame_height = None , encode_to = None , encode_multiple = False , h264_preserve_as_rendition = False , h264_no_processing = False ) : m = hashlib . md5 ( ) fp = file ( filename , 'rb' ) bits = fp . read ( 262144 )...
Add an asset to the Video object .
54,774
def save ( self , create_multiple_renditions = True , preserve_source_rendition = True , encode_to = enums . EncodeToEnum . FLV ) : if is_ftp_connection ( self . connection ) and len ( self . assets ) > 0 : self . connection . post ( xml = self . to_xml ( ) , assets = self . assets ) elif not self . id and self . _file...
Creates or updates the video
54,775
def delete(self, cascade=False, delete_shares=False):
    """Delete the video on the server and clear the local id.

    No-op when the video has no id.
    """
    if not self.id:
        return
    self.connection.post('delete_video', video_id=self.id,
                         cascade=cascade, delete_shares=delete_shares)
    self.id = None
Deletes the video .
54,776
def get_upload_status(self):
    """Get the status of the video that has been uploaded.

    Returns None when this video has no id yet.
    """
    if not self.id:
        return None
    return self.connection.post('get_upload_status', video_id=self.id)
Get the status of the video that has been uploaded .
54,777
def share(self, accounts):
    """Create a share.

    NOTE(review): after validating the argument this always raises --
    the endpoint is not implemented yet.
    """
    if not isinstance(accounts, (list, tuple)):
        raise exceptions.PyBrightcoveError(
            "Video.share expects an iterable argument")
    raise exceptions.PyBrightcoveError("Not yet implemented")
Create a share
54,778
def set_image(self, image, filename=None, resize=False):
    """Set the poster or thumbnail of this Video.

    No-op when the video has no id.
    """
    if not self.id:
        return
    data = self.connection.post('add_image', filename, video_id=self.id,
                                image=image.to_dict(), resize=resize)
    if data:
        self.image = Image(data=data)
Set the poster or thumbnail of this Video.
54,779
def find_related(self, _connection=None, page_size=100, page_number=0):
    """List all videos that are related to this one.

    Returns None when this video has no id.
    """
    if not self.id:
        return None
    return connection.ItemResultSet('find_related_videos', Video, _connection,
                                    page_size, page_number, None, None,
                                    video_id=self.id)
List all videos that are related to this one .
54,780
def delete_video(video_id, cascade=False, delete_shares=False, _connection=None):
    """Delete the video represented by the video_id parameter.

    Uses the supplied connection, or creates a fresh APIConnection.
    """
    conn = _connection or connection.APIConnection()
    conn.post('delete_video', video_id=video_id, cascade=cascade,
              delete_shares=delete_shares)
Delete the video represented by the video_id parameter .
54,781
def get_status(video_id, _connection=None):
    """Get the upload status of the video given by video_id.

    Uses the supplied connection, or creates a fresh APIConnection.
    """
    conn = _connection or connection.APIConnection()
    return conn.post('get_upload_status', video_id=video_id)
Get the status of a video given the video_id parameter .
54,782
def activate(video_id, _connection=None):
    """Mark a video as Active and return the refreshed Video object."""
    conn = _connection or connection.APIConnection()
    data = conn.post('update_video', video={
        'id': video_id,
        'itemState': enums.ItemStateEnum.ACTIVE,
    })
    return Video(data=data, _connection=conn)
Mark a video as Active
54,783
def find_modified ( since , filter_list = None , _connection = None , page_size = 25 , page_number = 0 , sort_by = enums . DEFAULT_SORT_BY , sort_order = enums . DEFAULT_SORT_ORDER ) : filters = [ ] if filter_list is not None : filters = filter_list if not isinstance ( since , datetime ) : msg = 'The parameter "since" ...
List all videos modified since a certain date .
54,784
def find_all(_connection=None, page_size=100, page_number=0,
             sort_by=enums.DEFAULT_SORT_BY, sort_order=enums.DEFAULT_SORT_ORDER):
    """List all videos as a lazily-paged ItemResultSet."""
    return connection.ItemResultSet(
        'find_all_videos', Video, _connection, page_size, page_number,
        sort_by, sort_order)
List all videos .
54,785
def find_by_tags ( and_tags = None , or_tags = None , _connection = None , page_size = 100 , page_number = 0 , sort_by = enums . DEFAULT_SORT_BY , sort_order = enums . DEFAULT_SORT_ORDER ) : err = None if not and_tags and not or_tags : err = "You must supply at least one of either and_tags or or_tags." if and_tags and ...
List videos given a certain set of tags .
54,786
def find_by_text(text, _connection=None, page_size=100, page_number=0,
                 sort_by=enums.DEFAULT_SORT_BY, sort_order=enums.DEFAULT_SORT_ORDER):
    """List videos whose title or description matches *text*."""
    return connection.ItemResultSet(
        'find_videos_by_text', Video, _connection, page_size, page_number,
        sort_by, sort_order, text=text)
List videos that match the text in title or description .
54,787
def find_by_campaign ( campaign_id , _connection = None , page_size = 100 , page_number = 0 , sort_by = enums . DEFAULT_SORT_BY , sort_order = enums . DEFAULT_SORT_ORDER ) : return connection . ItemResultSet ( 'find_videos_by_campaign_id' , Video , _connection , page_size , page_number , sort_by , sort_order , campaign...
List all videos for a given campaign .
54,788
def find_by_user(user_id, _connection=None, page_size=100, page_number=0,
                 sort_by=enums.DEFAULT_SORT_BY, sort_order=enums.DEFAULT_SORT_ORDER):
    """List all videos uploaded by the user given by *user_id*."""
    return connection.ItemResultSet(
        'find_videos_by_user_id', Video, _connection, page_size, page_number,
        sort_by, sort_order, user_id=user_id)
List all videos uploaded by a certain user .
54,789
def find_by_reference_ids ( reference_ids , _connection = None , page_size = 100 , page_number = 0 , sort_by = enums . DEFAULT_SORT_BY , sort_order = enums . DEFAULT_SORT_ORDER ) : if not isinstance ( reference_ids , ( list , tuple ) ) : err = "Video.find_by_reference_ids expects an iterable argument" raise exceptions ...
List all videos identified by a list of reference ids
54,790
def find_by_ids ( ids , _connection = None , page_size = 100 , page_number = 0 , sort_by = enums . DEFAULT_SORT_BY , sort_order = enums . DEFAULT_SORT_ORDER ) : if not isinstance ( ids , ( list , tuple ) ) : err = "Video.find_by_ids expects an iterable argument" raise exceptions . PyBrightcoveError ( err ) ids = ',' . ...
List all videos identified by a list of Brightcove video ids
54,791
def __wrap ( self , func ) : def deffunc ( * args , ** kwargs ) : if hasattr ( inspect , 'signature' ) : function_args = inspect . signature ( func ) . parameters else : function_args = inspect . getargspec ( func ) . args filtered_kwargs = kwargs . copy ( ) for param in function_args : if param in kwargs : filtered_kw...
This decorator overrides the default arguments of a function .
54,792
def _sumDiceRolls(self, rollList):
    """Convert a dice-roll structure to a single integer result.

    RollList instances are recorded in self.rolls and summed; any other
    value is returned unchanged.
    """
    if not isinstance(rollList, RollList):
        return rollList
    self.rolls.append(rollList)
    return rollList.sum()
convert from dice roll structure to a single integer result
54,793
def annotated_references ( obj ) : references = KeyTransformDict ( transform = id , default_factory = list ) for type_ in type ( obj ) . __mro__ : if type_ in type_based_references : type_based_references [ type_ ] ( obj , references ) add_attr ( obj , "__dict__" , references ) add_attr ( obj , "__class__" , references...
Return known information about references held by the given object .
54,794
def object_annotation ( obj ) : if isinstance ( obj , BASE_TYPES ) : return repr ( obj ) if type ( obj ) . __name__ == 'function' : return "function\\n{}" . format ( obj . __name__ ) elif isinstance ( obj , types . MethodType ) : if six . PY2 : im_class = obj . im_class if im_class is None : im_class_name = "<None>" el...
Return a string to be used for Graphviz nodes . The string should be short but as informative as possible .
54,795
def disttar ( target , source , env ) : import tarfile env_dict = env . Dictionary ( ) if env_dict . get ( "DISTTAR_FORMAT" ) in [ "gz" , "bz2" ] : tar_format = env_dict [ "DISTTAR_FORMAT" ] else : tar_format = "" base_name = str ( target [ 0 ] ) . split ( '.tar' ) [ 0 ] ( target_dir , dir_name ) = os . path . split ( ...
tar archive builder
54,796
def disttar_suffix(env, sources):
    """tar archive suffix generator.

    Returns ".tar.gz" / ".tar.bz2" when DISTTAR_FORMAT selects a supported
    compressed format, plain ".tar" otherwise.
    """
    env_dict = env.Dictionary()
    # "in"/get() instead of the Python-2-only dict.has_key().
    if env_dict.get("DISTTAR_FORMAT") in ["gz", "bz2"]:
        return ".tar." + env_dict["DISTTAR_FORMAT"]
    return ".tar"
tar archive suffix generator
54,797
def generate(env):
    """Add builders and construction variables for the DistTar builder."""
    action = SCons.Action.Action(disttar, disttar_string)
    env['BUILDERS']['DistTar'] = Builder(
        action=action,
        emitter=disttar_emitter,
        suffix=disttar_suffix,
        target_factory=env.fs.Entry,
    )
    env.AppendUnique(DISTTAR_FORMAT='gz')
Add builders and construction variables for the DistTar builder .
54,798
def find_one(self, cls, id):
    """Find a single keyed row - as per the gludb spec.

    Returns the first match on the 'id' index, or None when absent.
    """
    matches = self.find_by_index(cls, 'id', id)
    if not matches:
        return None
    return matches[0]
Find single keyed row - as per the gludb spec .
54,799
def save ( self , obj ) : cur = self . _conn ( ) . cursor ( ) tabname = obj . __class__ . get_table_name ( ) index_names = obj . __class__ . index_names ( ) or [ ] col_names = [ 'id' , 'value' ] + index_names value_holders = [ '%s' ] * len ( col_names ) updates = [ '%s = EXCLUDED.%s' % ( cn , cn ) for cn in col_names [...
Save current instance - as per the gludb spec .