idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
12,700
def handle_pre_response ( self , item_session : ItemSession ) -> Actions : action = self . consult_pre_response_hook ( item_session ) if action == Actions . RETRY : item_session . set_status ( Status . skipped ) elif action == Actions . FINISH : item_session . set_status ( Status . done ) elif action == Actions . STOP ...
Process a response that is starting .
12,701
def handle_document ( self , item_session : ItemSession , filename : str ) -> Actions : self . _waiter . reset ( ) action = self . handle_response ( item_session ) if action == Actions . NORMAL : self . _statistics . increment ( item_session . response . body . size ( ) ) item_session . set_status ( Status . done , fil...
Process a successful document response .
12,702
def handle_no_document(self, item_session: ItemSession) -> Actions:
    '''Callback for successful responses containing no useful document.

    Resets the wait timer, runs the generic response handler, and marks
    the item as skipped when no hook overrides the outcome.
    '''
    self._waiter.reset()
    outcome = self.handle_response(item_session)
    if outcome == Actions.NORMAL:
        item_session.set_status(Status.skipped)
    return outcome
Callback for successful responses containing no useful document .
12,703
def handle_intermediate_response(self, item_session: ItemSession) -> Actions:
    '''Callback for successful intermediate responses.

    Resets the wait timer and returns the generic handler's action.
    '''
    self._waiter.reset()
    return self.handle_response(item_session)
Callback for successful intermediate responses .
12,704
def handle_document_error(self, item_session: ItemSession) -> Actions:
    '''Callback for when the document only describes a server error.

    Increments the back-off waiter and the server-error counter, then
    marks the item as errored unless a hook overrides the outcome.
    '''
    self._waiter.increment()
    self._statistics.errors[ServerError] += 1
    outcome = self.handle_response(item_session)
    if outcome == Actions.NORMAL:
        item_session.set_status(Status.error)
    return outcome
Callback for when the document only describes a server error.
12,705
def handle_response ( self , item_session : ItemSession ) -> Actions : action = self . consult_response_hook ( item_session ) if action == Actions . RETRY : item_session . set_status ( Status . error ) elif action == Actions . FINISH : item_session . set_status ( Status . done ) elif action == Actions . STOP : raise Ho...
Generic handler for a response .
12,706
def handle_error ( self , item_session : ItemSession , error : BaseException ) -> Actions : if not self . _ssl_verification and isinstance ( error , SSLVerificationError ) : self . _statistics . increment_error ( ProtocolError ( ) ) else : self . _statistics . increment_error ( error ) self . _waiter . increment ( ) ac...
Process an error .
12,707
def get_wait_time(self, item_session: ItemSession, error=None):
    '''Return the wait time in seconds between requests.

    The plugin hook may override the waiter's value; when no hook is
    connected the waiter's value is used as-is.
    '''
    seconds = self._waiter.get()
    try:
        override = self.hook_dispatcher.call(
            PluginFunctions.wait_time, seconds, item_session, error)
    except HookDisconnected:
        return seconds
    return override
Return the wait time in seconds between requests .
12,708
def plugin_wait_time(seconds: float, item_session: ItemSession,
                     error: Optional[Exception] = None) -> float:
    '''Return the wait time between requests.

    Default plugin hook implementation: passes the suggested wait time
    through unchanged.
    '''
    return seconds
Return the wait time between requests .
12,709
def consult_pre_response_hook(self, item_session: ItemSession) -> Actions:
    '''Return the scripting action to take when a response begins.

    Falls back to Actions.NORMAL when no hook is connected.
    '''
    try:
        return self.hook_dispatcher.call(
            PluginFunctions.handle_pre_response, item_session)
    except HookDisconnected:
        return Actions.NORMAL
Return scripting action when a response begins .
12,710
def consult_response_hook(self, item_session: ItemSession) -> Actions:
    '''Return the scripting action to take when a response ends.

    Falls back to Actions.NORMAL when no hook is connected.
    '''
    try:
        return self.hook_dispatcher.call(
            PluginFunctions.handle_response, item_session)
    except HookDisconnected:
        return Actions.NORMAL
Return scripting action when a response ends .
12,711
def consult_error_hook(self, item_session: ItemSession, error: BaseException):
    '''Return the scripting action to take when an error occurred.

    Falls back to Actions.NORMAL when no hook is connected.
    '''
    try:
        return self.hook_dispatcher.call(
            PluginFunctions.handle_error, item_session, error)
    except HookDisconnected:
        return Actions.NORMAL
Return scripting action when an error occurred.
12,712
def add_extra_urls ( self , item_session : ItemSession ) : if item_session . url_record . level == 0 and self . _sitemaps : extra_url_infos = ( self . parse_url ( '{0}://{1}/robots.txt' . format ( item_session . url_record . url_info . scheme , item_session . url_record . url_info . hostname_with_port ) ) , self . pars...
Add additional URLs such as robots.txt and favicon.ico.
12,713
def scrape_document ( self , item_session : ItemSession ) : self . event_dispatcher . notify ( PluginFunctions . get_urls , item_session ) if not self . _document_scraper : return demux_info = self . _document_scraper . scrape_info ( item_session . request , item_session . response , item_session . url_record . link_ty...
Process document for links .
12,714
def _process_scrape_info ( self , scraper : BaseScraper , scrape_result : ScrapeResult , item_session : ItemSession ) : if not scrape_result : return 0 , 0 num_inline = 0 num_linked = 0 for link_context in scrape_result . link_contexts : url_info = self . parse_url ( link_context . link ) if not url_info : continue url...
Collect the URLs from the scrape info dict .
12,715
def rewrite_url(self, url_info: URLInfo) -> URLInfo:
    '''Return a rewritten URL (such as escaped fragment).

    When no rewriter is configured, the URL is returned unchanged.
    '''
    rewriter = self._url_rewriter
    return rewriter.rewrite(url_info) if rewriter else url_info
Return a rewritten URL such as escaped fragment .
12,716
def run_add_system ( name , token , org , system , prompt ) : repo = get_repo ( token = token , org = org , name = name ) try : repo . create_label ( name = system . strip ( ) , color = SYSTEM_LABEL_COLOR ) click . secho ( "Successfully added new system {}" . format ( system ) , fg = "green" ) if prompt and click . con...
Adds a new system to the repo .
12,717
def run_remove_system ( name , token , org , system , prompt ) : repo = get_repo ( token = token , org = org , name = name ) try : label = repo . get_label ( name = system . strip ( ) ) label . delete ( ) click . secho ( "Successfully deleted {}" . format ( system ) , fg = "green" ) if prompt and click . confirm ( "Run...
Removes a system from the repo .
12,718
def get_config ( repo ) : files = get_files ( repo ) config = DEFAULT_CONFIG if "config.json" in files : config_file = repo . get_file_contents ( '/config.json' , ref = "gh-pages" ) try : repo_config = json . loads ( config_file . decoded_content . decode ( "utf-8" ) ) config . update ( repo_config ) except ValueError ...
Get the config for the repo merged with the default config . Returns the default config if no config file is found .
12,719
def dispatch(self, *args, **kwargs):
    '''Check that user signup is allowed before even bothering to
    dispatch or do other processing.

    Redirects to the disallowed URL when registration is closed.
    '''
    if self.registration_allowed():
        return super(RegistrationView, self).dispatch(*args, **kwargs)
    return HttpResponseRedirect(force_text(self.disallowed_url))
Check that user signup is allowed before even bothering to dispatch or do other processing .
12,720
def get_email_context(self, activation_key):
    '''Build the template context used for the activation email.'''
    return {
        'scheme': 'https' if self.request.is_secure() else 'http',
        'activation_key': activation_key,
        'expiration_days': settings.ACCOUNT_ACTIVATION_DAYS,
        'site': get_current_site(self.request),
    }
Build the template context used for the activation email .
12,721
def validate_key ( self , activation_key ) : try : username = signing . loads ( activation_key , salt = REGISTRATION_SALT , max_age = settings . ACCOUNT_ACTIVATION_DAYS * 86400 ) return username except signing . SignatureExpired : raise ActivationError ( self . EXPIRED_MESSAGE , code = 'expired' ) except signing . BadS...
Verify that the activation key is valid and within the permitted activation time window returning the username if valid or raising ActivationError if not .
12,722
def get_user ( self , username ) : User = get_user_model ( ) try : user = User . objects . get ( ** { User . USERNAME_FIELD : username , } ) if user . is_active : raise ActivationError ( self . ALREADY_ACTIVATED_MESSAGE , code = 'already_activated' ) return user except User . DoesNotExist : raise ActivationError ( self...
Given the verified username, look up and return the corresponding user account if it exists, or raise ActivationError if it doesn't.
12,723
def validate_confusables(value):
    '''Validator which disallows dangerous usernames likely to
    represent homograph attacks.

    Non-text values pass through without validation.
    '''
    if isinstance(value, six.text_type) and confusables.is_dangerous(value):
        raise ValidationError(CONFUSABLE, code='invalid')
Validator which disallows dangerous usernames likely to represent homograph attacks .
12,724
def validate_confusables_email(value):
    """Validator which disallows dangerous email addresses likely to
    represent homograph attacks.

    Values without an '@' are ignored; otherwise the local part and the
    domain are each checked for confusable characters.
    """
    if '@' not in value:
        return
    # Split on the LAST '@': the previous `value.split('@')` raised
    # ValueError (too many values to unpack) for addresses containing
    # '@' in the local part.
    local_part, domain = value.rsplit('@', 1)
    if confusables.is_dangerous(local_part) or confusables.is_dangerous(domain):
        raise ValidationError(CONFUSABLE_EMAIL, code='invalid')
Validator which disallows dangerous email addresses likely to represent homograph attacks .
12,725
def minify_js_files():
    """Minify the configured JS bundles with UglifyJS.

    For each entry in JS_FILE_MAPPING, joins the input file list and
    shells out to ``uglifyjs`` via ``local`` (presumably Fabric —
    confirm against the file's imports).
    """
    # Iterate values directly: the mapping keys were never used.
    for mapping in JS_FILE_MAPPING.values():
        input_files = " ".join(mapping["input_files"])
        output_file = mapping["output_file"]
        command = "uglifyjs {input_files} -o {output_file}".format(
            input_files=input_files, output_file=output_file)
        local(command)
This command minifies JS files with UglifyJS.
12,726
def minify_css_files():
    """Minify the configured CSS bundles with UglifyCSS.

    For each entry in CSS_FILE_MAPPING, joins the input file list and
    shells out to ``uglifycss`` via ``local``, redirecting stdout to the
    output file.
    """
    # Iterate values directly: the mapping keys were never used.
    for mapping in CSS_FILE_MAPPING.values():
        input_files = " ".join(mapping["input_files"])
        output_file = mapping["output_file"]
        # Renamed from the misleading `uglifyjs_command`: this runs uglifycss.
        command = "uglifycss {input_files} > {output_file}".format(
            input_files=input_files, output_file=output_file)
        local(command)
This command minifies CSS files with UglifyCSS.
12,727
def timestamp_with_timezone ( dt = None ) : dt = dt or datetime . now ( ) if timezone is None : return dt . strftime ( '%Y-%m-%d %H:%M%z' ) if not dt . tzinfo : tz = timezone . get_current_timezone ( ) if not tz : tz = timezone . utc dt = dt . replace ( tzinfo = timezone . get_current_timezone ( ) ) return dt . strftim...
Return a timestamp with a timezone for the configured locale . If all else fails consider localtime to be UTC .
12,728
def get_access_control_function ( ) : fn_path = getattr ( settings , 'ROSETTA_ACCESS_CONTROL_FUNCTION' , None ) if fn_path is None : return is_superuser_staff_or_in_translators_group perm_module , perm_func = fn_path . rsplit ( '.' , 1 ) perm_module = importlib . import_module ( perm_module ) return getattr ( perm_modu...
Return a predicate for determining if a user can access the Rosetta views
12,729
def fix_nls ( self , in_ , out_ ) : if 0 == len ( in_ ) or 0 == len ( out_ ) : return out_ if "\r" in out_ and "\r" not in in_ : out_ = out_ . replace ( "\r" , '' ) if "\n" == in_ [ 0 ] and "\n" != out_ [ 0 ] : out_ = "\n" + out_ elif "\n" != in_ [ 0 ] and "\n" == out_ [ 0 ] : out_ = out_ . lstrip ( ) if 0 == len ( out...
Fixes submitted translations by filtering carriage returns and pairing newlines at the beginning and end of the translated string with the original.
12,730
def ref_lang_po_file ( self ) : ref_pofile = None if rosetta_settings . ENABLE_REFLANG and self . ref_lang != 'msgid' : replacement = '{separator}locale{separator}{ref_lang}' . format ( separator = os . sep , ref_lang = self . ref_lang ) pattern = '\{separator}locale\{separator}[a-z]{{2}}' . format ( separator = os . s...
Return a parsed . po file object for the reference language if one exists otherwise None .
12,731
def convert_from_gps_time ( gps_time , gps_week = None ) : converted_gps_time = None gps_timestamp = float ( gps_time ) if gps_week != None : converted_gps_time = GPS_START + datetime . timedelta ( seconds = int ( gps_week ) * SECS_IN_WEEK + gps_timestamp ) else : os . environ [ 'TZ' ] = 'right/UTC' gps_time_as_gps = G...
Convert gps time in ticks to standard time .
12,732
def get_video_duration(video_file):
    '''Get video duration in seconds.

    Returns None (after printing a message) when the duration cannot be
    extracted for any reason.
    '''
    try:
        first_stream = FFProbe(video_file).video[0]
        return float(first_stream.duration)
    except Exception as e:
        print("could not extract duration from video {} due to {}".format(video_file, e))
        return None
Get video duration in seconds
12,733
def get_video_end_time ( video_file ) : if not os . path . isfile ( video_file ) : print ( "Error, video file {} does not exist" . format ( video_file ) ) return None try : time_string = FFProbe ( video_file ) . video [ 0 ] . creation_time try : creation_time = datetime . datetime . strptime ( time_string , TIME_FORMAT...
Get video end time in seconds
12,734
def get_video_start_time ( video_file ) : if not os . path . isfile ( video_file ) : print ( "Error, video file {} does not exist" . format ( video_file ) ) return None video_end_time = get_video_end_time ( video_file ) duration = get_video_duration ( video_file ) if video_end_time == None or duration == None : return ...
Get start time in seconds
12,735
def _extract_alternative_fields ( self , fields , default = None , field_type = float ) : for field in fields : if field in self . tags : if field_type is float : value = eval_frac ( self . tags [ field ] . values [ 0 ] ) if field_type is str : value = str ( self . tags [ field ] . values ) if field_type is int : value...
Extract a value for a list of ordered fields. Return the value of the first existing field in the list.
12,736
def extract_geo ( self ) : altitude = self . extract_altitude ( ) dop = self . extract_dop ( ) lon , lat = self . extract_lon_lat ( ) d = { } if lon is not None and lat is not None : d [ 'latitude' ] = lat d [ 'longitude' ] = lon if altitude is not None : d [ 'altitude' ] = altitude if dop is not None : d [ 'dop' ] = d...
Extract geo - related information from exif
12,737
def extract_gps_time ( self ) : gps_date_field = "GPS GPSDate" gps_time_field = "GPS GPSTimeStamp" gps_time = 0 if gps_date_field in self . tags and gps_time_field in self . tags : date = str ( self . tags [ gps_date_field ] . values ) . split ( ":" ) if int ( date [ 0 ] ) == 0 or int ( date [ 1 ] ) == 0 or int ( date ...
Extract timestamp from GPS field .
12,738
def extract_exif ( self ) : width , height = self . extract_image_size ( ) make , model = self . extract_make ( ) , self . extract_model ( ) orientation = self . extract_orientation ( ) geo = self . extract_geo ( ) capture = self . extract_capture_time ( ) direction = self . extract_direction ( ) d = { 'width' : width ...
Extract a list of exif infos
12,739
def extract_image_size(self):
    '''Extract image height and width.

    Returns (width, height); -1 for a dimension that is missing.
    '''
    def first_of(fields):
        # Take the first available EXIF field from the alternatives.
        value, _ = self._extract_alternative_fields(fields, -1, int)
        return value

    width = first_of(['Image ImageWidth', 'EXIF ExifImageWidth'])
    height = first_of(['Image ImageLength', 'EXIF ExifImageLength'])
    return width, height
Extract image height and width
12,740
def extract_make(self):
    '''Extract camera make, preferring the lens make field.'''
    make, _ = self._extract_alternative_fields(
        ['EXIF LensMake', 'Image Make'], default='none', field_type=str)
    return make
Extract camera make
12,741
def extract_model(self):
    '''Extract camera model, preferring the lens model field.'''
    model, _ = self._extract_alternative_fields(
        ['EXIF LensModel', 'Image Model'], default='none', field_type=str)
    return model
Extract camera model
12,742
def extract_orientation(self):
    '''Extract image orientation.

    Values outside the valid EXIF range 1-8 fall back to 1 (normal).
    '''
    orientation, _ = self._extract_alternative_fields(
        ['Image Orientation'], default=1, field_type=int)
    if orientation in range(1, 9):
        return orientation
    return 1
Extract image orientation
12,743
def fields_exist(self, fields):
    '''Check existence of a list of fields in EXIF.

    Each entry in `fields` is a list of alternative tag names; at least
    one alternative must be present in self.tags.
    '''
    for alternatives in fields:
        if not any(candidate in self.tags for candidate in alternatives):
            print("Missing required EXIF tag: {0} for image {1}".format(
                alternatives[0], self.filename))
            return False
    return True
Check existence of a list of fields in exif.
12,744
def mapillary_tag_exists ( self ) : description_tag = "Image ImageDescription" if description_tag not in self . tags : return False for requirement in [ "MAPSequenceUUID" , "MAPSettingsUserKey" , "MAPCaptureTime" , "MAPLongitude" , "MAPLatitude" ] : if requirement not in self . tags [ description_tag ] . values or json...
Check existence of required Mapillary tags
12,745
def query_search_api ( min_lat , max_lat , min_lon , max_lon , max_results ) : params = urllib . urlencode ( zip ( [ 'client_id' , 'bbox' , 'per_page' ] , [ CLIENT_ID , ',' . join ( [ str ( min_lon ) , str ( min_lat ) , str ( max_lon ) , str ( max_lat ) ] ) , str ( max_results ) ] ) ) print ( MAPILLARY_API_IM_SEARCH_UR...
Send query to the search API and get dict with image data .
12,746
def download_images ( query , path , size = 1024 ) : im_size = "thumb-{0}.jpg" . format ( size ) im_list = [ ] for im in query : key = im [ 'properties' ] [ 'key' ] url = MAPILLARY_API_IM_RETRIEVE_URL + key + '/' + im_size filename = key + ".jpg" try : image = urllib . URLopener ( ) image . retrieve ( url , path + file...
Download images in query result to path .
12,747
def get_lat_lon_time_from_gpx ( gpx_file , local_time = True ) : with open ( gpx_file , 'r' ) as f : gpx = gpxpy . parse ( f ) points = [ ] if len ( gpx . tracks ) > 0 : for track in gpx . tracks : for segment in track . segments : for point in segment . points : t = utc_to_localtime ( point . time ) if local_time else...
Read location and time stamps from a track in a GPX file .
12,748
def get_lat_lon_time_from_nmea ( nmea_file , local_time = True ) : with open ( nmea_file , "r" ) as f : lines = f . readlines ( ) lines = [ l . rstrip ( "\n\r" ) for l in lines ] for l in lines : if "GPRMC" in l : data = pynmea2 . parse ( l ) date = data . datetime . date ( ) break points = [ ] for l in lines : if "GPR...
Read location and time stamps from a track in a NMEA file .
12,749
def ecef_from_lla ( lat , lon , alt ) : a2 = WGS84_a ** 2 b2 = WGS84_b ** 2 lat = math . radians ( lat ) lon = math . radians ( lon ) L = 1.0 / math . sqrt ( a2 * math . cos ( lat ) ** 2 + b2 * math . sin ( lat ) ** 2 ) x = ( a2 * L + alt ) * math . cos ( lat ) * math . cos ( lon ) y = ( a2 * L + alt ) * math . cos ( l...
Compute ECEF XYZ from latitude longitude and altitude .
12,750
def get_max_distance_from_start ( latlon_track ) : latlon_list = [ ] for idx , point in enumerate ( latlon_track ) : lat = latlon_track [ idx ] [ 1 ] lon = latlon_track [ idx ] [ 2 ] alt = latlon_track [ idx ] [ 3 ] latlon_list . append ( [ lat , lon , alt ] ) start_position = latlon_list [ 0 ] max_distance = 0 for pos...
Returns the radius of an entire GPS track. Used to calculate whether or not the entire sequence was just stationary video. Takes a sequence of points as input.
12,751
def get_total_distance_traveled ( latlon_track ) : latlon_list = [ ] for idx , point in enumerate ( latlon_track ) : lat = latlon_track [ idx ] [ 1 ] lon = latlon_track [ idx ] [ 2 ] alt = latlon_track [ idx ] [ 3 ] latlon_list . append ( [ lat , lon , alt ] ) total_distance = 0 last_position = latlon_list [ 0 ] for po...
Returns the total distance traveled of a GPS track. Used to calculate whether or not the entire sequence was just stationary video. Takes a sequence of points as input.
12,752
def dms_to_decimal(degrees, minutes, seconds, hemisphere):
    """Convert from degrees, minutes, seconds to decimal degrees.

    The result is negated for the western ('W'/'w') and southern
    ('S'/'s') hemispheres.
    """
    decimal = float(degrees) + float(minutes) / 60 + float(seconds) / 3600
    # Membership in an explicit tuple: the previous substring test
    # `hemisphere in "WwSs"` wrongly matched the empty string and
    # multi-character strings such as "wS".
    if hemisphere in ('W', 'w', 'S', 's'):
        decimal = -decimal
    return decimal
Convert from degrees minutes seconds to decimal degrees .
12,753
def decimal_to_dms(value, precision):
    """Convert a decimal position to degrees/minutes/seconds rationals
    in the format supported by EXIF:
    ((degrees, 1), (minutes, 1), (seconds, precision)).
    """
    # Renamed locals: the originals shadowed the builtins `min`.
    degrees = math.floor(value)
    minutes = math.floor((value - degrees) * 60)
    seconds = math.floor((value - degrees - minutes / 60) * 3600 * precision)
    return (degrees, 1), (minutes, 1), (seconds, precision)
Convert decimal position to degrees, minutes, seconds in a format supported by EXIF.
12,754
def compute_bearing ( start_lat , start_lon , end_lat , end_lon ) : start_lat = math . radians ( start_lat ) start_lon = math . radians ( start_lon ) end_lat = math . radians ( end_lat ) end_lon = math . radians ( end_lon ) dLong = end_lon - start_lon dPhi = math . log ( math . tan ( end_lat / 2.0 + math . pi / 4.0 ) /...
Get the compass bearing from start to end .
12,755
def diff_bearing(b1, b2):
    '''Compute the smallest difference between two bearings in degrees.'''
    delta = abs(b2 - b1)
    if delta > 180:
        delta = 360 - delta
    return delta
Compute difference between two bearings
12,756
def normalize_bearing(bearing, check_hex=False):
    '''Normalize a bearing to [0, 360), optionally repairing a
    bit-flipped reading when check_hex is set and the value exceeds 360.
    '''
    if bearing > 360 and check_hex:
        # Invert every bit of the integer reading, then negate.
        bits = bin(int(bearing))[2:]
        flipped = ''.join('1' if bit == '0' else '0' for bit in bits)
        bearing = -float(int(flipped, 2))
    return bearing % 360
Normalize bearing and convert from hex if requested.
12,757
def interpolate_lat_lon ( points , t , max_dt = 1 ) : if ( t <= points [ 0 ] [ 0 ] ) or ( t >= points [ - 1 ] [ 0 ] ) : if t <= points [ 0 ] [ 0 ] : dt = abs ( ( points [ 0 ] [ 0 ] - t ) . total_seconds ( ) ) else : dt = ( t - points [ - 1 ] [ 0 ] ) . total_seconds ( ) if dt > max_dt : raise ValueError ( "time t not in...
Return interpolated lat lon and compass bearing for time t .
12,758
def add_image_description(self, dict):
    '''Add a dict to the image description tag (JSON-encoded).

    No-op when no EXIF structure is loaded. (NOTE: the parameter name
    `dict` shadows the builtin but is kept for interface compatibility.)
    '''
    if self._ef is None:
        return
    self._ef['0th'][piexif.ImageIFD.ImageDescription] = json.dumps(dict)
Add a dict to image description .
12,759
def add_orientation(self, orientation):
    '''Add image orientation to the EXIF data.

    Out-of-range values are replaced with the default 1 and reported.
    '''
    if orientation in range(1, 9):
        self._ef['0th'][piexif.ImageIFD.Orientation] = orientation
    else:
        print_error("Error value for orientation, value must be in range(1,9), setting to default 1")
        self._ef['0th'][piexif.ImageIFD.Orientation] = 1
Add image orientation to image .
12,760
def add_date_time_original ( self , date_time , time_format = '%Y:%m:%d %H:%M:%S.%f' ) : try : DateTimeOriginal = date_time . strftime ( time_format ) [ : - 3 ] self . _ef [ 'Exif' ] [ piexif . ExifIFD . DateTimeOriginal ] = DateTimeOriginal except Exception as e : print_error ( "Error writing DateTimeOriginal, due to ...
Add date time original .
12,761
def add_image_history(self, data):
    '''Add an arbitrary JSON-encoded value to the ImageHistory tag.'''
    serialized = json.dumps(data)
    self._ef['0th'][piexif.ImageIFD.ImageHistory] = serialized
Add arbitrary string to ImageHistory tag .
12,762
def add_camera_make_model(self, make, model):
    '''Add camera make and model to the 0th IFD.'''
    zeroth_ifd = self._ef['0th']
    zeroth_ifd[piexif.ImageIFD.Make] = make
    zeroth_ifd[piexif.ImageIFD.Model] = model
Add camera make and model .
12,763
def add_direction(self, direction, ref="T", precision=100):
    '''Add image direction (normalized to [0, 360)) as a GPS rational.'''
    normalized = direction % 360.0
    gps_ifd = self._ef["GPS"]
    gps_ifd[piexif.GPSIFD.GPSImgDirection] = (
        int(abs(normalized) * precision), precision)
    gps_ifd[piexif.GPSIFD.GPSImgDirectionRef] = ref
Add image direction .
12,764
def write ( self , filename = None ) : if filename is None : filename = self . _filename exif_bytes = piexif . dump ( self . _ef ) with open ( self . _filename , "rb" ) as fin : img = fin . read ( ) try : piexif . insert ( exif_bytes , img , filename ) except IOError : type , value , traceback = sys . exc_info ( ) prin...
Save exif data to file .
12,765
def estimate_sub_second_time ( files , interval = 0.0 ) : if interval <= 0.0 : return [ exif_time ( f ) for f in tqdm ( files , desc = "Reading image capture time" ) ] onesecond = datetime . timedelta ( seconds = 1.0 ) T = datetime . timedelta ( seconds = interval ) for i , f in tqdm ( enumerate ( files ) , desc = "Est...
Estimate the capture time of a sequence with sub - second precision EXIF times are only given up to a second of precision . This function uses the given interval between shots to estimate the time inside that second that each picture was taken .
12,766
def interpolate_timestamp ( capture_times ) : timestamps = [ ] num_file = len ( capture_times ) time_dict = OrderedDict ( ) if num_file < 2 : return capture_times time_dict = OrderedDict ( ) for i , t in enumerate ( capture_times ) : if t not in time_dict : time_dict [ t ] = { "count" : 0 , "pointer" : 0 } interval = 0...
Interpolate time stamps in case of identical timestamps
12,767
def extract_stream(source, dest, stream_id):
    '''Get the data of one stream out of the file using ffmpeg.

    Raises IOError when the source file does not exist.
    '''
    if not os.path.isfile(source):
        raise IOError('No such file: ' + source)
    command = [
        'ffmpeg',
        '-i', source,
        '-y',
        '-nostats',
        '-loglevel', '0',
        '-codec', 'copy',
        '-map', '0:' + str(stream_id),
        '-f', 'rawvideo',
        dest,
    ]
    subprocess.check_output(command)
Get the data out of the file using ffmpeg
12,768
def verify_exif(filename):
    '''Check that the image file has the required EXIF fields.

    Incompatible files will be ignored server side.
    '''
    required = required_fields()
    return ExifRead(filename).fields_exist(required)
Check that image file has the required EXIF fields . Incompatible files will be ignored server side .
12,769
def verify_mapillary_tag(filepath):
    '''Check that the image file has the required Mapillary tag.

    Prefers the preserved original copy when one exists.
    '''
    original_copy = processing.processed_images_rootpath(filepath)
    if os.path.isfile(original_copy):
        filepath = original_copy
    return ExifRead(filepath).mapillary_tag_exists()
Check that image file has the required Mapillary tag
12,770
def isAudio(self):
    '''Is this stream labelled as an audio stream?'''
    codec_type = self.__dict__['codec_type']
    return bool(codec_type) and str(codec_type) == 'audio'
Is this stream labelled as an audio stream?
12,771
def isVideo(self):
    '''Is the stream labelled as a video stream?'''
    if not self.__dict__['codec_type']:
        return False
    return self.codec_type == 'video'
Is the stream labelled as a video stream .
12,772
def isSubtitle(self):
    '''Is the stream labelled as a subtitle stream?'''
    if not self.__dict__['codec_type']:
        return False
    return str(self.codec_type) == 'subtitle'
Is the stream labelled as a subtitle stream .
12,773
def frames(self):
    """Return the length of a video/audio stream in frames.

    Returns 0 if the stream is not audio/video or the frame count is
    missing or non-numeric.
    """
    count = 0
    if self.isVideo() or self.isAudio():
        if self.__dict__['nb_frames']:
            try:
                count = int(self.__dict__['nb_frames'])
            except Exception:
                # `print "..."` was Python-2-only syntax; the call form
                # works on both Python 2 and 3.
                print("None integer frame count")
    return count
Returns the length of a video stream in frames . Returns 0 if not a video stream .
12,774
def durationSeconds(self):
    """Return the runtime duration of the stream as float seconds.

    Returns 0.0 if the stream is not audio/video or the duration is
    missing or non-numeric.
    """
    duration = 0.0
    if self.isVideo() or self.isAudio():
        if self.__dict__['duration']:
            try:
                duration = float(self.__dict__['duration'])
            except Exception:
                # `print "..."` was Python-2-only syntax; the call form
                # works on both Python 2 and 3.
                print("None numeric duration")
    return duration
Returns the runtime duration of the video stream as a floating point number of seconds . Returns 0 . 0 if not a video stream .
12,775
def bitrate(self):
    """Return the stream bitrate as an integer in bits per second.

    Returns 0 when the bit_rate field is missing or non-numeric.
    """
    rate = 0
    if self.__dict__['bit_rate']:
        try:
            rate = int(self.__dict__['bit_rate'])
        except Exception:
            # `print "..."` was Python-2-only syntax; the call form
            # works on both Python 2 and 3.
            print("None integer bitrate")
    return rate
Returns bitrate as an integer in bps
12,776
def get_upload_url ( credentials ) : request_url = "https://a.mapillary.com/v3/users/{}/upload_secrets?client_id={}" . format ( credentials [ "MAPSettingsUserKey" ] , CLIENT_ID ) request = urllib2 . Request ( request_url ) request . add_header ( 'Authorization' , 'Bearer {}' . format ( credentials [ "user_upload_token"...
Returns upload URL using new upload API
12,777
def get_upload_token(mail, pwd):
    """Get an upload token for the given credentials.

    Returns the token string, or None when the login request fails or
    the response contains no token.
    """
    try:
        params = urllib.urlencode({"email": mail, "password": pwd})
        response = urllib.urlopen(LOGIN_URL, params)
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to ordinary errors.
        return None
    resp = json.loads(response.read())
    if not resp or 'token' not in resp:
        return None
    return resp['token']
Get upload token
12,778
def authenticate_with_email_and_pwd ( user_email , user_password ) : if user_email is None or user_password is None : raise ValueError ( 'Could not authenticate user. Missing username or password' ) upload_token = uploader . get_upload_token ( user_email , user_password ) if not upload_token : print ( "Authentication f...
Authenticate the user by passing the email and password . This function avoids prompting the command line for user credentials and is useful for calling tools programmatically
12,779
def upload_file ( filepath , max_attempts , url , permission , signature , key = None , aws_key = None ) : if max_attempts == None : max_attempts = MAX_ATTEMPTS filename = os . path . basename ( filepath ) s3_filename = filename try : s3_filename = ExifRead ( filepath ) . exif_name ( ) except : pass filepath_keep_origi...
Upload file at filepath .
12,780
def murmur3_32 ( data , seed = 0 ) : c1 = 0xcc9e2d51 c2 = 0x1b873593 length = len ( data ) h1 = seed roundedEnd = ( length & 0xfffffffc ) for i in range ( 0 , roundedEnd , 4 ) : k1 = ( ord ( data [ i ] ) & 0xff ) | ( ( ord ( data [ i + 1 ] ) & 0xff ) << 8 ) | ( ( ord ( data [ i + 2 ] ) & 0xff ) << 16 ) | ( ord ( data [...
MurmurHash3 was written by Austin Appleby and is placed in the public domain . The author hereby disclaims copyright to this source code .
12,781
def _readline ( sock , buf ) : chunks = [ ] last_char = b'' while True : if last_char == b'\r' and buf [ 0 : 1 ] == b'\n' : chunks [ - 1 ] = chunks [ - 1 ] [ : - 1 ] return buf [ 1 : ] , b'' . join ( chunks ) elif buf . find ( b'\r\n' ) != - 1 : before , sep , after = buf . partition ( b"\r\n" ) chunks . append ( befor...
Read line of text from the socket .
12,782
def _readvalue ( sock , buf , size ) : chunks = [ ] rlen = size + 2 while rlen - len ( buf ) > 0 : if buf : rlen -= len ( buf ) chunks . append ( buf ) buf = _recv ( sock , RECV_SIZE ) if not buf : raise MemcacheUnexpectedCloseError ( ) if rlen == 1 : chunks [ - 1 ] = chunks [ - 1 ] [ : - 1 ] else : chunks . append ( b...
Read specified amount of bytes from the socket .
12,783
def close(self):
    '''Close the connection to memcached if it is open.

    The next call to a method that requires a connection will re-open
    it. Errors raised while closing the socket are ignored.
    '''
    if self.sock is None:
        return
    try:
        self.sock.close()
    except Exception:
        pass
    finally:
        self.sock = None
Close the connection to memcached if it is open . The next call to a method that requires a connection will re - open it .
12,784
def set(self, key, value, expire=0, noreply=None):
    '''The memcached set command.'''
    effective_noreply = self.default_noreply if noreply is None else noreply
    results = self._store_cmd(b'set', {key: value}, expire, effective_noreply)
    return results[key]
The memcached set command .
12,785
def set_many(self, values, expire=0, noreply=None):
    '''A convenience function for setting multiple values.

    Returns the list of keys for which the store command reported a
    falsy result.
    '''
    effective_noreply = self.default_noreply if noreply is None else noreply
    result = self._store_cmd(b'set', values, expire, effective_noreply)
    return [key for key, ok in six.iteritems(result) if not ok]
A convenience function for setting multiple values .
12,786
def add(self, key, value, expire=0, noreply=None):
    '''The memcached add command.'''
    effective_noreply = self.default_noreply if noreply is None else noreply
    stored = self._store_cmd(b'add', {key: value}, expire, effective_noreply)
    return stored[key]
The memcached add command .
12,787
def replace(self, key, value, expire=0, noreply=None):
    '''The memcached replace command.'''
    if noreply is None:
        noreply = self.default_noreply
    outcome = self._store_cmd(b'replace', {key: value}, expire, noreply)
    return outcome[key]
The memcached replace command .
12,788
def append(self, key, value, expire=0, noreply=None):
    '''The memcached append command.'''
    effective_noreply = self.default_noreply if noreply is None else noreply
    outcome = self._store_cmd(b'append', {key: value}, expire, effective_noreply)
    return outcome[key]
The memcached append command .
12,789
def prepend(self, key, value, expire=0, noreply=None):
    '''The memcached prepend command.'''
    if noreply is None:
        noreply = self.default_noreply
    outcome = self._store_cmd(b'prepend', {key: value}, expire, noreply)
    return outcome[key]
The memcached prepend command .
12,790
def cas(self, key, value, cas, expire=0, noreply=False):
    '''The memcached cas (check-and-set) command.'''
    results = self._store_cmd(b'cas', {key: value}, expire, noreply, cas)
    return results[key]
The memcached cas command .
12,791
def get(self, key, default=None):
    '''The memcached get command, but only for one key, as a convenience.'''
    fetched = self._fetch_cmd(b'get', [key], False)
    return fetched.get(key, default)
The memcached get command but only for one key as a convenience .
12,792
def gets(self, key, default=None, cas_default=None):
    '''The memcached gets command for one key, as a convenience.

    Returns a (value, cas) pair, falling back to the given defaults.
    '''
    fallback = (default, cas_default)
    return self._fetch_cmd(b'gets', [key], True).get(key, fallback)
The memcached gets command for one key as a convenience .
12,793
def delete(self, key, noreply=None):
    '''The memcached delete command.'''
    effective_noreply = self.default_noreply if noreply is None else noreply
    suffix = b' noreply' if effective_noreply else b''
    cmd = b'delete ' + self.check_key(key) + suffix + b'\r\n'
    results = self._misc_cmd([cmd], b'delete', effective_noreply)
    if effective_noreply:
        return True
    return results[0] == b'DELETED'
The memcached delete command .
12,794
def delete_many(self, keys, noreply=None):
    '''A convenience function to delete multiple keys.

    Always returns True; individual results are not collected.
    '''
    if not keys:
        return True
    effective_noreply = self.default_noreply if noreply is None else noreply
    suffix = b' noreply' if effective_noreply else b''
    commands = [b'delete ' + self.check_key(key) + suffix + b'\r\n'
                for key in keys]
    self._misc_cmd(commands, b'delete', effective_noreply)
    return True
A convenience function to delete multiple keys .
12,795
def incr ( self , key , value , noreply = False ) : key = self . check_key ( key ) cmd = b'incr ' + key + b' ' + six . text_type ( value ) . encode ( 'ascii' ) if noreply : cmd += b' noreply' cmd += b'\r\n' results = self . _misc_cmd ( [ cmd ] , b'incr' , noreply ) if noreply : return None if results [ 0 ] == b'NOT_FOU...
The memcached incr command .
12,796
def decr ( self , key , value , noreply = False ) : key = self . check_key ( key ) cmd = b'decr ' + key + b' ' + six . text_type ( value ) . encode ( 'ascii' ) if noreply : cmd += b' noreply' cmd += b'\r\n' results = self . _misc_cmd ( [ cmd ] , b'decr' , noreply ) if noreply : return None if results [ 0 ] == b'NOT_FOU...
The memcached decr command .
12,797
def touch ( self , key , expire = 0 , noreply = None ) : if noreply is None : noreply = self . default_noreply key = self . check_key ( key ) cmd = b'touch ' + key + b' ' + six . text_type ( expire ) . encode ( 'ascii' ) if noreply : cmd += b' noreply' cmd += b'\r\n' results = self . _misc_cmd ( [ cmd ] , b'touch' , no...
The memcached touch command .
12,798
def stats(self, *args):
    '''The memcached stats command.

    Values are converted with the per-key converter from STAT_TYPES
    (default int); unconvertible values are left as-is.
    '''
    result = self._fetch_cmd(b'stats', args, False)
    for key, value in six.iteritems(result):
        cast = STAT_TYPES.get(key, int)
        try:
            result[key] = cast(value)
        except Exception:
            # Keep the raw value when conversion fails.
            pass
    return result
The memcached stats command .
12,799
def cache_memlimit(self, memlimit):
    '''The memcached cache_memlimit command.'''
    args = [str(int(memlimit))]
    self._fetch_cmd(b'cache_memlimit', args, False)
    return True
The memcached cache_memlimit command .