idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
12,700 | def handle_pre_response ( self , item_session : ItemSession ) -> Actions : action = self . consult_pre_response_hook ( item_session ) if action == Actions . RETRY : item_session . set_status ( Status . skipped ) elif action == Actions . FINISH : item_session . set_status ( Status . done ) elif action == Actions . STOP : raise HookStop ( 'Script requested immediate stop.' ) return action | Process a response that is starting . |
12,701 | def handle_document ( self , item_session : ItemSession , filename : str ) -> Actions : self . _waiter . reset ( ) action = self . handle_response ( item_session ) if action == Actions . NORMAL : self . _statistics . increment ( item_session . response . body . size ( ) ) item_session . set_status ( Status . done , filename = filename ) return action | Process a successful document response . |
12,702 | def handle_no_document ( self , item_session : ItemSession ) -> Actions : self . _waiter . reset ( ) action = self . handle_response ( item_session ) if action == Actions . NORMAL : item_session . set_status ( Status . skipped ) return action | Callback for successful responses containing no useful document . |
12,703 | def handle_intermediate_response ( self , item_session : ItemSession ) -> Actions : self . _waiter . reset ( ) action = self . handle_response ( item_session ) return action | Callback for successful intermediate responses . |
12,704 | def handle_document_error ( self , item_session : ItemSession ) -> Actions : self . _waiter . increment ( ) self . _statistics . errors [ ServerError ] += 1 action = self . handle_response ( item_session ) if action == Actions . NORMAL : item_session . set_status ( Status . error ) return action | Callback for when the document only describes an server error . |
12,705 | def handle_response ( self , item_session : ItemSession ) -> Actions : action = self . consult_response_hook ( item_session ) if action == Actions . RETRY : item_session . set_status ( Status . error ) elif action == Actions . FINISH : item_session . set_status ( Status . done ) elif action == Actions . STOP : raise HookStop ( 'Script requested immediate stop.' ) return action | Generic handler for a response . |
12,706 | def handle_error ( self , item_session : ItemSession , error : BaseException ) -> Actions : if not self . _ssl_verification and isinstance ( error , SSLVerificationError ) : self . _statistics . increment_error ( ProtocolError ( ) ) else : self . _statistics . increment_error ( error ) self . _waiter . increment ( ) action = self . consult_error_hook ( item_session , error ) if action == Actions . RETRY : item_session . set_status ( Status . error ) elif action == Actions . FINISH : item_session . set_status ( Status . done ) elif action == Actions . STOP : raise HookStop ( 'Script requested immediate stop.' ) elif self . _ssl_verification and isinstance ( error , SSLVerificationError ) : raise elif isinstance ( error , ConnectionRefused ) and not self . retry_connrefused : item_session . set_status ( Status . skipped ) elif isinstance ( error , DNSNotFound ) and not self . retry_dns_error : item_session . set_status ( Status . skipped ) else : item_session . set_status ( Status . error ) return action | Process an error . |
12,707 | def get_wait_time ( self , item_session : ItemSession , error = None ) : seconds = self . _waiter . get ( ) try : return self . hook_dispatcher . call ( PluginFunctions . wait_time , seconds , item_session , error ) except HookDisconnected : return seconds | Return the wait time in seconds between requests . |
12,708 | def plugin_wait_time ( seconds : float , item_session : ItemSession , error : Optional [ Exception ] = None ) -> float : return seconds | Return the wait time between requests . |
12,709 | def consult_pre_response_hook ( self , item_session : ItemSession ) -> Actions : try : return self . hook_dispatcher . call ( PluginFunctions . handle_pre_response , item_session ) except HookDisconnected : return Actions . NORMAL | Return scripting action when a response begins . |
12,710 | def consult_response_hook ( self , item_session : ItemSession ) -> Actions : try : return self . hook_dispatcher . call ( PluginFunctions . handle_response , item_session ) except HookDisconnected : return Actions . NORMAL | Return scripting action when a response ends . |
12,711 | def consult_error_hook ( self , item_session : ItemSession , error : BaseException ) : try : return self . hook_dispatcher . call ( PluginFunctions . handle_error , item_session , error ) except HookDisconnected : return Actions . NORMAL | Return scripting action when an error occured . |
12,712 | def add_extra_urls ( self , item_session : ItemSession ) : if item_session . url_record . level == 0 and self . _sitemaps : extra_url_infos = ( self . parse_url ( '{0}://{1}/robots.txt' . format ( item_session . url_record . url_info . scheme , item_session . url_record . url_info . hostname_with_port ) ) , self . parse_url ( '{0}://{1}/sitemap.xml' . format ( item_session . url_record . url_info . scheme , item_session . url_record . url_info . hostname_with_port ) ) ) for url_info in extra_url_infos : item_session . add_child_url ( url_info . url ) | Add additional URLs such as robots . txt favicon . ico . |
12,713 | def scrape_document ( self , item_session : ItemSession ) : self . event_dispatcher . notify ( PluginFunctions . get_urls , item_session ) if not self . _document_scraper : return demux_info = self . _document_scraper . scrape_info ( item_session . request , item_session . response , item_session . url_record . link_type ) num_inline_urls = 0 num_linked_urls = 0 for scraper , scrape_result in demux_info . items ( ) : new_inline , new_linked = self . _process_scrape_info ( scraper , scrape_result , item_session ) num_inline_urls += new_inline num_linked_urls += new_linked _logger . debug ( 'Candidate URLs: inline={0} linked={1}' , num_inline_urls , num_linked_urls ) | Process document for links . |
12,714 | def _process_scrape_info ( self , scraper : BaseScraper , scrape_result : ScrapeResult , item_session : ItemSession ) : if not scrape_result : return 0 , 0 num_inline = 0 num_linked = 0 for link_context in scrape_result . link_contexts : url_info = self . parse_url ( link_context . link ) if not url_info : continue url_info = self . rewrite_url ( url_info ) child_url_record = item_session . child_url_record ( url_info . url , inline = link_context . inline ) if not self . _fetch_rule . consult_filters ( item_session . request . url_info , child_url_record ) [ 0 ] : continue if link_context . inline : num_inline += 1 else : num_linked += 1 item_session . add_child_url ( url_info . url , inline = link_context . inline , link_type = link_context . link_type ) return num_inline , num_linked | Collect the URLs from the scrape info dict . |
12,715 | def rewrite_url ( self , url_info : URLInfo ) -> URLInfo : if self . _url_rewriter : return self . _url_rewriter . rewrite ( url_info ) else : return url_info | Return a rewritten URL such as escaped fragment . |
12,716 | def run_add_system ( name , token , org , system , prompt ) : repo = get_repo ( token = token , org = org , name = name ) try : repo . create_label ( name = system . strip ( ) , color = SYSTEM_LABEL_COLOR ) click . secho ( "Successfully added new system {}" . format ( system ) , fg = "green" ) if prompt and click . confirm ( "Run update to re-generate the page?" ) : run_update ( name = name , token = token , org = org ) except GithubException as e : if e . status == 422 : click . secho ( "Unable to add new system {}, it already exists." . format ( system ) , fg = "yellow" ) return raise | Adds a new system to the repo . |
12,717 | def run_remove_system ( name , token , org , system , prompt ) : repo = get_repo ( token = token , org = org , name = name ) try : label = repo . get_label ( name = system . strip ( ) ) label . delete ( ) click . secho ( "Successfully deleted {}" . format ( system ) , fg = "green" ) if prompt and click . confirm ( "Run update to re-generate the page?" ) : run_update ( name = name , token = token , org = org ) except UnknownObjectException : click . secho ( "Unable to remove system {}, it does not exist." . format ( system ) , fg = "yellow" ) | Removes a system from the repo . |
12,718 | def get_config ( repo ) : files = get_files ( repo ) config = DEFAULT_CONFIG if "config.json" in files : config_file = repo . get_file_contents ( '/config.json' , ref = "gh-pages" ) try : repo_config = json . loads ( config_file . decoded_content . decode ( "utf-8" ) ) config . update ( repo_config ) except ValueError : click . secho ( "WARNING: Unable to parse config file. Using defaults." , fg = "yellow" ) return config | Get the config for the repo merged with the default config . Returns the default config if no config file is found . |
12,719 | def dispatch ( self , * args , ** kwargs ) : if not self . registration_allowed ( ) : return HttpResponseRedirect ( force_text ( self . disallowed_url ) ) return super ( RegistrationView , self ) . dispatch ( * args , ** kwargs ) | Check that user signup is allowed before even bothering to dispatch or do other processing . |
12,720 | def get_email_context ( self , activation_key ) : scheme = 'https' if self . request . is_secure ( ) else 'http' return { 'scheme' : scheme , 'activation_key' : activation_key , 'expiration_days' : settings . ACCOUNT_ACTIVATION_DAYS , 'site' : get_current_site ( self . request ) } | Build the template context used for the activation email . |
12,721 | def validate_key ( self , activation_key ) : try : username = signing . loads ( activation_key , salt = REGISTRATION_SALT , max_age = settings . ACCOUNT_ACTIVATION_DAYS * 86400 ) return username except signing . SignatureExpired : raise ActivationError ( self . EXPIRED_MESSAGE , code = 'expired' ) except signing . BadSignature : raise ActivationError ( self . INVALID_KEY_MESSAGE , code = 'invalid_key' , params = { 'activation_key' : activation_key } ) | Verify that the activation key is valid and within the permitted activation time window returning the username if valid or raising ActivationError if not . |
12,722 | def get_user ( self , username ) : User = get_user_model ( ) try : user = User . objects . get ( ** { User . USERNAME_FIELD : username , } ) if user . is_active : raise ActivationError ( self . ALREADY_ACTIVATED_MESSAGE , code = 'already_activated' ) return user except User . DoesNotExist : raise ActivationError ( self . BAD_USERNAME_MESSAGE , code = 'bad_username' ) | Given the verified username look up and return the corresponding user account if it exists or raising ActivationError if it doesn t . |
12,723 | def validate_confusables ( value ) : if not isinstance ( value , six . text_type ) : return if confusables . is_dangerous ( value ) : raise ValidationError ( CONFUSABLE , code = 'invalid' ) | Validator which disallows dangerous usernames likely to represent homograph attacks . |
12,724 | def validate_confusables_email ( value ) : if '@' not in value : return local_part , domain = value . split ( '@' ) if confusables . is_dangerous ( local_part ) or confusables . is_dangerous ( domain ) : raise ValidationError ( CONFUSABLE_EMAIL , code = 'invalid' ) | Validator which disallows dangerous email addresses likely to represent homograph attacks . |
12,725 | def minify_js_files ( ) : for k , v in JS_FILE_MAPPING . items ( ) : input_files = " " . join ( v [ "input_files" ] ) output_file = v [ "output_file" ] uglifyjs_command = "uglifyjs {input_files} -o {output_file}" . format ( input_files = input_files , output_file = output_file ) local ( uglifyjs_command ) | This command minified js files with UglifyJS |
12,726 | def minify_css_files ( ) : for k , v in CSS_FILE_MAPPING . items ( ) : input_files = " " . join ( v [ "input_files" ] ) output_file = v [ "output_file" ] uglifyjs_command = "uglifycss {input_files} > {output_file}" . format ( input_files = input_files , output_file = output_file ) local ( uglifyjs_command ) | This command minified js files with UglifyCSS |
12,727 | def timestamp_with_timezone ( dt = None ) : dt = dt or datetime . now ( ) if timezone is None : return dt . strftime ( '%Y-%m-%d %H:%M%z' ) if not dt . tzinfo : tz = timezone . get_current_timezone ( ) if not tz : tz = timezone . utc dt = dt . replace ( tzinfo = timezone . get_current_timezone ( ) ) return dt . strftime ( "%Y-%m-%d %H:%M%z" ) | Return a timestamp with a timezone for the configured locale . If all else fails consider localtime to be UTC . |
12,728 | def get_access_control_function ( ) : fn_path = getattr ( settings , 'ROSETTA_ACCESS_CONTROL_FUNCTION' , None ) if fn_path is None : return is_superuser_staff_or_in_translators_group perm_module , perm_func = fn_path . rsplit ( '.' , 1 ) perm_module = importlib . import_module ( perm_module ) return getattr ( perm_module , perm_func ) | Return a predicate for determining if a user can access the Rosetta views |
12,729 | def fix_nls ( self , in_ , out_ ) : if 0 == len ( in_ ) or 0 == len ( out_ ) : return out_ if "\r" in out_ and "\r" not in in_ : out_ = out_ . replace ( "\r" , '' ) if "\n" == in_ [ 0 ] and "\n" != out_ [ 0 ] : out_ = "\n" + out_ elif "\n" != in_ [ 0 ] and "\n" == out_ [ 0 ] : out_ = out_ . lstrip ( ) if 0 == len ( out_ ) : pass elif "\n" == in_ [ - 1 ] and "\n" != out_ [ - 1 ] : out_ = out_ + "\n" elif "\n" != in_ [ - 1 ] and "\n" == out_ [ - 1 ] : out_ = out_ . rstrip ( ) return out_ | Fixes submitted translations by filtering carriage returns and pairing newlines at the begging and end of the translated string with the original |
12,730 | def ref_lang_po_file ( self ) : ref_pofile = None if rosetta_settings . ENABLE_REFLANG and self . ref_lang != 'msgid' : replacement = '{separator}locale{separator}{ref_lang}' . format ( separator = os . sep , ref_lang = self . ref_lang ) pattern = '\{separator}locale\{separator}[a-z]{{2}}' . format ( separator = os . sep ) ref_fn = re . sub ( pattern , replacement , self . po_file_path , ) try : ref_pofile = pofile ( ref_fn ) except IOError : pass return ref_pofile | Return a parsed . po file object for the reference language if one exists otherwise None . |
12,731 | def convert_from_gps_time ( gps_time , gps_week = None ) : converted_gps_time = None gps_timestamp = float ( gps_time ) if gps_week != None : converted_gps_time = GPS_START + datetime . timedelta ( seconds = int ( gps_week ) * SECS_IN_WEEK + gps_timestamp ) else : os . environ [ 'TZ' ] = 'right/UTC' gps_time_as_gps = GPS_START + datetime . timedelta ( seconds = gps_timestamp ) gps_time_as_tai = gps_time_as_gps + datetime . timedelta ( seconds = 19 ) tai_epoch_as_tai = datetime . datetime ( 1970 , 1 , 1 , 0 , 0 , 10 ) tai_timestamp = ( gps_time_as_tai - tai_epoch_as_tai ) . total_seconds ( ) converted_gps_time = ( datetime . datetime . utcfromtimestamp ( tai_timestamp ) ) return converted_gps_time | Convert gps time in ticks to standard time . |
12,732 | def get_video_duration ( video_file ) : try : return float ( FFProbe ( video_file ) . video [ 0 ] . duration ) except Exception as e : print ( "could not extract duration from video {} due to {}" . format ( video_file , e ) ) return None | Get video duration in seconds |
12,733 | def get_video_end_time ( video_file ) : if not os . path . isfile ( video_file ) : print ( "Error, video file {} does not exist" . format ( video_file ) ) return None try : time_string = FFProbe ( video_file ) . video [ 0 ] . creation_time try : creation_time = datetime . datetime . strptime ( time_string , TIME_FORMAT ) except : creation_time = datetime . datetime . strptime ( time_string , TIME_FORMAT_2 ) except : return None return creation_time | Get video end time in seconds |
12,734 | def get_video_start_time ( video_file ) : if not os . path . isfile ( video_file ) : print ( "Error, video file {} does not exist" . format ( video_file ) ) return None video_end_time = get_video_end_time ( video_file ) duration = get_video_duration ( video_file ) if video_end_time == None or duration == None : return None else : video_start_time = ( video_end_time - datetime . timedelta ( seconds = duration ) ) return video_start_time | Get start time in seconds |
12,735 | def _extract_alternative_fields ( self , fields , default = None , field_type = float ) : for field in fields : if field in self . tags : if field_type is float : value = eval_frac ( self . tags [ field ] . values [ 0 ] ) if field_type is str : value = str ( self . tags [ field ] . values ) if field_type is int : value = int ( self . tags [ field ] . values [ 0 ] ) return value , field return default , None | Extract a value for a list of ordered fields . Return the value of the first existed field in the list |
12,736 | def extract_geo ( self ) : altitude = self . extract_altitude ( ) dop = self . extract_dop ( ) lon , lat = self . extract_lon_lat ( ) d = { } if lon is not None and lat is not None : d [ 'latitude' ] = lat d [ 'longitude' ] = lon if altitude is not None : d [ 'altitude' ] = altitude if dop is not None : d [ 'dop' ] = dop return d | Extract geo - related information from exif |
12,737 | def extract_gps_time ( self ) : gps_date_field = "GPS GPSDate" gps_time_field = "GPS GPSTimeStamp" gps_time = 0 if gps_date_field in self . tags and gps_time_field in self . tags : date = str ( self . tags [ gps_date_field ] . values ) . split ( ":" ) if int ( date [ 0 ] ) == 0 or int ( date [ 1 ] ) == 0 or int ( date [ 2 ] ) == 0 : return None t = self . tags [ gps_time_field ] gps_time = datetime . datetime ( year = int ( date [ 0 ] ) , month = int ( date [ 1 ] ) , day = int ( date [ 2 ] ) , hour = int ( eval_frac ( t . values [ 0 ] ) ) , minute = int ( eval_frac ( t . values [ 1 ] ) ) , second = int ( eval_frac ( t . values [ 2 ] ) ) , ) microseconds = datetime . timedelta ( microseconds = int ( ( eval_frac ( t . values [ 2 ] ) % 1 ) * 1e6 ) ) gps_time += microseconds return gps_time | Extract timestamp from GPS field . |
12,738 | def extract_exif ( self ) : width , height = self . extract_image_size ( ) make , model = self . extract_make ( ) , self . extract_model ( ) orientation = self . extract_orientation ( ) geo = self . extract_geo ( ) capture = self . extract_capture_time ( ) direction = self . extract_direction ( ) d = { 'width' : width , 'height' : height , 'orientation' : orientation , 'direction' : direction , 'make' : make , 'model' : model , 'capture_time' : capture } d [ 'gps' ] = geo return d | Extract a list of exif infos |
12,739 | def extract_image_size ( self ) : width , _ = self . _extract_alternative_fields ( [ 'Image ImageWidth' , 'EXIF ExifImageWidth' ] , - 1 , int ) height , _ = self . _extract_alternative_fields ( [ 'Image ImageLength' , 'EXIF ExifImageLength' ] , - 1 , int ) return width , height | Extract image height and width |
12,740 | def extract_make ( self ) : fields = [ 'EXIF LensMake' , 'Image Make' ] make , _ = self . _extract_alternative_fields ( fields , default = 'none' , field_type = str ) return make | Extract camera make |
12,741 | def extract_model ( self ) : fields = [ 'EXIF LensModel' , 'Image Model' ] model , _ = self . _extract_alternative_fields ( fields , default = 'none' , field_type = str ) return model | Extract camera model |
12,742 | def extract_orientation ( self ) : fields = [ 'Image Orientation' ] orientation , _ = self . _extract_alternative_fields ( fields , default = 1 , field_type = int ) if orientation not in range ( 1 , 9 ) : return 1 return orientation | Extract image orientation |
12,743 | def fields_exist ( self , fields ) : for rexif in fields : vflag = False for subrexif in rexif : if subrexif in self . tags : vflag = True if not vflag : print ( "Missing required EXIF tag: {0} for image {1}" . format ( rexif [ 0 ] , self . filename ) ) return False return True | Check existence of a list fields in exif |
12,744 | def mapillary_tag_exists ( self ) : description_tag = "Image ImageDescription" if description_tag not in self . tags : return False for requirement in [ "MAPSequenceUUID" , "MAPSettingsUserKey" , "MAPCaptureTime" , "MAPLongitude" , "MAPLatitude" ] : if requirement not in self . tags [ description_tag ] . values or json . loads ( self . tags [ description_tag ] . values ) [ requirement ] in [ "" , None , " " ] : return False return True | Check existence of required Mapillary tags |
12,745 | def query_search_api ( min_lat , max_lat , min_lon , max_lon , max_results ) : params = urllib . urlencode ( zip ( [ 'client_id' , 'bbox' , 'per_page' ] , [ CLIENT_ID , ',' . join ( [ str ( min_lon ) , str ( min_lat ) , str ( max_lon ) , str ( max_lat ) ] ) , str ( max_results ) ] ) ) print ( MAPILLARY_API_IM_SEARCH_URL + params ) query = urllib2 . urlopen ( MAPILLARY_API_IM_SEARCH_URL + params ) . read ( ) query = json . loads ( query ) [ 'features' ] print ( "Result: {0} images in area." . format ( len ( query ) ) ) return query | Send query to the search API and get dict with image data . |
12,746 | def download_images ( query , path , size = 1024 ) : im_size = "thumb-{0}.jpg" . format ( size ) im_list = [ ] for im in query : key = im [ 'properties' ] [ 'key' ] url = MAPILLARY_API_IM_RETRIEVE_URL + key + '/' + im_size filename = key + ".jpg" try : image = urllib . URLopener ( ) image . retrieve ( url , path + filename ) coords = "," . join ( map ( str , im [ 'geometry' ] [ 'coordinates' ] ) ) im_list . append ( [ filename , coords ] ) print ( "Successfully downloaded: {0}" . format ( filename ) ) except KeyboardInterrupt : break except Exception as e : print ( "Failed to download: {} due to {}" . format ( filename , e ) ) return im_list | Download images in query result to path . |
12,747 | def get_lat_lon_time_from_gpx ( gpx_file , local_time = True ) : with open ( gpx_file , 'r' ) as f : gpx = gpxpy . parse ( f ) points = [ ] if len ( gpx . tracks ) > 0 : for track in gpx . tracks : for segment in track . segments : for point in segment . points : t = utc_to_localtime ( point . time ) if local_time else point . time points . append ( ( t , point . latitude , point . longitude , point . elevation ) ) if len ( gpx . waypoints ) > 0 : for point in gpx . waypoints : t = utc_to_localtime ( point . time ) if local_time else point . time points . append ( ( t , point . latitude , point . longitude , point . elevation ) ) points . sort ( ) return points | Read location and time stamps from a track in a GPX file . |
12,748 | def get_lat_lon_time_from_nmea ( nmea_file , local_time = True ) : with open ( nmea_file , "r" ) as f : lines = f . readlines ( ) lines = [ l . rstrip ( "\n\r" ) for l in lines ] for l in lines : if "GPRMC" in l : data = pynmea2 . parse ( l ) date = data . datetime . date ( ) break points = [ ] for l in lines : if "GPRMC" in l : data = pynmea2 . parse ( l ) date = data . datetime . date ( ) if "$GPGGA" in l : data = pynmea2 . parse ( l ) timestamp = datetime . datetime . combine ( date , data . timestamp ) lat , lon , alt = data . latitude , data . longitude , data . altitude points . append ( ( timestamp , lat , lon , alt ) ) points . sort ( ) return points | Read location and time stamps from a track in a NMEA file . |
12,749 | def ecef_from_lla ( lat , lon , alt ) : a2 = WGS84_a ** 2 b2 = WGS84_b ** 2 lat = math . radians ( lat ) lon = math . radians ( lon ) L = 1.0 / math . sqrt ( a2 * math . cos ( lat ) ** 2 + b2 * math . sin ( lat ) ** 2 ) x = ( a2 * L + alt ) * math . cos ( lat ) * math . cos ( lon ) y = ( a2 * L + alt ) * math . cos ( lat ) * math . sin ( lon ) z = ( b2 * L + alt ) * math . sin ( lat ) return x , y , z | Compute ECEF XYZ from latitude longitude and altitude . |
12,750 | def get_max_distance_from_start ( latlon_track ) : latlon_list = [ ] for idx , point in enumerate ( latlon_track ) : lat = latlon_track [ idx ] [ 1 ] lon = latlon_track [ idx ] [ 2 ] alt = latlon_track [ idx ] [ 3 ] latlon_list . append ( [ lat , lon , alt ] ) start_position = latlon_list [ 0 ] max_distance = 0 for position in latlon_list : distance = gps_distance ( start_position , position ) if distance > max_distance : max_distance = distance return max_distance | Returns the radius of an entire GPS track . Used to calculate whether or not the entire sequence was just stationary video Takes a sequence of points as input |
12,751 | def get_total_distance_traveled ( latlon_track ) : latlon_list = [ ] for idx , point in enumerate ( latlon_track ) : lat = latlon_track [ idx ] [ 1 ] lon = latlon_track [ idx ] [ 2 ] alt = latlon_track [ idx ] [ 3 ] latlon_list . append ( [ lat , lon , alt ] ) total_distance = 0 last_position = latlon_list [ 0 ] for position in latlon_list : total_distance += gps_distance ( last_position , position ) last_position = position return total_distance | Returns the total distance traveled of a GPS track . Used to calculate whether or not the entire sequence was just stationary video Takes a sequence of points as input |
12,752 | def dms_to_decimal ( degrees , minutes , seconds , hemisphere ) : dms = float ( degrees ) + float ( minutes ) / 60 + float ( seconds ) / 3600 if hemisphere in "WwSs" : dms = - 1 * dms return dms | Convert from degrees minutes seconds to decimal degrees . |
12,753 | def decimal_to_dms ( value , precision ) : deg = math . floor ( value ) min = math . floor ( ( value - deg ) * 60 ) sec = math . floor ( ( value - deg - min / 60 ) * 3600 * precision ) return ( ( deg , 1 ) , ( min , 1 ) , ( sec , precision ) ) | Convert decimal position to degrees minutes seconds in a fromat supported by EXIF |
12,754 | def compute_bearing ( start_lat , start_lon , end_lat , end_lon ) : start_lat = math . radians ( start_lat ) start_lon = math . radians ( start_lon ) end_lat = math . radians ( end_lat ) end_lon = math . radians ( end_lon ) dLong = end_lon - start_lon dPhi = math . log ( math . tan ( end_lat / 2.0 + math . pi / 4.0 ) / math . tan ( start_lat / 2.0 + math . pi / 4.0 ) ) if abs ( dLong ) > math . pi : if dLong > 0.0 : dLong = - ( 2.0 * math . pi - dLong ) else : dLong = ( 2.0 * math . pi + dLong ) y = math . sin ( dLong ) * math . cos ( end_lat ) x = math . cos ( start_lat ) * math . sin ( end_lat ) - math . sin ( start_lat ) * math . cos ( end_lat ) * math . cos ( dLong ) bearing = ( math . degrees ( math . atan2 ( y , x ) ) + 360.0 ) % 360.0 return bearing | Get the compass bearing from start to end . |
12,755 | def diff_bearing ( b1 , b2 ) : d = abs ( b2 - b1 ) d = 360 - d if d > 180 else d return d | Compute difference between two bearings |
12,756 | def normalize_bearing ( bearing , check_hex = False ) : if bearing > 360 and check_hex : bearing = bin ( int ( bearing ) ) [ 2 : ] bearing = '' . join ( [ str ( int ( int ( a ) == 0 ) ) for a in bearing ] ) bearing = - float ( int ( bearing , 2 ) ) bearing %= 360 return bearing | Normalize bearing and convert from hex if |
12,757 | def interpolate_lat_lon ( points , t , max_dt = 1 ) : if ( t <= points [ 0 ] [ 0 ] ) or ( t >= points [ - 1 ] [ 0 ] ) : if t <= points [ 0 ] [ 0 ] : dt = abs ( ( points [ 0 ] [ 0 ] - t ) . total_seconds ( ) ) else : dt = ( t - points [ - 1 ] [ 0 ] ) . total_seconds ( ) if dt > max_dt : raise ValueError ( "time t not in scope of gpx file by {} seconds" . format ( dt ) ) else : print ( "time t not in scope of gpx file by {} seconds, extrapolating..." . format ( dt ) ) if t < points [ 0 ] [ 0 ] : before = points [ 0 ] after = points [ 1 ] else : before = points [ - 2 ] after = points [ - 1 ] bearing = compute_bearing ( before [ 1 ] , before [ 2 ] , after [ 1 ] , after [ 2 ] ) if t == points [ 0 ] [ 0 ] : x = points [ 0 ] return ( x [ 1 ] , x [ 2 ] , bearing , x [ 3 ] ) if t == points [ - 1 ] [ 0 ] : x = points [ - 1 ] return ( x [ 1 ] , x [ 2 ] , bearing , x [ 3 ] ) else : for i , point in enumerate ( points ) : if t < point [ 0 ] : if i > 0 : before = points [ i - 1 ] else : before = points [ i ] after = points [ i ] break weight = ( t - before [ 0 ] ) . total_seconds ( ) / ( after [ 0 ] - before [ 0 ] ) . total_seconds ( ) if before [ 1 ] == after [ 1 ] : lat = before [ 1 ] else : lat = before [ 1 ] - weight * before [ 1 ] + weight * after [ 1 ] if before [ 2 ] == after [ 2 ] : lon = before [ 2 ] else : lon = before [ 2 ] - weight * before [ 2 ] + weight * after [ 2 ] bearing = compute_bearing ( before [ 1 ] , before [ 2 ] , after [ 1 ] , after [ 2 ] ) if before [ 3 ] is not None : ele = before [ 3 ] - weight * before [ 3 ] + weight * after [ 3 ] else : ele = None return lat , lon , bearing , ele | Return interpolated lat lon and compass bearing for time t . |
12,758 | def add_image_description ( self , dict ) : if self . _ef is not None : self . _ef [ '0th' ] [ piexif . ImageIFD . ImageDescription ] = json . dumps ( dict ) | Add a dict to image description . |
12,759 | def add_orientation ( self , orientation ) : if not orientation in range ( 1 , 9 ) : print_error ( "Error value for orientation, value must be in range(1,9), setting to default 1" ) self . _ef [ '0th' ] [ piexif . ImageIFD . Orientation ] = 1 else : self . _ef [ '0th' ] [ piexif . ImageIFD . Orientation ] = orientation | Add image orientation to image . |
12,760 | def add_date_time_original ( self , date_time , time_format = '%Y:%m:%d %H:%M:%S.%f' ) : try : DateTimeOriginal = date_time . strftime ( time_format ) [ : - 3 ] self . _ef [ 'Exif' ] [ piexif . ExifIFD . DateTimeOriginal ] = DateTimeOriginal except Exception as e : print_error ( "Error writing DateTimeOriginal, due to " + str ( e ) ) | Add date time original . |
12,761 | def add_image_history ( self , data ) : self . _ef [ '0th' ] [ piexif . ImageIFD . ImageHistory ] = json . dumps ( data ) | Add arbitrary string to ImageHistory tag . |
12,762 | def add_camera_make_model ( self , make , model ) : self . _ef [ '0th' ] [ piexif . ImageIFD . Make ] = make self . _ef [ '0th' ] [ piexif . ImageIFD . Model ] = model | Add camera make and model . |
12,763 | def add_direction ( self , direction , ref = "T" , precision = 100 ) : direction = direction % 360.0 self . _ef [ "GPS" ] [ piexif . GPSIFD . GPSImgDirection ] = ( int ( abs ( direction ) * precision ) , precision ) self . _ef [ "GPS" ] [ piexif . GPSIFD . GPSImgDirectionRef ] = ref | Add image direction . |
12,764 | def write ( self , filename = None ) : if filename is None : filename = self . _filename exif_bytes = piexif . dump ( self . _ef ) with open ( self . _filename , "rb" ) as fin : img = fin . read ( ) try : piexif . insert ( exif_bytes , img , filename ) except IOError : type , value , traceback = sys . exc_info ( ) print >> sys . stderr , "Error saving file:" , value | Save exif data to file . |
12,765 | def estimate_sub_second_time ( files , interval = 0.0 ) : if interval <= 0.0 : return [ exif_time ( f ) for f in tqdm ( files , desc = "Reading image capture time" ) ] onesecond = datetime . timedelta ( seconds = 1.0 ) T = datetime . timedelta ( seconds = interval ) for i , f in tqdm ( enumerate ( files ) , desc = "Estimating subsecond time" ) : m = exif_time ( f ) if not m : pass if i == 0 : smin = m smax = m + onesecond else : m0 = m - T * i smin = max ( smin , m0 ) smax = min ( smax , m0 + onesecond ) if not smin or not smax : return None if smin > smax : print ( 'Interval not compatible with EXIF times' ) return None else : s = smin + ( smax - smin ) / 2 return [ s + T * i for i in range ( len ( files ) ) ] | Estimate the capture time of a sequence with sub - second precision EXIF times are only given up to a second of precision . This function uses the given interval between shots to estimate the time inside that second that each picture was taken . |
12,766 | def interpolate_timestamp ( capture_times ) : timestamps = [ ] num_file = len ( capture_times ) time_dict = OrderedDict ( ) if num_file < 2 : return capture_times time_dict = OrderedDict ( ) for i , t in enumerate ( capture_times ) : if t not in time_dict : time_dict [ t ] = { "count" : 0 , "pointer" : 0 } interval = 0 if i != 0 : interval = ( t - capture_times [ i - 1 ] ) . total_seconds ( ) time_dict [ capture_times [ i - 1 ] ] [ "interval" ] = interval time_dict [ t ] [ "count" ] += 1 if len ( time_dict ) >= 2 : time_dict [ time_dict . keys ( ) [ - 1 ] ] [ "interval" ] = time_dict [ time_dict . keys ( ) [ - 2 ] ] [ "interval" ] else : time_dict [ time_dict . keys ( ) [ 0 ] ] [ "interval" ] = time_dict [ time_dict . keys ( ) [ 0 ] ] [ "count" ] * 1. for t in capture_times : d = time_dict [ t ] s = datetime . timedelta ( seconds = d [ "pointer" ] * d [ "interval" ] / float ( d [ "count" ] ) ) updated_time = t + s time_dict [ t ] [ "pointer" ] += 1 timestamps . append ( updated_time ) return timestamps | Interpolate time stamps in case of identical timestamps |
def extract_stream(source, dest, stream_id):
    """Extract a single stream out of *source* into *dest* using ffmpeg.

    :param source: path of the input media file
    :param dest: path the raw stream is written to
    :param stream_id: index of the stream to copy (mapped as ``0:<id>``)
    :raises IOError: when *source* does not exist
    """
    if not os.path.isfile(source):
        raise IOError('No such file: ' + source)

    command = [
        'ffmpeg',
        '-i', source,
        '-y',                    # overwrite dest if present
        '-nostats',
        '-loglevel', '0',        # silence ffmpeg output
        '-codec', 'copy',        # no re-encoding
        '-map', '0:' + str(stream_id),
        '-f', 'rawvideo',
        dest,
    ]
    subprocess.check_output(command)
def verify_exif(filename):
    """Check that the image file has all required EXIF fields.

    Incompatible files will be ignored server side.

    :param filename: path of the image to inspect
    :return: True when every required field is present
    """
    reader = ExifRead(filename)
    return reader.fields_exist(required_fields())
def verify_mapillary_tag(filepath):
    """Check that the image file has the required Mapillary tag.

    Prefers the preserved original copy when one exists under the
    processed-images root.
    """
    original_copy = processing.processed_images_rootpath(filepath)
    target = original_copy if os.path.isfile(original_copy) else filepath
    return ExifRead(target).mapillary_tag_exists()
def isAudio(self):
    """Is this stream labelled as an audio stream?"""
    codec_type = self.__dict__['codec_type']
    return bool(codec_type) and str(codec_type) == 'audio'
def isVideo(self):
    """Is the stream labelled as a video stream."""
    if not self.__dict__['codec_type']:
        return False
    return self.codec_type == 'video'
def isSubtitle(self):
    """Is the stream labelled as a subtitle stream."""
    if not self.__dict__['codec_type']:
        return False
    return str(self.codec_type) == 'subtitle'
def frames(self):
    """Return the length of a video/audio stream in frames.

    Returns 0 if not a video or audio stream, or if the frame count is
    missing or unparsable.
    """
    f = 0
    if self.isVideo() or self.isAudio():
        if self.__dict__['nb_frames']:
            try:
                f = int(self.__dict__['nb_frames'])
            except (ValueError, TypeError):
                # Fix: `print "..."` is a py2-only statement (syntax error
                # on py3); the parenthesized form works on both.  Only the
                # exceptions int() actually raises are caught now.
                print("None integer frame count")
    return f
def durationSeconds(self):
    """Return the runtime duration of a video/audio stream in seconds.

    Returns 0.0 if not a video/audio stream or the duration is unusable.
    """
    f = 0.0
    if self.isVideo() or self.isAudio():
        if self.__dict__['duration']:
            try:
                f = float(self.__dict__['duration'])
            except (ValueError, TypeError):
                # Fix: py2 print statement replaced by the portable call
                # form; catch only what float() raises.
                print("None numeric duration")
    return f
def bitrate(self):
    """Return the stream bitrate as an integer in bits per second.

    Returns 0 when the bitrate is missing or not an integer.
    """
    b = 0
    if self.__dict__['bit_rate']:
        try:
            b = int(self.__dict__['bit_rate'])
        except (ValueError, TypeError):
            # Fix: py2 print statement replaced by the portable call form;
            # catch only what int() raises.
            print("None integer bitrate")
    return b
def get_upload_url(credentials):
    """Return upload parameters for a user via the v3 upload API.

    :param credentials: dict holding ``MAPSettingsUserKey`` and
        ``user_upload_token``
    :return: parsed JSON response with the upload secrets
    Exits the process when the request fails.
    """
    request_url = "https://a.mapillary.com/v3/users/{}/upload_secrets?client_id={}".format(
        credentials["MAPSettingsUserKey"], CLIENT_ID)
    request = urllib2.Request(request_url)
    request.add_header('Authorization',
                       'Bearer {}'.format(credentials["user_upload_token"]))
    try:
        response = json.loads(urllib2.urlopen(request).read())
    except urllib2.HTTPError:
        # Fix: the request is made with urllib2, which raises
        # urllib2.HTTPError -- the old `requests.exceptions.HTTPError`
        # clause could never match, so failures escaped unhandled.
        print("Error getting upload parameters, upload could not start")
        sys.exit(1)
    return response
def get_upload_token(mail, pwd):
    """Get an upload token for the given credentials.

    :return: the token string, or None when login fails or the response
        carries no token
    """
    try:
        params = urllib.urlencode({"email": mail, "password": pwd})
        response = urllib.urlopen(LOGIN_URL, params)
    except Exception:
        # Fix: the bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; network/login failures still yield None.
        return None
    resp = json.loads(response.read())
    if not resp or 'token' not in resp:
        return None
    return resp['token']
def authenticate_with_email_and_pwd(user_email, user_password):
    """Authenticate the user by passing the email and password.

    This function avoids prompting the command line for user credentials
    and is useful for calling tools programmatically.

    :return: dict of Mapillary user settings
    :raises ValueError: when either credential is missing
    Exits the process on authentication failure.
    """
    if user_email is None or user_password is None:
        raise ValueError('Could not authenticate user. Missing username or password')
    upload_token = uploader.get_upload_token(user_email, user_password)
    if not upload_token:
        # Fix: the original referenced the undefined name `user_name`
        # (NameError); the identifier known here is the email address.
        print("Authentication failed for user name " + user_email + ", please try again.")
        sys.exit(1)
    user_key = get_user_key(user_email)
    if not user_key:
        print("User name {} does not exist, please try again or contact Mapillary user support.".format(user_email))
        sys.exit(1)
    user_permission_hash, user_signature_hash = get_user_hashes(user_key, upload_token)
    # Fix: `user_items` was never initialized and `section` was undefined.
    user_items = {}
    user_items["MAPSettingsUsername"] = user_email
    user_items["MAPSettingsUserKey"] = user_key
    user_items["user_upload_token"] = upload_token
    user_items["user_permission_hash"] = user_permission_hash
    user_items["user_signature_hash"] = user_signature_hash
    return user_items
def upload_file(filepath, max_attempts, url, permission, signature, key=None, aws_key=None):
    """Upload file at filepath.

    Retries up to *max_attempts* times on transient network errors and
    records the outcome in an upload log next to the image.  With DRY_RUN
    set, the upload itself is skipped.
    """
    if max_attempts == None:
        max_attempts = MAX_ATTEMPTS
    filename = os.path.basename(filepath)
    # Prefer the EXIF-derived name for the remote object; fall back to the
    # local basename when the EXIF name cannot be read.
    s3_filename = filename
    try:
        s3_filename = ExifRead(filepath).exif_name()
    except:
        pass
    # If a preserved original copy exists, upload that instead, but keep
    # logging against the path the caller passed in.
    filepath_keep_original = processing.processed_images_rootpath(filepath)
    filepath_in = filepath
    if os.path.isfile(filepath_keep_original):
        filepath = filepath_keep_original
    if key is None:
        s3_key = s3_filename
    else:
        s3_key = key + s3_filename
    # S3 POST policy fields accompanying the file part.
    parameters = {"key": s3_key, "AWSAccessKeyId": aws_key, "acl": "private",
                  "policy": permission, "signature": signature, "Content-Type": "image/jpeg"}
    with open(filepath, "rb") as f:
        encoded_string = f.read()
    data, headers = encode_multipart(parameters, {'file': {'filename': filename, 'content': encoded_string}})
    if (DRY_RUN == False):
        displayed_upload_error = False
        for attempt in range(max_attempts):
            response = None
            try:
                request = urllib2.Request(url, data=data, headers=headers)
                response = urllib2.urlopen(request)
                # S3 answers 204 No Content on a successful POST.
                if response.getcode() == 204:
                    create_upload_log(filepath_in, "upload_success")
                    if displayed_upload_error == True:
                        print("Successful upload of {} on attempt {}".format(filename, attempt))
                else:
                    create_upload_log(filepath_in, "upload_failed")
                # Either way we got an answer: stop retrying.
                break
            except urllib2.HTTPError as e:
                print("HTTP error: {} on {}, will attempt upload again for {} more times".format(e, filename, max_attempts - attempt - 1))
                displayed_upload_error = True
                time.sleep(5)
            except urllib2.URLError as e:
                print("URL error: {} on {}, will attempt upload again for {} more times".format(e, filename, max_attempts - attempt - 1))
                time.sleep(5)
            except httplib.HTTPException as e:
                print("HTTP exception: {} on {}, will attempt upload again for {} more times".format(e, filename, max_attempts - attempt - 1))
                time.sleep(5)
            except OSError as e:
                print("OS error: {} on {}, will attempt upload again for {} more times".format(e, filename, max_attempts - attempt - 1))
                time.sleep(5)
            except socket.timeout as e:
                # NOTE(review): no sleep here, unlike the other retries --
                # presumably intentional since the timeout already waited.
                print("Timeout error: {} (retrying), will attempt upload again for {} more times".format(filename, max_attempts - attempt - 1))
            finally:
                if response is not None:
                    response.close()
    else:
        print('DRY_RUN, Skipping actual image upload. Use this for debug only.')
def murmur3_32(data, seed=0):
    """Pure-Python MurmurHash3, x86 32-bit variant.

    MurmurHash3 was written by Austin Appleby and is placed in the public
    domain.  The author hereby disclaims copyright to this source code.

    :param data: string whose characters are hashed byte-wise via ord()
    :param seed: 32-bit seed value
    :return: unsigned 32-bit hash as an int
    """
    C1 = 0xcc9e2d51
    C2 = 0x1b873593
    length = len(data)
    h = seed

    # Body: consume whole 4-byte little-endian blocks.
    block_end = length & 0xfffffffc
    for i in range(0, block_end, 4):
        k = ((ord(data[i]) & 0xff)
             | ((ord(data[i + 1]) & 0xff) << 8)
             | ((ord(data[i + 2]) & 0xff) << 16)
             | (ord(data[i + 3]) << 24))
        k *= C1
        k = (k << 15) | ((k & 0xffffffff) >> 17)   # ROTL32(k, 15)
        k *= C2
        h ^= k
        h = (h << 13) | ((h & 0xffffffff) >> 19)   # ROTL32(h, 13)
        h = h * 5 + 0xe6546b64

    # Tail: fold in the remaining 0-3 bytes.
    k = 0
    tail = length & 0x03
    if tail == 3:
        k = (ord(data[block_end + 2]) & 0xff) << 16
    if tail >= 2:
        k |= (ord(data[block_end + 1]) & 0xff) << 8
    if tail >= 1:
        k |= ord(data[block_end]) & 0xff
    k *= C1
    k = (k << 15) | ((k & 0xffffffff) >> 17)
    k *= C2
    h ^= k

    # Finalization mix: force avalanche of the final bits.
    h ^= length
    h ^= (h & 0xffffffff) >> 16
    h *= 0x85ebca6b
    h ^= (h & 0xffffffff) >> 13
    h *= 0xc2b2ae35
    h ^= (h & 0xffffffff) >> 16
    return h & 0xffffffff
def _readline(sock, buf):
    """Read a line of text from the socket.

    Lines are terminated by "\r\n".  *buf* holds bytes already received;
    returns a tuple of (leftover bytes after the line, the line without
    its terminator).  Raises MemcacheUnexpectedCloseError when the peer
    closes before a full line arrives.
    """
    chunks = []
    last_char = b''
    while True:
        # The "\r\n" terminator can straddle two recv chunks: the "\r" at
        # the tail of the previous chunk and "\n" at the head of this one.
        if last_char == b'\r' and buf[0:1] == b'\n':
            # Strip the "\r" that was already stored with the last chunk.
            chunks[-1] = chunks[-1][:-1]
            return buf[1:], b''.join(chunks)
        elif buf.find(b'\r\n') != -1:
            # Terminator fully inside this chunk: split around it.
            before, sep, after = buf.partition(b"\r\n")
            chunks.append(before)
            return after, b''.join(chunks)
        if buf:
            chunks.append(buf)
            last_char = buf[-1:]
        buf = _recv(sock, RECV_SIZE)
        if not buf:
            raise MemcacheUnexpectedCloseError()
def _readvalue(sock, buf, size):
    """Read the specified amount of bytes from the socket.

    *buf* holds bytes already received.  Returns a tuple of (leftover
    bytes after the value, the *size*-byte value itself).  Raises
    MemcacheUnexpectedCloseError when the peer closes early.
    """
    chunks = []
    # The value is followed by a "\r\n" terminator, hence the +2.
    rlen = size + 2
    while rlen - len(buf) > 0:
        if buf:
            rlen -= len(buf)
            chunks.append(buf)
        buf = _recv(sock, RECV_SIZE)
        if not buf:
            raise MemcacheUnexpectedCloseError()
    # Drop the trailing "\r\n" from the collected value.
    if rlen == 1:
        # The "\r" landed at the end of the previous chunk; trim it there.
        chunks[-1] = chunks[-1][:-1]
    else:
        chunks.append(buf[:rlen - 2])
    return buf[rlen:], b''.join(chunks)
def close(self):
    """Close the connection to memcached if it is open.

    The next call to a method that requires a connection will re-open it.
    """
    sock, self.sock = self.sock, None
    if sock is None:
        return
    try:
        sock.close()
    except Exception:
        # Best effort: the socket may already be dead.
        pass
def set(self, key, value, expire=0, noreply=None):
    """The memcached "set" command: store *value* under *key*.

    Falls back to the client's default_noreply when *noreply* is None.
    """
    effective_noreply = self.default_noreply if noreply is None else noreply
    return self._store_cmd(b'set', {key: value}, expire, effective_noreply)[key]
def set_many(self, values, expire=0, noreply=None):
    """A convenience function for setting multiple values.

    :return: list of keys whose store failed (empty list on full success)
    """
    if noreply is None:
        noreply = self.default_noreply
    outcome = self._store_cmd(b'set', values, expire, noreply)
    failed = []
    for k, ok in six.iteritems(outcome):
        if not ok:
            failed.append(k)
    return failed
def add(self, key, value, expire=0, noreply=None):
    """The memcached "add" command: store only when *key* is absent."""
    chosen = noreply if noreply is not None else self.default_noreply
    result = self._store_cmd(b'add', {key: value}, expire, chosen)
    return result[key]
def replace(self, key, value, expire=0, noreply=None):
    """The memcached "replace" command: store only when *key* exists."""
    chosen = noreply if noreply is not None else self.default_noreply
    result = self._store_cmd(b'replace', {key: value}, expire, chosen)
    return result[key]
def append(self, key, value, expire=0, noreply=None):
    """The memcached "append" command: add *value* after the existing data."""
    chosen = noreply if noreply is not None else self.default_noreply
    result = self._store_cmd(b'append', {key: value}, expire, chosen)
    return result[key]
def prepend(self, key, value, expire=0, noreply=None):
    """The memcached "prepend" command: add *value* before the existing data."""
    chosen = noreply if noreply is not None else self.default_noreply
    result = self._store_cmd(b'prepend', {key: value}, expire, chosen)
    return result[key]
def cas(self, key, value, cas, expire=0, noreply=False):
    """The memcached "cas" (check-and-set) command."""
    result = self._store_cmd(b'cas', {key: value}, expire, noreply, cas)
    return result[key]
def get(self, key, default=None):
    """The memcached "get" command, but only for one key, as a convenience.

    :return: the stored value, or *default* when the key is missing
    """
    fetched = self._fetch_cmd(b'get', [key], False)
    return fetched.get(key, default)
def gets(self, key, default=None, cas_default=None):
    """The memcached "gets" command for one key, as a convenience.

    :return: a (value, cas_token) tuple; (default, cas_default) when missing
    """
    fallback = (default, cas_default)
    return self._fetch_cmd(b'gets', [key], True).get(key, fallback)
def delete(self, key, noreply=None):
    """The memcached "delete" command.

    :return: True in noreply mode; otherwise whether the server confirmed
        the deletion with DELETED
    """
    if noreply is None:
        noreply = self.default_noreply
    cmd = b'delete ' + self.check_key(key)
    if noreply:
        cmd += b' noreply'
    cmd += b'\r\n'
    results = self._misc_cmd([cmd], b'delete', noreply)
    return True if noreply else results[0] == b'DELETED'
def delete_many(self, keys, noreply=None):
    """A convenience function to delete multiple keys.

    Always returns True; individual failures are not reported.
    """
    if not keys:
        return True
    if noreply is None:
        noreply = self.default_noreply
    suffix = b' noreply' if noreply else b''
    cmds = [b'delete ' + self.check_key(k) + suffix + b'\r\n' for k in keys]
    self._misc_cmd(cmds, b'delete', noreply)
    return True
def incr(self, key, value, noreply=False):
    """The memcached "incr" command.

    :return: the new value as int, or None in noreply mode or when the key
        was not found
    """
    checked = self.check_key(key)
    cmd = b'incr ' + checked + b' ' + six.text_type(value).encode('ascii')
    if noreply:
        cmd += b' noreply'
    cmd += b'\r\n'
    results = self._misc_cmd([cmd], b'incr', noreply)
    if noreply or results[0] == b'NOT_FOUND':
        return None
    return int(results[0])
def decr(self, key, value, noreply=False):
    """The memcached "decr" command.

    :return: the new value as int, or None in noreply mode or when the key
        was not found
    """
    checked = self.check_key(key)
    cmd = b'decr ' + checked + b' ' + six.text_type(value).encode('ascii')
    if noreply:
        cmd += b' noreply'
    cmd += b'\r\n'
    results = self._misc_cmd([cmd], b'decr', noreply)
    if noreply or results[0] == b'NOT_FOUND':
        return None
    return int(results[0])
def touch(self, key, expire=0, noreply=None):
    """The memcached "touch" command: update a key's expiration time.

    :return: True in noreply mode; otherwise whether the server answered
        TOUCHED
    """
    if noreply is None:
        noreply = self.default_noreply
    checked = self.check_key(key)
    cmd = b'touch ' + checked + b' ' + six.text_type(expire).encode('ascii')
    if noreply:
        cmd += b' noreply'
    cmd += b'\r\n'
    results = self._misc_cmd([cmd], b'touch', noreply)
    return True if noreply else results[0] == b'TOUCHED'
def stats(self, *args):
    """The memcached "stats" command.

    Known statistics are coerced with STAT_TYPES (int by default); values
    that fail conversion are left as-is.
    """
    result = self._fetch_cmd(b'stats', args, False)
    for key in list(result):
        converter = STAT_TYPES.get(key, int)
        try:
            result[key] = converter(result[key])
        except Exception:
            # Keep the raw value when conversion is impossible.
            pass
    return result
def cache_memlimit(self, memlimit):
    """The memcached "cache_memlimit" command; always returns True."""
    limit_arg = str(int(memlimit))
    self._fetch_cmd(b'cache_memlimit', [limit_arg], False)
    return True
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.