idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
52,600 | def _format_lines ( self , tokensource ) : nocls = self . noclasses lsep = self . lineseparator getcls = self . ttype2class . get c2s = self . class2style escape_table = _escape_html_table tagsfile = self . tagsfile lspan = '' line = [ ] for ttype , value in tokensource : if nocls : cclass = getcls ( ttype ) while ccla... | Just format the tokens without any wrapping tags . Yield individual lines . |
52,601 | def _highlight_lines ( self , tokensource ) : hls = self . hl_lines for i , ( t , value ) in enumerate ( tokensource ) : if t != 1 : yield t , value if i + 1 in hls : if self . noclasses : style = '' if self . style . highlight_color is not None : style = ( ' style="background-color: %s"' % ( self . style . highlight_c... | Highlighted the lines specified in the hl_lines option by post - processing the token stream coming from _format_lines . |
52,602 | def format_unencoded ( self , tokensource , outfile ) : source = self . _format_lines ( tokensource ) if self . hl_lines : source = self . _highlight_lines ( source ) if not self . nowrap : if self . linenos == 2 : source = self . _wrap_inlinelinenos ( source ) if self . lineanchors : source = self . _wrap_lineanchors ... | The formatting process uses several nested generators ; which of them are used is determined by the user's options . |
52,603 | def bygroups ( * args ) : def callback ( lexer , match , ctx = None ) : for i , action in enumerate ( args ) : if action is None : continue elif type ( action ) is _TokenType : data = match . group ( i + 1 ) if data : yield match . start ( i + 1 ) , action , data else : data = match . group ( i + 1 ) if data is not Non... | Callback that yields multiple actions for each group in the match . |
52,604 | def using ( _other , ** kwargs ) : gt_kwargs = { } if 'state' in kwargs : s = kwargs . pop ( 'state' ) if isinstance ( s , ( list , tuple ) ) : gt_kwargs [ 'stack' ] = s else : gt_kwargs [ 'stack' ] = ( 'root' , s ) if _other is this : def callback ( lexer , match , ctx = None ) : if kwargs : kwargs . update ( lexer . ... | Callback that processes the match with a different lexer . |
52,605 | def do_insertions ( insertions , tokens ) : insertions = iter ( insertions ) try : index , itokens = next ( insertions ) except StopIteration : for item in tokens : yield item return realpos = None insleft = True for i , t , v in tokens : if realpos is None : realpos = i oldi = 0 while insleft and i + len ( v ) >= inde... | Helper for lexers which must combine the results of several sublexers . |
52,606 | def _process_regex ( cls , regex , rflags , state ) : if isinstance ( regex , Future ) : regex = regex . get ( ) return re . compile ( regex , rflags ) . match | Preprocess the regular expression component of a token definition . |
52,607 | def _process_token ( cls , token ) : assert type ( token ) is _TokenType or callable ( token ) , 'token type must be simple type or callable, not %r' % ( token , ) return token | Preprocess the token component of a token definition . |
52,608 | def _process_new_state ( cls , new_state , unprocessed , processed ) : if isinstance ( new_state , str ) : if new_state == '#pop' : return - 1 elif new_state in unprocessed : return ( new_state , ) elif new_state == '#push' : return new_state elif new_state [ : 5 ] == '#pop:' : return - int ( new_state [ 5 : ] ) else :... | Preprocess the state transition action of a token definition . |
52,609 | def _process_state ( cls , unprocessed , processed , state ) : assert type ( state ) is str , "wrong state name %r" % state assert state [ 0 ] != '#' , "invalid state name %r" % state if state in processed : return processed [ state ] tokens = processed [ state ] = [ ] rflags = cls . flags for tdef in unprocessed [ sta... | Preprocess a single state definition . |
52,610 | def process_tokendef ( cls , name , tokendefs = None ) : processed = cls . _all_tokens [ name ] = { } tokendefs = tokendefs or cls . tokens [ name ] for state in list ( tokendefs ) : cls . _process_state ( tokendefs , processed , state ) return processed | Preprocess a dictionary of token definitions . |
52,611 | def get_tokendefs ( cls ) : tokens = { } inheritable = { } for c in cls . __mro__ : toks = c . __dict__ . get ( 'tokens' , { } ) for state , items in iteritems ( toks ) : curitems = tokens . get ( state ) if curitems is None : tokens [ state ] = items try : inherit_ndx = items . index ( inherit ) except ValueError : co... | Merge tokens from superclasses in MRO order returning a single tokendef dictionary . |
52,612 | def memorized_timedelta ( seconds ) : try : return _timedelta_cache [ seconds ] except KeyError : delta = timedelta ( seconds = seconds ) _timedelta_cache [ seconds ] = delta return delta | Create only one instance of each distinct timedelta |
52,613 | def memorized_datetime ( seconds ) : try : return _datetime_cache [ seconds ] except KeyError : dt = _epoch + timedelta ( seconds = seconds ) _datetime_cache [ seconds ] = dt return dt | Create only one instance of each distinct datetime |
52,614 | def memorized_ttinfo ( * args ) : try : return _ttinfo_cache [ args ] except KeyError : ttinfo = ( memorized_timedelta ( args [ 0 ] ) , memorized_timedelta ( args [ 1 ] ) , args [ 2 ] ) _ttinfo_cache [ args ] = ttinfo return ttinfo | Create only one instance of each distinct tuple |
52,615 | def unpickler ( zone , utcoffset = None , dstoffset = None , tzname = None ) : tz = pytz . timezone ( zone ) if utcoffset is None : return tz utcoffset = memorized_timedelta ( utcoffset ) dstoffset = memorized_timedelta ( dstoffset ) try : return tz . _tzinfos [ ( utcoffset , dstoffset , tzname ) ] except KeyError : pa... | Factory function for unpickling pytz tzinfo instances . |
52,616 | def utcoffset ( self , dt , is_dst = None ) : if dt is None : return None elif dt . tzinfo is not self : dt = self . localize ( dt , is_dst ) return dt . tzinfo . _utcoffset else : return self . _utcoffset | See datetime . tzinfo . utcoffset |
52,617 | def dst ( self , dt , is_dst = None ) : if dt is None : return None elif dt . tzinfo is not self : dt = self . localize ( dt , is_dst ) return dt . tzinfo . _dst else : return self . _dst | See datetime . tzinfo . dst |
52,618 | def tzname ( self , dt , is_dst = None ) : if dt is None : return self . zone elif dt . tzinfo is not self : dt = self . localize ( dt , is_dst ) return dt . tzinfo . _tzname else : return self . _tzname | See datetime . tzinfo . tzname |
52,619 | def check_range ( number , min_r , max_r , name = "" ) : try : number = float ( number ) if number < min_r or number > max_r : raise FFmpegNormalizeError ( "{} must be within [{},{}]" . format ( name , min_r , max_r ) ) return number pass except Exception as e : raise e | Check if a number is within a given range |
52,620 | def add_media_file ( self , input_file , output_file ) : if not os . path . exists ( input_file ) : raise FFmpegNormalizeError ( "file " + input_file + " does not exist" ) ext = os . path . splitext ( output_file ) [ 1 ] [ 1 : ] if ( self . audio_codec is None or 'pcm' in self . audio_codec ) and ext in PCM_INCOMPATIBL... | Add a media file to normalize |
52,621 | def run_normalization ( self ) : for index , media_file in enumerate ( tqdm ( self . media_files , desc = "File" , disable = not self . progress , position = 0 ) ) : logger . info ( "Normalizing file {} ({} of {})" . format ( media_file , index + 1 , self . file_count ) ) media_file . run_normalization ( ) logger . inf... | Run the normalization procedures |
52,622 | def get_ffmpeg_exe ( ) : if 'FFMPEG_PATH' in os . environ : ffmpeg_exe = os . environ [ 'FFMPEG_PATH' ] else : ffmpeg_exe = which ( 'ffmpeg' ) if not ffmpeg_exe : if which ( 'avconv' ) : raise FFmpegNormalizeError ( "avconv is not supported. " "Please install ffmpeg from http://ffmpeg.org instead." ) else : raise FFmpe... | Return path to ffmpeg executable |
52,623 | def ffmpeg_has_loudnorm ( ) : cmd_runner = CommandRunner ( [ get_ffmpeg_exe ( ) , '-filters' ] ) cmd_runner . run_command ( ) output = cmd_runner . get_output ( ) if 'loudnorm' in output : return True else : logger . warning ( "Your ffmpeg version does not support the 'loudnorm' filter. " "Please make sure you are runn... | Run feature detection on ffmpeg returns True if ffmpeg supports the loudnorm filter |
52,624 | def parse_streams ( self ) : logger . debug ( "Parsing streams of {}" . format ( self . input_file ) ) cmd = [ self . ffmpeg_normalize . ffmpeg_exe , '-i' , self . input_file , '-c' , 'copy' , '-t' , '0' , '-map' , '0' , '-f' , 'null' , NUL ] cmd_runner = CommandRunner ( cmd ) cmd_runner . run_command ( ) output = cmd_... | Try to parse all input streams from file |
52,625 | def _get_audio_filter_cmd ( self ) : all_filters = [ ] output_labels = [ ] for audio_stream in self . streams [ 'audio' ] . values ( ) : if self . ffmpeg_normalize . normalization_type == 'ebu' : stream_filter = audio_stream . get_second_pass_opts_ebu ( ) else : stream_filter = audio_stream . get_second_pass_opts_peakr... | Return filter_complex command and output labels needed |
52,626 | def parse_volumedetect_stats ( self ) : logger . info ( "Running first pass volumedetect filter for stream {}" . format ( self . stream_id ) ) filter_str = '[0:{}]volumedetect' . format ( self . stream_id ) cmd = [ self . media_file . ffmpeg_normalize . ffmpeg_exe , '-nostdin' , '-y' , '-i' , self . media_file . input_... | Use ffmpeg with volumedetect filter to get the mean volume of the input file . |
52,627 | def parse_loudnorm_stats ( self ) : logger . info ( "Running first pass loudnorm filter for stream {}" . format ( self . stream_id ) ) opts = { 'i' : self . media_file . ffmpeg_normalize . target_level , 'lra' : self . media_file . ffmpeg_normalize . loudness_range_target , 'tp' : self . media_file . ffmpeg_normalize .... | Run a first pass loudnorm filter to get measured data . |
52,628 | def get_second_pass_opts_ebu ( self ) : if not self . loudness_statistics [ 'ebu' ] : raise FFmpegNormalizeError ( "First pass not run, you must call parse_loudnorm_stats first" ) input_i = float ( self . loudness_statistics [ 'ebu' ] [ "input_i" ] ) if input_i > 0 : logger . warn ( "Input file had measured input loudn... | Return second pass loudnorm filter options string for ffmpeg |
52,629 | def setup_custom_logger ( name ) : global loggers if loggers . get ( name ) : return loggers . get ( name ) formatter = logging . Formatter ( fmt = '%(levelname)s: %(message)s' ) handler = TqdmLoggingHandler ( ) handler . setFormatter ( formatter ) if system ( ) not in [ 'Windows' , 'cli' ] : logging . addLevelName ( l... | Create a logger with a certain name and level |
52,630 | def register_checkers ( linter ) : linter . register_checker ( ModelChecker ( linter ) ) linter . register_checker ( DjangoInstalledChecker ( linter ) ) linter . register_checker ( JsonResponseChecker ( linter ) ) linter . register_checker ( FormChecker ( linter ) ) | Register checkers . |
52,631 | def register ( linter ) : linter . register_checker ( NewDbFieldWithDefaultChecker ( linter ) ) if not compat . LOAD_CONFIGURATION_SUPPORTED : load_configuration ( linter ) | Required method to auto register this checker . |
52,632 | def ignore_import_warnings_for_related_fields ( orig_method , self , node ) : consumer = self . _to_consume [ 0 ] new_things = { } iterat = consumer . to_consume . items if PY3 else consumer . to_consume . iteritems for name , stmts in iterat ( ) : if isinstance ( stmts [ 0 ] , ImportFrom ) : if any ( [ n [ 0 ] in ( 'F... | Replaces the leave_module method on the VariablesChecker class to prevent unused - import warnings which are caused by the ForeignKey and OneToOneField transformations . By replacing the nodes in the AST with their type rather than the django field imports of the form from django . db . models import OneToOneField rais... |
52,633 | def is_model_admin_subclass ( node ) : if node . name [ - 5 : ] != 'Admin' or isinstance ( node . parent , ClassDef ) : return False return node_is_subclass ( node , 'django.contrib.admin.options.ModelAdmin' ) | Checks that node is derivative of ModelAdmin class . |
52,634 | def is_model_factory ( node ) : try : parent_classes = node . expr . inferred ( ) except : return False parents = ( 'factory.declarations.LazyFunction' , 'factory.declarations.SubFactory' , 'factory.django.DjangoModelFactory' ) for parent_class in parent_classes : try : if parent_class . qname ( ) in parents : return T... | Checks that node is derivative of DjangoModelFactory or SubFactory class . |
52,635 | def is_model_mpttmeta_subclass ( node ) : if node . name != 'MPTTMeta' or not isinstance ( node . parent , ClassDef ) : return False parents = ( 'django.db.models.base.Model' , '.Model' , 'django.forms.forms.Form' , '.Form' , 'django.forms.models.ModelForm' , '.ModelForm' ) return node_is_subclass ( node . parent , * p... | Checks that node is derivative of MPTTMeta class . |
52,636 | def _attribute_is_magic ( node , attrs , parents ) : if node . attrname not in attrs : return False if not node . last_child ( ) : return False try : for cls in node . last_child ( ) . inferred ( ) : if isinstance ( cls , Super ) : cls = cls . _self_class if node_is_subclass ( cls , * parents ) or cls . qname ( ) in pa... | Checks that node is an attribute used inside one of allowed parents |
52,637 | def generic_is_view_attribute ( parents , attrs ) : def is_attribute ( node ) : return _attribute_is_magic ( node , attrs , parents ) return is_attribute | Generates is_X_attribute function for given parents and attrs . |
52,638 | def is_model_view_subclass_method_shouldnt_be_function ( node ) : if node . name not in ( 'get' , 'post' ) : return False parent = node . parent while parent and not isinstance ( parent , ScopedClass ) : parent = parent . parent subclass = ( 'django.views.View' , 'django.views.generic.View' , 'django.views.generic.base... | Checks that node is get or post method of the View class . |
52,639 | def is_model_media_valid_attributes ( node ) : if node . name not in ( 'js' , ) : return False parent = node . parent while parent and not isinstance ( parent , ScopedClass ) : parent = parent . parent if parent is None or parent . name != "Media" : return False return True | Suppress warnings for valid attributes of Media class . |
52,640 | def is_templatetags_module_valid_constant ( node ) : if node . name not in ( 'register' , ) : return False parent = node . parent while not isinstance ( parent , Module ) : parent = parent . parent if "templatetags." not in parent . name : return False return True | Suppress warnings for valid constants in templatetags module . |
52,641 | def is_urls_module_valid_constant ( node ) : if node . name not in ( 'urlpatterns' , 'app_name' ) : return False parent = node . parent while not isinstance ( parent , Module ) : parent = parent . parent if not parent . name . endswith ( 'urls' ) : return False return True | Suppress warnings for valid constants in urls module . |
52,642 | def load_configuration ( linter ) : name_checker = get_checker ( linter , NameChecker ) name_checker . config . good_names += ( 'qs' , 'urlpatterns' , 'register' , 'app_name' , 'handler500' ) linter . config . black_list += ( 'migrations' , 'south_migrations' ) | Amend existing checker config . |
52,643 | def register ( linter ) : register_checkers ( linter ) try : from pylint_django . augmentations import apply_augmentations apply_augmentations ( linter ) except ImportError : pass if not compat . LOAD_CONFIGURATION_SUPPORTED : load_configuration ( linter ) | Registering additional checkers . |
52,644 | async def create_object ( model , ** data ) : warnings . warn ( "create_object() is deprecated, Manager.create() " "should be used instead" , DeprecationWarning ) obj = model ( ** data ) pk = await insert ( model . insert ( ** dict ( obj . __data__ ) ) ) if obj . _pk is None : obj . _pk = pk return obj | Create object asynchronously . |
52,645 | async def get_object ( source , * args ) : warnings . warn ( "get_object() is deprecated, Manager.get() " "should be used instead" , DeprecationWarning ) if isinstance ( source , peewee . Query ) : query = source model = query . model else : query = source . select ( ) model = source for obj in ( await select ( query .... | Get object asynchronously . |
52,646 | async def delete_object ( obj , recursive = False , delete_nullable = False ) : warnings . warn ( "delete_object() is deprecated, Manager.delete() " "should be used instead" , DeprecationWarning ) if recursive : dependencies = obj . dependencies ( delete_nullable ) for query , fk in reversed ( list ( dependencies ) ) :... | Delete object asynchronously . |
52,647 | async def update_object ( obj , only = None ) : warnings . warn ( "update_object() is deprecated, Manager.update() " "should be used instead" , DeprecationWarning ) field_dict = dict ( obj . __data__ ) pk_field = obj . _meta . primary_key if only : field_dict = obj . _prune_fields ( field_dict , only ) if not isinstanc... | Update object asynchronously . |
52,648 | async def select ( query ) : assert isinstance ( query , peewee . SelectQuery ) , ( "Error, trying to run select coroutine" "with wrong query class %s" % str ( query ) ) cursor = await _execute_query_async ( query ) result = AsyncQueryWrapper ( cursor = cursor , query = query ) try : while True : await result . fetchon... | Perform SELECT query asynchronously . |
52,649 | async def insert ( query ) : assert isinstance ( query , peewee . Insert ) , ( "Error, trying to run insert coroutine" "with wrong query class %s" % str ( query ) ) cursor = await _execute_query_async ( query ) try : if query . _returning : row = await cursor . fetchone ( ) result = row [ 0 ] else : database = _query_d... | Perform INSERT query asynchronously . Returns last insert ID . This function is called by object . create for single objects only . |
52,650 | async def update ( query ) : assert isinstance ( query , peewee . Update ) , ( "Error, trying to run update coroutine" "with wrong query class %s" % str ( query ) ) cursor = await _execute_query_async ( query ) rowcount = cursor . rowcount await cursor . release ( ) return rowcount | Perform UPDATE query asynchronously . Returns number of rows updated . |
52,651 | async def delete ( query ) : assert isinstance ( query , peewee . Delete ) , ( "Error, trying to run delete coroutine" "with wrong query class %s" % str ( query ) ) cursor = await _execute_query_async ( query ) rowcount = cursor . rowcount await cursor . release ( ) return rowcount | Perform DELETE query asynchronously . Returns number of rows deleted . |
52,652 | def sync_unwanted ( database ) : warnings . warn ( "sync_unwanted() context manager is deprecated, " "use database's `.allow_sync()` context manager or " "`Manager.allow_sync()` context manager. " , DeprecationWarning ) old_allow_sync = database . _allow_sync database . _allow_sync = False yield database . _allow_sync ... | Context manager for preventing unwanted sync queries . UnwantedSyncQueryError exception will raise on such query . |
52,653 | async def get ( self , source_ , * args , ** kwargs ) : await self . connect ( ) if isinstance ( source_ , peewee . Query ) : query = source_ model = query . model else : query = source_ . select ( ) model = source_ conditions = list ( args ) + [ ( getattr ( model , k ) == v ) for k , v in kwargs . items ( ) ] if condi... | Get the model instance . |
52,654 | async def create ( self , model_ , ** data ) : inst = model_ ( ** data ) query = model_ . insert ( ** dict ( inst . __data__ ) ) pk = await self . execute ( query ) if inst . _pk is None : inst . _pk = pk return inst | Create a new object saved to database . |
52,655 | async def get_or_create ( self , model_ , defaults = None , ** kwargs ) : try : return ( await self . get ( model_ , ** kwargs ) ) , False except model_ . DoesNotExist : data = defaults or { } data . update ( { k : v for k , v in kwargs . items ( ) if '__' not in k } ) return ( await self . create ( model_ , ** data ) ... | Try to get an object or create it with the specified defaults . |
52,656 | async def create_or_get ( self , model_ , ** kwargs ) : try : return ( await self . create ( model_ , ** kwargs ) ) , True except IntegrityErrors : query = [ ] for field_name , value in kwargs . items ( ) : field = getattr ( model_ , field_name ) if field . unique or field . primary_key : query . append ( field == valu... | Try to create new object with specified data . If object already exists then try to get it by unique fields . |
52,657 | def _subclassed ( base , * classes ) : return all ( map ( lambda obj : isinstance ( obj , base ) , classes ) ) | Check if all classes are subclassed from base . |
52,658 | def _get_result_wrapper ( self , query ) : cursor = RowsCursor ( self . _rows , self . _cursor . description ) return query . _get_cursor_wrapper ( cursor ) | Get result wrapper class . |
52,659 | async def fetchone ( self ) : row = await self . _cursor . fetchone ( ) if not row : raise GeneratorExit self . _rows . append ( row ) | Fetch single row from the cursor . |
52,660 | async def connect_async ( self , loop = None , timeout = None ) : if self . deferred : raise Exception ( "Error, database not properly initialized " "before opening connection" ) if self . _async_conn : return elif self . _async_wait : await self . _async_wait else : self . _loop = loop self . _async_wait = asyncio . F... | Set up async connection on specified event loop or on default event loop . |
52,661 | async def cursor_async ( self ) : await self . connect_async ( loop = self . _loop ) if self . transaction_depth_async ( ) > 0 : conn = self . transaction_conn_async ( ) else : conn = None try : return ( await self . _async_conn . cursor ( conn = conn ) ) except : await self . close_async ( ) raise | Acquire async cursor . |
52,662 | async def close_async ( self ) : if self . _async_wait : await self . _async_wait if self . _async_conn : conn = self . _async_conn self . _async_conn = None self . _async_wait = None self . _task_data = None await conn . close ( ) | Close async connection . |
52,663 | async def push_transaction_async ( self ) : await self . connect_async ( loop = self . loop ) depth = self . transaction_depth_async ( ) if not depth : conn = await self . _async_conn . acquire ( ) self . _task_data . set ( 'conn' , conn ) self . _task_data . set ( 'depth' , depth + 1 ) | Increment async transaction depth . |
52,664 | async def pop_transaction_async ( self ) : depth = self . transaction_depth_async ( ) if depth > 0 : depth -= 1 self . _task_data . set ( 'depth' , depth ) if depth == 0 : conn = self . _task_data . get ( 'conn' ) self . _async_conn . release ( conn ) else : raise ValueError ( "Invalid async transaction depth value" ) | Decrement async transaction depth . |
52,665 | def allow_sync ( self ) : old_allow_sync = self . _allow_sync self . _allow_sync = True try : yield except : raise finally : try : self . close ( ) except self . Error : pass self . _allow_sync = old_allow_sync | Allow sync queries within context . Close sync connection on exit if connected . |
52,666 | def execute_sql ( self , * args , ** kwargs ) : assert self . _allow_sync , ( "Error, sync query is not allowed! Call the `.set_allow_sync()` " "or use the `.allow_sync()` context manager." ) if self . _allow_sync in ( logging . ERROR , logging . WARNING ) : logging . log ( self . _allow_sync , "Error, sync query is no... | Sync execute SQL query allow_sync must be set to True . |
52,667 | async def cursor ( self , conn = None , * args , ** kwargs ) : in_transaction = conn is not None if not conn : conn = await self . acquire ( ) cursor = await conn . cursor ( * args , ** kwargs ) cursor . release = functools . partial ( self . release_cursor , cursor , in_transaction = in_transaction ) return cursor | Get a cursor for the specified transaction connection or acquire from the pool . |
52,668 | def connect_params_async ( self ) : kwargs = self . connect_params . copy ( ) kwargs . update ( { 'minsize' : self . min_connections , 'maxsize' : self . max_connections , 'enable_json' : self . _enable_json , 'enable_hstore' : self . _enable_hstore , } ) return kwargs | Connection parameters for aiopg . Connection |
52,669 | async def release_cursor ( self , cursor , in_transaction = False ) : conn = cursor . connection await cursor . close ( ) if not in_transaction : self . release ( conn ) | Release cursor coroutine . Unless in transaction the connection is also released back to the pool . |
52,670 | def connect_params_async ( self ) : kwargs = self . connect_params . copy ( ) kwargs . update ( { 'minsize' : self . min_connections , 'maxsize' : self . max_connections , 'autocommit' : True , } ) return kwargs | Connection parameters for aiomysql . Connection |
52,671 | def get ( self , key , * val ) : data = self . get_data ( ) if data is not None : return data . get ( key , * val ) if val : return val [ 0 ] raise KeyError ( key ) | Get value stored for current running task . Optionally you may provide the default value . Raises KeyError when it can't get the value and no default one is provided . |
52,672 | def set ( self , key , val ) : data = self . get_data ( True ) if data is not None : data [ key ] = val else : raise RuntimeError ( "No task is currently running" ) | Set value stored for current running task . |
52,673 | def get_data ( self , create = False ) : task = asyncio_current_task ( loop = self . loop ) if task : task_id = id ( task ) if create and task_id not in self . data : self . data [ task_id ] = { } task . add_done_callback ( self . del_data ) return self . data . get ( task_id ) return None | Get dict stored for current running task . Return None or an empty dict if no data was found depending on the create argument value . |
52,674 | def _get_from_bin ( self ) : java_bin = os . path . realpath ( self . _java ) if os . path . exists ( java_bin ) : java_home = os . path . abspath ( os . path . join ( os . path . dirname ( java_bin ) , '..' ) ) return self . find_libjvm ( java_home ) | Retrieves the Java library path according to the real installation of the java executable |
52,675 | def initialize_options ( self , * args ) : import distutils . sysconfig cfg_vars = distutils . sysconfig . get_config_vars ( ) for k , v in cfg_vars . items ( ) : if isinstance ( v , str ) and v . find ( "-Wstrict-prototypes" ) : v = v . replace ( '-Wstrict-prototypes' , '' ) cfg_vars [ k ] = v if isinstance ( v , str ... | omit - Wstrict - prototypes from CFLAGS since it's only valid for C code . |
52,676 | def addClassPath ( path1 ) : global _CLASSPATHS path1 = _os . path . abspath ( path1 ) if _sys . platform == 'cygwin' : path1 = _posix2win ( path1 ) _CLASSPATHS . add ( str ( path1 ) ) | Add a path to the java class path |
52,677 | def getClassPath ( ) : global _CLASSPATHS global _SEP out = [ ] for path in _CLASSPATHS : if path == '' : continue if path . endswith ( '*' ) : paths = _glob . glob ( path + ".jar" ) if len ( path ) == 0 : continue out . extend ( paths ) else : out . append ( path ) return _SEP . join ( out ) | Get the full java class path . |
52,678 | def find_libjvm ( self , java_home ) : found_jamvm = False non_supported_jvm = ( 'cacao' , 'jamvm' ) found_non_supported_jvm = False for root , _ , names in os . walk ( java_home ) : if self . _libfile in names : candidate = os . path . split ( root ) [ 1 ] if candidate in non_supported_jvm : found_non_supported_jvm = ... | Recursively looks for the given file |
52,679 | def find_possible_homes ( self , parents ) : homes = [ ] java_names = ( 'jre' , 'jdk' , 'java' ) for parent in parents : for childname in sorted ( os . listdir ( parent ) ) : path = os . path . realpath ( os . path . join ( parent , childname ) ) if path in homes or not os . path . isdir ( path ) : continue real_name =... | Generator that looks for the first - level children folders that could be Java installations according to their name |
52,680 | def _get_from_java_home ( self ) : java_home = os . getenv ( "JAVA_HOME" ) if java_home and os . path . exists ( java_home ) : java_home = os . path . realpath ( java_home ) if not os . path . exists ( java_home ) : java_home = os . getenv ( "JAVA_HOME" ) return self . find_libjvm ( java_home ) | Retrieves the Java library path according to the JAVA_HOME environment variable |
52,681 | def _get_from_known_locations ( self ) : for home in self . find_possible_homes ( self . _locations ) : jvm = self . find_libjvm ( home ) if jvm is not None : return jvm | Retrieves the first existing Java library path in the predefined known locations |
52,682 | def node_query ( self , node ) : if isinstance ( node , ast . Call ) : assert node . args arg = node . args [ 0 ] if not isinstance ( arg , ast . Str ) : return else : raise TypeError ( type ( node ) ) return arg . s | Return the query for the gql call node |
52,683 | def default ( thumbnailer , prepared_options , source_filename , thumbnail_extension , ** kwargs ) : filename_parts = [ source_filename ] if ( '%(opts)s' in thumbnailer . thumbnail_basedir or '%(opts)s' in thumbnailer . thumbnail_subdir ) : if thumbnail_extension != os . path . splitext ( source_filename ) [ 1 ] [ 1 : ... | Easy - thumbnails default name processor . |
52,684 | def hashed ( source_filename , prepared_options , thumbnail_extension , ** kwargs ) : parts = ':' . join ( [ source_filename ] + prepared_options ) short_sha = hashlib . sha1 ( parts . encode ( 'utf-8' ) ) . digest ( ) short_hash = base64 . urlsafe_b64encode ( short_sha [ : 9 ] ) . decode ( 'utf-8' ) return '.' . join ... | Generate a short hashed thumbnail filename . |
52,685 | def source_hashed ( source_filename , prepared_options , thumbnail_extension , ** kwargs ) : source_sha = hashlib . sha1 ( source_filename . encode ( 'utf-8' ) ) . digest ( ) source_hash = base64 . urlsafe_b64encode ( source_sha [ : 9 ] ) . decode ( 'utf-8' ) parts = ':' . join ( prepared_options [ 1 : ] ) parts_sha = ... | Generate a thumbnail filename of the source filename and options separately hashed along with the size . |
52,686 | def save_image ( image , destination = None , filename = None , ** options ) : if destination is None : destination = BytesIO ( ) filename = filename or '' Image . init ( ) format = Image . EXTENSION . get ( os . path . splitext ( filename ) [ 1 ] . lower ( ) , 'JPEG' ) if format in ( 'JPEG' , 'WEBP' ) : options . setd... | Save a PIL image . |
52,687 | def generate_source_image ( source_file , processor_options , generators = None , fail_silently = True ) : processor_options = ThumbnailOptions ( processor_options ) was_closed = getattr ( source_file , 'closed' , False ) if generators is None : generators = [ utils . dynamic_import ( name ) for name in settings . THUM... | Processes a source File through a series of source generators stopping once a generator returns an image . |
52,688 | def revert ( self ) : for attr , value in self . _changed . items ( ) : setattr ( django_settings , attr , value ) for attr in self . _added : delattr ( django_settings , attr ) self . _changed = { } self . _added = [ ] if self . isolated : self . _isolated_overrides = BaseSettings ( ) | Revert any changes made to settings . |
52,689 | def pil_image ( source , exif_orientation = True , ** options ) : if not source : return source = BytesIO ( source . read ( ) ) image = Image . open ( source ) try : image . load ( ) except IOError : pass image . load ( ) if exif_orientation : image = utils . exif_orientation ( image ) return image | Try to open the source file directly using PIL ignoring any errors . |
52,690 | def optimize_thumbnail ( thumbnail ) : try : optimize_command = settings . THUMBNAIL_OPTIMIZE_COMMAND [ determinetype ( thumbnail . path ) ] if not optimize_command : return except ( TypeError , KeyError , NotImplementedError ) : return storage = thumbnail . storage try : with NamedTemporaryFile ( ) as temp_file : thum... | Optimize thumbnail images by removing unnecessary data |
52,691 | def thumbnail ( parser , token ) : args = token . split_contents ( ) tag = args [ 0 ] if len ( args ) > 4 and args [ - 2 ] == 'as' : context_name = args [ - 1 ] args = args [ : - 2 ] else : context_name = None if len ( args ) < 3 : raise TemplateSyntaxError ( "Invalid syntax. Expected " "'{%% %s source size [option1 op... | Creates a thumbnail of an ImageField . |
52,692 | def thumbnail_url ( source , alias ) : try : thumb = get_thumbnailer ( source ) [ alias ] except Exception : return '' return thumb . url | Return the thumbnail url for a source file using an aliased set of thumbnail options . |
52,693 | def data_uri ( thumbnail ) : try : thumbnail . open ( 'rb' ) data = thumbnail . read ( ) finally : thumbnail . close ( ) mime_type = mimetypes . guess_type ( str ( thumbnail . file ) ) [ 0 ] or 'application/octet-stream' data = b64encode ( data ) . decode ( 'utf-8' ) return 'data:{0};base64,{1}' . format ( mime_type , ... | This filter will return the base64 encoded data URI for a given thumbnail object . |
52,694 | def read_files ( * filenames ) : output = [ ] for filename in filenames : f = codecs . open ( filename , encoding = 'utf-8' ) try : output . append ( f . read ( ) ) finally : f . close ( ) return '\n\n' . join ( output ) | Output the contents of one or more files to a single concatenated string . |
52,695 | def all_thumbnails ( path , recursive = True , prefix = None , subdir = None ) : if prefix is None : prefix = settings . THUMBNAIL_PREFIX if subdir is None : subdir = settings . THUMBNAIL_SUBDIR thumbnail_files = { } if not path . endswith ( '/' ) : path = '%s/' % path len_path = len ( path ) if recursive : all = os . ... | Return a dictionary referencing all files which match the thumbnail format . |
52,696 | def thumbnails_for_file ( relative_source_path , root = None , basedir = None , subdir = None , prefix = None ) : if root is None : root = settings . MEDIA_ROOT if prefix is None : prefix = settings . THUMBNAIL_PREFIX if subdir is None : subdir = settings . THUMBNAIL_SUBDIR if basedir is None : basedir = settings . THU... | Return a list of dictionaries one for each thumbnail belonging to the source image . |
52,697 | def delete_thumbnails ( relative_source_path , root = None , basedir = None , subdir = None , prefix = None ) : thumbs = thumbnails_for_file ( relative_source_path , root , basedir , subdir , prefix ) return _delete_using_thumbs_list ( thumbs ) | Delete all thumbnails for a source image . |
52,698 | def delete_all_thumbnails ( path , recursive = True ) : total = 0 for thumbs in all_thumbnails ( path , recursive = recursive ) . values ( ) : total += _delete_using_thumbs_list ( thumbs ) return total | Delete all files within a path which match the thumbnails pattern . |
52,699 | def signal_committed_filefields ( sender , instance , ** kwargs ) : for field_name in getattr ( instance , '_uncommitted_filefields' , ( ) ) : fieldfile = getattr ( instance , field_name ) if fieldfile : signals . saved_file . send_robust ( sender = sender , fieldfile = fieldfile ) | A post_save signal handler which sends a signal for each FileField that was committed this save . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.