idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
57,700 | def capture_widget ( widget , path = None ) : if use_qt5 : pixmap = widget . grab ( ) else : pixmap = QtGui . QPixmap . grabWidget ( widget ) if path : pixmap . save ( path ) else : image_buffer = QtCore . QBuffer ( ) image_buffer . open ( QtCore . QIODevice . ReadWrite ) pixmap . save ( image_buffer , "PNG" ) return image_buffer . data ( ) . data ( ) | Grab an image of a Qt widget |
57,701 | def get_widget ( title ) : tform = idaapi . find_tform ( title ) if not tform : raise exceptions . FormNotFound ( "No form titled {!r} found." . format ( title ) ) return form_to_widget ( tform ) | Get the Qt widget of the IDA window with the given title . |
57,702 | def get_window ( ) : tform = idaapi . get_current_tform ( ) if not tform : tform = idaapi . find_tform ( "Output window" ) widget = form_to_widget ( tform ) window = widget . window ( ) return window | Get IDA s top level window . |
57,703 | def add_menu ( self , name ) : if name in self . _menus : raise exceptions . MenuAlreadyExists ( "Menu name {!r} already exists." . format ( name ) ) menu = self . _menu . addMenu ( name ) self . _menus [ name ] = menu | Add a top - level menu . |
57,704 | def remove_menu ( self , name ) : if name not in self . _menus : raise exceptions . MenuNotFound ( "Menu {!r} was not found. It might be deleted, or belong to another menu manager." . format ( name ) ) self . _menu . removeAction ( self . _menus [ name ] . menuAction ( ) ) del self . _menus [ name ] | Remove a top - level menu . |
57,705 | def clear ( self ) : for menu in self . _menus . itervalues ( ) : self . _menu . removeAction ( menu . menuAction ( ) ) self . _menus = { } | Clear all menus created by this manager . |
57,706 | def get_by_flags ( self , flags ) : for reg in self . _reg_infos : if reg . flags & flags == flags : yield reg | Iterate all register infos matching the given flags . |
57,707 | def get_single_by_flags ( self , flags ) : regs = list ( self . get_by_flags ( flags ) ) if len ( regs ) != 1 : raise ValueError ( "Flags do not return unique resigter. {!r}" , regs ) return regs [ 0 ] | Get the register info matching the flag . Raises ValueError if more than one are found . |
57,708 | def segments ( seg_type = None ) : for index in xrange ( idaapi . get_segm_qty ( ) ) : seg = Segment ( index = index ) if ( seg_type is None ) or ( seg . type == seg_type ) : yield Segment ( index = index ) | Iterate segments based on type |
57,709 | def next ( self ) : seg = Segment ( segment_t = idaapi . get_next_seg ( self . ea ) ) if seg . ea <= self . ea : raise exceptions . NoMoreSegments ( "This is the last segment. No segments exist after it." ) return seg | Get the next segment . |
57,710 | def prev ( self ) : seg = Segment ( segment_t = idaapi . get_prev_seg ( self . ea ) ) if seg . ea >= self . ea : raise exceptions . NoMoreSegments ( "This is the first segment. no segments exist before it." ) return seg | Get the previous segment . |
57,711 | def get_ecosystem_solver ( ecosystem_name , parser_kwargs = None , fetcher_kwargs = None ) : from . python import PythonSolver if ecosystem_name . lower ( ) == "pypi" : source = Source ( url = "https://pypi.org/simple" , warehouse_api_url = "https://pypi.org/pypi" , warehouse = True ) return PythonSolver ( parser_kwargs , fetcher_kwargs = { "source" : source } ) raise NotImplementedError ( "Unknown ecosystem: {}" . format ( ecosystem_name ) ) | Get Solver subclass instance for particular ecosystem . |
57,712 | def check ( self , version ) : def _compare_spec ( spec ) : if len ( spec ) == 1 : spec = ( "=" , spec [ 0 ] ) token = Tokens . operators . index ( spec [ 0 ] ) comparison = compare_version ( version , spec [ 1 ] ) if token in [ Tokens . EQ1 , Tokens . EQ2 ] : return comparison == 0 elif token == Tokens . GT : return comparison == 1 elif token == Tokens . LT : return comparison == - 1 elif token == Tokens . GTE : return comparison >= 0 elif token == Tokens . LTE : return comparison <= 0 elif token == Tokens . NEQ : return comparison != 0 else : raise ValueError ( "Invalid comparison token" ) results , intermediaries = False , False for spec in self . spec : if isinstance ( spec , list ) : intermediary = True for sub in spec : intermediary &= _compare_spec ( sub ) intermediaries |= intermediary elif isinstance ( spec , tuple ) : results |= _compare_spec ( spec ) return results or intermediaries | Check if version fits into our dependency specification . |
57,713 | def solve ( self , dependencies , graceful = True , all_versions = False ) : def _compare_version_index_url ( v1 , v2 ) : return compare_version ( v1 [ 0 ] , v2 [ 0 ] ) solved = { } for dep in self . dependency_parser . parse ( dependencies ) : _LOGGER . debug ( "Fetching releases for: {}" . format ( dep ) ) name , releases = self . release_fetcher . fetch_releases ( dep . name ) if name in solved : raise SolverException ( "Dependency: {} is listed multiple times" . format ( name ) ) if not releases : if graceful : _LOGGER . info ( "No releases found for package %s" , dep . name ) else : raise SolverException ( "No releases found for package {}" . format ( dep . name ) ) releases = [ release for release in releases if release in dep ] matching = sorted ( releases , key = cmp_to_key ( _compare_version_index_url ) ) _LOGGER . debug ( " matching: %s" , matching ) if all_versions : solved [ name ] = matching else : if not matching : solved [ name ] = None else : if self . _highest_dependency_version : solved [ name ] = matching [ - 1 ] else : solved [ name ] = matching [ 0 ] return solved | Solve dependencies against upstream repository . |
57,714 | def pip_compile ( * packages : str ) : result = None packages = "\n" . join ( packages ) with tempfile . TemporaryDirectory ( ) as tmp_dirname , cwd ( tmp_dirname ) : with open ( "requirements.in" , "w" ) as requirements_file : requirements_file . write ( packages ) runner = CliRunner ( ) try : result = runner . invoke ( cli , [ "requirements.in" ] , catch_exceptions = False ) except Exception as exc : raise ThothPipCompileError ( str ( exc ) ) from exc if result . exit_code != 0 : error_msg = ( f"pip-compile returned non-zero ({result.exit_code:d}) " f"output: {result.output_bytes.decode():s}" ) raise ThothPipCompileError ( error_msg ) return result . output_bytes . decode ( ) | Run pip - compile to pin down packages also resolve their transitive dependencies . |
57,715 | def _print_version ( ctx , _ , value ) : if not value or ctx . resilient_parsing : return click . echo ( analyzer_version ) ctx . exit ( ) | Print solver version and exit . |
57,716 | def cli ( ctx = None , verbose = 0 ) : if ctx : ctx . auto_envvar_prefix = "THOTH_SOLVER" if verbose : _LOG . setLevel ( logging . DEBUG ) _LOG . debug ( "Debug mode is on" ) | Thoth solver command line interface . |
57,717 | def pypi ( click_ctx , requirements , index = None , python_version = 3 , exclude_packages = None , output = None , subgraph_check_api = None , no_transitive = True , no_pretty = False , ) : requirements = [ requirement . strip ( ) for requirement in requirements . split ( "\\n" ) if requirement ] if not requirements : _LOG . error ( "No requirements specified, exiting" ) sys . exit ( 1 ) if not subgraph_check_api : _LOG . info ( "No subgraph check API provided, no queries will be done for dependency subgraphs that should be avoided" ) result = resolve_python ( requirements , index_urls = index . split ( "," ) if index else ( "https://pypi.org/simple" , ) , python_version = int ( python_version ) , transitive = not no_transitive , exclude_packages = set ( map ( str . strip , ( exclude_packages or "" ) . split ( "," ) ) ) , subgraph_check_api = subgraph_check_api , ) print_command_result ( click_ctx , result , analyzer = analyzer_name , analyzer_version = analyzer_version , output = output or "-" , pretty = not no_pretty , ) | Manipulate with dependency requirements using PyPI . |
57,718 | def _create_entry ( entry : dict , source : Source = None ) -> dict : entry [ "package_name" ] = entry [ "package" ] . pop ( "package_name" ) entry [ "package_version" ] = entry [ "package" ] . pop ( "installed_version" ) if source : entry [ "index_url" ] = source . url entry [ "sha256" ] = [ ] for item in source . get_package_hashes ( entry [ "package_name" ] , entry [ "package_version" ] ) : entry [ "sha256" ] . append ( item [ "sha256" ] ) entry . pop ( "package" ) for dependency in entry [ "dependencies" ] : dependency . pop ( "key" , None ) dependency . pop ( "installed_version" , None ) return entry | Filter and normalize the output of pipdeptree entry . |
57,719 | def _get_environment_details ( python_bin : str ) -> list : cmd = "{} -m pipdeptree --json" . format ( python_bin ) output = run_command ( cmd , is_json = True ) . stdout return [ _create_entry ( entry ) for entry in output ] | Get information about packages in environment where packages get installed . |
57,720 | def _should_resolve_subgraph ( subgraph_check_api : str , package_name : str , package_version : str , index_url : str ) -> bool : _LOGGER . info ( "Checking if the given dependency subgraph for package %r in version %r from index %r should be resolved" , package_name , package_version , index_url , ) response = requests . get ( subgraph_check_api , params = { "package_name" : package_name , "package_version" : package_version , "index_url" : index_url } , ) if response . status_code == 200 : return True elif response . status_code == 208 : return False response . raise_for_status ( ) raise ValueError ( "Unreachable code - subgraph check API responded with unknown HTTP status " "code %s for package %r in version %r from index %r" , package_name , package_version , index_url , ) | Ask the given subgraph check API if the given package in the given version should be included in the resolution . |
57,721 | def _install_requirement ( python_bin : str , package : str , version : str = None , index_url : str = None , clean : bool = True ) -> None : previous_version = _pipdeptree ( python_bin , package ) try : cmd = "{} -m pip install --force-reinstall --no-cache-dir --no-deps {}" . format ( python_bin , quote ( package ) ) if version : cmd += "=={}" . format ( quote ( version ) ) if index_url : cmd += ' --index-url "{}" ' . format ( quote ( index_url ) ) trusted_host = urlparse ( index_url ) . netloc cmd += " --trusted-host {}" . format ( trusted_host ) _LOGGER . debug ( "Installing requirement %r in version %r" , package , version ) run_command ( cmd ) yield finally : if clean : _LOGGER . debug ( "Removing installed package %r" , package ) cmd = "{} -m pip uninstall --yes {}" . format ( python_bin , quote ( package ) ) result = run_command ( cmd , raise_on_error = False ) if result . return_code != 0 : _LOGGER . warning ( "Failed to restore previous environment by removing package %r (installed version %r), " "the error is not fatal but can affect future actions: %s" , package , version , result . stderr , ) _LOGGER . debug ( "Restoring previous environment setup after installation of %r (%s)" , package , previous_version ) if previous_version : cmd = "{} -m pip install --force-reinstall --no-cache-dir --no-deps {}=={}" . format ( python_bin , quote ( package ) , quote ( previous_version [ "package" ] [ "installed_version" ] ) ) result = run_command ( cmd , raise_on_error = False ) if result . return_code != 0 : _LOGGER . warning ( "Failed to restore previous environment for package %r (installed version %r), " ", the error is not fatal but can affect future actions (previous version: %r): %s" , package , version , previous_version , result . stderr , ) | Install requirements specified using suggested pip binary . |
57,722 | def _pipdeptree ( python_bin , package_name : str = None , warn : bool = False ) -> typing . Optional [ dict ] : cmd = "{} -m pipdeptree --json" . format ( python_bin ) _LOGGER . debug ( "Obtaining pip dependency tree using: %r" , cmd ) output = run_command ( cmd , is_json = True ) . stdout if not package_name : return output for entry in output : if entry [ "package" ] [ "key" ] . lower ( ) == package_name . lower ( ) : return entry if warn : _LOGGER . warning ( "Package %r was not found in pipdeptree output %r" , package_name , output ) return None | Get pip dependency tree by executing pipdeptree tool . |
57,723 | def _get_dependency_specification ( dep_spec : typing . List [ tuple ] ) -> str : return "," . join ( dep_range [ 0 ] + dep_range [ 1 ] for dep_range in dep_spec ) | Get string representation of dependency specification as provided by PythonDependencyParser . |
57,724 | def resolve ( requirements : typing . List [ str ] , index_urls : list = None , python_version : int = 3 , exclude_packages : set = None , transitive : bool = True , subgraph_check_api : str = None , ) -> dict : assert python_version in ( 2 , 3 ) , "Unknown Python version" if subgraph_check_api and not transitive : _LOGGER . error ( "The check against subgraph API cannot be done if no transitive dependencies are resolved" ) sys . exit ( 2 ) python_bin = "python3" if python_version == 3 else "python2" run_command ( "virtualenv -p python3 venv" ) python_bin = "venv/bin/" + python_bin run_command ( "{} -m pip install pipdeptree" . format ( python_bin ) ) environment_details = _get_environment_details ( python_bin ) result = { "tree" : [ ] , "errors" : [ ] , "unparsed" : [ ] , "unresolved" : [ ] , "environment" : environment_details } all_solvers = [ ] for index_url in index_urls : source = Source ( index_url ) all_solvers . append ( PythonSolver ( fetcher_kwargs = { "source" : source } ) ) for solver in all_solvers : solver_result = _do_resolve_index ( python_bin = python_bin , solver = solver , all_solvers = all_solvers , requirements = requirements , exclude_packages = exclude_packages , transitive = transitive , subgraph_check_api = subgraph_check_api , ) result [ "tree" ] . extend ( solver_result [ "tree" ] ) result [ "errors" ] . extend ( solver_result [ "errors" ] ) result [ "unparsed" ] . extend ( solver_result [ "unparsed" ] ) result [ "unresolved" ] . extend ( solver_result [ "unresolved" ] ) return result | Resolve given requirements for the given Python version . |
57,725 | def fetch_releases ( self , package_name ) : package_name = self . source . normalize_package_name ( package_name ) releases = self . source . get_package_versions ( package_name ) releases_with_index_url = [ ( item , self . index_url ) for item in releases ] return package_name , releases_with_index_url | Fetch package and index_url for a package_name . |
57,726 | def parse_python ( spec ) : def _extract_op_version ( spec ) : if spec . operator == "~=" : version = spec . version . split ( "." ) if len ( version ) in { 2 , 3 , 4 } : if len ( version ) in { 3 , 4 } : del version [ - 1 ] version [ - 1 ] = str ( int ( version [ - 1 ] ) + 1 ) else : raise ValueError ( "%r must not be used with %r" % ( spec . operator , spec . version ) ) return [ ( ">=" , spec . version ) , ( "<" , "." . join ( version ) ) ] elif spec . operator == "==" and spec . version . endswith ( ".*" ) : try : result = check_output ( [ "/usr/bin/semver-ranger" , spec . version ] , universal_newlines = True ) . strip ( ) gte , lt = result . split ( ) return [ ( ">=" , gte . lstrip ( ">=" ) ) , ( "<" , lt . lstrip ( "<" ) ) ] except ValueError : _LOGGER . warning ( "couldn't resolve ==%s" , spec . version ) return spec . operator , spec . version elif spec . operator == "===" : return "==" , spec . version else : return spec . operator , spec . version def _get_pip_spec ( requirements ) : if hasattr ( requirements , "specs" ) : return requirements . specs elif hasattr ( requirements , "specifier" ) : specs = [ _extract_op_version ( spec ) for spec in requirements . specifier ] if len ( specs ) == 0 : specs = [ ( ">=" , "0.0.0" ) ] return specs _LOGGER . info ( "Parsing dependency %r" , spec ) with NamedTemporaryFile ( mode = "w+" , suffix = "pysolve" ) as f : f . write ( spec ) f . flush ( ) parsed = parse_requirements ( f . name , session = f . name ) dependency = [ Dependency ( x . name , _get_pip_spec ( x . req ) ) for x in parsed ] . pop ( ) return dependency | Parse PyPI specification of a single dependency . |
57,727 | def get ( obj ) : if not isinstance ( obj , bytes ) : raise TypeError ( "object type must be bytes" ) info = { "type" : dict ( ) , "extension" : dict ( ) , "mime" : dict ( ) } stream = " " . join ( [ '{:02X}' . format ( byte ) for byte in obj ] ) for element in data : for signature in element [ "signature" ] : offset = element [ "offset" ] * 2 + element [ "offset" ] if signature == stream [ offset : len ( signature ) + offset ] : for key in [ "type" , "extension" , "mime" ] : info [ key ] [ element [ key ] ] = len ( signature ) for key in [ "type" , "extension" , "mime" ] : info [ key ] = [ element for element in sorted ( info [ key ] , key = info [ key ] . get , reverse = True ) ] return Info ( info [ "type" ] , info [ "extension" ] , info [ "mime" ] ) | Determines file format and picks suitable file types extensions and MIME types |
57,728 | def bottleneck_matching ( I1 , I2 , matchidx , D , labels = [ "dgm1" , "dgm2" ] , ax = None ) : plot_diagrams ( [ I1 , I2 ] , labels = labels , ax = ax ) cp = np . cos ( np . pi / 4 ) sp = np . sin ( np . pi / 4 ) R = np . array ( [ [ cp , - sp ] , [ sp , cp ] ] ) if I1 . size == 0 : I1 = np . array ( [ [ 0 , 0 ] ] ) if I2 . size == 0 : I2 = np . array ( [ [ 0 , 0 ] ] ) I1Rot = I1 . dot ( R ) I2Rot = I2 . dot ( R ) dists = [ D [ i , j ] for ( i , j ) in matchidx ] ( i , j ) = matchidx [ np . argmax ( dists ) ] if i >= I1 . shape [ 0 ] and j >= I2 . shape [ 0 ] : return if i >= I1 . shape [ 0 ] : diagElem = np . array ( [ I2Rot [ j , 0 ] , 0 ] ) diagElem = diagElem . dot ( R . T ) plt . plot ( [ I2 [ j , 0 ] , diagElem [ 0 ] ] , [ I2 [ j , 1 ] , diagElem [ 1 ] ] , "g" ) elif j >= I2 . shape [ 0 ] : diagElem = np . array ( [ I1Rot [ i , 0 ] , 0 ] ) diagElem = diagElem . dot ( R . T ) plt . plot ( [ I1 [ i , 0 ] , diagElem [ 0 ] ] , [ I1 [ i , 1 ] , diagElem [ 1 ] ] , "g" ) else : plt . plot ( [ I1 [ i , 0 ] , I2 [ j , 0 ] ] , [ I1 [ i , 1 ] , I2 [ j , 1 ] ] , "g" ) | Visualize bottleneck matching between two diagrams |
57,729 | def transform ( self , diagrams ) : if len ( diagrams ) == 0 : return np . zeros ( ( self . nx , self . ny ) ) try : singular = not isinstance ( diagrams [ 0 ] [ 0 ] , collections . Iterable ) except IndexError : singular = False if singular : diagrams = [ diagrams ] dgs = [ np . copy ( diagram , np . float64 ) for diagram in diagrams ] landscapes = [ PersImage . to_landscape ( dg ) for dg in dgs ] if not self . specs : self . specs = { "maxBD" : np . max ( [ np . max ( np . vstack ( ( landscape , np . zeros ( ( 1 , 2 ) ) ) ) ) for landscape in landscapes ] + [ 0 ] ) , "minBD" : np . min ( [ np . min ( np . vstack ( ( landscape , np . zeros ( ( 1 , 2 ) ) ) ) ) for landscape in landscapes ] + [ 0 ] ) , } imgs = [ self . _transform ( dgm ) for dgm in landscapes ] if singular : imgs = imgs [ 0 ] return imgs | Convert diagram or list of diagrams to a persistence image . |
57,730 | def weighting ( self , landscape = None ) : if landscape is not None : if len ( landscape ) > 0 : maxy = np . max ( landscape [ : , 1 ] ) else : maxy = 1 def linear ( interval ) : d = interval [ 1 ] return ( 1 / maxy ) * d if landscape is not None else d def pw_linear ( interval ) : t = interval [ 1 ] b = maxy / self . ny if t <= 0 : return 0 if 0 < t < b : return t / b if b <= t : return 1 return linear | Define a weighting function for stability results to hold the function must be 0 at y = 0 . |
57,731 | def show ( self , imgs , ax = None ) : ax = ax or plt . gca ( ) if type ( imgs ) is not list : imgs = [ imgs ] for i , img in enumerate ( imgs ) : ax . imshow ( img , cmap = plt . get_cmap ( "plasma" ) ) ax . axis ( "off" ) | Visualize the persistence image |
57,732 | def resolve_orm_path ( model , orm_path ) : bits = orm_path . split ( '__' ) endpoint_model = reduce ( get_model_at_related_field , [ model ] + bits [ : - 1 ] ) if bits [ - 1 ] == 'pk' : field = endpoint_model . _meta . pk else : field = endpoint_model . _meta . get_field ( bits [ - 1 ] ) return field | Follows the queryset - style query path of orm_path starting from model class . If the path ends up referring to a bad field name django . db . models . fields . FieldDoesNotExist will be raised . |
57,733 | def get_model_at_related_field ( model , attr ) : field = model . _meta . get_field ( attr ) if hasattr ( field , 'related_model' ) : return field . related_model raise ValueError ( "{model}.{attr} ({klass}) is not a relationship field." . format ( ** { 'model' : model . __name__ , 'attr' : attr , 'klass' : field . __class__ . __name__ , } ) ) | Looks up attr as a field of model and returns the related model class . If attr is not a relationship field ValueError is raised . |
57,734 | def contains_plural_field ( model , fields ) : source_model = model for orm_path in fields : model = source_model bits = orm_path . lstrip ( '+-' ) . split ( '__' ) for bit in bits [ : - 1 ] : field = model . _meta . get_field ( bit ) if field . many_to_many or field . one_to_many : return True model = get_model_at_related_field ( model , bit ) return False | Returns a boolean indicating if fields contains a relationship to multiple items . |
57,735 | def get_json_response_object ( self , datatable ) : datatable . populate_records ( ) draw = getattr ( self . request , self . request . method ) . get ( 'draw' , None ) if draw is not None : draw = escape_uri_path ( draw ) response_data = { 'draw' : draw , 'recordsFiltered' : datatable . unpaged_record_count , 'recordsTotal' : datatable . total_initial_record_count , 'data' : [ dict ( record , ** { 'DT_RowId' : record . pop ( 'pk' ) , 'DT_RowData' : record . pop ( '_extra_data' ) , } ) for record in datatable . get_records ( ) ] , } return response_data | Returns the JSON - compatible dictionary that will be serialized for an AJAX response . |
57,736 | def serialize_to_json ( self , response_data ) : indent = None if settings . DEBUG : indent = 4 return json . dumps ( response_data , indent = indent , cls = DjangoJSONEncoder ) | Returns the JSON string for the compiled data object . |
57,737 | def get_ajax ( self , request , * args , ** kwargs ) : response_data = self . get_json_response_object ( self . _datatable ) response = HttpResponse ( self . serialize_to_json ( response_data ) , content_type = "application/json" ) return response | Called when accessed via AJAX on the request method specified by the Datatable . |
57,738 | def get_active_ajax_datatable ( self ) : data = getattr ( self . request , self . request . method ) datatables_dict = self . get_datatables ( only = data [ 'datatable' ] ) return list ( datatables_dict . values ( ) ) [ 0 ] | Returns a single datatable according to the hint GET variable from an AJAX request . |
57,739 | def get_datatables ( self , only = None ) : if not hasattr ( self , '_datatables' ) : self . _datatables = { } datatable_classes = self . get_datatable_classes ( ) for name , datatable_class in datatable_classes . items ( ) : if only and name != only : continue queryset_getter_name = 'get_%s_datatable_queryset' % ( name , ) queryset_getter = getattr ( self , queryset_getter_name , None ) if queryset_getter is None : raise ValueError ( "%r must declare a method %r." % ( self . __class__ . __name__ , queryset_getter_name ) ) queryset = queryset_getter ( ) if datatable_class is None : class AutoMeta : model = queryset . model opts = AutoMeta ( ) datatable_class = Datatable else : opts = datatable_class . options_class ( datatable_class . _meta ) kwargs = self . get_default_datatable_kwargs ( object_list = queryset ) kwargs_getter_name = 'get_%s_datatable_kwargs' % ( name , ) kwargs_getter = getattr ( self , kwargs_getter_name , None ) if kwargs_getter : kwargs = kwargs_getter ( ** kwargs ) if 'url' in kwargs : kwargs [ 'url' ] = kwargs [ 'url' ] + "?datatable=%s" % ( name , ) for meta_opt in opts . __dict__ : if meta_opt in kwargs : setattr ( opts , meta_opt , kwargs . pop ( meta_opt ) ) datatable_class = type ( '%s_Synthesized' % ( datatable_class . __name__ , ) , ( datatable_class , ) , { '__module__' : datatable_class . __module__ , 'Meta' : opts , } ) self . _datatables [ name ] = datatable_class ( ** kwargs ) return self . _datatables | Returns a dict of the datatables served by this view . |
57,740 | def get_default_datatable_kwargs ( self , ** kwargs ) : kwargs [ 'view' ] = self if hasattr ( self , 'request' ) : kwargs [ 'url' ] = self . request . path kwargs [ 'query_config' ] = getattr ( self . request , self . request . method ) else : kwargs [ 'query_config' ] = { } return kwargs | Builds the default set of kwargs for initializing a Datatable class . Note that by default the MultipleDatatableMixin does not support any configuration via the view s class attributes and instead relies completely on the Datatable class itself to declare its configuration details . |
57,741 | def get_column_for_modelfield ( model_field ) : while model_field . related_model : model_field = model_field . related_model . _meta . pk for ColumnClass , modelfield_classes in COLUMN_CLASSES : if isinstance ( model_field , tuple ( modelfield_classes ) ) : return ColumnClass | Return the built - in Column class for a model field class . |
57,742 | def get_source_value ( self , obj , source , ** kwargs ) : result = [ ] for sub_source in self . expand_source ( source ) : sub_result = super ( CompoundColumn , self ) . get_source_value ( obj , sub_source , ** kwargs ) result . extend ( sub_result ) return result | Treat field as a nested sub - Column instance which explicitly stands in as the object to which term coercions and the query type lookup are delegated . |
57,743 | def _get_flat_db_sources ( self , model ) : sources = [ ] for source in self . sources : for sub_source in self . expand_source ( source ) : target_field = self . resolve_source ( model , sub_source ) if target_field : sources . append ( sub_source ) return sources | Return a flattened representation of the individual sources lists . |
57,744 | def get_source_handler ( self , model , source ) : if isinstance ( source , Column ) : return source modelfield = resolve_orm_path ( model , source ) column_class = get_column_for_modelfield ( modelfield ) return column_class ( ) | Allow the nested Column source to be its own handler . |
57,745 | def dispatch ( self , request , * args , ** kwargs ) : if request . GET . get ( self . xeditable_fieldname_param ) : return self . get_ajax_xeditable_choices ( request , * args , ** kwargs ) return super ( XEditableMixin , self ) . dispatch ( request , * args , ** kwargs ) | Introduces the ensure_csrf_cookie decorator and handles xeditable choices ajax . |
57,746 | def get_ajax_xeditable_choices ( self , request , * args , ** kwargs ) : field_name = request . GET . get ( self . xeditable_fieldname_param ) if not field_name : return HttpResponseBadRequest ( "Field name must be given" ) queryset = self . get_queryset ( ) if not self . model : self . model = queryset . model from datatableview . views import legacy if isinstance ( self , legacy . LegacyDatatableMixin ) : columns = self . _get_datatable_options ( ) [ 'columns' ] for name in columns : if isinstance ( name , ( list , tuple ) ) : name = name [ 1 ] if name == field_name : break else : return HttpResponseBadRequest ( "Invalid field name" ) else : datatable = self . get_datatable ( ) if not hasattr ( datatable , 'config' ) : datatable . configure ( ) if field_name not in datatable . config [ 'columns' ] : return HttpResponseBadRequest ( "Invalid field name" ) field = self . model . _meta . get_field ( field_name ) choices = self . get_field_choices ( field , field_name ) return HttpResponse ( json . dumps ( choices ) ) | AJAX GET handler for xeditable queries asking for field choice lists . |
57,747 | def post ( self , request , * args , ** kwargs ) : self . object_list = None form = self . get_xeditable_form ( self . get_xeditable_form_class ( ) ) if form . is_valid ( ) : obj = self . get_update_object ( form ) if obj is None : data = json . dumps ( { 'status' : 'error' , 'message' : "Object does not exist." } ) return HttpResponse ( data , content_type = "application/json" , status = 404 ) return self . update_object ( form , obj ) else : data = json . dumps ( { 'status' : 'error' , 'message' : "Invalid request" , 'form_errors' : form . errors , } ) return HttpResponse ( data , content_type = "application/json" , status = 400 ) | Builds a dynamic form that targets only the field in question and saves the modification . |
57,748 | def get_xeditable_form_kwargs ( self ) : kwargs = { 'model' : self . get_queryset ( ) . model , } if self . request . method in ( 'POST' , 'PUT' ) : kwargs . update ( { 'data' : self . request . POST , } ) return kwargs | Returns a dict of keyword arguments to be sent to the xeditable form class . |
57,749 | def get_update_object ( self , form ) : pk = form . cleaned_data [ 'pk' ] queryset = self . get_queryset ( ) try : obj = queryset . get ( pk = pk ) except queryset . model . DoesNotExist : obj = None return obj | Retrieves the target object based on the update form s pk and the table s queryset . |
57,750 | def update_object ( self , form , obj ) : field_name = form . cleaned_data [ 'name' ] value = form . cleaned_data [ 'value' ] setattr ( obj , field_name , value ) save_kwargs = { } if CAN_UPDATE_FIELDS : save_kwargs [ 'update_fields' ] = [ field_name ] obj . save ( ** save_kwargs ) data = json . dumps ( { 'status' : 'success' , } ) return HttpResponse ( data , content_type = "application/json" ) | Saves the new value to the target object . |
57,751 | def get_field_choices ( self , field , field_name ) : if self . request . GET . get ( 'select2' ) : names = [ 'id' , 'text' ] else : names = [ 'value' , 'text' ] choices_getter = getattr ( self , 'get_field_%s_choices' , None ) if choices_getter is None : if isinstance ( field , ForeignKey ) : choices_getter = self . _get_foreignkey_choices else : choices_getter = self . _get_default_choices return [ dict ( zip ( names , choice ) ) for choice in choices_getter ( field , field_name ) ] | Returns the valid choices for field . The field_name argument is given for convenience . |
57,752 | def preload_record_data ( self , obj ) : data = { } for orm_path , column_name in self . value_queries . items ( ) : value = obj [ orm_path ] if column_name not in data : data [ column_name ] = value else : if not isinstance ( data [ column_name ] , ( tuple , list ) ) : data [ column_name ] = [ data [ column_name ] ] data [ column_name ] . append ( value ) obj . update ( data ) return super ( ValuesDatatable , self ) . preload_record_data ( obj ) | Modifies the obj values dict to alias the selected values to the column name that asked for its selection . |
57,753 | def resolve_virtual_columns ( self , * names ) : from . views . legacy import get_field_definition virtual_columns = { } for name in names : field = get_field_definition ( name ) column = TextColumn ( sources = field . fields , label = field . pretty_name , processor = field . callback ) column . name = field . pretty_name if field . pretty_name else field . fields [ 0 ] virtual_columns [ name ] = column new_columns = OrderedDict ( ) for name in self . _meta . columns : if self . columns . get ( name ) : column = self . columns [ name ] else : column = virtual_columns [ name ] new_columns [ column . name ] = column self . columns = new_columns | Assume that all names are legacy - style tuple declarations and generate modern columns instances to match the behavior of the old syntax . |
57,754 | def set_value_field ( self , model , field_name ) : fields = fields_for_model ( model , fields = [ field_name ] ) self . fields [ 'value' ] = fields [ field_name ] | Adds a value field to this form that uses the appropriate formfield for the named target field . This will help to ensure that the value is correctly validated . |
57,755 | def clean_name ( self ) : field_name = self . cleaned_data [ 'name' ] if hasattr ( self . model . _meta , 'get_fields' ) : field_names = [ field . name for field in self . model . _meta . get_fields ( ) ] else : field_names = self . model . _meta . get_all_field_names ( ) if field_name not in field_names : raise ValidationError ( "%r is not a valid field." % field_name ) return field_name | Validates that the name field corresponds to a field on the model . |
57,756 | def get_field_definition ( field_definition ) : if not isinstance ( field_definition , ( tuple , list ) ) : field_definition = [ field_definition ] else : field_definition = list ( field_definition ) if len ( field_definition ) == 1 : field = [ None , field_definition , None ] elif len ( field_definition ) == 2 : field = field_definition + [ None ] elif len ( field_definition ) == 3 : field = field_definition else : raise ValueError ( "Invalid field definition format." ) if not isinstance ( field [ 1 ] , ( tuple , list ) ) : field [ 1 ] = ( field [ 1 ] , ) field [ 1 ] = tuple ( name for name in field [ 1 ] if name is not None ) return FieldDefinitionTuple ( * field ) | Normalizes a field definition into its component parts even if some are missing . |
57,757 | def get_cached_data ( datatable , ** kwargs ) : cache_key = '%s%s' % ( CACHE_PREFIX , datatable . get_cache_key ( ** kwargs ) ) data = cache . get ( cache_key ) log . debug ( "Reading data from cache at %r: %r" , cache_key , data ) return data | Returns the cached object list under the appropriate key or None if not set . |
57,758 | def cache_data ( datatable , data , ** kwargs ) : cache_key = '%s%s' % ( CACHE_PREFIX , datatable . get_cache_key ( ** kwargs ) ) log . debug ( "Setting data to cache at %r: %r" , cache_key , data ) cache . set ( cache_key , data ) | Stores the object list in the cache under the appropriate key . |
57,759 | def keyed_helper ( helper ) : @ wraps ( helper ) def wrapper ( instance = None , key = None , attr = None , * args , ** kwargs ) : if set ( ( instance , key , attr ) ) == { None } : raise ValueError ( "If called directly, helper function '%s' requires either a model" " instance, or a 'key' or 'attr' keyword argument." % helper . __name__ ) if instance is not None : return helper ( instance , * args , ** kwargs ) if key is None and attr is None : attr = 'self' if attr : if attr == 'self' : key = lambda obj : obj else : key = operator . attrgetter ( attr ) @ wraps ( helper ) def helper_wrapper ( instance , * args , ** kwargs ) : return helper ( key ( instance ) , * args , ** kwargs ) return helper_wrapper wrapper . _is_wrapped = True return wrapper | Decorator for helper functions that operate on direct values instead of model instances . |
57,760 | def itemgetter ( k , ellipsis = False , key = None ) : def helper ( instance , * args , ** kwargs ) : default_value = kwargs . get ( 'default_value' ) if default_value is None : default_value = instance value = default_value [ k ] if ellipsis and isinstance ( k , slice ) and isinstance ( value , six . string_types ) and len ( default_value ) > len ( value ) : if ellipsis is True : value += "..." else : value += ellipsis return value if key : helper = keyed_helper ( helper ) ( key = key ) return helper | Looks up k as an index of the column s value . |
57,761 | def attrgetter ( attr , key = None ) : def helper ( instance , * args , ** kwargs ) : value = instance for bit in attr . split ( '.' ) : value = getattr ( value , bit ) if callable ( value ) : value = value ( ) return value if key : helper = keyed_helper ( helper ) ( key = key ) return helper | Looks up attr on the target value . If the result is a callable it will be called in place without arguments . |
57,762 | def make_processor ( func , arg = None ) : def helper ( instance , * args , ** kwargs ) : value = kwargs . get ( 'default_value' ) if value is None : value = instance if arg is not None : extra_arg = [ arg ] else : extra_arg = [ ] return func ( value , * extra_arg ) return helper | A pre - called processor that wraps the execution of the target callable func . |
57,763 | def upload_kitten ( client ) : config = { 'album' : album , 'name' : 'Catastrophe!' , 'title' : 'Catastrophe!' , 'description' : 'Cute kitten being cute on {0}' . format ( datetime . now ( ) ) } print ( "Uploading image... " ) image = client . upload_from_path ( image_path , config = config , anon = False ) print ( "Done" ) print ( ) return image | Upload a picture of a kitten . We don t ship one so get creative! |
57,764 | def _isdst ( dt ) : if type ( dt ) == datetime . date : dt = datetime . datetime . combine ( dt , datetime . datetime . min . time ( ) ) dtc = dt . replace ( year = datetime . datetime . now ( ) . year ) if time . localtime ( dtc . timestamp ( ) ) . tm_isdst == 1 : return True return False | Check if date is in dst . |
57,765 | def _mktime ( time_struct ) : try : return time . mktime ( time_struct ) except OverflowError : dt = datetime . datetime ( * time_struct [ : 6 ] ) ep = datetime . datetime ( 1970 , 1 , 1 ) diff = dt - ep ts = diff . days * 24 * 3600 + diff . seconds + time . timezone if time_struct . tm_isdst == 1 : ts -= 3600 if time_struct . tm_isdst == - 1 and _isdst ( dt ) : ts -= 3600 return ts | Custom mktime because Windows can t be arsed to properly do pre - Epoch dates probably because it s busy counting all its chromosomes . |
57,766 | def _strftime ( pattern , time_struct = time . localtime ( ) ) : try : return time . strftime ( pattern , time_struct ) except OSError : dt = datetime . datetime . fromtimestamp ( _mktime ( time_struct ) ) original = dt . year current = datetime . datetime . now ( ) . year dt = dt . replace ( year = current ) ts = dt . timestamp ( ) if _isdst ( dt ) : ts -= 3600 string = time . strftime ( pattern , time . localtime ( ts ) ) string = string . replace ( str ( current ) , str ( original ) ) return string | Custom strftime because Windows is shit again . |
57,767 | def _gmtime ( timestamp ) : try : return time . gmtime ( timestamp ) except OSError : dt = datetime . datetime ( 1970 , 1 , 1 ) + datetime . timedelta ( seconds = timestamp ) dst = int ( _isdst ( dt ) ) return time . struct_time ( dt . timetuple ( ) [ : 8 ] + tuple ( [ dst ] ) ) | Custom gmtime because yada yada . |
57,768 | def _dtfromtimestamp ( timestamp ) : try : return datetime . datetime . fromtimestamp ( timestamp ) except OSError : timestamp -= time . timezone dt = datetime . datetime ( 1970 , 1 , 1 ) + datetime . timedelta ( seconds = timestamp ) if _isdst ( dt ) : timestamp += 3600 dt = datetime . datetime ( 1970 , 1 , 1 ) + datetime . timedelta ( seconds = timestamp ) return dt | Custom datetime timestamp constructor . because Windows . again . |
57,769 | def _dfromtimestamp ( timestamp ) : try : return datetime . date . fromtimestamp ( timestamp ) except OSError : timestamp -= time . timezone d = datetime . date ( 1970 , 1 , 1 ) + datetime . timedelta ( seconds = timestamp ) if _isdst ( d ) : timestamp += 3600 d = datetime . date ( 1970 , 1 , 1 ) + datetime . timedelta ( seconds = timestamp ) return d | Custom date timestamp constructor . ditto |
57,770 | def guesstype ( timestr ) : timestr_full = " {} " . format ( timestr ) if timestr_full . find ( " in " ) != - 1 or timestr_full . find ( " ago " ) != - 1 : return Chronyk ( timestr ) comps = [ "second" , "minute" , "hour" , "day" , "week" , "month" , "year" ] for comp in comps : if timestr_full . find ( comp ) != - 1 : return ChronykDelta ( timestr ) return Chronyk ( timestr ) | Tries to guess whether a string represents a time or a time delta and returns the appropriate object . |
57,771 | def _round ( num ) : deci = num - math . floor ( num ) if deci > 0.8 : return int ( math . floor ( num ) + 1 ) else : return int ( math . floor ( num ) ) | A custom rounding function that s a bit more strict . |
57,772 | def datetime ( self , timezone = None ) : if timezone is None : timezone = self . timezone return _dtfromtimestamp ( self . __timestamp__ - timezone ) | Returns a datetime object . |
57,773 | def ctime ( self , timezone = None ) : if timezone is None : timezone = self . timezone return time . ctime ( self . __timestamp__ - timezone ) | Returns a ctime string . |
57,774 | def timestring ( self , pattern = "%Y-%m-%d %H:%M:%S" , timezone = None ) : if timezone is None : timezone = self . timezone timestamp = self . __timestamp__ - timezone timestamp -= LOCALTZ return _strftime ( pattern , _gmtime ( timestamp ) ) | Returns a time string . |
57,775 | def get_ticket ( self , ticket_id ) : url = 'tickets/%d' % ticket_id ticket = self . _api . _get ( url ) return Ticket ( ** ticket ) | Fetches the ticket for the given ticket ID |
57,776 | def create_outbound_email ( self , subject , description , email , email_config_id , ** kwargs ) : url = 'tickets/outbound_email' priority = kwargs . get ( 'priority' , 1 ) data = { 'subject' : subject , 'description' : description , 'priority' : priority , 'email' : email , 'email_config_id' : email_config_id , } data . update ( kwargs ) ticket = self . _api . _post ( url , data = json . dumps ( data ) ) return Ticket ( ** ticket ) | Creates an outbound email |
57,777 | def update_ticket ( self , ticket_id , ** kwargs ) : url = 'tickets/%d' % ticket_id ticket = self . _api . _put ( url , data = json . dumps ( kwargs ) ) return Ticket ( ** ticket ) | Updates a ticket from a given ticket ID |
57,778 | def get_agent ( self , agent_id ) : url = 'agents/%s' % agent_id return Agent ( ** self . _api . _get ( url ) ) | Fetches the agent for the given agent ID |
57,779 | def update_agent ( self , agent_id , ** kwargs ) : url = 'agents/%s' % agent_id agent = self . _api . _put ( url , data = json . dumps ( kwargs ) ) return Agent ( ** agent ) | Updates an agent |
57,780 | def _action ( self , res ) : try : j = res . json ( ) except : res . raise_for_status ( ) j = { } if 'Retry-After' in res . headers : raise HTTPError ( '403 Forbidden: API rate-limit has been reached until {}.' 'See http://freshdesk.com/api#ratelimit' . format ( res . headers [ 'Retry-After' ] ) ) if 'require_login' in j : raise HTTPError ( '403 Forbidden: API key is incorrect for this domain' ) if 'error' in j : raise HTTPError ( '{}: {}' . format ( j . get ( 'description' ) , j . get ( 'errors' ) ) ) try : res . raise_for_status ( ) except Exception as e : raise HTTPError ( "{}: {}" . format ( e , j ) ) return j | Returns JSON response or raise exception if errors are present |
57,781 | def headTail_breaks ( values , cuts ) : values = np . array ( values ) mean = np . mean ( values ) cuts . append ( mean ) if len ( values ) > 1 : return headTail_breaks ( values [ values >= mean ] , cuts ) return cuts | head tail breaks helper function |
57,782 | def quantile ( y , k = 4 ) : w = 100. / k p = np . arange ( w , 100 + w , w ) if p [ - 1 ] > 100.0 : p [ - 1 ] = 100.0 q = np . array ( [ stats . scoreatpercentile ( y , pct ) for pct in p ] ) q = np . unique ( q ) k_q = len ( q ) if k_q < k : Warn ( 'Warning: Not enough unique values in array to form k classes' , UserWarning ) Warn ( 'Warning: setting k to %d' % k_q , UserWarning ) return q | Calculates the quantiles for an array |
57,783 | def bin1d ( x , bins ) : left = [ - float ( "inf" ) ] left . extend ( bins [ 0 : - 1 ] ) right = bins cuts = list ( zip ( left , right ) ) k = len ( bins ) binIds = np . zeros ( x . shape , dtype = 'int' ) while cuts : k -= 1 l , r = cuts . pop ( - 1 ) binIds += ( x > l ) * ( x <= r ) * k counts = np . bincount ( binIds , minlength = len ( bins ) ) return ( binIds , counts ) | Place values of a 1 - d array into bins and determine counts of values in each bin |
57,784 | def _kmeans ( y , k = 5 ) : y = y * 1. centroids = KMEANS ( y , k ) [ 0 ] centroids . sort ( ) try : class_ids = np . abs ( y - centroids ) . argmin ( axis = 1 ) except : class_ids = np . abs ( y [ : , np . newaxis ] - centroids ) . argmin ( axis = 1 ) uc = np . unique ( class_ids ) cuts = np . array ( [ y [ class_ids == c ] . max ( ) for c in uc ] ) y_cent = np . zeros_like ( y ) for c in uc : y_cent [ class_ids == c ] = centroids [ c ] diffs = y - y_cent diffs *= diffs return class_ids , cuts , diffs . sum ( ) , centroids | Helper function to do kmeans in one dimension |
57,785 | def natural_breaks ( values , k = 5 ) : values = np . array ( values ) uv = np . unique ( values ) uvk = len ( uv ) if uvk < k : Warn ( 'Warning: Not enough unique values in array to form k classes' , UserWarning ) Warn ( 'Warning: setting k to %d' % uvk , UserWarning ) k = uvk kres = _kmeans ( values , k ) sids = kres [ - 1 ] fit = kres [ - 2 ] class_ids = kres [ 0 ] cuts = kres [ 1 ] return ( sids , class_ids , fit , cuts ) | natural breaks helper function |
57,786 | def _fit ( y , classes ) : tss = 0 for class_def in classes : yc = y [ class_def ] css = yc - yc . mean ( ) css *= css tss += sum ( css ) return tss | Calculate the total sum of squares for a vector y classified into classes |
57,787 | def gadf ( y , method = "Quantiles" , maxk = 15 , pct = 0.8 ) : y = np . array ( y ) adam = ( np . abs ( y - np . median ( y ) ) ) . sum ( ) for k in range ( 2 , maxk + 1 ) : cl = kmethods [ method ] ( y , k ) gadf = 1 - cl . adcm / adam if gadf > pct : break return ( k , cl , gadf ) | Evaluate the Goodness of Absolute Deviation Fit of a Classifier Finds the minimum value of k for which gadf > pct |
57,788 | def make ( cls , * args , ** kwargs ) : to_annotate = copy . deepcopy ( kwargs ) return_object = kwargs . pop ( 'return_object' , False ) return_bins = kwargs . pop ( 'return_bins' , False ) return_counts = kwargs . pop ( 'return_counts' , False ) rolling = kwargs . pop ( 'rolling' , False ) if rolling : data = list ( range ( 10 ) ) cls_instance = cls ( data , * args , ** kwargs ) cls_instance . y = np . array ( [ ] ) else : cls_instance = None def classifier ( data , cls_instance = cls_instance ) : if rolling : cls_instance . update ( data , inplace = True , ** kwargs ) yb = cls_instance . find_bin ( data ) else : cls_instance = cls ( data , * args , ** kwargs ) yb = cls_instance . yb outs = [ yb , None , None , None ] outs [ 1 ] = cls_instance if return_object else None outs [ 2 ] = cls_instance . bins if return_bins else None outs [ 3 ] = cls_instance . counts if return_counts else None outs = [ a for a in outs if a is not None ] if len ( outs ) == 1 : return outs [ 0 ] else : return outs classifier . _options = to_annotate return classifier | Configure and create a classifier that will consume data and produce classifications given the configuration options specified by this function . |
57,789 | def get_tss ( self ) : tss = 0 for class_def in self . classes : if len ( class_def ) > 0 : yc = self . y [ class_def ] css = yc - yc . mean ( ) css *= css tss += sum ( css ) return tss | Total sum of squares around class means |
57,790 | def get_gadf ( self ) : adam = ( np . abs ( self . y - np . median ( self . y ) ) ) . sum ( ) gadf = 1 - self . adcm / adam return gadf | Goodness of absolute deviation of fit |
57,791 | def find_bin ( self , x ) : x = np . asarray ( x ) . flatten ( ) right = np . digitize ( x , self . bins , right = True ) if right . max ( ) == len ( self . bins ) : right [ right == len ( self . bins ) ] = len ( self . bins ) - 1 return right | Sort input or inputs according to the current bin estimate |
57,792 | def update ( self , y = None , inplace = False , ** kwargs ) : kwargs . update ( { 'k' : kwargs . pop ( 'k' , self . k ) } ) kwargs . update ( { 'pct' : kwargs . pop ( 'pct' , self . pct ) } ) kwargs . update ( { 'truncate' : kwargs . pop ( 'truncate' , self . _truncated ) } ) if inplace : self . _update ( y , ** kwargs ) else : new = copy . deepcopy ( self ) new . _update ( y , ** kwargs ) return new | Add data or change classification parameters . |
57,793 | def _ss ( self , class_def ) : yc = self . y [ class_def ] css = yc - yc . mean ( ) css *= css return sum ( css ) | calculates sum of squares for a class |
57,794 | def _swap ( self , class1 , class2 , a ) : ss1 = self . _ss ( class1 ) ss2 = self . _ss ( class2 ) tss1 = ss1 + ss2 class1c = copy . copy ( class1 ) class2c = copy . copy ( class2 ) class1c . remove ( a ) class2c . append ( a ) ss1 = self . _ss ( class1c ) ss2 = self . _ss ( class2c ) tss2 = ss1 + ss2 if tss1 < tss2 : return False else : return True | evaluate cost of moving a from class1 to class2 |
57,795 | def get_bounding_box_list_render_image ( pdf_file_name , input_doc ) : program_to_use = "pdftoppm" if args . gsRender : program_to_use = "Ghostscript" if not args . threshold : args . threshold = 191 threshold = args . threshold if not args . numSmooths : args . numSmooths = 0 if not args . numBlurs : args . numBlurs = 0 temp_dir = ex . program_temp_directory temp_image_file_root = os . path . join ( temp_dir , ex . temp_file_prefix + "PageImage" ) if args . verbose : print ( "\nRendering the PDF to images using the " + program_to_use + " program," "\nthis may take a while..." ) render_pdf_file_to_image_files ( pdf_file_name , temp_image_file_root , program_to_use ) outfiles = sorted ( glob . glob ( temp_image_file_root + "*" ) ) if args . verbose : print ( "\nAnalyzing the page images with PIL to find bounding boxes," "\nusing the threshold " + str ( args . threshold ) + "." " Finding the bounding box for page:\n" ) bounding_box_list = [ ] for page_num , tmp_image_file_name in enumerate ( outfiles ) : curr_page = input_doc . getPage ( page_num ) max_num_tries = 3 time_between_tries = 1 curr_num_tries = 0 while True : try : im = Image . open ( tmp_image_file_name ) break except ( IOError , UnicodeDecodeError ) as e : curr_num_tries += 1 if args . verbose : print ( "Warning: Exception opening image" , tmp_image_file_name , "on try" , curr_num_tries , "\nError is" , e , file = sys . stderr ) if curr_num_tries > max_num_tries : raise time . sleep ( time_between_tries ) for i in range ( args . numBlurs ) : im = im . filter ( ImageFilter . BLUR ) for i in range ( args . numSmooths ) : im = im . filter ( ImageFilter . SMOOTH_MORE ) if args . verbose : print ( page_num + 1 , end = " " ) im = im . point ( lambda p : 255 if p < threshold else 0 ) if args . showImages : im . show ( ) bounding_box = calculate_bounding_box_from_image ( im , curr_page ) bounding_box_list . append ( bounding_box ) os . remove ( tmp_image_file_name ) if args . verbose : print ( ) return bounding_box_list | Calculate the bounding box list by directly rendering each page of the PDF as an image file . The MediaBox and CropBox values in input_doc should have already been set to the chosen page size before the rendering . |
57,796 | def render_pdf_file_to_image_files ( pdf_file_name , output_filename_root , program_to_use ) : res_x = str ( args . resX ) res_y = str ( args . resY ) if program_to_use == "Ghostscript" : if ex . system_os == "Windows" : ex . render_pdf_file_to_image_files__ghostscript_bmp ( pdf_file_name , output_filename_root , res_x , res_y ) else : ex . render_pdf_file_to_image_files__ghostscript_png ( pdf_file_name , output_filename_root , res_x , res_y ) elif program_to_use == "pdftoppm" : use_gray = False if use_gray : ex . render_pdf_file_to_image_files_pdftoppm_pgm ( pdf_file_name , output_filename_root , res_x , res_y ) else : ex . render_pdf_file_to_image_files_pdftoppm_ppm ( pdf_file_name , output_filename_root , res_x , res_y ) else : print ( "Error in renderPdfFileToImageFile: Unrecognized external program." , file = sys . stderr ) ex . cleanup_and_exit ( 1 ) | Render all the pages of the PDF file at pdf_file_name to image files with path and filename prefix given by output_filename_root . Any directories must have already been created and the calling program is responsible for deleting any directories or image files . The program program_to_use currently either the string pdftoppm or the string Ghostscript will be called externally . The image type that the PDF is converted into must to be directly openable by PIL . |
57,797 | def calculate_bounding_box_from_image ( im , curr_page ) : xMax , y_max = im . size bounding_box = im . getbbox ( ) if not bounding_box : bounding_box = ( xMax / 2 , y_max / 2 , xMax / 2 , y_max / 2 ) bounding_box = list ( bounding_box ) bounding_box [ 1 ] = y_max - bounding_box [ 1 ] bounding_box [ 3 ] = y_max - bounding_box [ 3 ] full_page_box = curr_page . mediaBox convert_x = float ( full_page_box . getUpperRight_x ( ) - full_page_box . getLowerLeft_x ( ) ) / xMax convert_y = float ( full_page_box . getUpperRight_y ( ) - full_page_box . getLowerLeft_y ( ) ) / y_max final_box = [ bounding_box [ 0 ] * convert_x , bounding_box [ 3 ] * convert_y , bounding_box [ 2 ] * convert_x , bounding_box [ 1 ] * convert_y ] return final_box | This function uses a PIL routine to get the bounding box of the rendered image . |
57,798 | def samefile ( path1 , path2 ) : if system_os == "Linux" or system_os == "Cygwin" : return os . path . samefile ( path1 , path2 ) return ( get_canonical_absolute_expanded_path ( path1 ) == get_canonical_absolute_expanded_path ( path2 ) ) | Test if paths refer to the same file or directory . |
57,799 | def convert_windows_path_to_cygwin ( path ) : if len ( path ) > 2 and path [ 1 ] == ":" and path [ 2 ] == "\\" : newpath = cygwin_full_path_prefix + "/" + path [ 0 ] if len ( path ) > 3 : newpath += "/" + path [ 3 : ] path = newpath path = path . replace ( "\\" , "/" ) return path | Convert a Windows path to a Cygwin path . Just handles the basic case . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.