idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
50,300
def process_response ( self , request , response ) : if getattr ( settings , 'FORCE_VARY_ON_HOST' , True ) : patch_vary_headers ( response , ( 'Host' , ) ) return response
Forces the HTTP Vary header onto requests to avoid having responses cached across subdomains .
50,301
def process_docstring ( app , what , name , obj , options , lines ) : result = [ re . sub ( r'U\{([^}]*)\}' , r'\1' , re . sub ( r'(L|C)\{([^}]*)\}' , r':py:obj:`\2`' , re . sub ( r'@(' + '|' . join ( FIELDS ) + r')' , r':\1' , l ) ) ) for l in lines ] lines [ : ] = result [ : ]
Process the docstring for a given python object . Note that the list lines is changed in this function . Sphinx uses the altered content of the list .
50,302
def doArc8(arcs, domains, assignments):
    """Perform the ARC-8 arc consistency algorithm and prune domains.

    For every unassigned variable that participates in arcs, each value of
    its domain must be extendable to at least one value of each neighbouring
    variable without violating the arc constraints; unsupported values are
    hidden. Returns False as soon as any domain is wiped out, True otherwise.
    """
    check = dict.fromkeys(domains, True)
    while check:
        variable, _ = check.popitem()
        if variable not in arcs or variable in assignments:
            continue
        domain = domains[variable]
        arcsvariable = arcs[variable]
        for othervariable in arcsvariable:
            arcconstraints = arcsvariable[othervariable]
            if othervariable in assignments:
                otherdomain = [assignments[othervariable]]
            else:
                otherdomain = domains[othervariable]
            if domain:
                for value in domain[:]:
                    assignments[variable] = value
                    if otherdomain:
                        for othervalue in otherdomain:
                            assignments[othervariable] = othervalue
                            for constraint, variables in arcconstraints:
                                if not constraint(variables, domains,
                                                  assignments, True):
                                    # This othervalue fails; try the next one.
                                    break
                            else:
                                # All constraints passed: value is supported.
                                break
                        else:
                            # No othervalue supports this value.
                            domain.hideValue(value)
                        del assignments[othervariable]
                del assignments[variable]
            if not domain:
                return False
    return True
Perform the ARC - 8 arc checking algorithm and prune domains
50,303
def addVariables(self, variables, domain):
    """Add one or more variables to the problem.

    Every variable in *variables* is registered with the same *domain*.
    """
    for var in variables:
        self.addVariable(var, domain)
Add one or more variables to the problem
50,304
def addConstraint ( self , constraint , variables = None ) : if not isinstance ( constraint , Constraint ) : if callable ( constraint ) : constraint = FunctionConstraint ( constraint ) else : msg = "Constraints must be instances of subclasses " "of the Constraint class" raise ValueError ( msg ) self . _constraints . append ( ( constraint , variables ) )
Add a constraint to the problem
50,305
def getSolution(self):
    """Find and return one solution to the problem (None if there is none)."""
    args = self._getArgs()
    if not args[0]:
        # No variable domains: nothing to solve.
        return None
    return self._solver.getSolution(*args)
Find and return a solution to the problem
50,306
def getSolutions(self):
    """Find and return all solutions to the problem (empty list if none)."""
    args = self._getArgs()
    if not args[0]:
        # No variable domains: no solutions.
        return []
    return self._solver.getSolutions(*args)
Find and return all solutions to the problem
50,307
def getSolutionIter(self):
    """Return an iterator over the solutions of the problem."""
    args = self._getArgs()
    if not args[0]:
        # No variable domains: empty iterator.
        return iter(())
    return self._solver.getSolutionIter(*args)
Return an iterator to the solutions of the problem
50,308
def getSolution(self, domains, constraints, vconstraints):
    """Return one solution for the given problem.

    Abstract: concrete solvers must override this method.
    """
    raise NotImplementedError(
        "%s is an abstract class" % self.__class__.__name__)
Return one solution for the given problem
50,309
def resetState(self):
    """Reset to the original domain state, restoring every hidden value."""
    self.extend(self._hidden)
    self._hidden.clear()
    self._states.clear()
Reset to the original domain state including all possible values
50,310
def popState(self):
    """Restore the domain state saved at the top of the state stack.

    The popped state records the domain length at save time; any values
    hidden since then are moved back from the hidden stack.
    """
    hidden_count = self._states.pop() - len(self)
    if hidden_count:
        self.extend(self._hidden[-hidden_count:])
        del self._hidden[-hidden_count:]
Restore domain state from the top of the stack
50,311
def hideValue(self, value):
    """Hide the given value from the domain.

    The value is removed from the underlying list (explicitly via
    ``list.remove`` so subclass overrides are bypassed) and remembered on
    the hidden stack, allowing resetState()/popState() to restore it.
    """
    list.remove(self, value)
    self._hidden.append(value)
Hide the given value from the domain
50,312
def preProcess ( self , variables , domains , constraints , vconstraints ) : if len ( variables ) == 1 : variable = variables [ 0 ] domain = domains [ variable ] for value in domain [ : ] : if not self ( variables , domains , { variable : value } ) : domain . remove ( value ) constraints . remove ( ( self , variables ) ) vconstraints [ variable ] . remove ( ( self , variables ) )
Preprocess variable domains
50,313
def forwardCheck(self, variables, domains, assignments, _unassigned=Unassigned):
    """Helper method for generic forward checking.

    If exactly one variable of the constraint is unassigned, hide every
    value of its domain that would violate the constraint. Returns False
    if the domain is emptied, True otherwise.
    """
    unassignedvariable = _unassigned
    for variable in variables:
        if variable not in assignments:
            if unassignedvariable is _unassigned:
                unassignedvariable = variable
            else:
                # More than one unassigned variable: nothing to check.
                break
    else:
        if unassignedvariable is not _unassigned:
            domain = domains[unassignedvariable]
            if domain:
                for value in domain[:]:
                    assignments[unassignedvariable] = value
                    if not self(variables, domains, assignments):
                        domain.hideValue(value)
                del assignments[unassignedvariable]
            if not domain:
                return False
    return True
Helper method for generic forward checking
50,314
def block_view ( request ) : blocked_ip_list = get_blocked_ips ( ) blocked_username_list = get_blocked_usernames ( ) context = { 'blocked_ip_list' : blocked_ip_list , 'blocked_username_list' : blocked_username_list } return render ( request , 'defender/admin/blocks.html' , context )
List the blocked IP and Usernames
50,315
def get_ip ( request ) : if config . BEHIND_REVERSE_PROXY : ip_address = request . META . get ( config . REVERSE_PROXY_HEADER , '' ) ip_address = ip_address . split ( "," , 1 ) [ 0 ] . strip ( ) if ip_address == '' : ip_address = get_ip_address_from_request ( request ) else : ip_address = get_ip_address_from_request ( request ) return ip_address
get the ip address from the request
50,316
def get_blocked_ips ( ) : if config . DISABLE_IP_LOCKOUT : return [ ] key = get_ip_blocked_cache_key ( "*" ) key_list = [ redis_key . decode ( 'utf-8' ) for redis_key in REDIS_SERVER . keys ( key ) ] return strip_keys ( key_list )
get a list of blocked ips from redis
50,317
def get_blocked_usernames ( ) : if config . DISABLE_USERNAME_LOCKOUT : return [ ] key = get_username_blocked_cache_key ( "*" ) key_list = [ redis_key . decode ( 'utf-8' ) for redis_key in REDIS_SERVER . keys ( key ) ] return strip_keys ( key_list )
get a list of blocked usernames from redis
50,318
def increment_key(key):
    """Increment *key* in redis and return the new value.

    When a cool-off time is configured the key's expiry is refreshed in
    the same pipeline, so the increment and expiry are applied together.
    """
    pipe = REDIS_SERVER.pipeline()
    pipe.incr(key, 1)
    if config.COOLOFF_TIME:
        pipe.expire(key, config.COOLOFF_TIME)
    return pipe.execute()[0]
given a key increment the value
50,319
def username_from_request ( request ) : if config . USERNAME_FORM_FIELD in request . POST : return request . POST [ config . USERNAME_FORM_FIELD ] [ : 255 ] return None
unloads username from default POST request
50,320
def get_user_attempts ( request , get_username = get_username_from_request , username = None ) : ip_address = get_ip ( request ) username = lower_username ( username or get_username ( request ) ) ip_count = REDIS_SERVER . get ( get_ip_attempt_cache_key ( ip_address ) ) if not ip_count : ip_count = 0 ip_count = int ( ip_count ) username_count = REDIS_SERVER . get ( get_username_attempt_cache_key ( username ) ) if not username_count : username_count = 0 username_count = int ( username_count ) return max ( ip_count , username_count )
Returns number of access attempts for this ip username
50,321
def block_ip ( ip_address ) : if not ip_address : return if config . DISABLE_IP_LOCKOUT : return key = get_ip_blocked_cache_key ( ip_address ) if config . COOLOFF_TIME : REDIS_SERVER . set ( key , 'blocked' , config . COOLOFF_TIME ) else : REDIS_SERVER . set ( key , 'blocked' ) send_ip_block_signal ( ip_address )
given the ip block it
50,322
def block_username ( username ) : if not username : return if config . DISABLE_USERNAME_LOCKOUT : return key = get_username_blocked_cache_key ( username ) if config . COOLOFF_TIME : REDIS_SERVER . set ( key , 'blocked' , config . COOLOFF_TIME ) else : REDIS_SERVER . set ( key , 'blocked' ) send_username_block_signal ( username )
given the username block it .
50,323
def record_failed_attempt ( ip_address , username ) : ip_block = False if not config . DISABLE_IP_LOCKOUT : ip_count = increment_key ( get_ip_attempt_cache_key ( ip_address ) ) if ip_count > config . IP_FAILURE_LIMIT : block_ip ( ip_address ) ip_block = True user_block = False if username and not config . DISABLE_USERNAME_LOCKOUT : user_count = increment_key ( get_username_attempt_cache_key ( username ) ) if user_count > config . USERNAME_FAILURE_LIMIT : block_username ( username ) user_block = True if config . DISABLE_IP_LOCKOUT : return not user_block if config . DISABLE_USERNAME_LOCKOUT : return not ip_block if config . LOCKOUT_BY_IP_USERNAME : return not ( ip_block and user_block ) return not ( ip_block or user_block )
record the failed login attempt if over limit return False if not over limit return True
50,324
def unblock_ip ( ip_address , pipe = None ) : do_commit = False if not pipe : pipe = REDIS_SERVER . pipeline ( ) do_commit = True if ip_address : pipe . delete ( get_ip_attempt_cache_key ( ip_address ) ) pipe . delete ( get_ip_blocked_cache_key ( ip_address ) ) if do_commit : pipe . execute ( )
unblock the given IP
50,325
def unblock_username ( username , pipe = None ) : do_commit = False if not pipe : pipe = REDIS_SERVER . pipeline ( ) do_commit = True if username : pipe . delete ( get_username_attempt_cache_key ( username ) ) pipe . delete ( get_username_blocked_cache_key ( username ) ) if do_commit : pipe . execute ( )
unblock the given Username
50,326
def reset_failed_attempts ( ip_address = None , username = None ) : pipe = REDIS_SERVER . pipeline ( ) unblock_ip ( ip_address , pipe = pipe ) unblock_username ( username , pipe = pipe ) pipe . execute ( )
reset the failed attempts for these ip s and usernames
50,327
def lockout_response ( request ) : if config . LOCKOUT_TEMPLATE : context = { 'cooloff_time_seconds' : config . COOLOFF_TIME , 'cooloff_time_minutes' : config . COOLOFF_TIME / 60 , 'failure_limit' : config . FAILURE_LIMIT , } return render ( request , config . LOCKOUT_TEMPLATE , context ) if config . LOCKOUT_URL : return HttpResponseRedirect ( config . LOCKOUT_URL ) if config . COOLOFF_TIME : return HttpResponse ( "Account locked: too many login attempts. " "Please try again later." ) else : return HttpResponse ( "Account locked: too many login attempts. " "Contact an admin to unlock your account." )
if we are locked out here is the response
50,328
def is_user_already_locked ( username ) : if username is None : return False if config . DISABLE_USERNAME_LOCKOUT : return False return REDIS_SERVER . get ( get_username_blocked_cache_key ( username ) )
Is this username already locked?
50,329
def is_source_ip_already_locked ( ip_address ) : if ip_address is None : return False if config . DISABLE_IP_LOCKOUT : return False return REDIS_SERVER . get ( get_ip_blocked_cache_key ( ip_address ) )
Is this IP already locked?
50,330
def is_already_locked ( request , get_username = get_username_from_request , username = None ) : user_blocked = is_user_already_locked ( username or get_username ( request ) ) ip_blocked = is_source_ip_already_locked ( get_ip ( request ) ) if config . LOCKOUT_BY_IP_USERNAME : return ip_blocked and user_blocked return ip_blocked or user_blocked
Parse the username & IP from the request and see if it s already locked .
50,331
def check_request ( request , login_unsuccessful , get_username = get_username_from_request , username = None ) : ip_address = get_ip ( request ) username = username or get_username ( request ) if not login_unsuccessful : reset_failed_attempts ( ip_address = ip_address , username = username ) return True else : return record_failed_attempt ( ip_address , username )
check the request and process results
50,332
def add_login_attempt_to_db ( request , login_valid , get_username = get_username_from_request , username = None ) : if not config . STORE_ACCESS_ATTEMPTS : return username = username or get_username ( request ) user_agent = request . META . get ( 'HTTP_USER_AGENT' , '<unknown>' ) [ : 255 ] ip_address = get_ip ( request ) http_accept = request . META . get ( 'HTTP_ACCEPT' , '<unknown>' ) path_info = request . META . get ( 'PATH_INFO' , '<unknown>' ) if config . USE_CELERY : from . tasks import add_login_attempt_task add_login_attempt_task . delay ( user_agent , ip_address , username , http_accept , path_info , login_valid ) else : store_login_attempt ( user_agent , ip_address , username , http_accept , path_info , login_valid )
Create a record for the login attempt If using celery call celery task if not call the method normally
50,333
def add_login_attempt_task ( user_agent , ip_address , username , http_accept , path_info , login_valid ) : store_login_attempt ( user_agent , ip_address , username , http_accept , path_info , login_valid )
Create a record for the login attempt
50,334
def store_login_attempt ( user_agent , ip_address , username , http_accept , path_info , login_valid ) : AccessAttempt . objects . create ( user_agent = user_agent , ip_address = ip_address , username = username , http_accept = http_accept , path_info = path_info , login_valid = login_valid , )
Store the login attempt to the db .
50,335
def get_redis_connection ( ) : if config . MOCK_REDIS : import mockredis return mockredis . mock_strict_redis_client ( ) elif config . DEFENDER_REDIS_NAME : try : cache = caches [ config . DEFENDER_REDIS_NAME ] except InvalidCacheBackendError : raise KeyError ( INVALID_CACHE_ERROR_MSG . format ( config . DEFENDER_REDIS_NAME ) ) try : return cache . get_master_client ( ) except AttributeError : return cache . client . get_client ( True ) else : redis_config = parse_redis_url ( config . DEFENDER_REDIS_URL ) return redis . StrictRedis ( host = redis_config . get ( 'HOST' ) , port = redis_config . get ( 'PORT' ) , db = redis_config . get ( 'DB' ) , password = redis_config . get ( 'PASSWORD' ) , ssl = redis_config . get ( 'SSL' ) )
Get the redis connection if not using mock
50,336
def parse_redis_url(url):
    """Parse a redis URL into a config dict.

    Returns a dict with DB, PASSWORD, HOST, PORT and SSL keys; components
    missing from *url* keep their defaults. A scheme of https/rediss
    enables SSL.
    """
    redis_config = {
        "DB": 0,
        "PASSWORD": None,
        "HOST": "localhost",
        "PORT": 6379,
        "SSL": False,
    }
    if not url:
        return redis_config
    parsed = urlparse.urlparse(url)
    # The path component holds the database number ("/0"); drop any query part.
    db_part = parsed.path[1:].split('?', 2)[0]
    if db_part:
        redis_config["DB"] = int(db_part)
    if parsed.password:
        redis_config["PASSWORD"] = parsed.password
    if parsed.hostname:
        redis_config["HOST"] = parsed.hostname
    if parsed.port:
        redis_config["PORT"] = int(parsed.port)
    if parsed.scheme in ('https', 'rediss'):
        redis_config["SSL"] = True
    return redis_config
Parses a redis URL .
50,337
def handle ( self , ** options ) : print ( "Starting clean up of django-defender table" ) now = timezone . now ( ) cleanup_delta = timedelta ( hours = config . ACCESS_ATTEMPT_EXPIRATION ) min_attempt_time = now - cleanup_delta attempts_to_clean = AccessAttempt . objects . filter ( attempt_time__lt = min_attempt_time , ) attempts_to_clean_count = attempts_to_clean . count ( ) attempts_to_clean . delete ( ) print ( "Finished. Removed {0} AccessAttempt entries." . format ( attempts_to_clean_count ) )
Removes any entries in the AccessAttempt that are older than your DEFENDER_ACCESS_ATTEMPT_EXPIRATION config default 24 HOURS .
50,338
def connection(self, shareable=True):
    """Get a steady, cached DB-API 2 connection from the pool.

    With *shareable* set (and a shared pool configured) the connection may
    be shared between threads; otherwise a dedicated connection is handed
    out. Blocks via _wait_lock() while the pool is exhausted.
    """
    if shareable and self._maxshared:
        self._lock.acquire()
        try:
            # Wait while the pool is empty and no more connections may open.
            while (not self._shared_cache and self._maxconnections
                    and self._connections >= self._maxconnections):
                self._wait_lock()
            if len(self._shared_cache) < self._maxshared:
                # Shared cache not full: wrap an idle or fresh connection.
                try:
                    con = self._idle_cache.pop(0)
                except IndexError:
                    con = self.steady_connection()
                else:
                    con._ping_check()
                con = SharedDBConnection(con)
                self._connections += 1
            else:
                # Share the least shared connection that is not in a
                # transaction, waiting if all of them currently are.
                self._shared_cache.sort()
                con = self._shared_cache.pop(0)
                while con.con._transaction:
                    self._shared_cache.insert(0, con)
                    self._wait_lock()
                    self._shared_cache.sort()
                    con = self._shared_cache.pop(0)
                con.con._ping_check()
                con.share()
            self._shared_cache.append(con)
            self._lock.notify()
        finally:
            self._lock.release()
        con = PooledSharedDBConnection(self, con)
    else:
        # Dedicated (unshared) connection.
        self._lock.acquire()
        try:
            while (self._maxconnections
                    and self._connections >= self._maxconnections):
                self._wait_lock()
            try:
                con = self._idle_cache.pop(0)
            except IndexError:
                con = self.steady_connection()
            else:
                con._ping_check()
            con = PooledDedicatedDBConnection(self, con)
            self._connections += 1
        finally:
            self._lock.release()
    return con
Get a steady cached DB - API 2 connection from the pool .
50,339
def unshare ( self , con ) : self . _lock . acquire ( ) try : con . unshare ( ) shared = con . shared if not shared : try : self . _shared_cache . remove ( con ) except ValueError : pass finally : self . _lock . release ( ) if not shared : self . cache ( con . con )
Decrease the share of a connection in the shared cache .
50,340
def close ( self ) : if self . _con : self . _pool . unshare ( self . _shared_con ) self . _shared_con = self . _con = None
Close the pooled shared connection .
50,341
def steady_connection ( self ) : return connect ( self . _creator , self . _maxusage , self . _setsession , self . _failures , self . _ping , self . _closeable , * self . _args , ** self . _kwargs )
Get a steady non - persistent DB - API 2 connection .
50,342
def connection ( self , shareable = False ) : try : con = self . thread . connection except AttributeError : con = self . steady_connection ( ) if not con . threadsafety ( ) : raise NotSupportedError ( "Database module is not thread-safe." ) self . thread . connection = con con . _ping_check ( ) return con
Get a steady persistent DB - API 2 connection .
50,343
def steady_connection ( self ) : return SteadyPgConnection ( self . _maxusage , self . _setsession , self . _closeable , * self . _args , ** self . _kwargs )
Get a steady non - persistent PyGreSQL connection .
50,344
def connection(self):
    """Get a steady, persistent PyGreSQL connection.

    The connection is cached on the thread-local storage and created
    lazily on first use.
    """
    try:
        return self.thread.connection
    except AttributeError:
        con = self.steady_connection()
        self.thread.connection = con
        return con
Get a steady persistent PyGreSQL connection .
50,345
def versionString(version):
    """Create a version string from a version tuple.

    The first two (or three, when the micro part is non-zero) numeric
    components are joined with dots; remaining components (pre-release
    tags and the like) are appended, each separated by a dash.

    >>> versionString((1, 2, 0))
    '1.2'
    >>> versionString((1, 2, 3, 'beta', 1))
    '1.2.3-beta-1'
    """
    ver = list(map(str, version))
    numbers, rest = ver[:2 if ver[2] == '0' else 3], ver[3:]
    # Fix: the original concatenated '-'.join(rest) directly onto the dotted
    # numbers, dropping the separator before the first suffix ("1.2.3beta-1").
    return '-'.join(['.'.join(numbers)] + rest)
Create version string .
50,346
def steady_connection ( self ) : return SteadyPgConnection ( self . _maxusage , self . _setsession , True , * self . _args , ** self . _kwargs )
Get a steady unpooled PostgreSQL connection .
50,347
def connection ( self ) : if self . _connections : if not self . _connections . acquire ( self . _blocking ) : raise TooManyConnections try : con = self . _cache . get ( 0 ) except Empty : con = self . steady_connection ( ) return PooledPgConnection ( self , con )
Get a steady cached PostgreSQL connection from the pool .
50,348
def cache ( self , con ) : try : if self . _reset == 2 : con . reset ( ) else : if self . _reset or con . _transaction : try : con . rollback ( ) except Exception : pass self . _cache . put ( con , 0 ) except Full : con . close ( ) if self . _connections : self . _connections . release ( )
Put a connection back into the pool cache .
50,349
def reopen(self):
    """Reopen the pooled connection.

    Reopens the underlying connection when present, otherwise fetches a
    fresh connection from the pool.
    """
    if self._con:
        self._con.reopen()
    else:
        self._con = self._pool.connection()
Reopen the pooled connection .
50,350
def reopen(self):
    """Reopen the tough connection.

    On success the connection state (transaction flag, closed flag, usage
    count) is reset and the session is re-initialized. If reopening fails
    and a transaction was in progress, a rollback is attempted on the
    underlying connection; all errors are swallowed.
    """
    try:
        self._con.reopen()
    except Exception:
        # Fix: the original read ``self._transcation`` (typo), which raised
        # AttributeError here instead of rolling back the transaction.
        if self._transaction:
            self._transaction = False
            try:
                self._con.query('rollback')
            except Exception:
                pass
    else:
        self._transaction = False
        self._closed = False
        self._setsession()
        self._usage = 0
Reopen the tough connection .
50,351
def reset(self):
    """Reset the tough connection.

    Tries an ordinary reset first; on failure falls back to reopening the
    connection, and as a last resort to rolling back. All fallback errors
    are swallowed.
    """
    try:
        self._con.reset()
        self._transaction = False
        self._setsession()
        self._usage = 0
    except Exception:
        try:
            self.reopen()
        except Exception:
            try:
                self.rollback()
            except Exception:
                pass
Reset the tough connection .
50,352
def _get_tough_method ( self , method ) : def tough_method ( * args , ** kwargs ) : transaction = self . _transaction if not transaction : try : if not self . _con . db . status : raise AttributeError if self . _maxusage : if self . _usage >= self . _maxusage : raise AttributeError except Exception : self . reset ( ) try : result = method ( * args , ** kwargs ) except Exception : if transaction : self . _transaction = False raise elif self . _con . db . status : raise else : self . reset ( ) result = method ( * args , ** kwargs ) self . _usage += 1 return result return tough_method
Return a tough version of a connection class method .
50,353
def connect(creator, maxusage=None, setsession=None, failures=None,
            ping=1, closeable=True, *args, **kwargs):
    """A tough version of the connection constructor of a DB-API 2 module.

    Thin factory: forwards all arguments straight to SteadyDBConnection.
    """
    return SteadyDBConnection(
        creator, maxusage, setsession, failures, ping, closeable,
        *args, **kwargs)
A tough version of the connection constructor of a DB - API 2 module .
50,354
def _create(self):
    """Create a new connection using the creator function.

    After creating the raw connection, tries to discover the DB-API 2
    module (to derive thread-safety and failure exception classes) by
    walking the module path of the connection or its OperationalError.
    Closes the connection and re-raises on any setup error.
    """
    con = self._creator(*self._args, **self._kwargs)
    try:
        try:
            if self._dbapi.connect != self._creator:
                raise AttributeError
        except AttributeError:
            # Try finding the DB-API 2 module via the connection itself.
            try:
                mod = con.__module__
            except AttributeError:
                mod = None
            while mod:
                try:
                    self._dbapi = sys.modules[mod]
                    if not callable(self._dbapi.connect):
                        raise AttributeError
                except (AttributeError, KeyError):
                    pass
                else:
                    break
                i = mod.rfind('.')
                if i < 0:
                    mod = None
                else:
                    mod = mod[:i]
            else:
                # Fall back to the module of OperationalError.
                try:
                    mod = con.OperationalError.__module__
                except AttributeError:
                    mod = None
                while mod:
                    try:
                        self._dbapi = sys.modules[mod]
                        if not callable(self._dbapi.connect):
                            raise AttributeError
                    except (AttributeError, KeyError):
                        pass
                    else:
                        break
                    i = mod.rfind('.')
                    if i < 0:
                        mod = None
                    else:
                        mod = mod[:i]
                else:
                    self._dbapi = None
        if self._threadsafety is None:
            try:
                self._threadsafety = self._dbapi.threadsafety
            except AttributeError:
                try:
                    self._threadsafety = con.threadsafety
                except AttributeError:
                    pass
        if self._failures is None:
            # Determine which exceptions indicate a broken connection.
            try:
                self._failures = (
                    self._dbapi.OperationalError,
                    self._dbapi.InternalError)
            except AttributeError:
                try:
                    self._failures = (
                        self._creator.OperationalError,
                        self._creator.InternalError)
                except AttributeError:
                    try:
                        self._failures = (
                            con.OperationalError, con.InternalError)
                    except AttributeError:
                        raise AttributeError(
                            "Could not determine failure exceptions"
                            " (please set failures or creator.dbapi).")
        if isinstance(self._failures, tuple):
            self._failure = self._failures[0]
        else:
            self._failure = self._failures
        self._setsession(con)
    except Exception as error:
        # The connection could not be set up properly; clean up and re-raise.
        try:
            con.close()
        except Exception:
            pass
        raise error
    return con
Create a new connection using the creator function .
50,355
def _store ( self , con ) : self . _con = con self . _transaction = False self . _closed = False self . _usage = 0
Store a database connection for subsequent use .
50,356
def _reset ( self , force = False ) : if not self . _closed and ( force or self . _transaction ) : try : self . rollback ( ) except Exception : pass
Reset a tough connection .
50,357
def begin(self, *args, **kwargs):
    """Indicate the beginning of a transaction.

    Sets the transaction flag, then delegates to the underlying
    connection's begin() when it provides one.
    """
    self._transaction = True
    try:
        begin = self._con.begin
    except AttributeError:
        # Underlying connection has no begin(); the flag alone suffices.
        return
    begin(*args, **kwargs)
Indicate the beginning of a transaction .
50,358
def commit ( self ) : self . _transaction = False try : self . _con . commit ( ) except self . _failures as error : try : con = self . _create ( ) except Exception : pass else : self . _close ( ) self . _store ( con ) raise error
Commit any pending transaction .
50,359
def cancel(self):
    """Cancel a long-running transaction.

    Clears the transaction flag, then delegates to the underlying
    connection's cancel() when it provides one.
    """
    self._transaction = False
    try:
        cancel = self._con.cancel
    except AttributeError:
        return
    cancel()
Cancel a long - running transaction .
50,360
def _setsizes ( self , cursor = None ) : if cursor is None : cursor = self . _cursor if self . _inputsizes : cursor . setinputsizes ( self . _inputsizes ) for column , size in self . _outputsizes . items ( ) : if column is None : cursor . setoutputsize ( size ) else : cursor . setoutputsize ( size , column )
Set stored input and output sizes for cursor execution .
50,361
def close(self):
    """Close the tough cursor.

    Safe to call more than once; errors from the underlying cursor's
    close() are suppressed.
    """
    if self._closed:
        return
    try:
        self._cursor.close()
    except Exception:
        pass
    self._closed = True
Close the tough cursor .
50,362
def _get_tough_method(self, name):
    """Return a tough version of the given cursor method.

    The wrapper retries the operation on connection failures: first on a
    fresh cursor of the same connection (outside transactions), then on a
    completely new connection. Usage counting and input/output sizes are
    maintained across retries.
    """
    def tough_method(*args, **kwargs):
        execute = name.startswith('execute')
        con = self._con
        transaction = con._transaction
        if not transaction:
            con._ping_check(4)
        try:
            # Enforce the usage limit by simulating a connection failure.
            if con._maxusage:
                if con._usage >= con._maxusage:
                    raise con._failure
            if execute:
                self._setsizes()
            method = getattr(self._cursor, name)
            result = method(*args, **kwargs)
            if execute:
                self._clearsizes()
        except con._failures as error:
            if not transaction:
                # First retry: a new cursor on the same connection.
                try:
                    cursor2 = con._cursor(*self._args, **self._kwargs)
                except Exception:
                    pass
                else:
                    try:
                        if execute:
                            self._setsizes(cursor2)
                        method = getattr(cursor2, name)
                        result = method(*args, **kwargs)
                        if execute:
                            self._clearsizes()
                    except Exception:
                        pass
                    else:
                        self.close()
                        self._cursor = cursor2
                        con._usage += 1
                        return result
                    try:
                        cursor2.close()
                    except Exception:
                        pass
            # Second retry: a brand new connection.
            try:
                con2 = con._create()
            except Exception:
                pass
            else:
                try:
                    cursor2 = con2.cursor(*self._args, **self._kwargs)
                except Exception:
                    pass
                else:
                    if transaction:
                        # Inside a transaction we must not silently retry:
                        # swap in the new connection and report the failure.
                        self.close()
                        con._close()
                        con._store(con2)
                        self._cursor = cursor2
                        raise error
                    error2 = None
                    try:
                        if execute:
                            self._setsizes(cursor2)
                        method2 = getattr(cursor2, name)
                        result = method2(*args, **kwargs)
                        if execute:
                            self._clearsizes()
                    except error.__class__:
                        # Same failure again: the new connection is no better.
                        use2 = False
                        error2 = error
                    except Exception as error:
                        use2 = True
                        error2 = error
                    else:
                        use2 = True
                    if use2:
                        self.close()
                        con._close()
                        con._store(con2)
                        self._cursor = cursor2
                        con._usage += 1
                        if error2:
                            raise error2
                        return result
                    try:
                        cursor2.close()
                    except Exception:
                        pass
                try:
                    con2.close()
                except Exception:
                    pass
            if transaction:
                self._transaction = False
            raise error
        else:
            con._usage += 1
            return result
    return tough_method
Return a tough version of the given cursor method .
50,363
def train_punkt ( ctx , input , output , abbr , colloc ) : click . echo ( 'chemdataextractor.tokenize.train_punkt' ) import pickle from nltk . tokenize . punkt import PunktSentenceTokenizer , PunktTrainer punkt = PunktTrainer ( ) for fin in input : click . echo ( 'Training on %s' % fin . name ) sentences = fin . read ( ) punkt . train ( sentences , finalize = False , verbose = True ) punkt . finalize_training ( verbose = True ) if abbr : abbreviations = abbr . read ( ) . strip ( ) . split ( '\n' ) click . echo ( 'Manually adding abbreviations: %s' % abbreviations ) punkt . _params . abbrev_types . update ( abbreviations ) if colloc : collocations = [ tuple ( l . split ( '. ' , 1 ) ) for l in colloc . read ( ) . strip ( ) . split ( '\n' ) ] click . echo ( 'Manually adding collocs: %s' % collocations ) punkt . _params . collocations . update ( collocations ) model = PunktSentenceTokenizer ( punkt . get_params ( ) ) pickle . dump ( model , output , protocol = pickle . HIGHEST_PROTOCOL )
Train Punkt sentence splitter using sentences in input .
50,364
def sentences ( ctx , input , output ) : log . info ( 'chemdataextractor.read.elements' ) log . info ( 'Reading %s' % input . name ) doc = Document . from_file ( input ) for element in doc . elements : if isinstance ( element , Text ) : for raw_sentence in element . raw_sentences : output . write ( raw_sentence . strip ( ) ) output . write ( u'\n' )
Read input document and output sentences .
50,365
def words ( ctx , input , output ) : log . info ( 'chemdataextractor.read.elements' ) log . info ( 'Reading %s' % input . name ) doc = Document . from_file ( input ) for element in doc . elements : if isinstance ( element , Text ) : for sentence in element . sentences : output . write ( u' ' . join ( sentence . raw_tokens ) ) output . write ( u'\n' )
Read input document and output words .
50,366
def _in_stoplist(self, entity):
    """Return True if the entity is in the stoplist, False otherwise.

    One ignorable prefix and one ignorable suffix are stripped before
    checking the exact stoplist and the stop regexes.
    """
    start = 0
    end = len(entity)
    for prefix in IGNORE_PREFIX:
        if entity.startswith(prefix):
            start += len(prefix)
            break
    for suffix in IGNORE_SUFFIX:
        if entity.endswith(suffix):
            end -= len(suffix)
            break
    if start >= end:
        # Nothing left after stripping affixes: treat as stoplisted.
        return True
    entity = entity[start:end]
    if entity in STOPLIST:
        return True
    for stop_re in STOP_RES:
        if re.search(stop_re, entity):
            log.debug('Killed: %s', entity)
            return True
    # Fix: be explicit instead of implicitly falling through to None
    # (None is falsy, so callers' behavior is unchanged).
    return False
Return True if the entity is in the stoplist .
50,367
def _process_name(name):
    """Fix issues with Jochem names.

    Strips noise from the start/end, unwraps brackets, then reassembles
    comma-separated components, prepending any component that ends with a
    hyphen (e.g. "acid, 2-amino-" -> "2-amino-acid").
    """
    name = unescape(name)
    name = NG_RE.sub('', name).strip()
    name = END_RE.sub('', name).strip(', ')
    name = RATIO_RE.sub('', name).strip(', ')
    name = START_RE.sub('', name).strip()
    # Fix: use a raw string for the group reference; '\g<1>' in a normal
    # string relies on an invalid escape sequence (deprecated, future error).
    name = BRACKET_RE.sub(r'\g<1>', name)
    comps = name.split(', ')
    if len(comps) == 2:
        if comps[1].endswith('-'):
            name = comps[0]
            name = '%s%s' % (comps[1], name)
    elif len(comps) > 2:
        name = comps[0]
        for i in range(1, len(comps)):
            if comps[i].endswith('-'):
                name = '%s%s' % (comps[i], name)
            else:
                name = '%s %s' % (name, comps[i])
    return name
Fix issues with Jochem names .
50,368
def _get_variants(name):
    """Return variants of chemical name.

    Produces word/symbol variants for Greek letters and, for names ending
    in (alpha)numeric tokens, variants with and without a joining hyphen.
    """
    names = [name]
    oldname = name
    if DOT_GREEK_RE.search(name):
        # Expand ".alpha."-style markers both to the word and the symbol.
        wordname = name
        while True:
            m = DOT_GREEK_RE.search(wordname)
            if m:
                wordname = (wordname[:m.start(1) - 1] + m.group(1)
                            + wordname[m.end(1) + 1:])
            else:
                break
        symbolname = name
        while True:
            m = DOT_GREEK_RE.search(symbolname)
            if m:
                symbolname = (symbolname[:m.start(1) - 1]
                              + GREEK_WORDS[m.group(1)]
                              + symbolname[m.end(1) + 1:])
            else:
                break
        names = [wordname, symbolname]
    else:
        # Replace spelled-out Greek words with their symbols.
        while True:
            m = GREEK_RE.search(name)
            if m:
                name = (name[:m.start(2)] + GREEK_WORDS[m.group(2)]
                        + name[m.end(2):])
            else:
                break
        while True:
            m = UNAMBIGUOUS_GREEK_RE.search(name)
            if m:
                name = (name[:m.start(1)] + GREEK_WORDS[m.group(1)]
                        + name[m.end(1):])
            else:
                break
        if not name == oldname:
            names.append(name)
    newnames = []
    for name in names:
        # Fix: raw strings for group references; '\g<1>' in a normal string
        # relies on an invalid escape sequence (deprecated, future error).
        if NUM_END_RE.search(name):
            newnames.append(NUM_END_RE.sub(r'-\g<1>', name))
            newnames.append(NUM_END_RE.sub(r'\g<1>', name))
        if ALPHANUM_END_RE.search(name):
            newnames.append(ALPHANUM_END_RE.sub(r'-\g<1>', name))
    names.extend(newnames)
    return names
Return variants of chemical name .
50,369
def prepare_jochem ( ctx , jochem , output , csoutput ) : click . echo ( 'chemdataextractor.dict.prepare_jochem' ) for i , line in enumerate ( jochem ) : print ( 'JC%s' % i ) if line . startswith ( 'TM ' ) : if line . endswith ( ' @match=ci\n' ) : for tokens in _make_tokens ( line [ 3 : - 11 ] ) : output . write ( ' ' . join ( tokens ) ) output . write ( '\n' ) else : for tokens in _make_tokens ( line [ 3 : - 1 ] ) : csoutput . write ( ' ' . join ( tokens ) ) csoutput . write ( '\n' )
Process and filter jochem file to produce list of names for dictionary .
50,370
def prepare_include ( ctx , include , output ) : click . echo ( 'chemdataextractor.dict.prepare_include' ) for i , line in enumerate ( include ) : print ( 'IN%s' % i ) for tokens in _make_tokens ( line . strip ( ) ) : output . write ( u' ' . join ( tokens ) ) output . write ( u'\n' )
Process and filter include file to produce list of names for dictionary .
50,371
def build ( ctx , inputs , output , cs ) : click . echo ( 'chemdataextractor.dict.build' ) dt = DictionaryTagger ( lexicon = ChemLexicon ( ) , case_sensitive = cs ) names = [ ] for input in inputs : for line in input : tokens = line . split ( ) names . append ( tokens ) dt . build ( words = names ) dt . save ( output )
Build chemical name dictionary .
50,372
def _parse_table_footnotes(self, fns, refs, specials):
    """Parse footnotes, accounting for awkward RSC table footnote markup.

    The footnote's id attribute lives on the element *preceding* the
    footnote text, so it is attached via an empty Footnote element.
    """
    parsed = []
    for fn in fns:
        note = self._parse_text(fn, refs=refs, specials=specials, element_cls=Footnote)[0]
        # NOTE(review): assumes fn.getprevious() is never None in this
        # markup -- confirm against the RSC documents this reader targets.
        note += Footnote('', id=fn.getprevious().get('id'))
        parsed.append(note)
    return parsed
Override to account for awkward RSC table footnotes .
50,373
def extract():
    """Extract melting points from patents.

    Walks the grants directory, processes patent XML files smallest-first,
    and dumps serialized records (only those with exactly one melting
    point) to per-file JSON results.
    """
    Paragraph.parsers = [CompoundParser(), ChemicalLabelParser(), MpParser()]
    Table.parsers = []
    patents = []
    for root, dirs, files in os.walk('../examples/mp/grants'):
        for filename in files:
            if not filename.endswith('.xml'):
                continue
            path = os.path.abspath(os.path.join(root, filename))
            size = os.path.getsize(path)
            patents.append((path, filename, size))
    # Process smallest files first.
    patents = sorted(patents, key=lambda p: p[2])
    for path, filename, size in patents:
        print(path)
        shutil.copyfile(path, '../examples/mp/used/%s' % filename)
        # Skip already-processed patents *before* the (expensive) parse;
        # previously the document was parsed and then discarded.
        if os.path.isfile('../examples/mp/results/%s.json' % filename):
            continue
        with open(path) as f:
            d = Document.from_file(f)
        records = [r.serialize() for r in d.records if len(r.melting_points) == 1]
        # BUGFIX: json.dumps(...).encode('utf8') yields bytes, so the output
        # file must be opened in binary mode (text mode raises TypeError on
        # Python 3).
        with open('../examples/mp/results/%s.json' % filename, 'wb') as fout:
            fout.write(json.dumps(records, ensure_ascii=False, indent=2).encode('utf8'))
Extract melting points from patents .
50,374
def make_sdf():
    """Produce gzipped SDFs of the ChemDataExtractor melting point samples."""
    for path in ('../examples/mp/sdf/chemdataextractor-melting-points.sdf',
                 '../examples/mp/sdf/chemdataextractor-melting-points-filtered.sdf'):
        _gzip_file(path)


def _gzip_file(path):
    """Write a gzip-compressed copy of *path* alongside it as path + '.gz'."""
    with open(path, 'rb') as f_in, gzip.open(path + '.gz', 'wb') as f_out:
        shutil.copyfileobj(f_in, f_out)
Produce SDF of ChemDataExtractor and Tetko sample melting points .
50,375
def cli(ctx, verbose):
    """ChemDataExtractor command line interface."""
    # BUGFIX: configure logging before emitting any messages. Previously
    # the version debug line was logged before basicConfig ran, so it was
    # dropped (or sent via the last-resort handler) regardless of -v.
    logging.basicConfig(level=logging.DEBUG if verbose else logging.INFO)
    logging.getLogger('requests').setLevel(logging.WARN)
    log.debug('ChemDataExtractor v%s' % __version__)
    # Shared state for subcommands.
    ctx.obj = {}
ChemDataExtractor command line interface .
50,376
def extract(ctx, input, output):
    """Run ChemDataExtractor on a document and write its records as JSON."""
    log.info('chemdataextractor.extract')
    log.info('Reading %s' % input.name)
    document = Document.from_file(input, fname=input.name)
    serialized = [record.serialize(primitive=True) for record in document.records]
    output.write(json.dumps(serialized, indent=2, ensure_ascii=False))
Run ChemDataExtractor on a document .
50,377
def read(ctx, input, output):
    """Output each processed document element with its type name."""
    log.info('chemdataextractor.read')
    log.info('Reading %s' % input.name)
    doc = Document.from_file(input)
    for el in doc.elements:
        line = u'%s : %s\n=====\n' % (el.__class__.__name__, six.text_type(el))
        output.write(line)
Output processed document elements .
50,378
def extract_smiles(s):
    """Return a list of SMILES identifiers extracted from string *s*.

    A token counts as SMILES when it is longer than two characters, matches
    SMILES_RE, does not end with a full stop, and has balanced brackets.
    """
    return [
        token for token in s.split()
        if len(token) > 2 and SMILES_RE.match(token)
        and not token.endswith('.') and bracket_level(token) == 0
    ]
Return a list of SMILES identifiers extracted from the string .
50,379
def could_be(self, other):
    """Return True if *other* is not explicitly inconsistent with this name.

    Two names are compatible when every attribute present on both matches
    after lowercasing and punctuation removal, or -- for name components --
    when one side consists entirely of initials of the other.
    """
    if type(other) is not type(self):
        return NotImplemented
    if self == other:
        return True
    # Translation table stripping all ASCII punctuation; hoisted out of the
    # loop (it was rebuilt on every attribute comparison).
    puncmap = dict((ord(char), None) for char in string.punctuation)
    for attr in ['title', 'firstname', 'middlename', 'nickname', 'prefix', 'lastname', 'suffix']:
        if attr not in self or attr not in other:
            continue
        s = self[attr].lower().translate(puncmap)
        o = other[attr].lower().translate(puncmap)
        if s == o:
            continue
        if attr in {'firstname', 'middlename', 'lastname'}:
            # Accept e.g. 'J R' vs 'John Ronald': every component on one
            # side is a single letter matching the other side's initials.
            if (({len(comp) for comp in s.split()} == {1} and [el[0] for el in o.split()] == s.split()) or
                    ({len(comp) for comp in o.split()} == {1} and [el[0] for el in s.split()] == o.split())):
                continue
        return False
    return True
Return True if the other PersonName is not explicitly inconsistent .
50,380
def _is_suffix(self, t):
    """Return True if token *t* is a recognized name suffix."""
    if t in NOT_SUFFIX:
        return False
    stripped = t.replace('.', '')
    return stripped in SUFFIXES or stripped in SUFFIXES_LOWER
Return True if *t* is a suffix.
50,381
def _tokenize(self, comps):
    """Split each component on whitespace unless inside curly brackets.

    Tokens are stripped of surrounding spaces/commas; empties are dropped.
    """
    tokens = []
    for comp in comps:
        for piece in re.split(r'\s+(?=[^{}]*(?:\{|$))', comp):
            stripped = piece.strip(' ,')
            if stripped:
                tokens.append(stripped)
    return tokens
Split name on spaces unless inside curly brackets or quotes .
50,382
def _clean(self, t, capitalize=None):
    """Normalize *t* to unicode and strip single trailing full stops."""
    if self._from_bibtex:
        t = latex_to_unicode(t, capitalize=capitalize)
    # Drop the trailing dot of abbreviations like 'Dr.' while leaving
    # multi-dot tokens such as initials ('J.R.') untouched.
    cleaned = []
    for el in t.split():
        cleaned.append(el.rstrip('.') if el.count('.') == 1 else el)
    return ' '.join(cleaned)
Convert to normalized unicode and strip trailing full stops .
50,383
def _strip(self, tokens, criteria, prop, rev=False):
    """Strip contiguous tokens meeting *criteria* from one end of *tokens*.

    Stripped tokens are joined, cleaned and stored under ``self[prop]``.
    With ``rev=True`` tokens are stripped from the end of the list,
    otherwise from the start. The ``num > i + 1`` guard ensures at least
    one token is always left behind. Returns the (mutated) remaining list.
    """
    num = len(tokens)
    res = []
    # NOTE(review): this loop pops from the same list it is iterating. In
    # the forward (rev=False) case the iterator advances over a shrinking
    # list, so it effectively tests every other token while popping the
    # ones in between -- confirm whether that interaction is intended
    # before restructuring.
    for i, token in enumerate(reversed(tokens) if rev else tokens):
        if criteria(token) and num > i + 1:
            # Conditional expression used purely for its side effect:
            # pop from the matching end and keep original token order.
            res.insert(0, tokens.pop()) if rev else res.append(tokens.pop(0))
        else:
            break
    if res:
        self[prop] = self._clean(' '.join(res))
    return tokens
Strip off contiguous tokens from the start or end of the list that meet the criteria .
50,384
def _parse_text(self, el, refs=None, specials=None, element_cls=Paragraph):
    """Like _parse_element, but collapse the result into a single element."""
    refs = {} if refs is None else refs
    specials = {} if specials is None else specials
    elements = self._parse_element_r(el, specials=specials, refs=refs, element_cls=element_cls)
    if not elements:
        return [element_cls('')]
    # Join all parsed elements with single spaces into one element.
    combined = elements[0]
    for extra in elements[1:]:
        combined += element_cls(' ') + extra
    return [combined]
Like _parse_element but ensure a single element .
50,385
def _parse_reference(self, el):
    """Return the reference ID from an href fragment, rid/idref attribute, or text."""
    href = el.get('href', '')
    if '#' in href:
        return [href.split('#', 1)[1]]
    # Fall back to explicit reference attributes, in order of preference.
    for attr in ('rid', 'idref'):
        if attr in el.attrib:
            return [el.attrib[attr]]
    # Last resort: the element's own text content.
    return [''.join(el.itertext()).strip()]
Return reference ID from href or text content .
50,386
def _is_inline(self, element):
    """Return True if *element* is an inline element.

    Comments and processing instructions are never inline.
    """
    if element.tag in {etree.Comment, etree.ProcessingInstruction}:
        return False
    return element.tag.lower() in self.inline_elements
Return True if an element is inline .
50,387
def _next_token(self, skipws=True):
    """Advance self._token to the next token and return it.

    When *skipws* is true, whitespace tokens are consumed and skipped.
    """
    self._token = next(self._tokens).group(0)
    # Iterative form of the original's tail recursion.
    while skipws and self._token.isspace():
        self._token = next(self._tokens).group(0)
    return self._token
Increment _token to the next token and return it .
50,388
def _parse_entry(self):
    """Dispatch parsing of a single BibTeX entry by its type."""
    entry_type = self._next_token().lower()
    if entry_type == 'string':
        self._parse_string()
    elif entry_type in ('comment', 'preamble'):
        pass  # comments and preambles carry no record data
    else:
        self._parse_record(entry_type)
Parse an entry .
50,389
def _parse_string(self):
    """Parse an @string entry and store its macro definition."""
    if self._next_token() not in ['{', '(']:
        return
    field = self._parse_field()
    if field:
        self.definitions[field[0]] = field[1]
Parse a string entry and store the definition .
50,390
def _parse_record(self, record_type):
    """Parse a single BibTeX record of *record_type* into self.records.

    Reads the citation key, then consumes comma-separated fields until the
    token following a field is no longer ','. Field keys are normalized via
    self.keynorms; 'pages' values have spaces removed and '--' collapsed to
    '-'; 'author'/'editor' values are split into name lists; all other
    values are converted from LaTeX to unicode.
    """
    if self._next_token() in ['{', '(']:
        key = self._next_token()
        self.records[key] = {u'id': key, u'type': record_type.lower()}
        if self._next_token() == ',':
            while True:
                field = self._parse_field()
                if field:
                    k, v = field[0], field[1]
                    if k in self.keynorms:
                        k = self.keynorms[k]
                    if k == 'pages':
                        v = v.replace(' ', '').replace('--', '-')
                    if k == 'author' or k == 'editor':
                        v = self.parse_names(v)
                    else:
                        v = latex_to_unicode(v)
                    self.records[key][k] = v
                # _parse_field leaves the terminating token in self._token;
                # anything other than ',' ends the field list.
                if self._token != ',':
                    break
Parse a record .
50,391
def _parse_field(self):
    """Parse a single 'name = value' field.

    Returns a (name, value) tuple, or None when no '=' follows the name.
    """
    name = self._next_token()
    if self._next_token() != '=':
        return None  # explicit form of the original's implicit fall-through
    return name, self._parse_value()
Parse a Field .
50,392
def _parse_value(self):
    """Parse a value: digits, macro definitions, and the contents of
    double quotes or curly brackets.

    The '#' concatenation operator is handled implicitly: its token is
    skipped and surrounding components accumulate into the same value.
    """
    val = []
    while True:
        t = self._next_token()
        if t == '"':
            # Double-quoted string; braces may nest and protect inner quotes.
            brac_counter = 0
            while True:
                t = self._next_token(skipws=False)
                if t == '{':
                    brac_counter += 1
                if t == '}':
                    brac_counter -= 1
                if t == '"' and brac_counter <= 0:
                    break
                else:
                    val.append(t)
        elif t == '{':
            # Brace-delimited string; track nesting to the closing brace.
            brac_counter = 0
            while True:
                t = self._next_token(skipws=False)
                if t == '{':
                    brac_counter += 1
                if t == '}':
                    brac_counter -= 1
                if brac_counter < 0:
                    break
                else:
                    val.append(t)
        elif re.match(r'\w', t):
            # Bare word: substitute @string definition if one exists.
            # Note this branch also captures digits, since \w matches [0-9].
            val.extend([self.definitions.get(t, t), ' '])
        elif t.isdigit():
            # BUGFIX: was val.append([t, ' ']), which put a *list* into val
            # and would crash ''.join(val) below. Currently unreachable (the
            # \w branch above already matches digits) but corrected anyway.
            val.extend([t, ' '])
        elif t == '#':
            pass  # concatenation operator: components simply accumulate
        else:
            break
    value = ' '.join(''.join(val).split())
    return value
Parse a value . Digits definitions and the contents of double quotes or curly brackets .
50,393
def parse_names(cls, names):
    """Split a BibTeX author/editor field on ' and ' (outside braces) into names."""
    parts = re.split(r'\sand\s(?=[^{}]*(?:\{|$))', names)
    return [latex_to_unicode(part) for part in parts if part]
Parse a string of names separated by and like in a BibTeX authors field .
50,394
def metadata(self):
    """Return metadata for the parsed collection of records.

    The record count goes under 'records'; entries from self.meta are
    merged on top (and may override it).
    """
    info = {u'records': self.size}
    info.update(self.meta)
    return info
Return metadata for the parsed collection of records .
50,395
def json(self):
    """Return the records as a JSON string, following the BibJSON convention."""
    # BUGFIX: list() is required -- on Python 3, dict.values() returns a
    # view object that json.dumps rejects with a TypeError.
    return json.dumps(OrderedDict([('metadata', self.metadata), ('records', list(self.records.values()))]))
Return a list of records as a JSON string . Follows the BibJSON convention .
50,396
def _flush(self):
    """Persist self._data to self.path as YAML, creating parent directories.

    You should not need to call this manually.
    """
    parent = os.path.dirname(self.path)
    if not os.path.isdir(parent):
        os.makedirs(parent)
    with io.open(self.path, 'w', encoding='utf8') as f:
        yaml.safe_dump(self._data, f, default_flow_style=False, encoding=None)
Save the contents of data to the file on disk . You should not need to call this manually .
50,397
def _is_allowed_abbr(self, tokens):
    """Return True if *tokens* form an allowed abbreviation candidate.

    An abbreviation must be at most two tokens, within the configured
    length bounds (self.abbr_min..self.abbr_max), bracket-balanced, start
    with an alphanumeric character, contain at least one letter, and not
    look like a bare quantity with a unit.
    """
    if len(tokens) <= 2:
        abbr_text = ''.join(tokens)
        if self.abbr_min <= len(abbr_text) <= self.abbr_max and bracket_level(abbr_text) == 0:
            if abbr_text[0].isalnum() and any(c.isalpha() for c in abbr_text):
                # Reject quantities like '5g', '10mL', '3.5cm'.
                # BUGFIX: raw string -- the previous non-raw '\d' is an
                # invalid escape (DeprecationWarning on Python 3.6+).
                if re.match(r'^\d+(\.\d+)?(g|m[lL]|cm)$', abbr_text):
                    return False
                return True
    return False
Return True if text is an allowed abbreviation .
50,398
def name(self):
    """A unique snake_case name for this scraper, derived from the class name."""
    pieces = []
    for char in self.__class__.__name__:
        if char.isupper():
            pieces.append('_')  # underscore before each capital
        pieces.append(char)
    return ''.join(pieces).strip('_').lower()
A unique name for this scraper .
50,399
def _post_scrape(self, value, processor=None):
    """Apply processing to the scraped values.

    Runs self.process over each value, then the optional *processor*,
    dropping None results at each stage. When self.all is falsy, the list
    is collapsed to its first element (or None).
    """
    processed = [v for v in (self.process(v) for v in value) if v is not None]
    if processor:
        processed = [v for v in (processor(v) for v in processed) if v is not None]
    if not self.all:
        processed = processed[0] if processed else None
    log.debug('Scraped %s: %s from %s' % (self.name, processed, self.selection))
    return processed
Apply processing to the scraped value .