idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
49,300
def _extract_local_mean_gauss(image, mask=slice(None), sigma=1, voxelspacing=None):
    """Internal single-image version of local_mean_gauss.

    Smooths ``image`` with a Gaussian kernel scaled by the voxel spacing,
    then extracts the intensities selected by ``mask``.
    """
    if voxelspacing is None:
        voxelspacing = [1.] * image.ndim
    # scale the smoothing sigma per-dimension by the physical voxel spacing
    kernel = _create_structure_array(sigma, voxelspacing)
    smoothed = gaussian_filter(image, kernel)
    return _extract_intensities(smoothed, mask)
Internal single - image version of local_mean_gauss .
49,301
def _extract_centerdistance(image, mask=slice(None), voxelspacing=None):
    """Internal single-image version of centerdistance.

    Returns, for every voxel selected by ``mask``, its Euclidean distance
    (in physical units given by ``voxelspacing``) to the image center.
    """
    image = numpy.asarray(image)
    # fixed: was `None == voxelspacing`, which breaks when an array is passed
    # (elementwise comparison); identity check is the correct idiom
    if voxelspacing is None:
        voxelspacing = [1.] * image.ndim
    centers = [(x - 1) / 2. for x in image.shape]
    # fixed: numpy.float alias was removed in NumPy >= 1.24; builtin float is equivalent
    indices = numpy.indices(image.shape, dtype=float)
    for dim_indices, c, vs in zip(indices, centers, voxelspacing):
        dim_indices -= c
        dim_indices *= vs
    return numpy.sqrt(numpy.sum(numpy.square(indices), 0))[mask].ravel()
Internal single - image version of centerdistance .
49,302
def _extract_intensities(image, mask=slice(None)):
    """Internal single-image version of intensities.

    Returns a flat copy of the image values selected by ``mask``.
    """
    # copy first so the returned array never aliases the caller's data
    selected = numpy.array(image, copy=True)[mask]
    return selected.ravel()
Internal single - image version of intensities .
49,303
def _substract_hemispheres(active, reference, active_sigma, reference_sigma, voxel_spacing):
    """Helper function for _extract_hemispheric_difference.

    Smooths both images with their respective Gaussian kernels and then
    subtracts the reference from the active image.
    """
    active_smoothed = gaussian_filter(
        active, sigma=_create_structure_array(active_sigma, voxel_spacing))
    reference_smoothed = gaussian_filter(
        reference, sigma=_create_structure_array(reference_sigma, voxel_spacing))
    return active_smoothed - reference_smoothed
Helper function for _extract_hemispheric_difference . Smooths both images and then subtracts the reference from the active image .
49,304
def _dispatch(self, tree):
    "_dispatcher function, _dispatching tree type T to method _T."
    # a list of nodes is dispatched element by element
    if isinstance(tree, list):
        for subtree in tree:
            self._dispatch(subtree)
        return
    meth = getattr(self, "_" + tree.__class__.__name__)
    # suppress bare None nodes when not indenting (single-line output mode)
    if tree.__class__.__name__ == 'NoneType' and not self._do_indent:
        return
    meth(tree)
_dispatcher function _dispatching tree type T to method _T .
49,305
def _AssAttr(self, t):
    """Handle assigning an attribute of an object."""
    # emit the target expression, then the dotted attribute name
    self._dispatch(t.expr)
    self._write('.' + t.attrname)
Handle assigning an attribute of an object
49,306
def _Assign(self, t):
    """Expression assignment such as ``a = 1``."""
    self._fill()
    # chained assignment: every target gets its own " = "
    for target in t.nodes:
        self._dispatch(target)
        self._write(" = ")
    self._dispatch(t.expr)
    if not self._do_indent:
        self._write('; ')
Expression Assignment such as a = 1 .
49,307
def _AssTuple(self, t):
    """Tuple on left hand side of an expression."""
    # comma-separate all but the last element, then emit the last one
    for element in t.nodes[:-1]:
        self._dispatch(element)
        self._write(", ")
    self._dispatch(t.nodes[-1])
Tuple on left hand side of an expression .
49,308
def _CallFunc(self, t):
    """Function call: emits ``callee(args, *star_args, **dstar_args)``."""
    self._dispatch(t.node)
    self._write("(")
    need_comma = False
    for arg in t.args:
        if need_comma:
            self._write(", ")
        else:
            need_comma = True
        self._dispatch(arg)
    if t.star_args:
        if need_comma:
            self._write(", ")
        else:
            need_comma = True
        self._write("*")
        self._dispatch(t.star_args)
    if t.dstar_args:
        if need_comma:
            self._write(", ")
        else:
            need_comma = True
        self._write("**")
        self._dispatch(t.dstar_args)
    self._write(")")
Function call .
49,309
def _From(self, t):
    """Handle ``from xyz import foo, bar as baz``."""
    self._fill("from ")
    self._write(t.modname)
    self._write(" import ")
    for i, (name, asname) in enumerate(t.names):
        if i:
            self._write(", ")
        self._write(name)
        if asname is not None:
            self._write(" as " + asname)
Handle from xyz import foo bar as baz .
49,310
def _Function(self, t):
    """Handle function definitions (decorators, signature with defaults, body)."""
    if t.decorators is not None:
        self._fill("@")
        self._dispatch(t.decorators)
    self._fill("def " + t.name + "(")
    # left-pad defaults with None so they align with argnames positionally
    defaults = [None] * (len(t.argnames) - len(t.defaults)) + list(t.defaults)
    for i, (arg_name, arg_default) in enumerate(zip(t.argnames, defaults)):
        self._write(arg_name)
        if arg_default is not None:
            self._write('=')
            self._dispatch(arg_default)
        if i < len(t.argnames) - 1:
            self._write(', ')
    self._write(")")
    if self._single_func:
        self._do_indent = False
    self._enter()
    self._dispatch(t.code)
    self._leave()
    self._do_indent = True
Handle function definitions
49,311
def _Getattr(self, t):
    """Handle getting an attribute of an object."""
    # parenthesize arithmetic expressions so precedence survives round-trip
    if isinstance(t.expr, (Div, Mul, Sub, Add)):
        self._write('(')
        self._dispatch(t.expr)
        self._write(')')
    else:
        self._dispatch(t.expr)
    self._write('.' + t.attrname)
Handle getting an attribute of an object
49,312
def _Import(self, t):
    """Handle ``import xyz.foo``."""
    self._fill("import ")
    for i, (name, asname) in enumerate(t.names):
        if i:
            self._write(", ")
        self._write(name)
        if asname is not None:
            self._write(" as " + asname)
Handle import xyz . foo .
49,313
def _Keyword(self, t):
    """Keyword value assignment within function calls and definitions."""
    self._write(t.name)
    self._write("=")
    self._dispatch(t.expr)
Keyword value assignment within function calls and definitions .
49,314
def __sequenceAscendingStrict(l):
    """Test a sequence's values to be in strictly ascending order.

    Returns the sequence unchanged; raises ``argparse.ArgumentTypeError``
    if any element is not strictly greater than its predecessor.
    """
    it = iter(l)
    # fixed: plain next(it) raised StopIteration on an empty sequence;
    # the default makes an empty sequence pass trivially instead
    next(it, None)
    # zip(l, it) pairs each element with its successor
    if not all(b > a for a, b in zip(l, it)):
        raise argparse.ArgumentTypeError('All values must be given in strictly ascending order.')
    return l
Test a sequence's values to be in strictly ascending order .
49,315
def __check_mapping(self, landmarks):
    """Checks whether the image from which the supplied landmarks were
    extracted can be transformed to the learned standard intensity space
    without loss of information.
    """
    # gaps between successive learned standard-space intervals
    sc_udiff = numpy.asarray(self.__sc_umaxs)[1:] - numpy.asarray(self.__sc_umins)[:-1]
    # gaps between successive supplied landmarks
    l_diff = numpy.asarray(landmarks)[1:] - numpy.asarray(landmarks)[:-1]
    return numpy.all(sc_udiff > numpy.asarray(l_diff))
Checks whether the image from which the supplied landmarks were extracted can be transformed to the learned standard intensity space without loss of information .
49,316
def is_in_interval(n, l, r, border='included'):
    """Checks whether a number lies inside the interval [l, r].

    ``border`` selects whether the endpoints count as inside
    ('included') or not ('excluded').
    """
    if border == 'included':
        return l <= n <= r
    if border == 'excluded':
        return l < n < r
    raise ValueError('borders must be either \'included\' or \'excluded\'')
Checks whether a number is inside the interval l r .
49,317
def are_in_interval(s, l, r, border='included'):
    """Checks whether all numbers in the sequence ``s`` lie inside the
    interval formed by ``l`` and ``r``.
    """
    checks = [IntensityRangeStandardization.is_in_interval(x, l, r, border) for x in s]
    return numpy.all(checks)
Checks whether all number in the sequence s lie inside the interval formed by l and r .
49,318
def template_sphere(radius, dimensions):
    r"""Returns a spherical binary structure of the supplied radius that can
    be used as template input to the generalized Hough transform.
    """
    if int(dimensions) != dimensions:
        raise TypeError('The supplied dimension parameter must be of type integer.')
    dimensions = int(dimensions)
    # a sphere is an ellipsoid whose every axis has length 2 * radius
    return template_ellipsoid(dimensions * [radius * 2])
Returns a spherical binary structure of the supplied radius that can be used as template input to the generalized Hough transform .
49,319
def looks_like_issubclass(obj, classname):
    """Return True if the object has a class or superclass with the given
    class name.

    Name-based check: works even when the class object itself is not
    importable here.
    """
    if obj.__name__ == classname:
        return True
    return any(klass.__name__ == classname for klass in obj.__mro__)
Return True if the object has a class or superclass with the given class name .
49,320
def __make_footprint(input, size, footprint):
    "Creates a standard footprint element ala scipy.ndimage."
    if footprint is None:
        if size is None:
            raise RuntimeError("no footprint or filter size provided")
        # expand a scalar/short size to one entry per image dimension
        sizes = _ni_support._normalize_sequence(size, input.ndim)
        return numpy.ones(sizes, dtype=bool)
    return numpy.asarray(footprint, dtype=bool)
Creates a standard footprint element ala scipy . ndimage .
49,321
def __check_label_image(label_image):
    """Check the label image for consistent labelling starting from 1.

    Raises ``AttributeError`` when the labels are not exactly 1..max with
    no gaps.
    """
    # fixed: scipy.unique/scipy.arange were numpy re-exports removed from
    # the scipy namespace; use numpy directly (local import keeps this
    # block self-contained regardless of the module's import list)
    import numpy
    encountered_indices = numpy.unique(label_image)
    expected_indices = numpy.arange(1, label_image.max() + 1)
    if not encountered_indices.size == expected_indices.size \
            or not (encountered_indices == expected_indices).all():
        raise AttributeError('The supplied label image does either not contain any regions or they are not labeled consecutively starting from 1.')
Check the label image for consistent labelling starting from 1 .
49,322
def __xd_iterator_pass_on(arr, view, fun):
    """Like xd_iterator, but the fun return values are always passed on to
    the next call and only the last one is returned.

    Iterates over every position of the dimensions NOT named in ``view``,
    passing the squeezed sub-array plus the previous return value to ``fun``.
    """
    # dims in `view` are kept whole (single None placeholder); the others
    # are iterated index by index
    iterations = [[None] if dim in view else list(range(arr.shape[dim]))
                  for dim in range(arr.ndim)]
    passon = None
    for indices in itertools.product(*iterations):
        # fixed: indexing with a *list* of slices is an error in modern
        # numpy; a tuple is the supported form
        slicer = tuple(slice(None) if idx is None else slice(idx, idx + 1)
                       for idx in indices)
        passon = fun(scipy.squeeze(arr[slicer]), passon)
    return passon
Like xd_iterator but the fun return values are always passed on to the next and only the last returned .
49,323
def set_pixel_spacing(hdr, spacing):
    r"""Deprecated synonym of ``~medpy.io.header.set_voxel_spacing``."""
    # fixed: the warning named the wrong function (copy-paste from the
    # getter); this function is set_pixel_spacing
    warnings.warn('set_pixel_spacing() is depreciated, use set_voxel_spacing() instead',
                  category=DeprecationWarning)
    set_voxel_spacing(hdr, spacing)
Deprecated synonym of ~medpy . io . header . set_voxel_spacing .
49,324
def copy_to(self, sitkimage):
    """Copy all stored meta information to an sitk Image and return it."""
    # carry over any raw metadata key/value pairs from the source image
    if self.sitkimage is not None:
        for key in self.sitkimage.GetMetaDataKeys():
            sitkimage.SetMetaData(key, self.sitkimage.GetMetaData(key))
    ndim = len(sitkimage.GetSize())
    spacing, offset, direction = self.get_info_consistent(ndim)
    sitkimage.SetSpacing(spacing)
    sitkimage.SetOrigin(offset)
    sitkimage.SetDirection(tuple(direction.flatten()))
    return sitkimage
Copy all stored meta information info to an sitk Image .
49,325
def get_info_consistent(self, ndim):
    """Returns the main meta-data information adapted to the supplied image
    dimensionality: spacing, offset and direction, padded (spacing 1.0,
    offset 0.0, identity direction) or truncated to ``ndim`` entries.
    """
    missing = ndim - len(self.spacing)
    spacing = self.spacing + (1.0,) * missing if missing > 0 else self.spacing[:ndim]

    missing = ndim - len(self.offset)
    offset = self.offset + (0.0,) * missing if missing > 0 else self.offset[:ndim]

    if ndim > self.direction.shape[0]:
        # embed the known direction matrix into a larger identity
        direction = np.identity(ndim)
        direction[:self.direction.shape[0], :self.direction.shape[0]] = self.direction
    else:
        direction = self.direction[:ndim, :ndim]

    return spacing, offset, direction
Returns the main meta - data information adapted to the supplied image dimensionality .
49,326
def hd95(result, reference, voxelspacing=None, connectivity=1):
    """95th percentile of the (symmetric) Hausdorff Distance.

    Computes surface distances in both directions and takes the 95th
    percentile of their union.
    """
    forward = __surface_distances(result, reference, voxelspacing, connectivity)
    backward = __surface_distances(reference, result, voxelspacing, connectivity)
    return numpy.percentile(numpy.hstack((forward, backward)), 95)
95th percentile of the Hausdorff Distance .
49,327
def __surface_distances(result, reference, voxelspacing=None, connectivity=1):
    """The distances between the surface voxels of binary objects in
    ``result`` and their nearest partner surface voxel of a binary object
    in ``reference``.

    Raises ``RuntimeError`` if either input contains no binary object.
    """
    # fixed: numpy.bool alias was removed in NumPy >= 1.24; builtin bool works
    result = numpy.atleast_1d(result.astype(bool))
    reference = numpy.atleast_1d(reference.astype(bool))
    if voxelspacing is not None:
        voxelspacing = _ni_support._normalize_sequence(voxelspacing, result.ndim)
        voxelspacing = numpy.asarray(voxelspacing, dtype=numpy.float64)
        # distance_transform_edt requires a contiguous sampling array
        if not voxelspacing.flags.contiguous:
            voxelspacing = voxelspacing.copy()

    footprint = generate_binary_structure(result.ndim, connectivity)

    if 0 == numpy.count_nonzero(result):
        raise RuntimeError('The first supplied array does not contain any binary object.')
    if 0 == numpy.count_nonzero(reference):
        raise RuntimeError('The second supplied array does not contain any binary object.')

    # object boundary = object minus its erosion
    result_border = result ^ binary_erosion(result, structure=footprint, iterations=1)
    reference_border = reference ^ binary_erosion(reference, structure=footprint, iterations=1)

    # distance of every voxel to the reference border, sampled at result border
    dt = distance_transform_edt(~reference_border, sampling=voxelspacing)
    return dt[result_border]
The distances between the surface voxel of binary objects in result and their nearest partner surface voxel of a binary object in reference .
49,328
def __minowski_low_positive_integer_p(h1, h2, p=2):
    """A faster implementation of the Minkowski distance for positive
    integer ``p`` < 25: repeated multiplication instead of a power call.
    """
    mult = scipy.absolute(h1 - h2)
    dif = mult
    # dif = mult ** p via (p - 1) elementwise multiplications
    for _ in range(p - 1):
        dif = scipy.multiply(dif, mult)
    return math.pow(scipy.sum(dif), 1. / p)
A faster implementation of the Minkowski distance for positive integer p < 25 .
49,329
def __kullback_leibler(h1, h2):
    """The actual KL implementation.

    Sums h1 * log(h1 / h2) over the bins where h1 is non-zero (zero bins
    contribute 0 by convention).
    """
    # fixed: scipy.float_ alias was removed; builtin float gives the same dtype
    result = h1.astype(float)
    mask = h1 != 0
    # NOTE(review): bins with h2 == 0 but h1 != 0 still divide by zero,
    # as in the original — behavior intentionally preserved
    result[mask] = scipy.multiply(h1[mask], scipy.log(h1[mask] / h2[mask]))
    return scipy.sum(result)
The actual KL implementation .
49,330
def __prepare_histogram(h1, h2):
    """Convert the histograms to scipy.ndarrays if required and verify that
    they have matching shape and size.
    """
    if type(h1) is not scipy.ndarray:
        h1 = scipy.asarray(h1)
    if type(h2) is not scipy.ndarray:
        h2 = scipy.asarray(h2)
    if h1.shape != h2.shape or h1.size != h2.size:
        raise ValueError('h1 and h2 must be of same shape and size')
    return h1, h2
Convert the histograms to scipy . ndarrays if required .
49,331
def find():
    """Find a local spark installation.

    Prefers the SPARK_HOME environment variable, then falls back to a list
    of conventional install locations.
    """
    spark_home = os.environ.get('SPARK_HOME', None)

    if not spark_home:
        candidates = (
            '/usr/local/opt/apache-spark/libexec',
            '/usr/lib/spark/',
            '/usr/local/spark/',
            '/opt/spark/',
        )
        for candidate in candidates:
            if os.path.exists(candidate):
                spark_home = candidate
                break

    if not spark_home:
        raise ValueError("Couldn't find Spark, make sure SPARK_HOME env is set"
                         " or Spark is in an expected location (e.g. from homebrew installation).")

    return spark_home
Find a local spark installation .
49,332
def change_rc(spark_home, spark_python, py4j):
    """Persists changes to the environment by appending export lines to the
    user's ~/.bashrc (only if that file already exists).
    """
    bashrc_location = os.path.expanduser("~/.bashrc")
    if not os.path.isfile(bashrc_location):
        return
    with open(bashrc_location, 'a') as bashrc:
        bashrc.write("\n# Added by findspark\n")
        bashrc.write("export SPARK_HOME=" + spark_home + "\n")
        bashrc.write("export PYTHONPATH=" + spark_python + ":" + py4j + ":$PYTHONPATH\n\n")
Persists changes to environment by changing shell config .
49,333
def edit_ipython_profile(spark_home, spark_python, py4j):
    """Adds a startup file to the current IPython profile to import pyspark
    on interpreter start.
    """
    from IPython import get_ipython
    ip = get_ipython()

    # running inside IPython: use its profile; otherwise locate the default one
    if ip:
        profile_dir = ip.profile_dir.location
    else:
        from IPython.utils.path import locate_profile
        profile_dir = locate_profile()

    startup_file_loc = os.path.join(profile_dir, "startup", "findspark.py")
    with open(startup_file_loc, 'w') as startup_file:
        startup_file.write("import sys, os\n")
        startup_file.write("os.environ['SPARK_HOME'] = '" + spark_home + "'\n")
        startup_file.write("sys.path[:0] = " + str([spark_python, py4j]) + "\n")
        startup_file.write("import pyspark\n")
Adds a startup file to the current IPython profile to import pyspark .
49,334
def init(spark_home=None, python_path=None, edit_rc=False, edit_profile=False):
    """Make pyspark importable.

    Resolves the spark installation, sets the relevant environment
    variables, and prepends spark's python directories to sys.path.
    Optionally persists the setup to ~/.bashrc and/or the IPython profile.
    """
    if not spark_home:
        spark_home = find()
    if not python_path:
        python_path = os.environ.get('PYSPARK_PYTHON', sys.executable)

    os.environ['SPARK_HOME'] = spark_home
    os.environ['PYSPARK_PYTHON'] = python_path
    # ensure the variable exists so later appends don't need a guard
    if not os.environ.get("PYSPARK_SUBMIT_ARGS", None):
        os.environ["PYSPARK_SUBMIT_ARGS"] = ''

    spark_python = os.path.join(spark_home, 'python')
    py4j = glob(os.path.join(spark_python, 'lib', 'py4j-*.zip'))[0]
    sys.path[:0] = [spark_python, py4j]

    if edit_rc:
        change_rc(spark_home, spark_python, py4j)
    if edit_profile:
        edit_ipython_profile(spark_home, spark_python, py4j)
Make pyspark importable .
49,335
def _add_to_submit_args(s):
    """Appends string ``s`` to the PYSPARK_SUBMIT_ARGS env var and returns
    the new value.
    """
    current = os.environ.get("PYSPARK_SUBMIT_ARGS", "")
    new_args = current + (" %s" % s)
    os.environ["PYSPARK_SUBMIT_ARGS"] = new_args
    return new_args
Adds string s to the PYSPARK_SUBMIT_ARGS env var
49,336
def add_packages(packages):
    """Add external packages to the pyspark interpreter.

    Accepts a single package name or a list of names.
    """
    if isinstance(packages, str):
        packages = [packages]
    _add_to_submit_args("--packages " + ",".join(packages) + " pyspark-shell")
Add external packages to the pyspark interpreter .
49,337
def add_jars(jars):
    """Add external jars to the pyspark interpreter.

    Accepts a single jar path or a list of paths.
    """
    if isinstance(jars, str):
        jars = [jars]
    _add_to_submit_args("--jars " + ",".join(jars) + " pyspark-shell")
Add external jars to the pyspark interpreter .
49,338
def parse(cls, expression):
    """Parse the given console command definition into a dict with keys
    ``name``, ``arguments`` and ``options``.
    """
    parsed = {"name": None, "arguments": [], "options": []}

    if not expression.strip():
        raise ValueError("Console command signature is empty.")

    expression = expression.replace(os.linesep, "")

    # the command name is the first run of non-whitespace characters
    matches = re.match(r"[^\s]+", expression)
    if not matches:
        raise ValueError("Unable to determine command name from signature.")
    parsed["name"] = matches.group(0)

    # every {...} group is a parameter token
    tokens = re.findall(r"\{\s*(.*?)\s*\}", expression)
    if tokens:
        parsed.update(cls._parameters(tokens))

    return parsed
Parse the given console command definition into a dict .
49,339
def _parameters(cls, tokens):
    """Extract all of the parameters from the tokens.

    Tokens starting with ``--`` are options; everything else is an argument.
    """
    arguments = []
    options = []
    for token in tokens:
        if token.startswith("--"):
            options.append(cls._parse_option(token))
        else:
            arguments.append(cls._parse_argument(token))
    return {"arguments": arguments, "options": options}
Extract all of the parameters from the tokens .
49,340
def _parse_argument(cls, token):
    """Parse an argument expression into an ``_argument`` record."""
    description = ""
    validator = None

    # split off the description after " : "
    # NOTE(review): maxsplit=2 can yield 3 parts (and raise on unpack) if the
    # description itself contains " : " — quirk preserved from the original
    if " : " in token:
        token, description = tuple(token.split(" : ", 2))
        token = token.strip()
        description = description.strip()

    # strip an optional trailing "(validator)" group
    m = re.match(r"(.*)\((.*?)\)", token)
    if m:
        token = m.group(1).strip()
        validator = m.group(2).strip()

    if token.endswith("?*"):
        return _argument(
            token.rstrip("?*"),
            Argument.MULTI_VALUED | Argument.OPTIONAL,
            description,
            None,
        )
    elif token.endswith("*"):
        return _argument(
            token.rstrip("*"),
            Argument.MULTI_VALUED | Argument.REQUIRED,
            description,
            None,
        )
    elif token.endswith("?"):
        return _argument(token.rstrip("?"), Argument.OPTIONAL, description, None)

    # "name=default" form: optional argument with a default value
    m = re.match(r"(.+)=(.+)", token)
    if m:
        return _argument(m.group(1), Argument.OPTIONAL, description, m.group(2))

    return _argument(token, Argument.REQUIRED, description, None)
Parse an argument expression .
49,341
def _parse_option(cls, token):
    """Parse an option expression into an ``_option`` record."""
    description = ""
    validator = None

    # split off the description after " : "
    # NOTE(review): maxsplit=2 can yield 3 parts (and raise on unpack) if the
    # description itself contains " : " — quirk preserved from the original
    if " : " in token:
        token, description = tuple(token.split(" : ", 2))
        token = token.strip()
        description = description.strip()

    # strip an optional trailing "(validator)" group
    m = re.match(r"(.*)\((.*?)\)", token)
    if m:
        token = m.group(1).strip()
        validator = m.group(2).strip()

    # "-s|--long" form: first part is the shortcut
    shortcut = None
    parts = re.split(r"\s*\|\s*", token, 2)
    if len(parts) > 1:
        shortcut = parts[0].lstrip("-")
        token = parts[1]
    else:
        token = token.lstrip("-")

    default = None
    mode = Option.NO_VALUE
    if token.endswith("=*"):
        mode = Option.MULTI_VALUED
        token = token.rstrip("=*")
    elif token.endswith("=?*"):
        mode = Option.MULTI_VALUED
        token = token.rstrip("=?*")
    elif token.endswith("=?"):
        mode = Option.OPTIONAL_VALUE
        token = token.rstrip("=?")
    elif token.endswith("="):
        mode = Option.REQUIRED_VALUE
        token = token.rstrip("=")

    # "name=default" (possibly with ? / * modifiers) supplies a default value
    m = re.match(r"(.+)(=[?*]*)(.+)", token)
    if m:
        token = m.group(1)
        operator = m.group(2)
        default = m.group(3)
        if operator == "=*":
            mode = Option.REQUIRED_VALUE | Option.MULTI_VALUED
        elif operator == "=?*":
            mode = Option.MULTI_VALUED
        elif operator == "=?":
            mode = Option.OPTIONAL_VALUE
        elif operator == "=":
            mode = Option.REQUIRED_VALUE

    return _option(token, shortcut, mode, description, default)
Parse an option expression .
49,342
def add(self, command):
    """Adds a command object and returns the application for chaining."""
    self.add_command(command.config)
    command.set_application(self)
    return self
Adds a command object .
49,343
def _configure_using_fluent_definition(self):
    """Configure the console command using a fluent (signature string)
    definition.
    """
    definition = Parser.parse(self.signature)
    self._config.set_name(definition["name"])
    # register every parsed argument and option on the command config
    for name, flags, description, default in definition["arguments"]:
        self._config.add_argument(name, flags, description, default)
    for long_name, short_name, flags, description, default in definition["options"]:
        self._config.add_option(long_name, short_name, flags, description, default)
Configure the console command using a fluent definition .
49,344
def argument(self, key=None):
    """Get the value of a command argument, or all arguments when no key
    is given.
    """
    if key is None:
        return self._args.arguments()
    return self._args.argument(key)
Get the value of a command argument .
49,345
def option(self, key=None):
    """Get the value of a command option, or all options when no key is
    given.
    """
    if key is None:
        return self._args.options()
    return self._args.option(key)
Get the value of a command option .
49,346
def confirm(self, question, default=False, true_answer_regex="(?i)^y"):
    """Confirm a question with the user; delegates to the IO layer."""
    return self._io.confirm(question, default, true_answer_regex)
Confirm a question with the user .
49,347
def ask(self, question, default=None):
    """Prompt the user for input.

    Accepts either a plain string or a prebuilt Question object.
    """
    if isinstance(question, Question):
        return self._io.ask_question(question)
    return self._io.ask(question, default)
Prompt the user for input .
49,348
def choice(self, question, choices, default=None, attempts=None, multiple=False):
    """Give the user a single choice from a list of answers."""
    q = ChoiceQuestion(question, choices, default)
    q.set_max_attempts(attempts)
    q.set_multi_select(multiple)
    return self._io.ask_question(q)
Give the user a single choice from a list of answers .
49,349
def create_question(self, question, type=None, **kwargs):
    """Returns a Question of the specified type ('choice', 'confirmation',
    or a plain question when no type is given).

    NOTE(review): an unrecognized type falls through and returns None, as
    in the original.
    """
    if not type:
        return Question(question, **kwargs)
    if type == "choice":
        return ChoiceQuestion(question, **kwargs)
    if type == "confirmation":
        return ConfirmationQuestion(question, **kwargs)
Returns a Question of specified type .
49,350
def table(self, header=None, rows=None, style=None):
    """Return a Table instance, optionally pre-filled with a header row,
    data rows and a named style.
    """
    if style is not None:
        style = self.TABLE_STYLES[style]
    table = Table(style)
    if header:
        table.set_header_row(header)
    if rows:
        table.set_rows(rows)
    return table
Return a Table instance .
49,351
def render_table(self, headers, rows, style=None):
    """Format input to a textual table and render it to the IO."""
    self.table(headers, rows, style).render(self._io)
Format input to textual table .
49,352
def line(self, text, style=None, verbosity=None):
    """Write a string as information output, optionally wrapped in a style
    tag.
    """
    styled = "<%s>%s</>" % (style, text) if style else text
    self._io.write_line(styled, verbosity)
Write a string as information output .
49,353
def line_error(self, text, style=None, verbosity=None):
    """Write a string as information output to stderr, optionally wrapped
    in a style tag.
    """
    styled = "<%s>%s</>" % (style, text) if style else text
    self._io.error_line(styled, verbosity)
Write a string as information output to stderr .
49,354
def progress_indicator(self, fmt=None, interval=100, values=None):
    """Creates a new progress indicator bound to this command's IO."""
    return ProgressIndicator(self.io, fmt, interval, values)
Creates a new progress indicator .
49,355
def spin(self, start_message, end_message, fmt=None, interval=100, values=None):
    """Automatically spin a progress indicator between a start and an end
    message.
    """
    indicator = ProgressIndicator(self.io, fmt, interval, values)
    return indicator.auto(start_message, end_message)
Automatically spin a progress indicator .
49,356
def add_style(self, name, fg=None, bg=None, options=None):
    """Adds a new output style (foreground, background and text options)
    to both the standard and the error output formatters.
    """
    style = Style(name)
    if fg is not None:
        style.fg(fg)
    if bg is not None:
        style.bg(bg)
    if options is not None:
        if "bold" in options:
            style.bold()
        if "underline" in options:
            style.underlined()
    self._io.output.formatter.add_style(style)
    self._io.error_output.formatter.add_style(style)
Adds a new style
49,357
def overwrite(self, text, size=None):
    """Overwrites the current output line with ``text``."""
    self._io.overwrite(text, size=size)
Overwrites the current line .
49,358
def get_github_hostname_user_repo_from_url(url):
    """Return hostname, user and repository to fork from.

    Handles both regular URLs (https://host/user/repo.git) and scp-like
    git syntax (git@host:user/repo.git). A trailing ``.git`` suffix is
    stripped from the repository name.
    """
    parsed = parse.urlparse(url)
    if parsed.netloc:
        host = parsed.netloc
        path = parsed.path[1:].rstrip('/')
    else:
        # scp-like syntax: [user@]host:path
        host, _, path = parsed.path.partition(":")
        if "@" in host:
            _, _, host = host.partition("@")
    user, repo = path.split("/", 1)
    if repo.endswith('.git'):
        repo = repo[:-4]
    return host, user, repo
Return hostname user and repository to fork from .
49,359
def git_get_title_and_message(begin, end):
    """Get title and message summary for patches between two commits.

    Returns (number of commits, title, message).
    """
    titles = git_get_log_titles(begin, end)
    # single commit: reuse its title; otherwise synthesize one
    title = titles[0] if len(titles) == 1 else "Pull request for " + end

    pr_template = find_pull_request_template()
    if pr_template:
        message = get_pr_template_message(pr_template)
    elif len(titles) == 1:
        message = git_get_commit_body(end)
    else:
        message = "\n".join(titles)
    return (len(titles), title, message)
Get title and message summary for patches between 2 commits .
49,360
def validate_commit_index(func):
    """Decorator: apply to the State Machine everything up to the commit
    index before running the wrapped method.
    """
    @functools.wraps(func)
    def wrapped(self, *args, **kwargs):
        # replay every committed-but-unapplied log entry
        for not_applied in range(self.log.last_applied + 1, self.log.commit_index + 1):
            self.state_machine.apply(self.log[not_applied]['command'])
            self.log.last_applied += 1
            try:
                self.apply_future.set_result(not_applied)
            except (asyncio.futures.InvalidStateError, AttributeError):
                # future already resolved or not set up — nothing to signal
                pass
        return func(self, *args, **kwargs)
    return wrapped
Apply to State Machine everything up to commit index
49,361
async def execute_command(self, command):
    """Write to log & send AppendEntries RPC.

    Blocks until the entry has been applied to the state machine.
    """
    self.apply_future = asyncio.Future(loop=self.loop)
    # removed unused local binding `entry`; the write's side effect is kept
    self.log.write(self.storage.term, command)
    asyncio.ensure_future(self.append_entries(), loop=self.loop)
    await self.apply_future
Write to log & send AppendEntries RPC
49,362
def start(self):
    """Increment current term, vote for herself & send vote requests."""
    self.storage.update({
        'term': self.storage.term + 1,
        'voted_for': self.id,
    })
    # the candidate's own vote counts
    self.vote_count = 1
    self.request_vote()
    self.election_timer.start()
Increment current term vote for herself & send vote requests
49,363
def on_receive_request_vote_response(self, data):
    """Receives a response for a vote request.

    If the vote was granted, check whether we reached majority and may
    become Leader.
    """
    if not data.get('vote_granted'):
        return
    self.vote_count += 1
    if self.state.is_majority(self.vote_count):
        self.state.to_leader()
Receives response for vote request . If the vote was granted then check if we got majority and may become Leader
49,364
def init_storage(self):
    """Set current term to zero upon initialization & voted_for to None."""
    if not self.storage.exists('term'):
        self.storage.update({'term': 0})
    # voted_for is always reset, regardless of a pre-existing term
    self.storage.update({'voted_for': None})
Set current term to zero upon initialization & voted_for to None
49,365
async def wait_for_election_success(cls):
    """Await this function if your cluster must have a leader.

    Returns immediately when a leader is already known.
    """
    if cls.leader is None:
        cls.leader_future = asyncio.Future(loop=cls.loop)
        await cls.leader_future
Await this function if your cluster must have a leader
49,366
async def wait_until_leader(cls, node_id):
    """Await this function if you want to do nothing until ``node_id``
    becomes a leader.
    """
    if node_id is None:
        raise ValueError('Node id can not be None!')
    if cls.get_leader() != node_id:
        cls.wait_until_leader_id = node_id
        cls.wait_until_leader_future = asyncio.Future(loop=cls.loop)
        await cls.wait_until_leader_future
        # clean up the waiting state once the target became leader
        cls.wait_until_leader_id = None
        cls.wait_until_leader_future = None
Await this function if you want to do nothing until node_id becomes a leader
49,367
def declare(full_table_name, definition, context):
    """Parse a declaration and compose the SQL to create the table.

    Returns (CREATE TABLE statement, uses_external flag).
    """
    table_name = full_table_name.strip('`').split('.')[1]
    if len(table_name) > MAX_TABLE_NAME_LENGTH:
        raise DataJointError(
            'Table name `{name}` exceeds the max length of {max_length}'.format(
                name=table_name, max_length=MAX_TABLE_NAME_LENGTH))

    # split the declaration into trimmed lines
    definition = re.split(r'\s*\n\s*', definition.strip())

    # an initial comment line becomes the table comment
    table_comment = definition.pop(0)[1:].strip() if definition[0].startswith('#') else ''

    in_key = True  # lines above the divider belong to the primary key
    primary_key = []
    attributes = []
    attribute_sql = []
    foreign_key_sql = []
    index_sql = []
    uses_external = False

    for line in definition:
        if line.startswith('#'):
            pass  # skip comment lines
        elif line.startswith('---') or line.startswith(' '):
            in_key = False  # divider: everything below is a dependent attribute
        elif is_foreign_key(line):
            compile_foreign_key(line, context, attributes,
                                primary_key if in_key else None,
                                attribute_sql, foreign_key_sql, index_sql)
        elif re.match(r'^(unique\s+)?index[^:]*$', line, re.I):
            compile_index(line, index_sql)
        else:
            name, sql, is_external = compile_attribute(line, in_key, foreign_key_sql)
            uses_external = uses_external or is_external
            if in_key and name not in primary_key:
                primary_key.append(name)
            if name not in attributes:
                attributes.append(name)
                attribute_sql.append(sql)

    if not primary_key:
        raise DataJointError('Table must have a primary key')

    return (
        'CREATE TABLE IF NOT EXISTS %s (\n' % full_table_name +
        ',\n'.join(attribute_sql +
                   ['PRIMARY KEY (`' + '`,`'.join(primary_key) + '`)'] +
                   foreign_key_sql + index_sql) +
        '\n) ENGINE=InnoDB, COMMENT "%s"' % table_comment), uses_external
Parse declaration and create new SQL table accordingly .
49,368
def create_virtual_module(module_name, schema_name, create_schema=False,
                          create_tables=False, connection=None):
    """Creates a python module with the given name from the name of a schema
    on the server, and automatically adds classes to it corresponding to the
    tables in the schema.
    """
    module = types.ModuleType(module_name)
    _schema = Schema(schema_name, create_schema=create_schema,
                     create_tables=create_tables, connection=connection)
    # populate the module namespace with one class per table
    _schema.spawn_missing_classes(context=module.__dict__)
    module.__dict__['schema'] = _schema
    return module
Creates a python module with the given name from the name of a schema on the server and automatically adds classes to it corresponding to the tables in the schema .
49,369
def drop(self, force=False):
    """Drop the associated schema if it exists.

    Honors the 'safemode' config unless ``force`` is given.
    """
    if not self.exists:
        logger.info("Schema named `{database}` does not exist. Doing nothing.".format(
            database=self.database))
        return
    confirmed = (not config['safemode'] or force or
                 user_choice("Proceed to delete entire schema `%s`?" % self.database,
                             default='no') == 'yes')
    if not confirmed:
        return
    logger.info("Dropping `{database}`.".format(database=self.database))
    try:
        self.connection.query("DROP DATABASE `{database}`".format(database=self.database))
        logger.info("Schema `{database}` was dropped successfully.".format(
            database=self.database))
    except pymysql.OperationalError:
        raise DataJointError(
            "An attempt to drop schema `{database}` "
            "has failed. Check permissions.".format(database=self.database))
Drop the associated schema if it exists
49,370
def process_relation_class(self, relation_class, context, assert_declared=False):
    """Assign schema properties to the relation class and declare the table."""
    relation_class.database = self.database
    relation_class._connection = self.connection
    relation_class._heading = Heading()

    instance = relation_class()
    is_declared = instance.is_declared
    if not is_declared:
        if not self.create_tables or assert_declared:
            raise DataJointError('Table not declared %s' % instance.table_name)
        instance.declare(context)
    is_declared = is_declared or instance.is_declared

    # fill lookup tables from their `contents` attribute when new rows exist
    if isinstance(instance, Lookup) and hasattr(instance, 'contents') and is_declared:
        contents = list(instance.contents)
        if len(contents) > len(instance):
            if instance.heading.has_autoincrement:
                warnings.warn(
                    'Contents has changed but cannot be inserted because {table} has autoincrement.'.format(
                        table=instance.__class__.__name__))
            else:
                instance.insert(contents, skip_duplicates=True)
assign schema properties to the relation class and declare the table
49,371
def declare(self, context=None):
    """Use self.definition to declare the table in the schema."""
    try:
        sql, uses_external = declare(self.full_table_name, self.definition, context)
        if uses_external:
            # splice in the external table's name where the template expects it
            sql = sql.format(external_table=self.external_table.full_table_name)
        self.connection.query(sql)
    except pymysql.OperationalError as error:
        # missing CREATE privileges are logged, everything else propagates
        if error.args[0] == server_error_codes['command denied']:
            logger.warning(error.args[1])
        else:
            raise
    else:
        self._log('Declared ' + self.full_table_name)
Use self . definition to declare the table in the schema .
49,372
def delete_quick(self, get_count=False):
    """Deletes the table contents without cascading and without a user
    prompt. Fails if populated dependent tables exist.
    """
    query = 'DELETE FROM ' + self.full_table_name + self.where_clause
    self.connection.query(query)
    if get_count:
        count = self.connection.query("SELECT ROW_COUNT()").fetchone()[0]
    else:
        count = None
    self._log(query[:255])
    return count
Deletes the table without cascading and without user prompt . If this table has populated dependent tables this will fail .
49,373
def _update(self, attrname, value=None):
    """
    Update a single attribute value in one existing tuple.

    This is not a datajoyous operation and should not be used routinely:
    relational databases maintain referential integrity at the tuple level,
    so UPDATE can violate it. The recommended way to change data is to
    delete the entire tuple and insert the updated tuple.

    :param attrname: name of the attribute to update (must not be a key attribute)
    :param value: new value; None (or NaN for numerics) stores SQL NULL
    :raises DataJointError: if the restriction does not select exactly one
        tuple, the attribute name is invalid, or it is part of the primary key
    """
    if len(self) != 1:
        raise DataJointError('Update is only allowed on one tuple at a time')
    if attrname not in self.heading:
        raise DataJointError('Invalid attribute name')
    if attrname in self.heading.primary_key:
        raise DataJointError('Cannot update a key value.')
    attr = self.heading[attrname]
    if attr.is_blob:
        value = pack(value)
        placeholder = '%s'
    elif attr.numeric:
        # fix: np.float was removed in NumPy 1.24; the builtin float is equivalent here
        if value is None or np.isnan(float(value)):
            placeholder = 'NULL'
            value = None
        else:
            placeholder = '%s'
            value = str(int(value) if isinstance(value, bool) else value)
    else:
        placeholder = '%s'
    command = "UPDATE {full_table_name} SET `{attrname}`={placeholder} {where_clause}".format(
        full_table_name=self.from_clause,
        attrname=attrname,
        placeholder=placeholder,
        where_clause=self.where_clause)
    self.connection.query(command, args=(value,) if value is not None else ())
Updates a field in an existing tuple . This is not a datajoyous operation and should not be used routinely . Relational databases maintain referential integrity at the level of a tuple . Therefore the UPDATE operator can violate referential integrity . The datajoyous way to update information is to delete the entire tuple and insert the entire updated tuple .
49,374
def where_clause(self):
    """Return the SQL WHERE clause implied by self.restriction ('' when unrestricted)."""
    condition = self._make_condition(self.restriction)
    if condition is True:
        # no restriction applies
        return ''
    return ' WHERE %s' % condition
convert self . restriction to the SQL WHERE clause
49,375
def preview(self, limit=None, width=None):
    """
    Return a string preview of the query contents, formatted as an ASCII table.

    :param limit: maximum number of tuples shown (defaults to config['display.limit'])
    :param width: maximum column width in characters (defaults to config['display.width'])
    :return: multi-line string; blob attributes display as '=BLOB='
    """
    heading = self.heading
    rel = self.proj(*heading.non_blobs)  # blobs are not fetched for display
    if limit is None:
        limit = config['display.limit']
    if width is None:
        width = config['display.width']
    # fetch one extra row to detect whether there are more rows than displayed
    tuples = rel.fetch(limit=limit + 1, format="array")
    has_more = len(tuples) > limit
    tuples = tuples[:limit]
    columns = heading.names
    # column width: widest of the name and its values, padded, capped at `width`
    widths = {f: min(max([len(f)] + [len(str(e)) for e in tuples[f]] if f in tuples.dtype.names else [len('=BLOB=')]) + 4, width) for f in columns}
    templates = {f: '%%-%d.%ds' % (widths[f], widths[f]) for f in columns}
    return (
        # header row: primary-key attributes are marked with '*'
        ' '.join([templates[f] % ('*' + f if f in rel.primary_key else f) for f in columns]) +
        '\n' + ' '.join(['+' + '-' * (widths[column] - 2) + '+' for column in columns]) + '\n' +
        '\n'.join(' '.join(templates[f] % (tup[f] if f in tup.dtype.names else '=BLOB=') for f in columns)
                  for tup in tuples) +
        ('\n ...\n' if has_more else '\n') +
        (' (Total: %d)\n' % len(rel) if config['display.show_tuple_count'] else ''))
returns a preview of the contents of the query .
49,376
def make_argument_subquery(arg):
    """
    Wrap a join argument in a subquery when required.

    Aggregations/projections and restricted expressions must be wrapped so
    their renames and restrictions are resolved before the join.
    """
    needs_wrapping = isinstance(arg, (GroupBy, Projection)) or arg.restriction
    if needs_wrapping:
        return Subquery.create(arg)
    return arg
Decide when a Join argument needs to be wrapped in a subquery
49,377
def _need_subquery ( arg , attributes , named_attributes ) : if arg . heading . expressions or arg . distinct : return True restricting_attributes = arg . attributes_in_restriction ( ) return ( not restricting_attributes . issubset ( attributes ) or any ( v . strip ( ) in restricting_attributes for v in named_attributes . values ( ) ) )
Decide whether the projection argument needs to be wrapped in a subquery
49,378
def create(cls, arg):
    """
    Construct a subquery wrapping the given query expression.

    The new object shares arg's connection and receives a heading in which
    all SQL expressions are resolved inside the subquery.
    """
    wrapped = cls()
    wrapped._connection = arg.connection
    wrapped._heading = arg.heading.make_subquery_heading()
    wrapped._arg = arg
    return wrapped
construct a subquery from arg
49,379
def user_choice(prompt, choices=("yes", "no"), default=None):
    """
    Prompt the user until one of the allowed choices is entered.

    :param prompt: text shown to the user
    :param choices: allowed responses (lower-case)
    :param default: response used on empty input; displayed capitalized
    :return: the selected response, lower-cased
    """
    assert default is None or default in choices
    # the default choice is shown with a capital letter
    displayed = ', '.join(c.title() if c == default else c for c in choices)
    answer = None
    while answer not in choices:
        raw = input(prompt + ' [' + displayed + ']: ')
        answer = raw.lower() if raw else default
    return answer
Prompts the user for confirmation . The default value if any is capitalized .
49,380
def to_dicts(recarray):
    """Yield each record of a numpy record array as a plain dict keyed by field name."""
    field_names = recarray.dtype.names
    for record in recarray:
        yield dict(zip(field_names, record.tolist()))
convert a record array to a sequence of dictionaries
49,381
def keys(self, **kwargs):
    """
    DEPRECATED iterator over primary keys as a sequence of dicts.

    Use `rel.fetch("KEY")` or `rel.fetch(dj.key)` for the equivalent result.
    """
    warnings.warn('Use of `rel.fetch.keys()` notation is deprecated. '
                  'Please use `rel.fetch("KEY")` or `rel.fetch(dj.key)` for equivalent result', stacklevel=2)
    # project down to the primary key and fetch as dictionaries
    yield from self._expression.proj().fetch(as_dict=True, **kwargs)
DEPRECATED Iterator that returns primary keys as a sequence of dicts .
49,382
def key_hash(key):
    """
    Compute a 32-character MD5 hex digest of a primary key, used for job lookup.

    Only the values participate in the hash, concatenated in key-sorted order.
    """
    digest = hashlib.md5()
    for _, attribute_value in sorted(key.items()):
        digest.update(str(attribute_value).encode())
    return digest.hexdigest()
32 - character hexadecimal hash used for lookup of primary keys of jobs
49,383
def conn(host=None, user=None, password=None, init_fun=None, reset=False):
    """
    Return a persistent connection object shared by multiple modules.

    A new connection is established on first use or when reset=True. Missing
    parameters fall back to dj config (dj_local_conf.json); a missing user or
    password is prompted for interactively.

    :param host: database host (defaults to config['database.host'])
    :param user: database user (prompted for when unconfigured)
    :param password: database password (prompted for when unconfigured)
    :param init_fun: connection initialization function (defaults to config)
    :param reset: if True, replace any existing shared connection
    """
    if not hasattr(conn, 'connection') or reset:
        # the shared connection is cached as an attribute on this function
        host = host if host is not None else config['database.host']
        user = user if user is not None else config['database.user']
        password = password if password is not None else config['database.password']
        if user is None:
            user = input("Please enter DataJoint username: ")
        if password is None:
            password = getpass(prompt="Please enter DataJoint password: ")
        init_fun = init_fun if init_fun is not None else config['connection.init_function']
        conn.connection = Connection(host, user, password, init_fun)
    return conn.connection
Returns a persistent connection object to be shared by multiple modules . If the connection is not yet established or reset = True a new connection is set up . If connection information is not provided it is taken from config which takes the information from dj_local_conf . json . If the password is not specified in that file datajoint prompts for the password .
49,384
def connect(self):
    """Connect to the database server with strict SQL modes and autocommit enabled."""
    with warnings.catch_warnings():
        # the client library warns about deprecated server features; silence them
        warnings.filterwarnings('ignore', '.*deprecated.*')
        self._conn = client.connect(
            init_command=self.init_fun,
            # strict modes make the server reject invalid data instead of coercing it
            sql_mode="NO_ZERO_DATE,NO_ZERO_IN_DATE,ERROR_FOR_DIVISION_BY_ZERO,"
                     "STRICT_ALL_TABLES,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION",
            charset=config['connection.charset'],
            **self.conn_info)
    self._conn.autocommit(True)
Connects to the database server .
49,385
def start_transaction(self):
    """
    Start a transaction with a consistent snapshot.

    :raises DataJointError: if a transaction is already in progress
        (nested transactions are not supported)
    """
    if self.in_transaction:
        raise DataJointError("Nested connections are not supported.")
    self.query('START TRANSACTION WITH CONSISTENT SNAPSHOT')
    self._in_transaction = True
    logger.info("Transaction started")
Starts a transaction .
49,386
def save_global(self, verbose=False):
    """Save the current settings to the global config file in the user's home directory."""
    self.save(os.path.expanduser(os.path.join('~', GLOBALCONFIG)), verbose)
saves the settings in the global config file
49,387
def todict(self):
    """Convert this namedtuple into an OrderedDict mapping field names to values."""
    pairs = ((field, self[index]) for index, field in enumerate(self._fields))
    return OrderedDict(pairs)
Convert namedtuple to dict .
49,388
def as_dtype(self):
    """Represent the heading as a numpy structured dtype with one field per attribute."""
    field_formats = [attr.dtype for attr in self.attributes.values()]
    return np.dtype(dict(names=self.names, formats=field_formats))
represent the heading as a numpy dtype
49,389
def as_sql(self):
    """Represent the heading as a comma-separated SQL field list;
    computed attributes render as 'expr as `name`'."""
    def field_sql(name):
        expression = self.attributes[name].sql_expression
        if expression is None:
            return '`%s`' % name
        return '%s as `%s`' % (expression, name)
    return ','.join(field_sql(name) for name in self.names)
represent heading as SQL field list
49,390
def join(self, other):
    """
    Join two headings into a new one.

    Assumes self and other share no common dependent (non-key) attributes.
    Attribute order in the result: self's primary key, then other's
    additional primary-key attributes, then self's dependent attributes not
    in other's key, then other's dependent attributes not in self's key.
    """
    return Heading(
        [self.attributes[name].todict() for name in self.primary_key] +
        [other.attributes[name].todict() for name in other.primary_key if name not in self.primary_key] +
        [self.attributes[name].todict() for name in self.dependent_attributes if name not in other.primary_key] +
        [other.attributes[name].todict() for name in other.dependent_attributes if name not in self.primary_key])
Join two headings into a new one . It assumes that self and other are headings that share no common dependent attributes .
49,391
def make_subquery_heading(self):
    """
    Return a copy of this heading with all sql_expressions removed.

    Subqueries resolve the expressions internally, so the enclosing query
    addresses every attribute by plain name.
    """
    stripped = [dict(attr.todict(), sql_expression=None) for attr in self.attributes.values()]
    return Heading(stripped)
Create a new heading with removed attribute sql_expressions . Used by subqueries which resolve the sql_expressions .
49,392
def squeeze(self, array):
    """
    Simplify `array` when squeezing is enabled: drop all singleton dimensions
    and convert a resulting zero-dimensional array into an array scalar.
    The input array is not modified; a squeezed copy is returned.
    """
    if not self._squeeze:
        return array
    squeezed = array.copy().squeeze()
    if squeezed.ndim == 0:
        # unwrap a 0-d array into a scalar
        squeezed = squeezed[()]
    return squeezed
Simplify the given array as much as possible - squeeze out all singleton dimensions and also convert a zero dimensional array into array scalar
49,393
def read_string(self, advance=True):
    """
    Read a string terminated by a null byte from the current position.

    :param advance: if True, move the read position past the terminating null
    :return: ASCII-decoded string, excluding the terminating null byte
    """
    terminator = self._blob.find(b'\0', self.pos)
    assert terminator >= self._pos
    raw = self._blob[self._pos:terminator]
    if advance:
        self._pos = terminator + 1
    return raw.decode('ascii')
Read a string terminated by null byte \ 0 . The returned string object is ASCII decoded and will not include the terminating null byte .
49,394
def read_value(self, dtype='uint64', count=1, advance=True):
    """
    Read one or more scalars of the given dtype from the current position.

    :param dtype: numpy dtype of the values to read
    :param count: number of scalars to read
    :param advance: if True, move the read position past the bytes consumed
    :return: a scalar when count == 1, otherwise a numpy array
    """
    values = np.frombuffer(self._blob, dtype=dtype, count=count, offset=self.pos)
    if advance:
        self._pos += values.dtype.itemsize * values.size
    return values[0] if count == 1 else values
Read one or more scalars of the indicated dtype . Count specifies the number of scalars to be read in .
49,395
def put(self, store, obj):
    """
    Serialize obj into the external store and record its hash in this table.

    :param store: external store name ('external' or 'external-<name>')
    :param obj: object to store, packed with dj blob serialization
    :return: the blob hash (content hash plus store suffix) used as the key
    :raises DataJointError: if the store protocol is unknown
    """
    spec = self._get_store_spec(store)
    blob = pack(obj)
    # the store suffix is appended to the content hash to identify the store
    blob_hash = long_hash(blob) + store[len('external-'):]
    if spec['protocol'] == 'file':
        folder = os.path.join(spec['location'], self.database)
        full_path = os.path.join(folder, blob_hash)
        if not os.path.isfile(full_path):
            try:
                safe_write(full_path, blob)
            except FileNotFoundError:
                # the folder does not exist yet -- create it and retry
                os.makedirs(folder)
                safe_write(full_path, blob)
    elif spec['protocol'] == 's3':
        S3Folder(database=self.database, **spec).put(blob_hash, blob)
    else:
        raise DataJointError('Unknown external storage protocol {protocol} for {store}'.format(
            store=store, protocol=spec['protocol']))
    # record tracking info; refresh the timestamp if the hash is already tracked
    self.connection.query(
        "INSERT INTO {tab} (hash, size) VALUES ('{hash}', {size}) "
        "ON DUPLICATE KEY UPDATE timestamp=CURRENT_TIMESTAMP".format(
            tab=self.full_table_name, hash=blob_hash, size=len(blob)))
    return blob_hash
put an object in external store
49,396
def get(self, blob_hash):
    """
    Retrieve and unpack the object with the given hash from the external store.

    Checks the local cache folder (config['cache']) first and populates it on
    a miss. Does not check whether the hash is tracked in this table.

    :param blob_hash: hash identifying the blob; None returns None
    :raises DataJointError: if the blob is inaccessible or the store is misconfigured
    """
    if blob_hash is None:
        return None
    # the store name is encoded as a suffix of the hash
    store = blob_hash[STORE_HASH_LENGTH:]
    store = 'external' + ('-' if store else '') + store
    cache_folder = config.get('cache', None)
    blob = None
    if cache_folder:
        try:
            with open(os.path.join(cache_folder, blob_hash), 'rb') as f:
                blob = f.read()
        except FileNotFoundError:
            pass  # cache miss -- fall through to the store
    if blob is None:
        spec = self._get_store_spec(store)
        if spec['protocol'] == 'file':
            full_path = os.path.join(spec['location'], self.database, blob_hash)
            try:
                with open(full_path, 'rb') as f:
                    blob = f.read()
            except FileNotFoundError:
                raise DataJointError('Lost access to external blob %s.' % full_path) from None
        elif spec['protocol'] == 's3':
            try:
                blob = S3Folder(database=self.database, **spec).get(blob_hash)
            except TypeError:
                # missing keyword arguments in the store spec
                raise DataJointError('External store {store} configuration is incomplete.'.format(store=store))
        else:
            raise DataJointError('Unknown external storage protocol "%s"' % spec['protocol'])
        if cache_folder:
            # populate the cache for subsequent reads
            if not os.path.exists(cache_folder):
                os.makedirs(cache_folder)
            safe_write(os.path.join(cache_folder, blob_hash), blob)
    return unpack(blob)
get an object from external store . Does not need to check whether it s in the table .
49,397
def delete_garbage(self):
    """
    Delete tracked blobs that are no longer referenced by any referencing table.
    This operation is safe to perform at any time.
    """
    self.connection.query(
        "DELETE FROM `{db}`.`{tab}` WHERE ".format(tab=self.table_name, db=self.database) +
        " AND ".join(
            'hash NOT IN (SELECT {column_name} FROM {referencing_table})'.format(**ref)
            for ref in self.references) or "TRUE")  # with no references, everything is garbage
    print('Deleted %d items' % self.connection.query("SELECT ROW_COUNT()").fetchone()[0])
Delete items that are no longer referenced . This operation is safe to perform at any time .
49,398
def clean_store(self, store, display_progress=True):
    """
    Remove blobs from the external storage repository that are not tracked in
    this table. Perform after delete_garbage, during low-usage periods, to
    reduce the risk of data loss.

    :param store: external store name
    :param display_progress: if True, show a tqdm progress bar (file protocol)
    :raises DataJointError: if the s3 store configuration is incomplete
    """
    spec = self._get_store_spec(store)
    progress = tqdm if display_progress else lambda x: x
    if spec['protocol'] == 'file':
        folder = os.path.join(spec['location'], self.database)
        # anything on disk that is not tracked in the table is unused
        delete_list = set(os.listdir(folder)).difference(self.fetch('hash'))
        print('Deleting %d unused items from %s' % (len(delete_list), folder), flush=True)
        for f in progress(delete_list):
            os.remove(os.path.join(folder, f))
    elif spec['protocol'] == 's3':
        try:
            S3Folder(database=self.database, **spec).clean(self.fetch('hash'))
        except TypeError:
            # missing keyword arguments in the store spec
            raise DataJointError('External store {store} configuration is incomplete.'.format(store=store))
Clean unused data in an external storage repository from unused blobs . This must be performed after delete_garbage during low - usage periods to reduce risks of data loss .
49,399
def is_connection_error(e):
    """Return True when exception `e` indicates a lost or failed server connection."""
    interface_lost = isinstance(e, err.InterfaceError) and e.args[0] == "(0, '')"
    operational_lost = isinstance(e, err.OperationalError) and e.args[0] in operation_error_codes.values()
    return interface_lost or operational_lost
Checks if error e pertains to a connection issue