idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
8,700
def create(self):
    """Create a snap7 client object and keep its native handle."""
    logger.info("creating snap7 client")
    # The native call returns an opaque pointer; declare the restype
    # before wrapping it in an S7Object.
    self.library.Cli_Create.restype = c_void_p
    self.pointer = S7Object(self.library.Cli_Create())
create a SNAP7 client .
8,701
def destroy(self):
    """Destroy the snap7 client and release its native handle."""
    logger.info("destroying snap7 client")
    if self.library:
        return self.library.Cli_Destroy(byref(self.pointer))
destroy a client .
8,702
def get_cpu_state(self):
    """Retrieve the CPU state from the client as a status string.

    Raises Snap7Exception when the reported state code is unknown.
    """
    state = c_int(0)
    self.library.Cli_GetPlcStatus(self.pointer, byref(state))
    # Unknown state codes map to None, which triggers the error below.
    status_string = cpu_statuses.get(state.value)
    if not status_string:
        raise Snap7Exception("The cpu state (%s) is invalid" % state.value)
    logger.debug("CPU state is %s" % status_string)
    return status_string
Retrieves CPU state from client
8,703
def get_cpu_info(self):
    """Retrieve CPU info from the client."""
    info = snap7.snap7types.S7CpuInfo()
    result = self.library.Cli_GetCpuInfo(self.pointer, byref(info))
    check_error(result, context="client")
    return info
Retrieves CPU info from client
8,704
def connect(self, address, rack, slot, tcpport=102):
    """Connect to an S7 server at address:tcpport, addressing rack/slot."""
    logger.info("connecting to %s:%s rack %s slot %s"
                % (address, tcpport, rack, slot))
    self.set_param(snap7.snap7types.RemotePort, tcpport)
    return self.library.Cli_ConnectTo(
        self.pointer, c_char_p(six.b(address)), c_int(rack), c_int(slot))
Connect to a S7 server .
8,705
def upload(self, block_num):
    """Upload a DB block body from the AG and return it as a bytearray."""
    logger.debug("db_upload block_num: %s" % (block_num))
    block_type = snap7.snap7types.block_types['DB']
    data = buffer_type()
    size = c_int(sizeof(data))
    result = self.library.Cli_Upload(self.pointer, block_type, block_num,
                                     byref(data), byref(size))
    check_error(result, context="client")
    logger.info('received %s bytes' % size)
    return bytearray(data)
Uploads a block body from AG
8,706
def db_get(self, db_number):
    """Upload DB `db_number` from the AG and return it as a bytearray."""
    logger.debug("db_get db_number: %s" % db_number)
    data = buffer_type()
    result = self.library.Cli_DBGet(
        self.pointer, db_number, byref(data), byref(c_int(buffer_size)))
    check_error(result, context="client")
    return bytearray(data)
Uploads a DB from AG .
8,707
def read_area(self, area, dbnumber, start, size):
    """Read `size` bytes from a PLC memory area.

    This is the main read entry point; it covers DBs, inputs, outputs,
    merkers, timers and counters depending on `area`.
    """
    assert area in snap7.snap7types.areas.values()
    wordlen = snap7.snap7types.S7WLByte
    type_ = snap7.snap7types.wordlen_to_ctypes[wordlen]
    logger.debug("reading area: %s dbnumber: %s start: %s: amount %s: "
                 "wordlen: %s" % (area, dbnumber, start, size, wordlen))
    data = (type_ * size)()
    result = self.library.Cli_ReadArea(self.pointer, area, dbnumber, start,
                                       size, wordlen, byref(data))
    check_error(result, context="client")
    return bytearray(data)
This is the main function to read data from a PLC . With it you can read DB Inputs Outputs Merkers Timers and Counters .
8,708
def read_multi_vars(self, items):
    """Read multiple variables from the PLC in one call.

    Returns the raw result code together with the (filled) items.
    """
    result = self.library.Cli_ReadMultiVars(self.pointer, byref(items),
                                            c_int32(len(items)))
    check_error(result, context="client")
    return result, items
This function read multiple variables from the PLC .
8,709
def list_blocks(self):
    """Return the AG block counts grouped by block type."""
    logger.debug("listing blocks")
    blocks = BlocksList()
    result = self.library.Cli_ListBlocks(self.pointer, byref(blocks))
    check_error(result, context="client")
    logger.debug("blocks: %s" % blocks)
    return blocks
Returns the AG blocks amount divided by type .
8,710
def list_blocks_of_type(self, blocktype, size):
    """Return the AG list of blocks of the given type (at most `size`).

    Raises Snap7Exception for an unknown block type name.
    """
    blocktype = snap7.snap7types.block_types.get(blocktype)
    if not blocktype:
        raise Snap7Exception("The blocktype parameter was invalid")
    logger.debug("listing blocks of type: %s size: %s" % (blocktype, size))
    if size == 0:
        return 0
    data = (c_uint16 * size)()
    count = c_int(size)
    result = self.library.Cli_ListBlocksOfType(
        self.pointer, blocktype, byref(data), byref(count))
    logger.debug("number of items found: %s" % count)
    check_error(result, context="client")
    return data
This function returns the AG list of a specified block type .
8,711
def get_block_info(self, blocktype, db_number):
    """Return block information for the specified block.

    Raises Snap7Exception for an unknown block type name.
    """
    blocktype = snap7.snap7types.block_types.get(blocktype)
    if not blocktype:
        raise Snap7Exception("The blocktype parameter was invalid")
    logger.debug("retrieving block info for block %s of type %s"
                 % (db_number, blocktype))
    data = TS7BlockInfo()
    result = self.library.Cli_GetAgBlockInfo(self.pointer, blocktype,
                                             db_number, byref(data))
    check_error(result, context="client")
    return data
Returns the block information for the specified block .
8,712
def set_session_password(self, password):
    """Send the password (max 8 characters) to the PLC to meet its
    security level."""
    assert len(password) <= 8, 'maximum password length is 8'
    return self.library.Cli_SetSessionPassword(
        self.pointer, c_char_p(six.b(password)))
Send the password to the PLC to meet its security level .
8,713
def set_connection_type(self, connection_type):
    """Set the connection resource type, i.e. the way in which the
    client connects to the PLC."""
    result = self.library.Cli_SetConnectionType(
        self.pointer, c_uint16(connection_type))
    if result != 0:
        raise Snap7Exception("The parameter was invalid")
Sets the connection resource type i . e the way in which the Clients connects to a PLC .
8,714
def get_connected(self):
    """Return True when the client is connected."""
    connected = c_int32()
    result = self.library.Cli_GetConnected(self.pointer, byref(connected))
    check_error(result, context="client")
    # A ctypes integer is falsy when its value is zero.
    return bool(connected)
Returns the connection status
8,715
def as_ab_write(self, start, data):
    """Asynchronous counterpart of Cli_ABWrite."""
    wordlen = snap7.snap7types.S7WLByte
    type_ = snap7.snap7types.wordlen_to_ctypes[wordlen]
    size = len(data)
    # Copy the payload into a ctypes array the C library can consume.
    cdata = (type_ * size).from_buffer_copy(data)
    logger.debug("ab write: start: %s: size: %s: " % (start, size))
    return self.library.Cli_AsABWrite(self.pointer, start, size, byref(cdata))
This is the asynchronous counterpart of Cli_ABWrite .
8,716
def as_db_get(self, db_number):
    """Asynchronous counterpart of Cli_DBGet."""
    logger.debug("db_get db_number: %s" % db_number)
    data = buffer_type()
    result = self.library.Cli_AsDBGet(self.pointer, db_number,
                                      byref(data), byref(c_int(buffer_size)))
    check_error(result, context="client")
    return bytearray(data)
This is the asynchronous counterpart of Cli_DBGet .
8,717
def get_param(self, number):
    """Read an internal client object parameter and return its value."""
    logger.debug("retreiving param number %s" % number)
    # param_types maps the parameter id to the matching ctypes type.
    value = param_types[number]()
    code = self.library.Cli_GetParam(self.pointer, c_int(number), byref(value))
    check_error(code)
    return value.value
Reads an internal Client object parameter .
8,718
def get_pdu_length(self):
    """Return the negotiated PDU length."""
    logger.info("getting PDU length")
    requested_ = c_uint16()
    negotiated_ = c_uint16()
    code = self.library.Cli_GetPduLength(self.pointer, byref(requested_),
                                         byref(negotiated_))
    check_error(code)
    return negotiated_.value
Returns info about the PDU length .
8,719
def get_plc_datetime(self):
    """Read the PLC clock and return it as a datetime."""
    buffer = (c_int32 * 9)()
    result = self.library.Cli_GetPlcDateTime(self.pointer, byref(buffer))
    check_error(result, context="client")
    # The buffer follows the C `struct tm` layout: sec, min, hour, mday,
    # month (0-based), year (offset from 1900).
    return datetime(
        year=buffer[5] + 1900,
        month=buffer[4] + 1,
        day=buffer[3],
        hour=buffer[2],
        minute=buffer[1],
        second=buffer[0],
    )
Get date and time from PLC .
8,720
def set_plc_datetime(self, dt):
    """Write the datetime `dt` into the PLC clock."""
    buffer = (c_int32 * 9)()
    # The buffer follows the C `struct tm` layout: sec, min, hour, mday,
    # month (0-based), year (offset from 1900).
    buffer[0] = dt.second
    buffer[1] = dt.minute
    buffer[2] = dt.hour
    buffer[3] = dt.day
    buffer[4] = dt.month - 1
    buffer[5] = dt.year - 1900
    return self.library.Cli_SetPlcDateTime(self.pointer, byref(buffer))
Set date and time in PLC
8,721
def get_db1():
    """Read all of DB1 into one bytearray and print it row by row.

    Assumes 400 rows of 130 raw bytes each -- TODO confirm against the
    actual DB1 layout.
    """
    all_data = client.db_get(1)
    row_size = 130
    for row in range(400):
        start = row * row_size
        util.print_row(all_data[start:start + row_size])
Here we read out all data of DB1; it is put in the all_data variable, which is a bytearray containing the raw PLC data
8,722
def get_db_row(db, start, size):
    """Read `size` bytes from DB `db` starting at offset `start`."""
    type_ = snap7.snap7types.wordlen_to_ctypes[snap7.snap7types.S7WLByte]
    return client.db_read(db, start, type_, size)
Here you see an example of reading out a part of a DB
8,723
def set_db_row(db, start, size, _bytearray):
    """Replace `size` bytes of DB `db` at offset `start` with new data."""
    client.db_write(db, start, size, _bytearray)
Here we replace a piece of data in a db block with new data
8,724
def set_row(x, row):
    """Write `row` into slot `x` of DB 1.

    The DB starts with a 4-byte header and each row is 126 bytes, so the
    start offset is 4 + x * 126.
    """
    row_size = 126
    set_db_row(1, 4 + x * row_size, row_size, row._bytearray)
We use DB 1 with offset 4 and replace row x. To find the correct start index we multiply row_size by x, and we put the byte-array representation of row in the PLC
8,725
def get_bool(_bytearray, byte_index, bool_index):
    """Return the boolean stored at bit `bool_index` of byte `byte_index`."""
    mask = 1 << bool_index
    return (_bytearray[byte_index] & mask) == mask
Get the boolean value from location in bytearray
8,726
def set_bool(_bytearray, byte_index, bool_index, value):
    """Set bit `bool_index` of byte `byte_index` to `value`.

    `value` must be a boolean (or 0/1). Uses bitwise set/clear, which is
    idempotent: the previous add/subtract implementation only stayed
    correct thanks to a guard comparing the current bit, and would
    corrupt neighbouring bits if that guard were ever skipped.
    """
    assert value in [0, 1, True, False]
    mask = 1 << bool_index
    if value:
        _bytearray[byte_index] |= mask
    else:
        _bytearray[byte_index] &= ~mask
Set boolean value on location in bytearray
8,727
def set_int(bytearray_, byte_index, _int):
    """Store `_int` as a big-endian signed 16-bit value at `byte_index`.

    Returns the (mutated) buffer for convenience.
    """
    bytearray_[byte_index:byte_index + 2] = struct.pack('>h', int(_int))
    return bytearray_
Set value in bytearray to int
8,728
def get_int(bytearray_, byte_index):
    """Return the big-endian signed 16-bit value stored at `byte_index`."""
    raw = bytes(bytearray_[byte_index:byte_index + 2])
    return struct.unpack('>h', raw)[0]
Get int value from bytearray .
8,729
def set_real(_bytearray, byte_index, real):
    """Store `real` as a big-endian 32-bit float at `byte_index`."""
    packed = struct.pack('>f', float(real))
    _bytearray[byte_index:byte_index + 4] = packed
Set Real value
8,730
def get_real(_bytearray, byte_index):
    """Return the big-endian 32-bit float stored at `byte_index`."""
    raw = bytes(_bytearray[byte_index:byte_index + 4])
    return struct.unpack('>f', raw)[0]
Get real value . create float from 4 bytes
8,731
def set_string(_bytearray, byte_index, value, max_size):
    """Write `value` into an S7 STRING slot at `byte_index`.

    Layout: `_bytearray[byte_index]` holds the declared maximum length
    (already present in the buffer), `byte_index + 1` the actual length,
    followed by the characters, space-padded up to the declared maximum.

    Raises ValueError when `value` is longer than `max_size`.
    """
    # Accept unicode on Python 2 without depending on six.
    try:
        string_types = (str, unicode)  # noqa: F821 -- Python 2 only
    except NameError:
        string_types = (str,)
    assert isinstance(value, string_types)
    size = len(value)
    if size > max_size:
        raise ValueError('size %s > max_size %s %s' % (size, max_size, value))
    _bytearray[byte_index + 1] = size
    for i, c in enumerate(value):
        _bytearray[byte_index + 2 + i] = ord(c)
    # Space-pad the remainder of the declared maximum length.
    # Bug fix: pad starting at len(value); the old `i + 1` (with i
    # initialized to 0) skipped the first pad byte for an empty string.
    for r in range(size, _bytearray[byte_index]):
        _bytearray[byte_index + 2 + r] = ord(' ')
Set string value
8,732
def get_string(_bytearray, byte_index, max_size):
    """Parse an S7 STRING from `_bytearray`.

    The actual length is read from `byte_index + 1`; characters start at
    `byte_index + 2`. If the stored length exceeds `max_size`, the read
    is clamped (and the mismatch is logged).
    """
    size = _bytearray[byte_index + 1]
    if max_size < size:
        logger.error("the string is to big for the size encountered in specification")
        logger.error("WRONG SIZED STRING ENCOUNTERED")
        size = max_size
    chars = _bytearray[byte_index + 2:byte_index + 2 + size]
    return "".join(chr(b) for b in chars)
parse string from bytearray
8,733
def parse_specification(db_specification):
    """Parse a DB byte-layout specification into an OrderedDict.

    Every non-empty line that does not start with '#' must read
    `index name type`; trailing '#' comments are stripped. Returns
    {name: (index, type)} in declaration order.
    """
    parsed = OrderedDict()
    for line in db_specification.split('\n'):
        if not line or line.startswith('#'):
            continue
        index, var_name, _type = line.split('#')[0].split()
        parsed[var_name] = (index, _type)
    return parsed
Create a db specification derived from a dataview of a db in which the byte layout is specified
8,734
def get_bytearray(self):
    """Return the backing bytearray, following a DB parent if present."""
    if isinstance(self._bytearray, DB):
        return self._bytearray._bytearray
    return self._bytearray
return bytearray from self or DB parent
8,735
def export(self):
    """Return a dict mapping every specified field name to its value."""
    return {key: self[key] for key in self._specification}
export dictionary with values
8,736
def write(self, client):
    """Write this row's current data back to its DB in the PLC."""
    assert isinstance(self._bytearray, DB)
    assert self.row_size >= 0
    db_nr = self._bytearray.db_number
    offset = self.db_offset
    data = self.get_bytearray()[offset:offset + self.row_size]
    db_offset = self.db_offset
    if self.row_offset:
        # Skip the leading part of the row that must not be written.
        data = data[self.row_offset:]
        db_offset += self.row_offset
    client.db_write(db_nr, db_offset, data)
Write current data to db in plc
8,737
def read(self, client):
    """Refresh this row's bytes from the PLC into the backing bytearray."""
    assert isinstance(self._bytearray, DB)
    assert self.row_size >= 0
    db_nr = self._bytearray.db_number
    fresh = client.db_read(db_nr, self.db_offset, self.row_size)
    data = self.get_bytearray()
    # Copy byte-by-byte so the shared parent bytearray is updated in place.
    for i, b in enumerate(fresh):
        data[i + self.db_offset] = b
read current data of db row from plc
8,738
def disconnect(self):
    """Disconnect the snap7 client."""
    logger.info("disconnecting snap7 client")
    result = self.library.Cli_Disconnect(self.pointer)
    check_error(result, context="client")
    return result
disconnect a client .
8,739
def check_as_b_send_completion(self):
    """Check whether the current asynchronous send job has completed.

    Returns (status string, native op result). Raises Snap7Exception
    when the library reports an invalid client parameter (-2).
    """
    op_result = ctypes.c_int32()
    result = self.library.Par_CheckAsBSendCompletion(
        self.pointer, ctypes.byref(op_result))
    return_values = {
        0: "job complete",
        1: "job in progress",
        -2: "invalid handled supplied",
    }
    if result == -2:
        raise Snap7Exception("The Client parameter was invalid")
    return return_values[result], op_result
Checks if the current asynchronous send job was completed and terminates immediately .
8,740
def create(self, active=False):
    """Create a Partner and store its handle for all later calls."""
    self.library.Par_Create.restype = snap7.snap7types.S7Object
    self.pointer = snap7.snap7types.S7Object(
        self.library.Par_Create(int(active)))
Creates a Partner and returns its handle which is the reference that you have to use every time you refer to that Partner .
8,741
def destroy(self):
    """Destroy the Partner: it is stopped, its clients disconnected and
    its shared memory blocks released before destruction."""
    if self.library:
        return self.library.Par_Destroy(ctypes.byref(self.pointer))
Destroy a Partner of given handle . Before destruction the Partner is stopped all clients disconnected and all shared memory blocks released .
8,742
def get_last_error(self):
    """Return the last job result as a ctypes int."""
    error = ctypes.c_int32()
    result = self.library.Par_GetLastError(self.pointer, ctypes.byref(error))
    check_error(result, "partner")
    return error
Returns the last job result .
8,743
def get_param(self, number):
    """Read an internal Partner object parameter and return its value."""
    logger.debug("retreiving param number %s" % number)
    # param_types maps the parameter id to the matching ctypes type.
    value = snap7.snap7types.param_types[number]()
    code = self.library.Par_GetParam(self.pointer, ctypes.c_int(number),
                                     ctypes.byref(value))
    check_error(code)
    return value.value
Reads an internal Partner object parameter .
8,744
def get_stats(self):
    """Return (sent, recv, send_errors, recv_errors) counters as ctypes ints."""
    sent = ctypes.c_uint32()
    recv = ctypes.c_uint32()
    send_errors = ctypes.c_uint32()
    recv_errors = ctypes.c_uint32()
    result = self.library.Par_GetStats(
        self.pointer,
        ctypes.byref(sent),
        ctypes.byref(recv),
        ctypes.byref(send_errors),
        ctypes.byref(recv_errors),
    )
    check_error(result, "partner")
    return sent, recv, send_errors, recv_errors
Returns some statistics .
8,745
def get_status(self):
    """Return the Partner status as a ctypes int."""
    status = ctypes.c_int32()
    result = self.library.Par_GetStatus(self.pointer, ctypes.byref(status))
    check_error(result, "partner")
    return status
Returns the Partner status .
8,746
def get_times(self):
    """Return the last send and recv job execution times in milliseconds."""
    send_time = ctypes.c_int32()
    recv_time = ctypes.c_int32()
    result = self.library.Par_GetTimes(
        self.pointer, ctypes.byref(send_time), ctypes.byref(recv_time))
    check_error(result, "partner")
    return send_time, recv_time
Returns the last send and recv jobs execution time in milliseconds .
8,747
def start_to(self, local_ip, remote_ip, local_tsap, remote_tsap):
    """Start the Partner, binding it to the given local/remote IPv4
    addresses and TSAPs."""
    assert re.match(ipv4, local_ip), '%s is invalid ipv4' % local_ip
    assert re.match(ipv4, remote_ip), '%s is invalid ipv4' % remote_ip
    logger.info("starting partnering from %s to %s" % (local_ip, remote_ip))
    return self.library.Par_StartTo(
        self.pointer, local_ip, remote_ip,
        ctypes.c_uint16(local_tsap), ctypes.c_uint16(remote_tsap))
Starts the Partner and binds it to the specified IP address and the IsoTCP port .
8,748
def wait_socket(_socket, session, timeout=1):
    """Wait with select() for the direction(s) the libssh2 session is
    blocked on; helper for testing non-blocking mode.

    Returns 0 immediately when the session is not blocked.
    """
    directions = session.block_directions()
    if directions == 0:
        return 0
    readfds = [_socket] if directions & LIBSSH2_SESSION_BLOCK_INBOUND else ()
    writefds = [_socket] if directions & LIBSSH2_SESSION_BLOCK_OUTBOUND else ()
    return select(readfds, writefds, (), timeout)
Helper function for testing non - blocking mode .
8,749
def time_slices_to_layers(graphs, interslice_weight=1, slice_attr='slice',
                          vertex_id_attr='id', edge_type_attr='type',
                          weight_attr='weight'):
    """Convert a sequence of time-slice graphs to layer graphs.

    Consecutive slices are coupled in a path graph (a branching-factor-1
    tree) whose edges carry `interslice_weight`.
    """
    G_slices = _ig.Graph.Tree(len(graphs), 1, mode=_ig.TREE_UNDIRECTED)
    G_slices.es[weight_attr] = interslice_weight
    G_slices.vs[slice_attr] = graphs
    return slices_to_layers(G_slices, slice_attr, vertex_id_attr,
                            edge_type_attr, weight_attr)
Convert time slices to layer graphs .
8,750
def FromPartition(cls, partition, **kwargs):
    """Create a new partition from an existing partition's graph and
    membership, forwarding any extra keyword arguments."""
    return cls(partition.graph, partition.membership, **kwargs)
Create a new partition from an existing partition .
8,751
def aggregate_partition(self, membership_partition=None):
    """Aggregate the graph according to the current partition.

    When `membership_partition` is given, the aggregate partition's
    membership is seeded from it; otherwise a default is provided.
    """
    partition_agg = self._FromCPartition(
        _c_leiden._MutableVertexPartition_aggregate_partition(self._partition))
    if membership_partition is not None:
        membership = partition_agg.membership
        # Each aggregate node (one per community of self) takes the
        # community its original vertices have in membership_partition.
        for v in self.graph.vs:
            membership[self.membership[v.index]] = \
                membership_partition.membership[v.index]
        partition_agg.set_membership(membership)
    return partition_agg
Aggregate the graph according to the current partition and provide a default partition for it .
8,752
def move_node(self, v, new_comm):
    """Move node `v` to community `new_comm`."""
    _c_leiden._MutableVertexPartition_move_node(self._partition, v, new_comm)
    # Keep the cached membership in sync and invalidate cached modularity.
    self._membership[v] = new_comm
    self._modularity_dirty = True
Move node v to community new_comm .
8,753
def from_coarse_partition(self, partition, coarse_node=None):
    """Update the current partition according to a coarser partition."""
    _c_leiden._MutableVertexPartition_from_coarse_partition(
        self._partition, partition.membership, coarse_node)
    self._update_internal_membership()
Update current partition according to coarser partition .
8,754
def cleanup_tmpdir ( dirname ) : if dirname is not None and os . path . exists ( dirname ) : shutil . rmtree ( dirname )
Removes the given temporary directory if it exists .
8,755
def create_dir_unless_exists ( * args ) : path = os . path . join ( * args ) if not os . path . isdir ( path ) : os . makedirs ( path )
Creates a directory unless it exists already .
8,756
def ensure_dir_does_not_exist ( * args ) : path = os . path . join ( * args ) if os . path . isdir ( path ) : shutil . rmtree ( path )
Ensures that the given directory does not exist .
8,757
def find_static_library(library_name, library_path):
    """Search `library_path` for a static library called `library_name`.

    On Unix-like platforms the common system library directories are
    appended to `library_path` first. Returns the full path of the first
    match, or None if nothing is found.

    NOTE: mutates `library_path` in place by appending the extra
    directories, matching the original behavior.
    """
    variants = ["lib{0}.a", "{0}.a", "{0}.lib", "lib{0}.lib"]
    if is_unix_like():
        extra_libdirs = [
            "/usr/local/lib64", "/usr/local/lib",
            "/usr/lib64", "/usr/lib", "/lib64", "/lib",
        ]
    else:
        extra_libdirs = []
    for path in extra_libdirs:
        if path not in library_path and os.path.isdir(path):
            library_path.append(path)
    for path in library_path:
        for variant in variants:
            candidate = os.path.join(path, variant.format(library_name))
            if os.path.isfile(candidate):
                return candidate
Given the raw name of a library in library_name tries to find a static library with this name in the given library_path . library_path is automatically extended with common library directories on Linux and Mac OS X .
8,758
def get_output(command):
    """Run `command` in a shell and return (first output line, return code)."""
    p = Popen(command, shell=True, stdin=PIPE, stdout=PIPE, stderr=PIPE)
    p.stdin.close()
    p.stderr.close()
    line = p.stdout.readline().strip()
    p.wait()
    # On Python 3 stdout yields bytes; decode them. The name check (and
    # not isinstance) keeps Python 2 str untouched, where bytes is str.
    if type(line).__name__ == "bytes":
        line = str(line, encoding="utf-8")
    return line, p.returncode
Returns the output of a command returning a single line of output .
8,759
def http_url_exists(url):
    """Return whether a HEAD request to `url` succeeds.

    The URL is considered to exist when the request completes without an
    HTTP error code.
    """
    class HEADRequest(Request):
        def get_method(self):
            return "HEAD"

    try:
        urlopen(HEADRequest(url))
        return True
    except URLError:
        return False
Returns whether the given HTTP URL exists. A URL is considered to exist if requesting it does not return an HTTP error code.
8,760
def is_unix_like(platform=None):
    """Return whether `platform` (default: sys.platform) is a Unix-like
    platform with the usual Unix filesystem layout."""
    platform = (platform or sys.platform).lower()
    return platform.startswith(("linux", "darwin", "cygwin"))
Returns whether the given platform is a Unix - like platform with the usual Unix filesystem . When the parameter is omitted it defaults to sys . platform
8,761
def preprocess_fallback_config():
    """Preprocess the fallback include/library paths for MSVC on Windows.

    Looks for an igraph MSVC build directory matching the running Python
    version and, when found and built, points the fallback globals at it.
    """
    global LIBIGRAPH_FALLBACK_INCLUDE_DIRS
    global LIBIGRAPH_FALLBACK_LIBRARY_DIRS
    global LIBIGRAPH_FALLBACK_LIBRARIES
    if os.name == 'nt' and distutils.ccompiler.get_default_compiler() == 'msvc':
        # Map the running interpreter to the matching build dir suffix.
        version = ''
        if sys.version_info >= (2, 7) and sys.version_info < (3, 0):
            version = '27'
        elif sys.version_info >= (3, 4) and sys.version_info < (3, 5):
            version = '34'
        elif sys.version_info >= (3, 5):
            version = '35'
        all_msvc_dirs = glob.glob(os.path.join(
            '..', 'igraph', 'igraph-*-msvc-py{0}'.format(version)))
        if len(all_msvc_dirs) > 0:
            if len(all_msvc_dirs) > 1:
                print("More than one MSVC build directory (igraph-*-msvc-py{0}) found!".format(version))
                print("It could happen that setup.py uses the wrong one! Please remove all but the right one!\n\n")
            msvc_builddir = all_msvc_dirs[-1]
            if not os.path.exists(os.path.join(msvc_builddir, "Release")):
                print("There is no 'Release' dir in the MSVC build directory\n(%s)" % msvc_builddir)
                print("Please build the MSVC build first!\n")
            else:
                print("Using MSVC build dir as a fallback: %s\n\n" % msvc_builddir)
                LIBIGRAPH_FALLBACK_INCLUDE_DIRS = [
                    os.path.join(msvc_builddir, "include")]
                is_64bits = sys.maxsize > 2 ** 32
                LIBIGRAPH_FALLBACK_LIBRARY_DIRS = [
                    os.path.join(msvc_builddir, "Release",
                                 "x64" if is_64bits else "win32")]
Preprocesses the fallback include and library paths depending on the platform .
8,762
def version_variants(version):
    """Return version-number variants to try when probing nightly builds.

    The given version is padded with ".0" components up to three parts;
    e.g. "0.8" yields ["0.8", "0.8.0"]. A version that already has three
    parts yields only itself (previously it was listed twice, causing a
    redundant duplicate probe).
    """
    result = [version]
    parts = version.split(".")
    while len(parts) < 3:
        parts.append("0")
    padded = ".".join(parts)
    if padded != version:
        result.append(padded)
    return result
Given an igraph version number returns a list of possible version number variants to try when looking for a suitable nightly build of the C core to download from igraph . org .
8,763
def tmpdir(self):
    """Lazily create (and memoize) the temporary directory into which
    igraph is downloaded and extracted; removed automatically at exit."""
    if self._tmpdir is None:
        self._tmpdir = tempfile.mkdtemp(prefix="igraph.")
        atexit.register(cleanup_tmpdir, self._tmpdir)
    return self._tmpdir
The temporary directory in which igraph is downloaded and extracted .
8,764
def find_first_version(self):
    """Return the first (version, url) pair from self.versions_to_try
    that exists in the nightly build repo, or (None, None)."""
    for version in self.versions_to_try:
        remote_url = self.get_download_url(version=version)
        if http_url_exists(remote_url):
            return version, remote_url
    return None, None
Finds the first version of igraph that exists in the nightly build repo from the version numbers provided in self . versions_to_try .
8,765
def has_pkgconfig(self):
    """Return (and cache) whether pkg-config exists and knows igraph."""
    if self._has_pkgconfig is None:
        if self.use_pkgconfig:
            _, exit_code = get_output("pkg-config igraph")
            self._has_pkgconfig = exit_code == 0
        else:
            self._has_pkgconfig = False
    return self._has_pkgconfig
Returns whether pkg - config is available on the current system and it knows about igraph or not .
8,766
def configure(self, ext):
    """Append this build configuration's paths and flags to `ext`."""
    ext.include_dirs += self.include_dirs
    ext.library_dirs += self.library_dirs
    ext.libraries += self.libraries
    ext.extra_compile_args += self.extra_compile_args
    ext.extra_link_args += self.extra_link_args
    ext.extra_objects += self.extra_objects
Configures the given Extension object using this build configuration .
8,767
def detect_from_pkgconfig(self):
    """Detect igraph's include/library dirs and libraries via pkg-config.

    Returns True on success, False when pkg-config is unavailable or the
    query fails.
    """
    if not buildcfg.has_pkgconfig:
        print("Cannot find the C core of igraph on this system using pkg-config.")
        return False
    cmd = "pkg-config igraph --cflags --libs"
    if self.static_extension:
        cmd += " --static"
    line, exit_code = get_output(cmd)
    if exit_code > 0 or len(line) == 0:
        return False
    opts = line.strip().split()
    # Sort the -l / -L / -I flags into the corresponding lists.
    self.libraries = [opt[2:] for opt in opts if opt.startswith("-l")]
    self.library_dirs = [opt[2:] for opt in opts if opt.startswith("-L")]
    self.include_dirs = [opt[2:] for opt in opts if opt.startswith("-I")]
    return True
Detects the igraph include directory library directory and the list of libraries to link to using pkg - config .
8,768
def print_build_info(self):
    """Print the build type, paths and flags for debugging purposes."""
    build_type = ("static extension" if self.static_extension
                  else "dynamic extension")
    print("Build type: %s" % build_type)
    for label, values in (
        ("Include path", self.include_dirs),
        ("Library path", self.library_dirs),
        ("Linked dynamic libraries", self.libraries),
        ("Linked static libraries", self.extra_objects),
        ("Extra compiler options", self.extra_compile_args),
        ("Extra linker options", self.extra_link_args),
    ):
        print("%s: %s" % (label, " ".join(values)))
Prints the include and library path being used for debugging purposes .
8,769
def process_args_from_command_line ( self ) : opts_to_remove = [ ] for idx , option in enumerate ( sys . argv ) : if not option . startswith ( "--" ) : continue if option == "--static" : opts_to_remove . append ( idx ) self . static_extension = True elif option == "--no-download" : opts_to_remove . append ( idx ) self . download_igraph_if_needed = False elif option == "--no-pkg-config" : opts_to_remove . append ( idx ) self . use_pkgconfig = False elif option == "--no-progress-bar" : opts_to_remove . append ( idx ) self . show_progress_bar = False elif option == "--no-wait" : opts_to_remove . append ( idx ) self . wait = False elif option . startswith ( "--c-core-version" ) : opts_to_remove . append ( idx ) if option == "--c-core-version" : value = sys . argv [ idx + 1 ] opts_to_remove . append ( idx + 1 ) else : value = option . split ( "=" , 1 ) [ 1 ] self . c_core_versions = [ value ] elif option . startswith ( "--c-core-url" ) : opts_to_remove . append ( idx ) if option == "--c-core-url" : value = sys . argv [ idx + 1 ] opts_to_remove . append ( idx + 1 ) else : value = option . split ( "=" , 1 ) [ 1 ] self . c_core_url = value for idx in reversed ( opts_to_remove ) : sys . argv [ idx : ( idx + 1 ) ] = [ ]
Preprocesses the command line options before they are passed to setup . py and sets up the build configuration .
8,770
def replace_static_libraries(self, exclusions=None):
    """Replace dynamic library references with full paths to their
    static archives where a static version exists on the library path."""
    # libstdc++ is always needed when linking statically.
    if "stdc++" not in self.libraries:
        self.libraries.append("stdc++")
    if exclusions is None:
        exclusions = []
    for library_name in set(self.libraries) - set(exclusions):
        static_lib = find_static_library(library_name, self.library_dirs)
        if static_lib:
            self.libraries.remove(library_name)
            self.extra_objects.append(static_lib)
Replaces references to libraries with full paths to their static versions if the static version is to be found on the library path .
8,771
def use_built_igraph(self):
    """Point the build config at an igraph C core already built in
    ./igraphcore, reading the library list from its build.cfg."""
    buildcfg.include_dirs = [os.path.join("igraphcore", "include")]
    buildcfg.library_dirs = [os.path.join("igraphcore", "lib")]
    buildcfg.static_extension = True
    buildcfg_file = os.path.join("igraphcore", "build.cfg")
    if os.path.exists(buildcfg_file):
        # NOTE(review): build.cfg is eval()'d as a Python literal; fine
        # for a file we generated ourselves, unsafe for untrusted input.
        buildcfg.libraries = eval(open(buildcfg_file).read())
Assumes that igraph is built already in igraphcore and sets up the include and library paths and the library names accordingly .
8,772
def use_educated_guess(self):
    """Fall back to the hardcoded library names, include and library
    paths when igraph could not be detected, warning the user and giving
    them a chance to abort (10-second countdown, skippable with Enter)."""
    preprocess_fallback_config()
    global LIBIGRAPH_FALLBACK_LIBRARIES
    global LIBIGRAPH_FALLBACK_INCLUDE_DIRS
    global LIBIGRAPH_FALLBACK_LIBRARY_DIRS
    print("WARNING: we were not able to detect where igraph is installed on")
    print("your machine (if it is installed at all). We will use the fallback")
    print("library and include paths hardcoded in setup.py and hope that the")
    print("C core of igraph is installed there.")
    print("")
    print("If the compilation fails and you are sure that igraph is installed")
    print("on your machine, adjust the following two variables in setup.py")
    print("accordingly and try again:")
    print("")
    print("- LIBIGRAPH_FALLBACK_INCLUDE_DIRS", LIBIGRAPH_FALLBACK_INCLUDE_DIRS)
    print("- LIBIGRAPH_FALLBACK_LIBRARY_DIRS", LIBIGRAPH_FALLBACK_LIBRARY_DIRS)
    print("")
    seconds_remaining = 10 if self.wait else 0
    while seconds_remaining > 0:
        plural = "s" if seconds_remaining > 1 else ""
        sys.stdout.write(
            "\rContinuing in %2d second%s; press Enter to continue "
            "immediately. " % (seconds_remaining, plural))
        sys.stdout.flush()
        if os.name == 'nt':
            # Windows: poll the console for a carriage return.
            if msvcrt.kbhit():
                if msvcrt.getch() == b'\r':
                    break
            time.sleep(1)
        else:
            # POSIX: wait up to a second for a line on stdin.
            rlist, _, _ = select([sys.stdin], [], [], 1)
            if rlist:
                sys.stdin.readline()
                break
        seconds_remaining -= 1
    sys.stdout.write("\r" + " " * 65 + "\r")
    self.libraries = LIBIGRAPH_FALLBACK_LIBRARIES[:]
    if self.static_extension:
        self.libraries.extend(["xml2", "z", "m", "stdc++"])
    self.include_dirs = LIBIGRAPH_FALLBACK_INCLUDE_DIRS[:]
    self.library_dirs = LIBIGRAPH_FALLBACK_LIBRARY_DIRS[:]
Tries to guess the proper library names include and library paths if everything else failed .
8,773
def capture(self, data_buffer=None, log_time=False, debug_print=False,
            retry_reset=True):
    """Capture one frame from the Lepton sensor.

    Fills (or allocates) `data_buffer` of shape (ROWS, COLS, 1) uint16
    and returns (data_buffer, data_buffer.sum()).
    """
    start = time.time()
    if data_buffer is None:
        data_buffer = np.ndarray((Lepton.ROWS, Lepton.COLS, 1),
                                 dtype=np.uint16)
    elif (data_buffer.ndim < 2 or data_buffer.shape[0] < Lepton.ROWS
            or data_buffer.shape[1] < Lepton.COLS
            or data_buffer.itemsize < 2):
        raise Exception("Provided input array not large enough")
    while True:
        Lepton.capture_segment(self.__handle, self.__xmit_buf,
                               self.__msg_size, self.__capture_buf[0])
        # Assumes packet 20's id word reads 0x14xx for a valid frame --
        # TODO confirm against the Lepton datasheet; otherwise wait for
        # the sensor to resynchronize and retry.
        if retry_reset and (self.__capture_buf[20, 0] & 0xFF0F) != 0x1400:
            if debug_print:
                print("Garbage frame number reset waiting...")
            time.sleep(0.185)
        else:
            break
    self.__capture_buf.byteswap(True)
    # Drop the two header words of every packet row.
    data_buffer[:, :] = self.__capture_buf[:, 2:]
    end = time.time()
    if debug_print:
        print("---")
        for i in range(Lepton.ROWS):
            fid = self.__capture_buf[i, 0, 0]
            crc = self.__capture_buf[i, 1, 0]
            fnum = fid & 0xFFF
            print("0x{0:04x} 0x{1:04x} : Row {2:2} : crc={1}".format(
                fid, crc, fnum))
        print("---")
    if log_time:
        print("frame processed int {0}s, {1}hz".format(
            end - start, 1.0 / (end - start)))
    return data_buffer, data_buffer.sum()
Capture a frame of data .
8,774
def stem(self, text):
    """Stem every word of `text` and rejoin them with single spaces."""
    normalized = TextNormalizer.normalize_text(text)
    return ' '.join(self.stem_word(word) for word in normalized.split(' '))
Stem a text string to its common stem form .
8,775
def stem_word(self, word):
    """Stem a word to its common stem form, dispatching on plurality."""
    if self.is_plural(word):
        return self.stem_plural_word(word)
    return self.stem_singular_word(word)
Stem a word to its common stem form .
8,776
def stem_singular_word(self, word):
    """Run *word* through the full stemming Context and return its result."""
    ctx = Context(word, self.dictionary, self.visitor_provider)
    ctx.execute()
    return ctx.result
Stem a singular word to its common stem form .
8,777
def execute(self):
    """Run the stemming process; the outcome is available via ``self.result``."""
    self.start_stemming_process()
    # Keep the derived stem only when the dictionary recognises it;
    # otherwise fall back to the untouched input word.
    is_known = self.dictionary.contains(self.current_word)
    self.result = self.current_word if is_known else self.original_word
Execute the stemming process; the outcome can be retrieved from the result attribute.
8,778
def loop_pengembalian_akhiran(self):
    """Suffix-restoration loop ("loop pengembalian akhiran") — presumably
    the ECS (Enhanced Confix Stripping) step of the stemmer; TODO confirm
    against the algorithm paper.

    Walks the recorded affix removals in reverse, re-attaching each removed
    suffix and retrying prefix removal; stops as soon as the resulting word
    is found in the dictionary. If no combination matches, both the removal
    list and the current word are restored to their saved states.
    """
    self.restore_prefix()

    # Snapshot state so it can be rolled back if no restoration succeeds.
    removals = self.removals
    reversed_removals = reversed(removals)
    current_word = self.current_word

    for removal in reversed_removals:
        if not self.is_suffix_removal(removal):
            continue

        if removal.get_removed_part() == 'kan':
            # Special case: '-kan' may actually be '-k' + 'an'; try the
            # '-k' restoration first.
            self.current_word = removal.result + 'k'
            self.remove_prefixes()
            if self.dictionary.contains(self.current_word):
                return
            self.current_word = removal.result + 'kan'
        else:
            self.current_word = removal.get_subject()

        self.remove_prefixes()
        if self.dictionary.contains(self.current_word):
            return

    # Nothing matched: roll back to the snapshot taken above.
    self.removals = removals
    self.current_word = current_word
ECS Loop Pengembalian Akhiran
8,779
def restore_prefix(self):
    """Restore the prefix so the suffix-restoration (ECS) loop can proceed.

    Resets ``self.current_word`` to the word as it was before the first
    removal, then drops every prefix removal (affix type ``'DP'``) from
    ``self.removals``, keeping only suffix removals.

    Bug fixed: the original called ``list.remove()`` while iterating the
    same list, which skips the element following each removed entry, so
    consecutive 'DP' removals were only partially purged.
    """
    if self.removals:
        # The first removal records the subject word before any stripping.
        self.current_word = self.removals[0].get_subject()

    # Filter in place (slice assignment keeps the list object identity)
    # instead of mutating while iterating.
    self.removals[:] = [r for r in self.removals if r.get_affix_type() != 'DP']
Restore prefix to proceed with ECS loop pengembalian akhiran
8,780
def create_stemmer(self, isDev=False):
    """Build and return a Stemmer wrapped in a result cache.

    ``isDev`` selects which word list ``get_words`` loads.
    """
    dictionary = ArrayDictionary(self.get_words(isDev))
    base_stemmer = Stemmer(dictionary)
    return CachedStemmer(ArrayCache(), base_stemmer)
Returns Stemmer instance
8,781
def add(self, word):
    """Add *word* to the dictionary; falsy or whitespace-only words are ignored."""
    if word and word.strip() != '':
        self.words[word] = word
Add a word to the dictionary
8,782
def truth(f):
    """Convenience decorator turning a truth function into a validator.

    The wrapped function returns its input unchanged when ``f(value)`` is
    truthy, and raises ``ValueError`` otherwise.
    """
    @wraps(f)
    def validator(value):
        if not f(value):
            raise ValueError
        return value
    return validator
Convenience decorator to convert truth functions into validators .
8,783
def Boolean(v):
    """Convert a human-readable boolean value to a ``bool``.

    String inputs are matched case-insensitively against known true/false
    spellings; unrecognised strings raise ValueError. Non-string inputs
    are passed through ``bool()``.
    """
    if isinstance(v, basestring):
        lowered = v.lower()
        if lowered in ('1', 'true', 'yes', 'on', 'enable'):
            return True
        if lowered in ('0', 'false', 'no', 'off', 'disable'):
            return False
        raise ValueError
    return bool(v)
Convert human - readable boolean values to a bool .
8,784
def Email(v):
    """Verify that the value is an email address.

    Splits on the last ``@`` and checks the user and domain parts against
    the module-level USER_REGEX / DOMAIN_REGEX. Any failure — including a
    non-string input — is reported as ``ValueError``.

    Bug fixed: the bare ``except:`` also swallowed ``KeyboardInterrupt``
    and ``SystemExit``; it is narrowed to ``except Exception`` so those
    propagate normally.
    """
    try:
        if not v or "@" not in v:
            raise EmailInvalid("Invalid Email")
        user_part, domain_part = v.rsplit('@', 1)
        if not (USER_REGEX.match(user_part) and DOMAIN_REGEX.match(domain_part)):
            raise EmailInvalid("Invalid Email")
        return v
    except Exception:
        # Normalise every validation failure to ValueError (the contract
        # the surrounding validator machinery expects).
        raise ValueError
Verify that the value is an Email or not .
8,785
def FqdnUrl(v):
    """Verify that the value is a fully-qualified-domain-name URL.

    Parses the URL via ``_url_validation`` and requires a dot in the
    network location. Any failure is reported as ``ValueError``.

    Bug fixed: the bare ``except:`` also swallowed ``KeyboardInterrupt``
    and ``SystemExit``; it is narrowed to ``except Exception`` so those
    propagate normally.
    """
    try:
        parsed_url = _url_validation(v)
        if "." not in parsed_url.netloc:
            raise UrlInvalid("must have a domain name in URL")
        return v
    except Exception:
        # Normalise every validation failure to ValueError.
        raise ValueError
Verify that the value is a Fully qualified domain name URL .
8,786
def IsFile(v):
    """Verify that *v* names an existing regular file.

    Raises FileInvalid for falsy values or values that cannot be handled;
    otherwise returns the result of ``os.path.isfile``.
    """
    try:
        if not v:
            raise FileInvalid('Not a file')
        return os.path.isfile(str(v))
    except TypeError:
        raise FileInvalid('Not a file')
Verify the file exists .
8,787
def IsDir(v):
    """Verify that *v* names an existing directory.

    Raises DirInvalid for falsy values or values that cannot be handled;
    otherwise returns the result of ``os.path.isdir``.
    """
    try:
        if not v:
            raise DirInvalid("Not a directory")
        return os.path.isdir(str(v))
    except TypeError:
        raise DirInvalid("Not a directory")
Verify the directory exists .
8,788
def PathExists(v):
    """Verify the path exists, regardless of whether it is a file or directory.

    Raises PathInvalid for falsy values or values that cannot be handled;
    otherwise returns the result of ``os.path.exists``.
    """
    try:
        if not v:
            raise PathInvalid("Not a Path")
        return os.path.exists(str(v))
    except TypeError:
        raise PathInvalid("Not a Path")
Verify the path exists regardless of its type .
8,789
def _compile_scalar(schema):
    """Compile a scalar schema into a validator function.

    A scalar schema is one of three things:
      * a class  -> the data must be an instance of it;
      * a callable -> it is invoked on the data and its return value used;
      * any other value -> the data must compare equal to it.

    Each returned validator takes ``(path, data)`` and either returns the
    (possibly transformed) data or raises an ``er.Invalid`` subclass.
    """
    if inspect.isclass(schema):
        def validate_instance(path, data):
            # Class schema: accept any instance of that class unchanged.
            if isinstance(data, schema):
                return data
            else:
                msg = 'expected %s' % schema.__name__
                raise er.TypeInvalid(msg, path)
        return validate_instance

    if callable(schema):
        def validate_callable(path, data):
            try:
                return schema(data)
            except ValueError as e:
                # Plain ValueError from a user validator is wrapped.
                raise er.ValueInvalid('not a valid value', path)
            except er.Invalid as e:
                # Already a schema error: extend its path and re-raise.
                e.prepend(path)
                raise
        return validate_callable

    def validate_value(path, data):
        # Literal schema: value must compare equal.
        if data != schema:
            raise er.ScalarInvalid('not a valid value', path)
        return data

    return validate_value
A scalar value .
8,790
def _compile_itemsort():
    """Return a sort-key function that orders mapping items by marker kind."""
    def is_extra(k):
        return k is Extra

    def is_remove(k):
        return isinstance(k, Remove)

    def is_marker(k):
        return isinstance(k, Marker)

    def is_type(k):
        return inspect.isclass(k)

    def is_callable(k):
        return callable(k)

    # Check order matters: classes are themselves callable, so is_type is
    # tested before is_callable even though its rank is higher.
    checks = ((1, is_remove), (2, is_marker), (4, is_type), (3, is_callable), (5, is_extra))

    def item_priority(item):
        key = item[0]
        return next((rank for rank, matches in checks if matches(key)), 0)

    return item_priority
Return the sort-key function used to order mapping items by marker priority.
8,791
def message(default=None, cls=None):
    """Convenience decorator allowing validator functions to provide a message.

    Three layers deep:
      * ``message(default, cls)`` configures the default message and the
        exception class (which must subclass ``er.Invalid``);
      * ``decorator(f)`` wraps the validator function;
      * calling the decorated name (``check``) accepts per-use overrides
        ``msg`` / ``clsoverride`` and returns the actual wrapper.

    At call time, a ``ValueError`` raised by ``f`` is converted into the
    chosen Invalid subclass carrying the chosen message.
    """
    if cls and not issubclass(cls, er.Invalid):
        raise er.SchemaError("message can only use subclases of Invalid as custom class")

    def decorator(f):
        @wraps(f)
        def check(msg=None, clsoverride=None):
            @wraps(f)
            def wrapper(*args, **kwargs):
                try:
                    return f(*args, **kwargs)
                except ValueError:
                    # Precedence: per-call override, then decorator-level
                    # class/message, then generic fallbacks.
                    raise (clsoverride or cls or er.ValueInvalid)(msg or default or 'invalid value')
            return wrapper
        return check
    return decorator
Convenience decorator to allow functions to provide a message .
8,792
def _args_to_dict ( func , args ) : if sys . version_info >= ( 3 , 0 ) : arg_count = func . __code__ . co_argcount arg_names = func . __code__ . co_varnames [ : arg_count ] else : arg_count = func . func_code . co_argcount arg_names = func . func_code . co_varnames [ : arg_count ] arg_value_list = list ( args ) arguments = dict ( ( arg_name , arg_value_list [ i ] ) for i , arg_name in enumerate ( arg_names ) if i < len ( arg_value_list ) ) return arguments
Returns argument names and values as key-value pairs.
8,793
def _merge_args_with_kwargs ( args_dict , kwargs_dict ) : ret = args_dict . copy ( ) ret . update ( kwargs_dict ) return ret
Merge args with kwargs .
8,794
def validate(*a, **kw):
    """Decorator validating a function's arguments against a given schema.

    Schema entries are given positionally (matched to the function's
    parameter names in order) and/or by keyword. The special key
    ``__return__`` supplies a schema for the return value. Unlisted
    arguments are allowed through (``ALLOW_EXTRA``).
    """
    RETURNS_KEY = '__return__'

    def validate_schema_decorator(func):
        returns_defined = False
        returns = None

        # Pair positional schema entries with the target function's own
        # parameter names, then fold in keyword-specified entries.
        schema_args_dict = _args_to_dict(func, a)
        schema_arguments = _merge_args_with_kwargs(schema_args_dict, kw)

        if RETURNS_KEY in schema_arguments:
            returns_defined = True
            returns = schema_arguments[RETURNS_KEY]
            del schema_arguments[RETURNS_KEY]

        # With no argument schemas, validation degrades to identity.
        input_schema = (Schema(schema_arguments, extra=ALLOW_EXTRA)
                        if len(schema_arguments) != 0 else lambda x: x)
        output_schema = Schema(returns) if returns_defined else lambda x: x

        @wraps(func)
        def func_wrapper(*args, **kwargs):
            args_dict = _args_to_dict(func, args)
            arguments = _merge_args_with_kwargs(args_dict, kwargs)
            validated_arguments = input_schema(arguments)
            # NOTE: arguments are passed back purely as keywords, so the
            # wrapped function must accept them by name.
            output = func(**validated_arguments)
            return output_schema(output)

        return func_wrapper

    return validate_schema_decorator
Decorator for validating arguments of a function against a given schema .
8,795
def _compile_object(self, schema):
    """Compile an Object schema into a validator.

    The data must be an instance of ``schema.cls`` (unless that is
    UNDEFINED); its attributes are validated like mapping items and the
    result is rebuilt by passing the validated attributes to the data's
    own type as keyword arguments.
    """
    base_validate = self._compile_mapping(schema, invalid_msg='object value')

    def validate_object(path, data):
        if schema.cls is not UNDEFINED and not isinstance(data, schema.cls):
            raise er.ObjectInvalid('expected a {0!r}'.format(schema.cls), path)
        iterable = _iterate_object(data)
        # Attributes whose value is None are skipped entirely.
        iterable = ifilter(lambda item: item[1] is not None, iterable)
        out = base_validate(path, iterable, {})
        # Reconstruct an instance of the same type from validated attrs;
        # assumes the type accepts them as keyword args — see _iterate_object.
        return type(data)(**out)

    return validate_object
Validate an object .
8,796
def _compile_dict(self, schema):
    """Compile a dict schema into a validator.

    Besides per-key validation (delegated to the compiled mapping
    validator), enforces Exclusive marker groups (at most one key of a
    group present) and Inclusive marker groups (all keys of a group
    present, or none).
    """
    base_validate = self._compile_mapping(schema, invalid_msg='dictionary value')

    # Bucket Exclusive/Inclusive markers by their group label, once at
    # compile time.
    groups_of_exclusion = {}
    groups_of_inclusion = {}
    for node in schema:
        if isinstance(node, Exclusive):
            g = groups_of_exclusion.setdefault(node.group_of_exclusion, [])
            g.append(node)
        elif isinstance(node, Inclusive):
            g = groups_of_inclusion.setdefault(node.group_of_inclusion, [])
            g.append(node)

    def validate_dict(path, data):
        if not isinstance(data, dict):
            raise er.DictInvalid('expected a dictionary', path)

        errors = []
        # Exclusion: a second present key from the same group is an error.
        for label, group in groups_of_exclusion.items():
            exists = False
            for exclusive in group:
                if exclusive.schema in data:
                    if exists:
                        msg = exclusive.msg if hasattr(exclusive, 'msg') and exclusive.msg else "two or more values in the same group of exclusion '%s'" % label
                        next_path = path + [VirtualPathComponent(label)]
                        errors.append(er.ExclusiveInvalid(msg, next_path))
                        break
                    exists = True

        if errors:
            raise er.MultipleInvalid(errors)

        # Inclusion: keys of a group must appear all together or not at all.
        for label, group in groups_of_inclusion.items():
            included = [node.schema in data for node in group]
            if any(included) and not all(included):
                msg = "some but not all values in the same group of inclusion '%s'" % label
                for g in group:
                    # First marker carrying a custom message wins.
                    if hasattr(g, 'msg') and g.msg:
                        msg = g.msg
                        break
                next_path = path + [VirtualPathComponent(label)]
                errors.append(er.InclusiveInvalid(msg, next_path))
                break

        if errors:
            raise er.MultipleInvalid(errors)

        # Preserve the caller's dict subclass in the output.
        out = data.__class__()
        return base_validate(path, iteritems(data), out)

    return validate_dict
Validate a dictionary .
8,797
def _compile_sequence(self, schema, seq_type):
    """Compile a sequence schema (list/tuple-like) into a validator.

    Each element of the data must satisfy at least one of the compiled
    sub-schemas; elements matched by ``Remove`` are dropped from the
    output. The result is rebuilt as the same type as the input.
    """
    _compiled = [self._compile(s) for s in schema]
    seq_type_name = seq_type.__name__

    def validate_sequence(path, data):
        if not isinstance(data, seq_type):
            raise er.SequenceTypeInvalid('expected a %s' % seq_type_name, path)

        # Empty sequence schema: only empty data validates.
        if not schema:
            if data:
                raise er.MultipleInvalid([er.ValueInvalid('not a valid value', [value]) for value in data])
            return data

        out = []
        invalid = None
        errors = []
        index_path = UNDEFINED
        for i, value in enumerate(data):
            index_path = path + [i]
            invalid = None
            for validate in _compiled:
                try:
                    cval = validate(index_path, value)
                    if cval is not Remove:
                        out.append(cval)
                    break
                except er.Invalid as e:
                    # A deeper path means the error came from inside a
                    # nested structure — report it immediately.
                    if len(e.path) > len(index_path):
                        raise
                    invalid = e
            else:
                # No sub-schema accepted this element; keep its last error.
                errors.append(invalid)

        if errors:
            raise er.MultipleInvalid(errors)

        # namedtuples must be rebuilt from positional fields.
        if _isnamedtuple(data):
            return type(data)(*out)
        else:
            return type(data)(out)

    return validate_sequence
Validate a sequence type .
8,798
def _compile_set(self, schema):
    """Compile a set schema into a validator.

    Every element of the incoming set must satisfy at least one sub-schema;
    an Invalid is collected for each element that matches none.
    """
    container_type = type(schema)
    container_name = container_type.__name__

    def accepted(validators, path, element):
        # True as soon as one sub-schema validates the element.
        for check in validators:
            try:
                check(path, element)
                return True
            except er.Invalid:
                pass
        return False

    def validate_set(path, data):
        if not isinstance(data, container_type):
            raise er.Invalid('expected a %s' % container_name, path)

        # NOTE: sub-schemas are compiled on every call, matching the
        # original behaviour; hoisting would change when self._compile runs.
        sub_validators = [self._compile(sub) for sub in schema]

        errors = []
        for element in data:
            if not accepted(sub_validators, path, element):
                errors.append(er.Invalid('invalid value in %s' % container_name, path))

        if errors:
            raise er.MultipleInvalid(errors)
        return data

    return validate_set
Validate a set .
8,799
def extend(self, schema, required=None, extra=None):
    """Create a new Schema by merging this schema with *schema*.

    Keys in *schema* override keys already present here, matching on the
    key's literal value even when one side wraps it in a Marker; nested
    dict values are merged recursively. ``required``/``extra`` override
    this schema's settings when given, otherwise they are inherited.
    """
    assert type(self.schema) == dict and type(schema) == dict, 'Both schemas must be dictionary-based'

    result = self.schema.copy()

    # Markers (Required/Optional/...) wrap the real key; compare on the
    # unwrapped literal so e.g. Required('x') and 'x' collide.
    def key_literal(key):
        return (key.schema if isinstance(key, Marker) else key)

    # Map each existing literal back to the (possibly marked) key object.
    result_key_map = dict((key_literal(key), key) for key in result)

    for key, value in iteritems(schema):
        if key_literal(key) in result_key_map:
            result_key = result_key_map[key_literal(key)]
            result_value = result[result_key]
            if type(result_value) == dict and type(value) == dict:
                # Both sides are dicts: merge them recursively; the new
                # key object (and its marker) replaces the old one.
                new_value = Schema(result_value).extend(value).schema
                del result[result_key]
                result[key] = new_value
            else:
                # Non-dict collision: the new value simply wins.
                del result[result_key]
                result[key] = value
        else:
            result[key] = value

    # Preserve subclass, and inherit required/extra unless overridden.
    result_cls = type(self)
    result_required = (required if required is not None else self.required)
    result_extra = (extra if extra is not None else self.extra)
    return result_cls(result, required=result_required, extra=result_extra)
Create a new Schema by merging this and the provided schema .