| idx (int64: 0–63k) | question (string: 61–4.03k chars) | target (string: 6–1.23k chars) |
|---|---|---|
| 8,600 | `def download(self, attachment_id, destination): attachment = self(id=attachment_id) if os.path.isdir(destination): destination = os.path.join(destination, attachment.file_name) return self._download_file(attachment.content_url, destination)` | Download an attachment from Zendesk. |
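
The row above shows Zenpy's attachment download helper. As a minimal usage sketch (not part of the dataset), assuming a configured Zenpy client that exposes this method as `client.attachments.download` and an attachment id that exists in your instance:

```python
# Hypothetical usage of the download helper above; the subdomain,
# credentials, and attachment id are placeholders.
from zenpy import Zenpy

client = Zenpy(subdomain='example', email='agent@example.com', token='secret')

# Passing a directory as the destination: the helper appends the
# attachment's own file_name before streaming content_url to disk.
client.attachments.download(12345, '/tmp')
```
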
| 8,601 | `def organization_fields(self, organization): return self._query_zendesk(self.endpoint.organization_fields, 'organization_field', id=organization)` | Retrieve the organization fields for this organization. |
| 8,602 | `def organization_memberships(self, organization): return self._query_zendesk(self.endpoint.organization_memberships, 'organization_membership', id=organization)` | Retrieve the organization memberships for this organization. |
| 8,603 | `def external(self, external_id, include=None): return self._query_zendesk(self.endpoint.external, 'organization', id=external_id, include=include)` | Locate an Organization by its external_id attribute. |
| 8,604 | `def organizations(self, organization, include=None): return self._query_zendesk(self.endpoint.organizations, 'ticket', id=organization, include=include)` | Retrieve the tickets for this organization. |
| 8,605 | `def recent(self, include=None): return self._query_zendesk(self.endpoint.recent, 'ticket', id=None, include=include)` | Retrieve the most recent tickets. |
| 8,606 | `def comments(self, ticket, include_inline_images=False): return self._query_zendesk(self.endpoint.comments, 'comment', id=ticket, include_inline_images=repr(include_inline_images).lower())` | Retrieve the comments for a ticket. |
| 8,607 | `def audits(self, ticket=None, include=None, **kwargs): if ticket is not None: return self._query_zendesk(self.endpoint.audits, 'ticket_audit', id=ticket, include=include) else: return self._query_zendesk(self.endpoint.audits.cursor, 'ticket_audit', include=include, **kwargs)` | Retrieve TicketAudits. If a ticket is passed, return the audits for that specific ticket. |
| 8,608 | `def metrics(self, ticket): return self._query_zendesk(self.endpoint.metrics, 'ticket_metric', id=ticket)` | Retrieve TicketMetric. |
| 8,609 | `def metrics_incremental(self, start_time): return self._query_zendesk(self.endpoint.metrics.incremental, 'ticket_metric_events', start_time=start_time)` | Retrieve TicketMetric events incrementally. |
| 8,610 | `def show(self, item, variant): url = self._build_url(self.endpoint.show(item, variant)) return self._get(url)` | Show a variant. |
| 8,611 | `def comments(self, request_id): return self._query_zendesk(self.endpoint.comments, 'comment', id=request_id)` | Return the comments for a request. |
| 8,612 | `def memberships(self, group, include=None): return self._get(self._build_url(self.endpoint.memberships(id=group, include=include)))` | Return the GroupMemberships for this group. |
| 8,613 | `def memberships_assignable(self, group, include=None): return self._get(self._build_url(self.endpoint.memberships_assignable(id=group, include=include)))` | Return memberships that are assignable for this group. |
| 8,614 | `def active(self, include=None): return self._get(self._build_url(self.endpoint.active(include=include)))` | Return all active views. |
| 8,615 | `def execute(self, view, include=None): return self._get(self._build_url(self.endpoint.execute(id=view, include=include)))` | Execute a view. |
| 8,616 | `def tickets(self, view, include=None): return self._get(self._build_url(self.endpoint.tickets(id=view, include=include)))` | Return the tickets in a view. |
| 8,617 | `def count(self, view, include=None): return self._get(self._build_url(self.endpoint.count(id=view, include=include)))` | Return a ViewCount for a view. |
| 8,618 | `def count_many(self, views, include=None): return self._get(self._build_url(self.endpoint(count_many=views, include=include)))` | Return many ViewCounts. |
| 8,619 | `def export(self, view, include=None): return self._get(self._build_url(self.endpoint.export(id=view, include=include)))` | Export a view. Returns an Export object. |
| 8,620 | `def make_default(self, user, group_membership): return self._put(self._build_url(self.endpoint.make_default(user, group_membership)), payload={})` | Set the passed GroupMembership as default for the specified user. |
| 8,621 | `def comments(self, article): return self._query_zendesk(self.endpoint.comments, object_type='comment', id=article)` | Retrieve comments for an article. |
| 8,622 | `def create(self, article, attachment, inline=False, file_name=None, content_type=None): return HelpdeskAttachmentRequest(self).post(self.endpoint.create, article=article, attachments=attachment, inline=inline, file_name=file_name, content_type=content_type)` | This function creates an attachment attached to an article. |
| 8,623 | `def delete(self, article_attachment): return HelpdeskAttachmentRequest(self).delete(self.endpoint.delete, article_attachment)` | This function completely wipes an attachment from a Zendesk Helpdesk article. |
| 8,624 | `def recipients_incremental(self, start_time): return self._query_zendesk(self.endpoint.recipients_incremental, 'recipients', start_time=start_time)` | Retrieve NPS Recipients incrementally. |
| 8,625 | `def responses_incremental(self, start_time): return self._query_zendesk(self.endpoint.responses_incremental, 'responses', start_time=start_time)` | Retrieve NPS Responses incrementally. |
| 8,626 | `def _wrap_element(self, element): def dirty_callback(): self._set_dirty() if isinstance(element, list): element = ProxyList(element, dirty_callback=dirty_callback) elif isinstance(element, dict): element = ProxyDict(element, dirty_callback=dirty_callback) elif getattr(element, '_dirty_callback', self._sentinel) is not self._sentinel: if not callable(element._dirty_callback): element._dirty_callback = dirty_callback return element` | We want to know when an item stored in this dict is modified. If the element is a list or dict, we wrap it in a ProxyList or ProxyDict so that any modification executes a callback that updates this instance. If it is a ZenpyObject, the callback updates the parent object. |
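
Row 8,626's docstring describes a wrap-and-notify pattern. Here is a self-contained sketch of the idea, assuming nothing beyond the standard library; the ProxyList name mirrors the snippet, but this is illustrative, not Zenpy's implementation:

```python
# Minimal dirty-tracking proxy: mutating the wrapped list fires a callback
# so the owning object can mark itself dirty.
class ProxyList(list):
    def __init__(self, iterable, dirty_callback=None):
        super().__init__(iterable)
        self._dirty_callback = dirty_callback

    def append(self, item):
        super().append(item)
        if self._dirty_callback is not None:
            self._dirty_callback()  # notify the owner of the mutation

events = []
wrapped = ProxyList([1, 2], dirty_callback=lambda: events.append('dirty'))
wrapped.append(3)
assert events == ['dirty']  # the owner was told about the change
```
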
| 8,627 | `def handle_pagination(self, page_num=None, page_size=None): self._response_json = self.get_next_page(page_num=page_num, page_size=page_size) self.update_attrs() self.position = 0 self.values = self.process_page()` | Handle retrieving and processing the next page of results. |
| 8,628 | `def get_next_page(self, page_num, page_size): url = self._response_json.get(self.next_page_attr, None) if url is None: raise StopIteration() params, url = self.process_url(page_num, page_size, url) response = self.response_handler.api._get(url, raw_response=True, params=params) return response.json()` | Retrieve the next page of results. |
| 8,629 | `def process_url(self, page_num, page_size, url): params = dict() if page_num is not None: url = re.sub(r'page=\d+', '', url) params['page'] = page_num if page_size is not None: url = re.sub(r'per_page=\d+', '', url) params['per_page'] = page_size return params, url` | When slicing, remove the per_page and page parameters from the URL and pass them to requests in the params dict. |
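
To make the rewriting in process_url concrete, here is a worked example using the same regexes on a hypothetical next-page URL; requests merges the params dict back into the query string, which is why the explicit values are stripped first:

```python
import re

url = 'https://example.zendesk.com/api/v2/tickets.json?page=2'
url = re.sub(r'page=\d+', '', url)   # drop the explicit page number
params = {'page': 5, 'per_page': 100}
# url is now 'https://example.zendesk.com/api/v2/tickets.json?'
# and the GET would be issued with params={'page': 5, 'per_page': 100}
print(url, params)
```
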
| 8,630 | `def _clean_dirty(self, obj=None): obj = obj or self obj.__dict__['_dirty_attributes'].clear() obj._dirty = False for key, val in vars(obj).items(): if isinstance(val, BaseObject): self._clean_dirty(val) else: func = getattr(val, '_clean_dirty', None) if callable(func): func()` | Recursively clean self and all child objects. |
| 8,631 | `def _set_dirty(self, obj=None): obj = obj or self for key, value in vars(obj).items(): if key not in ('api', '_dirty_attributes', '_always_dirty', '_dirty_callback', '_dirty'): setattr(obj, key, value) if isinstance(value, BaseObject): self._set_dirty(value)` | Recursively set the _dirty flag on self and all child objects. |
| 8,632 | `def to_json(self, indent=2): return json.dumps(self, default=json_encode_for_printing, indent=indent)` | Return self formatted as JSON. |
| 8,633 | `def deserialize(self, response_json): response_objects = dict() if all((t in response_json for t in ('ticket', 'audit'))): response_objects["ticket_audit"] = self.object_mapping.object_from_json("ticket_audit", response_json) for zenpy_object_name in self.object_mapping.class_mapping: if zenpy_object_name in response_json: zenpy_object = self.object_mapping.object_from_json(zenpy_object_name, response_json[zenpy_object_name]) response_objects[zenpy_object_name] = zenpy_object for key, value in response_json.items(): if isinstance(value, list): zenpy_object_name = as_singular(key) if zenpy_object_name in self.object_mapping.class_mapping: response_objects[key] = [] for object_json in response_json[key]: zenpy_object = self.object_mapping.object_from_json(zenpy_object_name, object_json) response_objects[key].append(zenpy_object) return response_objects` | Locate and deserialize all objects in the returned JSON. |
| 8,634 | `def build(self, response): response_json = response.json() if get_endpoint_path(self.api, response).startswith('/ticket_audits.json'): return TicketAuditGenerator(self, response_json) zenpy_objects = self.deserialize(response_json) plural_object_type = as_plural(self.api.object_type) if plural_object_type in zenpy_objects: return ZendeskResultGenerator(self, response_json, response_objects=zenpy_objects[plural_object_type]) if self.api.object_type in zenpy_objects: return zenpy_objects[self.api.object_type] for zenpy_object_name in self.object_mapping.class_mapping: if zenpy_object_name in zenpy_objects: return zenpy_objects[zenpy_object_name] for zenpy_object_name in self.object_mapping.class_mapping: plural_zenpy_object_name = as_plural(zenpy_object_name) if plural_zenpy_object_name in zenpy_objects: return ZendeskResultGenerator(self, response_json, object_type=plural_zenpy_object_name) raise ZenpyException("Unknown Response: " + str(response_json))` | Deserialize the returned objects and return either a single Zenpy object or a ResultGenerator in the case of multiple results. |
| 8,635 | `def set_cache_impl(self, cache_impl, maxsize, **kwargs): new_cache = self._get_cache_impl(cache_impl, maxsize, **kwargs) self._populate_new_cache(new_cache) self.cache = new_cache` | Change cache implementation. The contents of the old cache will be transferred to the new one. |
| 8,636 | `def set_maxsize(self, maxsize, **kwargs): new_cache = self._get_cache_impl(self.impl_name, maxsize, **kwargs) self._populate_new_cache(new_cache) self.cache = new_cache` | Set maxsize. This involves creating a new cache and transferring the items. |
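
Rows 8,635 and 8,636 both follow a transfer-then-swap pattern: build the new cache, copy the old contents in, then replace the reference. A sketch of that pattern using cachetools, which is an assumption here; the dataset does not name the underlying cache classes:

```python
from cachetools import LRUCache, TTLCache

old_cache = LRUCache(maxsize=100)
old_cache['ticket-1'] = {'id': 1}           # made-up cached entry

new_cache = TTLCache(maxsize=200, ttl=300)  # new implementation / maxsize
new_cache.update(old_cache)                 # transfer the old contents
cache = new_cache                           # then swap, as in self.cache = new_cache
print(dict(cache))
```
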
| 8,637 | `def add(self, zenpy_object): object_type = get_object_type(zenpy_object) if object_type not in self.mapping or self.disabled: return attr_name = self._cache_key_attribute(object_type) cache_key = getattr(zenpy_object, attr_name) log.debug("Caching: [{}({}={})]".format(zenpy_object.__class__.__name__, attr_name, cache_key)) self.mapping[object_type][cache_key] = zenpy_object` | Add a Zenpy object to the relevant cache. If no cache exists for this object, nothing is done. |
| 8,638 | `def delete(self, to_delete): if not isinstance(to_delete, list): to_delete = [to_delete] for zenpy_object in to_delete: object_type = get_object_type(zenpy_object) object_cache = self.mapping.get(object_type, None) if object_cache: removed_object = object_cache.pop(zenpy_object.id, None) if removed_object: log.debug("Cache RM: [%s %s]" % (object_type.capitalize(), zenpy_object.id))` | Purge one or more items from the relevant caches. |
| 8,639 | `def get(self, object_type, cache_key): if object_type not in self.mapping or self.disabled: return None cache = self.mapping[object_type] if cache_key in cache: log.debug("Cache HIT: [%s %s]" % (object_type.capitalize(), cache_key)) return cache[cache_key] else: log.debug('Cache MISS: [%s %s]' % (object_type.capitalize(), cache_key))` | Query the cache for a Zenpy object. |
| 8,640 | `def query_cache_by_object(self, zenpy_object): object_type = get_object_type(zenpy_object) cache_key = self._cache_key_attribute(object_type) return self.get(object_type, getattr(zenpy_object, cache_key))` | Convenience method for testing. Given an object, return the cached version. |
| 8,641 | `def purge_cache(self, object_type): if object_type in self.mapping: cache = self.mapping[object_type] log.debug("Purging [{}] cache of {} values.".format(object_type, len(cache))) cache.purge()` | Purge the named cache of all values. If no cache exists for object_type, nothing is done. |
| 8,642 | `def in_cache(self, zenpy_object): object_type = get_object_type(zenpy_object) cache_key_attr = self._cache_key_attribute(object_type) return self.get(object_type, getattr(zenpy_object, cache_key_attr)) is not None` | Determine whether or not this object is in the cache. |
| 8,643 | `def http_adapter_kwargs(): return dict(max_retries=Retry(total=3, status_forcelist=[r for r in Retry.RETRY_AFTER_STATUS_CODES if r != 429], respect_retry_after_header=False))` | Provides Zenpy's default HTTPAdapter args for those users providing their own adapter. |
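
Row 8,643 suggests these kwargs are meant for users mounting their own transport adapter. A plausible wiring with requests, assuming only the public requests/urllib3 APIs; the session setup itself is not shown in the dataset:

```python
import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

# Same retry policy as http_adapter_kwargs above: 3 retries, the usual
# retry-after statuses minus 429, and Retry-After headers ignored.
adapter_kwargs = dict(max_retries=Retry(
    total=3,
    status_forcelist=[r for r in Retry.RETRY_AFTER_STATUS_CODES if r != 429],
    respect_retry_after_header=False,
))

session = requests.Session()
session.mount('https://', HTTPAdapter(**adapter_kwargs))
```
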
| 8,644 | `def set_cache_max(self, cache_name, maxsize, **kwargs): cache = self._get_cache(cache_name) cache.set_maxsize(maxsize, **kwargs)` | Sets the maxsize attribute of the named cache. |
| 8,645 | `def set_cache_implementation(self, cache_name, impl_name, maxsize, **kwargs): self._get_cache(cache_name).set_cache_impl(impl_name, maxsize, **kwargs)` | Changes the cache implementation for the named cache. |
| 8,646 | `def add_cache(self, object_type, cache_impl_name, maxsize, **kwargs): if object_type not in ZendeskObjectMapping.class_mapping: raise ZenpyException("No such object type: %s" % object_type) self.cache.mapping[object_type] = ZenpyCache(cache_impl_name, maxsize, **kwargs)` | Add a new cache for the named object type and cache implementation. |
| 8,647 | `def object_from_json(self, object_type, object_json, parent=None): if not isinstance(object_json, dict): return object_json obj = self.instantiate_object(object_type, parent) for key, value in object_json.items(): if key not in self.skip_attrs: key, value = self._deserialize(key, obj, value) if isinstance(value, dict): value = ProxyDict(value, dirty_callback=getattr(obj, '_dirty_callback', None)) elif isinstance(value, list): value = ProxyList(value, dirty_callback=getattr(obj, '_dirty_callback', None)) setattr(obj, key, value) if hasattr(obj, '_clean_dirty'): obj._clean_dirty() self.api.cache.add(obj) return obj` | Given a blob of JSON representing a Zenpy object, recursively deserialize it and any nested objects it contains. This method also adds the deserialized object to the relevant cache if applicable. |
| 8,648 | `def class_for_type(self, object_type): if object_type not in self.class_mapping: raise ZenpyException("Unknown object_type: " + str(object_type)) else: return self.class_mapping[object_type]` | Given an object_type, return the class associated with it. |
| 8,649 | `def destroy(self, force=False): if self.persisted: if self.soft_delete and not force: self.__class__.update(values={self.__class__.deleted_at_column(): datetime.utcnow()}, where=self.where_self) else: self.__class__.delete(where=self.where_self, skip_soft_delete=True) else: raise RecordNotPersisted("Record's primary key is None")` | Deletes the record. If the model has soft_delete activated, the record will not actually be deleted. |
| 8,650 | `def select(self, **kwargs): db_adapter = self.db_adapter(db_name=kwargs.get('db'), role=kwargs.get('role', 'replica')) kwargs['stack'] = self.stack_mark(inspect.stack(), db_conn=db_adapter.db) return self.collection_instance(db_adapter.select(**kwargs))` | Perform a SELECT statement on the model's table in the replica database. |
| 8,651 | `def query(self, sql=None, filename=None, **kwargs): results = query(sql=sql, filename=filename, db=self.db_names[kwargs.get('role', 'replica')], **kwargs) if results is None: return None else: return self.collection_instance(results)` | Run raw SQL, from a string or a file, against the database. |
| 8,652 | `def count(self, **kwargs): if 'select' in kwargs: kwargs['select'] = {'cnt': 'COUNT(%s)' % kwargs['select']} else: kwargs['select'] = {'cnt': 'COUNT(*)'} res = self.db_adapter(db_name=kwargs.get('db'), role=kwargs.get('role', 'replica')).select(**kwargs) return res.cnt[0]` | Performs a COUNT statement on the model's table in the replica database. |
| 8,653 | `def insert(self, **kwargs): if len(kwargs['values']) == 0: config.logger.warning('No values to insert.') return values = kwargs['values'] if isinstance(values, self): values = values.attributes.copy() if isinstance(values, dict): for k, v in list(values.items()): if v is None: del kwargs['values'][k] kwargs['stack'] = self.stack_mark(inspect.stack()) kwargs['primary_key'] = self.primary_key column_names = self.table_schema().keys() now = datetime.utcnow() for field in ('created_at', 'updated_at'): if field in column_names: kwargs['values'][field] = now results = self.db_adapter(role='master').insert(**kwargs) return self.record_or_model(results)` | Performs an INSERT statement on the model's table in the master database. |
| 8,654 | `def update(self, **kwargs): kwargs['stack'] = self.stack_mark(inspect.stack()) kwargs['primary_key'] = self.primary_key column_names = self.table_schema().keys() now = datetime.utcnow() if 'updated_at' in column_names: kwargs['values']['updated_at'] = now results = self.db_adapter(role='master').update(**kwargs) return self.record_or_model(results)` | Performs an UPDATE statement on the model's table in the master database. |
| 8,655 | `def delete(self, **kwargs): kwargs['stack'] = self.stack_mark(inspect.stack()) return self.db_adapter(role='master').delete(**kwargs)` | Performs a DELETE statement on the model's table in the master database. |
| 8,656 | `def last(self, limit=1, **kwargs): return self.collection_instance(self.db_adapter(db_name=kwargs.get('db'), role=kwargs.get('role', 'replica')).select(where='created_at IS NOT NULL', order='created_at DESC', limit=limit))` | Returns the last `limit` records inserted in the model's table in the replica database. Rows are sorted by created_at. |
| 8,657 | `def find_by(self, values={}, **kwargs): try: return self(**self.select(where=values, limit=1, **kwargs).to_dict(orient='records')[0]) except IndexError: return None` | Returns a single record matching the criteria in values, found in the model's table in the replica database. |
| 8,658 | `def replica_lag(self, **kwargs): if not self._use_replica(): return 0 try: kwargs['stack'] = self.stack_mark(inspect.stack()) sql = "select EXTRACT(EPOCH FROM NOW() - pg_last_xact_replay_timestamp()) AS replication_lag" return self.collection_instance(self.db_adapter().raw_query(sql=sql, **kwargs)).squeeze() except Exception: return 0` | Returns the current replication lag in seconds between the master and replica databases. |
| 8,659 | `def table_schema(self): if self.__dict__.get('_table_schema') is None: self._table_schema = None table_schema = {} for row in self.query_schema(): name, default, dtype = self.db().lexicon.column_info(row) if isinstance(default, str): json_matches = re.findall(r"^\'(.*)\'::jsonb$", default) if len(json_matches) > 0: default = json.loads(json_matches[0]) if name == self.primary_key: default = None table_schema[name] = {'default': default, 'type': dtype} if len(table_schema): self._table_schema = table_schema return self._table_schema` | Returns the table schema. |
| 8,660 | `def load_vertex_buffer(self, fd, material, length): material.vertices = struct.unpack('{}f'.format(length // 4), fd.read(length))` | Load vertex data from file. Can be overridden to reduce data copy. |
| 8,661 | `def _load_vertex_buffers(self): fd = gzip.open(cache_name(self.file_name), 'rb') for buff in self.meta.vertex_buffers: mat = self.wavefront.materials.get(buff['material']) if not mat: mat = Material(name=buff['material'], is_default=True) self.wavefront.materials[mat.name] = mat mat.vertex_format = buff['vertex_format'] self.load_vertex_buffer(fd, mat, buff['byte_length']) fd.close()` | Load each vertex buffer into each material. |
| 8,662 | `def _parse_mtllibs(self): for mtllib in self.meta.mtllibs: try: materials = self.material_parser_cls(os.path.join(self.path, mtllib), encoding=self.encoding, strict=self.strict).materials except IOError: raise IOError("Failed to load mtl file: {}".format(os.path.join(self.path, mtllib))) for name, material in materials.items(): self.wavefront.materials[name] = material` | Load mtl files. |
| 8,663 | `def add_vertex_buffer(self, material, vertex_format, byte_offset, byte_length): self._vertex_buffers.append({"material": material, "vertex_format": vertex_format, "byte_offset": byte_offset, "byte_length": byte_length})` | Add a vertex buffer. |
| 8,664 | `def write(self, path): with open(path, 'w') as fd: fd.write(json.dumps({"created_at": self._created_at, "version": self._version, "mtllibs": self._mtllibs, "vertex_buffers": self._vertex_buffers}, indent=2))` | Save the metadata as JSON. |
| 8,665 | `def has_material(self, new_material): for material in self.materials: if material.name == new_material.name: return True return False` | Determine whether we already have a material of this name. |
| 8,666 | `def add_material(self, material): if self.has_material(material): return self.materials.append(material)` | Add a material to the mesh if it's not already present. |
| 8,667 | `def auto_consume(func): def inner(*args, **kwargs): func(*args, **kwargs) args[0].consume_line() return inner` | Decorator for auto-consuming lines when leaving the function. |
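
The auto_consume decorator in row 8,667 assumes the wrapped function is a method whose first positional argument (args[0]) is the parser. A runnable illustration with a toy parser class; TinyParser is made up for this sketch:

```python
def auto_consume(func):
    def inner(*args, **kwargs):
        func(*args, **kwargs)
        args[0].consume_line()  # args[0] is the parser instance
    return inner

class TinyParser:
    def __init__(self):
        self.consumed = 0

    def consume_line(self):
        self.consumed += 1

    @auto_consume
    def parse_vertex(self):
        pass  # a real parser would read self.values here

parser = TinyParser()
parser.parse_vertex()
assert parser.consumed == 1  # the line was consumed on the way out
```
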
| 8,668 | `def create_line_generator(self): if self.file_name.endswith(".gz"): if sys.version_info.major == 3: gz = gzip.open(self.file_name, mode='rt', encoding=self.encoding) else: gz = gzip.open(self.file_name, mode='rt') for line in gz.readlines(): yield line gz.close() else: if sys.version_info.major == 3: file = open(self.file_name, mode='r', encoding=self.encoding) else: file = codecs.open(self.file_name, mode='r', encoding=self.encoding) for line in file: yield line file.close()` | Creates a generator yielding lines in the file. Should only yield non-empty lines. |
| 8,669 | `def next_line(self): self.line = next(self.lines) self.values = self.line.split()` | Read the next line from the line generator and split it. |
| 8,670 | `def parse(self): try: while True: if not self.line: self.next_line() if self.line[0] == '#' or len(self.values) < 2: self.consume_line() continue self.dispatcher.get(self.values[0], self.parse_fallback)() except StopIteration: pass if self.auto_post_parse: self.post_parse()` | Parse all the lines in the obj file. Determines what type each line is and dispatches appropriately. |
| 8,671 | `def parse_fallback(self): if self.strict: raise PywavefrontException("Unimplemented OBJ format statement '%s' on line '%s'" % (self.values[0], self.line.rstrip())) else: logger.warning("Unimplemented OBJ format statement '%s' on line '%s'" % (self.values[0], self.line.rstrip()))` | Fallback method when the parser doesn't know the statement. |
| 8,672 | `def draw(instance): if isinstance(instance, Wavefront): draw_materials(instance.materials) elif isinstance(instance, Material): draw_material(instance) elif isinstance(instance, dict): draw_materials(instance) else: raise ValueError("Cannot figure out how to draw: {}".format(instance))` | Generic draw function. |
| 8,673 | `def draw_material(material, face=GL_FRONT_AND_BACK): if material.gl_floats is None: material.gl_floats = (GLfloat * len(material.vertices))(*material.vertices) material.triangle_count = len(material.vertices) / material.vertex_size vertex_format = VERTEX_FORMATS.get(material.vertex_format) if not vertex_format: raise ValueError("Vertex format {} not supported by pyglet".format(material.vertex_format)) glPushClientAttrib(GL_CLIENT_VERTEX_ARRAY_BIT) glPushAttrib(GL_CURRENT_BIT \| GL_ENABLE_BIT \| GL_LIGHTING_BIT) glEnable(GL_CULL_FACE) glCullFace(GL_BACK) texture = material.texture or material.texture_ambient if texture and material.has_uvs: bind_texture(texture) else: glDisable(GL_TEXTURE_2D) glMaterialfv(face, GL_DIFFUSE, gl_light(material.diffuse)) glMaterialfv(face, GL_AMBIENT, gl_light(material.ambient)) glMaterialfv(face, GL_SPECULAR, gl_light(material.specular)) glMaterialfv(face, GL_EMISSION, gl_light(material.emissive)) glMaterialf(face, GL_SHININESS, min(128.0, material.shininess)) glEnable(GL_LIGHT0) if material.has_normals: glEnable(GL_LIGHTING) else: glDisable(GL_LIGHTING) glInterleavedArrays(vertex_format, 0, material.gl_floats) glDrawArrays(GL_TRIANGLES, 0, int(material.triangle_count)) glPopAttrib() glPopClientAttrib()` | Draw a single material. |
| 8,674 | `def bind_texture(texture): if not getattr(texture, 'image', None): texture.image = load_image(texture.path) glEnable(texture.image.target) glBindTexture(texture.image.target, texture.image.id) gl.glTexParameterf(texture.image.target, gl.GL_TEXTURE_WRAP_S, gl.GL_CLAMP_TO_EDGE) gl.glTexParameterf(texture.image.target, gl.GL_TEXTURE_WRAP_T, gl.GL_CLAMP_TO_EDGE)` | Bind a single texture. |
| 8,675 | `def load_image(name): image = pyglet.image.load(name).texture verify_dimensions(image) return image` | Load an image. |
| 8,676 | `def pad_light(self, values): while len(values) < 4: values.append(0.) return list(map(float, values))` | Accept an array of up to 4 values and return an array of 4 values. If the input array is less than length 4, pad it with zeroes until it is length 4. Also ensure each value is a float. |
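
A quick worked example of pad_light's contract (row 8,676), written as a free function for illustration rather than the parser method:

```python
def pad_light(values):
    # Pad to length 4 with zeroes, then coerce every entry to float.
    while len(values) < 4:
        values.append(0.)
    return list(map(float, values))

print(pad_light([0.1, 0.2]))    # [0.1, 0.2, 0.0, 0.0]
print(pad_light([1, 2, 3, 4]))  # [1.0, 2.0, 3.0, 4.0]
```
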
| 8,677 | `def load_cache(self): self.cache_loaded = self.cache_loader_cls(self.file_name, self.wavefront, strict=self.strict, create_materials=self.create_materials, encoding=self.encoding, parse=self.parse).parse()` | Loads the file using cached data. |
| 8,678 | `def post_parse(self): if self.cache and not self.cache_loaded: self.cache_writer_cls(self.file_name, self.wavefront).write()` | Called after parsing is done. |
| 8,679 | `def consume_normals(self): while True: yield (float(self.values[1]), float(self.values[2]), float(self.values[3])) try: self.next_line() except StopIteration: break if not self.values: break if self.values[0] != "vn": break` | Consumes all consecutive normal lines. |
| 8,680 | `def consume_texture_coordinates(self): while True: yield (float(self.values[1]), float(self.values[2])) try: self.next_line() except StopIteration: break if not self.values: break if self.values[0] != "vt": break` | Consume all consecutive texture coordinates. |
| 8,681 | `def check_error(code, context="client"): if code: error = error_text(code, context) logger.error(error) raise Snap7Exception(error)` | Check if the error code is set. If so, a Python log message is generated and an error is raised. |
| 8,682 | `def error_text(error, context="client"): assert context in ("client", "server", "partner") logger.debug("error text for %s" % hex(error)) len_ = 1024 text_type = c_char * len_ text = text_type() library = load_library() if context == "client": library.Cli_ErrorText(error, text, len_) elif context == "server": library.Srv_ErrorText(error, text, len_) elif context == "partner": library.Par_ErrorText(error, text, len_) return text.value` | Returns a textual explanation of a given error number. |
| 8,683 | `def event_text(self, event): logger.debug("error text for %s" % hex(event.EvtCode)) len_ = 1024 text_type = ctypes.c_char * len_ text = text_type() error = self.library.Srv_EventText(ctypes.byref(event), ctypes.byref(text), len_) check_error(error) if six.PY2: return text.value else: return text.value.decode('ascii')` | Returns a textual explanation of a given event object. |
| 8,684 | `def create(self): logger.info("creating server") self.library.Srv_Create.restype = snap7.snap7types.S7Object self.pointer = snap7.snap7types.S7Object(self.library.Srv_Create())` | Create the server. |
| 8,685 | `def register_area(self, area_code, index, userdata): size = ctypes.sizeof(userdata) logger.info("registering area %s, index %s, size %s" % (area_code, index, size)) return self.library.Srv_RegisterArea(self.pointer, area_code, index, ctypes.byref(userdata), size)` | Shares a memory area with the server. That memory block will be visible to the clients. |
| 8,686 | `def set_events_callback(self, call_back): logger.info("setting event callback") callback_wrap = ctypes.CFUNCTYPE(None, ctypes.c_void_p, ctypes.POINTER(snap7.snap7types.SrvEvent), ctypes.c_int) def wrapper(usrptr, pevent, size): logger.info("callback event: " + self.event_text(pevent.contents)) call_back(pevent.contents) return 0 self._callback = callback_wrap(wrapper) usrPtr = ctypes.c_void_p() return self.library.Srv_SetEventsCallback(self.pointer, self._callback, usrPtr)` | Sets the user callback that the Server object has to call when an event is created. |
| 8,687 | `def set_read_events_callback(self, call_back): logger.info("setting read event callback") callback_wrapper = ctypes.CFUNCTYPE(None, ctypes.c_void_p, ctypes.POINTER(snap7.snap7types.SrvEvent), ctypes.c_int) def wrapper(usrptr, pevent, size): logger.info("callback event: " + self.event_text(pevent.contents)) call_back(pevent.contents) return 0 self._read_callback = callback_wrapper(wrapper) return self.library.Srv_SetReadEventsCallback(self.pointer, self._read_callback)` | Sets the user callback that the Server object has to call when a Read event is created. |
| 8,688 | `def _set_log_callback(self): logger.debug("setting up event logger") def log_callback(event): logger.info("callback event: " + self.event_text(event)) self.set_events_callback(log_callback)` | Sets a callback that logs the events. |
| 8,689 | `def start(self, tcpport=102): if tcpport != 102: logger.info("setting server TCP port to %s" % tcpport) self.set_param(snap7.snap7types.LocalPort, tcpport) logger.info("starting server on 0.0.0.0:%s" % tcpport) return self.library.Srv_Start(self.pointer)` | Start the server. |
| 8,690 | `def destroy(self): logger.info("destroying server") if self.library: self.library.Srv_Destroy(ctypes.byref(self.pointer))` | Destroy the server. |
| 8,691 | `def get_status(self): logger.debug("get server status") server_status = ctypes.c_int() cpu_status = ctypes.c_int() clients_count = ctypes.c_int() error = self.library.Srv_GetStatus(self.pointer, ctypes.byref(server_status), ctypes.byref(cpu_status), ctypes.byref(clients_count)) check_error(error) logger.debug("status server %s cpu %s clients %s" % (server_status.value, cpu_status.value, clients_count.value)) return snap7.snap7types.server_statuses[server_status.value], snap7.snap7types.cpu_statuses[cpu_status.value], clients_count.value` | Reads the server status, the Virtual CPU status, and the number of clients connected. |
| 8,692 | `def unlock_area(self, code, index): logger.debug("unlocking area code %s index %s" % (code, index)) return self.library.Srv_UnlockArea(self.pointer, code, index)` | Unlocks a previously locked shared memory area. |
| 8,693 | `def lock_area(self, code, index): logger.debug("locking area code %s index %s" % (code, index)) return self.library.Srv_LockArea(self.pointer, code, index)` | Locks a shared memory area. |
| 8,694 | `def start_to(self, ip, tcpport=102): if tcpport != 102: logger.info("setting server TCP port to %s" % tcpport) self.set_param(snap7.snap7types.LocalPort, tcpport) assert re.match(ipv4, ip), '%s is invalid ipv4' % ip logger.info("starting server to %s:%s" % (ip, tcpport)) return self.library.Srv_StartTo(self.pointer, ip)` | Start the server on a specific interface. |
| 8,695 | `def set_mask(self, kind, mask): logger.debug("setting mask kind %s to %s" % (kind, mask)) return self.library.Srv_SetMask(self.pointer, kind, mask)` | Writes the specified filter mask. |
| 8,696 | `def set_cpu_status(self, status): assert status in snap7.snap7types.cpu_statuses, 'unknown cpu state %s' % status logger.debug("setting cpu status to %s" % status) return self.library.Srv_SetCpuStatus(self.pointer, status)` | Sets the Virtual CPU status. |
| 8,697 | `def get_param(self, number): logger.debug("retrieving param number %s" % number) value = ctypes.c_int() code = self.library.Srv_GetParam(self.pointer, number, ctypes.byref(value)) check_error(code) return value.value` | Reads an internal Server object parameter. |
| 8,698 | `def get_mask(self, kind): logger.debug("retrieving mask kind %s" % kind) mask = snap7.snap7types.longword() code = self.library.Srv_GetMask(self.pointer, kind, ctypes.byref(mask)) check_error(code) return mask` | Reads the specified filter mask. |
| 8,699 | `def error_wrap(func): def f(*args, **kw): code = func(*args, **kw) check_error(code, context="client") return f` | Parses an S7 error code returned by the decorated function. |
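
Row 8,699's error_wrap ties together the snap7 helpers above: it wraps a call that returns an S7 error code so that nonzero codes raise. A self-contained sketch with stand-ins for the real library call; connect and its result parameter are hypothetical:

```python
class Snap7Exception(Exception):
    pass

def check_error(code, context="client"):
    # Raise when a nonzero S7 error code is returned.
    if code:
        raise Snap7Exception("error %s in %s context" % (code, context))

def error_wrap(func):
    def f(*args, **kw):
        code = func(*args, **kw)
        check_error(code, context="client")
    return f

@error_wrap
def connect(result=0):
    return result  # stand-in for e.g. a ctypes library call

connect(result=0)   # returns None, no error raised
# connect(result=5) would raise Snap7Exception
```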