sentence1
stringlengths
52
3.87M
sentence2
stringlengths
1
47.2k
label
stringclasses
1 value
def search_read_all(self, domain, order, fields, batch_size=500, context=None, offset=0, limit=None):
    """Iterate lazily over all records matching ``domain``.

    Records are fetched from the server in pages of ``batch_size`` and
    yielded one at a time.

    :param domain: search domain to filter records.
    :param order: order clause passed to ``search_read``.
    :param fields: fields to fetch for each record.
    :param batch_size: number of records per paginated request.
    :param context: optional context overrides.
    :param offset: index of the first record to yield.
    :param limit: maximum number of records; ``None`` fetches all.
    """
    ctx = {} if context is None else context
    if limit is None:
        # Without an explicit limit, page until every matching
        # record has been fetched.
        stop = self.search_count(domain, context=ctx) + offset
    else:
        stop = limit + offset
    page_size = batch_size
    for start in range(offset, stop, batch_size):
        # Shrink the final page so we never read past ``stop``.
        if start + page_size > stop:
            page_size = stop - start
        records = self.search_read(
            domain, start, page_size, order, fields, context=ctx)
        for record in records:
            yield record
An endless iterator that iterates over records. :param domain: A search domain :param order: The order clause for search read :param fields: The fields argument for search_read :param batch_size: The optimal batch size when sending paginated requests
entailment
def find(self, filter=None, page=1, per_page=10, fields=None, context=None):
    """
    Find records that match the filter.

    Pro Tip: The fields could have nested fields names if the field is
    a relationship type. For example if you were looking up an order
    and also want to get the shipping address country then fields would
    be ``['shipment_address', 'shipment_address.country']`` but country
    in this case is the ID of the country which is not very useful if
    you don't already have a map. You can fetch the country code by
    adding ``'shipment_address.country.code'`` to the fields.

    :param filter: A domain expression (Refer docs for domain syntax)
    :param page: The page to fetch to get paginated results
    :param per_page: The number of records to fetch per page
    :param fields: A list of field names to fetch.
    :param context: Any overrides to the context.
    :return: the raw HTTP response object from the API call.
    """
    if filter is None:
        filter = []
    rv = self.client.session.get(
        self.path,
        params={
            'filter': dumps(filter or []),
            'page': page,
            'per_page': per_page,
            # NOTE(review): the query parameter is 'field' (singular)
            # while it carries a list of names -- confirm against the
            # server API whether this should be 'fields'.
            'field': fields,
            'context': dumps(context or self.client.context),
        }
    )
    # Notify subscribers (logging/metrics hooks) of the raw response.
    response_received.send(rv)
    return rv
Find records that match the filter. Pro Tip: The fields could have nested fields names if the field is a relationship type. For example if you were looking up an order and also want to get the shipping address country then fields would be: `['shipment_address', 'shipment_address.country']` but country in this case is the ID of the country which is not very useful if you don't already have a map. You can fetch the country code by adding `'shipment_address.country.code'` to the fields. :param filter: A domain expression (Refer docs for domain syntax) :param page: The page to fetch to get paginated results :param per_page: The number of records to fetch per page :param fields: A list of field names to fetch. :param context: Any overrides to the context.
entailment
def attach(self, id, filename, url):
    """Attach a file, downloaded from a public URL, to a record.

    :param id: ID of the record to attach the file to.
    :param filename: file name for the new attachment.
    :param url: publicly accessible URL to download the file from.
    """
    attachment_model = self.client.model('ir.attachment')
    resource = '%s,%s' % (self.model_name, id)
    return attachment_model.add_attachment_from_url(filename, url, resource)
Add an attachment to record from url :param id: ID of record :param filename: File name of attachment :param url: Public url to download file from.
entailment
def refresh_if_needed(self):
    """Refresh the task status from the server if it is still running.

    Only tasks in the ``PENDING`` or ``STARTED`` states are refreshed;
    terminal states never change, so no round trip is made for them.

    :raises Exception: when the server response has no task results.
    :raises ServerError: when the server reports an error for the task.
    """
    if self.state not in (self.PENDING, self.STARTED):
        return
    try:
        response, = self._fetch_result()['tasks']
    except (KeyError, ValueError):
        raise Exception(
            "Unable to find results for task."
        )
    if 'error' in response:
        # BUG FIX: this used to read ``self.state == self.FAILURE`` --
        # a no-op comparison instead of an assignment, so the task
        # never actually entered the FAILURE state.
        self.state = self.FAILURE
        raise ServerError(response['error'])
    if 'state' in response:
        self.state = response['state']
        self.result = response['result']
Refresh the status of the task from server if required.
entailment
def get_product_inventory(product_id, warehouse_ids):
    """
    Return the inventory of one product keyed by location ID.

    The returned response will look like::

        {
            4: {   // Location ID
                'quantity_on_hand': 12.0,
                'quantity_available': 8.0
            },
            5: {   // Location ID
                'quantity_on_hand': 8.0,
                'quantity_available': 8.0
            },
        }

    Read more:
    http://docs.fulfiliorestapi.apiary.io/#reference/product/product-inventory
    """
    product_model = client.model('product.product')
    # The API is bulk-oriented: ask for one product and unwrap the
    # single entry from the product-keyed mapping it returns.
    inventory_by_product = product_model.get_product_inventory(
        [product_id], warehouse_ids
    )
    return inventory_by_product[product_id]
Return the product inventory in each location. The returned response will look like:: { 12: { // Product ID 4: { // Location ID 'quantity_on_hand': 12.0, 'quantity_available': 8.0 }, 5: { // Location ID 'quantity_on_hand': 8.0, 'quantity_available': 8.0 }, }, 126: { // Product ID 4: { // Location ID 'quantity_on_hand': 16.0, 'quantity_available': 15.0 }, 5: { // Location ID 'quantity_on_hand': 9.0, 'quantity_available': 8.0 }, } } Read more: http://docs.fulfiliorestapi.apiary.io/#reference/product/product-inventory
entailment
def get_customer(code):
    """Return the ID of the customer with the given code, or None."""
    party_model = client.model('party.party')
    matches = party_model.find([('code', '=', code)])
    return matches[0]['id'] if matches else None
Fetch a customer with the code. Returns None if the customer is not found.
entailment
def get_address(customer_id, data):
    """Return the ID of the customer's address matching ``data``, or None.

    Fetches every address of the customer and compares them field by
    field against ``data``. You can get fancy by using some validation
    mechanism too.
    """
    address_model = client.model('party.address')
    candidates = address_model.find(
        [('party', '=', customer_id)],
        fields=[
            'name', 'street', 'street_bis', 'city', 'zip',
            'subdivision.code', 'country.code'
        ]
    )
    simple_fields = ('name', 'street', 'street_bis', 'city', 'zip')
    for candidate in candidates:
        if any(candidate[f] != data[f] for f in simple_fields):
            continue
        # Subdivision codes look like US-CA / IN-KL, so only the
        # suffix is compared against the bare state code.
        if not candidate['subdivision.code'].endswith(data['state']):
            continue
        if candidate['country.code'] != data['country']:
            continue
        return candidate['id']
Easier to fetch the addresses of customer and then check one by one. You can get fancy by using some validation mechanism too
entailment
def create_address(customer_id, data):
    """Create an address for the customer and return its ID."""
    address_model = client.model('party.address')
    country_model = client.model('country.country')
    subdivision_model = client.model('country.subdivision')

    country, = country_model.find([('code', '=', data['country'])])
    # Subdivision codes are prefixed with the country (US-CA, IN-KL),
    # so match on the suffix within the resolved country.
    subdivision, = subdivision_model.find([
        ('code', 'ilike', '%-' + data['state']),
        ('country', '=', country['id'])
    ])
    created, = address_model.create([{
        'party': customer_id,
        'name': data['name'],
        'street': data['street'],
        'street_bis': data['street_bis'],
        'city': data['city'],
        'zip': data['zip'],
        'country': country['id'],
        'subdivision': subdivision['id'],
    }])
    return created['id']
Create an address and return the id
entailment
def create_customer(name, email, phone):
    """Create a customer, then attach email and phone contact methods.

    :return: the newly created party record.
    """
    party_model = client.model('party.party')
    contact_model = client.model('party.contact_mechanism')

    party, = party_model.create([{'name': name}])
    # Create both contact mechanisms in one bulk call.
    contact_model.create([
        {'type': 'email', 'value': email, 'party': party},
        {'type': 'phone', 'value': phone, 'party': party},
    ])
    return party
Create a customer with the name. Then attach the email and phone as contact methods
entailment
def create_order(order):
    """
    Create a sale order on fulfil from an order dict.

    Ensures the customer and both addresses exist (creating them when
    missing), creates the order and its lines -- picking, per line, a
    warehouse with enough available inventory -- then quotes and
    confirms the order.

    :param order: dict with keys 'number', 'date', 'customer',
        'invoice_address', 'shipment_address' and 'items'.
    """
    SaleOrder = client.model('sale.sale')
    SaleOrderLine = client.model('sale.line')

    # Check if customer exists, if not create one
    customer_id = get_customer(order['customer']['code'])
    if not customer_id:
        customer_id = create_customer(
            order['customer']['name'],
            order['customer']['email'],
            order['customer']['phone'],
        )

    # Check if there is a matching invoice address, create if missing
    invoice_address = get_address(
        customer_id, order['invoice_address']
    )
    if not invoice_address:
        invoice_address = create_address(
            customer_id, order['invoice_address']
        )

    # See if the shipping address exists, if not create it
    shipment_address = get_address(
        customer_id, order['shipment_address']
    )
    if not shipment_address:
        shipment_address = create_address(
            customer_id, order['shipment_address']
        )

    sale_order_id, = SaleOrder.create([{
        'reference': order['number'],
        'sale_date': order['date'],
        'party': customer_id,
        'invoice_address': invoice_address,
        'shipment_address': shipment_address,
    }])

    # fetch inventory of all the products before we create lines
    warehouses = get_warehouses()
    warehouse_ids = [warehouse['id'] for warehouse in warehouses]

    lines = []
    for item in order['items']:
        # get the product. We assume it already exists.
        product = get_product(item['product'])

        # find the first location that has enough inventory.
        # NOTE(review): if no location satisfies the quantity the last
        # location checked is used, and if the inventory mapping is
        # empty ``location`` is unbound -- confirm upstream guarantees.
        product_inventory = get_product_inventory(product, warehouse_ids)
        for location, quantities in product_inventory.items():
            if quantities['quantity_available'] >= item['quantity']:
                break
        lines.append({
            'sale': sale_order_id,
            'product': product,
            'quantity': item['quantity'],
            'unit_price': item['unit_price'],
            'warehouse': location,
        })
    SaleOrderLine.create(lines)

    # Move the order through its workflow: draft -> quote -> confirmed.
    SaleOrder.quote([sale_order_id])
    SaleOrder.confirm([sale_order_id])
Create an order on fulfil from order_details. See the calling function below for an example of the order_details
entailment
def model_base(fulfil_client, cache_backend=None, cache_expire=10 * 60):
    """Build a declarative ``BaseModel`` class bound to a client.

    Inspired by SQLAlchemy's declarative base pattern: every model that
    subclasses the returned class shares the fulfil client instance and
    cache settings.

    :param fulfil_client: fulfil client instance the models will use.
    :param cache_backend: optional cache backend shared by the models.
    :param cache_expire: cache TTL in seconds (default ten minutes).
    """
    attributes = {
        'fulfil_client': fulfil_client,
        'cache_backend': cache_backend,
        'cache_expire': cache_expire,
        '__abstract__': True,
        '__modelregistry__': {},
    }
    return type('BaseModel', (Model,), attributes)
Return a Base Model class that binds to the fulfil client instance and the cache instance. This design is inspired by the declarative base pattern in SQL Alchemy.
entailment
def all(self):
    """Return the results represented by this Query as an iterator.

    .. versionchanged:: 0.10.0
        Returns an iterator that lazily loads records instead of
        fetching thousands of records at once.
    """
    start = self._offset or 0
    return self.rpc_model.search_read_all(
        self.domain,
        self._order_by,
        self.fields,
        context=self.context,
        offset=start,
        limit=self._limit,
    )
Return the results represented by this Query as a list. .. versionchanged:: 0.10.0 Returns an iterator that lazily loads records instead of fetching thousands of records at once.
entailment
def count(self):
    """Return the number of rows this Query would return."""
    return self.rpc_model.search_count(self.domain, context=self.context)
Return a count of rows this Query would return.
entailment
def exists(self):
    """Return True if at least one record satisfies the query."""
    matches = self.rpc_model.search_count(self.domain, context=self.context)
    return matches > 0
A convenience method that returns True if a record satisfying the query exists
entailment
def show_active_only(self, state):
    """Return a copy of this query with ``active_only`` set to ``state``."""
    clone = self._copy()
    clone.active_only = state
    return clone
Set active only to true or false on a copy of this query
entailment
def filter_by(self, **kwargs):
    """Return a copy of this Query filtered by keyword equality criteria.

    Each ``field=value`` keyword adds an ``(field, '=', value)`` clause
    to the copy's domain.
    """
    clone = self._copy()
    clone.domain.extend(
        (field, '=', value) for field, value in kwargs.items()
    )
    return clone
Apply the given filtering criterion to a copy of this Query, using keyword expressions.
entailment
def filter_by_domain(self, domain):
    """Return a copy of this query with its domain replaced by ``domain``."""
    clone = self._copy()
    clone.domain = domain
    return clone
Apply the given domain to a copy of this query
entailment
def first(self):
    """Return the first result of this Query, or None when empty."""
    rows = self.rpc_model.search_read(
        self.domain, None, 1, self._order_by, self.fields,
        context=self.context
    )
    # Preserves the historical truthiness semantics of the original.
    return rows and rows[0] or None
Return the first result of this Query or None if the result doesn't contain any row.
entailment
def get(self, id):
    """Return the record with the given primary key, or None.

    The lookup deliberately disables ``active_test`` so archived
    (inactive) records are returned too.
    """
    lookup_context = self.context.copy()
    lookup_context['active_test'] = False
    rows = self.rpc_model.search_read(
        [('id', '=', id)], None, None, None, self.fields,
        context=lookup_context
    )
    # Preserves the historical truthiness semantics of the original.
    return rows and rows[0] or None
Return an instance based on the given primary key identifier, or None if not found. This returns a record whether active or not.
entailment
def limit(self, limit):
    """Return a new Query with a LIMIT applied."""
    clone = self._copy()
    clone._limit = limit
    return clone
Apply a LIMIT to the query and return the newly resulting Query.
entailment
def offset(self, offset):
    """Return a new Query with an OFFSET applied."""
    clone = self._copy()
    clone._offset = offset
    return clone
Apply an OFFSET to the query and return the newly resulting Query.
entailment
def one(self):
    """Return exactly one result or raise.

    :raises fulfil_client.exc.NoResultFound: when no row matches.
    :raises fulfil_client.exc.MultipleResultsFound: when more than one
        row matches.
    """
    # BUG FIX: the offset/limit arguments were swapped -- the call was
    # ``search_read(domain, 2, None, ...)``, which skipped the first
    # two rows with no limit. The signature (see ``first`` and
    # ``search_read_all``) is (domain, offset, limit, order, fields),
    # so we must fetch at most two rows to detect duplicates.
    results = self.rpc_model.search_read(
        self.domain, None, 2, self._order_by, self.fields,
        context=self.context
    )
    if not results:
        raise fulfil_client.exc.NoResultFound
    if len(results) > 1:
        raise fulfil_client.exc.MultipleResultsFound
    return results[0]
Return exactly one result or raise an exception. Raises fulfil_client.exc.NoResultFound if the query selects no rows. Raises fulfil_client.exc.MultipleResultsFound if multiple rows are found.
entailment
def order_by(self, *criterion):
    """Return a new Query with the given ORDER BY criteria applied.

    All existing ORDER BY settings can be suppressed by passing None --
    this also suppresses any ORDER BY configured on mappers.
    """
    clone = self._copy()
    clone._order_by = criterion
    return clone
apply one or more ORDER BY criterion to the query and return the newly resulting Query All existing ORDER BY settings can be suppressed by passing None - this will suppress any ORDER BY configured on mappers as well.
entailment
def delete(self):
    """Delete every record matching the query.

    Warning: this is a destructive operation. Not every model allows
    deletion, and several restrict it based on status (e.g. transacted
    products, non-draft sale orders). If deletion fails, a server error
    is thrown.
    """
    matching_ids = self.rpc_model.search(self.domain, context=self.context)
    if matching_ids:
        self.rpc_model.delete(matching_ids)
Delete all records matching the query. Warning: This is a destructive operation. Not every model allows deletion of records and several models even restrict based on status. For example, deleting products that have been transacted is restricted. Another example is sales orders which can be deleted only when they are draft. If deletion fails, a server error is thrown.
entailment
def archive(self):
    """Archive (soft delete) every record matching the query.

    Assumes the model supports archiving (not many do -- especially
    transactional documents). Internally this just sets the ``active``
    field to False on the matching records.
    """
    matching_ids = self.rpc_model.search(self.domain, context=self.context)
    if matching_ids:
        self.rpc_model.write(matching_ids, {'active': False})
Archives (soft delete) all the records matching the query. This assumes that the model allows archiving (not many do - especially transactional documents). Internal implementation sets the active field to False.
entailment
def _logged_in_successful(data): """ Test the login status from the returned communication of the server. :param data: bytes received from server during login :type data: list of bytes :return boolean, True when you are logged in. """ if re.match(r'^:(testserver\.local|tmi\.twitch\.tv)' r' NOTICE \* :' r'(Login unsuccessful|Error logging in)*$', data.strip()): return False else: return True
Test the login status from the returned communication of the server. :param data: bytes received from server during login :type data: list of bytes :return boolean, True when you are logged in.
entailment
def connect(self): """ Connect to Twitch """ # Do not use non-blocking stream, they are not reliably # non-blocking # s.setblocking(False) # s.settimeout(1.0) s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) connect_host = "irc.twitch.tv" connect_port = 6667 try: s.connect((connect_host, connect_port)) except (Exception, IOError): print("Unable to create a socket to %s:%s" % ( connect_host, connect_port)) raise # unexpected, because it is a blocking socket # Connected to twitch # Sending our details to twitch... s.send(('PASS %s\r\n' % self.oauth).encode('utf-8')) s.send(('NICK %s\r\n' % self.username).encode('utf-8')) if self.verbose: print('PASS %s\r\n' % self.oauth) print('NICK %s\r\n' % self.username) received = s.recv(1024).decode() if self.verbose: print(received) if not TwitchChatStream._logged_in_successful(received): # ... and they didn't accept our details raise IOError("Twitch did not accept the username-oauth " "combination") else: # ... and they accepted our details # Connected to twitch.tv! # now make this socket non-blocking on the OS-level fcntl.fcntl(s, fcntl.F_SETFL, os.O_NONBLOCK) if self.s is not None: self.s.close() # close the previous socket self.s = s # store the new socket self.join_channel(self.username) # Wait until we have switched channels while self.current_channel != self.username: self.twitch_receive_messages()
Connect to Twitch
entailment
def _push_from_buffer(self): """ Push a message on the stack to the IRC stream. This is necessary to avoid Twitch overflow control. """ if len(self.buffer) > 0: if time.time() - self.last_sent_time > 5: try: message = self.buffer.pop(0) self.s.send(message.encode('utf-8')) if self.verbose: print(message) finally: self.last_sent_time = time.time()
Push a message on the stack to the IRC stream. This is necessary to avoid Twitch overflow control.
entailment
def join_channel(self, channel):
    """Ask the IRC server to join a different Twitch chat channel.

    Returns immediately; the actual switch may take a moment.

    :param channel: name of the channel (without the leading #).
    """
    join_command = 'JOIN #%s\r\n' % channel
    self.s.send(join_command.encode('utf-8'))
    if self.verbose:
        print(join_command)
Join a different chat channel on Twitch. Note, this function returns immediately, but the switch might take a moment :param channel: name of the channel (without #)
entailment
def _parse_message(self, data):
    """Parse one IRC line received from the socket.

    Replies to PING, tracks channel switches, and extracts chat
    messages.

    :param data: a single decoded IRC line (str).
    :return: dict with 'channel', 'username' and 'message' keys when
        the line is a PRIVMSG, otherwise None.
    """
    if TwitchChatStream._check_has_ping(data):
        self._send_pong()
    if TwitchChatStream._check_has_channel(data):
        self.current_channel = \
            TwitchChatStream._check_has_channel(data)[0]

    if TwitchChatStream._check_has_message(data):
        return {
            'channel': re.findall(r'^:.+![a-zA-Z0-9_]+'
                                  r'@[a-zA-Z0-9_]+'
                                  r'.+ '
                                  r'PRIVMSG (.*?) :', data)[0],
            'username': re.findall(r'^:([a-zA-Z0-9_]+)!', data)[0],
            # BUG FIX: ``data`` is already a decoded str (the caller
            # decodes the socket bytes before splitting lines), so the
            # old ``.decode('utf8')`` on the findall result raised
            # AttributeError on Python 3.
            'message': re.findall(r'PRIVMSG #[a-zA-Z0-9_]+ :(.+)',
                                  data)[0]
        }
    else:
        return None
Parse the bytes received from the socket. :param data: the bytes received from the socket :return:
entailment
def twitch_receive_messages(self):
    """Drain the socket and return any chat messages received.

    Must be called frequently enough (~10s): Twitch logs off users not
    replying to PING commands, and the PONG reply is sent from the
    message parser invoked here. Also flushes one pending outgoing
    message from the send buffer.

    :return: list of chat messages received. Each message is a dict
        with the keys ['channel', 'username', 'message'].
    """
    self._push_from_buffer()
    result = []
    while True:
        # process the complete buffer, until no data is left no more
        try:
            msg = self.s.recv(4096).decode()  # NON-BLOCKING RECEIVE!
        except socket.error as e:
            err = e.args[0]
            if err == errno.EAGAIN or err == errno.EWOULDBLOCK:
                # There is no more data available to read
                return result
            else:
                # a "real" error occurred
                # import traceback
                # import sys
                # print(traceback.format_exc())
                # print("Trying to recover...")
                self.connect()
                return result
        else:
            if self.verbose:
                print(msg)
            # Parse line by line; drop empty strings and non-PRIVMSG
            # lines (the parser returns None for those).
            rec = [self._parse_message(line)
                   for line in filter(None, msg.split('\r\n'))]
            rec = [r for r in rec if r]  # remove Nones
            result.extend(rec)
Call this function to process everything received by the socket This needs to be called frequently enough (~10s) Twitch logs off users not replying to ping commands. :return: list of chat messages received. Each message is a dict with the keys ['channel', 'username', 'message']
entailment
def reset(self):
    """Restart the ffmpeg subprocess that produces the video stream.

    Sends SIGINT to any previous ffmpeg process, rebuilds the command
    line (raw RGB video from /tmp/videopipe, plus either real audio
    from /tmp/audiopipe or silence from /dev/zero), and launches a new
    ffmpeg process streaming to Twitch over RTMP.
    """
    if self.ffmpeg_process is not None:
        # Close the previous stream
        try:
            self.ffmpeg_process.send_signal(signal.SIGINT)
        except OSError:
            pass

    command = []
    command.extend([
        self.ffmpeg_binary,
        '-loglevel', 'verbose',
        '-y',       # overwrite previous file/stream
        # '-re',    # native frame-rate
        '-analyzeduration', '1',
        '-f', 'rawvideo',
        '-r', '%d' % self.fps,  # set a fixed frame rate
        '-vcodec', 'rawvideo',
        # size of one frame
        '-s', '%dx%d' % (self.width, self.height),
        '-pix_fmt', 'rgb24',  # The input are raw bytes
        '-thread_queue_size', '1024',
        '-i', '/tmp/videopipe',  # The input comes from a pipe
        # Twitch needs to receive sound in their streams!
        # '-an',  # Tells FFMPEG not to expect any audio
    ])
    if self.audio_enabled:
        command.extend([
            '-ar', '%d' % AUDIORATE,
            '-ac', '2',
            '-f', 's16le',
            '-thread_queue_size', '1024',
            '-i', '/tmp/audiopipe'
        ])
    else:
        command.extend([
            '-ar', '8000',
            '-ac', '1',
            '-f', 's16le',
            '-i', '/dev/zero',  # silence alternative, works forever
            # '-i', 'http://stream1.radiostyle.ru:8001/tunguska',
            # '-filter_complex',
            # '[0:1][1:0]amix=inputs=2:duration=first[all_audio]'
        ])
    command.extend([
        # VIDEO CODEC PARAMETERS
        '-vcodec', 'libx264',
        '-r', '%d' % self.fps,
        '-b:v', '3000k',
        '-s', '%dx%d' % (self.width, self.height),
        '-preset', 'faster',
        '-tune', 'zerolatency',
        '-crf', '23',
        '-pix_fmt', 'yuv420p',
        # '-force_key_frames', r'expr:gte(t,n_forced*2)',
        '-minrate', '3000k', '-maxrate', '3000k',
        '-bufsize', '12000k',
        '-g', '60',  # key frame distance
        '-keyint_min', '1',
        # '-filter:v "setpts=0.25*PTS"'
        # '-vsync','passthrough',

        # AUDIO CODEC PARAMETERS
        '-acodec', 'libmp3lame',
        '-ar', '44100',
        '-b:a', '160k',
        # '-bufsize', '8192k',
        '-ac', '1',
        # '-acodec', 'aac', '-strict', 'experimental',
        # '-ab', '128k', '-ar', '44100', '-ac', '1',
        # '-async','44100',
        # '-filter_complex', 'asplit',  # for audio sync?
        # STORE THE VIDEO PARAMETERS
        # '-vcodec', 'libx264', '-s', '%dx%d'%(width, height),
        # '-preset', 'libx264-fast',
        # 'my_output_videofile2.avi'

        # MAP THE STREAMS
        # use only video from first input and only audio from second
        '-map', '0:v',
        '-map', '1:a',

        # NUMBER OF THREADS
        '-threads', '2',

        # STREAM TO TWITCH
        '-f', 'flv',
        'rtmp://live-ams.twitch.tv/app/%s' % self.twitch_stream_key
    ])

    devnullpipe = open("/dev/null", "w")  # Throw away stream
    if self.verbose:
        devnullpipe = None
    self.ffmpeg_process = subprocess.Popen(
        command,
        stdin=subprocess.PIPE,
        stderr=devnullpipe,
        stdout=devnullpipe)
Reset the videostream by restarting ffmpeg
entailment
def send_video_frame(self, frame):
    """Send one video frame to the ffmpeg video pipe.

    Lazily creates and opens the /tmp/videopipe FIFO on first use.

    :param frame: numpy array of shape (height, width, 3) with values
        between 0.0 and 1.0 (converted here to uint8 bytes).
    :raises OSError: when the stream (pipe) has been closed.
    """
    if self.video_pipe is None:
        if not os.path.exists('/tmp/videopipe'):
            os.mkfifo('/tmp/videopipe')
        self.video_pipe = os.open('/tmp/videopipe', os.O_WRONLY)

    assert frame.shape == (self.height, self.width, 3)

    raw_frame = np.clip(255 * frame, 0, 255).astype('uint8')
    try:
        os.write(self.video_pipe, raw_frame.tostring())
    except OSError:
        # The pipe has been closed. Reraise and handle it further
        # downstream
        raise
Send frame of shape (height, width, 3) with values between 0 and 1. Raises an OSError when the stream is closed. :param frame: array containing the frame. :type frame: numpy array with shape (height, width, 3) containing values between 0.0 and 1.0
entailment
def send_audio(self, left_channel, right_channel):
    """Send interleaved stereo samples to the ffmpeg audio pipe.

    Lazily creates and opens the /tmp/audiopipe FIFO on first use.
    Both channels must have the same 1-D shape.

    :param left_channel: numpy array of shape (k,) with values between
        -1.0 and 1.0 (converted here to int16).
    :param right_channel: numpy array of shape (k,) with values between
        -1.0 and 1.0.
    :raises OSError: when the stream (pipe) has been closed.
    """
    if self.audio_pipe is None:
        if not os.path.exists('/tmp/audiopipe'):
            os.mkfifo('/tmp/audiopipe')
        self.audio_pipe = os.open('/tmp/audiopipe', os.O_WRONLY)

    assert len(left_channel.shape) == 1
    assert left_channel.shape == right_channel.shape

    interleaved = np.column_stack((left_channel, right_channel)).flatten()
    samples = np.clip(32767 * interleaved, -32767, 32767).astype('int16')
    try:
        os.write(self.audio_pipe, samples.tostring())
    except OSError:
        # The pipe has been closed. Reraise and handle it further
        # downstream
        raise
Add the audio samples to the stream. The left and the right channel should have the same shape. Raises an OSError when the stream is closed. :param left_channel: array containing the audio signal. :type left_channel: numpy array with shape (k, ) containing values between -1.0 and 1.0. k can be any integer :param right_channel: array containing the audio signal. :type right_channel: numpy array with shape (k, ) containing values between -1.0 and 1.0. k can be any integer
entailment
def send_audio(self, left_channel, right_channel):
    """Store the latest audio samples for the streaming loop to use.

    The left and the right channel should have the same shape.

    :param left_channel: numpy array of shape (k,) with values between
        -1.0 and 1.0. k can be any integer.
    :param right_channel: numpy array of shape (k,) with values between
        -1.0 and 1.0. k can be any integer.
    """
    self.lastaudioframe_right = right_channel
    self.lastaudioframe_left = left_channel
Add the audio samples to the stream. The left and the right channel should have the same shape. :param left_channel: array containing the audio signal. :type left_channel: numpy array with shape (k, ) containing values between -1.0 and 1.0. k can be any integer :param right_channel: array containing the audio signal. :type right_channel: numpy array with shape (k, ) containing values between -1.0 and 1.0. k can be any integer
entailment
def send_video_frame(self, frame, frame_counter=None):
    """Queue a video frame for the streaming worker.

    :param frame: numpy array of shape (height, width, 3) with values
        between 0.0 and 1.0.
    :param frame_counter: explicit position of the frame within the
        stream; provide this when multi-threading so frames don't
        switch position. Defaults to an internal auto-incrementing
        counter.
    """
    if frame_counter is None:
        # Auto-number frames for single-threaded callers.
        frame_counter = self.frame_counter
        self.frame_counter += 1
    self.q_video.put((frame_counter, frame))
send frame of shape (height, width, 3) with values between 0 and 1 :param frame: array containing the frame. :type frame: numpy array with shape (height, width, 3) containing values between 0.0 and 1.0 :param frame_counter: frame position number within stream. Provide this when multi-threading to make sure frames don't switch position :type frame_counter: int
entailment
def send_audio(self, left_channel, right_channel, frame_counter=None):
    """Queue stereo audio samples for the streaming worker.

    The left and the right channel should have the same shape.

    :param left_channel: numpy array of shape (k,) with values between
        -1.0 and 1.0.
    :param right_channel: numpy array of shape (k,) with values between
        -1.0 and 1.0.
    :param frame_counter: explicit position within the stream; provide
        this when multi-threading so frames don't switch position.
        Defaults to an internal auto-incrementing counter.
    """
    if frame_counter is None:
        # Auto-number frames for single-threaded callers.
        frame_counter = self.audio_frame_counter
        self.audio_frame_counter += 1
    self.q_audio.put((frame_counter, left_channel, right_channel))
Add the audio samples to the stream. The left and the right channel should have the same shape. :param left_channel: array containing the audio signal. :type left_channel: numpy array with shape (k, ) containing values between -1.0 and 1.0. l can be any integer :param right_channel: array containing the audio signal. :type right_channel: numpy array with shape (k, ) containing values between -1.0 and 1.0. l can be any integer :param frame_counter: frame position number within stream. Provide this when multi-threading to make sure frames don't switch position :type frame_counter: int
entailment
def import_attribute(self, path):
    """Import and return an attribute from a dotted module path.

    :param path: dotted path such as ``'os.path.join'``; everything up
        to the final dot is the module, the last segment the attribute.
    :return: the attribute object.
    """
    # rsplit splits once from the right -- clearer and cheaper than
    # re-joining all-but-the-last segments of a full split.
    module_path, attribute_name = path.rsplit('.', 1)
    module = importlib.import_module(module_path)
    return getattr(module, attribute_name)
Import an attribute from a module.
entailment
def get_next(self, request):
    """Return the URL to redirect to after the login / signup.

    Looks for a 'next' value in the session, then GET, then POST, and
    finally falls back to ``settings.LOGIN_REDIRECT_URL``. A value
    pointing at a different host is rejected (open-redirect guard) and
    replaced with the fallback.
    """
    if 'next' in request.session:
        target = request.session['next']
        del request.session['next']
    elif 'next' in request.GET:
        target = request.GET.get('next')
    elif 'next' in request.POST:
        target = request.POST.get('next')
    else:
        target = getattr(settings, 'LOGIN_REDIRECT_URL', '/')

    # Only allow redirects that stay on this host.
    netloc = urlparse.urlparse(target)[1]
    if netloc and netloc != request.get_host():
        target = getattr(settings, 'LOGIN_REDIRECT_URL', '/')
    return target
Returns a url to redirect to after the login / signup.
entailment
def inactive_response(self, request):
    """Respond to a login attempt by an inactive user account.

    Redirects to ``settings.LOGIN_INACTIVE_REDIRECT_URL`` when that is
    configured, otherwise renders the generic error response.
    """
    inactive_url = getattr(settings, 'LOGIN_INACTIVE_REDIRECT_URL', '')
    if not inactive_url:
        return self.error_to_response(
            request,
            {'error': _("This user account is marked as inactive.")})
    return HttpResponseRedirect(inactive_url)
Return an inactive message.
entailment
def create_profile(self, user, save=False, **kwargs):
    """Instantiate a profile model for ``user``.

    :param user: A user object
    :param save: If this is set, the profile will be saved to DB
        straight away
    :type save: bool
    :return: the (possibly unsaved) profile instance.
    """
    model_class = self.get_model()
    profile = model_class(user=user, **kwargs)
    if save:
        profile.save()
    return profile
Create a profile model. :param user: A user object :param save: If this is set, the profile will be saved to DB straight away :type save: bool
entailment
def get_or_create_profile(self, user, save=False, **kwargs):
    """Fetch the user's profile from DB, creating one when none exists.

    :param user: A user object
    :param save: If set, a newly created profile will be saved.
    :type save: bool
    :return: tuple ``(profile, created)`` where ``created`` tells
        whether a new profile had to be made.
    """
    model_class = self.get_model()
    try:
        return model_class.objects.get(user=user, **kwargs), False
    except model_class.DoesNotExist:
        return self.create_profile(user, save=save, **kwargs), True
Return a profile from DB or if there is none, create a new one. :param user: A user object :param save: If set, a new profile will be saved. :type save: bool
entailment
def get_session_data(self, request):
    """Return the ``(user, profile, client)`` tuple stored in session."""
    session = request.session
    user = session['%suser' % SESSION_KEY]
    profile = session['%sprofile' % SESSION_KEY]
    client = session['%sclient' % SESSION_KEY]
    return user, profile, client
Return a tuple ``(user, profile, client)`` from the session.
entailment
def send_login_signal(self, request, user, profile, client): """ Send a signal that a user logged in. This signal should be sent only if the user was *not* logged into Django. """ signals.login.send(sender=profile.__class__, user=user, profile=profile, client=client, request=request)
Send a signal that a user logged in. This signal should be sent only if the user was *not* logged into Django.
entailment
def send_connect_signal(self, request, user, profile, client): """ Send a signal that a user connected a social profile to his Django account. This signal should be sent *only* when the a new social connection was created. """ signals.connect.send(sender=profile.__class__, user=user, profile=profile, client=client, request=request)
Send a signal that a user connected a social profile to his Django account. This signal should be sent *only* when the a new social connection was created.
entailment
def request_access_token(self, params): """ Foursquare does not accept POST requests to retrieve an access token, so we'll be doing a GET request instead. """ return self.request(self.access_token_url, method="GET", params=params)
Foursquare does not accept POST requests to retrieve an access token, so we'll be doing a GET request instead.
entailment
def openid_form(parser, token): """ Render OpenID form. Allows to pre set the provider:: {% openid_form "https://www.google.com/accounts/o8/id" %} Also creates custom button URLs by concatenating all arguments after the provider's URL {% openid_form "https://www.google.com/accounts/o8/id" STATIC_URL "image/for/google.jpg" %} """ bits = get_bits(token) if len(bits) > 1: return FormNode(bits[0], bits[1:]) if len(bits) == 1: return FormNode(bits[0]) return FormNode(None)
Render OpenID form. Allows to pre set the provider:: {% openid_form "https://www.google.com/accounts/o8/id" %} Also creates custom button URLs by concatenating all arguments after the provider's URL {% openid_form "https://www.google.com/accounts/o8/id" STATIC_URL "image/for/google.jpg" %}
entailment
def get_initial_data(self, request, user, profile, client): """ Return initial data for the setup form. The function can be controlled with ``SOCIALREGISTRATION_INITIAL_DATA_FUNCTION``. :param request: The current request object :param user: The unsaved user object :param profile: The unsaved profile object :param client: The API client """ if INITAL_DATA_FUNCTION: func = self.import_attribute(INITAL_DATA_FUNCTION) return func(request, user, profile, client) return {}
Return initial data for the setup form. The function can be controlled with ``SOCIALREGISTRATION_INITIAL_DATA_FUNCTION``. :param request: The current request object :param user: The unsaved user object :param profile: The unsaved profile object :param client: The API client
entailment
def get_context(self, request, user, profile, client): """ Return additional context for the setup view. The function can be controlled with ``SOCIALREGISTRATION_SETUP_CONTEXT_FUNCTION``. :param request: The current request object :param user: The unsaved user object :param profile: The unsaved profile object :param client: The API client """ if CONTEXT_FUNCTION: func = self.import_attribute(CONTEXT_FUNCTION) return func(request, user, profile, client) return {}
Return additional context for the setup view. The function can be controlled with ``SOCIALREGISTRATION_SETUP_CONTEXT_FUNCTION``. :param request: The current request object :param user: The unsaved user object :param profile: The unsaved profile object :param client: The API client
entailment
def generate_username_and_redirect(self, request, user, profile, client): """ Generate a username and then redirect the user to the correct place. This method is called when ``SOCIALREGISTRATION_GENERATE_USERNAME`` is set. :param request: The current request object :param user: The unsaved user object :param profile: The unsaved profile object :param client: The API client """ func = self.get_username_function() user.username = func(user, profile, client) user.set_unusable_password() user.save() profile.user = user profile.save() user = profile.authenticate() self.send_connect_signal(request, user, profile, client) self.login(request, user) self.send_login_signal(request, user, profile, client) self.delete_session_data(request) return HttpResponseRedirect(self.get_next(request))
Generate a username and then redirect the user to the correct place. This method is called when ``SOCIALREGISTRATION_GENERATE_USERNAME`` is set. :param request: The current request object :param user: The unsaved user object :param profile: The unsaved profile object :param client: The API client
entailment
def get(self, request): """ When signing a new user up - either display a setup form, or generate the username automatically. """ if request.user.is_authenticated(): return HttpResponseRedirect(self.get_next(request)) try: user, profile, client = self.get_session_data(request) except KeyError: return self.error_to_response(request, dict( error=_("Social profile is missing from your session."))) if GENERATE_USERNAME: return self.generate_username_and_redirect(request, user, profile, client) form = self.get_form()(initial=self.get_initial_data(request, user, profile, client)) additional_context = self.get_context(request, user, profile, client) return self.render_to_response(dict({'form': form}, **additional_context))
When signing a new user up - either display a setup form, or generate the username automatically.
entailment
def post(self, request): """ Save the user and profile, login and send the right signals. """ if request.user.is_authenticated(): return self.error_to_response(request, dict( error=_("You are already logged in."))) try: user, profile, client = self.get_session_data(request) except KeyError: return self.error_to_response(request, dict( error=_("A social profile is missing from your session."))) form = self.get_form()(request.POST, request.FILES, initial=self.get_initial_data(request, user, profile, client)) if not form.is_valid(): additional_context = self.get_context(request, user, profile, client) return self.render_to_response(dict({'form': form}, **additional_context)) user, profile = form.save(request, user, profile, client) user = profile.authenticate() self.send_connect_signal(request, user, profile, client) self.login(request, user) self.send_login_signal(request, user, profile, client) self.delete_session_data(request) return HttpResponseRedirect(self.get_next(request))
Save the user and profile, login and send the right signals.
entailment
def post(self, request): """ Create a client, store it in the user's session and redirect the user to the API provider to authorize our app and permissions. """ request.session['next'] = self.get_next(request) client = self.get_client()() request.session[self.get_client().get_session_key()] = client url = client.get_redirect_url(request=request) logger.debug("Redirecting to %s", url) try: return HttpResponseRedirect(url) except OAuthError, error: return self.error_to_response(request, {'error': error}) except socket.timeout: return self.error_to_response(request, {'error': _('Could not connect to service (timed out)')})
Create a client, store it in the user's session and redirect the user to the API provider to authorize our app and permissions.
entailment
def get(self, request): """ Called after the user is redirected back to our application. Tries to: - Complete the OAuth / OAuth2 flow - Redirect the user to another view that deals with login, connecting or user creation. """ try: client = request.session[self.get_client().get_session_key()] logger.debug("API returned: %s", request.GET) client.complete(dict(request.GET.items())) request.session[self.get_client().get_session_key()] = client return HttpResponseRedirect(self.get_redirect()) except KeyError: return self.error_to_response(request, {'error': "Session expired."}) except OAuthError, error: return self.error_to_response(request, {'error': error}) except socket.timeout: return self.error_to_response(request, {'error': _('Could not connect to service (timed out)')})
Called after the user is redirected back to our application. Tries to: - Complete the OAuth / OAuth2 flow - Redirect the user to another view that deals with login, connecting or user creation.
entailment
def get(self, request): """ Called after authorization was granted and the OAuth flow successfully completed. Tries to: - Connect the remote account if the user is logged in already - Log the user in if a local profile of the remote account exists already - Create a user and profile object if none of the above succeed and redirect the user further to either capture some data via form or generate a username automatically """ try: client = request.session[self.get_client().get_session_key()] except KeyError: return self.error_to_response(request, {'error': "Session expired."}) # Get the lookup dictionary to find the user's profile lookup_kwargs = self.get_lookup_kwargs(request, client) # Logged in user (re-)connecting an account if request.user.is_authenticated(): try: profile = self.get_profile(**lookup_kwargs) # Make sure that there is only *one* account per profile. if not profile.user == request.user: self.delete_session_data(request) return self.error_to_response(request, { 'error': _('This profile is already connected to another user account.') }) except self.get_model().DoesNotExist: profile, created = self.get_or_create_profile(request.user, save=True, **lookup_kwargs) self.send_connect_signal(request, request.user, profile, client) return self.redirect(request) # Logged out user - let's see if we've got the identity saved already. # If so - just log the user in. 
If not, create profile and redirect # to the setup view user = self.authenticate(**lookup_kwargs) # No user existing - create a new one and redirect to the final setup view if user is None: if not ALLOW_OPENID_SIGNUPS and self.client is OpenIDClient: return self.error_to_response(request, { 'error': _('We are not currently accepting new OpenID signups.') }) user = self.create_user() profile = self.create_profile(user, **lookup_kwargs) self.store_user(request, user) self.store_profile(request, profile) self.store_client(request, client) return HttpResponseRedirect(reverse('socialregistration:setup')) # Inactive user - displaying / redirect to the appropriate place. if not user.is_active: return self.inactive_response(request) # Active user with existing profile: login, send signal and redirect self.login(request, user) profile = self.get_profile(user=user, **lookup_kwargs) self.send_login_signal(request, user, profile, client) return self.redirect(request)
Called after authorization was granted and the OAuth flow successfully completed. Tries to: - Connect the remote account if the user is logged in already - Log the user in if a local profile of the remote account exists already - Create a user and profile object if none of the above succeed and redirect the user further to either capture some data via form or generate a username automatically
entailment
def client(self, verifier=None): """ Return the correct client depending on which stage of the OAuth process we're in. """ # We're just starting out and don't have neither request nor access # token. Return the standard client if not self._request_token and not self._access_token: client = oauth.Client(self.consumer, timeout=TIMEOUT) # We're one step in, we've got the request token and can add that to # the client. if self._request_token and not self._access_token: if verifier is not None: self._request_token.set_verifier(verifier) client = oauth.Client(self.consumer, self._request_token, timeout=TIMEOUT) # Two steps in, we've got an access token and can now properly sign # our client requests with it. if self._access_token: client = oauth.Client(self.consumer, self._access_token, timeout=TIMEOUT) return client
Return the correct client depending on which stage of the OAuth process we're in.
entailment
def _get_request_token(self): """ Fetch a request token from `self.request_token_url`. """ params = { 'oauth_callback': self.get_callback_url() } response, content = self.client().request(self.request_token_url, "POST", body=urllib.urlencode(params)) content = smart_unicode(content) if not response['status'] == '200': raise OAuthError(_( u"Invalid status code %s while obtaining request token from %s: %s") % ( response['status'], self.request_token_url, content)) token = dict(urlparse.parse_qsl(content)) return oauth.Token(token['oauth_token'], token['oauth_token_secret'])
Fetch a request token from `self.request_token_url`.
entailment
def _get_access_token(self, verifier=None): """ Fetch an access token from `self.access_token_url`. """ response, content = self.client(verifier).request( self.access_token_url, "POST") content = smart_unicode(content) if not response['status'] == '200': raise OAuthError(_( u"Invalid status code %s while obtaining access token from %s: %s") % (response['status'], self.access_token_url, content)) token = dict(urlparse.parse_qsl(content)) return (oauth.Token(token['oauth_token'], token['oauth_token_secret']), token)
Fetch an access token from `self.access_token_url`.
entailment
def get_request_token(self): """ Return the request token for this API. If we've not fetched it yet, go out, request and memoize it. """ if self._request_token is None: self._request_token = self._get_request_token() return self._request_token
Return the request token for this API. If we've not fetched it yet, go out, request and memoize it.
entailment
def get_access_token(self, verifier=None): """ Return the access token for this API. If we've not fetched it yet, go out, request and memoize it. """ if self._access_token is None: self._access_token, self._access_token_dict = self._get_access_token(verifier) return self._access_token
Return the access token for this API. If we've not fetched it yet, go out, request and memoize it.
entailment
def get_redirect_url(self, **kwargs): """ Return the authorization/authentication URL signed with the request token. """ params = { 'oauth_token': self.get_request_token().key, } return '%s?%s' % (self.auth_url, urllib.urlencode(params))
Return the authorization/authentication URL signed with the request token.
entailment
def complete(self, GET): """ When redirect back to our application, try to complete the flow by requesting an access token. If the access token request fails, it'll throw an `OAuthError`. Tries to complete the flow by validating against the `GET` paramters received. """ token = self.get_access_token(verifier=GET.get('oauth_verifier', None)) return token
When redirect back to our application, try to complete the flow by requesting an access token. If the access token request fails, it'll throw an `OAuthError`. Tries to complete the flow by validating against the `GET` paramters received.
entailment
def request(self, url, method="GET", params=None, headers=None): """ Make signed requests against `url`. """ params = params or {} headers = headers or {} logger.debug("URL: %s", url) logger.debug("Method: %s", method) logger.debug("Headers: %s", headers) logger.debug("Params: %s", params) response, content = self.client().request(url, method, headers=headers, body=urllib.urlencode(params)) content = smart_unicode(content) logger.debug("Status: %s", response['status']) logger.debug("Content: %s", content) if response['status'] != '200': raise OAuthError(_( u"Invalid status code %s while requesting %s: %s") % ( response['status'], url, content)) return content
Make signed requests against `url`.
entailment
def get_redirect_url(self, state='', **kwargs): """ Assemble the URL to where we'll be redirecting the user to to request permissions. """ params = { 'response_type': 'code', 'client_id': self.client_id, 'redirect_uri': self.get_callback_url(**kwargs), 'scope': self.scope or '', 'state': state, } return '%s?%s' % (self.auth_url, urllib.urlencode(params))
Assemble the URL to where we'll be redirecting the user to to request permissions.
entailment
def request_access_token(self, params): """ Request the access token from `self.access_token_url`. The default behaviour is to use a `POST` request, but some services use `GET` requests. Individual clients can override this method to use the correct HTTP method. """ return self.request(self.access_token_url, method="POST", params=params, is_signed=False)
Request the access token from `self.access_token_url`. The default behaviour is to use a `POST` request, but some services use `GET` requests. Individual clients can override this method to use the correct HTTP method.
entailment
def _get_access_token(self, code, **params): """ Fetch an access token with the provided `code`. """ params.update({ 'code': code, 'client_id': self.client_id, 'client_secret': self.secret, 'redirect_uri': self.get_callback_url(), }) logger.debug("Params: %s", params) resp, content = self.request_access_token(params=params) content = smart_unicode(content) logger.debug("Status: %s", resp['status']) logger.debug("Content: %s", content) content = self.parse_access_token(content) if 'error' in content: raise OAuthError(_( u"Received error while obtaining access token from %s: %s") % ( self.access_token_url, content['error'])) return content
Fetch an access token with the provided `code`.
entailment
def get_access_token(self, code=None, **params): """ Return the memoized access token or go out and fetch one. """ if self._access_token is None: if code is None: raise ValueError(_('Invalid code.')) self.access_token_dict = self._get_access_token(code, **params) try: self._access_token = self.access_token_dict['access_token'] except KeyError, e: raise OAuthError("Credentials could not be validated, the provider returned no access token.") return self._access_token
Return the memoized access token or go out and fetch one.
entailment
def complete(self, GET): """ Complete the OAuth2 flow by fetching an access token with the provided code in the GET parameters. """ if 'error' in GET: raise OAuthError( _("Received error while obtaining access token from %s: %s") % ( self.access_token_url, GET['error'])) return self.get_access_token(code=GET.get('code'))
Complete the OAuth2 flow by fetching an access token with the provided code in the GET parameters.
entailment
def request(self, url, method="GET", params=None, headers=None, is_signed=True): """ Make a request against ``url``. By default, the request is signed with an access token, but can be turned off by passing ``is_signed=False``. """ params = params or {} headers = headers or {} if is_signed: params.update(self.get_signing_params()) if method.upper() == "GET": url = '%s?%s' % (url, urllib.urlencode(params)) return self.client().request(url, method=method, headers=headers) return self.client().request(url, method, body=urllib.urlencode(params), headers=headers)
Make a request against ``url``. By default, the request is signed with an access token, but can be turned off by passing ``is_signed=False``.
entailment
def request_access_token(self, params): """ Google requires correct content-type for POST requests """ return self.client().request(self.access_token_url, method="POST", body=urllib.urlencode(params), headers={'Content-Type':'application/x-www-form-urlencoded'})
Google requires correct content-type for POST requests
entailment
def readInternalC(self): """Return internal temperature value in degrees celsius.""" v = self._read32() # Ignore bottom 4 bits of thermocouple data. v >>= 4 # Grab bottom 11 bits as internal temperature data. internal = v & 0x7FF if v & 0x800: # Negative value, take 2's compliment. Compute this with subtraction # because python is a little odd about handling signed/unsigned. internal -= 4096 # Scale by 0.0625 degrees C per bit and return value. return internal * 0.0625
Return internal temperature value in degrees celsius.
entailment
def readTempC(self): """Return the thermocouple temperature value in degrees celsius.""" v = self._read32() # Check for error reading value. if v & 0x7: return float('NaN') # Check if signed bit is set. if v & 0x80000000: # Negative value, take 2's compliment. Compute this with subtraction # because python is a little odd about handling signed/unsigned. v >>= 18 v -= 16384 else: # Positive value, just shift the bits to get the value. v >>= 18 # Scale by 0.25 degrees C per bit and return value. return v * 0.25
Return the thermocouple temperature value in degrees celsius.
entailment
def readLinearizedTempC(self): """Return the NIST-linearized thermocouple temperature value in degrees celsius. See https://learn.adafruit.com/calibrating-sensors/maxim-31855-linearization for more info. """ # MAX31855 thermocouple voltage reading in mV thermocoupleVoltage = (self.readTempC() - self.readInternalC()) * 0.041276 # MAX31855 cold junction voltage reading in mV coldJunctionTemperature = self.readInternalC() coldJunctionVoltage = (-0.176004136860E-01 + 0.389212049750E-01 * coldJunctionTemperature + 0.185587700320E-04 * math.pow(coldJunctionTemperature, 2.0) + -0.994575928740E-07 * math.pow(coldJunctionTemperature, 3.0) + 0.318409457190E-09 * math.pow(coldJunctionTemperature, 4.0) + -0.560728448890E-12 * math.pow(coldJunctionTemperature, 5.0) + 0.560750590590E-15 * math.pow(coldJunctionTemperature, 6.0) + -0.320207200030E-18 * math.pow(coldJunctionTemperature, 7.0) + 0.971511471520E-22 * math.pow(coldJunctionTemperature, 8.0) + -0.121047212750E-25 * math.pow(coldJunctionTemperature, 9.0) + 0.118597600000E+00 * math.exp(-0.118343200000E-03 * math.pow((coldJunctionTemperature-0.126968600000E+03), 2.0))) # cold junction voltage + thermocouple voltage voltageSum = thermocoupleVoltage + coldJunctionVoltage # calculate corrected temperature reading based on coefficients for 3 different ranges # float b0, b1, b2, b3, b4, b5, b6, b7, b8, b9, b10; if thermocoupleVoltage < 0: b0 = 0.0000000E+00 b1 = 2.5173462E+01 b2 = -1.1662878E+00 b3 = -1.0833638E+00 b4 = -8.9773540E-01 b5 = -3.7342377E-01 b6 = -8.6632643E-02 b7 = -1.0450598E-02 b8 = -5.1920577E-04 b9 = 0.0000000E+00 elif thermocoupleVoltage < 20.644: b0 = 0.000000E+00 b1 = 2.508355E+01 b2 = 7.860106E-02 b3 = -2.503131E-01 b4 = 8.315270E-02 b5 = -1.228034E-02 b6 = 9.804036E-04 b7 = -4.413030E-05 b8 = 1.057734E-06 b9 = -1.052755E-08 elif thermocoupleVoltage < 54.886: b0 = -1.318058E+02 b1 = 4.830222E+01 b2 = -1.646031E+00 b3 = 5.464731E-02 b4 = -9.650715E-04 b5 = 8.802193E-06 b6 = -3.110810E-08 b7 = 0.000000E+00 
b8 = 0.000000E+00 b9 = 0.000000E+00 else: # TODO: handle error - out of range return 0 return (b0 + b1 * voltageSum + b2 * pow(voltageSum, 2.0) + b3 * pow(voltageSum, 3.0) + b4 * pow(voltageSum, 4.0) + b5 * pow(voltageSum, 5.0) + b6 * pow(voltageSum, 6.0) + b7 * pow(voltageSum, 7.0) + b8 * pow(voltageSum, 8.0) + b9 * pow(voltageSum, 9.0))
Return the NIST-linearized thermocouple temperature value in degrees celsius. See https://learn.adafruit.com/calibrating-sensors/maxim-31855-linearization for more info.
entailment
def escape_quotes(self, val): """ Escape any quotes in a value """ if self.is_string(val) and self._in_quotes(val, self.quote): # make sure any previously escaped quotes are not re-escaped middle = self.remove_quotes(val).replace("\\" + self.quote, self.quote) middle = middle.replace(self.quote, "\\" + self.quote) val = self.add_quotes(middle) return val
Escape any quotes in a value
entailment
def standardise_quotes(self, val): """ Change the quotes used to wrap a value to the pprint default E.g. "val" to 'val' or 'val' to "val" """ if self._in_quotes(val, self.altquote): middle = self.remove_quotes(val) val = self.add_quotes(middle) return self.escape_quotes(val)
Change the quotes used to wrap a value to the pprint default E.g. "val" to 'val' or 'val' to "val"
entailment
def process_key_dict(self, key, d, level): """ Process key value dicts e.g. METADATA "key" "value" """ # add any composite level comments comments = d.get("__comments__", {}) lines = [] self._add_type_comment(level, comments, lines) lines += [self.add_start_line(key, level)] lines += self.process_dict(d, level, comments) lines.append(self.add_end_line(level, 1, key)) return lines
Process key value dicts e.g. METADATA "key" "value"
entailment
def process_dict(self, d, level, comments): """ Process keys and values within a block """ lines = [] for k, v in d.items(): if not self.__is_metadata(k): qk = self.quoter.add_quotes(k) qv = self.quoter.add_quotes(v) line = self.__format_line(self.whitespace(level, 2), qk, qv) line += self.process_attribute_comment(comments, k) lines.append(line) return lines
Process keys and values within a block
entailment
def process_config_dict(self, key, d, level): """ Process the CONFIG block """ lines = [] for k, v in d.items(): k = "CONFIG {}".format(self.quoter.add_quotes(k.upper())) v = self.quoter.add_quotes(v) lines.append(self.__format_line(self.whitespace(level, 1), k, v)) return lines
Process the CONFIG block
entailment
def process_repeated_list(self, key, lst, level): """ Process blocks of repeated keys e.g. FORMATOPTION """ lines = [] for v in lst: k = key.upper() v = self.quoter.add_quotes(v) lines.append(self.__format_line(self.whitespace(level, 1), k, v)) return lines
Process blocks of repeated keys e.g. FORMATOPTION
entailment
def format_pair_list(self, key, pair_list, level): """ Process lists of pairs (e.g. PATTERN block) """ lines = [self.add_start_line(key, level)] list_spacer = self.spacer * (level + 2) pairs = ["{}{} {}".format(list_spacer, p[0], p[1]) for p in pair_list] lines += pairs lines.append(self.add_end_line(level, 1, key)) return lines
Process lists of pairs (e.g. PATTERN block)
entailment
def format_repeated_pair_list(self, key, root_list, level): """ Process (possibly) repeated lists of pairs e.g. POINTs blocks """ lines = [] def depth(L): return isinstance(L, (tuple, list)) and max(map(depth, L)) + 1 if depth(root_list) == 2: # single set of points only root_list = [root_list] for pair_list in root_list: lines += self.format_pair_list(key, pair_list, level) return lines
Process (possibly) repeated lists of pairs e.g. POINTs blocks
entailment
def is_hidden_container(self, key, val): """ The key is not one of the Mapfile keywords, and its values are a list """ if key in ("layers", "classes", "styles", "symbols", "labels", "outputformats", "features", "scaletokens", "composites") and isinstance(val, list): return True else: return False
The key is not one of the Mapfile keywords, and its values are a list
entailment
def pprint(self, composites): """ Print out a nicely indented Mapfile """ # if only a single composite is used then cast to list # and allow for multiple root composites if composites and not isinstance(composites, list): composites = [composites] lines = [] for composite in composites: type_ = composite["__type__"] if type_ in ("metadata", "validation"): # types are being parsed directly, and not as an attr of a parent lines += self.process_key_dict(type_, composite, level=0) else: lines += self._format(composite) result = str(self.newlinechar.join(lines)) return result
Print out a nicely indented Mapfile
entailment
def format_value(self, attr, attr_props, value): """ TODO - refactor and add more specific tests (particularly for expressions) """ if isinstance(value, bool): return str(value).upper() if any(i in ["enum"] for i in attr_props): if not isinstance(value, numbers.Number): return value.upper() # value is from a set list, no need for quote else: return value if "type" in attr_props and attr_props["type"] == "string": # and "enum" not in attr_props # check schemas for expressions and handle accordingly if self.is_expression(attr_props) and self.quoter.in_slashes(value): return value elif self.is_expression(attr_props) and (value.endswith("'i") or value.endswith('"i')): # for case insensitive regex return value else: return self.quoter.add_quotes(value) # expressions can be one of a string or an expression in brackets if any(i in ["oneOf", "anyOf"] for i in attr_props): # and check that type string is in list if "oneOf" in attr_props: options_list = attr_props["oneOf"] else: options_list = attr_props["anyOf"] if self.quoter.is_string(value): if self.quoter.in_parenthesis(value): pass elif attr == "expression" and self.quoter.in_braces(value): # don't add quotes to list expressions such as {val1, val2} pass elif attr != "text" and self.quoter.in_brackets(value): # TEXT expressions are often "[field1]-[field2]" so need to leave quotes for these pass elif value.startswith("NOT ") and self.quoter.in_parenthesis(value[4:]): value = "NOT {}".format(value[4:]) else: value = self.check_options_list(options_list, value) if isinstance(value, list): new_values = [] for v in value: if not isinstance(v, numbers.Number): v = self.quoter.add_quotes(v) new_values.append(v) value = " ".join(list(map(str, new_values))) else: value = self.quoter.escape_quotes(value) return value
TODO - refactor and add more specific tests (particularly for expressions)
entailment
def process_attribute(self, type_, attr, value, level): """ Process one of the main composite types (see the type_ value) """ attr_props = self.get_attribute_properties(type_, attr) value = self.format_value(attr, attr_props, value) line = self.__format_line(self.whitespace(level, 1), attr.upper(), value) return line
Process one of the main composite types (see the type_ value)
entailment
def process_composite_comment(self, level, comments, key): """ Process comments for composites such as MAP, LAYER etc. """ if key not in comments: comment = "" else: value = comments[key] spacer = self.whitespace(level, 0) if isinstance(value, list): comments = [self.format_comment(spacer, v) for v in value] comment = self.newlinechar.join(comments) else: comment = self.format_comment(spacer, value) return comment
Process comments for composites such as MAP, LAYER etc.
entailment
def create_animation(img_files): """ See http://pillow.readthedocs.io/en/4.2.x/handbook/image-file-formats.html?highlight=append_images#saving """ open_images = [] for fn in img_files: print(fn) im = Image.open(fn) open_images.append(im) im = open_images[0] im.save(r"C:\temp\animation.gif", save_all=True, append_images=open_images[1:], duration=120, loop=100, optimize=True)
See http://pillow.readthedocs.io/en/4.2.x/handbook/image-file-formats.html?highlight=append_images#saving
entailment
def start(self, children): """ Parses a MapServer Mapfile Parsing of partial Mapfiles or lists of composites is also possible """ composites = [] for composite_dict in children: if False and self.include_position: key_token = composite_dict[1] key_name = key_token.value.lower() composites_position = self.get_position_dict(composite_dict) composites_position[key_name] = self.create_position_dict(key_token, None) composites.append(composite_dict) # only return a list when there are multiple root composites (e.g. # several CLASSes) if len(composites) == 1: return composites[0] else: return composites
Parses a MapServer Mapfile Parsing of partial Mapfiles or lists of composites is also possible
entailment
def composite(self, t):
    """
    Handle the composite types e.g. CLASS..END

    t is a list in the form [[Token(__LAYER36, 'LAYER')], [OrderedDict([...])]]

    Returns a case-insensitive dict for the composite: child composites
    are stored under pluralised keys (e.g. "classes"), singleton
    composites (METADATA etc.) directly, and simple attributes as
    key/value entries, optionally with position and comment metadata.
    """
    if len(t) == 1:
        # metadata and values - already processed
        return t[0]
    key_token = t[0][0]
    attribute_dicts = t[1]
    if not isinstance(attribute_dicts, list):
        # always handle a list of attributes
        attribute_dicts = [attribute_dicts]
    key_name = self.key_name(key_token)
    composite_dict = CaseInsensitiveOrderedDict(CaseInsensitiveOrderedDict)
    composite_dict["__type__"] = key_name
    if self.include_position:
        pd = self.create_position_dict(key_token, None)
        composite_dict["__position__"] = pd
    if self.include_comments:
        comments_dict = composite_dict["__comments__"] = OrderedDict()
    for d in attribute_dicts:
        keys = d.keys()
        if "__type__" in keys:
            # a child composite
            k = d["__type__"]
            if k in SINGLETON_COMPOSITE_NAMES:
                composite_dict[k] = d
            else:
                # non-singleton children are collected in a list
                # under a pluralised key
                plural_key = self.plural(k)
                if plural_key not in composite_dict:
                    composite_dict[plural_key] = []
                composite_dict[plural_key].append(d)
        else:
            # simple attribute
            pos = d.pop("__position__")
            d.pop("__tokens__", None)  # tokens are no longer needed now we have the positions
            comments = d.pop("__comments__", None)
            key_name = self.get_single_key(d)
            if key_name == "config":
                # there may be several config dicts - one for each setting
                if key_name not in composite_dict:
                    # create an initial OrderedDict
                    composite_dict[key_name] = CaseInsensitiveOrderedDict(CaseInsensitiveOrderedDict)
                # populate the existing config dict
                cfg_dict = composite_dict[key_name]
                cfg_dict.update(d[key_name])
                if self.include_position:
                    if key_name not in pd:
                        pd[key_name] = OrderedDict()
                    subkey_name = self.get_single_key(d[key_name])
                    pd[key_name][subkey_name] = pos
            elif key_name == "points":
                if key_name not in composite_dict:
                    composite_dict[key_name] = d[key_name]
                else:
                    # if points are already in a feature then
                    # allow for multipart features in a nested list
                    existing_points = composite_dict[key_name]

                    # returns the nesting depth of a list/tuple structure
                    def depth(L):
                        return isinstance(L, (tuple, list)) and max(map(depth, L)) + 1

                    if depth(existing_points) == 2:
                        # wrap the existing single part so parts can be appended
                        composite_dict[key_name] = [existing_points]
                    if key_name not in composite_dict:
                        composite_dict[key_name] = []
                    composite_dict[key_name].append(d[key_name])
                if self.include_position:
                    if key_name not in pd:
                        pd[key_name] = pos
                    else:
                        # promote a single position dict to a list of positions
                        existing_pos = pd[key_name]
                        if isinstance(existing_pos, dict):
                            pd[key_name] = [existing_pos]
                        pd[key_name].append(pos)
            elif key_name in REPEATED_KEYS:
                # keys that may legitimately appear several times are
                # accumulated into lists
                if key_name not in composite_dict:
                    composite_dict[key_name] = []
                composite_dict[key_name].append(d[key_name])
                if self.include_position:
                    if key_name not in pd:
                        pd[key_name] = []
                    pd[key_name].append(pos)
            else:
                assert len(d.items()) == 1
                if self.include_position:
                    # hoist position details to composite
                    pd[key_name] = pos
                if self.include_comments and comments:
                    # hoist comments to composite
                    comments_dict[key_name] = comments
                composite_dict[key_name] = d[key_name]
    return composite_dict
Handle the composite types e.g. CLASS..END t is a list in the form [[Token(__LAYER36, 'LAYER')], [OrderedDict([...])]]
entailment
def check_composite_tokens(self, name, tokens):
    """
    Validate a KEY..END block (used for PATTERN, POINTS, and PROJECTION)
    and return the opening key token plus the tokens of the block body
    (an empty list for an empty TYPE..END block).
    """
    assert len(tokens) >= 2
    key = tokens[0]
    assert key.value.lower() == name
    assert tokens[-1].value.lower() == "end"
    # everything between the key and END; dict entries carry their
    # raw tokens under "__tokens__"
    body = tokens[1:-1]
    body_tokens = [t["__tokens__"] if isinstance(t, dict) else t for t in body]
    return key, body_tokens
Return the key and contents of a KEY..END block for PATTERN, POINTS, and PROJECTION
entailment
def process_value_pairs(self, tokens, type_):
    """
    Build a dict from a Metadata, Values, or Validation block.

    These blocks can either contain string pairs or attributes;
    attributes will already have been processed at this stage.
    """
    key, body = self.check_composite_tokens(type_, tokens)
    type_name = self.key_name(key)
    result = CaseInsensitiveOrderedDict(CaseInsensitiveOrderedDict)
    for pair in body:
        name = self.clean_string(pair[0].value).lower()
        value = self.clean_string(pair[1].value)
        if name in result.keys():
            # later duplicates silently overwrite earlier values - warn
            log.warning("A duplicate key ({}) was found in {}. Only the last value ({}) will be used. ".format(
                name, type_, value))
        result[name] = value
    if self.include_position:
        result["__position__"] = self.create_position_dict(key, body)
    result["__type__"] = type_name
    # return the processed dict so the composites function
    # works the same way
    return result
Metadata, Values, and Validation blocks can either have string pairs or attributes Attributes will already be processed
entailment
def func_call(self, t):
    """
    Handle function calls e.g. TEXT (tostring([area],"%.2f"))

    The function token's value is rewritten in place to the full
    bracketed call expression, and the same token is returned.
    """
    func, params = t
    func.value = "({}({}))".format(func.value, params)
    return func
For function calls e.g. TEXT (tostring([area],"%.2f"))
entailment
def add_metadata_comments(self, d, metadata):
    """
    Attach comments to each metadata key in *d*.

    Any duplicate keys will have been replaced with the last duplicate,
    so comments are recorded against the surviving dict key.
    """
    if len(metadata) <= 2:
        # no KEY..END body - nothing to annotate
        return d
    for sp in metadata[1:-1]:  # all metadata pairs
        first = sp.children[0]
        if isinstance(first, Token):
            # raw (unquoted) metadata key
            assert first.type == "UNQUOTED_STRING"
            raw_key = first.value
        else:
            # quoted string (double or single)
            raw_key = first.children[0].value
        # clean it to match the dict key
        clean_key = self._mapfile_todict.clean_string(raw_key).lower()
        assert clean_key in d.keys()
        d["__comments__"][clean_key] = self.get_comments(sp.meta)
    return d
Any duplicate keys will be replaced with the last duplicate along with comments
entailment
def assign_comments(self, tree, comments):
    """
    Capture any comments in the tree

    header_comments stores comments preceding a node

    Builds ``self.comments`` (comment strings sorted by line) and
    ``self.idx`` (one entry per line number, giving the index of the
    comment on or following that line), then walks the tree via
    ``_assign_comments`` to attach them.
    """
    comments = list(comments)
    comments.sort(key=lambda c: c.line)
    idx_by_line = {0: 0}  # {line_no: comment_idx}
    for i, c in enumerate(comments):
        # only the first comment on a line is indexed
        if c.line not in idx_by_line:
            idx_by_line[c.line] = i
    idx = []
    # convert comment tokens to strings, and remove any line breaks
    self.comments = [c.value.strip() for c in comments]
    last_comment_line = max(idx_by_line.keys())
    # make a list with an entry for each line
    # number associated with a comment list index
    # (walked backwards so gaps inherit the index of the
    # next commented line)
    for i in range(last_comment_line, 0, -1):
        if i in idx_by_line:
            # associate line with new comment
            idx.append(idx_by_line[i])
        else:
            # associate line with following comment
            idx.append(idx[-1])
    idx.append(0)
    # line numbers start from 1
    idx.reverse()
    self.idx = idx
    self._assign_comments(tree, 0)
Capture any comments in the tree header_comments stores comments preceding a node
entailment
def parse(self, text, fn=None):
    """
    Parse the Mapfile text and return the resulting parse tree.

    :param text: the Mapfile content to parse
    :param fn: optional filename, used for INCLUDE expansion and logging
    :raises ParseError, UnexpectedInput: logged, then re-raised
    """
    if PY2 and not isinstance(text, unicode):
        # specify Unicode for Python 2.7
        text = unicode(text, 'utf-8')
    if self.expand_includes:
        text = self.load_includes(text, fn=fn)
    try:
        # clear any comments from a previous parse
        del self._comments[:]
        tree = self.lalr.parse(text)
        if self.include_comments:
            self.assign_comments(tree, self._comments)
        return tree
    except (ParseError, UnexpectedInput) as ex:
        if fn:
            log.error("Parsing of {} unsuccessful".format(fn))
        else:
            log.error("Parsing of Mapfile unsuccessful")
        log.info(ex)
        raise
Parse the Mapfile
entailment
def add_child(graph, child_id, child_label, parent_id, colour):
    """
    Add a filled polygon node for the child to the graph and connect
    it to its parent with an edge.

    See http://www.graphviz.org/doc/info/shapes.html#polygon
    """
    child = pydot.Node(
        child_id,
        style="filled",
        fillcolor=colour,
        label=child_label,
        shape="polygon",
        fontname=FONT,
    )
    graph.add_node(child)
    edge = pydot.Edge(parent_id, child)
    graph.add_edge(edge)
http://www.graphviz.org/doc/info/shapes.html#polygon
entailment
def get_schema_path(self, schemas_folder): """ Return a file protocol URI e.g. file:///D:/mappyfile/mappyfile/schemas/ on Windows and file:////home/user/mappyfile/mappyfile/schemas/ on Linux """ # replace any Windows path back slashes with forward slashes schemas_folder = schemas_folder.replace("\\", "/") # HACK Python 2.7 on Linux seems to remove the root slash # so add this back in if schemas_folder.startswith("/"): schemas_folder = "/" + schemas_folder host = "" root_schema_path = "file://{}/{}".format(host, schemas_folder) + "/" return root_schema_path
Return a file protocol URI e.g. file:///D:/mappyfile/mappyfile/schemas/ on Windows and file:////home/user/mappyfile/mappyfile/schemas/ on Linux
entailment
def get_schema_validator(self, schema_name):
    """
    Return a Draft4 validator for the named JSON schema.

    Schemas and their ref resolvers are cached after the first load.

    Had to remove the id property from map.json or it uses URLs for
    validation - see various issues at
    https://github.com/Julian/jsonschema/pull/306
    """
    if schema_name in self.schemas:
        jsn_schema, resolver = self.schemas[schema_name]
    else:
        schema_file = self.get_schema_file(schema_name)
        with open(schema_file) as f:
            try:
                jsn_schema = json.load(f)
            except ValueError as ex:
                log.error("Could not load %s", schema_file)
                raise ex
        root_schema_path = self.get_schema_path(self.get_schemas_folder())
        resolver = jsonschema.RefResolver(root_schema_path, None)
        # cache the schema for future use
        self.schemas[schema_name] = (jsn_schema, resolver)
    return jsonschema.Draft4Validator(schema=jsn_schema, resolver=resolver)
Had to remove the id property from map.json or it uses URLs for validation See various issues at https://github.com/Julian/jsonschema/pull/306
entailment
def create_message(self, rootdict, path, error, add_comments):
    """
    Build a validation error message dict, optionally adding a comment
    to the offending object in *rootdict*.

    path is the path to the error object; it can be empty if the error is
    in the root object, or reference an object in a list e.g. [u'layers', 0]
    http://python-jsonschema.readthedocs.io/en/latest/errors/#jsonschema.exceptions.ValidationError.absolute_path

    Unfortunately it is not currently possible to get the name of the
    failing property from the JSONSchema error object, even though it is
    in the error message. See https://github.com/Julian/jsonschema/issues/119
    """
    if not path:
        # error applies to the root type
        d = rootdict
        key = d["__type__"]
    elif isinstance(path[-1], int):
        # the error is on an object in a list
        d = utils.findkey(rootdict, *path)
        key = d["__type__"]
    else:
        d = utils.findkey(rootdict, *path[:-1])
        key = path[-1]
    message_text = "ERROR: Invalid value in {}".format(key.upper())
    if add_comments:
        # add a comment to the dict structure
        if "__comments__" not in d:
            d["__comments__"] = OrderedDict()
        d["__comments__"][key] = "# {}".format(message_text)
    result = {"error": error.message, "message": message_text}
    # add in details of the error line, when the Mapfile was parsed
    # to include position details
    if "__position__" in d:
        if not path:
            # position for the root object is stored in the root of the dict
            pd = d["__position__"]
        else:
            pd = d["__position__"][key]
        result["line"] = pd.get("line")
        result["column"] = pd.get("column")
    return result
Add a validation comment to the dictionary path is the path to the error object, it can be empty if the error is in the root object http://python-jsonschema.readthedocs.io/en/latest/errors/#jsonschema.exceptions.ValidationError.absolute_path It can also reference an object in a list e.g. [u'layers', 0] Unfortunately it is not currently possible to get the name of the failing property from the JSONSchema error object, even though it is in the error message. See https://github.com/Julian/jsonschema/issues/119
entailment
def validate(self, value, add_comments=False, schema_name="map"):
    """
    Validate a dict (or a list of dicts) against a JSON schema.

    :param value: a single dict, or a list of dicts, to validate
    :param add_comments: when True, a comment describing each error is
        added to the offending object's ``__comments__`` dict
    :param schema_name: name of the schema to validate against
    :returns: a list of error message dicts (empty when the value is valid)
    """
    validator = self.get_schema_validator(schema_name)
    error_messages = []
    if isinstance(value, list):
        # validate each dict and accumulate all error messages
        for d in value:
            error_messages += self._validate(d, validator, add_comments, schema_name)
    else:
        error_messages = self._validate(value, validator, add_comments, schema_name)
    return error_messages
verbose - also return the jsonschema error details
entailment