code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
needs_recommendation = True full_indexes = [] partial_indexes = [] coverage = "unknown" if indexes is not None: for index_key in indexes.keys(): index = indexes[index_key] index_report = self._generate_index_report(index, query_analysis) if index_report['supported'] is True: if index_report['coverage'] == 'full': full_indexes.append(index_report) if index_report['idealOrder']: needs_recommendation = False elif index_report['coverage'] == 'partial': partial_indexes.append(index_report) if len(full_indexes) > 0: coverage = "full" elif (len(partial_indexes)) > 0: coverage = "partial" elif query_analysis['supported']: coverage = "none" # INDEX ANALYSIS return OrderedDict([('indexStatus', coverage), ('fullIndexes', full_indexes), ('partialIndexes', partial_indexes)])
def _generate_index_analysis(self, query_analysis, indexes)
Compares a query signature to the index cache to identify complete and partial indexes available to the query
3.071653
2.971103
1.033843
all_fields = [] equiv_fields = [] sort_fields = [] range_fields = [] for query_field in query_analysis['analyzedFields']: all_fields.append(query_field['fieldName']) if query_field['fieldType'] is EQUIV_TYPE: equiv_fields.append(query_field['fieldName']) elif query_field['fieldType'] is SORT_TYPE: sort_fields.append(query_field['fieldName']) elif query_field['fieldType'] is RANGE_TYPE: range_fields.append(query_field['fieldName']) max_equiv_seq = len(equiv_fields) max_sort_seq = max_equiv_seq + len(sort_fields) max_range_seq = max_sort_seq + len(range_fields) coverage = 'none' query_fields_covered = 0 query_field_count = query_analysis['fieldCount'] supported = True ideal_order = True for index_field in index['key']: field_name = index_field[0] if index_field[1] == '2d': supported = False break if field_name not in all_fields: break if query_fields_covered == 0: coverage = 'partial' if query_fields_covered < max_equiv_seq: if field_name not in equiv_fields: ideal_order = False elif query_fields_covered < max_sort_seq: if field_name not in sort_fields: ideal_order = False elif query_fields_covered < max_range_seq: if field_name not in range_fields: ideal_order = False query_fields_covered += 1 if query_fields_covered == query_field_count: coverage = 'full' # INDEX REPORT return OrderedDict({ 'coverage': coverage, 'idealOrder': ideal_order, 'queryFieldsCovered': query_fields_covered, 'index': index, 'supported': supported })
def _generate_index_report(self, index, query_analysis)
Analyzes an existing index against the results of query analysis
2.286639
2.263443
1.010248
index_rec = '{' for query_field in query_analysis['analyzedFields']: if query_field['fieldType'] is EQUIV_TYPE: if len(index_rec) is not 1: index_rec += ', ' index_rec += '"' + query_field['fieldName'] + '": 1' for query_field in query_analysis['analyzedFields']: if query_field['fieldType'] is SORT_TYPE: if len(index_rec) is not 1: index_rec += ', ' index_rec += '"' + query_field['fieldName'] + '": 1' for query_field in query_analysis['analyzedFields']: if query_field['fieldType'] is RANGE_TYPE: if len(index_rec) is not 1: index_rec += ', ' index_rec += '"' + query_field['fieldName'] + '": 1' index_rec += '}' # RECOMMENDATION return OrderedDict([('index',index_rec), ('shellCommand', self.generate_shell_command(collection_name, index_rec))])
def _generate_recommendation(self, query_analysis, db_name, collection_name)
Generates an ideal query recommendation
2.301091
2.278343
1.009984
def add_query_occurrence(self, report):
    """Adds a report to the report aggregation.

    If an aggregated report with the same namespace and query mask
    already exists, the new occurrence is merged into it; otherwise a
    new aggregated entry is created with count 1.
    """
    initial_millis = int(report['parsed']['stats']['millis'])
    mask = report['queryMask']
    existing_report = self._get_existing_report(mask, report)
    if existing_report is not None:
        self._merge_report(existing_report, report)
    else:
        # 'ts' is optional in the parsed entry; fall back to None.
        time = None
        if 'ts' in report['parsed']:
            time = report['parsed']['ts']
        self._reports.append(OrderedDict([
            ('namespace', report['namespace']),
            ('lastSeenDate', time),
            ('queryMask', mask),
            ('supported', report['queryAnalysis']['supported']),
            ('indexStatus', report['indexStatus']),
            ('recommendation', report['recommendation']),
            ('stats', OrderedDict([('count', 1),
                                   ('totalTimeMillis', initial_millis),
                                   ('avgTimeMillis', initial_millis)]))]))
4.601306
4.521416
1.017669
def get_reports(self):
    """Returns a minimized version of the aggregation.

    Reports are ordered by total accumulated time, slowest first.
    """
    def total_millis(entry):
        return entry['stats']['totalTimeMillis']

    return sorted(self._reports, key=total_millis, reverse=True)
9.010783
7.937954
1.135152
for existing_report in self._reports: if existing_report['namespace'] == report['namespace']: if mask == existing_report['queryMask']: return existing_report return None
def _get_existing_report(self, mask, report)
Returns the aggregated report that matches report
4.217524
3.861196
1.092284
time = None if 'ts' in new['parsed']: time = new['parsed']['ts'] if (target.get('lastSeenDate', None) and time and target['lastSeenDate'] < time): target['lastSeenDate'] = time query_millis = int(new['parsed']['stats']['millis']) target['stats']['totalTimeMillis'] += query_millis target['stats']['count'] += 1 target['stats']['avgTimeMillis'] = target['stats']['totalTimeMillis'] / target['stats']['count']
def _merge_report(self, target, new)
Merges a new report into the target report
4.05788
3.897813
1.041066
def parse(self, input):
    """Passes *input* to each QueryLineHandler in use.

    The first handler that returns a non-None result wins; a handler
    that raises is treated as "no match" and the next one is tried.
    Returns None when no handler matches.
    """
    for handler in self._line_handlers:
        try:
            result = handler.handle(input)
        except Exception:
            result = None
        if result is not None:
            return result
    return None
4.26509
2.909758
1.465789
def generate_query_report(self, db_uri, query, db_name, collection_name):
    """Analyzes a single query by delegating to the query analyzer."""
    return self._query_analyzer.generate_query_report(db_uri, query, db_name, collection_name)
3.723878
3.737906
0.996247
def analyze_profile(self):
    """Analyzes queries found in each database's system.profile
    collection and writes an aggregated report to stdout.

    (The original docstring said "log file"; this method reads the
    profiler collection, not a log.)
    """
    profile_parser = ProfileParser()
    databases = self._get_requested_databases()
    connection = pymongo.MongoClient(self._db_uri,
                                     document_class=OrderedDict,
                                     read_preference=pymongo.ReadPreference.PRIMARY_PREFERRED)

    # No explicit namespace filter: fall back to every database on the
    # server.
    if databases == []:
        try:
            databases = connection.database_names()
        except:
            # NOTE(review): bare except; narrows poorly but preserved.
            message = "Error: Could not list databases on server. Please " \
                      + "check the auth components of your URI or provide " \
                      + "a namespace filter with -n.\n"
            sys.stderr.write(message)
            databases = []

    # Never profile Mongo's internal databases.
    for ignore_db in IGNORE_DBS:
        if ignore_db in databases:
            databases.remove(ignore_db)

    for database in databases:
        db = connection[database]
        profile_entries = db['system.profile'].find()
        for profile_entry in profile_entries:
            self._process_query(profile_entry, profile_parser)

    self._output_aggregated_report(sys.stdout)
    return 0
4.676979
4.549959
1.027917
def watch_profile(self):
    """Tails the system.profile collection of a single database,
    aggregating observed queries and periodically refreshing the
    report on stderr; the final report goes to stdout.

    Enables profiling (level 1) if it is off, and restores the
    original level on exit. Requires exactly one target database.
    """
    profile_parser = ProfileParser()
    databases = self._get_requested_databases()
    connection = pymongo.MongoClient(self._db_uri,
                                     document_class=OrderedDict,
                                     read_preference=pymongo.ReadPreference.PRIMARY_PREFERRED)
    enabled_profile = False  # NOTE(review): assigned but never read
    if databases == []:
        try:
            databases = connection.database_names()
        except:
            message = "Error: Could not list databases on server. Please " \
                      + "check the auth components of your URI.\n"
            sys.stderr.write(message)
            databases = []
        for ignore_db in IGNORE_DBS:
            if ignore_db in databases:
                databases.remove(ignore_db)
    if len(databases) != 1:
        message = "Error: Please use namespaces (-n) to specify a single " \
                  + "database for profile watching.\n"
        sys.stderr.write(message)
        return 1
    database = databases[0]
    db = connection[database]
    initial_profile_level = db.profiling_level()
    # NOTE(review): 'is' works here only because pymongo.OFF is a small
    # int; '==' would be the safe comparison.
    if initial_profile_level is pymongo.OFF:
        message = "Profile level currently 0. Dex is setting profile " \
                  + "level 1. To run --watch at profile level 2, " \
                  + "enable profile level 2 before running Dex.\n"
        sys.stderr.write(message)
        db.set_profiling_level(DEFAULT_PROFILE_LEVEL)

    output_time = time.time() + WATCH_DISPLAY_REFRESH_SECONDS
    try:
        for profile_entry in self._tail_profile(db, WATCH_INTERVAL_SECONDS):
            self._process_query(profile_entry, profile_parser)
            # Refresh the on-screen report at a fixed cadence.
            if time.time() >= output_time:
                self._output_aggregated_report(sys.stderr)
                output_time = time.time() + WATCH_DISPLAY_REFRESH_SECONDS
    except KeyboardInterrupt:
        sys.stderr.write("Interrupt received\n")
    finally:
        self._output_aggregated_report(sys.stdout)
        # Restore profiling to its pre-Dex state.
        if initial_profile_level is pymongo.OFF:
            message = "Dex is resetting profile level to initial value " \
                      + "of 0. You may wish to drop the system.profile " \
                      + "collection.\n"
            sys.stderr.write(message)
            db.set_profiling_level(initial_profile_level)
    return 0
3.8979
3.844559
1.013874
def analyze_logfile(self, logfile_path):
    """Analyzes queries from a given log file.

    Opens *logfile_path*, delegates line processing to
    analyze_logfile_object, then writes the aggregated report to
    stdout.
    """
    self._run_stats['logSource'] = logfile_path
    # 'with' guarantees the file handle is closed even on error.
    with open(logfile_path) as obj:
        self.analyze_logfile_object(obj)
    self._output_aggregated_report(sys.stdout)
    return 0
10.87793
11.40275
0.953974
def analyze_logfile_object(self, file_object):
    """Analyzes queries from a given log file object, line by line.

    Honors self._timeout (minutes): processing stops and the run
    stats are flagged 'timedOut' once the deadline passes.
    """
    log_parser = LogParser()
    if self._start_time is None:
        self._start_time = datetime.now()
        # A timeout of 0 means "no deadline".
        if self._timeout != 0:
            self._end_time = self._start_time + timedelta(minutes=self._timeout)
        else:
            self._end_time = None
    # For each line in the logfile ...
    for line in file_object:
        if self._end_time is not None and datetime.now() > self._end_time:
            self._run_stats['timedOut'] = True
            self._run_stats['timeoutInMinutes'] = self._timeout
            break
        self._process_query(line, log_parser)
    return 0
2.94349
2.775937
1.060359
def watch_logfile(self, logfile_path):
    """Analyzes queries from the tail of a given log file.

    Periodically refreshes the aggregated report on stderr while
    tailing; the final report goes to stdout on exit/interrupt.

    Fix: the original set ``firstLine = True`` but never cleared it,
    so ``timeRange['start']`` was overwritten on *every* line and
    always equalled ``timeRange['end']``. The flag is now cleared
    after the first line.
    """
    self._run_stats['logSource'] = logfile_path
    log_parser = LogParser()

    # For each new line in the logfile ...
    output_time = time.time() + WATCH_DISPLAY_REFRESH_SECONDS
    try:
        first_line = True
        for line in self._tail_file(open(logfile_path),
                                    WATCH_INTERVAL_SECONDS):
            if first_line:
                self._run_stats['timeRange']['start'] = get_line_time(line)
                first_line = False
            self._process_query(line, log_parser)
            self._run_stats['timeRange']['end'] = get_line_time(line)
            if time.time() >= output_time:
                self._output_aggregated_report(sys.stderr)
                output_time = time.time() + WATCH_DISPLAY_REFRESH_SECONDS
    except KeyboardInterrupt:
        sys.stderr.write("Interrupt received\n")
    finally:
        self._output_aggregated_report(sys.stdout)
    return 0
4.124421
3.867356
1.06647
def _tail_file(self, file, interval):
    """Tails a file, yielding lines as they are appended.

    Seeks to the current end of *file* and polls every *interval*
    seconds. This is an infinite generator; it never returns on its
    own.
    """
    # Start at the end: only content appended after this call is seen.
    file.seek(0, 2)
    while True:
        where = file.tell()
        line = file.readline()
        if not line:
            # Nothing new yet; wait, then rewind in case readline
            # consumed a partial line still being written.
            time.sleep(interval)
            file.seek(where)
        else:
            yield line
2.391914
2.537735
0.942539
def _tail_profile(self, db, interval):
    """Tails the system.profile collection, yielding new documents.

    Blocks until at least one profile document exists, then repeatedly
    polls for documents at or after the newest timestamp seen. This is
    an infinite generator; it never returns on its own.
    """
    # Wait for profiling to produce a first document.
    latest_doc = None
    while latest_doc is None:
        time.sleep(interval)
        latest_doc = db['system.profile'].find_one()

    current_time = latest_doc['ts']

    while True:
        time.sleep(interval)
        # NOTE(review): $gte re-reads documents that share the newest
        # ts, so a document can be yielded more than once — confirm
        # downstream aggregation tolerates duplicates.
        cursor = db['system.profile'].find({'ts': {'$gte': current_time}}).sort('ts', pymongo.ASCENDING)
        for doc in cursor:
            current_time = doc['ts']
            yield doc
2.649857
2.361369
1.12217
namespace_split = namespace.split('.', 1) if len(namespace_split) is 1: # we treat a single element as a collection name. # this also properly tuplefies '*' namespace_tuple = ('*', namespace_split[0]) elif len(namespace_split) is 2: namespace_tuple = (namespace_split[0],namespace_split[1]) else: return None return namespace_tuple
def _tuplefy_namespace(self, namespace)
Converts a mongodb namespace to a db, collection tuple
4.234667
3.716973
1.139278
def _validate_namespaces(self, input_namespaces):
    """Converts a list of db namespaces to a list of namespace tuples,
    supporting basic commandline wildcards.

    Warnings for invalid or duplicate namespaces go to stderr; a lone
    '*' (or an empty input) yields an empty list, meaning "all".
    """
    output_namespaces = []
    if input_namespaces == []:
        return output_namespaces
    elif '*' in input_namespaces:
        if len(input_namespaces) > 1:
            warning = 'Warning: Multiple namespaces are '
            warning += 'ignored when one namespace is "*"\n'
            sys.stderr.write(warning)
        return output_namespaces
    else:
        for namespace in input_namespaces:
            # Python 2 'unicode' — this module predates py3.
            if not isinstance(namespace, unicode):
                namespace = unicode(namespace)
            namespace_tuple = self._tuplefy_namespace(namespace)
            if namespace_tuple is None:
                warning = 'Warning: Invalid namespace ' + namespace
                warning += ' will be ignored\n'
                sys.stderr.write(warning)
            else:
                if namespace_tuple not in output_namespaces:
                    output_namespaces.append(namespace_tuple)
                else:
                    warning = 'Warning: Duplicate namespace ' + namespace
                    warning += ' will be ignored\n'
                    sys.stderr.write(warning)
    return output_namespaces
2.303374
2.277004
1.011581
def _namespace_requested(self, namespace):
    """Checks whether the requested_namespaces contain the provided
    namespace.

    Namespaces on ignored databases/collections are never considered
    requested.
    """
    if namespace is None:
        return False
    namespace_tuple = self._tuplefy_namespace(namespace)
    if namespace_tuple[0] in IGNORE_DBS:
        return False
    elif namespace_tuple[1] in IGNORE_COLLECTIONS:
        return False
    else:
        return self._tuple_requested(namespace_tuple)
3.961488
4.048732
0.978452
def _tuple_requested(self, namespace_tuple):
    """Helper for _namespace_requested. Supports limited wildcards.

    Returns True when the (db, collection) tuple matches any requested
    namespace; an empty request list matches everything.

    Fixes: the None guard originally ran *after* indexing the tuple
    (so None raised TypeError instead of returning False), and the
    empty-list test used ``len(...) is 0`` int identity.
    """
    if namespace_tuple is None:
        # Guard first: the normalization below indexes the tuple.
        return False

    # Normalize both elements to unicode for comparison (py2 module).
    if not isinstance(namespace_tuple[0], unicode):
        encoded_db = unicode(namespace_tuple[0])
    else:
        encoded_db = namespace_tuple[0]
    if not isinstance(namespace_tuple[1], unicode):
        encoded_coll = unicode(namespace_tuple[1])
    else:
        encoded_coll = namespace_tuple[1]

    if len(self._requested_namespaces) == 0:
        return True
    for requested_namespace in self._requested_namespaces:
        if ((requested_namespace[0] == u'*'
                or encoded_db == requested_namespace[0])
                and (requested_namespace[1] == u'*'
                     or encoded_coll == requested_namespace[1])):
            return True
    return False
2.370303
2.313529
1.02454
requested_databases = [] if ((self._requested_namespaces is not None) and (self._requested_namespaces != [])): for requested_namespace in self._requested_namespaces: if requested_namespace[0] is '*': return [] elif requested_namespace[0] not in IGNORE_DBS: requested_databases.append(requested_namespace[0]) return requested_databases
def _get_requested_databases(self)
Returns a list of databases requested, not including ignored dbs
3.25865
2.820349
1.155407
def retrieve(self, state=None, favorite=None, tag=None, contentType=None, sort=None, detailType=None, search=None, domain=None, since=None, count=None, offset=None):
    """Retrieve the list of your articles.

    See: https://getpocket.com/developer/docs/v3/retrieve

    All parameters are optional filters; the non-None ones are picked
    up from this frame by _get_method_params inside _make_request.

    :param state: filter by state
    :param favorite: only fetch favorite
    :param tag: filter by tag or _untagged_
    :param contentType: get article, video or image
    :param sort: sort by provided value
    :param detailType: defines the response details to return
    :param search: search term
    :param domain: search domain
    :param since: search modified since unix timestamp
    :param count: the number of required items
    :param offset: the position to start results from
    :return: A dictionary containing the response result
    :rtype: dict
    """
    return self._make_request('get')
20.95281
29.484743
0.710632
def bulk_add(self, item_id, ref_id=None, tags=None, time=None, title=None, url=None):
    """Add an item to list.

    See: https://getpocket.com/developer/docs/v3/modify

    The parameters of this call are captured by _add_action via frame
    inspection; nothing is sent until the bulk is committed.

    :param item_id: int
    :param ref_id: tweet_id
    :param tags: list of tags
    :param time: time of action
    :param title: given title
    :param url: item url
    :return: self for chaining
    :rtype: Pocket
    """
    self._add_action('add')
    return self
18.533422
27.051762
0.68511
kwargs = self._get_method_params() kwargs['action'] = action self._bulk_actions.append(kwargs)
def _add_action(self, action)
Register an action into bulk :param action: action name
8.927718
8.537368
1.045723
def _make_request(self, action):
    """Perform the request.

    :param action: action name, or a list of bulk-action dicts (in
        which case a single 'send' request carries them all)
    :return: a dict containing the request result
    :rtype: dict
    :raises PocketException: (via _make_exception) on non-200 responses
    """
    if isinstance(action, list):
        kwargs = {'actions': action}
        action = 'send'
    else:
        # Harvest the caller's non-None keyword arguments.
        kwargs = self._get_method_params()
    kwargs.update({
        'consumer_key': self._consumer_key,
        'access_token': self._access_token
    })

    response = requests.post(
        self._get_url(action),
        json=kwargs,
        headers=self._get_headers()
    )
    if response.status_code != requests.codes.ok:
        raise self._make_exception(response)
    return response.json()
2.689395
2.78675
0.965065
def _get_method_params(self):
    """This method makes reading and filtering each method implemented
    in this class a more general approach. It reads the previous frame
    from Python and filters the params passed to the caller of
    _make_request.

    NOTE(review): the frame depth (2) hard-codes the call chain
    public-method -> _make_request/_add_action -> here; inserting any
    intermediate call breaks it.

    :return: a dictionary of caller's parameters and values (None
        values and 'self' excluded)
    :rtype: dict
    """
    caller = sys._getframe(2)
    var_names = list(caller.f_code.co_varnames)
    caller_locals = caller.f_locals
    var_names.remove('self')
    kwargs = {key: value for key, value in caller_locals.items()
              if key in var_names and value is not None}
    return kwargs
2.97789
3.050834
0.97609
def _make_exception(self, response):
    """In case of exception, construct the exception object that holds
    all important values returned by the response.

    Auth-related X-Error-Code values map to PocketAutException;
    everything else to PocketException.

    :return: The exception instance (not raised here)
    :rtype: PocketException
    """
    headers = response.headers

    limit_headers = []
    if 'X-Limit-User-Limit' in headers:
        # NOTE(review): assumes all six limit headers are present
        # whenever the first one is — a partial set would KeyError;
        # confirm against the Pocket API contract.
        limit_headers = [
            headers['X-Limit-User-Limit'],
            headers['X-Limit-User-Remaining'],
            headers['X-Limit-User-Reset'],
            headers['X-Limit-Key-Limit'],
            headers['X-Limit-Key-Remaining'],
            headers['X-Limit-Key-Reset']
        ]

    x_error_code = int(headers['X-Error-Code'])
    exc = PocketException
    if x_error_code in self.auth_error_codes:
        exc = PocketAutException
    return exc(
        response.status_code,
        x_error_code,
        headers['X-Error'],
        *limit_headers
    )
2.831619
2.685695
1.054334
def set_money_currency(self, money_currency):
    """Set the active currency code.

    :type money_currency: str
    :raises CurrencyDoesNotExist: if the code is not a known currency
    """
    if money_currency in self.money_formats:
        self.money_currency = money_currency
    else:
        raise CurrencyDoesNotExist
5.797209
4.991499
1.161416
def get_money_format(self, amount):
    """Return *amount* rendered in the active currency's money format.

    :type amount: int or float or str

    Usage:
    >>> currency = Currency('USD')
    >>> currency.get_money_format(13)
    >>> '$13'

    :rtype: str
    """
    active = self.get_money_currency()
    template = self.money_formats[active]['money_format']
    return template.format(amount=amount)
6.818545
7.995805
0.852765
def get_money_with_currency_format(self, amount):
    """Return *amount* rendered with the currency suffix included.

    :type amount: int or float or str

    Usage:
    >>> currency = Currency('USD')
    >>> currency.get_money_with_currency_format(13)
    >>> '$13 USD'

    :rtype: str
    """
    active = self.get_money_currency()
    template = self.money_formats[active]['money_with_currency_format']
    return template.format(amount=amount)
5.602801
6.494735
0.862668
def get_config(self, retrieve="all"):
    """get_config implementation for FortiOS.

    :param retrieve: one of "all", "startup", "running", "candidate".
        FortiOS has no distinct startup/candidate configs, so those
        slots are always empty strings.
    """
    get_startup = retrieve == "all" or retrieve == "startup"
    get_running = retrieve == "all" or retrieve == "running"
    get_candidate = retrieve == "all" or retrieve == "candidate"

    if retrieve == "all" or get_running:
        result = self._execute_command_with_vdom('show')
        text_result = '\n'.join(result)
        return {
            'startup': u"",
            'running': py23_compat.text_type(text_result),
            'candidate': u"",
        }
    elif get_startup or get_candidate:
        return {
            'startup': u"",
            'running': u"",
            'candidate': u"",
        }
    # NOTE(review): an unrecognized *retrieve* value falls off the end
    # and returns None implicitly — confirm whether it should raise.
3.260185
3.082981
1.057478
def connect(self):
    """Connects to the Deluge instance.

    Detects the daemon version first, since Deluge 2 requires a
    client_version argument on daemon.login.
    """
    self._connect()
    logger.debug('Connected to Deluge, detecting daemon version')
    self._detect_deluge_version()
    logger.debug('Daemon version {} detected, logging in'.format(self.deluge_version))
    if self.deluge_version == 2:
        result = self.call('daemon.login', self.username, self.password, client_version='deluge-client')
    else:
        result = self.call('daemon.login', self.username, self.password)
    logger.debug('Logged in with value %r' % result)
    self.connected = True
4.057703
3.537692
1.146992
def disconnect(self):
    """Disconnect from deluge.

    A no-op when there is no active connection.
    """
    if not self.connected:
        return
    self._socket.close()
    self._socket = None
    self.connected = False
3.500774
3.411558
1.026151
def call(self, method, *args, **kwargs):
    """Calls an RPC function.

    On transport errors, retries once after a reconnect when
    self.automatic_reconnect is set; otherwise the error propagates.

    :param method: RPC method name
    :raises FailedToReconnectException: when the reconnect itself, or
        the retried call after a reconnect, fails
    """
    tried_reconnect = False
    # At most two attempts: the original call plus one retry after a
    # successful reconnect.
    for _ in range(2):
        try:
            self._send_call(self.deluge_version, self.deluge_protocol_version, method, *args, **kwargs)
            return self._receive_response(self.deluge_version, self.deluge_protocol_version)
        except (socket.error, ConnectionLostException, CallTimeoutException):
            if self.automatic_reconnect:
                if tried_reconnect:
                    raise FailedToReconnectException()
                else:
                    try:
                        self.reconnect()
                    except (socket.error, ConnectionLostException, CallTimeoutException):
                        raise FailedToReconnectException()
                    tried_reconnect = True
            else:
                raise
3.32606
3.34442
0.99451
def to_array(self):
    """
    Serializes this StickerSet to a dictionary.

    :return: dictionary representation of this object.
    :rtype: dict
    """
    array = super(StickerSet, self).to_array()
    array['name'] = u(self.name)  # py2: type unicode, py3: type str
    array['title'] = u(self.title)  # py2: type unicode, py3: type str
    array['contains_masks'] = bool(self.contains_masks)  # type bool
    array['stickers'] = self._as_array(self.stickers)  # type list of Sticker
    return array
2.503221
2.228515
1.123268
def from_array(array):
    """
    Deserialize a new StickerSet from a given dictionary.

    :return: new StickerSet instance, or None for empty/None input.
    :rtype: StickerSet
    """
    if array is None or not array:
        return None
    # end if
    assert_type_or_raise(array, dict, parameter_name="array")
    # Local import avoids a circular dependency at module load time.
    from pytgbot.api_types.receivable.media import Sticker

    data = {}
    data['name'] = u(array.get('name'))
    data['title'] = u(array.get('title'))
    data['contains_masks'] = bool(array.get('contains_masks'))
    data['stickers'] = Sticker.from_array_list(array.get('stickers'), list_level=1)
    data['_raw'] = array
    return StickerSet(**data)
2.96331
2.419196
1.224915
def to_array(self):
    """
    Serializes this MaskPosition to a dictionary.

    :return: dictionary representation of this object.
    :rtype: dict
    """
    array = super(MaskPosition, self).to_array()
    array['point'] = u(self.point)  # py2: type unicode, py3: type str
    array['x_shift'] = float(self.x_shift)  # type float
    array['y_shift'] = float(self.y_shift)  # type float
    array['scale'] = float(self.scale)  # type float
    return array
3.3715
2.729129
1.235376
def from_array(array):
    """
    Deserialize a new MaskPosition from a given dictionary.

    :return: new MaskPosition instance, or None for empty/None input.
    :rtype: MaskPosition
    """
    if array is None or not array:
        return None
    # end if
    assert_type_or_raise(array, dict, parameter_name="array")

    data = {}
    data['point'] = u(array.get('point'))
    data['x_shift'] = float(array.get('x_shift'))
    data['y_shift'] = float(array.get('y_shift'))
    data['scale'] = float(array.get('scale'))
    data['_raw'] = array
    return MaskPosition(**data)
3.191976
2.587161
1.233775
def compile(cfg_path, out_path, executable=None, env=None, log=None):
    """Use ACE to compile a grammar.

    Args:
        cfg_path (str): the path to the ACE config file
        out_path (str): the path where the compiled grammar will be
            written
        executable (str, optional): the path to the ACE binary; if
            `None`, the `ace` command will be used
        env (dict, optional): environment variables to pass to the ACE
            subprocess
        log (file, optional): if given, the file, opened for writing,
            or stream to write ACE's stdout and stderr compile messages

    Raises:
        CalledProcessError / OSError: re-raised after logging when ACE
            fails or cannot be launched
    """
    try:
        check_call(
            [(executable or 'ace'), '-g', cfg_path, '-G', out_path],
            stdout=log, stderr=log, close_fds=True,
            env=(env or os.environ)
        )
    except (CalledProcessError, OSError):
        logging.error(
            'Failed to compile grammar with ACE. See {}'
            .format(log.name if log is not None else '<stderr>')
        )
        raise
3.934409
3.345116
1.176165
def parse_from_iterable(grm, data, **kwargs):
    """Parse each sentence in *data* with ACE using grammar *grm*.

    Args:
        grm (str): path to a compiled grammar image
        data (iterable): the sentences to parse
        **kwargs: additional keyword arguments to pass to the AceParser

    Yields:
        :class:`~delphin.interfaces.ParseResponse`
    """
    # One ACE process serves the whole iterable; the context manager
    # shuts it down when the generator is exhausted or closed.
    with AceParser(grm, **kwargs) as parser:
        for datum in data:
            yield parser.interact(datum)
7.304302
9.901488
0.737697
def transfer_from_iterable(grm, data, **kwargs):
    """Transfer from each MRS in *data* with ACE using grammar *grm*.

    Args:
        grm (str): path to a compiled grammar image
        data (iterable): source MRSs as SimpleMRS strings
        **kwargs: additional keyword arguments to pass to the
            AceTransferer

    Yields:
        :class:`~delphin.interfaces.ParseResponse`
    """
    with AceTransferer(grm, **kwargs) as transferer:
        for datum in data:
            yield transferer.interact(datum)
7.698643
6.464736
1.190867
def generate_from_iterable(grm, data, **kwargs):
    """Generate from each MRS in *data* with ACE using grammar *grm*.

    Args:
        grm (str): path to a compiled grammar image
        data (iterable): MRSs as SimpleMRS strings
        **kwargs: additional keyword arguments to pass to the
            AceGenerator

    Yields:
        :class:`~delphin.interfaces.ParseResponse`
    """
    with AceGenerator(grm, **kwargs) as generator:
        for datum in data:
            yield generator.interact(datum)
7.048168
8.342351
0.844866
def send(self, datum):
    """Send *datum* (e.g. a sentence or MRS) to ACE.

    Warning:
        Sending data without reading (e.g., via :meth:`receive`) can
        fill the buffer and cause data to be lost. Use the
        :meth:`interact` method for most data-processing tasks with
        ACE.
    """
    try:
        self._p.stdin.write((datum.rstrip() + '\n'))
        self._p.stdin.flush()
    except (IOError, OSError):  # ValueError if file was closed manually
        logging.info(
            'Attempted to write to a closed process; attempting to reopen'
        )
        # Restart the ACE subprocess and retry the write once.
        self._open()
        self._p.stdin.write((datum.rstrip() + '\n'))
        self._p.stdin.flush()
3.717844
3.91725
0.949095
def interact(self, datum):
    """Send *datum* to ACE and return the response.

    This is the recommended method for sending and receiving data
    to/from an ACE process as it reduces the chances of over-filling
    or reading past the end of the buffer. It also performs a simple
    validation of the input to help ensure that one complete item is
    processed at a time.

    Args:
        datum (str): the input sentence or MRS
    Returns:
        :class:`~delphin.interfaces.ParseResponse`
    """
    validated = self._validate_input(datum)
    if validated:
        self.send(validated)
        result = self.receive()
    else:
        # Input rejected: synthesize a SKIP response instead of
        # forwarding something ACE could choke on.
        result, lines = _make_response(
            [('NOTE: PyDelphin could not validate the input and '
              'refused to send it to ACE'),
             'SKIP: {}'.format(datum)],
            self.run_info)
    result['input'] = datum
    return result
10.792035
8.927888
1.2088
def process_item(self, datum, keys=None):
    """Send *datum* to ACE and return the response with context.

    The *keys* parameter can be used to track item identifiers
    through an ACE interaction; when the processor has a `task` set,
    it is recorded on the response as well (without overwriting an
    existing 'task' entry).

    Args:
        datum (str): the input sentence or MRS
        keys (dict): a mapping of item identifier names and values
    Returns:
        :class:`~delphin.interfaces.ParseResponse`
    """
    response = self.interact(datum)
    if keys is not None:
        response['keys'] = keys
    if self.task is not None and 'task' not in response:
        response['task'] = self.task
    return response
3.908588
3.608139
1.08327
def close(self):
    """Close the ACE process and return the process's exit code.

    Closing stdin signals ACE to finish; remaining stdout lines are
    drained for tsdb run info before waiting on the process.
    """
    self.run_info['end'] = datetime.now()
    self._p.stdin.close()
    for line in self._p.stdout:
        if line.startswith('NOTE: tsdb run:'):
            self._read_run_info(line)
        else:
            logging.debug('ACE cleanup: {}'.format(line.rstrip()))
    retval = self._p.wait()
    return retval
7.220914
5.667012
1.274201
def load(fh, single=False):
    """Deserialize DMRX from a file (handle or filename).

    Args:
        fh (str, file): input filename or file object
        single: if `True`, only return the first read Xmrs object
    Returns:
        a generator of Xmrs objects (unless the *single* option is
        `True`, in which case just the first object)
    """
    ms = deserialize(fh)
    if single:
        ms = next(ms)
    return ms
7.499639
8.128758
0.922606
def loads(s, single=False):
    """Deserialize DMRX string representations.

    Args:
        s (str): a DMRX string
        single (bool): if `True`, only return the first Xmrs object
    Returns:
        a generator of Xmrs objects (unless *single* is `True`)
    """
    corpus = etree.fromstring(s)
    if single:
        ds = _deserialize_dmrs(next(iter(corpus)))
    else:
        # Lazy: elements are converted only as the caller iterates.
        ds = (_deserialize_dmrs(dmrs_elem) for dmrs_elem in corpus)
    return ds
5.033101
4.822917
1.04358
def derivation(self):
    """Deserialize and return a Derivation object for UDF- or
    JSON-formatted derivation data; otherwise return the original
    value (including None when absent).
    """
    drv = self.get('derivation')
    if drv is not None:
        if isinstance(drv, dict):
            drv = Derivation.from_dict(drv)
        elif isinstance(drv, stringtypes):
            drv = Derivation.from_string(drv)
    return drv
2.831529
2.580396
1.097324
def tree(self):
    """Deserialize and return a labeled syntax tree.

    The tree data may be a standalone datum, or (when absent) is
    reconstructed from a dict-form derivation's labels.
    """
    tree = self.get('tree')
    if isinstance(tree, stringtypes):
        tree = SExpr.parse(tree).data
    elif tree is None:
        drv = self.get('derivation')
        if isinstance(drv, dict) and 'label' in drv:
            def _extract_tree(d):
                t = [d.get('label', '')]
                if 'tokens' in d:
                    # Leaf node: the surface form stands in for tokens.
                    t.append([d.get('form', '')])
                else:
                    for dtr in d.get('daughters', []):
                        t.append(_extract_tree(dtr))
                return t
            tree = _extract_tree(drv)
    return tree
3.978324
3.568541
1.114832
def mrs(self):
    """Deserialize and return an Mrs object for simplemrs or
    JSON-formatted MRS data; otherwise return the original value
    (including None when absent).
    """
    mrs = self.get('mrs')
    if mrs is not None:
        if isinstance(mrs, dict):
            mrs = Mrs.from_dict(mrs)
        elif isinstance(mrs, stringtypes):
            mrs = simplemrs.loads_one(mrs)
    return mrs
3.489209
2.75577
1.266147
def eds(self):
    """Deserialize and return an Eds object for native- or
    JSON-formatted EDS data; otherwise return the original value
    (including None when absent).
    """
    _eds = self.get('eds')
    if _eds is not None:
        if isinstance(_eds, dict):
            _eds = eds.Eds.from_dict(_eds)
        elif isinstance(_eds, stringtypes):
            _eds = eds.loads_one(_eds)
    return _eds
3.938698
3.480987
1.131489
def dmrs(self):
    """Deserialize and return a Dmrs object for JSON-formatted DMRS
    data; otherwise return the original value (including None when
    absent).
    """
    dmrs = self.get('dmrs')
    if dmrs is not None:
        if isinstance(dmrs, dict):
            dmrs = Dmrs.from_dict(dmrs)
    return dmrs
2.921892
2.476629
1.179786
def tokens(self, tokenset='internal'):
    """Deserialize and return a YyTokenLattice object for the initial
    or internal token set, if provided, from the YY format or the
    JSON-formatted data; otherwise return the original string.

    Args:
        tokenset (str): return `'initial'` or `'internal'` tokens
            (default: `'internal'`)
    Returns:
        :class:`YyTokenLattice`
    """
    toks = self.get('tokens', {}).get(tokenset)
    if toks is not None:
        if isinstance(toks, stringtypes):
            toks = YyTokenLattice.from_string(toks)
        elif isinstance(toks, Sequence):
            toks = YyTokenLattice.from_list(toks)
    return toks
3.850435
3.005001
1.281343
def map(self, response):
    """Process *response* and return a list of (table, rowdata)
    tuples.

    One 'parse' row is emitted per response, one 'result' row per
    entry in response['results']; run info is accumulated on the
    instance and only flushed by cleanup().
    """
    inserts = []

    parse = {}
    # custom remapping, cleanup, and filling in holes
    parse['i-id'] = response.get('keys', {}).get('i-id', -1)
    # parse-id is monotonically increasing, but never behind i-id.
    self._parse_id = max(self._parse_id + 1, parse['i-id'])
    parse['parse-id'] = self._parse_id
    parse['run-id'] = response.get('run', {}).get('run-id', -1)
    if 'tokens' in response:
        parse['p-input'] = response['tokens'].get('initial')
        parse['p-tokens'] = response['tokens'].get('internal')
        if 'ninputs' not in response:
            toks = response.tokens('initial')
            if toks is not None:
                response['ninputs'] = len(toks.tokens)
        if 'ntokens' not in response:
            toks = response.tokens('internal')
            if toks is not None:
                response['ntokens'] = len(toks.tokens)
    if 'readings' not in response and 'results' in response:
        response['readings'] = len(response['results'])
    # basic mapping
    for key in self._parse_keys:
        if key in response:
            parse[key] = response[key]
    inserts.append(('parse', parse))

    for result in response.get('results', []):
        d = {'parse-id': self._parse_id}
        if 'flags' in result:
            d['flags'] = SExpr.format(result['flags'])
        for key in self._result_keys:
            if key in result:
                d[key] = result[key]
        inserts.append(('result', d))

    if 'run' in response:
        run_id = response['run'].get('run-id', -1)
        # check if last run was not closed properly
        if run_id not in self._runs and self._last_run_id in self._runs:
            last_run = self._runs[self._last_run_id]
            if 'end' not in last_run:
                last_run['end'] = datetime.now()
        self._runs[run_id] = response['run']
        self._last_run_id = run_id

    return inserts
3.085253
3.016874
1.022666
def cleanup(self):
    """Return aggregated (table, rowdata) tuples and clear the state.

    Emits one 'run' row per recorded run (ordered by run id), stamping
    an end time on the final run if it never got one, then resets the
    mapper for the next task.
    """
    final_run = self._runs[self._last_run_id]
    if 'end' not in final_run:
        # The last run was never closed; stamp it now.
        final_run['end'] = datetime.now()

    inserts = []
    for run_id in sorted(self._runs):
        run = self._runs[run_id]
        row = {'run-id': run.get('run-id', -1)}
        row.update((key, run[key]) for key in self._run_keys if key in run)
        inserts.append(('run', row))

    # reset for next task
    self._parse_id = -1
    self._runs = {}
    self._last_run_id = -1
    return inserts
3.436268
3.240034
1.060566
array = super(GameHighScore, self).to_array() array['position'] = int(self.position) # type int array['user'] = self.user.to_array() # type User array['score'] = int(self.score) # type int return array
def to_array(self)
Serializes this GameHighScore to a dictionary. :return: dictionary representation of this object. :rtype: dict
3.242249
2.625489
1.234913
if array is None or not array: return None # end if assert_type_or_raise(array, dict, parameter_name="array") from pytgbot.api_types.receivable.peer import User data = {} data['position'] = int(array.get('position')) data['user'] = User.from_array(array.get('user')) data['score'] = int(array.get('score')) data['_raw'] = array return GameHighScore(**data)
def from_array(array)
Deserialize a new GameHighScore from a given dictionary. :return: new GameHighScore instance. :rtype: GameHighScore
3.293889
2.785353
1.182575
def link_label(link): return '{}/{}'.format(link.rargname or '', link.post) def label_edge(link): if link.post == H_POST and link.rargname == RSTR_ROLE: return 'rstr' elif link.post == EQ_POST: return 'eq' else: return 'arg' if isinstance(xs, Xmrs): xs = [xs] lines = .split("\n") for ix, x in enumerate(xs): lines.append("%%%\n%%% {}\n%%%".format(ix+1)) lines.append("\\begin{dependency}[dmrs]") ns = nodes(x) ### predicates lines.append(" \\begin{deptext}[column sep=10pt]") for i, n in enumerate(ns): sep = "\\&" if (i < len(ns) - 1) else "\\\\" pred = _latex_escape(n.pred.short_form()) pred = "\\named{}" if pred == 'named' else pred if n.carg is not None: print(n.carg.strip('"')) pred += "\\smaller ({})".format(n.carg.strip('"')) lines.append(" \\spred{{{}}} {} % node {}".format( pred, sep, i+1)) lines.append(" \\end{deptext}") nodeidx = {n.nodeid: i+1 for i, n in enumerate(ns)} ### links for link in links(x): if link.start == 0: lines.append( ' \\deproot[root]{{{}}}{{{}}}'.format( nodeidx[link.end], '\\srl{TOP}' # _latex_escape('/' + link.post) ) ) else: lines.append(' \\depedge[{}]{{{}}}{{{}}}{{\\srl{{{}}}}}'.format( label_edge(link), nodeidx[link.start], nodeidx[link.end], _latex_escape(link_label(link)) )) ### placeholder for icons lines.append('% \\depedge[icons]{f}{t}{FOCUS}') lines.append('\\end{dependency}\n') lines.append('\\end{document}') return '\n'.join(lines)
def dmrs_tikz_dependency(xs, **kwargs)
Return a LaTeX document with each Xmrs in *xs* rendered as DMRSs. DMRSs use the `tikz-dependency` package for visualization.
5.05501
4.819216
1.048928
@wraps(f) def wrapper(*args, **kwargs): if 'value' in kwargs: val = kwargs['value'] del kwargs['value'] _f = f(*args, **kwargs) def valued_f(*args, **kwargs): result = _f(*args, **kwargs) s, obj, span = result if callable(val): return PegreResult(s, val(obj), span) else: return PegreResult(s, val, span) return valued_f else: return f(*args, **kwargs) return wrapper
def valuemap(f)
Decorator to help PEG functions handle value conversions.
3.034737
2.776646
1.092951
xlen = len(x) msg = 'Expected: "{}"'.format(x) def match_literal(s, grm=None, pos=0): if s[:xlen] == x: return PegreResult(s[xlen:], x, (pos, pos+xlen)) raise PegreError(msg, pos) return match_literal
def literal(x)
Create a PEG function to consume a literal.
5.770205
5.16227
1.117765
if isinstance(r, stringtypes): p = re.compile(r) else: p = r msg = 'Expected to match: {}'.format(p.pattern) def match_regex(s, grm=None, pos=0): m = p.match(s) if m is not None: start, end = m.span() data = m.groupdict() if p.groupindex else m.group() return PegreResult(s[m.end():], data, (pos+start, pos+end)) raise PegreError(msg, pos) return match_regex
def regex(r)
Create a PEG function to match a regular expression.
4.131106
3.792467
1.089293
def match_nonterminal(s, grm=None, pos=0): if grm is None: grm = {} expr = grm[n] return expr(s, grm, pos) return match_nonterminal
def nonterminal(n)
Create a PEG function to match a nonterminal.
4.600618
4.1456
1.109759
def match_and_next(s, grm=None, pos=0): try: e(s, grm, pos) except PegreError as ex: raise PegreError('Positive lookahead failed', pos) else: return PegreResult(s, Ignore, (pos, pos)) return match_and_next
def and_next(e)
Create a PEG function for positive lookahead.
8.543658
7.315938
1.167815
def match_not_next(s, grm=None, pos=0): try: e(s, grm, pos) except PegreError as ex: return PegreResult(s, Ignore, (pos, pos)) else: raise PegreError('Negative lookahead failed', pos) return match_not_next
def not_next(e)
Create a PEG function for negative lookahead.
7.552296
6.678859
1.130776
def match_sequence(s, grm=None, pos=0): data = [] start = pos for e in es: s, obj, span = e(s, grm, pos) pos = span[1] if obj is not Ignore: data.append(obj) return PegreResult(s, data, (start, pos)) return match_sequence
def sequence(*es)
Create a PEG function to match a sequence.
5.53686
5.001789
1.106976
msg = 'Expected one of: {}'.format(', '.join(map(repr, es))) def match_choice(s, grm=None, pos=0): errs = [] for e in es: try: return e(s, grm, pos) except PegreError as ex: errs.append((ex.message, ex.position)) if errs: raise PegreChoiceError(errs, pos) return match_choice
def choice(*es)
Create a PEG function to match an ordered choice.
4.411441
3.654927
1.206985
def match_optional(s, grm=None, pos=0): try: return e(s, grm, pos) except PegreError: return PegreResult(s, default, (pos, pos)) return match_optional
def optional(e, default=Ignore)
Create a PEG function to optionally match an expression.
6.753987
6.260562
1.078815
if delimiter is None: delimiter = lambda s, grm, pos: (s, Ignore, (pos, pos)) def match_zero_or_more(s, grm=None, pos=0): start = pos try: s, obj, span = e(s, grm, pos) pos = span[1] data = [] if obj is Ignore else [obj] except PegreError: return PegreResult(s, [], (pos, pos)) try: while True: s, obj, span = delimiter(s, grm, pos) pos = span[1] if obj is not Ignore: data.append(obj) s, obj, span = e(s, grm, pos) pos = span[1] if obj is not Ignore: data.append(obj) except PegreError: pass return PegreResult(s, data, (start, pos)) return match_zero_or_more
def zero_or_more(e, delimiter=None)
Create a PEG function to match zero or more expressions. Args: e: the expression to match delimiter: an optional expression to match between the primary *e* matches.
2.706764
2.761013
0.980352
if delimiter is None: delimiter = lambda s, grm, pos: (s, Ignore, (pos, pos)) msg = 'Expected one or more of: {}'.format(repr(e)) def match_one_or_more(s, grm=None, pos=0): start = pos s, obj, span = e(s, grm, pos) pos = span[1] data = [] if obj is Ignore else [obj] try: while True: s, obj, span = delimiter(s, grm, pos) pos = span[1] if obj is not Ignore: data.append(obj) s, obj, span = e(s, grm, pos) pos = span[1] if obj is not Ignore: data.append(obj) except PegreError: pass return PegreResult(s, data, (start, pos)) return match_one_or_more
def one_or_more(e, delimiter=None)
Create a PEG function to match one or more expressions. Args: e: the expression to match delimiter: an optional expression to match between the primary *e* matches.
2.939436
3.017742
0.974051
# pump object to it's location with dummy nodes while location: axis = location.pop() obj = XmrsPathNode(None, None, links={axis: obj}) if base is None: return obj _merge(base, obj) # if isinstance(base, XmrsPath): # base.calculate_metrics() return base
def merge(base, obj, location=None)
merge is like XmrsPathNode.update() except it raises errors on unequal non-None values.
12.032069
9.096674
1.322689
links = node.links o_links = node._overlapping_links overlap = {ax2 for ax in links for ax2 in o_links.get(ax, [])} axes = [] for axis in sorted(links.keys(), key=sort_key): if axis in overlap: continue tgt = links[axis] if axis in o_links: s, e = axis[0], axis[-1] axis = '%s%s%s' % ( s, '&'.join(a[1:-1] for a in [axis] + o_links[axis]), e ) axes.append((axis, tgt)) return axes
def _prepare_axes(node, sort_key)
Sort axes and combine those that point to the same target and go in the same direction.
4.428381
4.296818
1.030619
try: val = self[key] except KeyError: val = default return val
def get(self, key, default=None)
Return the value for *key* if it exists, otherwise *default*.
3.499363
3.122795
1.120587
fs = [] if self._avm is not None: if len(self._feats) == len(self._avm): feats = self._feats else: feats = list(self._avm) for feat in feats: val = self._avm[feat] if isinstance(val, FeatureStructure): if not expand and val._is_notable(): fs.append((feat, val)) else: for subfeat, subval in val.features(expand=expand): fs.append(('{}.{}'.format(feat, subfeat), subval)) else: fs.append((feat, val)) return fs
def features(self, expand=False)
Return the list of tuples of feature paths and feature values. Args: expand (bool): if `True`, expand all feature paths Example: >>> fs = FeatureStructure([('A.B', 1), ('A.C', 2)]) >>> fs.features() [('A', <FeatureStructure object at ...>)] >>> fs.features(expand=True) [('A.B', 1), ('A.C', 2)]
2.89963
2.928382
0.990181
xs = [] for parent in self._hier[typename][0]: xs.append(parent) xs.extend(self.ancestors(parent)) return xs
def ancestors(self, typename)
Return the ancestor types of *typename*.
4.155352
3.975871
1.045143
xs = [] for child in self._hier[typename][1]: xs.append(child) xs.extend(self.descendants(child)) return xs
def descendants(self, typename)
Return the descendant types of *typename*.
4.086132
3.97675
1.027505
return a == b or b in self.descendants(a)
def subsumes(self, a, b)
Return `True` if type *a* subsumes type *b*.
8.982337
7.845657
1.14488
return len(set([a] + self.descendants(a)) .intersection([b] + self.descendants(b))) > 0
def compatible(self, a, b)
Return `True` if type *a* is compatible with type *b*.
5.657367
5.232806
1.081134
qdg = _make_digraph(q, check_varprops) gdg = _make_digraph(g, check_varprops) def nem(qd, gd): # node-edge-match return qd.get('sig') == gd.get('sig') return nx.is_isomorphic(qdg, gdg, node_match=nem, edge_match=nem)
def isomorphic(q, g, check_varprops=True)
Return `True` if Xmrs objects *q* and *g* are isomorphic. Isomorphicity compares the predicates of an Xmrs, the variable properties of their predications (if `check_varprops=True`), constant arguments, and the argument structure between predications. Node IDs and Lnk values are ignored. Args: q: the left Xmrs to compare g: the right Xmrs to compare check_varprops: if `True`, make sure variable properties are equal for mapped predications
3.893921
4.406466
0.883684
# first some quick checks if len(q.eps()) != len(g.eps()): return False if len(q.variables()) != len(g.variables()): return False #if len(a.hcons()) != len(b.hcons()): return False try: next(_isomorphisms(q, g, check_varprops=check_varprops)) return True except StopIteration: return False
def _turbo_isomorphic(q, g, check_varprops=True)
Query Xmrs q is isomorphic to given Xmrs g if there exists an isomorphism (bijection of eps and vars) from q to g.
3.692087
3.074826
1.200747
# convert MRSs to be more graph-like, and add some indices qig = _IsoGraph(q, varprops=check_varprops) # qig = q isograph gig = _IsoGraph(g, varprops=check_varprops) # gig = q isograph # qsigs, qsigidx = _isomorphism_sigs(q, check_varprops) # gsigs, gsigidx = _isomorphism_sigs(g, check_varprops) # (it would be nice to not have to do this... maybe later) # qadj = _isomorphism_adj(q, qsigidx) # gadj = _isomorphism_adj(g, gsigidx) # the degree of each node is useful (but can it be combined with adj?) # qdeg = _isomorphism_deg(qadj) # gdeg = _isomorphism_deg(gadj) u_s = _isomorphism_choose_start_q_vertex(qig, gig, subgraph=False) q_ = _isomorphism_rewrite_to_NECtree(u_s, qig) for v_s in gsigs.get(qsigidx[u_s], []): cr = _isomorphism_explore_CR(q_, {v_s}, qig, gig) if cr is None: continue order = _isomorphism_determine_matching_order(q_, cr) update_state(M,F,{u_s}, {v_s}) subraph_search(q, q_, g, order, 1) # 1="the first query vertex to match" restore_state(M, F, {u_s}, {v_s})
def _isomorphisms(q, g, check_varprops=True)
Inspired by Turbo_ISO: http://dl.acm.org/citation.cfm?id=2465300
5.692785
5.657596
1.00622
qadj = qgraph.adj adjsets = lambda x: set(chain.from_iterable(qadj[x].values())) t = ([q_s], []) # (NEC_set, children) visited = {q_s} vcur, vnext = [t], [] while vcur: for (nec, children) in vcur: c = defaultdict(list) for u in nec: for sig, adjlist in qadj[u].items(): c[sig].extend(x for x, _, _ in adjlist if x not in visited) for sig, c_adjlist in c.items(): visited.update(c_adjlist) # these are already grouped by label; now group by adjacents for key, grp in groupby(c_adjlist, key=adjsets): grp = list(grp) if len(grp) > 1: children.append((list(grp), [])) else: # NOTE: the paper says to look for mergeable things, # but I don't know what else to merge by. children.append((list(grp), [])) vnext.extend(children) vcur, vnext = vnext, [] return t
def _isomorphism_rewrite_to_NECtree(q_s, qgraph)
Neighborhood Equivalence Class tree (see Turbo_ISO paper)
5.044533
4.97885
1.013192
# first some quick checks a_var_refs = sorted(len(vd['refs']) for vd in a._vars.values()) b_var_refs = sorted(len(vd['refs']) for vd in b._vars.values()) if a_var_refs != b_var_refs: return False print() # these signature: [node] indices are meant to avoid unnecessary # comparisons; they also take care of "semantic feasibility" # constraints (comparing node values and properties). All that's # left is the "syntactic feasibility", or node-edge shapes. # nodedicts are {sig: [(id, edges), ...], ...} a_nd = _node_isomorphic_build_nodedict(a, check_varprops) #print('a', a_nd) b_nd = _node_isomorphic_build_nodedict(b, check_varprops) #print('b', b_nd) #return a_sigs = {} # for node -> sig mapping # don't recurse when things are unique agenda = [] isomap = {} for sig, a_pairs in sorted(a_nd.items(), key=lambda x: len(x[1])): b_pairs = b_nd.get(sig, []) if len(a_pairs) != len(b_pairs): return False if len(a_pairs) == 1: a_, a_edges = a_pairs[0] b_, b_edges = b_pairs[0] if len(a_edges) != len(b_edges): return False a_sigs[a_] = sig isomap[a_] = b_ for edge, a_tgt in a_edges.items(): if edge not in b_edges: return False isomap[a_tgt] = b_edges[edge] else: for a_, ed in a_pairs: a_sigs[a_] = sig agenda.append((a_, sig, ed)) #print(agenda) #return isomaps = _node_isomorphic(agenda, a_sigs, b_nd, isomap, {}) # for sig, a_candidates in sorted(a_nodes.items(), key=lambda x: len(x[1])): # b_candidates = b_nodes.get(sig, []) # if len(a_candidates) != len(b_candidates): return False # candidates.append((a_candidates, b_candidates)) # # nodemaps = _isomorphic(a, b, candidates, {}) try: next(isomaps) return True except StopIteration: return False
def _node_isomorphic(a, b, check_varprops=True)
Two Xmrs objects are isomorphic if they have the same structure as determined by variable linkages between preds.
3.956501
3.938488
1.004574
# first some quick checks if len(a.eps()) != len(b.eps()): return False if len(a.variables()) != len(b.variables()): return False #if len(a.hcons()) != len(b.hcons()): return False # pre-populate varmap; first top variables varmap = {} for pair in [(a.top, b.top), (a.index, b.index), (a.xarg, b.xarg)]: if pair != (None, None): v1, v2 = pair if None in pair: return False if check_varprops and a.properties(v1) != b.properties(v2): return False varmap[v1] = v2 # find permutations of variables, grouped by those that share the # same signature. a_sigs = defaultdict(list) for var, vd in a._vars.items(): if var not in varmap: var_sig = _isomorphic_var_signature(vd, a, check_varprops) a_sigs[var_sig].append(var) b_sigs = defaultdict(list) tgtmapped = set(varmap.values()) for var, vd in b._vars.items(): if var not in tgtmapped: var_sig = _isomorphic_var_signature(vd, b, check_varprops) b_sigs[var_sig].append(var) candidates = [] for sig, a_vars in sorted(a_sigs.items(), key=lambda x: len(x[1])): b_vars = b_sigs.get(sig, []) if len(a_vars) != len(b_vars): return False print(sig, a_vars, b_vars) candidates.append((a_vars, b_vars)) varmaps = _var_isomorphic(a, b, candidates, varmap) # double check HCONS (it's hard to do with var signatures) for vm in varmaps: if all(vm[lo] == b._hcons.get(vm[hi], (None, None, None))[2] for hi, _, lo in a.hcons()): return True return False
def _var_isomorphic(a, b, check_varprops=True)
Two Xmrs objects are isomorphic if they have the same structure as determined by variable linkages between preds.
3.510091
3.472946
1.010695
gold_remaining = list(goldbag) test_unique = [] shared = [] for test in testbag: gold_match = None for gold in gold_remaining: if isomorphic(test, gold): gold_match = gold break if gold_match is not None: gold_remaining.remove(gold_match) shared.append(test) else: test_unique.append(test) if count_only: return (len(test_unique), len(shared), len(gold_remaining)) else: return (test_unique, shared, gold_remaining)
def compare_bags(testbag, goldbag, count_only=True)
Compare two bags of Xmrs objects, returning a triple of (unique in test, shared, unique in gold). Args: testbag: An iterable of Xmrs objects to test. goldbag: An iterable of Xmrs objects to compare against. count_only: If True, the returned triple will only have the counts of each; if False, a list of Xmrs objects will be returned for each (using the ones from testbag for the shared set) Returns: A triple of (unique in test, shared, unique in gold), where each of the three items is an integer count if the count_only parameter is True, or a list of Xmrs objects otherwise.
2.228411
1.939159
1.149164
queryobj = _parse_query(query) if queryobj['querytype'] in ('select', 'retrieve'): return _select( queryobj['projection'], queryobj['tables'], queryobj['where'], ts, mode=kwargs.get('mode', 'list'), cast=kwargs.get('cast', True)) else: # not really a syntax error; replace with TSQLError or something # when the proper exception class exists raise TSQLSyntaxError(queryobj['querytype'] + ' queries are not supported')
def query(query, ts, **kwargs)
Perform *query* on the testsuite *ts*. Note: currently only 'select' queries are supported. Args: query (str): TSQL query string ts (:class:`delphin.itsdb.TestSuite`): testsuite to query over kwargs: keyword arguments passed to the more specific query function (e.g., :func:`select`) Example: >>> list(tsql.query('select i-id where i-length < 4', ts)) [[142], [1061]]
6.704148
6.613562
1.013697
queryobj = _parse_select(query) return _select( queryobj['projection'], queryobj['tables'], queryobj['where'], ts, mode, cast)
def select(query, ts, mode='list', cast=True)
Perform the TSQL selection query *query* on testsuite *ts*. Note: The `select`/`retrieve` part of the query is not included. Args: query (str): TSQL select query ts (:class:`delphin.itsdb.TestSuite`): testsuite to query over mode (str): how to return the results (see :func:`delphin.itsdb.select_rows` for more information about the *mode* parameter; default: `list`) cast (bool): if `True`, values will be cast to their datatype according to the testsuite's relations (default: `True`) Example: >>> list(tsql.select('i-id where i-length < 4', ts)) [[142], [1061]]
4.893757
7.255731
0.674468
s += '.' # make sure there's a terminator to know when to stop parsing lines = enumerate(s.splitlines(), 1) lineno = pos = 0 try: for lineno, line in lines: matches = _tsql_lex_re.finditer(line) for m in matches: gid = m.lastindex if gid == 11: raise TSQLSyntaxError('unexpected input', lineno=lineno, offset=m.start(), text=line) else: token = m.group(gid) yield (gid, token, lineno) except StopIteration: pass
def _lex(s)
Lex the input string according to _tsql_lex_re. Yields (gid, token, line_number)
4.64128
3.817024
1.215942
assert_type_or_raise(offset, None, int, parameter_name="offset") assert_type_or_raise(limit, None, int, parameter_name="limit") assert_type_or_raise(timeout, None, int, parameter_name="timeout") assert_type_or_raise(allowed_updates, None, list, parameter_name="allowed_updates") result = self.do("getUpdates", offset=offset, limit=limit, timeout=timeout, allowed_updates=allowed_updates) if self.return_python_objects: logger.debug("Trying to parse {data}".format(data=repr(result))) from pytgbot.api_types.receivable.updates import Update try: return Update.from_array_list(result, list_level=1) except TgApiParseException: logger.debug("Failed parsing as api_type Update", exc_info=True) # end try # no valid parsing so far raise TgApiParseException("Could not parse result.") # See debug log for details! # end if return_python_objects return result
def get_updates(self, offset=None, limit=None, timeout=None, allowed_updates=None)
Use this method to receive incoming updates using long polling (wiki). An Array of Update objects is returned. Notes1. This method will not work if an outgoing webhook is set up.2. In order to avoid getting duplicate updates, recalculate offset after each server response. https://core.telegram.org/bots/api#getupdates Optional keyword parameters: :param offset: Identifier of the first update to be returned. Must be greater by one than the highest among the identifiers of previously received updates. By default, updates starting with the earliest unconfirmed update are returned. An update is considered confirmed as soon as getUpdates is called with an offset higher than its update_id. The negative offset can be specified to retrieve updates starting from -offset update from the end of the updates queue. All previous updates will forgotten. :type offset: int :param limit: Limits the number of updates to be retrieved. Values between 1—100 are accepted. Defaults to 100. :type limit: int :param timeout: Timeout in seconds for long polling. Defaults to 0, i.e. usual short polling. Should be positive, short polling should be used for testing purposes only. :type timeout: int :param allowed_updates: List the types of updates you want your bot to receive. For example, specify [“message”, “edited_channel_post”, “callback_query”] to only receive updates of these types. See Update for a complete list of available update types. Specify an empty list to receive all updates regardless of type (default). If not specified, the previous setting will be used.Please note that this parameter doesn't affect updates created before the call to the getUpdates, so unwanted updates may be received for a short period of time. :type allowed_updates: list of str|unicode Returns: :return: An Array of Update objects is returned :rtype: list of pytgbot.api_types.receivable.updates.Update
2.502429
2.392623
1.045893
from pytgbot.api_types.sendable.files import InputFile assert_type_or_raise(url, unicode_type, parameter_name="url") assert_type_or_raise(certificate, None, InputFile, parameter_name="certificate") assert_type_or_raise(max_connections, None, int, parameter_name="max_connections") assert_type_or_raise(allowed_updates, None, list, parameter_name="allowed_updates") result = self.do("setWebhook", url=url, certificate=certificate, max_connections=max_connections, allowed_updates=allowed_updates) if self.return_python_objects: logger.debug("Trying to parse {data}".format(data=repr(result))) try: return from_array_list(bool, result, list_level=0, is_builtin=True) except TgApiParseException: logger.debug("Failed parsing as primitive bool", exc_info=True) # end try # no valid parsing so far raise TgApiParseException("Could not parse result.") # See debug log for details! # end if return_python_objects return result
def set_webhook(self, url, certificate=None, max_connections=None, allowed_updates=None)
Use this method to specify a url and receive incoming updates via an outgoing webhook. Whenever there is an update for the bot, we will send an HTTPS POST request to the specified url, containing a JSON-serialized Update. In case of an unsuccessful request, we will give up after a reasonable amount of attempts. Returns True on success. If you'd like to make sure that the Webhook request comes from Telegram, we recommend using a secret path in the URL, e.g. https://www.example.com/<token>. Since nobody else knows your bot‘s token, you can be pretty sure it’s us. Notes1. You will not be able to receive updates using getUpdates for as long as an outgoing webhook is set up.2. To use a self-signed certificate, you need to upload your public key certificate using certificate parameter. Please upload as InputFile, sending a String will not work.3. Ports currently supported for Webhooks: 443, 80, 88, 8443. NEW! If you're having any trouble setting up webhooks, please check out this amazing guide to Webhooks. https://core.telegram.org/bots/api#setwebhook Parameters: :param url: HTTPS url to send updates to. Use an empty string to remove webhook integration :type url: str|unicode Optional keyword parameters: :param certificate: Upload your public key certificate so that the root certificate in use can be checked. See our self-signed guide for details. :type certificate: pytgbot.api_types.sendable.files.InputFile :param max_connections: Maximum allowed number of simultaneous HTTPS connections to the webhook for update delivery, 1-100. Defaults to 40. Use lower values to limit the load on your bot‘s server, and higher values to increase your bot’s throughput. :type max_connections: int :param allowed_updates: List the types of updates you want your bot to receive. For example, specify [“message”, “edited_channel_post”, “callback_query”] to only receive updates of these types. See Update for a complete list of available update types. 
Specify an empty list to receive all updates regardless of type (default). If not specified, the previous setting will be used.Please note that this parameter doesn't affect updates created before the call to the setWebhook, so unwanted updates may be received for a short period of time. :type allowed_updates: list of str|unicode Returns: :return: Returns True on success :rtype: bool
2.756443
2.571871
1.071766
result = self.do("deleteWebhook", ) if self.return_python_objects: logger.debug("Trying to parse {data}".format(data=repr(result))) try: return from_array_list(bool, result, list_level=0, is_builtin=True) except TgApiParseException: logger.debug("Failed parsing as primitive bool", exc_info=True) # end try # no valid parsing so far raise TgApiParseException("Could not parse result.") # See debug log for details! # end if return_python_objects return result
def delete_webhook(self, )
Use this method to remove webhook integration if you decide to switch back to getUpdates. Returns True on success. Requires no parameters. https://core.telegram.org/bots/api#deletewebhook Returns: :return: Returns True on success :rtype: bool
6.736531
6.152714
1.094888
result = self.do("getWebhookInfo", ) if self.return_python_objects: logger.debug("Trying to parse {data}".format(data=repr(result))) from pytgbot.api_types.receivable.updates import WebhookInfo try: return WebhookInfo.from_array(result) except TgApiParseException: logger.debug("Failed parsing as api_type WebhookInfo", exc_info=True) # end try # no valid parsing so far raise TgApiParseException("Could not parse result.") # See debug log for details! # end if return_python_objects return result
def get_webhook_info(self, )
Use this method to get current webhook status. Requires no parameters. On success, returns a WebhookInfo object. If the bot is using getUpdates, will return an object with the url field empty. https://core.telegram.org/bots/api#getwebhookinfo Returns: :return: On success, returns a WebhookInfo object :rtype: pytgbot.api_types.receivable.updates.WebhookInfo
4.089351
4.018221
1.017702
from pytgbot.api_types.sendable.files import InputFile from pytgbot.api_types.sendable.reply_markup import ForceReply from pytgbot.api_types.sendable.reply_markup import InlineKeyboardMarkup from pytgbot.api_types.sendable.reply_markup import ReplyKeyboardMarkup from pytgbot.api_types.sendable.reply_markup import ReplyKeyboardRemove assert_type_or_raise(chat_id, (int, unicode_type), parameter_name="chat_id") assert_type_or_raise(photo, (InputFile, unicode_type), parameter_name="photo") assert_type_or_raise(caption, None, unicode_type, parameter_name="caption") assert_type_or_raise(parse_mode, None, unicode_type, parameter_name="parse_mode") assert_type_or_raise(disable_notification, None, bool, parameter_name="disable_notification") assert_type_or_raise(reply_to_message_id, None, int, parameter_name="reply_to_message_id") assert_type_or_raise(reply_markup, None, (InlineKeyboardMarkup, ReplyKeyboardMarkup, ReplyKeyboardRemove, ForceReply), parameter_name="reply_markup") result = self.do("sendPhoto", chat_id=chat_id, photo=photo, caption=caption, parse_mode=parse_mode, disable_notification=disable_notification, reply_to_message_id=reply_to_message_id, reply_markup=reply_markup) if self.return_python_objects: logger.debug("Trying to parse {data}".format(data=repr(result))) from pytgbot.api_types.receivable.updates import Message try: return Message.from_array(result) except TgApiParseException: logger.debug("Failed parsing as api_type Message", exc_info=True) # end try # no valid parsing so far raise TgApiParseException("Could not parse result.") # See debug log for details! # end if return_python_objects return result
def send_photo(self, chat_id, photo, caption=None, parse_mode=None, disable_notification=None, reply_to_message_id=None, reply_markup=None)
Use this method to send photos. On success, the sent Message is returned. https://core.telegram.org/bots/api#sendphoto Parameters: :param chat_id: Unique identifier for the target chat or username of the target channel (in the format @channelusername) :type chat_id: int | str|unicode :param photo: Photo to send. Pass a file_id as String to send a photo that exists on the Telegram servers (recommended), pass an HTTP URL as a String for Telegram to get a photo from the Internet, or upload a new photo using multipart/form-data. More info on Sending Files » :type photo: pytgbot.api_types.sendable.files.InputFile | str|unicode Optional keyword parameters: :param caption: Photo caption (may also be used when resending photos by file_id), 0-1024 characters :type caption: str|unicode :param parse_mode: Send Markdown or HTML, if you want Telegram apps to show bold, italic, fixed-width text or inline URLs in the media caption. :type parse_mode: str|unicode :param disable_notification: Sends the message silently. Users will receive a notification with no sound. :type disable_notification: bool :param reply_to_message_id: If the message is a reply, ID of the original message :type reply_to_message_id: int :param reply_markup: Additional interface options. A JSON-serialized object for an inline keyboard, custom reply keyboard, instructions to remove reply keyboard or to force a reply from the user. :type reply_markup: pytgbot.api_types.sendable.reply_markup.InlineKeyboardMarkup | pytgbot.api_types.sendable.reply_markup.ReplyKeyboardMarkup | pytgbot.api_types.sendable.reply_markup.ReplyKeyboardRemove | pytgbot.api_types.sendable.reply_markup.ForceReply Returns: :return: On success, the sent Message is returned :rtype: pytgbot.api_types.receivable.updates.Message
1.664108
1.609398
1.033994
def send_media_group(self, chat_id, media, disable_notification=None, reply_to_message_id=None):
    """
    Use this method to send a group of photos or videos as an album.
    On success, an array of the sent Messages is returned.

    https://core.telegram.org/bots/api#sendmediagroup

    Parameters:

    :param chat_id: Unique identifier for the target chat or username of the target channel (in the format @channelusername)
    :type  chat_id: int | str|unicode

    :param media: A JSON-serialized array describing photos and videos to be sent, must include 2-10 items
    :type  media: list of pytgbot.api_types.sendable.input_media.InputMediaPhoto | list of pytgbot.api_types.sendable.input_media.InputMediaVideo

    Optional keyword parameters:

    :param disable_notification: Sends the messages silently. Users will receive a notification with no sound.
    :type  disable_notification: bool

    :param reply_to_message_id: If the messages are a reply, ID of the original message
    :type  reply_to_message_id: int

    Returns:

    :return: On success, an array of the sent Messages is returned
    :rtype:  list of pytgbot.api_types.receivable.updates.Message
    """
    # Imported so the documented media element types are resolvable/registered;
    # element-level validation is left to the server / serialization layer.
    from pytgbot.api_types.sendable.input_media import InputMediaPhoto
    from pytgbot.api_types.sendable.input_media import InputMediaVideo

    assert_type_or_raise(chat_id, (int, unicode_type), parameter_name="chat_id")
    # FIX: was (list, list) — a duplicated entry in the allowed-types tuple
    # produced by the generator; isinstance-wise identical, but plain `list`
    # states the intent correctly.
    assert_type_or_raise(media, list, parameter_name="media")
    assert_type_or_raise(disable_notification, None, bool, parameter_name="disable_notification")
    assert_type_or_raise(reply_to_message_id, None, int, parameter_name="reply_to_message_id")
    result = self.do("sendMediaGroup", chat_id=chat_id, media=media, disable_notification=disable_notification, reply_to_message_id=reply_to_message_id)
    if self.return_python_objects:
        logger.debug("Trying to parse {data}".format(data=repr(result)))
        from pytgbot.api_types.receivable.updates import Message
        try:
            # sendMediaGroup returns a *list* of Messages, hence list_level=1.
            return Message.from_array_list(result, list_level=1)
        except TgApiParseException:
            logger.debug("Failed parsing as api_type Message", exc_info=True)
        # end try
        # no valid parsing so far
        raise TgApiParseException("Could not parse result.")  # See debug log for details!
    # end if return_python_objects
    return result
# end def send_media_group
Use this method to send a group of photos or videos as an album. On success, an array of the sent Messages is returned. https://core.telegram.org/bots/api#sendmediagroup Parameters: :param chat_id: Unique identifier for the target chat or username of the target channel (in the format @channelusername) :type chat_id: int | str|unicode :param media: A JSON-serialized array describing photos and videos to be sent, must include 2–10 items :type media: list of pytgbot.api_types.sendable.input_media.InputMediaPhoto | list of pytgbot.api_types.sendable.input_media.InputMediaVideo Optional keyword parameters: :param disable_notification: Sends the messages silently. Users will receive a notification with no sound. :type disable_notification: bool :param reply_to_message_id: If the messages are a reply, ID of the original message :type reply_to_message_id: int Returns: :return: On success, an array of the sent Messages is returned :rtype: list of pytgbot.api_types.receivable.updates.Message
2.255271
2.169268
1.039646
def send_location(self, chat_id, latitude, longitude, live_period=None, disable_notification=None, reply_to_message_id=None, reply_markup=None):
    """
    Send a point on the map. On success, the sent Message is returned.

    https://core.telegram.org/bots/api#sendlocation

    :param chat_id: Unique identifier for the target chat, or channel username in the format @channelusername
    :type  chat_id: int | str|unicode
    :param latitude: Latitude of the location
    :type  latitude: float
    :param longitude: Longitude of the location
    :type  longitude: float
    :param live_period: Period in seconds for which the location will be updated (Live Locations); should be between 60 and 86400.
    :type  live_period: int
    :param disable_notification: Send the message silently (notification without sound).
    :type  disable_notification: bool
    :param reply_to_message_id: If the message is a reply, ID of the original message
    :type  reply_to_message_id: int
    :param reply_markup: Additional interface options (inline keyboard, custom reply keyboard, remove keyboard, or force reply).
    :type  reply_markup: InlineKeyboardMarkup | ReplyKeyboardMarkup | ReplyKeyboardRemove | ForceReply
    :return: On success, the sent Message is returned
    :rtype:  pytgbot.api_types.receivable.updates.Message
    """
    from pytgbot.api_types.sendable.reply_markup import (
        ForceReply, InlineKeyboardMarkup, ReplyKeyboardMarkup, ReplyKeyboardRemove,
    )

    # Validate all arguments before touching the network.
    assert_type_or_raise(chat_id, (int, unicode_type), parameter_name="chat_id")
    assert_type_or_raise(latitude, float, parameter_name="latitude")
    assert_type_or_raise(longitude, float, parameter_name="longitude")
    assert_type_or_raise(live_period, None, int, parameter_name="live_period")
    assert_type_or_raise(disable_notification, None, bool, parameter_name="disable_notification")
    assert_type_or_raise(reply_to_message_id, None, int, parameter_name="reply_to_message_id")
    assert_type_or_raise(reply_markup, None, (InlineKeyboardMarkup, ReplyKeyboardMarkup, ReplyKeyboardRemove, ForceReply), parameter_name="reply_markup")

    raw = self.do(
        "sendLocation", chat_id=chat_id, latitude=latitude, longitude=longitude,
        live_period=live_period, disable_notification=disable_notification,
        reply_to_message_id=reply_to_message_id, reply_markup=reply_markup,
    )
    if not self.return_python_objects:
        return raw
    # end if

    logger.debug("Trying to parse {data}".format(data=repr(raw)))
    from pytgbot.api_types.receivable.updates import Message
    try:
        return Message.from_array(raw)
    except TgApiParseException:
        logger.debug("Failed parsing as api_type Message", exc_info=True)
    # end try
    raise TgApiParseException("Could not parse result.")  # See debug log for details!
# end def send_location
Use this method to send point on the map. On success, the sent Message is returned. https://core.telegram.org/bots/api#sendlocation Parameters: :param chat_id: Unique identifier for the target chat or username of the target channel (in the format @channelusername) :type chat_id: int | str|unicode :param latitude: Latitude of the location :type latitude: float :param longitude: Longitude of the location :type longitude: float Optional keyword parameters: :param live_period: Period in seconds for which the location will be updated (see Live Locations, should be between 60 and 86400. :type live_period: int :param disable_notification: Sends the message silently. Users will receive a notification with no sound. :type disable_notification: bool :param reply_to_message_id: If the message is a reply, ID of the original message :type reply_to_message_id: int :param reply_markup: Additional interface options. A JSON-serialized object for an inline keyboard, custom reply keyboard, instructions to remove reply keyboard or to force a reply from the user. :type reply_markup: pytgbot.api_types.sendable.reply_markup.InlineKeyboardMarkup | pytgbot.api_types.sendable.reply_markup.ReplyKeyboardMarkup | pytgbot.api_types.sendable.reply_markup.ReplyKeyboardRemove | pytgbot.api_types.sendable.reply_markup.ForceReply Returns: :return: On success, the sent Message is returned :rtype: pytgbot.api_types.receivable.updates.Message
1.707387
1.656352
1.030812
def send_contact(self, chat_id, phone_number, first_name, last_name=None, vcard=None, disable_notification=None, reply_to_message_id=None, reply_markup=None):
    """
    Send a phone contact. On success, the sent Message is returned.

    https://core.telegram.org/bots/api#sendcontact

    :param chat_id: Unique identifier for the target chat, or channel username in the format @channelusername
    :type  chat_id: int | str|unicode
    :param phone_number: Contact's phone number
    :type  phone_number: str|unicode
    :param first_name: Contact's first name
    :type  first_name: str|unicode
    :param last_name: Contact's last name
    :type  last_name: str|unicode
    :param vcard: Additional data about the contact in the form of a vCard, 0-2048 bytes
    :type  vcard: str|unicode
    :param disable_notification: Send the message silently (notification without sound).
    :type  disable_notification: bool
    :param reply_to_message_id: If the message is a reply, ID of the original message
    :type  reply_to_message_id: int
    :param reply_markup: Additional interface options (inline keyboard, custom reply keyboard, remove keyboard, or force reply).
    :type  reply_markup: InlineKeyboardMarkup | ReplyKeyboardMarkup | ReplyKeyboardRemove | ForceReply
    :return: On success, the sent Message is returned
    :rtype:  pytgbot.api_types.receivable.updates.Message
    """
    from pytgbot.api_types.sendable.reply_markup import (
        ForceReply, InlineKeyboardMarkup, ReplyKeyboardMarkup, ReplyKeyboardRemove,
    )

    # Validate all arguments before touching the network.
    assert_type_or_raise(chat_id, (int, unicode_type), parameter_name="chat_id")
    assert_type_or_raise(phone_number, unicode_type, parameter_name="phone_number")
    assert_type_or_raise(first_name, unicode_type, parameter_name="first_name")
    assert_type_or_raise(last_name, None, unicode_type, parameter_name="last_name")
    assert_type_or_raise(vcard, None, unicode_type, parameter_name="vcard")
    assert_type_or_raise(disable_notification, None, bool, parameter_name="disable_notification")
    assert_type_or_raise(reply_to_message_id, None, int, parameter_name="reply_to_message_id")
    assert_type_or_raise(reply_markup, None, (InlineKeyboardMarkup, ReplyKeyboardMarkup, ReplyKeyboardRemove, ForceReply), parameter_name="reply_markup")

    raw = self.do(
        "sendContact", chat_id=chat_id, phone_number=phone_number,
        first_name=first_name, last_name=last_name, vcard=vcard,
        disable_notification=disable_notification,
        reply_to_message_id=reply_to_message_id, reply_markup=reply_markup,
    )
    if not self.return_python_objects:
        return raw
    # end if

    logger.debug("Trying to parse {data}".format(data=repr(raw)))
    from pytgbot.api_types.receivable.updates import Message
    try:
        return Message.from_array(raw)
    except TgApiParseException:
        logger.debug("Failed parsing as api_type Message", exc_info=True)
    # end try
    raise TgApiParseException("Could not parse result.")  # See debug log for details!
# end def send_contact
Use this method to send phone contacts. On success, the sent Message is returned. https://core.telegram.org/bots/api#sendcontact Parameters: :param chat_id: Unique identifier for the target chat or username of the target channel (in the format @channelusername) :type chat_id: int | str|unicode :param phone_number: Contact's phone number :type phone_number: str|unicode :param first_name: Contact's first name :type first_name: str|unicode Optional keyword parameters: :param last_name: Contact's last name :type last_name: str|unicode :param vcard: Additional data about the contact in the form of a vCard, 0-2048 bytes :type vcard: str|unicode :param disable_notification: Sends the message silently. Users will receive a notification with no sound. :type disable_notification: bool :param reply_to_message_id: If the message is a reply, ID of the original message :type reply_to_message_id: int :param reply_markup: Additional interface options. A JSON-serialized object for an inline keyboard, custom reply keyboard, instructions to remove keyboard or to force a reply from the user. :type reply_markup: pytgbot.api_types.sendable.reply_markup.InlineKeyboardMarkup | pytgbot.api_types.sendable.reply_markup.ReplyKeyboardMarkup | pytgbot.api_types.sendable.reply_markup.ReplyKeyboardRemove | pytgbot.api_types.sendable.reply_markup.ForceReply Returns: :return: On success, the sent Message is returned :rtype: pytgbot.api_types.receivable.updates.Message
1.639756
1.588035
1.03257
def get_user_profile_photos(self, user_id, offset=None, limit=None):
    """
    Get a list of profile pictures for a user. Returns a UserProfilePhotos object.

    https://core.telegram.org/bots/api#getuserprofilephotos

    :param user_id: Unique identifier of the target user
    :type  user_id: int
    :param offset: Sequential number of the first photo to be returned. By default, all photos are returned.
    :type  offset: int
    :param limit: Limits the number of photos to be retrieved. Values between 1-100 are accepted. Defaults to 100.
    :type  limit: int
    :return: a UserProfilePhotos object
    :rtype:  pytgbot.api_types.receivable.media.UserProfilePhotos
    """
    # Validate all arguments before touching the network.
    assert_type_or_raise(user_id, int, parameter_name="user_id")
    assert_type_or_raise(offset, None, int, parameter_name="offset")
    assert_type_or_raise(limit, None, int, parameter_name="limit")

    raw = self.do("getUserProfilePhotos", user_id=user_id, offset=offset, limit=limit)
    if not self.return_python_objects:
        return raw
    # end if

    logger.debug("Trying to parse {data}".format(data=repr(raw)))
    from pytgbot.api_types.receivable.media import UserProfilePhotos
    try:
        return UserProfilePhotos.from_array(raw)
    except TgApiParseException:
        logger.debug("Failed parsing as api_type UserProfilePhotos", exc_info=True)
    # end try
    raise TgApiParseException("Could not parse result.")  # See debug log for details!
# end def get_user_profile_photos
Use this method to get a list of profile pictures for a user. Returns a UserProfilePhotos object. https://core.telegram.org/bots/api#getuserprofilephotos Parameters: :param user_id: Unique identifier of the target user :type user_id: int Optional keyword parameters: :param offset: Sequential number of the first photo to be returned. By default, all photos are returned. :type offset: int :param limit: Limits the number of photos to be retrieved. Values between 1—100 are accepted. Defaults to 100. :type limit: int Returns: :return: Returns a UserProfilePhotos object :rtype: pytgbot.api_types.receivable.media.UserProfilePhotos
2.960453
2.669351
1.109053
def get_file(self, file_id):
    """
    Get basic info about a file and prepare it for downloading.
    On success, a File object is returned; the file can then be downloaded via
    https://api.telegram.org/file/bot<token>/<file_path>, where <file_path> is
    taken from the response. The link is guaranteed valid for at least 1 hour;
    once it expires, call getFile again for a fresh one. Bots can currently
    download files of up to 20MB.

    Note: this may not preserve the original file name and MIME type — save
    them from the File object when received, if available.

    https://core.telegram.org/bots/api#getfile

    :param file_id: File identifier to get info about
    :type  file_id: str|unicode
    :return: a File object on success
    :rtype:  pytgbot.api_types.receivable.media.File
    """
    assert_type_or_raise(file_id, unicode_type, parameter_name="file_id")

    raw = self.do("getFile", file_id=file_id)
    if not self.return_python_objects:
        return raw
    # end if

    logger.debug("Trying to parse {data}".format(data=repr(raw)))
    from pytgbot.api_types.receivable.media import File
    try:
        return File.from_array(raw)
    except TgApiParseException:
        logger.debug("Failed parsing as api_type File", exc_info=True)
    # end try
    raise TgApiParseException("Could not parse result.")  # See debug log for details!
# end def get_file
Use this method to get basic info about a file and prepare it for downloading. For the moment, bots can download files of up to 20MB in size. On success, a File object is returned. The file can then be downloaded via the link https://api.telegram.org/file/bot<token>/<file_path>, where <file_path> is taken from the response. It is guaranteed that the link will be valid for at least 1 hour. When the link expires, a new one can be requested by calling getFile again. Note: This function may not preserve the original file name and MIME type. You should save the file's MIME type and name (if available) when the File object is received. https://core.telegram.org/bots/api#getfile Parameters: :param file_id: File identifier to get info about :type file_id: str|unicode Returns: :return: On success, a File object is returned :rtype: pytgbot.api_types.receivable.media.File
3.773364
3.639001
1.036923
def restrict_chat_member(self, chat_id, user_id, until_date=None, can_send_messages=None, can_send_media_messages=None, can_send_other_messages=None, can_add_web_page_previews=None):
    """
    Restrict a user in a supergroup. The bot must be an administrator in the
    supergroup with the appropriate admin rights. Pass True for all boolean
    parameters to lift restrictions from a user. Returns True on success.

    https://core.telegram.org/bots/api#restrictchatmember

    :param chat_id: Unique identifier for the target chat, or supergroup username in the format @supergroupusername
    :type  chat_id: int | str|unicode
    :param user_id: Unique identifier of the target user
    :type  user_id: int
    :param until_date: Unix time when restrictions will be lifted. More than 366 days or less than 30 seconds from now means restricted forever.
    :type  until_date: int
    :param can_send_messages: Pass True, if the user can send text messages, contacts, locations and venues
    :type  can_send_messages: bool
    :param can_send_media_messages: Pass True, if the user can send audios, documents, photos, videos, video notes and voice notes; implies can_send_messages
    :type  can_send_media_messages: bool
    :param can_send_other_messages: Pass True, if the user can send animations, games, stickers and use inline bots; implies can_send_media_messages
    :type  can_send_other_messages: bool
    :param can_add_web_page_previews: Pass True, if the user may add web page previews to their messages; implies can_send_media_messages
    :type  can_add_web_page_previews: bool
    :return: True on success
    :rtype:  bool
    """
    # Validate all arguments before touching the network.
    assert_type_or_raise(chat_id, (int, unicode_type), parameter_name="chat_id")
    assert_type_or_raise(user_id, int, parameter_name="user_id")
    assert_type_or_raise(until_date, None, int, parameter_name="until_date")
    assert_type_or_raise(can_send_messages, None, bool, parameter_name="can_send_messages")
    assert_type_or_raise(can_send_media_messages, None, bool, parameter_name="can_send_media_messages")
    assert_type_or_raise(can_send_other_messages, None, bool, parameter_name="can_send_other_messages")
    assert_type_or_raise(can_add_web_page_previews, None, bool, parameter_name="can_add_web_page_previews")

    raw = self.do(
        "restrictChatMember", chat_id=chat_id, user_id=user_id, until_date=until_date,
        can_send_messages=can_send_messages,
        can_send_media_messages=can_send_media_messages,
        can_send_other_messages=can_send_other_messages,
        can_add_web_page_previews=can_add_web_page_previews,
    )
    if not self.return_python_objects:
        return raw
    # end if

    logger.debug("Trying to parse {data}".format(data=repr(raw)))
    try:
        # API answers with a plain boolean, not an api_type object.
        return from_array_list(bool, raw, list_level=0, is_builtin=True)
    except TgApiParseException:
        logger.debug("Failed parsing as primitive bool", exc_info=True)
    # end try
    raise TgApiParseException("Could not parse result.")  # See debug log for details!
# end def restrict_chat_member
Use this method to restrict a user in a supergroup. The bot must be an administrator in the supergroup for this to work and must have the appropriate admin rights. Pass True for all boolean parameters to lift restrictions from a user. Returns True on success. https://core.telegram.org/bots/api#restrictchatmember Parameters: :param chat_id: Unique identifier for the target chat or username of the target supergroup (in the format @supergroupusername) :type chat_id: int | str|unicode :param user_id: Unique identifier of the target user :type user_id: int Optional keyword parameters: :param until_date: Date when restrictions will be lifted for the user, unix time. If user is restricted for more than 366 days or less than 30 seconds from the current time, they are considered to be restricted forever :type until_date: int :param can_send_messages: Pass True, if the user can send text messages, contacts, locations and venues :type can_send_messages: bool :param can_send_media_messages: Pass True, if the user can send audios, documents, photos, videos, video notes and voice notes, implies can_send_messages :type can_send_media_messages: bool :param can_send_other_messages: Pass True, if the user can send animations, games, stickers and use inline bots, implies can_send_media_messages :type can_send_other_messages: bool :param can_add_web_page_previews: Pass True, if the user may add web page previews to their messages, implies can_send_media_messages :type can_add_web_page_previews: bool Returns: :return: Returns True on success :rtype: bool
1.868462
1.762245
1.060273
def set_chat_photo(self, chat_id, photo):
    """
    Set a new profile photo for the chat. Photos can't be changed for private
    chats. The bot must be an administrator in the chat with the appropriate
    admin rights. Returns True on success.

    Note: in regular groups (non-supergroups) this only works when the
    'All Members Are Admins' setting is off in the target group.

    https://core.telegram.org/bots/api#setchatphoto

    :param chat_id: Unique identifier for the target chat, or channel username in the format @channelusername
    :type  chat_id: int | str|unicode
    :param photo: New chat photo, uploaded using multipart/form-data
    :type  photo: pytgbot.api_types.sendable.files.InputFile
    :return: True on success
    :rtype:  bool
    """
    from pytgbot.api_types.sendable.files import InputFile

    # Validate both arguments before touching the network.
    assert_type_or_raise(chat_id, (int, unicode_type), parameter_name="chat_id")
    assert_type_or_raise(photo, InputFile, parameter_name="photo")

    raw = self.do("setChatPhoto", chat_id=chat_id, photo=photo)
    if not self.return_python_objects:
        return raw
    # end if

    logger.debug("Trying to parse {data}".format(data=repr(raw)))
    try:
        # API answers with a plain boolean, not an api_type object.
        return from_array_list(bool, raw, list_level=0, is_builtin=True)
    except TgApiParseException:
        logger.debug("Failed parsing as primitive bool", exc_info=True)
    # end try
    raise TgApiParseException("Could not parse result.")  # See debug log for details!
# end def set_chat_photo
Use this method to set a new profile photo for the chat. Photos can't be changed for private chats. The bot must be an administrator in the chat for this to work and must have the appropriate admin rights. Returns True on success. Note: In regular groups (non-supergroups), this method will only work if the ‘All Members Are Admins’ setting is off in the target group. https://core.telegram.org/bots/api#setchatphoto Parameters: :param chat_id: Unique identifier for the target chat or username of the target channel (in the format @channelusername) :type chat_id: int | str|unicode :param photo: New chat photo, uploaded using multipart/form-data :type photo: pytgbot.api_types.sendable.files.InputFile Returns: :return: Returns True on success :rtype: bool
3.608132
3.20537
1.125652