code
string | signature
string | docstring
string | loss_without_docstring
float64 | loss_with_docstring
float64 | factor
float64 |
|---|---|---|---|---|---|
if array is None or not array:
return None
# end if
assert_type_or_raise(array, dict, parameter_name="array")
data = {}
data['file_id'] = u(array.get('file_id'))
data['length'] = int(array.get('length'))
data['duration'] = int(array.get('duration'))
data['thumb'] = PhotoSize.from_array(array.get('thumb')) if array.get('thumb') is not None else None
data['file_size'] = int(array.get('file_size')) if array.get('file_size') is not None else None
data['_raw'] = array
return VideoNote(**data)
|
def from_array(array)
|
Deserialize a new VideoNote from a given dictionary.
:return: new VideoNote instance.
:rtype: VideoNote
| 2.30891
| 1.867333
| 1.236475
|
if array is None or not array:
return None
# end if
assert_type_or_raise(array, dict, parameter_name="array")
data = {}
data['location'] = Location.from_array(array.get('location'))
data['title'] = u(array.get('title'))
data['address'] = u(array.get('address'))
data['foursquare_id'] = u(array.get('foursquare_id')) if array.get('foursquare_id') is not None else None
data['foursquare_type'] = u(array.get('foursquare_type')) if array.get('foursquare_type') is not None else None
data['_raw'] = array
return Venue(**data)
|
def from_array(array)
|
Deserialize a new Venue from a given dictionary.
:return: new Venue instance.
:rtype: Venue
| 2.096475
| 1.806507
| 1.160513
|
if array is None or not array:
return None
# end if
assert_type_or_raise(array, dict, parameter_name="array")
data = {}
data['total_count'] = int(array.get('total_count'))
data['photos'] = PhotoSize.from_array_list(array.get('photos'), list_level=2)
data['_raw'] = array
return UserProfilePhotos(**data)
|
def from_array(array)
|
Deserialize a new UserProfilePhotos from a given dictionary.
:return: new UserProfilePhotos instance.
:rtype: UserProfilePhotos
| 3.58084
| 2.859687
| 1.252179
|
if array is None or not array:
return None
# end if
assert_type_or_raise(array, dict, parameter_name="array")
data = {}
data['title'] = u(array.get('title'))
data['description'] = u(array.get('description'))
data['photo'] = PhotoSize.from_array_list(array.get('photo'), list_level=1)
data['text'] = u(array.get('text')) if array.get('text') is not None else None
data['text_entities'] = MessageEntity.from_array_list(array.get('text_entities'), list_level=1) if array.get('text_entities') is not None else None
data['animation'] = Animation.from_array(array.get('animation')) if array.get('animation') is not None else None
data['_raw'] = array
return Game(**data)
|
def from_array(array)
|
Deserialize a new Game from a given dictionary.
:return: new Game instance.
:rtype: Game
| 2.21469
| 1.887861
| 1.173121
|
array = super(ReplyKeyboardMarkup, self).to_array()
array['keyboard'] = self._as_array(self.keyboard) # type list of list of KeyboardButton
if self.resize_keyboard is not None:
array['resize_keyboard'] = bool(self.resize_keyboard) # type bool
if self.one_time_keyboard is not None:
array['one_time_keyboard'] = bool(self.one_time_keyboard) # type bool
if self.selective is not None:
array['selective'] = bool(self.selective) # type bool
return array
|
def to_array(self)
|
Serializes this ReplyKeyboardMarkup to a dictionary.
:return: dictionary representation of this object.
:rtype: dict
| 2.036623
| 1.823194
| 1.117063
|
if array is None or not array:
return None
# end if
assert_type_or_raise(array, dict, parameter_name="array")
from pytgbot.api_types.sendable.reply_markup import KeyboardButton
data = {}
data['keyboard'] = KeyboardButton.from_array_list(array.get('keyboard'), list_level=2)
data['resize_keyboard'] = bool(array.get('resize_keyboard')) if array.get('resize_keyboard') is not None else None
data['one_time_keyboard'] = bool(array.get('one_time_keyboard')) if array.get('one_time_keyboard') is not None else None
data['selective'] = bool(array.get('selective')) if array.get('selective') is not None else None
instance = ReplyKeyboardMarkup(**data)
instance._raw = array
return instance
|
def from_array(array)
|
Deserialize a new ReplyKeyboardMarkup from a given dictionary.
:return: new ReplyKeyboardMarkup instance.
:rtype: ReplyKeyboardMarkup
| 2.147186
| 1.857438
| 1.155993
|
array = super(KeyboardButton, self).to_array()
array['text'] = u(self.text) # py2: type unicode, py3: type str
if self.request_contact is not None:
array['request_contact'] = bool(self.request_contact) # type bool
if self.request_location is not None:
array['request_location'] = bool(self.request_location) # type bool
return array
|
def to_array(self)
|
Serializes this KeyboardButton to a dictionary.
:return: dictionary representation of this object.
:rtype: dict
| 2.35072
| 2.045989
| 1.148941
|
if array is None or not array:
return None
# end if
assert_type_or_raise(array, dict, parameter_name="array")
data = {}
data['text'] = u(array.get('text'))
data['request_contact'] = bool(array.get('request_contact')) if array.get('request_contact') is not None else None
data['request_location'] = bool(array.get('request_location')) if array.get('request_location') is not None else None
instance = KeyboardButton(**data)
instance._raw = array
return instance
|
def from_array(array)
|
Deserialize a new KeyboardButton from a given dictionary.
:return: new KeyboardButton instance.
:rtype: KeyboardButton
| 2.6346
| 1.958409
| 1.345275
|
array = super(ReplyKeyboardRemove, self).to_array()
array['remove_keyboard'] = bool(self.remove_keyboard) # type bool
if self.selective is not None:
array['selective'] = bool(self.selective) # type bool
return array
|
def to_array(self)
|
Serializes this ReplyKeyboardRemove to a dictionary.
:return: dictionary representation of this object.
:rtype: dict
| 2.806526
| 2.316442
| 1.211567
|
array = super(InlineKeyboardMarkup, self).to_array()
array['inline_keyboard'] = self._as_array(self.inline_keyboard) # type list of list of InlineKeyboardButton
return array
|
def to_array(self)
|
Serializes this InlineKeyboardMarkup to a dictionary.
:return: dictionary representation of this object.
:rtype: dict
| 4.721693
| 3.858722
| 1.223642
|
if array is None or not array:
return None
# end if
assert_type_or_raise(array, dict, parameter_name="array")
from pytgbot.api_types.sendable.reply_markup import InlineKeyboardButton
data = {}
data['inline_keyboard'] = InlineKeyboardButton.from_array_list(array.get('inline_keyboard'), list_level=2)
instance = InlineKeyboardMarkup(**data)
instance._raw = array
return instance
|
def from_array(array)
|
Deserialize a new InlineKeyboardMarkup from a given dictionary.
:return: new InlineKeyboardMarkup instance.
:rtype: InlineKeyboardMarkup
| 3.854225
| 3.024307
| 1.274416
|
array = super(InlineKeyboardButton, self).to_array()
array['text'] = u(self.text) # py2: type unicode, py3: type str
if self.url is not None:
array['url'] = u(self.url) # py2: type unicode, py3: type str
if self.callback_data is not None:
array['callback_data'] = u(self.callback_data) # py2: type unicode, py3: type str
if self.switch_inline_query is not None:
array['switch_inline_query'] = u(self.switch_inline_query) # py2: type unicode, py3: type str
if self.switch_inline_query_current_chat is not None:
array['switch_inline_query_current_chat'] = u(self.switch_inline_query_current_chat) # py2: type unicode, py3: type str
if self.callback_game is not None:
array['callback_game'] = self.callback_game.to_array() # type CallbackGame
if self.pay is not None:
array['pay'] = bool(self.pay) # type bool
return array
|
def to_array(self)
|
Serializes this InlineKeyboardButton to a dictionary.
:return: dictionary representation of this object.
:rtype: dict
| 1.375062
| 1.343164
| 1.023748
|
if array is None or not array:
return None
# end if
assert_type_or_raise(array, dict, parameter_name="array")
from pytgbot.api_types.receivable.updates import CallbackGame
data = {}
data['text'] = u(array.get('text'))
data['url'] = u(array.get('url')) if array.get('url') is not None else None
data['callback_data'] = u(array.get('callback_data')) if array.get('callback_data') is not None else None
data['switch_inline_query'] = u(array.get('switch_inline_query')) if array.get('switch_inline_query') is not None else None
data['switch_inline_query_current_chat'] = u(array.get('switch_inline_query_current_chat')) if array.get('switch_inline_query_current_chat') is not None else None
data['callback_game'] = CallbackGame.from_array(array.get('callback_game')) if array.get('callback_game') is not None else None
data['pay'] = bool(array.get('pay')) if array.get('pay') is not None else None
instance = InlineKeyboardButton(**data)
instance._raw = array
return instance
|
def from_array(array)
|
Deserialize a new InlineKeyboardButton from a given dictionary.
:return: new InlineKeyboardButton instance.
:rtype: InlineKeyboardButton
| 1.813384
| 1.505222
| 1.204729
|
array = super(ForceReply, self).to_array()
array['force_reply'] = bool(self.force_reply) # type bool
if self.selective is not None:
array['selective'] = bool(self.selective) # type bool
return array
|
def to_array(self)
|
Serializes this ForceReply to a dictionary.
:return: dictionary representation of this object.
:rtype: dict
| 2.938655
| 2.700912
| 1.088023
|
if array is None or not array:
return None
# end if
assert_type_or_raise(array, dict, parameter_name="array")
data = {}
data['force_reply'] = bool(array.get('force_reply'))
data['selective'] = bool(array.get('selective')) if array.get('selective') is not None else None
instance = ForceReply(**data)
instance._raw = array
return instance
|
def from_array(array)
|
Deserialize a new ForceReply from a given dictionary.
:return: new ForceReply instance.
:rtype: ForceReply
| 3.309624
| 2.519109
| 1.313808
|
array = super(PassportElementErrorDataField, self).to_array()
array['source'] = u(self.source) # py2: type unicode, py3: type str
array['type'] = u(self.type) # py2: type unicode, py3: type str
array['field_name'] = u(self.field_name) # py2: type unicode, py3: type str
array['data_hash'] = u(self.data_hash) # py2: type unicode, py3: type str
array['message'] = u(self.message) # py2: type unicode, py3: type str
return array
|
def to_array(self)
|
Serializes this PassportElementErrorDataField to a dictionary.
:return: dictionary representation of this object.
:rtype: dict
| 1.759358
| 1.650873
| 1.065714
|
array = super(PassportElementErrorReverseSide, self).to_array()
array['source'] = u(self.source) # py2: type unicode, py3: type str
array['type'] = u(self.type) # py2: type unicode, py3: type str
array['file_hash'] = u(self.file_hash) # py2: type unicode, py3: type str
array['message'] = u(self.message) # py2: type unicode, py3: type str
return array
|
def to_array(self)
|
Serializes this PassportElementErrorReverseSide to a dictionary.
:return: dictionary representation of this object.
:rtype: dict
| 2.061409
| 1.808144
| 1.140069
|
array = super(PassportElementErrorFiles, self).to_array()
array['source'] = u(self.source) # py2: type unicode, py3: type str
array['type'] = u(self.type) # py2: type unicode, py3: type str
array['file_hashes'] = self._as_array(self.file_hashes) # type list of str
array['message'] = u(self.message) # py2: type unicode, py3: type str
return array
|
def to_array(self)
|
Serializes this PassportElementErrorFiles to a dictionary.
:return: dictionary representation of this object.
:rtype: dict
| 2.398742
| 2.11461
| 1.134366
|
if array is None or not array:
return None
# end if
assert_type_or_raise(array, dict, parameter_name="array")
data = {}
data['source'] = u(array.get('source'))
data['type'] = u(array.get('type'))
data['file_hashes'] = PassportElementErrorFiles._builtin_from_array_list(required_type=unicode_type, value=array.get('file_hashes'), list_level=1)
data['message'] = u(array.get('message'))
instance = PassportElementErrorFiles(**data)
instance._raw = array
return instance
|
def from_array(array)
|
Deserialize a new PassportElementErrorFiles from a given dictionary.
:return: new PassportElementErrorFiles instance.
:rtype: PassportElementErrorFiles
| 5.077134
| 3.572991
| 1.420976
|
array = super(PassportElementErrorUnspecified, self).to_array()
array['source'] = u(self.source) # py2: type unicode, py3: type str
array['type'] = u(self.type) # py2: type unicode, py3: type str
array['element_hash'] = u(self.element_hash) # py2: type unicode, py3: type str
array['message'] = u(self.message) # py2: type unicode, py3: type str
return array
|
def to_array(self)
|
Serializes this PassportElementErrorUnspecified to a dictionary.
:return: dictionary representation of this object.
:rtype: dict
| 2.228325
| 2.029329
| 1.09806
|
if array is None or not array:
return None
# end if
assert_type_or_raise(array, dict, parameter_name="array")
data = {}
data['source'] = u(array.get('source'))
data['type'] = u(array.get('type'))
data['element_hash'] = u(array.get('element_hash'))
data['message'] = u(array.get('message'))
instance = PassportElementErrorUnspecified(**data)
instance._raw = array
return instance
|
def from_array(array)
|
Deserialize a new PassportElementErrorUnspecified from a given dictionary.
:return: new PassportElementErrorUnspecified instance.
:rtype: PassportElementErrorUnspecified
| 3.359073
| 2.479526
| 1.354724
|
array = super(InlineQuery, self).to_array()
array['id'] = u(self.id) # py2: type unicode, py3: type str
array['from'] = self.from_peer.to_array() # type User
array['query'] = u(self.query) # py2: type unicode, py3: type str
array['offset'] = u(self.offset) # py2: type unicode, py3: type str
if self.location is not None:
array['location'] = self.location.to_array() # type Location
return array
|
def to_array(self)
|
Serializes this InlineQuery to a dictionary.
:return: dictionary representation of this object.
:rtype: dict
| 2.000122
| 1.831608
| 1.092003
|
if array is None or not array:
return None
# end if
assert_type_or_raise(array, dict, parameter_name="array")
from pytgbot.api_types.receivable.media import Location
from pytgbot.api_types.receivable.peer import User
data = {}
data['id'] = u(array.get('id'))
data['from_peer'] = User.from_array(array.get('from'))
data['query'] = u(array.get('query'))
data['offset'] = u(array.get('offset'))
data['location'] = Location.from_array(array.get('location')) if array.get('location') is not None else None
data['_raw'] = array
return InlineQuery(**data)
|
def from_array(array)
|
Deserialize a new InlineQuery from a given dictionary.
:return: new InlineQuery instance.
:rtype: InlineQuery
| 2.764687
| 2.1374
| 1.293481
|
array = super(ChosenInlineResult, self).to_array()
array['result_id'] = u(self.result_id) # py2: type unicode, py3: type str
array['from'] = self.from_peer.to_array() # type User
array['query'] = u(self.query) # py2: type unicode, py3: type str
if self.location is not None:
array['location'] = self.location.to_array() # type Location
if self.inline_message_id is not None:
array['inline_message_id'] = u(self.inline_message_id) # py2: type unicode, py3: type str
return array
|
def to_array(self)
|
Serializes this ChosenInlineResult to a dictionary.
:return: dictionary representation of this object.
:rtype: dict
| 1.84956
| 1.715372
| 1.078227
|
if array is None or not array:
return None
# end if
assert_type_or_raise(array, dict, parameter_name="array")
from ..receivable.media import Location
from ..receivable.peer import User
data = {}
data['result_id'] = u(array.get('result_id'))
data['from_peer'] = User.from_array(array.get('from'))
data['query'] = u(array.get('query'))
data['location'] = Location.from_array(array.get('location')) if array.get('location') is not None else None
data['inline_message_id'] = u(array.get('inline_message_id')) if array.get('inline_message_id') is not None else None
data['_raw'] = array
return ChosenInlineResult(**data)
|
def from_array(array)
|
Deserialize a new ChosenInlineResult from a given dictionary.
:return: new ChosenInlineResult instance.
:rtype: ChosenInlineResult
| 2.743306
| 2.164196
| 1.267587
|
if array is None or not array:
return None
# end if
assert_type_or_raise(array, dict, parameter_name="array")
from pytgbot.api_types.receivable.inline import ChosenInlineResult
from pytgbot.api_types.receivable.inline import InlineQuery
from pytgbot.api_types.receivable.payments import PreCheckoutQuery
from pytgbot.api_types.receivable.payments import ShippingQuery
from pytgbot.api_types.receivable.updates import CallbackQuery
from pytgbot.api_types.receivable.updates import Message
data = {}
data['update_id'] = int(array.get('update_id'))
data['message'] = Message.from_array(array.get('message')) if array.get('message') is not None else None
data['edited_message'] = Message.from_array(array.get('edited_message')) if array.get('edited_message') is not None else None
data['channel_post'] = Message.from_array(array.get('channel_post')) if array.get('channel_post') is not None else None
data['edited_channel_post'] = Message.from_array(array.get('edited_channel_post')) if array.get('edited_channel_post') is not None else None
data['inline_query'] = InlineQuery.from_array(array.get('inline_query')) if array.get('inline_query') is not None else None
data['chosen_inline_result'] = ChosenInlineResult.from_array(array.get('chosen_inline_result')) if array.get('chosen_inline_result') is not None else None
data['callback_query'] = CallbackQuery.from_array(array.get('callback_query')) if array.get('callback_query') is not None else None
data['shipping_query'] = ShippingQuery.from_array(array.get('shipping_query')) if array.get('shipping_query') is not None else None
data['pre_checkout_query'] = PreCheckoutQuery.from_array(array.get('pre_checkout_query')) if array.get('pre_checkout_query') is not None else None
data['_raw'] = array
return Update(**data)
|
def from_array(array)
|
Deserialize a new Update from a given dictionary.
:return: new Update instance.
:rtype: Update
| 1.423048
| 1.399137
| 1.01709
|
return cls(
d['id'],
d['start'],
d['end'],
Lnk.charspan(d['from'], d['to']) if 'from' in d else None,
# d.get('paths', [1]),
form=d['form'],
surface=d.get('surface'),
# ipos=
# lrules=
pos=zip(d.get('tags', []), d.get('probabilities', []))
)
|
def from_dict(cls, d)
|
Decode from a dictionary as from :meth:`to_dict`.
| 8.233483
| 7.913085
| 1.04049
|
d = {
'id': self.id,
'start': self.start,
'end': self.end,
'form': self.form
}
if self.lnk is not None:
cfrom, cto = self.lnk.data
d['from'] = cfrom
d['to'] = cto
# d['paths'] = self.paths
if self.surface is not None:
d['surface'] = self.surface
# d['ipos'] = self.ipos
# d['lrules'] = self.lrules
if self.pos:
d['tags'] = [ps[0] for ps in self.pos]
d['probabilities'] = [ps[1] for ps in self.pos]
return d
|
def to_dict(self)
|
Encode the token as a dictionary suitable for JSON serialization.
| 3.357907
| 2.896843
| 1.159161
|
def _qstrip(s):
return s[1:-1] # remove assumed quote characters
tokens = []
for match in _yy_re.finditer(s):
d = match.groupdict()
lnk, pos = None, []
if d['lnkfrom'] is not None:
lnk = Lnk.charspan(d['lnkfrom'], d['lnkto'])
if d['pos'] is not None:
ps = d['pos'].strip().split()
pos = list(zip(map(_qstrip, ps[::2]), map(float, ps[1::2])))
tokens.append(
YyToken(
int(d['id']),
int(d['start']),
int(d['end']),
lnk,
list(map(int, d['paths'].strip().split())),
_qstrip(d['form']),
None if d['surface'] is None else _qstrip(d['surface']),
int(d['ipos']),
list(map(_qstrip, d['lrules'].strip().split())),
pos
)
)
return cls(tokens)
|
def from_string(cls, s)
|
Decode from the YY token lattice format.
| 3.869416
| 3.563566
| 1.085827
|
if isinstance(fh, stringtypes):
s = open(fh, 'r').read()
else:
s = fh.read()
return loads(s, single=single, version=version,
strict=strict, errors=errors)
|
def load(fh, single=False, version=_default_version,
strict=False, errors='warn')
|
Deserialize SimpleMRSs from a file (handle or filename)
Args:
fh (str, file): input filename or file object
single: if `True`, only return the first read Xmrs object
strict: deprecated; a `True` value is the same as
`errors='strict'`, and a `False` value is the same as
`errors='warn'`
errors: if `'strict'`, ill-formed MRSs raise an error; if
`'warn'`, raise a warning instead; if `'ignore'`, do not
warn or raise errors for ill-formed MRSs
Returns:
a generator of Xmrs objects (unless the *single* option is
`True`)
| 2.517766
| 3.315671
| 0.759353
|
ms = deserialize(s, version=version, strict=strict, errors=errors)
if single:
return next(ms)
else:
return ms
|
def loads(s, single=False, version=_default_version,
strict=False, errors='warn')
|
Deserialize SimpleMRS string representations
Args:
s (str): a SimpleMRS string
single (bool): if `True`, only return the first Xmrs object
Returns:
a generator of Xmrs objects (unless *single* is `True`)
| 3.49213
| 4.23462
| 0.824662
|
if not pretty_print and kwargs.get('indent'):
pretty_print = True
if single:
ms = [ms]
return serialize(ms, version=version, properties=properties,
pretty_print=pretty_print, color=color)
|
def dumps(ms, single=False, version=_default_version, properties=True,
pretty_print=False, color=False, **kwargs)
|
Serialize an Xmrs object to a SimpleMRS representation
Args:
ms: an iterator of Xmrs objects to serialize (unless the
*single* option is `True`)
single: if `True`, treat *ms* as a single Xmrs object instead
of as an iterator
properties: if `False`, suppress variable properties
pretty_print: if `True`, add newlines and indentation
color: if `True`, colorize the output with ANSI color codes
Returns:
a SimpleMrs string representation of a corpus of Xmrs
| 2.641029
| 3.676818
| 0.718292
|
# < FROM : TO > or < FROM # TO > or < TOK... > or < @ EDGE >
lnk = None
if tokens[0] == '<':
tokens.popleft() # we just checked this is a left angle
if tokens[0] == '>':
pass # empty <> brackets the same as no lnk specified
# edge lnk: ['@', EDGE, ...]
elif tokens[0] == '@':
tokens.popleft() # remove the @
lnk = Lnk.edge(tokens.popleft()) # edge lnks only have one number
# character span lnk: [FROM, ':', TO, ...]
elif tokens[1] == ':':
lnk = Lnk.charspan(tokens.popleft(), tokens[1])
tokens.popleft() # this should be the colon
tokens.popleft() # and this is the cto
# chart vertex range lnk: [FROM, '#', TO, ...]
elif tokens[1] == '#':
lnk = Lnk.chartspan(tokens.popleft(), tokens[1])
tokens.popleft() # this should be the hash
tokens.popleft() # and this is the to vertex
# tokens lnk: [(TOK,)+ ...]
else:
lnkdata = []
while tokens[0] != '>':
lnkdata.append(int(tokens.popleft()))
lnk = Lnk.tokens(lnkdata)
_read_literals(tokens, '>')
return lnk
|
def _read_lnk(tokens)
|
Read and return a tuple of the pred's lnk type and lnk value,
if a pred lnk is specified.
| 5.018074
| 4.990562
| 1.005513
|
delim = '\n' if pretty_print else _default_mrs_delim
output = delim.join(
_serialize_mrs(m, properties=properties,
version=version, pretty_print=pretty_print)
for m in ms
)
if color:
output = highlight(output)
return output
|
def serialize(ms, version=_default_version, properties=True,
pretty_print=False, color=False)
|
Serialize an MRS structure into a SimpleMRS string.
| 3.649093
| 3.350596
| 1.089088
|
_argument = '{rargname}: {value}{props}'
if rargname == CONSTARG_ROLE:
value = '"{}"'.format(value)
props = ''
if value in varprops:
props = ' [ {} ]'.format(
' '.join(
[var_sort(value)] +
list(map('{0[0]}: {0[1]}'.format,
[(k.upper(), v) for k, v in varprops[value]]))
)
)
del varprops[value] # only print props once
return _argument.format(
rargname=rargname,
value=str(value),
props=props
)
|
def _serialize_argument(rargname, value, varprops)
|
Serialize an MRS argument into the SimpleMRS format.
| 4.46641
| 4.375227
| 1.020841
|
# ('nodeid', 'pred', 'label', 'args', 'lnk', 'surface', 'base')
args = ep[3]
arglist = ' '.join([_serialize_argument(rarg, args[rarg], varprops)
for rarg in sorted(args, key=rargname_sortkey)])
if version < 1.1 or len(ep) < 6 or ep[5] is None:
surface = ''
else:
surface = ' "%s"' % ep[5]
lnk = None if len(ep) < 5 else ep[4]
pred = ep[1]
predstr = pred.string
return '[ {pred}{lnk}{surface} LBL: {label}{s}{args} ]'.format(
pred=predstr,
lnk=_serialize_lnk(lnk),
surface=surface,
label=str(ep[2]),
s=' ' if arglist else '',
args=arglist
)
|
def _serialize_ep(ep, varprops, version=_default_version)
|
Serialize an Elementary Predication into the SimpleMRS encoding.
| 4.671371
| 4.628834
| 1.00919
|
s = ""
if lnk is not None:
s = '<'
if lnk.type == Lnk.CHARSPAN:
cfrom, cto = lnk.data
s += ''.join([str(cfrom), ':', str(cto)])
elif lnk.type == Lnk.CHARTSPAN:
cfrom, cto = lnk.data
s += ''.join([str(cfrom), '#', str(cto)])
elif lnk.type == Lnk.TOKENS:
s += ' '.join([str(t) for t in lnk.data])
elif lnk.type == Lnk.EDGE:
s += ''.join(['@', str(lnk.data)])
s += '>'
return s
|
def _serialize_lnk(lnk)
|
Serialize a predication lnk to surface form into the SimpleMRS
encoding.
| 2.887243
| 2.738004
| 1.054507
|
toks = ['HCONS:', '<']
for hc in hcons:
toks.extend(hc)
# reln = hcon[1]
# toks += [hcon[0], rel, str(hcon.lo)]
toks += ['>']
return ' '.join(toks)
|
def _serialize_hcons(hcons)
|
Serialize [HandleConstraints] into the SimpleMRS encoding.
| 8.10202
| 8.061115
| 1.005074
|
toks = ['ICONS:', '<']
for ic in icons:
toks.extend(ic)
# toks += [str(icon.left),
# icon.relation,
# str(icon.right)]
toks += ['>']
return ' '.join(toks)
|
def _serialize_icons(icons)
|
Serialize [IndividualConstraints] into the SimpleMRS encoding.
| 6.504158
| 5.42267
| 1.199438
|
fields = set(fields)
diff = fields.difference(_all_fields)
if isinstance(labels, Sequence):
labels = _map_labels(self, labels)
elif labels is None:
labels = {}
if diff:
raise ValueError(
'Invalid field(s): {}'.format(', '.join(diff))
)
return _to_dict(self, fields, labels)
|
def to_dict(self, fields=_all_fields, labels=None)
|
Encode the node as a dictionary suitable for JSON serialization.
Args:
fields: if given, this is a whitelist of fields to include
on nodes (`daughters` and `form` are always shown)
labels: optional label annotations to embed in the
derivation dict; the value is a list of lists matching
the structure of the derivation (e.g.,
`["S" ["NP" ["NNS" ["Dogs"]]] ["VP" ["VBZ" ["bark"]]]]`)
Returns:
dict: the dictionary representation of the structure
| 3.261701
| 3.938098
| 0.828243
|
if (self._head or self.is_root() or
len(getattr(self._parent, 'daughters', [None])) == 1):
return True
elif any(dtr._head for dtr in self._parent.daughters):
return False
return None
|
def is_head(self)
|
Return `True` if the node is a head.
A node is a head if it is marked as a head in the UDX format or
it has no siblings. `False` is returned if the node is known
to not be a head (has a sibling that is a head). Otherwise it
is indeterminate whether the node is a head, and `None` is
returned.
| 6.098336
| 6.060694
| 1.006211
|
nodes = []
for dtr in self.daughters:
if isinstance(dtr, UdfTerminal):
nodes.append(self)
else:
nodes.extend(dtr.preterminals())
return nodes
|
def preterminals(self)
|
Return the list of preterminals (i.e. lexical grammar-entities).
| 4.632183
| 4.281219
| 1.081978
|
if not (s.startswith('(') and s.endswith(')')):
raise ValueError(
'Derivations must begin and end with parentheses: ( )'
)
s_ = s[1:] # get rid of initial open-parenthesis
stack = []
deriv = None
try:
matches = cls.udf_re.finditer(s_)
for match in matches:
if match.group('done'):
node = stack.pop()
if len(stack) == 0:
deriv = node
break
else:
stack[-1].daughters.append(node)
elif match.group('form'):
if len(stack) == 0:
raise ValueError('Possible leaf node with no parent.')
gd = match.groupdict()
# ignore LKB-style start/end data if it exists on gd
term = UdfTerminal(
_unquote(gd['form']),
tokens=_udf_tokens(gd.get('tokens')),
parent=stack[-1] if stack else None
)
stack[-1].daughters.append(term)
elif match.group('id'):
gd = match.groupdict()
head = None
entity, _, type = gd['entity'].partition('@')
if entity[0] == '^':
entity = entity[1:]
head = True
if type == '':
type = None
udf = UdfNode(gd['id'], entity, gd['score'],
gd['start'], gd['end'],
head=head, type=type,
parent=stack[-1] if stack else None)
stack.append(udf)
elif match.group('root'):
udf = UdfNode(None, match.group('root'))
stack.append(udf)
except (ValueError, AttributeError):
raise ValueError('Invalid derivation: %s' % s)
if stack or deriv is None:
raise ValueError('Invalid derivation; possibly unbalanced '
'parentheses: %s' % s)
return cls(*deriv, head=deriv._head, type=deriv.type)
|
def from_string(cls, s)
|
Instantiate a `Derivation` from a UDF or UDX string representation.
The UDF/UDX representations are as output by a processor like the
`LKB <http://moin.delph-in.net/LkbTop>`_ or
`ACE <http://sweaglesw.org/linguistics/ace/>`_, or from the
:meth:`UdfNode.to_udf` or :meth:`UdfNode.to_udx` methods.
Args:
s (str): UDF or UDX serialization
| 4.12376
| 4.023616
| 1.024889
|
graphs = penman.load(fh, cls=XMRSCodec)
xs = [model.from_triples(g.triples()) for g in graphs]
return xs
|
def load(fh, model)
|
Deserialize PENMAN graphs from a file (handle or filename)
Args:
fh: filename or file object
model: Xmrs subclass instantiated from decoded triples
Returns:
a list of objects (of class *model*)
| 15.604127
| 13.639212
| 1.144064
|
graphs = penman.loads(s, cls=XMRSCodec)
xs = [model.from_triples(g.triples()) for g in graphs]
return xs
|
def loads(s, model)
|
Deserialize PENMAN graphs from a string
Args:
s (str): serialized PENMAN graphs
model: Xmrs subclass instantiated from decoded triples
Returns:
a list of objects (of class *model*)
| 15.636409
| 10.854506
| 1.440545
|
text = dumps(
xs, model=model, properties=properties, indent=indent, **kwargs
)
if hasattr(destination, 'write'):
print(text, file=destination)
else:
with open(destination, 'w') as fh:
print(text, file=fh)
|
def dump(destination, xs, model=None, properties=False, indent=True, **kwargs)
|
Serialize Xmrs (or subclass) objects to PENMAN and write to a file.
Args:
destination: filename or file object
xs: iterator of :class:`~delphin.mrs.xmrs.Xmrs` objects to
serialize
model: Xmrs subclass used to get triples
properties: if `True`, encode variable properties
indent: if `True`, adaptively indent; if `False` or `None`,
don't indent; if a non-negative integer N, indent N spaces
per level
| 2.150949
| 3.222061
| 0.667569
|
xs = list(xs)
if not xs:
return ''
given_class = xs[0].__class__ # assume they are all the same
if model is None:
model = xs[0].__class__
if not hasattr(model, 'to_triples'):
raise TypeError(
'{} class does not implement to_triples()'.format(model.__name__)
)
# convert MRS to DMRS if necessary; EDS cannot convert
if given_class.__name__ in ('Mrs', 'Xmrs'):
xs = [model.from_xmrs(x, **kwargs) for x in xs]
elif given_class.__name__ == 'Eds' and model.__name__ != 'Eds':
raise ValueError('Cannot convert EDS to non-EDS')
codec = XMRSCodec()
graphs = [
codec.triples_to_graph(model.to_triples(x, properties=properties))
for x in xs
]
if 'pretty_print' in kwargs:
indent = kwargs['pretty_print']
return penman.dumps(graphs, cls=XMRSCodec, indent=indent)
|
def dumps(xs, model=None, properties=False, indent=True, **kwargs)
|
Serialize Xmrs (or subclass) objects to PENMAN notation
Args:
xs: iterator of :class:`~delphin.mrs.xmrs.Xmrs` objects to
serialize
model: Xmrs subclass used to get triples
properties: if `True`, encode variable properties
indent: if `True`, adaptively indent; if `False` or `None`,
don't indent; if a non-negative integer N, indent N spaces
per level
Returns:
the PENMAN serialization of *xs*
| 3.892098
| 3.918684
| 0.993216
|
if array is None or not array:
return None
# end if
assert_type_or_raise(array, dict, parameter_name="array")
data = {}
data['keyboard'] = KeyboardButton.from_array_list(array.get('keyboard'), list_level=2)
data['resize_keyboard'] = bool(array.get('resize_keyboard')) if array.get('resize_keyboard') is not None else None
data['one_time_keyboard'] = bool(array.get('one_time_keyboard')) if array.get('one_time_keyboard') is not None else None
data['selective'] = bool(array.get('selective')) if array.get('selective') is not None else None
instance = ReplyKeyboardMarkup(**data)
instance._raw = array
return instance
|
def from_array(array)
|
Deserialize a new ReplyKeyboardMarkup from a given dictionary.
:return: new ReplyKeyboardMarkup instance.
:rtype: ReplyKeyboardMarkup
| 2.189272
| 1.888042
| 1.159546
|
if array is None or not array:
return None
# end if
assert_type_or_raise(array, dict, parameter_name="array")
data = {}
data['inline_keyboard'] = InlineKeyboardButton.from_array_list(array.get('inline_keyboard'), list_level=2)
instance = InlineKeyboardMarkup(**data)
instance._raw = array
return instance
|
def from_array(array)
|
Deserialize a new InlineKeyboardMarkup from a given dictionary.
:return: new InlineKeyboardMarkup instance.
:rtype: InlineKeyboardMarkup
| 4.414695
| 3.283587
| 1.344473
|
from luckydonaldUtils.encoding import to_native as n
from pytgbot.api_types.sendable import Sendable
from pytgbot.api_types import as_array
from DictObject import DictObject
import json
params = {}
for key in query.keys():
element = query[key]
if element is not None:
if isinstance(element, Sendable):
params[key] = json.dumps(as_array(element))
else:
params[key] = element
url = self._base_url.format(api_key=n(self.api_key), command=n(command))
return DictObject(url=url, params=params)
|
def _prepare_request(self, command, query)
|
:param command: The Url command parameter
:type command: str
:param query: will get json encoded.
:type query: dict
:return:
| 3.224368
| 3.215993
| 1.002604
|
# Build the url + encoded params, perform the HTTP call, then parse the
# response (json dict, or python objects if configured).
params = self._prepare_request(command, query)
r = self._do_request(
    params.url, params=params.params,
    files=files, stream=use_long_polling, timeout=request_timeout
)
return self._process_response(r)
|
def do(self, command, files=None, use_long_polling=False, request_timeout=None, **query)
|
Send a request to the api.
If the bot is set to return the json objects, it will look like this:
```json
{
"ok": bool,
"result": {...},
# optionally present:
"description": "human-readable description of the result",
"error_code": int
}
```
:param command: The Url command parameter
:type command: str
:keyword request_timeout: When the request should time out.
:type request_timeout: int
:keyword files: if it needs to send files.
:keyword use_long_polling: if it should use long polling.
(see http://docs.python-requests.org/en/latest/api/#requests.Response.iter_content)
:type use_long_polling: bool
:param query: will get json encoded.
:return: The json response from the server, or, if `self.return_python_objects` is `True`, a parsed return type.
:rtype: DictObject.DictObject | pytgbot.api_types.receivable.Receivable
| 3.788836
| 5.357701
| 0.707176
|
# Classify a documentation <table> as describing a function or a class,
# and collect each body row as one tab-joined parameter string.
first = True
table_header = None
table_type = 'unknown'
param_strings = []
thead = tag.find('thead', recursive=False)
theads = None  # list (items in <tr> row) of <th>/<tr> elements.
if thead:
    theads = thead.find_all(["th", "td"])
# end if
tbody = tag.find('tbody', recursive=False)
if tbody:
    tbody_rows = tbody.find_all("tr")
else:
    # no explicit <tbody>: treat all rows as body rows for now
    tbody_rows = tag.find_all("tr")
# end if
tbodys = [  # list (rows) of list (items in <tr> row) of <tr> elements.
    row.find_all(["td" ,"th"]) for row in tbody_rows
]
if not thead:  # so first row = header
    theads = tbody_rows[0]
    tbodys = tbody_rows[1:]
# end if
# TABLE HEADER
found_columns = []
for column in theads:
    # Either (a) `<td><strong> ... </strong></td>`
    # or new (b) `<th> ... </th>`
    col = column.find("strong")
    if col:
        # (a) `<td><strong> ... </strong></td>`
        col_text = col.text
    else:
        # (b) `<th> ... </th>`
        col_text = column.text
    # end if
    found_columns.append(col_text)
# end for
# if TABLE is func: compare header against the known function-table headers
for test_columns in func_fields:
    if found_columns == test_columns:
        table_header = test_columns
        table_type = 'func'
        break
    # end if
# end for
# if TABLE is class
if not table_header:  # only check if we don't have a result yet
    # search class now
    for test_columns in class_fields:
        if found_columns == test_columns:
            if table_header is not None:
                raise AssertionError("Table detected as func and class: {!r}".format(found_columns))
            table_header = test_columns
            table_type = 'class'
            break
        # end if
    # end for
# end if
# TABLE is none of the above
if not table_header:  # we don't have a result yet
    raise AssertionError("Unknown table, {!r}".format(found_columns))
# end if
# TABLE BODY: one tab-joined string per row
for tds in tbodys:
    string = "\t".join([col.text for col in tds])
    logger.debug("t: " + string)
    param_strings.append(string)
    pass
# end for row
return table_type, param_strings
|
def parse_table(tag)
|
returns tuple of type ("class"/"func") and list of param strings.
:param tag:
:return:
| 3.468734
| 3.391714
| 1.022708
|
# Partition generated results into bot functions, message-send wrappers and
# api-type classes, then render each group through its template into the
# appropriate file under *folder*.
functions = []
message_send_functions = []
clazzes = {}  # "filepath": [Class, Class, ...]
# split results into functions and classes
for result in results:
    assert isinstance(result, (Clazz, Function))
    if isinstance(result, Clazz):
        import_path = get_type_path(result.clazz)
        import_path = import_path.rstrip(".")
        file_path = calc_path_and_create_folders(folder, import_path)
        result.filepath = file_path
        if file_path not in clazzes:
            clazzes[file_path] = []
        clazzes[file_path].append(result)
    else:
        assert isinstance(result, Function)
        import_path = "pytgbot.bot."
        file_path = calc_path_and_create_folders(folder, import_path)
        result.filepath = file_path
        functions.append(result)
        # send_* functions additionally become teleflask message wrappers
        if result.name.startswith('send_'):
            import_path = "teleflask_messages."
            file_path = calc_path_and_create_folders(folder, import_path)
            result2 = safe_eval(repr(result), SAVE_VALUES)  # serialize + unserialize = deepcopy
            result2.filepath = file_path
            message_send_functions.append(result2)
        # end if
    # end if
# end for
bot_template = get_template("bot.template")
clazzfile_template = get_template("classfile.template")
teleflask_messages_template = get_template("teleflask_messages_file.template")
# one class file per output path, with sorted parent-class imports
for path, clazz_list in clazzes.items():
    clazz_imports = set()
    for clazz_ in clazz_list:
        assert isinstance(clazz_, Clazz)
        assert isinstance(clazz_.parent_clazz, Type)
        clazz_imports.add(clazz_.parent_clazz.as_import)
    # end for
    clazz_imports = list(clazz_imports)
    clazz_imports.sort()
    is_sendable = ("sendable" in path)
    try:
        with open(path, "w") as f:
            result = clazzfile_template.render(clazzes=clazz_list, imports=clazz_imports, is_sendable=is_sendable)
            result = result.replace("\t", "    ")
            f.write(result)
        # end with
    except IOError:
        raise  # lol
    # end try
# end for classes
if functions:
    txt = bot_template.render(functions=functions)
    with open(functions[0].filepath, "w") as f:
        f.write(txt)
    # end with
# end if
if message_send_functions:
    txt = teleflask_messages_template.render(functions=message_send_functions)
    with open(message_send_functions[0].filepath, "w") as f:
        f.write(txt)
def safe_to_file(folder, results)
|
Receives a list of results (type :class:`Clazz` or :class:`Function`), and put them into the right files in :var:`folder`
:param folder: Where the files should be in.
:type folder: str
:param results: A list of :class:`Clazz` or :class:`Function` objects, which will be used to calculate the source code.
:type results: Union(Clazz, Function)
| 2.96764
| 2.907731
| 1.020604
|
file_path = abspath(path_join(folder, import_path[:import_path.rfind(".")].replace(".", folder_seperator) + ".py"))
mkdir_p(dirname(file_path))
return file_path
|
def calc_path_and_create_folders(folder, import_path)
|
calculate the path and create the needed folders
| 4.771793
| 4.780458
| 0.998187
|
f = open(filename, "r")
buf = BytesIO(f.read())
f.close()
return buf
|
def read_file_to_buffer(filename)
|
Reads a file to string buffer
:param filename:
:return:
| 2.650285
| 3.08322
| 0.859584
|
import ast
# Phase 1: split on whitespace, but keep double-quoted runs together.
is_quoted = False
result_parts = []
current_str = ""
while len(string) > 0:
    if string[0] == "\"":
        # toggle quoting; the quote char itself is kept for literal_eval
        is_quoted = not is_quoted
        current_str += string[0]
    elif string[0].isspace():
        if is_quoted:
            current_str += string[0]
        else:
            result_parts.append(current_str)
            current_str = ""
        # end if
    else:
        current_str += string[0]
    # end if
    string = string[1:]
# end while
if current_str:  # last part of the array
    result_parts.append(current_str)
# end if
# Phase 2: interpret each part as a python literal where possible.
for i in range(len(result_parts)):
    # Will try for each element if it is something pythonic. Parsed type will replace original list element.
    try:
        part = ast.literal_eval(result_parts[i])
        result_parts[i] = part  # write it back.
    except ValueError:
        # could not parse -> is string
        pass  # because already is str.
    # end try
# end for
return result_parts
|
def parse_args(string)
|
`"yada hoa" yupi yeah 12 "" None "None"` -> `["yada hoa", "yupi", "yeah", 12, "", None, "None"]`
:param string:
:return:
| 3.391137
| 3.328866
| 1.018706
|
# Normalize *color*/*reset* into escape-code strings, then re-apply
# *color* after every color-off / all-off sequence inside *string* so
# the new color survives embedded resets.
if isinstance(color, int):
    color = self.prepare_color(color)
# end if
prefix = color if prefix else ""
if isinstance(reset, int):
    reset = self.prepare_color(reset)
elif isinstance(reset, bool):
    # True -> emit a color-off at the end, False -> nothing
    reset = self.formatter.color_off if reset else ""
# end if
return (
    prefix +
    string.replace(self.formatter.color_off, self.formatter.color_off+color).replace(self.formatter.all_off, self.formatter.all_off + color) +
    reset
)
|
def overwrite_color(self, string, color, prefix=False, reset=False)
|
:param string: input
:param color: new color
:param prefix: if it also should start the color to at the beginning.
:param reset: if it also should end the color at the ending.
:type reset: bool | int | str
:return:
| 3.345588
| 3.466338
| 0.965165
|
# Resolve a boolean *id_prefix* into a concrete prefix string based on
# the peer's type; a string id_prefix is used as-is.
# NOTE(review): the `reply` parameter is never used in this body — confirm
# whether it is intentional.
if isinstance(id_prefix, bool):
    if id_prefix:  # True
        if isinstance(peer, User):
            id_prefix = "user"
        elif isinstance(peer, Chat):
            id_prefix = peer.type
        else:
            id_prefix = "unknown"
        # end if
    else:  # False
        id_prefix = ""
    # end if
# end if
peer_string = self.peer_to_string(peer)
# append e.g. " (user#1234)" in color when the peer carries an id
if show_id and "id" in peer:
    peer_string += " ({color_lightblue}{id_prefix}#{id}{color_off})".format(id_prefix=id_prefix, id=peer.id, **self.color.formatter)
return peer_string
|
def print_peer(self, peer, show_id=True, id_prefix="", reply=True)
|
:param id_prefix: Prefix of the #id thing. Set a string, or true to have it generated.
:type id_prefix: str|bool
| 3.238233
| 2.951837
| 1.097023
|
from importlib import import_module
reader = import_module('{}.{}'.format('delphin.mrs', src_fmt.lower()))
writer = import_module('{}.{}'.format('delphin.mrs', tgt_fmt.lower()))
return writer.dumps(
reader.loads(txt, single=single),
single=single,
**kwargs
)
|
def convert(txt, src_fmt, tgt_fmt, single=True, **kwargs)
|
Convert a textual representation of \*MRS from one the src_fmt
representation to the tgt_fmt representation. By default, only
read and convert a single \*MRS object (e.g. for `mrx` this
starts at <mrs> and not <mrs-list>), but changing the `mode`
argument to `corpus` (alternatively: `list`) reads and converts
multiple \*MRSs.
Args:
txt: A string of semantic data.
src_fmt: The original representation format of txt.
tgt_fmt: The representation format to convert to.
single: If True, assume txt represents a single \*MRS, otherwise
read it as a corpus (or list) of \*MRSs.
kwargs: Any other keyword arguments to pass to the serializer
of the target format. See Notes.
Returns:
A string in the target format.
Notes:
src_fmt and tgt_fmt may be one of the following:
| format | description |
| --------- | ---------------------------- |
| simplemrs | The popular SimpleMRS format |
| mrx | The XML format of MRS |
| dmrx | The XML format of DMRS |
Additional keyword arguments for the serializer may include:
| option | description |
| ------------ | ----------------------------------- |
| pretty_print | print with newlines and indentation |
| color | print with syntax highlighting |
| 3.129132
| 3.170006
| 0.987106
|
# Look ahead *n* tokens, transparently skipping (and dropping) comments.
return tokens.peek(n=n, skip=_is_comment, drop=True)
|
def _peek(tokens, n=0)
|
peek and drop comments
| 17.300842
| 10.334326
| 1.674114
|
# Peek the token *after* the next one first (skipping comments), then pop
# the next token; return its fields plus the following token's gid.
after = tokens.peek(n=1, skip=_is_comment, drop=True)
tok = tokens._buffer.popleft()
return tok[0], tok[1], tok[2], after[0]
|
def _shift(tokens)
|
pop the next token, then peek the gid of the following
| 9.956371
| 8.216611
| 1.211737
|
data = []
stack = []       # saved break_on values for nested lists/AVMs
break_on = 10    # gid that terminates the current definition
in_def = False
for item in lexitems:
    gid = item[0]
    # only yield comments outside of definitions
    if gid in (2, 3):
        if len(data) == 0:
            yield [item]
        else:
            continue
    elif gid == 20:
        assert len(data) == 0
        yield [item]
    # the following just checks if the previous definition was not
    # terminated when the next one is read in
    elif gid in (7, 8):
        if in_def:
            # flush the unterminated definition, start a new one
            yield data[:-1]
            data = data[-1:] + [item]
            stack = []
            break_on = 10
        else:
            data.append(item)
        in_def = True
    else:
        data.append(item)
        if gid == break_on:
            if len(stack) == 0:
                # definition complete at top nesting level
                yield data
                data = []
                in_def = False
            else:
                break_on = stack.pop()
        elif gid in (13, 14, 15):
            # opening bracket: its closer's gid is opener + 3
            stack.append(break_on)
            break_on = gid + 3
if data:
    yield data
|
def _accumulate(lexitems)
|
Yield lists of tokens based on very simple parsing that checks the
level of nesting within a structure. This is probably much faster
than the LookaheadIterator method, but it is less safe; an unclosed
list or AVM may cause it to build a list including the rest of the
file, or it may return a list that doesn't span a full definition.
As PyDelphin's goals for TDL parsing do not include speed, this
method is not currently used, although it is retained in the source
code as an example if future priorities change.
| 3.53527
| 3.487372
| 1.013735
|
lines = enumerate(stream, 1)
line_no = pos = 0
try:
while True:
if pos == 0:
line_no, line = next(lines)
matches = _tdl_lex_re.finditer(line, pos)
pos = 0 # reset; only used for multiline patterns
for m in matches:
gid = m.lastindex
if gid <= 2: # potentially multiline patterns
if gid == 1: # docstring
s, start_line_no, line_no, line, pos = _bounded(
'', line, m.end(), line_no, lines)
elif gid == 2: # comment
s, start_line_no, line_no, line, pos = _bounded(
'#|', '|#', line, m.end(), line_no, lines)
yield (gid, s, line_no)
break
elif gid == 30:
raise TdlParsingError(
('Syntax error:\n {}\n {}^'
.format(line, ' ' * m.start())),
line_number=line_no)
else:
# token = None
# if not (6 < gid < 20):
# token = m.group(gid)
token = m.group(gid)
yield (gid, token, line_no)
except StopIteration:
pass
|
def _lex(stream)
|
Lex the input stream according to _tdl_lex_re.
Yields
(gid, token, line_number)
| 4.317958
| 3.800848
| 1.136051
|
substrings = []
start_line_no = line_no
end = pos
# Scan forward for the closing delimiter *p2*, honoring backslash escapes
# and pulling in additional lines from *lines* as needed.
while not line.startswith(p2, end):
    if line[end] == '\\':
        end += 2  # skip the escaped character
    else:
        end += 1
    if end >= len(line):
        # line exhausted: bank it and continue on the next line
        substrings.append(line[pos:])
        try:
            line_no, line = next(lines)
        except StopIteration:
            pattern = 'docstring' if p1 == '"""' else 'block comment'
            raise TdlParsingError('Unterminated {}'.format(pattern),
                                  line_number=start_line_no)
        pos = end = 0
substrings.append(line[pos:end])
end += len(p2)  # position the caller just past the closing delimiter
return ''.join(substrings), start_line_no, line_no, line, end
|
def _bounded(p1, p2, line, pos, line_no, lines)
|
Collect the contents of a bounded multiline string
| 3.648773
| 3.479498
| 1.048649
|
if hasattr(source, 'read'):
for event in _parse2(source):
yield event
else:
with io.open(source, encoding=encoding) as fh:
for event in _parse2(fh):
yield event
|
def iterparse(source, encoding='utf-8')
|
Parse the TDL file *source* and iteratively yield parse events.
If *source* is a filename, the file is opened and closed when the
generator has finished, otherwise *source* is an open file object
and will not be closed when the generator has finished.
Parse events are `(event, object, lineno)` tuples, where `event`
is a string (`"TypeDefinition"`, `"TypeAddendum"`,
`"LexicalRuleDefinition"`, `"LetterSet"`, `"WildCard"`,
`"LineComment"`, or `"BlockComment"`), `object` is the interpreted
TDL object, and `lineno` is the line number where the entity began
in *source*.
Args:
source (str, file): a filename or open file object
encoding (str): the encoding of the file (default: `"utf-8"`;
ignored if *source* is an open file)
Yields:
`(event, object, lineno)` tuples
Example:
>>> lex = {}
>>> for event, obj, lineno in tdl.iterparse('erg/lexicon.tdl'):
... if event == 'TypeDefinition':
... lex[obj.identifier] = obj
...
>>> lex['eucalyptus_n1']['SYNSEM.LKEYS.KEYREL.PRED']
<String object (_eucalyptus_n_1_rel) at 140625748595960>
| 2.991462
| 3.875836
| 0.771824
|
# One token per regex match; lastindex picks the alternation group that
# actually matched.
return [m.group(m.lastindex) for m in _tdl_re.finditer(s)]
|
def tokenize(s)
|
Tokenize a string *s* of TDL code.
| 7.809646
| 4.336385
| 1.800958
|
if hasattr(f, 'read'):
for event in _parse(f):
yield event
else:
with io.open(f, encoding=encoding) as fh:
for event in _parse(fh):
yield event
|
def parse(f, encoding='utf-8')
|
Parse the TDL file *f* and yield the interpreted contents.
If *f* is a filename, the file is opened and closed when the
generator has finished, otherwise *f* is an open file object and
will not be closed when the generator has finished.
Args:
f (str, file): a filename or open file object
encoding (str): the encoding of the file (default: `"utf-8"`;
ignored if *f* is an open file)
| 2.972294
| 3.367436
| 0.882658
|
if isinstance(obj, TypeDefinition):
return _format_typedef(obj, indent)
elif isinstance(obj, Conjunction):
return _format_conjunction(obj, indent)
elif isinstance(obj, Term):
return _format_term(obj, indent)
elif isinstance(obj, _MorphSet):
return _format_morphset(obj, indent)
elif isinstance(obj, _Environment):
return _format_environment(obj, indent)
elif isinstance(obj, FileInclude):
return _format_include(obj, indent)
else:
raise ValueError('cannot format object as TDL: {!r}'.format(obj))
|
def format(obj, indent=0)
|
Serialize TDL objects to strings.
Args:
obj: instance of :class:`Term`, :class:`Conjunction`, or
:class:`TypeDefinition` classes or subclasses
indent (int): number of spaces to indent the formatted object
Returns:
str: serialized form of *obj*
Example:
>>> conj = tdl.Conjunction([
... tdl.TypeIdentifier('lex-item'),
... tdl.AVM([('SYNSEM.LOCAL.CAT.HEAD.MOD',
... tdl.ConsList(end=tdl.EMPTY_LIST_TYPE))])
... ])
>>> t = tdl.TypeDefinition('non-mod-lex-item', conj)
>>> print(format(t))
non-mod-lex-item := lex-item &
[ SYNSEM.LOCAL.CAT.HEAD.MOD < > ].
| 2.662052
| 2.379107
| 1.118929
|
for attr in self._avm:
    val = self._avm[attr]
    if isinstance(val, Conjunction):
        val.normalize()
        # a conjunction of exactly one AVM collapses to just that AVM
        if len(val.terms) == 1 and isinstance(val.terms[0], AVM):
            self._avm[attr] = val.terms[0]
    elif isinstance(val, AVM):
        # recurse into nested AVMs
        val.normalize()
|
def normalize(self)
|
Reduce trivial AVM conjunctions to just the AVM.
For example, in `[ ATTR1 [ ATTR2 val ] ]` the value of `ATTR1`
could be a conjunction with the sub-AVM `[ ATTR2 val ]`. This
method removes the conjunction so the sub-AVM nests directly
(equivalent to `[ ATTR1.ATTR2 val ]` in TDL).
| 3.29338
| 2.466117
| 1.335452
|
fs = []
for featpath, val in super(AVM, self).features(expand=expand):
    # don't use Conjunction.features() here because we want to
    # include the non-AVM terms, too
    if expand and isinstance(val, Conjunction):
        for term in val.terms:
            if isinstance(term, AVM):
                # recurse, prefixing sub-paths with the current path
                for fp, v in term.features(True):
                    fs.append(('{}.{}'.format(featpath, fp), v))
            else:
                fs.append((featpath, term))
    else:
        fs.append((featpath, val))
return fs
|
def features(self, expand=False)
|
Return the list of tuples of feature paths and feature values.
Args:
expand (bool): if `True`, expand all feature paths
Example:
>>> avm = AVM([('A.B', TypeIdentifier('1')),
... ('A.C', TypeIdentifier('2')])
>>> avm.features()
[('A', <AVM object at ...>)]
>>> avm.features(expand=True)
[('A.B', <TypeIdentifier object (1) at ...>),
('A.C', <TypeIdentifier object (2) at ...>)]
| 4.518197
| 3.995116
| 1.13093
|
# A fully empty list (avm is None) has no values.
if self._avm is None:
    return []
else:
    vals = [val for _, val in _collect_list_items(self)]
    # the < a . b > notation puts b on the last REST path,
    # which is not returned by _collect_list_items()
    if self.terminated and self[self._last_path] is not None:
        vals.append(self[self._last_path])
    return vals
|
def values(self)
|
Return the list of values in the ConsList feature structure.
| 10.49942
| 9.737496
| 1.078246
|
# Appending is only valid while the list exists and is still open.
if self._avm is not None and not self.terminated:
    path = self._last_path
    if path:
        path += '.'
    # store the value on FIRST, then advance the tail pointer to a new
    # empty REST so further appends can continue
    self[path + LIST_HEAD] = value
    self._last_path = path + LIST_TAIL
    self[self._last_path] = AVM()
else:
    raise TdlError('Cannot append to a closed list.')
|
def append(self, value)
|
Append an item to the end of an open ConsList.
Args:
value (:class:`Conjunction`, :class:`Term`): item to add
Raises:
:class:`TdlError`: when appending to a closed list
| 7.539835
| 6.62858
| 1.137474
|
if self.terminated:
    raise TdlError('Cannot terminate a closed list.')
if end == LIST_TYPE:
    # explicitly leave the list open
    self.terminated = False
elif end == EMPTY_LIST_TYPE:
    # close the list with an empty tail
    if self._last_path:
        self[self._last_path] = None
    else:
        self._avm = None
    self.terminated = True
elif self._last_path:
    # close with a term (typically a coreference) on the final REST
    self[self._last_path] = end
    self.terminated = True
else:
    raise TdlError('Empty list must be {} or {}'.format(
        LIST_TYPE, EMPTY_LIST_TYPE))
|
def terminate(self, end)
|
Set the value of the tail of the list.
Adding values via :meth:`append` places them on the `FIRST`
feature of some level of the feature structure (e.g.,
`REST.FIRST`), while :meth:`terminate` places them on the
final `REST` feature (e.g., `REST.REST`). If *end* is a
:class:`Conjunction` or :class:`Term`, it is typically a
:class:`Coreference`, otherwise *end* is set to
`tdl.EMPTY_LIST_TYPE` or `tdl.LIST_TYPE`. This method does
not necessarily close the list; if *end* is `tdl.LIST_TYPE`,
the list is left open, otherwise it is closed.
Args:
end (str, :class:`Conjunction`, :class:`Term`): value to
use as the end of the list.
| 4.329885
| 3.679208
| 1.176852
|
# Partition the terms by category, normalizing AVMs along the way.
coref_terms = []
type_terms = []
avm_terms = []
for term in self._terms:
    if isinstance(term, TypeTerm):
        type_terms.append(term)
    elif isinstance(term, AVM):
        term.normalize()
        avm_terms.append(term)
    elif isinstance(term, Coreference):
        coref_terms.append(term)
    else:
        raise TdlError('unexpected term {}'.format(term))
# conventional order: coreferences first, then types, then AVMs
self._terms = coref_terms + type_terms + avm_terms
|
def normalize(self)
|
Rearrange the conjunction to a conventional form.
This puts any coreference(s) first, followed by type terms,
then followed by AVM(s) (including lists). AVMs are
normalized via :meth:`AVM.normalize`.
| 3.73831
| 2.543004
| 1.470037
|
# A Conjunction contributes each of its terms recursively; a bare Term
# is appended directly; anything else is rejected.
if isinstance(term, Conjunction):
    for subterm in term.terms:
        self.add(subterm)
elif isinstance(term, Term):
    self._terms.append(term)
else:
    raise TypeError('Not a Term or Conjunction')
|
def add(self, term)
|
Add a term to the conjunction.
Args:
term (:class:`Term`, :class:`Conjunction`): term to add;
if a :class:`Conjunction`, all of its terms are added
to the current conjunction.
Raises:
:class:`TypeError`: when *term* is an invalid type
| 2.577324
| 2.625415
| 0.981682
|
# Type terms are identifiers, strings and regexes (not AVMs/corefs).
return [term for term in self._terms
        if isinstance(term, (TypeIdentifier, String, Regex))]
|
def types(self)
|
Return the list of type terms in the conjunction.
| 16.59993
| 9.440737
| 1.75833
|
featvals = []
for term in self._terms:
if isinstance(term, AVM):
featvals.extend(term.features(expand=expand))
return featvals
|
def features(self, expand=False)
|
Return the list of feature-value pairs in the conjunction.
| 5.834572
| 4.432342
| 1.316363
|
for term in self._terms:
if isinstance(term, String):
return str(term)
return None
|
def string(self)
|
Return the first string term in the conjunction, or `None`.
| 6.371191
| 3.341645
| 1.906603
|
docs = (t.docstring for t in list(self.conjunction.terms) + [self]
if t.docstring is not None)
if level.lower() == 'first':
doc = next(docs, None)
elif level.lower() == 'top':
doc = list(docs)
return doc
|
def documentation(self, level='first')
|
Return the documentation of the type.
By default, this is the first docstring on a top-level term.
By setting *level* to `"top"`, the list of all docstrings on
top-level terms is returned, including the type's `docstring`
value, if not `None`, as the last item. The docstring for the
type itself is available via :attr:`TypeDefinition.docstring`.
Args:
level (str): `"first"` or `"top"`
Returns:
a single docstring or a list of docstrings
| 4.202169
| 3.755981
| 1.118794
|
cs = []
for feat, val in self._avm.items():
    try:
        # a value with supertypes but no sub-AVM is a terminal constraint
        if val.supertypes and not val._avm:
            cs.append((feat, val))
        else:
            # otherwise expand its features, dotted onto the current path
            for subfeat, subval in val.features():
                cs.append(('{}.{}'.format(feat, subfeat), subval))
    except AttributeError:
        # EAFP: values without supertypes/features are used as-is
        cs.append((feat, val))
return cs
|
def local_constraints(self)
|
Return the constraints defined in the local AVM.
| 4.595507
| 3.946675
| 1.1644
|
def collect(d):
if d is None or d.get('FIRST') is None:
return []
vals = [d['FIRST']]
vals.extend(collect(d.get('REST')))
return vals
return collect(self)
|
def values(self)
|
Return the list of values.
| 5.727001
| 5.075705
| 1.128316
|
if array is None or not array:
return None
# end if
assert_type_or_raise(array, dict, parameter_name="array")
data = {}
data['user'] = User.from_array(array.get('user'))
data['status'] = u(array.get('status'))
data['until_date'] = int(array.get('until_date')) if array.get('until_date') is not None else None
data['can_be_edited'] = bool(array.get('can_be_edited')) if array.get('can_be_edited') is not None else None
data['can_change_info'] = bool(array.get('can_change_info')) if array.get('can_change_info') is not None else None
data['can_post_messages'] = bool(array.get('can_post_messages')) if array.get('can_post_messages') is not None else None
data['can_edit_messages'] = bool(array.get('can_edit_messages')) if array.get('can_edit_messages') is not None else None
data['can_delete_messages'] = bool(array.get('can_delete_messages')) if array.get('can_delete_messages') is not None else None
data['can_invite_users'] = bool(array.get('can_invite_users')) if array.get('can_invite_users') is not None else None
data['can_restrict_members'] = bool(array.get('can_restrict_members')) if array.get('can_restrict_members') is not None else None
data['can_pin_messages'] = bool(array.get('can_pin_messages')) if array.get('can_pin_messages') is not None else None
data['can_promote_members'] = bool(array.get('can_promote_members')) if array.get('can_promote_members') is not None else None
data['can_send_messages'] = bool(array.get('can_send_messages')) if array.get('can_send_messages') is not None else None
data['can_send_media_messages'] = bool(array.get('can_send_media_messages')) if array.get('can_send_media_messages') is not None else None
data['can_send_other_messages'] = bool(array.get('can_send_other_messages')) if array.get('can_send_other_messages') is not None else None
data['can_add_web_page_previews'] = bool(array.get('can_add_web_page_previews')) if array.get('can_add_web_page_previews') is not None else None
data['_raw'] = array
return ChatMember(**data)
|
def from_array(array)
|
Deserialize a new ChatMember from a given dictionary.
:return: new ChatMember instance.
:rtype: ChatMember
| 1.280781
| 1.184628
| 1.081167
|
# d is expected to have 'type', 'parents' and 'properties' keys.
return cls(
    d['type'], tuple(d['parents']), list(d['properties'].items())
)
|
def from_dict(cls, d)
|
Instantiate a Variable from a dictionary representation.
| 11.70383
| 9.5187
| 1.229562
|
# 'properties' and 'optional' are optional keys with safe defaults.
return cls(
    d['rargname'],
    d['value'],
    list(d.get('properties', {}).items()),
    d.get('optional', False)
)
|
def from_dict(cls, d)
|
Instantiate a Role from a dictionary representation.
| 6.731723
| 6.606701
| 1.018924
|
d = {'rargname': self.rargname, 'value': self.value}
if self.properties:
d['properties'] = self.properties
if self.optional:
d['optional'] = self.optional
return d
|
def to_dict(self)
|
Return a dictionary representation of the Role.
| 3.491862
| 3.221852
| 1.083806
|
# each synopsis is a list of role dicts; rebuild each as a tuple of Roles
synopses = [tuple(map(Role.from_dict, synopsis))
            for synopsis in d.get('synopses', [])]
return cls(d['predicate'], tuple(d['parents']), synopses)
|
def from_dict(cls, d)
|
Instantiate a Predicate from a dictionary representation.
| 6.358302
| 4.841731
| 1.313229
|
return {
'predicate': self.predicate,
'parents': list(self.supertypes),
'synopses': [[role.to_dict() for role in synopsis]
for synopsis in self.synopses]
}
|
def to_dict(self)
|
Return a dictionary representation of the Predicate.
| 5.80288
| 4.426159
| 1.311042
|
# read(cls) maps a (name, dict) pair to a (name, cls instance) pair.
read = lambda cls: (lambda pair: (pair[0], cls.from_dict(pair[1])))
return cls(
    variables=map(read(Variable), d.get('variables', {}).items()),
    properties=map(read(Property), d.get('properties', {}).items()),
    roles=map(read(Role), d.get('roles', {}).items()),
    predicates=map(read(Predicate), d.get('predicates', {}).items())
)
|
def from_dict(cls, d)
|
Instantiate a SemI from a dictionary representation.
| 2.957987
| 2.722165
| 1.08663
|
make = lambda pair: (pair[0], pair[1].to_dict())
return dict(
variables=dict(make(v) for v in self.variables.items()),
properties=dict(make(p) for p in self.properties.items()),
roles=dict(make(r) for r in self.roles.items()),
predicates=dict(make(p) for p in self.predicates.items())
)
|
def to_dict(self)
|
Return a dictionary representation of the SemI.
| 2.487482
| 2.317322
| 1.07343
|
match = var_re.match(vs)
if match is None:
raise ValueError('Invalid variable string: {}'.format(str(vs)))
else:
return match.groups()
|
def sort_vid_split(vs)
|
Split a valid variable string into its variable sort and id.
Examples:
>>> sort_vid_split('h3')
('h', '3')
>>> sort_vid_split('ref-ind12')
('ref-ind', '12')
| 5.253458
| 4.670145
| 1.124903
|
# Links exist for every non-intrinsic argument that has a variable
# that is the intrinsic variable of some other predicate, as well
# as for label equalities when no argument link exists (even
# considering transitivity).
links = []
prelinks = []
_eps = xmrs._eps
_hcons = xmrs._hcons
_vars = xmrs._vars
lsh = xmrs.labelset_heads
# map each label variable to its (sorted) list of head nodeids
lblheads = {v: lsh(v) for v, vd in _vars.items() if 'LBL' in vd['refs']}
top = xmrs.top
if top is not None:
    # TOP is modeled as a pseudo-link from nodeid 0
    prelinks.append((0, top, None, top, _vars[top]))
# collect candidate (src, srclbl, role, value, value-data) tuples
for nid, ep in _eps.items():
    for role, val in ep[3].items():
        if role == IVARG_ROLE or val not in _vars:
            continue
        prelinks.append((nid, ep[2], role, val, _vars[val]))
for src, srclbl, role, val, vd in prelinks:
    if IVARG_ROLE in vd['refs']:
        # argument points at another EP's intrinsic variable
        tgtnids = [n for n in vd['refs'][IVARG_ROLE]
                   if not _eps[n].is_quantifier()]
        if len(tgtnids) == 0:
            continue  # maybe some bad MRS with a lonely quantifier
        tgt = tgtnids[0]  # what do we do if len > 1?
        tgtlbl = _eps[tgt][2]
        post = EQ_POST if srclbl == tgtlbl else NEQ_POST
    elif val in _hcons:
        # qeq: follow the handle constraint to the low label's head
        lbl = _hcons[val][2]
        if lbl not in lblheads or len(lblheads[lbl]) == 0:
            continue  # broken MRS; log this?
        tgt = lblheads[lbl][0]  # sorted list; first item is most "heady"
        post = H_POST
    elif 'LBL' in vd['refs']:
        # direct label selection
        if val not in lblheads or len(lblheads[val]) == 0:
            continue  # broken MRS; log this?
        tgt = lblheads[val][0]  # again, should be sorted already
        post = HEQ_POST
    else:
        continue  # CARGs, maybe?
    links.append(Link(src, tgt, role, post))
# now EQ links unattested by arg links
for lbl, heads in lblheads.items():
    # I'm pretty sure this does what we want
    if len(heads) > 1:
        first = heads[0]
        for other in heads[1:]:
            links.append(Link(other, first, BARE_EQ_ROLE, EQ_POST))
    # If not, something like this is more explicit
    # lblset = self.labelset(lbl)
    # sg = g.subgraph(lblset)
    # ns = [nid for nid, deg in sg.degree(lblset).items() if deg == 0]
    # head = self.labelset_head(lbl)
    # for n in ns:
    #     links.append(Link(head, n, post=EQ_POST))
def _int(x):
    # non-numeric ids (e.g. from TOP) sort as 0
    try:
        return int(x)
    except ValueError:
        return 0
return sorted(
    links,
    key=lambda link: (_int(link.start), _int(link.end), link.rargname)
)
|
def links(xmrs)
|
Return the list of Links for the *xmrs*.
| 5.181802
| 5.139372
| 1.008256
|
return [
HandleConstraint(hi, reln, lo)
for hi, reln, lo in sorted(xmrs.hcons(), key=lambda hc: var_id(hc[0]))
]
|
def hcons(xmrs)
|
Return the list of all HandleConstraints in *xmrs*.
| 10.18579
| 6.898286
| 1.476568
|
return [
IndividualConstraint(left, reln, right)
for left, reln, right in sorted(xmrs.icons(),
key=lambda ic: var_id(ic[0]))
]
|
def icons(xmrs)
|
Return the list of all IndividualConstraints in *xmrs*.
| 11.974582
| 7.337063
| 1.632067
|
predstr = predstr.strip('"\'')  # surrounding quotes don't matter
rel_added = False
# normalize to a '_rel'-suffixed form so pred_re can match
if not predstr.lower().endswith('_rel'):
    logging.debug('Predicate does not end in "_rel": {}'
                  .format(predstr))
    rel_added = True
    predstr += '_rel'
match = Pred.pred_re.search(predstr)
if match is None:
    logging.debug('Unexpected predicate string: {}'.format(predstr))
    return (predstr, None, None, None)
# _lemma_pos(_sense)?_end
# suffix is None when '_rel' was only added artificially above
return (match.group('lemma'), match.group('pos'),
        match.group('sense'), None if rel_added else match.group('end'))
|
def split_pred_string(predstr)
|
Split *predstr* and return the (lemma, pos, sense, suffix) components.
Examples:
>>> Pred.split_pred_string('_dog_n_1_rel')
('dog', 'n', '1', 'rel')
>>> Pred.split_pred_string('quant_rel')
('quant', None, None, 'rel')
| 4.396162
| 3.854181
| 1.140622
|
predstr = predstr.strip('"').lstrip("'")
# this is a stricter regex than in Pred, but doesn't check POS
return re.match(
r'_([^ _\\]|\\.)+_[a-z](_([^ _\\]|\\.)+)?(_rel)?$'
r'|[^_]([^ \\]|\\.)+(_rel)?$',
predstr
) is not None
|
def is_valid_pred_string(predstr)
|
Return `True` if *predstr* is a valid predicate string.
Examples:
>>> is_valid_pred_string('"_dog_n_1_rel"')
True
>>> is_valid_pred_string('_dog_n_1')
True
>>> is_valid_pred_string('_dog_noun_1')
False
>>> is_valid_pred_string('dog_noun_1')
True
| 9.009438
| 9.193869
| 0.97994
|
tokens = [t for t in split_pred_string(predstr)[:3] if t is not None]
if predstr.lstrip('\'"')[:1] == '_':
tokens = [''] + tokens
return '_'.join(tokens).lower()
|
def normalize_pred_string(predstr)
|
Normalize the predicate string *predstr* to a conventional form.
This makes predicate strings more consistent by removing quotes and
the `_rel` suffix, and by lowercasing them.
Examples:
>>> normalize_pred_string('"_dog_n_1_rel"')
'_dog_n_1'
>>> normalize_pred_string('_dog_n_1')
'_dog_n_1'
| 5.911631
| 5.828183
| 1.014318
|
nodes = []
_props = xmrs.properties
varsplit = sort_vid_split
for p in xmrs.eps():
sortinfo = None
iv = p.intrinsic_variable
if iv is not None:
sort, _ = varsplit(iv)
sortinfo = _props(iv)
sortinfo[CVARSORT] = sort
nodes.append(
Node(p.nodeid, p.pred, sortinfo, p.lnk, p.surface, p.base, p.carg)
)
return nodes
|
def nodes(xmrs)
|
Return the list of Nodes for *xmrs*.
| 9.050192
| 8.668208
| 1.044067
|
if sort is None:
sort = UNKNOWNSORT
# find next available vid
vid, index = self.vid, self.index
while vid in index:
vid += 1
varstring = '{}{}'.format(sort, vid)
index[vid] = varstring
if properties is None:
properties = []
self.store[varstring] = properties
self.vid = vid + 1
return (varstring, properties)
|
def new(self, sort, properties=None)
|
Create a new variable for the given *sort*.
| 5.293002
| 4.875855
| 1.085554
|
return cls(Lnk.CHARSPAN, (int(start), int(end)))
|
def charspan(cls, start, end)
|
Create a Lnk object for a character span.
Args:
start: the initial character position (cfrom)
end: the final character position (cto)
| 12.218432
| 8.351211
| 1.463073
|
return cls(Lnk.CHARTSPAN, (int(start), int(end)))
|
def chartspan(cls, start, end)
|
Create a Lnk object for a chart span.
Args:
start: the initial chart vertex
end: the final chart vertex
| 10.400138
| 7.372651
| 1.410637
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.