_id
stringlengths 2
7
| title
stringlengths 1
88
| partition
stringclasses 3
values | text
stringlengths 75
19.8k
| language
stringclasses 1
value | meta_information
dict |
|---|---|---|---|---|---|
q24500
|
UserDefined.has_documented_fields
|
train
|
def has_documented_fields(self, include_inherited_fields=False):
    """Returns whether at least one field is documented.

    Args:
        include_inherited_fields (bool): If True, fields inherited from
            parent types are included in the check.

    Returns:
        bool: True if any examined field has a non-empty ``doc``.
    """
    fields = self.all_fields if include_inherited_fields else self.fields
    # any() replaces the manual loop-with-early-return pattern.
    return any(field.doc for field in fields)
|
python
|
{
"resource": ""
}
|
q24501
|
UserDefined.get_examples
|
train
|
def get_examples(self, compact=False):
    """
    Returns an OrderedDict mapping labels to Example objects.

    Args:
        compact (bool): If True, union members of void type are converted
            to their compact representation: no ".tag" key or containing
            dict, just the tag as a string.
    """
    # Deep-copy so callers can freely mutate what they get back.
    examples = copy.deepcopy(self._examples)
    if not compact:
        return examples

    def compact_in_place(node):
        # Recursively rewrite dict values that are lone {'.tag': ...}
        # dicts into just the tag string.
        if not isinstance(node, dict):
            return
        for key in node:
            value = node[key]
            if isinstance(value, dict):
                if len(value) == 1 and '.tag' in value:
                    node[key] = value['.tag']
                else:
                    compact_in_place(value)
            if isinstance(node[key], list):
                for element in node[key]:
                    compact_in_place(element)

    for example in examples.values():
        top = example.value
        if isinstance(top, dict) and len(top) == 1 and '.tag' in top:
            # The whole example is a lone tag; compact the top level too.
            example.value = top['.tag']
        else:
            compact_in_place(top)
    return examples
|
python
|
{
"resource": ""
}
|
q24502
|
Struct.all_required_fields
|
train
|
def all_required_fields(self):
    """
    Returns an iterator that traverses required fields in all super types
    first, and then for this type.
    """
    # A field is required when it is neither nullable nor defaulted.
    return self._filter_fields(
        lambda f: not (is_nullable_type(f.data_type) or f.has_default))
|
python
|
{
"resource": ""
}
|
q24503
|
Struct.all_optional_fields
|
train
|
def all_optional_fields(self):
    """
    Returns an iterator that traverses optional fields in all super types
    first, and then for this type.
    """
    # A field is optional when it is nullable or carries a default value.
    return self._filter_fields(
        lambda f: is_nullable_type(f.data_type) or f.has_default)
|
python
|
{
"resource": ""
}
|
q24504
|
Struct.set_enumerated_subtypes
|
train
|
def set_enumerated_subtypes(self, subtype_fields, is_catch_all):
    """
    Sets the list of "enumerated subtypes" for this struct. This differs
    from regular subtyping in that each subtype is associated with a tag
    that is used in the serialized format to indicate the subtype. Also,
    this list of subtypes was explicitly defined in an "inner-union" in the
    specification. The list of fields must include all defined subtypes of
    this struct.

    NOTE(kelkabany): For this to work with upcoming forward references, the
    hierarchy of parent types for this struct must have had this method
    called on them already.

    :type subtype_fields: List[UnionField]
    :raises InvalidSpec: If the spec declares an invalid subtype
        enumeration (duplicate tags, non-subtypes, field conflicts, or a
        missing subtype).
    """
    assert self._enumerated_subtypes is None, \
        'Enumerated subtypes already set.'
    assert isinstance(is_catch_all, bool), type(is_catch_all)
    self._is_catch_all = is_catch_all
    self._enumerated_subtypes = []
    # A struct that enumerates subtypes must be the root of its hierarchy.
    if self.parent_type:
        raise InvalidSpec(
            "'%s' enumerates subtypes so it cannot extend another struct."
            % self.name, self._ast_node.lineno, self._ast_node.path)
    # NOTE(review): a second check ("parent must also enumerate subtypes")
    # previously followed here, but it was unreachable — any parent_type
    # at all raises above — so the dead branch has been removed.
    enumerated_subtype_names = set()  # Set[str]
    for subtype_field in subtype_fields:
        path = subtype_field._ast_node.path
        lineno = subtype_field._ast_node.lineno
        # Require that a subtype only has a single type tag.
        if subtype_field.data_type.name in enumerated_subtype_names:
            raise InvalidSpec(
                "Subtype '%s' can only be specified once." %
                subtype_field.data_type.name, lineno, path)
        # Require that a subtype has this struct as its parent.
        if subtype_field.data_type.parent_type != self:
            raise InvalidSpec(
                "'%s' is not a subtype of '%s'." %
                (subtype_field.data_type.name, self.name), lineno, path)
        # Check for subtype tags that conflict with this struct's
        # non-inherited fields.
        if subtype_field.name in self._fields_by_name:
            # Since the union definition comes first, use its line number
            # as the source of the field's original declaration.
            orig_field = self._fields_by_name[subtype_field.name]
            raise InvalidSpec(
                "Field '%s' already defined on line %d." %
                (subtype_field.name, lineno),
                orig_field._ast_node.lineno,
                orig_field._ast_node.path)
        # Walk up parent tree hierarchy to ensure no field conflicts.
        # Checks for conflicts with subtype tags and regular fields.
        # NOTE(review): with the root-only restriction above, parent_type
        # is always None here, so this walk is currently a no-op; kept for
        # safety should the restriction ever be relaxed.
        cur_type = self.parent_type
        while cur_type:
            if subtype_field.name in cur_type._fields_by_name:
                orig_field = cur_type._fields_by_name[subtype_field.name]
                raise InvalidSpec(
                    "Field '%s' already defined in parent '%s' (%s:%d)."
                    % (subtype_field.name, cur_type.name,
                       orig_field._ast_node.path, orig_field._ast_node.lineno),
                    lineno, path)
            cur_type = cur_type.parent_type
        # Note the discrepancy between `fields` which contains only the
        # struct fields, and `_fields_by_name` which contains the struct
        # fields and enumerated subtype fields.
        self._fields_by_name[subtype_field.name] = subtype_field
        enumerated_subtype_names.add(subtype_field.data_type.name)
        self._enumerated_subtypes.append(subtype_field)
    assert len(self._enumerated_subtypes) > 0

    # Check that all known subtypes are listed in the enumeration.
    for subtype in self.subtypes:
        if subtype.name not in enumerated_subtype_names:
            # Consistency fix: pass the spec path like every other raise
            # in this method (it was previously omitted here).
            raise InvalidSpec(
                "'%s' does not enumerate all subtypes, missing '%s'" %
                (self.name, subtype.name),
                self._ast_node.lineno, self._ast_node.path)
|
python
|
{
"resource": ""
}
|
q24505
|
Struct.get_all_subtypes_with_tags
|
train
|
def get_all_subtypes_with_tags(self):
    """
    Unlike other enumerated-subtypes-related functionality, this method
    returns not just direct subtypes, but all subtypes of this struct. The
    tag of each subtype is the list of tags from which the type descends.

    This method only applies to structs that enumerate subtypes.

    Use this when you need to generate a lookup table for a root struct
    that maps a generated class representing a subtype to the tag it needs
    in the serialized format.

    Returns:
        List[Tuple[List[String], Struct]]
    """
    assert self.has_enumerated_subtypes(), 'Enumerated subtypes not set.'
    results = []  # List[Tuple[List[String], Struct]]
    # Breadth-first traversal over the enumerated-subtype hierarchy.
    pending = deque(
        field.data_type for field in self.get_enumerated_subtypes())
    while pending:
        subtype = pending.popleft()
        results.append((subtype._get_subtype_tags(), subtype))
        if subtype.has_enumerated_subtypes():
            pending.extend(
                field.data_type
                for field in subtype.get_enumerated_subtypes())
    return results
|
python
|
{
"resource": ""
}
|
q24506
|
Struct._get_subtype_tags
|
train
|
def _get_subtype_tags(self):
"""
Returns a list of type tags that refer to this type starting from the
base of the struct hierarchy.
"""
assert self.is_member_of_enumerated_subtypes_tree(), \
'Not a part of a subtypes tree.'
cur = self.parent_type
cur_dt = self
tags = []
while cur:
assert cur.has_enumerated_subtypes()
for subtype_field in cur.get_enumerated_subtypes():
if subtype_field.data_type is cur_dt:
tags.append(subtype_field.name)
break
else:
assert False, 'Could not find?!'
cur_dt = cur
cur = cur.parent_type
tags.reverse()
return tuple(tags)
|
python
|
{
"resource": ""
}
|
q24507
|
Struct._add_example_enumerated_subtypes_helper
|
train
|
def _add_example_enumerated_subtypes_helper(self, example):
    """Validates examples for structs with enumerated subtypes."""
    # Such an example must consist of exactly one subtype-tag field.
    if len(example.fields) != 1:
        raise InvalidSpec(
            'Example for struct with enumerated subtypes must only '
            'specify one subtype tag.', example.lineno, example.path)

    # Extract the only tag in the example.
    (example_field,) = example.fields.values()
    tag = example_field.name
    if not isinstance(example_field.value, AstExampleRef):
        raise InvalidSpec(
            "Example of struct with enumerated subtypes must be a "
            "reference to a subtype's example.",
            example_field.lineno, example_field.path)

    # Accept the example only if the tag names an enumerated subtype.
    if any(f.name == tag for f in self.get_enumerated_subtypes()):
        self._raw_examples[example.label] = example
    else:
        raise InvalidSpec(
            "Unknown subtype tag '%s' in example." % tag,
            example_field.lineno, example_field.path)
|
python
|
{
"resource": ""
}
|
q24508
|
Struct._add_example_helper
|
train
|
def _add_example_helper(self, example):
"""Validates examples for structs without enumerated subtypes."""
# Check for fields in the example that don't belong.
for label, example_field in example.fields.items():
if not any(label == f.name for f in self.all_fields):
raise InvalidSpec(
"Example for '%s' has unknown field '%s'." %
(self.name, label),
example_field.lineno, example_field.path,
)
for field in self.all_fields:
if field.name in example.fields:
example_field = example.fields[field.name]
try:
field.data_type.check_example(example_field)
except InvalidSpec as e:
e.msg = "Bad example for field '{}': {}".format(
field.name, e.msg)
raise
elif field.has_default or isinstance(field.data_type, Nullable):
# These don't need examples.
pass
else:
raise InvalidSpec(
"Missing field '%s' in example." % field.name,
example.lineno, example.path)
self._raw_examples[example.label] = example
|
python
|
{
"resource": ""
}
|
q24509
|
Union.all_fields
|
train
|
def all_fields(self):
    """
    Returns a list of all fields. Subtype fields come before this type's
    fields.

    Returns:
        list: Parent-type fields (if any) followed by this type's fields.
    """
    fields = []
    if self.parent_type:
        fields.extend(self.parent_type.all_fields)
    # Extend directly; the previous `[f for f in self.fields]` built a
    # redundant intermediate copy.
    fields.extend(self.fields)
    return fields
|
python
|
{
"resource": ""
}
|
q24510
|
Union._has_example
|
train
|
def _has_example(self, label):
    """Whether this data type has an example with the given ``label``."""
    if label in self._raw_examples:
        return True
    # A label may also refer implicitly to a variant whose data type is
    # user-defined or void.
    for field in self.all_fields:
        data_type, _ = unwrap_nullable(field.data_type)
        if is_user_defined_type(data_type) or is_void_type(data_type):
            if label == field.name:
                return True
    return False
|
python
|
{
"resource": ""
}
|
q24511
|
Union.unique_field_data_types
|
train
|
def unique_field_data_types(self):
    """
    Checks if all variants have different data types.

    If so, the selected variant can be determined just by the data type of
    the value without needing a field name / tag. In some languages, this
    lets us make a shortcut
    """
    seen_type_names = set()
    for field in self.fields:
        # Void variants carry no value, so they never collide.
        if is_void_type(field.data_type):
            continue
        type_name = field.data_type.name
        if type_name in seen_type_names:
            return False
        seen_type_names.add(type_name)
    return True
|
python
|
{
"resource": ""
}
|
q24512
|
ObjCTypesBackend._generate_namespace_types
|
train
|
def _generate_namespace_types(self, namespace, jazzy_cfg):
    """Creates Obj C argument, error, serializer and deserializer types
    for the given namespace."""
    ns_name = fmt_public_name(namespace.name)
    output_path = os.path.join('ApiObjects', ns_name)
    output_path_headers = os.path.join(output_path, 'Headers')
    # One header file per data type, emitted in dependency order.
    for data_type in namespace.linearize_data_types():
        class_name = fmt_class_prefix(data_type)
        if self.args.documentation:
            # Register the class and its serializer with the jazzy docs
            # configuration so they show up in generated documentation.
            append_to_jazzy_category_dict(jazzy_cfg, ns_name, class_name)
            append_to_jazzy_category_dict(
                jazzy_cfg, 'Serializers', '{}Serializer'.format(class_name))
        if is_struct_type(data_type):
            # struct header
            file_path = os.path.join(output_path_headers,
                                     class_name + '.h')
            with self.output_to_relative_path(file_path):
                self.emit_raw(base_file_comment)
                self._generate_struct_class_h(data_type)
        elif is_union_type(data_type):
            if self.args.documentation:
                # Unions additionally expose a tag enum worth documenting.
                append_to_jazzy_category_dict(
                    jazzy_cfg, 'Tags', '{}Tag'.format(fmt_class_prefix(data_type)))
            # union header
            file_path = os.path.join(output_path_headers,
                                     class_name + '.h')
            with self.output_to_relative_path(file_path):
                self.emit_raw(base_file_comment)
                self._generate_union_class_h(data_type)
        else:
            raise TypeError('Can\'t handle type %r' % type(data_type))
    # All implementations for the namespace go into a single .m file.
    file_path = os.path.join(
        output_path,
        'DB{}Objects.m'.format(fmt_camel_upper(namespace.name)))
    with self.output_to_relative_path(file_path):
        self.emit_raw(base_file_comment)
        description = '/// Arguments, results, and errors for the `{}` namespace.'.format(
            fmt_camel_upper(namespace.name))
        self.emit(description)
        if self.args.exclude_from_analysis:
            self.emit()
            # Wrap the generated bodies so clang static analysis skips them.
            self.emit('#ifndef __clang_analyzer__')
        for data_type in namespace.linearize_data_types():
            if is_struct_type(data_type):
                # struct implementation
                self._generate_struct_class_m(data_type)
            elif is_union_type(data_type):
                # union implementation
                self._generate_union_class_m(data_type)
        if self.args.exclude_from_analysis:
            self.emit('#endif')
|
python
|
{
"resource": ""
}
|
q24513
|
ObjCTypesBackend._generate_struct_class_m
|
train
|
def _generate_struct_class_m(self, struct):
    """Defines an Obj C implementation file that represents a struct in Stone."""
    self.emit()
    # Imports: serializer/validator runtime plus types the struct references.
    self._generate_imports_m(
        self._get_imports_m(
            struct,
            default_imports=['DBStoneSerializers', 'DBStoneValidators']))
    struct_name = fmt_class_prefix(struct)
    self.emit('#pragma mark - API Object')
    self.emit()
    # The struct class itself: constructors, serialization hooks, and
    # NSObject conformances (description, copy, hash, equality).
    with self.block_m(struct_name):
        self.emit('#pragma mark - Constructors')
        self.emit()
        self._generate_struct_cstor(struct)
        self._generate_struct_cstor_default(struct)
        self.emit('#pragma mark - Serialization methods')
        self.emit()
        self._generate_serializable_funcs(struct_name)
        self.emit('#pragma mark - Description method')
        self.emit()
        self._generate_description_func(struct_name)
        self.emit('#pragma mark - Copyable method')
        self.emit()
        self._generate_copyable_func()
        self.emit('#pragma mark - Hash method')
        self.emit()
        self._generate_hash_func(struct)
        self.emit('#pragma mark - Equality method')
        self.emit()
        self._generate_equality_func(struct)
        self.emit()
    self.emit()
    self.emit('#pragma mark - Serializer Object')
    self.emit()
    # Companion serializer class with serialize/deserialize class methods.
    with self.block_m(fmt_serial_class(struct_name)):
        self._generate_struct_serializer(struct)
        self._generate_struct_deserializer(struct)
|
python
|
{
"resource": ""
}
|
q24514
|
ObjCTypesBackend._generate_struct_class_h
|
train
|
def _generate_struct_class_h(self, struct):
    """Defines an Obj C header file that represents a struct in Stone."""
    # Forward-declaration / base imports, then type-specific imports.
    self._generate_init_imports_h(struct)
    self._generate_imports_h(self._get_imports_h(struct))
    self.emit()
    self.emit('NS_ASSUME_NONNULL_BEGIN')
    self.emit()
    self.emit('#pragma mark - API Object')
    self.emit()
    self._generate_class_comment(struct)
    struct_name = fmt_class_prefix(struct)
    # Interface declaration: properties plus constructor signatures.
    with self.block_h_from_data_type(struct, protocol=['DBSerializable', 'NSCopying']):
        self.emit('#pragma mark - Instance fields')
        self.emit()
        self._generate_struct_properties(struct.fields)
        self.emit('#pragma mark - Constructors')
        self.emit()
        self._generate_struct_cstor_signature(struct)
        self._generate_struct_cstor_signature_default(struct)
        self._generate_init_unavailable_signature(struct)
        self.emit()
    self.emit()
    self.emit('#pragma mark - Serializer Object')
    self.emit()
    self.emit(comment_prefix)
    self.emit_wrapped_text(
        'The serialization class for the `{}` struct.'.format(
            fmt_class(struct.name)),
        prefix=comment_prefix)
    self.emit(comment_prefix)
    # Serializer interface exposing serialize/deserialize signatures.
    with self.block_h(fmt_serial_class(struct_name)):
        self._generate_serializer_signatures(struct_name)
        self.emit()
    self.emit('NS_ASSUME_NONNULL_END')
    self.emit()
|
python
|
{
"resource": ""
}
|
q24515
|
ObjCTypesBackend._generate_union_class_m
|
train
|
def _generate_union_class_m(self, union):
    """Defines an Obj C implementation file that represents a union in Stone."""
    self.emit()
    # Imports: serializer/validator runtime plus types the union references.
    self._generate_imports_m(
        self._get_imports_m(
            union,
            default_imports=['DBStoneSerializers', 'DBStoneValidators']))
    union_name = fmt_class_prefix(union)
    self.emit('#pragma mark - API Object')
    self.emit()
    # The union class itself: per-tag constructors, tag-guarded accessors,
    # tag-state queries, and NSObject conformances.
    with self.block_m(fmt_class_prefix(union)):
        self._generate_synthesize_ivars(union)
        self.emit('#pragma mark - Constructors')
        self.emit()
        self._generate_union_cstor_funcs(union)
        self.emit('#pragma mark - Instance field accessors')
        self.emit()
        self._generate_union_tag_vars_funcs(union)
        self.emit('#pragma mark - Tag state methods')
        self.emit()
        self._generate_union_tag_state_funcs(union)
        self.emit('#pragma mark - Serialization methods')
        self.emit()
        self._generate_serializable_funcs(union_name)
        self.emit('#pragma mark - Description method')
        self.emit()
        self._generate_description_func(union_name)
        self.emit('#pragma mark - Copyable method')
        self.emit()
        self._generate_copyable_func()
        self.emit('#pragma mark - Hash method')
        self.emit()
        self._generate_hash_func(union)
        self.emit('#pragma mark - Equality method')
        self.emit()
        self._generate_equality_func(union)
        self.emit()
    self.emit()
    self.emit('#pragma mark - Serializer Object')
    self.emit()
    # Companion serializer class with serialize/deserialize class methods.
    with self.block_m(fmt_serial_class(union_name)):
        self._generate_union_serializer(union)
        self._generate_union_deserializer(union)
|
python
|
{
"resource": ""
}
|
q24516
|
ObjCTypesBackend._generate_union_class_h
|
train
|
def _generate_union_class_h(self, union):
    """Defines an Obj C header file that represents a union in Stone."""
    # Forward-declaration / base imports, then type-specific imports.
    self._generate_init_imports_h(union)
    self._generate_imports_h(self._get_imports_h(union))
    self.emit()
    self.emit('NS_ASSUME_NONNULL_BEGIN')
    self.emit()
    self.emit('#pragma mark - API Object')
    self.emit()
    self._generate_class_comment(union)
    union_name = fmt_class_prefix(union)
    # Interface declaration: tag enum, tag property, per-variant
    # properties, and constructor / tag-query signatures.
    with self.block_h_from_data_type(union, protocol=['DBSerializable', 'NSCopying']):
        self.emit('#pragma mark - Instance fields')
        self.emit()
        self._generate_union_tag_state(union)
        self._generate_union_tag_property(union)
        self._generate_union_properties(union.all_fields)
        self.emit('#pragma mark - Constructors')
        self.emit()
        self._generate_union_cstor_signatures(union, union.all_fields)
        self._generate_init_unavailable_signature(union)
        self.emit('#pragma mark - Tag state methods')
        self.emit()
        self._generate_union_tag_access_signatures(union)
        self.emit()
    self.emit()
    self.emit('#pragma mark - Serializer Object')
    self.emit()
    self.emit(comment_prefix)
    self.emit_wrapped_text(
        'The serialization class for the `{}` union.'.format(union_name),
        prefix=comment_prefix)
    self.emit(comment_prefix)
    # Serializer interface exposing serialize/deserialize signatures.
    with self.block_h(fmt_serial_class(union_name)):
        self._generate_serializer_signatures(union_name)
        self.emit()
    self.emit('NS_ASSUME_NONNULL_END')
    self.emit()
|
python
|
{
"resource": ""
}
|
q24517
|
ObjCTypesBackend._generate_struct_cstor
|
train
|
def _generate_struct_cstor(self, struct):
    """Emits struct standard constructor."""
    with self.block_func(
            func=self._cstor_name_from_fields(struct.all_fields),
            args=fmt_func_args_from_fields(struct.all_fields),
            return_type='instancetype'):
        # Run each field's validator (if any) before assignment.
        for field in struct.all_fields:
            self._generate_validator(field)
        self.emit()
        # Fields inherited from parent types are forwarded to super's
        # designated constructor.
        super_fields = [
            f for f in struct.all_fields if f not in struct.fields
        ]
        if super_fields:
            super_args = fmt_func_args([(fmt_var(f.name), fmt_var(f.name))
                                        for f in super_fields])
            self.emit('self = [super {}:{}];'.format(
                self._cstor_name_from_fields(super_fields), super_args))
        else:
            if struct.parent_type:
                # Parent exposes no required fields; use its default cstor.
                self.emit('self = [super initDefault];')
            else:
                self.emit('self = [super init];')
        with self.block_init():
            for field in struct.fields:
                field_name = fmt_var(field.name)
                if field.has_default:
                    # Fall back to the declared default when nil is passed.
                    self.emit('_{} = {} ?: {};'.format(
                        field_name, field_name, fmt_default_value(field)))
                else:
                    self.emit('_{} = {};'.format(field_name, field_name))
    self.emit()
|
python
|
{
"resource": ""
}
|
q24518
|
ObjCTypesBackend._generate_struct_cstor_default
|
train
|
def _generate_struct_cstor_default(self, struct):
    """Emits struct convenience constructor. Default arguments are omitted."""
    if not self._struct_has_defaults(struct):
        return

    def is_required(field):
        # Required fields have neither a default nor a nullable type.
        return not field.has_default and not is_nullable_type(field.data_type)

    required_fields = [f for f in struct.all_fields if is_required(f)]
    with self.block_func(
            func=self._cstor_name_from_fields(required_fields),
            args=fmt_func_args_from_fields(required_fields),
            return_type='instancetype'):
        # Delegate to the full constructor, passing nil for every
        # omitted (defaulted or nullable) field.
        forwarded = [(fmt_var(f.name),
                      fmt_var(f.name) if is_required(f) else 'nil')
                     for f in struct.all_fields]
        self.emit('return [self {}:{}];'.format(
            self._cstor_name_from_fields(struct.all_fields),
            fmt_func_args(forwarded)))
    self.emit()
|
python
|
{
"resource": ""
}
|
q24519
|
ObjCTypesBackend._generate_struct_cstor_signature
|
train
|
def _generate_struct_cstor_signature(self, struct):
    """Emits struct standard constructor signature to be used in the struct's header file."""
    fields = struct.all_fields
    self.emit(comment_prefix)
    description_str = 'Full constructor for the struct (exposes all instance variables).'
    self.emit_wrapped_text(description_str, prefix=comment_prefix)
    signature = fmt_signature(
        func=self._cstor_name_from_fields(fields),
        args=self._cstor_args_from_fields(fields, is_struct=True),
        return_type='instancetype')
    self.emit(comment_prefix)
    # Document each parameter with the field's spec doc (or a stock
    # "undocumented" placeholder).
    for field in struct.all_fields:
        doc = self.process_doc(field.doc,
                               self._docf) if field.doc else undocumented
        self.emit_wrapped_text(
            '@param {} {}'.format(fmt_var(field.name), doc),
            prefix=comment_prefix)
    if struct.all_fields:
        self.emit(comment_prefix)
    self.emit_wrapped_text(
        '@return An initialized instance.', prefix=comment_prefix)
    self.emit(comment_prefix)
    self.emit('{};'.format(signature))
    self.emit()
|
python
|
{
"resource": ""
}
|
q24520
|
ObjCTypesBackend._generate_struct_cstor_signature_default
|
train
|
def _generate_struct_cstor_signature_default(self, struct):
    """Emits struct convenience constructor with default arguments
    omitted signature to be used in the struct header file."""
    if not self._struct_has_defaults(struct):
        return
    # Only required fields (no default, non-nullable) appear in the
    # convenience constructor.
    fields_no_default = [
        f for f in struct.all_fields
        if not f.has_default and not is_nullable_type(f.data_type)
    ]
    signature = fmt_signature(
        func=self._cstor_name_from_fields(fields_no_default),
        args=self._cstor_args_from_fields(
            fields_no_default, is_struct=True),
        return_type='instancetype')
    self.emit(comment_prefix)
    description_str = (
        'Convenience constructor (exposes only non-nullable '
        'instance variables with no default value).')
    self.emit_wrapped_text(description_str, prefix=comment_prefix)
    self.emit(comment_prefix)
    # Document each parameter with the field's spec doc (or a stock
    # "undocumented" placeholder).
    for field in fields_no_default:
        doc = self.process_doc(field.doc,
                               self._docf) if field.doc else undocumented
        self.emit_wrapped_text(
            '@param {} {}'.format(fmt_var(field.name), doc),
            prefix=comment_prefix)
    if struct.all_fields:
        self.emit(comment_prefix)
    self.emit_wrapped_text(
        '@return An initialized instance.', prefix=comment_prefix)
    self.emit(comment_prefix)
    self.emit('{};'.format(signature))
    self.emit()
|
python
|
{
"resource": ""
}
|
q24521
|
ObjCTypesBackend._generate_union_cstor_funcs
|
train
|
def _generate_union_cstor_funcs(self, union):
    """Emits standard union constructor."""
    # One constructor per tag; value-carrying tags take the value as an
    # argument, void tags take none.
    for field in union.all_fields:
        enum_field_name = fmt_enum_name(field.name, union)
        func_args = [] if is_void_type(
            field.data_type) else fmt_func_args_from_fields([field])
        with self.block_func(
                func=self._cstor_name_from_field(field),
                args=func_args,
                return_type='instancetype'):
            self.emit('self = [super init];')
            with self.block_init():
                # Record the tag state, then store the value (if any).
                self.emit('_tag = {};'.format(enum_field_name))
                if not is_void_type(field.data_type):
                    self.emit('_{} = {};'.format(
                        fmt_var(field.name), fmt_var(field.name)))
        self.emit()
|
python
|
{
"resource": ""
}
|
q24522
|
ObjCTypesBackend._generate_union_cstor_signatures
|
train
|
def _generate_union_cstor_signatures(self, union, fields): # pylint: disable=unused-argument
    """Emits union constructor signatures to be used in the union's header file."""
    for field in fields:
        # Void variants get a no-argument constructor.
        args = self._cstor_args_from_fields(
            [field] if not is_void_type(field.data_type) else [])
        signature = fmt_signature(
            func=self._cstor_name_from_field(field),
            args=args,
            return_type='instancetype')
        self.emit(comment_prefix)
        self.emit_wrapped_text(
            'Initializes union class with tag state of "{}".'.format(
                field.name),
            prefix=comment_prefix)
        self.emit(comment_prefix)
        if field.doc:
            # The guard ensures field.doc is set, so the former
            # `... if field.doc else undocumented` ternary here was dead
            # code and has been simplified to a direct call.
            doc = self.process_doc(field.doc, self._docf)
            self.emit_wrapped_text(
                'Description of the "{}" tag state: {}'.format(
                    field.name, doc),
                prefix=comment_prefix)
            self.emit(comment_prefix)
        if not is_void_type(field.data_type):
            # Here field.doc may legitimately be unset, so the fallback
            # placeholder is still needed.
            doc = self.process_doc(
                field.doc, self._docf) if field.doc else undocumented
            self.emit_wrapped_text(
                '@param {} {}'.format(fmt_var(field.name), doc),
                prefix=comment_prefix)
            self.emit(comment_prefix)
        self.emit_wrapped_text(
            '@return An initialized instance.', prefix=comment_prefix)
        self.emit(comment_prefix)
        self.emit('{};'.format(signature))
        self.emit()
|
python
|
{
"resource": ""
}
|
q24523
|
ObjCTypesBackend._generate_union_tag_state
|
train
|
def _generate_union_tag_state(self, union):
    """Emits union tag enum type, which stores union state."""
    union_name = fmt_class_prefix(union)
    tag_type = fmt_enum_name('tag', union)
    description_str = ('The `{}` enum type represents the possible tag '
                       'states with which the `{}` union can exist.')
    self.emit_wrapped_text(
        description_str.format(tag_type, union_name),
        prefix=comment_prefix)
    with self.block(
            'typedef NS_ENUM(NSInteger, {})'.format(tag_type), after=';'):
        # One documented enum case per union tag.
        for field in union.all_fields:
            doc = self.process_doc(
                field.doc, self._docf) if field.doc else undocumented
            self.emit_wrapped_text(doc, prefix=comment_prefix)
            self.emit('{},'.format(fmt_enum_name(field.name, union)))
            self.emit()
    self.emit()
|
python
|
{
"resource": ""
}
|
q24524
|
ObjCTypesBackend._generate_serializer_signatures
|
train
|
def _generate_serializer_signatures(self, obj_name):
    """Emits the signatures of the serializer object's serializing functions."""
    # Class-method signatures: serialize an API object to a JSON-compatible
    # dictionary, and deserialize such a dictionary back into an object.
    serial_signature = fmt_signature(
        func='serialize',
        args=fmt_func_args_declaration([(
            'instance', '{} *'.format(obj_name))]),
        return_type='nullable NSDictionary<NSString *, id> *',
        class_func=True)
    deserial_signature = fmt_signature(
        func='deserialize',
        args=fmt_func_args_declaration([('dict',
                                         'NSDictionary<NSString *, id> *')]),
        return_type='{} *'.format(obj_name),
        class_func=True)
    # Doc comment + declaration for `serialize`.
    self.emit(comment_prefix)
    self.emit_wrapped_text(
        'Serializes `{}` instances.'.format(obj_name),
        prefix=comment_prefix)
    self.emit(comment_prefix)
    self.emit_wrapped_text(
        '@param instance An instance of the `{}` API object.'.format(
            obj_name),
        prefix=comment_prefix)
    self.emit(comment_prefix)
    description_str = ('@return A json-compatible dictionary '
                       'representation of the `{}` API object.')
    self.emit_wrapped_text(
        description_str.format(obj_name), prefix=comment_prefix)
    self.emit(comment_prefix)
    self.emit('{};'.format(serial_signature))
    self.emit()
    # Doc comment + declaration for `deserialize`.
    self.emit(comment_prefix)
    self.emit_wrapped_text(
        'Deserializes `{}` instances.'.format(obj_name),
        prefix=comment_prefix)
    self.emit(comment_prefix)
    description_str = ('@param dict A json-compatible dictionary '
                       'representation of the `{}` API object.')
    self.emit_wrapped_text(
        description_str.format(obj_name), prefix=comment_prefix)
    self.emit(comment_prefix)
    self.emit_wrapped_text(
        '@return An instantiation of the `{}` object.'.format(obj_name),
        prefix=comment_prefix)
    self.emit(comment_prefix)
    self.emit('{};'.format(deserial_signature))
    self.emit()
|
python
|
{
"resource": ""
}
|
q24525
|
ObjCTypesBackend._cstor_args_from_fields
|
train
|
def _cstor_args_from_fields(self, fields, is_struct=False):
    """Returns a string representing the properly formatted arguments for a constructor."""
    def declared_type(field):
        # Struct constructors format types with default-awareness; union
        # constructors only tag the type.
        if is_struct:
            return fmt_type(field.data_type, tag=True,
                            has_default=field.has_default)
        return fmt_type(field.data_type, tag=True)

    return fmt_func_args_declaration(
        [(fmt_var(field.name), declared_type(field)) for field in fields])
|
python
|
{
"resource": ""
}
|
q24526
|
ObjCTypesBackend._generate_validator
|
train
|
def _generate_validator(self, field):
    """Emits validator if data type has associated validator."""
    name = fmt_var(field.name)
    validator = self._determine_validator_type(
        field.data_type, name, field.has_default)
    # Defaulted fields validate the coalesced value (`x ?: default`).
    value = '{} ?: {}'.format(name, fmt_default_value(field)) \
        if field.has_default else name
    if validator:
        self.emit('{}({});'.format(validator, value))
|
python
|
{
"resource": ""
}
|
q24527
|
ObjCTypesBackend._determine_validator_type
|
train
|
def _determine_validator_type(self, data_type, value, has_default):
    """Returns validator string for given data type, else None.

    Args:
        data_type: The (possibly nullable) Stone data type to validate.
        value: The expression being validated, threaded through so
            collection item validators can reference it.
        has_default (bool): Whether the field has a default value, in
            which case no non-null wrapper is needed.
    """
    data_type, nullable = unwrap_nullable(data_type)
    validator = None
    if is_list_type(data_type):
        # Validate each list item with the item type's validator (or nil).
        item_validator = self._determine_validator_type(
            data_type.data_type, value, False)
        item_validator = item_validator if item_validator else 'nil'
        validator = '{}:{}'.format(
            fmt_validator(data_type),
            fmt_func_args([
                ('minItems', '@({})'.format(data_type.min_items)
                 if data_type.min_items else 'nil'),
                ('maxItems', '@({})'.format(data_type.max_items)
                 if data_type.max_items else 'nil'),
                ('itemValidator', item_validator),
            ]))
    elif is_map_type(data_type):
        item_validator = self._determine_validator_type(
            data_type.value_data_type, value, False)
        item_validator = item_validator if item_validator else 'nil'
        validator = '{}:{}'.format(
            fmt_validator(data_type),
            fmt_func_args([
                ('itemValidator', item_validator),
            ]))
    elif is_numeric_type(data_type):
        # NOTE(review): truthiness treats a bound of 0 as "unset" —
        # presumably intentional for absent bounds, but a declared 0
        # bound would be dropped; confirm against the spec semantics.
        if data_type.min_value or data_type.max_value:
            validator = '{}:{}'.format(
                fmt_validator(data_type),
                fmt_func_args([
                    ('minValue', '@({})'.format(data_type.min_value)
                     if data_type.min_value else 'nil'),
                    ('maxValue', '@({})'.format(data_type.max_value)
                     if data_type.max_value else 'nil'),
                ]))
    elif is_string_type(data_type):
        if data_type.pattern or data_type.min_length or data_type.max_length:
            # Bug fix: in Python 3, str.encode('unicode_escape') returns
            # bytes, so calling .replace() with str arguments raised a
            # TypeError. Decode back to str before escaping quotes.
            pattern = data_type.pattern.encode('unicode_escape').decode(
                'utf-8').replace("\"", "\\\"") if data_type.pattern else None
            validator = '{}:{}'.format(
                fmt_validator(data_type),
                fmt_func_args([
                    ('minLength', '@({})'.format(data_type.min_length)
                     if data_type.min_length else 'nil'),
                    ('maxLength', '@({})'.format(data_type.max_length)
                     if data_type.max_length else 'nil'),
                    ('pattern', '@"{}"'.format(pattern)
                     if pattern else 'nil'),
                ]))
    if nullable:
        # Nullable values: wrap any validator so nil passes through.
        if validator:
            validator = fmt_func_call(
                caller='DBStoneValidators', callee=validator)
        validator = fmt_func_call(
            caller='DBStoneValidators',
            callee='nullableValidator',
            args=validator)
    else:
        if validator:
            validator = fmt_func_call(
                caller='DBStoneValidators', callee=validator)
        else:
            validator = 'nil'
        if not has_default:
            # Non-nullable, non-defaulted values must be non-nil.
            validator = fmt_func_call(
                caller='DBStoneValidators',
                callee='nonnullValidator',
                args=validator)
        else:
            validator = None
    return validator
|
python
|
{
"resource": ""
}
|
q24528
|
ObjCTypesBackend._generate_struct_serializer
|
train
|
def _generate_struct_serializer(self, struct):
    """Emits the serialize method for the serialization object for the given struct."""
    struct_name = fmt_class_prefix(struct)
    with self.block_func(
            func='serialize',
            args=fmt_func_args_declaration([('valueObj',
                                             '{} *'.format(struct_name))]),
            return_type='NSDictionary<NSString *, id> *',
            class_func=True):
        if not struct.all_fields and not struct.has_enumerated_subtypes():
            # Silence the unused-parameter warning for empty structs.
            self.emit('#pragma unused(valueObj)')
        self.emit(
            'NSMutableDictionary *jsonDict = [[NSMutableDictionary alloc] init];'
        )
        self.emit()
        for field in struct.all_fields:
            data_type, nullable = unwrap_nullable(field.data_type)
            input_value = 'valueObj.{}'.format(fmt_var(field.name))
            serialize_call = self._fmt_serialization_call(
                field.data_type, input_value, True)
            if not nullable:
                if is_primitive_type(data_type):
                    # Primitives are stored directly, no serializer call.
                    self.emit('jsonDict[@"{}"] = {};'.format(
                        field.name, input_value))
                else:
                    self.emit('jsonDict[@"{}"] = {};'.format(
                        field.name, serialize_call))
            else:
                # Nullable fields are only serialized when present.
                with self.block('if ({})'.format(input_value)):
                    self.emit('jsonDict[@"{}"] = {};'.format(
                        field.name, serialize_call))
        self.emit()
        if struct.has_enumerated_subtypes():
            # Dispatch on the runtime class to serialize the subtype's
            # fields and record its tag.
            first_block = True
            for tags, subtype in struct.get_all_subtypes_with_tags():
                assert len(tags) == 1, tags
                tag = tags[0]
                base_condition = '{} ([valueObj isKindOfClass:[{} class]])'
                with self.block(
                        base_condition.format('if' if first_block else
                                              'else if',
                                              fmt_class_prefix(subtype))):
                    if first_block:
                        first_block = False
                    func_args = fmt_func_args([('value',
                                                '({} *)valueObj'.format(
                                                    fmt_class_prefix(
                                                        subtype)))])
                    caller = fmt_serial_class(fmt_class_prefix(subtype))
                    serialize_call = fmt_func_call(
                        caller=caller, callee='serialize', args=func_args)
                    self.emit('NSDictionary *subTypeFields = {};'.format(
                        serialize_call))
                    # Merge the subtype's serialized fields into jsonDict.
                    with self.block(
                            'for (NSString* key in subTypeFields)'):
                        self.emit('jsonDict[key] = subTypeFields[key];')
                    self.emit(
                        'jsonDict[@".tag"] = @"{}";'.format(fmt_var(tag)))
            self.emit()
        self.emit('return [jsonDict count] > 0 ? jsonDict : nil;')
    self.emit()
|
python
|
{
"resource": ""
}
|
q24529
|
ObjCTypesBackend._generate_struct_deserializer
|
train
|
def _generate_struct_deserializer(self, struct):
    """Emits the deserialize method for the serialization object for the given struct.

    Generates an Objective-C class method `+deserialize:` that converts an
    `NSDictionary<NSString *, id> *` (parsed JSON) into an instance of the
    class generated for `struct`.
    """
    struct_name = fmt_class_prefix(struct)
    with self.block_func(
            func='deserialize',
            args=fmt_func_args_declaration([('valueDict',
                                             'NSDictionary<NSString *, id> *')]),
            return_type='{} *'.format(struct_name),
            class_func=True):
        if not struct.all_fields and not struct.has_enumerated_subtypes():
            # Nothing is read from the dict; silence the generated
            # unused-parameter warning.
            self.emit('#pragma unused(valueDict)')
        def emit_struct_deserialize_logic(struct):
            # Deserialize every field into a local variable, then call the
            # generated designated initializer with all of them.
            for field in struct.all_fields:
                data_type, nullable = unwrap_nullable(field.data_type)
                input_value = 'valueDict[@"{}"]'.format(field.name)
                if is_primitive_type(data_type):
                    # Primitives are stored directly in the JSON dict.
                    deserialize_call = input_value
                else:
                    deserialize_call = self._fmt_serialization_call(
                        field.data_type, input_value, False)
                if nullable or field.has_default:
                    # Fall back to the field's default (or nil) when the
                    # key is missing from the input dictionary.
                    default_value = fmt_default_value(
                        field) if field.has_default else 'nil'
                    if is_primitive_type(data_type):
                        deserialize_call = '{} ?: {}'.format(
                            input_value, default_value)
                    else:
                        deserialize_call = '{} ? {} : {}'.format(
                            input_value, deserialize_call, default_value)
                self.emit('{}{} = {};'.format(
                    fmt_type(field.data_type),
                    fmt_var(field.name), deserialize_call))
            self.emit()
            deserialized_obj_args = [(fmt_var(f.name), fmt_var(f.name))
                                     for f in struct.all_fields]
            init_call = fmt_func_call(
                caller=fmt_alloc_call(caller=struct_name),
                callee=self._cstor_name_from_fields(struct.all_fields),
                args=fmt_func_args(deserialized_obj_args))
            self.emit('return {};'.format(init_call))
        if not struct.has_enumerated_subtypes():
            emit_struct_deserialize_logic(struct)
        else:
            # Polymorphic struct: dispatch on the ".tag" key to the
            # matching subtype's own deserializer.
            for tags, subtype in struct.get_all_subtypes_with_tags():
                assert len(tags) == 1, tags
                tag = tags[0]
                base_string = 'if ([valueDict[@".tag"] isEqualToString:@"{}"])'
                with self.block(base_string.format(tag)):
                    caller = fmt_serial_class(fmt_class_prefix(subtype))
                    args = fmt_func_args([('value', 'valueDict')])
                    deserialize_call = fmt_func_call(
                        caller=caller, callee='deserialize', args=args)
                    self.emit('return {};'.format(deserialize_call))
                self.emit()
            if struct.is_catch_all():
                # Unknown tags fall back to the base (catch-all) struct.
                emit_struct_deserialize_logic(struct)
            else:
                description_str = (
                    '[NSString stringWithFormat:@"Tag has an invalid '
                    'value: \\\"%@\\\".", valueDict[@".tag"]]')
                self._generate_throw_error('InvalidTag', description_str)
    self.emit()
|
python
|
{
"resource": ""
}
|
q24530
|
ObjCTypesBackend._generate_union_serializer
|
train
|
def _generate_union_serializer(self, union):
    """Emits the serialize method for the serialization object for the given union.

    Generates an Objective-C class method `+serialize:` that emits one
    `if`/`else if` branch per tag; each branch stores the associated value
    (if any) and the ".tag" discriminator into the JSON dictionary.
    """
    union_name = fmt_class_prefix(union)
    with self.block_func(
            func='serialize',
            args=fmt_func_args_declaration([('valueObj',
                                             '{} *'.format(union_name))]),
            return_type='NSDictionary<NSString *, id> *',
            class_func=True):
        if not union.all_fields:
            # No tags means valueObj is never read; silence the warning.
            self.emit('#pragma unused(valueObj)')
        self.emit(
            'NSMutableDictionary *jsonDict = [[NSMutableDictionary alloc] init];'
        )
        self.emit()
        first_block = True
        for field in union.all_fields:
            with self.block('{} ([valueObj is{}])'.format(
                    'if' if first_block else 'else if',
                    fmt_camel_upper(field.name))):
                data_type, nullable = unwrap_nullable(field.data_type)
                input_value = 'valueObj.{}'.format(fmt_var(field.name))
                serialize_call = self._fmt_serialization_call(
                    field.data_type, input_value, True)
                def emit_serializer():
                    # Emit the assignment for this tag's associated value.
                    if is_user_defined_type(data_type):
                        if is_struct_type(data_type) and \
                                not data_type.has_enumerated_subtypes():
                            # Flat structs are inlined into the top-level
                            # dict (replacing jsonDict wholesale).
                            self.emit('jsonDict = [{} mutableCopy];'.
                                      format(serialize_call))
                        else:
                            self.emit(
                                'jsonDict[@"{}"] = [{} mutableCopy];'.
                                format(field.name, serialize_call))
                    elif is_primitive_type(data_type):
                        self.emit('jsonDict[@"{}"] = {};'.format(
                            field.name, input_value))
                    else:
                        self.emit('jsonDict[@"{}"] = {};'.format(
                            field.name, serialize_call))
                if not is_void_type(data_type):
                    if not nullable:
                        emit_serializer()
                    else:
                        # Nullable values are only written when present.
                        with self.block('if (valueObj.{})'.format(
                                fmt_var(field.name))):
                            emit_serializer()
                self.emit('jsonDict[@".tag"] = @"{}";'.format(field.name))
            if first_block:
                first_block = False
        with self.block('else'):
            if not union.closed:
                # Open unions serialize unknown states as "other".
                self.emit('jsonDict[@".tag"] = @"other";')
            else:
                self._generate_throw_error(
                    'InvalidTag',
                    '@"Object not properly initialized. Tag has an unknown value."'
                )
        self.emit()
        self.emit('return [jsonDict count] > 0 ? jsonDict : nil;')
    self.emit()
|
python
|
{
"resource": ""
}
|
q24531
|
ObjCTypesBackend._generate_union_deserializer
|
train
|
def _generate_union_deserializer(self, union):
    """Emits the deserialize method for the serialization object for the given union.

    Generates an Objective-C class method `+deserialize:` that reads the
    ".tag" discriminator and constructs the union instance via the
    tag-specific initializer.
    """
    union_name = fmt_class_prefix(union)
    with self.block_func(
            func='deserialize',
            args=fmt_func_args_declaration([('valueDict',
                                             'NSDictionary<NSString *, id> *')]),
            return_type='{} *'.format(union_name),
            class_func=True):
        if not union.all_fields:
            self.emit('#pragma unused(valueDict)')
        self.emit('NSString *tag = valueDict[@".tag"];')
        self.emit()
        first_block = True
        for field in union.all_fields:
            base_cond = '{} ([tag isEqualToString:@"{}"])'
            with self.block(
                    base_cond.format('if' if first_block else 'else if',
                                     field.name)):
                if first_block:
                    first_block = False
                if not is_void_type(field.data_type):
                    data_type, nullable = unwrap_nullable(field.data_type)
                    if is_struct_type(
                            data_type
                    ) and not data_type.has_enumerated_subtypes():
                        # Flat structs were serialized inline at the top
                        # level, so deserialize from the whole dict.
                        input_value = 'valueDict'
                    else:
                        input_value = 'valueDict[@"{}"]'.format(field.name)
                    if is_primitive_type(data_type):
                        deserialize_call = input_value
                    else:
                        deserialize_call = self._fmt_serialization_call(
                            data_type, input_value, False)
                    if nullable:
                        # Missing nullable values deserialize to nil.
                        deserialize_call = '{} ? {} : nil'.format(
                            input_value, deserialize_call)
                    self.emit('{}{} = {};'.format(
                        fmt_type(field.data_type),
                        fmt_var(field.name), deserialize_call))
                    deserialized_obj_args = [(fmt_var(field.name),
                                              fmt_var(field.name))]
                else:
                    # Void tags carry no associated value.
                    deserialized_obj_args = []
                args = fmt_func_args(deserialized_obj_args)
                callee = self._cstor_name_from_field(field)
                self.emit('return {};'.format(
                    fmt_func_call(
                        caller=fmt_alloc_call(union_name),
                        callee=callee,
                        args=args)))
        with self.block('else'):
            if not union.closed:
                # Open unions map unrecognized tags to the "other" state.
                callee = 'initWithOther'
                self.emit('return {};'.format(
                    fmt_func_call(
                        caller=fmt_alloc_call(union_name), callee=callee)))
            else:
                reason = (
                    '[NSString stringWithFormat:@"Tag has an '
                    'invalid value: \\\"%@\\\".", valueDict[@".tag"]]')
                self._generate_throw_error('InvalidTag', reason)
    self.emit()
|
python
|
{
"resource": ""
}
|
q24532
|
ObjCTypesBackend._generate_route_objects_h
|
train
|
def _generate_route_objects_h(
        self,
        route_schema,  # pylint: disable=unused-argument
        namespace):
    """Emits header files for Route objects which encapsulate information
    regarding each route. These objects are passed as parameters when route calls are made.

    One `DBRoute *` class-level accessor is declared per route in the
    namespace. The file is written to Routes/RouteObjects/<class>.h.
    """
    output_path = 'Routes/RouteObjects/{}.h'.format(
        fmt_route_obj_class(namespace.name))
    with self.output_to_relative_path(output_path):
        self.emit_raw(base_file_comment)
        self.emit('#import <Foundation/Foundation.h>')
        self.emit()
        self._generate_imports_h(['DBRoute'])
        self.emit()
        self.emit('NS_ASSUME_NONNULL_BEGIN')
        self.emit()
        self.emit(comment_prefix)
        description_str = (
            'Stone route objects for the {} namespace. Each route in '
            'the {} namespace has its own static object, which contains '
            'information about the route.')
        self.emit_wrapped_text(
            description_str.format(
                fmt_class(namespace.name), fmt_class(namespace.name)),
            prefix=comment_prefix)
        self.emit(comment_prefix)
        with self.block_h(fmt_route_obj_class(namespace.name)):
            # One documented class-method accessor per route.
            for route in namespace.routes:
                route_name = fmt_route_var(namespace.name, route)
                route_obj_access_signature = fmt_signature(
                    func=route_name,
                    args=None,
                    return_type='DBRoute *',
                    class_func=True)
                base_str = 'Accessor method for the {} route object.'
                self.emit_wrapped_text(
                    base_str.format(fmt_route_func(route)),
                    prefix=comment_prefix)
                self.emit('{};'.format(route_obj_access_signature))
                self.emit()
        self.emit()
        self.emit('NS_ASSUME_NONNULL_END')
        self.emit()
|
python
|
{
"resource": ""
}
|
q24533
|
ObjCTypesBackend._generate_union_tag_vars_funcs
|
train
|
def _generate_union_tag_vars_funcs(self, union):
    """Emits the getter methods for retrieving tag-specific state. Setters throw
    an error in the event an associated tag state variable is accessed without
    the correct tag state.

    Each non-void tag gets a getter that raises an
    `IllegalStateException` when the union is not in that tag state.
    """
    for field in union.all_fields:
        if not is_void_type(field.data_type):
            enum_field_name = fmt_enum_name(field.name, union)
            with self.block_func(
                    func=fmt_camel(field.name),
                    args=[],
                    return_type=fmt_type(field.data_type)):
                # Guard: raise when accessed in the wrong tag state.
                with self.block(
                        'if (![self is{}])'.format(
                            fmt_camel_upper(field.name)),
                        delim=('{', '}')):
                    error_msg = 'Invalid tag: required {}, but was %@.'.format(
                        enum_field_name)
                    throw_exc = (
                        '[NSException raise:@"IllegalStateException" '
                        'format:@"{}", [self tagName]];')
                    self.emit(throw_exc.format(error_msg))
                self.emit('return _{};'.format(fmt_var(field.name)))
            self.emit()
|
python
|
{
"resource": ""
}
|
q24534
|
ObjCTypesBackend._generate_struct_properties
|
train
|
def _generate_struct_properties(self, fields):
    """Emits an Objective-C `@property` declaration, preceded by a wrapped
    doc comment, for each of the given struct fields.

    Args:
        fields: Iterable of struct field IR nodes.
    """
    for field in fields:
        # Process doc references once, falling back to the shared
        # `undocumented` placeholder for fields with no docstring.
        # The previous code ran the already-processed text through
        # process_doc() a second time at the emit site, which was
        # redundant (and incorrect if substitution is not idempotent).
        doc = self.process_doc(field.doc,
                               self._docf) if field.doc else undocumented
        self.emit_wrapped_text(doc, prefix=comment_prefix)
        self.emit(fmt_property(field=field))
        self.emit()
|
python
|
{
"resource": ""
}
|
q24535
|
ObjCTypesBackend._generate_union_properties
|
train
|
def _generate_union_properties(self, fields):
    """Emits union instance properties from the given fields.

    Only non-void tags get a backing property; each property's doc comment
    warns that the matching `is<Tag>` check must be performed first.
    """
    for field in fields:
        # void types do not need properties to store additional state
        # information
        if not is_void_type(field.data_type):
            doc = self.process_doc(
                field.doc, self._docf) if field.doc else undocumented
            warning_str = (
                ' @note Ensure the `is{}` method returns true before accessing, '
                'otherwise a runtime exception will be raised.')
            doc += warning_str.format(fmt_camel_upper(field.name))
            # NOTE(review): `doc` already went through process_doc() above,
            # so the call below re-processes it — presumably harmless, but
            # confirm process_doc() is idempotent on processed text.
            self.emit_wrapped_text(
                self.process_doc(doc, self._docf), prefix=comment_prefix)
            self.emit(fmt_property(field=field))
            self.emit()
|
python
|
{
"resource": ""
}
|
q24536
|
ObjCTypesBackend._generate_union_tag_property
|
train
|
def _generate_union_tag_property(self, union):
    """Emits the `tag` property that exposes the union's current state."""
    tag_doc = 'Represents the union\'s current tag state.'
    self.emit_wrapped_text(tag_doc, prefix=comment_prefix)
    tag_type = '{}'.format(fmt_enum_name('tag', union))
    self.emit(fmt_property_str(prop='tag', typ=tag_type))
    self.emit()
|
python
|
{
"resource": ""
}
|
q24537
|
ObjCTypesBackend._generate_class_comment
|
train
|
def _generate_class_comment(self, data_type):
    """Emits the standard header doc comment for a generated union or struct
    class: a title line, the type's own documentation (if any), and a note
    about the `DBSerializable` protocol.

    Args:
        data_type: A struct or union IR node.

    Raises:
        TypeError: If `data_type` is neither a struct nor a union.
    """
    if is_struct_type(data_type):
        class_type = 'struct'
    elif is_union_type(data_type):
        class_type = 'union'
    else:
        raise TypeError('Can\'t handle type %r' % type(data_type))
    self.emit(comment_prefix)
    self.emit_wrapped_text(
        'The `{}` {}.'.format(fmt_class(data_type.name), class_type),
        prefix=comment_prefix)
    if data_type.doc:
        self.emit(comment_prefix)
        self.emit_wrapped_text(
            self.process_doc(data_type.doc, self._docf),
            prefix=comment_prefix)
    self.emit(comment_prefix)
    protocol_str = (
        'This class implements the `DBSerializable` protocol '
        '(serialize and deserialize instance methods), which is required '
        'for all Obj-C SDK API route objects.')
    # The template above has no `{}` placeholders, so the previous
    # `.format(fmt_class_prefix(data_type), class_type)` call was a no-op
    # computing unused arguments; emit the text directly.
    self.emit_wrapped_text(protocol_str, prefix=comment_prefix)
    self.emit(comment_prefix)
|
python
|
{
"resource": ""
}
|
q24538
|
ObjCTypesBackend._generate_throw_error
|
train
|
def _generate_throw_error(self, name, reason):
    """Emits an Objective-C line that throws an NSException built from the
    given exception name and reason expression."""
    template = '@throw([NSException exceptionWithName:@"{}" reason:{} userInfo:nil]);'
    line = template.format(name, reason)
    self.emit(line)
|
python
|
{
"resource": ""
}
|
q24539
|
PythonClientBackend._generate_route_methods
|
train
|
def _generate_route_methods(self, namespaces):
    """Emits client methods for every route, grouped by namespace.

    Namespaces without routes are skipped; each emitted group is preceded
    by a banner comment naming the namespace.
    """
    self.cur_namespace = None
    for ns in namespaces:
        if not ns.routes:
            continue
        self.emit('# ------------------------------------------')
        self.emit('# Routes in {} namespace'.format(ns.name))
        self.emit()
        self._generate_routes(ns)
|
python
|
{
"resource": ""
}
|
q24540
|
PythonClientBackend._generate_routes
|
train
|
def _generate_routes(self, namespace):
    """
    Generates Python methods that correspond to routes in the namespace.

    When `--auth-type` was supplied, only routes whose `auth` attribute
    intersects the supported auth types are generated; otherwise every
    route is generated. Download-style routes additionally get a
    `*_to_file` variant.
    """
    # Hack: needed for _docf()
    self.cur_namespace = namespace
    # list of auth_types supported in this base class.
    # this is passed with the new -w flag
    if self.args.auth_type is not None:
        self.supported_auth_types = [auth_type.strip().lower() for auth_type in self.args.auth_type.split(',')]
    check_route_name_conflict(namespace)
    for route in namespace.routes:
        # compatibility mode : included routes are passed by whitelist
        # actual auth attr included in the route is ignored in this mode.
        if self.supported_auth_types is None:
            self._generate_route_helper(namespace, route)
            if route.attrs.get('style') == 'download':
                self._generate_route_helper(namespace, route, True)
        else:
            # Filtered mode: skip routes with no auth attribute, and
            # generate only those sharing at least one auth mode with
            # the supported set.
            route_auth_attr = None
            if route.attrs is not None:
                route_auth_attr = route.attrs.get('auth')
            if route_auth_attr is None:
                continue
            route_auth_modes = [mode.strip().lower() for mode in route_auth_attr.split(',')]
            for base_auth_type in self.supported_auth_types:
                if base_auth_type in route_auth_modes:
                    self._generate_route_helper(namespace, route)
                    if route.attrs.get('style') == 'download':
                        self._generate_route_helper(namespace, route, True)
                    break
|
python
|
{
"resource": ""
}
|
q24541
|
PythonClientBackend._generate_route_helper
|
train
|
def _generate_route_helper(self, namespace, route, download_to_file=False):
    """Generate a Python method that corresponds to a route.
    :param namespace: Namespace that the route belongs to.
    :param stone.ir.ApiRoute route: IR node for the route.
    :param bool download_to_file: Whether a special version of the route
        that downloads the response body to a file should be generated.
        This can only be used for download-style routes.
    """
    arg_data_type = route.arg_data_type
    result_data_type = route.result_data_type
    # "upload" routes take a binary request body; "download" routes
    # return a binary response body alongside the JSON result.
    request_binary_body = route.attrs.get('style') == 'upload'
    response_binary_body = route.attrs.get('style') == 'download'
    if download_to_file:
        assert response_binary_body, 'download_to_file can only be set ' \
            'for download-style routes.'
        self._generate_route_method_decl(namespace,
                                         route,
                                         arg_data_type,
                                         request_binary_body,
                                         method_name_suffix='_to_file',
                                         extra_args=['download_path'])
    else:
        self._generate_route_method_decl(namespace,
                                         route,
                                         arg_data_type,
                                         request_binary_body)
    with self.indent():
        extra_request_args = None
        extra_return_arg = None
        footer = None
        if request_binary_body:
            extra_request_args = [('f',
                                   'bytes',
                                   'Contents to upload.')]
        elif download_to_file:
            extra_request_args = [('download_path',
                                   'str',
                                   'Path on local machine to save file.')]
        if response_binary_body and not download_to_file:
            extra_return_arg = ':class:`requests.models.Response`'
            footer = DOCSTRING_CLOSE_RESPONSE
        if route.doc:
            func_docstring = self.process_doc(route.doc, self._docf)
        else:
            func_docstring = None
        self._generate_docstring_for_func(
            namespace,
            arg_data_type,
            result_data_type,
            route.error_data_type,
            overview=func_docstring,
            extra_request_args=extra_request_args,
            extra_return_arg=extra_return_arg,
            footer=footer,
        )
        self._maybe_generate_deprecation_warning(route)
        # Code to instantiate a class for the request data type
        if is_void_type(arg_data_type):
            self.emit('arg = None')
        elif is_struct_type(arg_data_type):
            self.generate_multiline_list(
                [f.name for f in arg_data_type.all_fields],
                before='arg = {}.{}'.format(
                    fmt_namespace(arg_data_type.namespace.name),
                    fmt_class(arg_data_type.name)),
            )
        elif not is_union_type(arg_data_type):
            # Union args are passed straight through as `arg` by the
            # method declaration; anything else is unsupported.
            raise AssertionError('Unhandled request type %r' %
                                 arg_data_type)
        # Code to make the request
        args = [
            '{}.{}'.format(fmt_namespace(namespace.name),
                           fmt_func(route.name, version=route.version)),
            "'{}'".format(namespace.name),
            'arg']
        if request_binary_body:
            args.append('f')
        else:
            args.append('None')
        self.generate_multiline_list(args, 'r = self.request', compact=False)
        if download_to_file:
            # r is (result, http response); save the body, return result.
            self.emit('self._save_body_to_file(download_path, r[1])')
            if is_void_type(result_data_type):
                self.emit('return None')
            else:
                self.emit('return r[0]')
        else:
            if is_void_type(result_data_type):
                self.emit('return None')
            else:
                self.emit('return r')
    self.emit()
|
python
|
{
"resource": ""
}
|
q24542
|
PythonClientBackend._generate_route_method_decl
|
train
|
def _generate_route_method_decl(
        self, namespace, route, arg_data_type, request_binary_body,
        method_name_suffix='', extra_args=None):
    """Generates the method prototype for a route.

    The generated name is `<namespace>_<route>` (plus any suffix). Struct
    arguments are flattened into individual parameters; union arguments
    become a single `arg` parameter; binary uploads add an `f` parameter.
    """
    args = ['self']
    if extra_args:
        args += extra_args
    if request_binary_body:
        args.append('f')
    if is_struct_type(arg_data_type):
        for field in arg_data_type.all_fields:
            if is_nullable_type(field.data_type):
                args.append('{}=None'.format(field.name))
            elif field.has_default:
                # TODO(kelkabany): Decide whether we really want to set the
                # default in the argument list. This will send the default
                # over the wire even if it isn't overridden. The benefit is
                # it locks in a default even if it is changed server-side.
                if is_user_defined_type(field.data_type):
                    ns = field.data_type.namespace
                else:
                    ns = None
                arg = '{}={}'.format(
                    field.name,
                    self._generate_python_value(ns, field.default))
                args.append(arg)
            else:
                args.append(field.name)
    elif is_union_type(arg_data_type):
        args.append('arg')
    elif not is_void_type(arg_data_type):
        raise AssertionError('Unhandled request type: %r' %
                             arg_data_type)
    method_name = fmt_func(route.name + method_name_suffix, version=route.version)
    namespace_name = fmt_underscores(namespace.name)
    self.generate_multiline_list(args, 'def {}_{}'.format(namespace_name, method_name), ':')
|
python
|
{
"resource": ""
}
|
q24543
|
PythonClientBackend._generate_docstring_for_func
|
train
|
def _generate_docstring_for_func(self, namespace, arg_data_type,
                                 result_data_type=None, error_data_type=None,
                                 overview=None, extra_request_args=None,
                                 extra_return_arg=None, footer=None):
    """
    Generates a docstring for a function or method.
    This function is versatile. It will create a docstring using all the
    data that is provided.
    :param arg_data_type: The data type describing the argument to the
        route. The data type should be a struct, and each field will be
        treated as an input parameter of the method.
    :param result_data_type: The data type of the route result.
    :param error_data_type: The data type of the route result in the case
        of an error.
    :param str overview: A description of the route that will be located
        at the top of the docstring.
    :param extra_request_args: [(field name, field type, field doc), ...]
        Describes any additional parameters for the method that aren't a
        field in arg_data_type.
    :param str extra_return_arg: Name of an additional return type. If
        this is specified, it is assumed that the return of the function
        will be a tuple of return_data_type and extra_return_arg.
    :param str footer: Additional notes at the end of the docstring.
    """
    fields = [] if is_void_type(arg_data_type) else arg_data_type.fields
    if not fields and not overview:
        # If we don't have an overview or any input parameters, we skip the
        # docstring altogether.
        return
    self.emit('"""')
    if overview:
        self.emit_wrapped_text(overview)
    # Description of all input parameters
    if extra_request_args or fields:
        if overview:
            # Add a blank line if we had an overview
            self.emit()
        if extra_request_args:
            for name, data_type_name, doc in extra_request_args:
                if data_type_name:
                    field_doc = ':param {} {}: {}'.format(data_type_name,
                                                          name, doc)
                    self.emit_wrapped_text(field_doc,
                                           subsequent_prefix=' ')
                else:
                    self.emit_wrapped_text(
                        ':param {}: {}'.format(name, doc),
                        subsequent_prefix=' ')
        if is_struct_type(arg_data_type):
            for field in fields:
                if field.doc:
                    if is_user_defined_type(field.data_type):
                        field_doc = ':param {}: {}'.format(
                            field.name, self.process_doc(field.doc, self._docf))
                    else:
                        field_doc = ':param {} {}: {}'.format(
                            self._format_type_in_doc(namespace, field.data_type),
                            field.name,
                            self.process_doc(field.doc, self._docf),
                        )
                    self.emit_wrapped_text(
                        field_doc, subsequent_prefix=' ')
                    if is_user_defined_type(field.data_type):
                        # It's clearer to declare the type of a composite on
                        # a separate line since it references a class in
                        # another module
                        self.emit(':type {}: {}'.format(
                            field.name,
                            self._format_type_in_doc(namespace, field.data_type),
                        ))
                else:
                    # If the field has no docstring, then just document its
                    # type.
                    field_doc = ':type {}: {}'.format(
                        field.name,
                        self._format_type_in_doc(namespace, field.data_type),
                    )
                    self.emit_wrapped_text(field_doc)
        elif is_union_type(arg_data_type):
            if arg_data_type.doc:
                self.emit_wrapped_text(':param arg: {}'.format(
                    self.process_doc(arg_data_type.doc, self._docf)),
                    subsequent_prefix=' ')
            self.emit(':type arg: {}'.format(
                self._format_type_in_doc(namespace, arg_data_type)))
    if overview and not (extra_request_args or fields):
        # Only output an empty line if we had an overview and haven't
        # started a section on declaring types.
        self.emit()
    if extra_return_arg:
        # Special case where the function returns a tuple. The first
        # element is the JSON response. The second element is the
        # extra_return_arg param.
        args = []
        if is_void_type(result_data_type):
            args.append('None')
        else:
            rtype = self._format_type_in_doc(namespace,
                                             result_data_type)
            args.append(rtype)
        args.append(extra_return_arg)
        self.generate_multiline_list(args, ':rtype: ')
    else:
        if is_void_type(result_data_type):
            self.emit(':rtype: None')
        else:
            rtype = self._format_type_in_doc(namespace, result_data_type)
            self.emit(':rtype: {}'.format(rtype))
    if not is_void_type(error_data_type) and error_data_type.fields:
        self.emit(':raises: :class:`{}`'.format(self.args.error_class_path))
        self.emit()
        # To provide more clarity to a dev who reads the docstring, suggest
        # the route's error class. This is confusing, however, because we
        # don't know where the error object that's raised will store
        # the more detailed route error defined in stone.
        error_class_name = self.args.error_class_path.rsplit('.', 1)[-1]
        self.emit('If this raises, {} will contain:'.format(error_class_name))
        with self.indent():
            self.emit(self._format_type_in_doc(namespace, error_data_type))
    if footer:
        self.emit()
        self.emit_wrapped_text(footer)
    self.emit('"""')
|
python
|
{
"resource": ""
}
|
q24544
|
PythonClientBackend._format_type_in_doc
|
train
|
def _format_type_in_doc(self, namespace, data_type):
    """
    Render `data_type` as a type reference that Sphinx can resolve inside
    a generated docstring.
    """
    if is_void_type(data_type):
        return 'None'
    if is_user_defined_type(data_type):
        qualified_ref = ':class:`{}.{}.{}`'
        return qualified_ref.format(
            self.args.types_package, namespace.name, fmt_type(data_type))
    return fmt_type(data_type)
|
python
|
{
"resource": ""
}
|
q24545
|
ObjCBackend._generate_client_m
|
train
|
def _generate_client_m(self, api):
    """Generates client base implementation file. For each namespace, the client will
    have an object field that encapsulates each route in the particular namespace.

    The generated initializer stores the transport client and constructs
    one routes object per namespace that actually has routes.
    """
    self.emit_raw(base_file_comment)
    import_classes = [self.args.module_name]
    # Only import routes classes for namespaces that have routes for
    # this auth type.
    import_classes += [
        fmt_routes_class(ns.name, self.args.auth_type)
        for ns in api.namespaces.values()
        if ns.routes and self.namespace_to_has_routes[ns]
    ]
    import_classes.append(
        '{}Protocol'.format(self.args.transport_client_name))
    self._generate_imports_m(import_classes)
    with self.block_m(self.args.class_name):
        client_args = fmt_func_args_declaration(
            [('client',
              'id<{}>'.format(self.args.transport_client_name))])
        with self.block_func(
                func='initWithTransportClient',
                args=client_args,
                return_type='instancetype'):
            self.emit('self = [super init];')
            with self.block_init():
                self.emit('_transportClient = client;')
                for namespace in api.namespaces.values():
                    if namespace.routes and self.namespace_to_has_routes[namespace]:
                        base_string = '_{}Routes = [[{} alloc] init:client];'
                        self.emit(
                            base_string.format(
                                fmt_var(namespace.name),
                                fmt_routes_class(namespace.name,
                                                 self.args.auth_type)))
|
python
|
{
"resource": ""
}
|
q24546
|
ObjCBackend._generate_client_h
|
train
|
def _generate_client_h(self, api):
    """Generates client base header file. For each namespace, the client will
    have an object field that encapsulates each route in the particular namespace."""
    self.emit_raw(stone_warning)
    self.emit('#import <Foundation/Foundation.h>')
    import_classes = [
        fmt_routes_class(ns.name, self.args.auth_type)
        for ns in api.namespaces.values()
        if ns.routes and self.namespace_to_has_routes[ns]
    ]
    import_classes.append('DBRequestErrors')
    import_classes.append('DBTasks')
    # NOTE(review): this reuses the .m import helper while generating a
    # header — presumably it emits plain #import lines either way; confirm.
    self._generate_imports_m(import_classes)
    self.emit()
    self.emit('NS_ASSUME_NONNULL_BEGIN')
    self.emit()
    self.emit('@protocol {};'.format(self.args.transport_client_name))
    self.emit()
    self.emit(comment_prefix)
    description_str = (
        'Base client object that contains an instance field for '
        'each namespace, each of which contains references to all routes within '
        'that namespace. Fully-implemented API clients will inherit this class.'
    )
    self.emit_wrapped_text(description_str, prefix=comment_prefix)
    self.emit(comment_prefix)
    with self.block_h(
            self.args.class_name,
            protected=[
                ('transportClient',
                 'id<{}>'.format(self.args.transport_client_name))
            ]):
        self.emit()
        # One documented routes property per namespace with routes.
        for namespace in api.namespaces.values():
            if namespace.routes and self.namespace_to_has_routes[namespace]:
                class_doc = 'Routes within the `{}` namespace.'.format(
                    fmt_var(namespace.name))
                self.emit_wrapped_text(class_doc, prefix=comment_prefix)
                prop = '{}Routes'.format(fmt_var(namespace.name))
                typ = '{} *'.format(
                    fmt_routes_class(namespace.name, self.args.auth_type))
                self.emit(fmt_property_str(prop=prop, typ=typ))
                self.emit()
        client_args = fmt_func_args_declaration(
            [('client',
              'id<{}>'.format(self.args.transport_client_name))])
        description_str = (
            'Initializes the `{}` object with a networking client.')
        self.emit_wrapped_text(
            description_str.format(self.args.class_name),
            prefix=comment_prefix)
        init_signature = fmt_signature(
            func='initWithTransportClient',
            args=client_args,
            return_type='instancetype')
        self.emit('{};'.format(init_signature))
        self.emit()
    self.emit()
    self.emit('NS_ASSUME_NONNULL_END')
|
python
|
{
"resource": ""
}
|
q24547
|
ObjCBackend._generate_routes_m
|
train
|
def _generate_routes_m(self, namespace):
    """Generates implementation file for namespace object that has as methods
    all routes within the namespace.

    Routes are filtered by auth type ('noauth' always passes). For routes
    whose style has client-arg variants, one method is emitted per variant;
    structs with defaulted fields additionally get a convenience overload
    that omits the defaulted arguments.
    """
    with self.block_m(
            fmt_routes_class(namespace.name, self.args.auth_type)):
        init_args = fmt_func_args_declaration([(
            'client', 'id<{}>'.format(self.args.transport_client_name))])
        with self.block_func(
                func='init', args=init_args, return_type='instancetype'):
            self.emit('self = [super init];')
            with self.block_init():
                self.emit('_client = client;')
        self.emit()
        # Mapping from route style to the generated task class name,
        # passed in serialized on the command line.
        style_to_request = json.loads(self.args.z__style_to_request)
        for route in namespace.routes:
            if (route.attrs.get('auth') != self.args.auth_type
                    and route.attrs.get('auth') != 'noauth'):
                continue
            route_type = route.attrs.get('style')
            client_args = json.loads(self.args.client_args)
            if route_type in client_args.keys():
                for args_data in client_args[route_type]:
                    task_type_key, type_data_dict = tuple(args_data)
                    task_type_name = style_to_request[task_type_key]
                    func_suffix = type_data_dict[0]
                    extra_args = [
                        tuple(type_data[:-1])
                        for type_data in type_data_dict[1]
                    ]
                    if (is_struct_type(route.arg_data_type) and
                            self._struct_has_defaults(route.arg_data_type)):
                        route_args, _ = self._get_default_route_args(
                            namespace, route)
                        self._generate_route_m(route, namespace,
                                               route_args, extra_args,
                                               task_type_name, func_suffix)
                    route_args, _ = self._get_route_args(namespace, route)
                    self._generate_route_m(route, namespace, route_args,
                                           extra_args, task_type_name,
                                           func_suffix)
            else:
                task_type_name = style_to_request[route_type]
                if (is_struct_type(route.arg_data_type) and
                        self._struct_has_defaults(route.arg_data_type)):
                    route_args, _ = self._get_default_route_args(
                        namespace, route)
                    self._generate_route_m(route, namespace, route_args,
                                           [], task_type_name, '')
                route_args, _ = self._get_route_args(namespace, route)
                self._generate_route_m(route, namespace, route_args, [],
                                       task_type_name, '')
|
python
|
{
"resource": ""
}
|
q24548
|
ObjCBackend._generate_route_m
|
train
|
def _generate_route_m(self, route, namespace, route_args, extra_args,
                      task_type_name, func_suffix):
    """Generates route method implementation for the given route.

    The method builds the route's arg object from its parameters (or uses
    the caller-supplied union value), then forwards route + arg + any
    extra transport parameters to the transport client.
    """
    user_args = list(route_args)
    transport_args = [
        ('route', 'route'),
        ('arg', 'arg' if not is_void_type(route.arg_data_type) else 'nil'),
    ]
    # extra_args entries carry (param name, transport value, param type):
    # the name/type go on the method signature, name/value to transport.
    for name, value, typ in extra_args:
        user_args.append((name, typ))
        transport_args.append((name, value))
    with self.block_func(
            func='{}{}'.format(fmt_route_func(route), func_suffix),
            args=fmt_func_args_declaration(user_args),
            return_type='{} *'.format(task_type_name)):
        self.emit('DBRoute *route = {}.{};'.format(
            fmt_route_obj_class(namespace.name),
            fmt_route_var(namespace.name, route)))
        if is_union_type(route.arg_data_type):
            # Union args are passed in whole by the caller.
            self.emit('{} *arg = {};'.format(
                fmt_class_prefix(route.arg_data_type),
                fmt_var(route.arg_data_type.name)))
        elif not is_void_type(route.arg_data_type):
            # Struct args are constructed from the method parameters.
            init_call = fmt_func_call(
                caller=fmt_alloc_call(
                    caller=fmt_class_prefix(route.arg_data_type)),
                callee=self._cstor_name_from_fields_names(route_args),
                args=fmt_func_args([(f[0], f[0]) for f in route_args]))
            self.emit('{} *arg = {};'.format(
                fmt_class_prefix(route.arg_data_type), init_call))
        request_call = fmt_func_call(
            caller='self.client',
            callee='request{}'.format(
                fmt_camel_upper(route.attrs.get('style'))),
            args=fmt_func_args(transport_args))
        self.emit('return {};'.format(request_call))
    self.emit()
|
python
|
{
"resource": ""
}
|
q24549
|
ObjCBackend._generate_route_signature
|
train
|
def _generate_route_signature(
        self,
        route,
        namespace,  # pylint: disable=unused-argument
        route_args,
        extra_args,
        doc_list,
        task_type_name,
        func_suffix):
    """Generates route method signature for the given route.

    Emits the doc comment (route description, @param list, @return note)
    followed by the method declaration, including any deprecation
    attribute.

    NOTE(review): this appends `extra_args` entries onto the caller's
    `route_args` list in place — callers must not reuse that list.
    """
    for name, _, typ in extra_args:
        route_args.append((name, typ))
    deprecated = 'DEPRECATED: ' if route.deprecated else ''
    func_name = '{}{}'.format(fmt_route_func(route), func_suffix)
    self.emit(comment_prefix)
    if route.doc:
        route_doc = self.process_doc(route.doc, self._docf)
    else:
        route_doc = 'The {} route'.format(func_name)
    self.emit_wrapped_text(
        deprecated + route_doc, prefix=comment_prefix, width=120)
    self.emit(comment_prefix)
    for name, doc in doc_list:
        self.emit_wrapped_text(
            '@param {} {}'.format(name, doc if doc else undocumented),
            prefix=comment_prefix,
            width=120)
    self.emit(comment_prefix)
    output = (
        '@return Through the response callback, the caller will ' +
        'receive a `{}` object on success or a `{}` object on failure.')
    output = output.format(
        fmt_type(route.result_data_type, tag=False, no_ptr=True),
        fmt_type(route.error_data_type, tag=False, no_ptr=True))
    self.emit_wrapped_text(output, prefix=comment_prefix, width=120)
    self.emit(comment_prefix)
    # Void result/error types are represented as DBNilObject in the
    # generic task type parameters.
    result_type_str = fmt_type(route.result_data_type) if not is_void_type(
        route.result_data_type) else 'DBNilObject *'
    error_type_str = fmt_type(route.error_data_type) if not is_void_type(
        route.error_data_type) else 'DBNilObject *'
    return_type = '{}<{}, {}> *'.format(task_type_name, result_type_str,
                                        error_type_str)
    deprecated = self._get_deprecation_warning(route)
    route_signature = fmt_signature(
        func=func_name,
        args=fmt_func_args_declaration(route_args),
        return_type='{}'.format(return_type))
    self.emit('{}{};'.format(route_signature, deprecated))
    self.emit()
|
python
|
{
"resource": ""
}
|
q24550
|
ParserFactory.get_parser
|
train
|
def get_parser(self):
    """
    Reset per-parse state (current path, collected anonymous definitions,
    and the exhausted flag) and hand this factory back for another parse.
    :return: ParserFactory
    """
    self.path, self.anony_defs, self.exhausted = None, [], False
    return self
|
python
|
{
"resource": ""
}
|
q24551
|
generate_validator_constructor
|
train
|
def generate_validator_constructor(ns, data_type):
    """
    Given a Stone data type, returns a string that can be used to construct
    the appropriate validation object in Python.

    Args:
        ns (ApiNamespace): Namespace the generated code lives in; used to
            decide whether a referenced ``*_validator`` needs a namespace
            prefix.
        data_type: The Stone IR data type to build a validator for.

    Returns:
        str: Python source for a ``bv.*`` validator expression, or a
        reference to a module-level ``*_validator`` for user-defined
        types and aliases.
    """
    # Strip an outer Nullable wrapper here; it is re-applied at the end.
    dt, nullable_dt = unwrap_nullable(data_type)
    if is_list_type(dt):
        # The element validator is built recursively.
        v = generate_func_call(
            'bv.List',
            args=[
                generate_validator_constructor(ns, dt.data_type)],
            kwargs=[
                ('min_items', dt.min_items),
                ('max_items', dt.max_items)],
        )
    elif is_map_type(dt):
        v = generate_func_call(
            'bv.Map',
            args=[
                generate_validator_constructor(ns, dt.key_data_type),
                generate_validator_constructor(ns, dt.value_data_type),
            ]
        )
    elif is_numeric_type(dt):
        v = generate_func_call(
            'bv.{}'.format(dt.name),
            kwargs=[
                ('min_value', dt.min_value),
                ('max_value', dt.max_value)],
        )
    elif is_string_type(dt):
        pattern = None
        if dt.pattern is not None:
            # repr() embeds the pattern as a quoted Python literal.
            pattern = repr(dt.pattern)
        v = generate_func_call(
            'bv.String',
            kwargs=[
                ('min_length', dt.min_length),
                ('max_length', dt.max_length),
                ('pattern', pattern)],
        )
    elif is_timestamp_type(dt):
        v = generate_func_call(
            'bv.Timestamp',
            args=[repr(dt.format)],
        )
    elif is_user_defined_type(dt):
        # User-defined types have a module-level <Class>_validator,
        # possibly in another namespace's module.
        v = fmt_class(dt.name) + '_validator'
        if ns.name != dt.namespace.name:
            v = '{}.{}'.format(fmt_namespace(dt.namespace.name), v)
    elif is_alias(dt):
        # Assume that the alias has already been declared elsewhere.
        name = fmt_class(dt.name) + '_validator'
        if ns.name != dt.namespace.name:
            name = '{}.{}'.format(fmt_namespace(dt.namespace.name), name)
        v = name
    elif is_boolean_type(dt) or is_bytes_type(dt) or is_void_type(dt):
        v = generate_func_call('bv.{}'.format(dt.name))
    else:
        raise AssertionError('Unsupported data type: %r' % dt)
    if nullable_dt:
        # Restore the nullability stripped by unwrap_nullable() above.
        return generate_func_call('bv.Nullable', args=[v])
    else:
        return v
|
python
|
{
"resource": ""
}
|
q24552
|
generate_func_call
|
train
|
def generate_func_call(name, args=None, kwargs=None):
    """Render source code for a single function invocation.

    Args:
        name (str): The function name.
        args (list[str]): Positional arguments, already formatted.
        kwargs (list[tuple]): Pairs of (arg: str, value). Pairs whose
            value is None are omitted; non-string values are rendered
            via str().

    Returns:
        str: The formatted call, e.g. ``name(a, b=c)``.
    """
    pieces = list(args) if args else []
    if kwargs:
        for key, value in kwargs:
            if value is not None:
                pieces.append('{}={}'.format(key, value))
    return '{}({})'.format(name, ', '.join(pieces))
|
python
|
{
"resource": ""
}
|
q24553
|
PythonTypesBackend._func_args_from_dict
|
train
|
def _func_args_from_dict(self, d):
"""Given a Python dictionary, creates a string representing arguments
for invoking a function. All arguments with a value of None are
ignored."""
filtered_d = self.filter_out_none_valued_keys(d)
return ', '.join(['%s=%s' % (k, v) for k, v in filtered_d.items()])
|
python
|
{
"resource": ""
}
|
q24554
|
PythonTypesBackend._generate_struct_class_slots
|
train
|
def _generate_struct_class_slots(self, data_type):
    """Emit a ``__slots__`` declaration for a generated struct class.

    Slots keep instances small and prevent accidental attribute
    creation: each field contributes a ``_<name>_value`` slot for its
    value and a ``_<name>_present`` slot tracking whether it was set.
    """
    with self.block('__slots__ =', delim=('[', ']')):
        for member in data_type.fields:
            slot_base = fmt_var(member.name)
            self.emit("'_{}_value',".format(slot_base))
            self.emit("'_{}_present',".format(slot_base))
    self.emit()
|
python
|
{
"resource": ""
}
|
q24555
|
PythonTypesBackend._generate_struct_class_init
|
train
|
def _generate_struct_class_init(self, data_type):
    """
    Generates constructor. The constructor takes all possible fields as
    optional arguments. Any argument that is set on construction sets the
    corresponding field for the instance.

    Args:
        data_type (Struct): The struct whose ``__init__`` is emitted.
    """
    args = ['self']
    for field in data_type.all_fields:
        # Reserved-word-safe argument name (e.g. 'from' -> 'from_').
        field_name_reserved_check = fmt_var(field.name, True)
        args.append('%s=None' % field_name_reserved_check)
    self.generate_multiline_list(args, before='def __init__', after=':')
    with self.indent():
        # Remember where the body starts so an otherwise-empty body can
        # be padded with 'pass' below to stay syntactically valid.
        lineno = self.lineno
        # Call the parent constructor if a super type exists
        if data_type.parent_type:
            class_name = class_name_for_data_type(data_type)
            all_parent_fields = [fmt_func(f.name, check_reserved=True)
                                 for f in data_type.parent_type.all_fields]
            self.generate_multiline_list(
                all_parent_fields,
                before='super({}, self).__init__'.format(class_name))
        # initialize each field
        for field in data_type.fields:
            field_var_name = fmt_var(field.name)
            self.emit('self._{}_value = None'.format(field_var_name))
            self.emit('self._{}_present = False'.format(field_var_name))
        # handle arguments that were set
        for field in data_type.fields:
            field_var_name = fmt_var(field.name, True)
            self.emit('if {} is not None:'.format(field_var_name))
            with self.indent():
                # Assigning through the property runs the field validator.
                self.emit('self.{0} = {0}'.format(field_var_name))
        if lineno == self.lineno:
            self.emit('pass')
        self.emit()
|
python
|
{
"resource": ""
}
|
q24556
|
PythonTypesBackend._generate_struct_class_properties
|
train
|
def _generate_struct_class_properties(self, ns, data_type):
    """
    Each field of the struct has a corresponding setter and getter.
    The setter validates the value being set.

    Args:
        ns (ApiNamespace): Namespace the struct belongs to (used for
            type names in docstrings).
        data_type (Struct): The struct whose properties are emitted.
    """
    for field in data_type.fields:
        field_name = fmt_func(field.name)
        field_name_reserved_check = fmt_func(field.name, check_reserved=True)
        if is_nullable_type(field.data_type):
            # Validate/document against the wrapped type; nullability is
            # handled explicitly in the setter below.
            field_dt = field.data_type.data_type
            dt_nullable = True
        else:
            field_dt = field.data_type
            dt_nullable = False
        # generate getter for field
        self.emit('@property')
        self.emit('def {}(self):'.format(field_name_reserved_check))
        with self.indent():
            self.emit('"""')
            if field.doc:
                self.emit_wrapped_text(
                    self.process_doc(field.doc, self._docf))
                # Sphinx wants an extra line between the text and the
                # rtype declaration.
                self.emit()
            self.emit(':rtype: {}'.format(
                self._python_type_mapping(ns, field_dt)))
            self.emit('"""')
            self.emit('if self._{}_present:'.format(field_name))
            with self.indent():
                self.emit('return self._{}_value'.format(field_name))
            self.emit('else:')
            with self.indent():
                # Unset field: None for nullables, the default if one
                # exists, otherwise an AttributeError.
                if dt_nullable:
                    self.emit('return None')
                elif field.has_default:
                    self.emit('return {}'.format(
                        self._generate_python_value(ns, field.default)))
                else:
                    self.emit(
                        "raise AttributeError(\"missing required field '%s'\")"
                        % field_name
                    )
        self.emit()
        # generate setter for field
        self.emit('@{}.setter'.format(field_name_reserved_check))
        self.emit('def {}(self, val):'.format(field_name_reserved_check))
        with self.indent():
            if dt_nullable:
                # Setting a nullable field to None is equivalent to
                # deleting it.
                self.emit('if val is None:')
                with self.indent():
                    self.emit('del self.{}'.format(field_name_reserved_check))
                    self.emit('return')
            if is_user_defined_type(field_dt):
                self.emit('self._%s_validator.validate_type_only(val)' %
                          field_name)
            else:
                self.emit('val = self._{}_validator.validate(val)'.format(field_name))
            self.emit('self._{}_value = val'.format(field_name))
            self.emit('self._{}_present = True'.format(field_name))
        self.emit()
        # generate deleter for field
        self.emit('@{}.deleter'.format(field_name_reserved_check))
        self.emit('def {}(self):'.format(field_name_reserved_check))
        with self.indent():
            self.emit('self._{}_value = None'.format(field_name))
            self.emit('self._{}_present = False'.format(field_name))
        self.emit()
|
python
|
{
"resource": ""
}
|
q24557
|
PythonTypesBackend._generate_custom_annotation_instance
|
train
|
def _generate_custom_annotation_instance(self, ns, annotation):
    """Render an expression instantiating an annotation type object.

    The generated call constructs the annotation's type class, passing
    each keyword argument recorded on the annotation as a literal
    Python value.
    """
    ctor_name = class_name_for_annotation_type(annotation.annotation_type, ns)
    kwarg_pairs = [
        (fmt_var(arg_name, True), self._generate_python_value(ns, arg_value))
        for arg_name, arg_value in annotation.kwargs.items()
    ]
    return generate_func_call(ctor_name, kwargs=kwarg_pairs)
|
python
|
{
"resource": ""
}
|
q24558
|
PythonTypesBackend._generate_enumerated_subtypes_tag_mapping
|
train
|
def _generate_enumerated_subtypes_tag_mapping(self, ns, data_type):
    """
    Generates attributes needed for serializing and deserializing structs
    with enumerated subtypes. These assignments are made after all the
    Python class definitions to ensure that all references exist.

    Args:
        ns (ApiNamespace): Namespace the struct belongs to.
        data_type (Struct): A struct that has enumerated subtypes.
    """
    assert data_type.has_enumerated_subtypes()
    # Generate _tag_to_subtype_ attribute: Map from string type tag to
    # the validator of the referenced subtype. Used on deserialization
    # to look up the subtype for a given tag.
    tag_to_subtype_items = []
    for tags, subtype in data_type.get_all_subtypes_with_tags():
        tag_to_subtype_items.append("{}: {}".format(
            tags,
            generate_validator_constructor(ns, subtype)))
    self.generate_multiline_list(
        tag_to_subtype_items,
        before='{}._tag_to_subtype_ = '.format(data_type.name),
        delim=('{', '}'),
        compact=False)
    # Generate _pytype_to_tag_and_subtype_: Map from Python class to a
    # tuple of (type tag, subtype). Used on serialization to lookup how a
    # class should be encoded based on the root struct's enumerated
    # subtypes.
    items = []
    for tag, subtype in data_type.get_all_subtypes_with_tags():
        items.append("{0}: ({1}, {2})".format(
            fmt_class(subtype.name),
            tag,
            generate_validator_constructor(ns, subtype)))
    self.generate_multiline_list(
        items,
        before='{}._pytype_to_tag_and_subtype_ = '.format(data_type.name),
        delim=('{', '}'),
        compact=False)
    # Generate _is_catch_all_ attribute:
    self.emit('{}._is_catch_all_ = {!r}'.format(
        data_type.name, data_type.is_catch_all()))
    self.emit()
|
python
|
{
"resource": ""
}
|
q24559
|
PythonTypesBackend._generate_union_class
|
train
|
def _generate_union_class(self, ns, data_type):
    # type: (ApiNamespace, Union) -> None
    """Defines a Python class that represents a union in Stone.

    Emits the class declaration and docstring (including ``:ivar``
    entries for documented members), then delegates member variables,
    variant creators, is_*/get_* helpers, annotation handling and
    __repr__ to the dedicated helper methods, and finally the
    module-level ``<Class>_validator`` assignment.
    """
    self.emit(self._class_declaration_for_type(ns, data_type))
    with self.indent():
        self.emit('"""')
        if data_type.doc:
            self.emit_wrapped_text(
                self.process_doc(data_type.doc, self._docf))
            self.emit()
        self.emit_wrapped_text(
            'This class acts as a tagged union. Only one of the ``is_*`` '
            'methods will return true. To get the associated value of a '
            'tag (if one exists), use the corresponding ``get_*`` method.')
        if data_type.has_documented_fields():
            self.emit()
        for field in data_type.fields:
            if not field.doc:
                continue
            # Void members document only the tag; valued members also
            # name their type. Cross-namespace references get the fully
            # namespaced variable form.
            if is_void_type(field.data_type):
                ivar_doc = ':ivar {}: {}'.format(
                    fmt_namespaced_var(ns.name, data_type.name, field.name),
                    self.process_doc(field.doc, self._docf))
            elif is_user_defined_type(field.data_type):
                if data_type.namespace.name != ns.name:
                    formatted_var = fmt_namespaced_var(ns.name, data_type.name, field.name)
                else:
                    formatted_var = '{}.{}'.format(data_type.name, fmt_var(field.name))
                ivar_doc = ':ivar {} {}: {}'.format(
                    fmt_class(field.data_type.name),
                    formatted_var,
                    self.process_doc(field.doc, self._docf))
            else:
                ivar_doc = ':ivar {} {}: {}'.format(
                    self._python_type_mapping(ns, field.data_type),
                    fmt_namespaced_var(ns.name, data_type.name, field.name), field.doc)
            self.emit_wrapped_text(ivar_doc, subsequent_prefix=' ')
        self.emit('"""')
        self.emit()
        self._generate_union_class_vars(data_type)
        self._generate_union_class_variant_creators(ns, data_type)
        self._generate_union_class_is_set(data_type)
        self._generate_union_class_get_helpers(ns, data_type)
        self._generate_union_class_custom_annotations(ns, data_type)
    self._generate_union_class_repr(data_type)
    self.emit('{0}_validator = bv.Union({0})'.format(
        class_name_for_data_type(data_type)
    ))
    self.emit()
|
python
|
{
"resource": ""
}
|
q24560
|
PythonTypesBackend._generate_union_class_vars
|
train
|
def _generate_union_class_vars(self, data_type):
    """Emit class-level variables for a generated union class.

    Writes the ``_catch_all`` attribute (the catch-all tag name, or
    None when this union has no parent and no catch-all), plus a None
    placeholder for every void member so IDEs can see attributes that
    are assigned for real after the class definition.
    """
    start_line = self.lineno
    catch_all = data_type.catch_all_field
    if catch_all:
        self.emit("_catch_all = '%s'" % catch_all.name)
    elif not data_type.parent_type:
        self.emit('_catch_all = None')
    # Stub out void members; real singletons are attached post-definition.
    for member in data_type.fields:
        if is_void_type(member.data_type):
            self.emit('# Attribute is overwritten below the class definition')
            self.emit('{} = None'.format(fmt_var(member.name)))
    # Only emit a trailing blank line if anything was written above.
    if start_line != self.lineno:
        self.emit()
|
python
|
{
"resource": ""
}
|
q24561
|
PythonTypesBackend._generate_union_class_reflection_attributes
|
train
|
def _generate_union_class_reflection_attributes(self, ns, data_type):
    """
    Adds a class attribute for each union member assigned to a validator.
    Also adds an attribute that is a map from tag names to validators.

    Args:
        ns (ApiNamespace): Namespace the union belongs to.
        data_type (Union): The union whose reflection attributes are
            emitted (after the class definition).
    """
    class_name = fmt_class(data_type.name)
    for field in data_type.fields:
        field_name = fmt_var(field.name)
        validator_name = generate_validator_constructor(
            ns, field.data_type)
        full_validator_name = '{}._{}_validator'.format(class_name, field_name)
        self.emit('{} = {}'.format(full_validator_name, validator_name))
        if field.redactor:
            self._generate_redactor(full_validator_name, field.redactor)
    # generate _all_fields_ for each omitted caller (and public)
    child_omitted_callers = data_type.get_all_omitted_callers()
    parent_omitted_callers = data_type.parent_type.get_all_omitted_callers() if \
        data_type.parent_type else set([])
    # One tagmap is emitted per omitted caller plus one public tagmap
    # (keyed by None in the loop below).
    all_omitted_callers = child_omitted_callers | parent_omitted_callers
    if len(all_omitted_callers) != 0:
        self.emit('{}._permissioned_tagmaps = {}'.format(class_name, all_omitted_callers))
    for omitted_caller in sorted(all_omitted_callers | {None}, key=str):
        is_public = omitted_caller is None
        tagmap_name = '_tagmap' if is_public else '_{}_tagmap'.format(omitted_caller)
        caller_in_parent = data_type.parent_type and (is_public or omitted_caller
                                                      in parent_omitted_callers)
        with self.block('{}.{} ='.format(class_name, tagmap_name)):
            for field in data_type.fields:
                if field.omitted_caller != omitted_caller:
                    continue
                var_name = fmt_var(field.name)
                validator_name = '{}._{}_validator'.format(class_name, var_name)
                self.emit("'{}': {},".format(var_name, validator_name))
        if caller_in_parent:
            # Inherit the parent's entries for this tagmap as well.
            self.emit('{0}.{1}.update({2}.{1})'.format(
                class_name, tagmap_name,
                class_name_for_data_type(data_type.parent_type, ns))
            )
    self.emit()
|
python
|
{
"resource": ""
}
|
q24562
|
PythonTypesBackend._generate_union_class_variant_creators
|
train
|
def _generate_union_class_variant_creators(self, ns, data_type):
    """
    Each non-symbol, non-any variant has a corresponding class method that
    can be used to construct a union with that variant selected.

    Args:
        ns (ApiNamespace): Namespace the union belongs to (used for
            type names in docstrings).
        data_type (Union): The union whose creators are emitted.
    """
    for field in data_type.fields:
        if not is_void_type(field.data_type):
            field_name = fmt_func(field.name)
            field_name_reserved_check = fmt_func(field.name, check_reserved=True)
            # Document against the wrapped type for nullable members.
            if is_nullable_type(field.data_type):
                field_dt = field.data_type.data_type
            else:
                field_dt = field.data_type
            self.emit('@classmethod')
            self.emit('def {}(cls, val):'.format(field_name_reserved_check))
            with self.indent():
                self.emit('"""')
                self.emit_wrapped_text(
                    'Create an instance of this class set to the ``%s`` '
                    'tag with value ``val``.' % field_name)
                self.emit()
                self.emit(':param {} val:'.format(
                    self._python_type_mapping(ns, field_dt)))
                self.emit(':rtype: {}'.format(
                    self._python_type_mapping(ns, data_type)))
                self.emit('"""')
                self.emit("return cls('{}', val)".format(field_name))
            self.emit()
|
python
|
{
"resource": ""
}
|
q24563
|
PythonTypesBackend._generate_union_class_get_helpers
|
train
|
def _generate_union_class_get_helpers(self, ns, data_type):
    """
    These are the getters used to access the value of a variant, once
    the tag has been switched on.

    Args:
        ns (ApiNamespace): Namespace the union belongs to (used for
            type names in docstrings).
        data_type (Union): The union whose ``get_*`` helpers are emitted.
    """
    for field in data_type.fields:
        field_name = fmt_func(field.name)
        # Void members carry no value, so they get no getter.
        if not is_void_type(field.data_type):
            # generate getter for field
            self.emit('def get_{}(self):'.format(field_name))
            with self.indent():
                if is_nullable_type(field.data_type):
                    field_dt = field.data_type.data_type
                else:
                    field_dt = field.data_type
                self.emit('"""')
                if field.doc:
                    self.emit_wrapped_text(
                        self.process_doc(field.doc, self._docf))
                    self.emit()
                self.emit("Only call this if :meth:`is_%s` is true." %
                          field_name)
                # Sphinx wants an extra line between the text and the
                # rtype declaration.
                self.emit()
                self.emit(':rtype: {}'.format(
                    self._python_type_mapping(ns, field_dt)))
                self.emit('"""')
                # Guard against reading the value while another tag is set.
                self.emit('if not self.is_{}():'.format(field_name))
                with self.indent():
                    self.emit(
                        'raise AttributeError("tag \'{}\' not set")'.format(
                            field_name))
                self.emit('return self._value')
            self.emit()
|
python
|
{
"resource": ""
}
|
q24564
|
PythonTypesBackend._generate_union_class_symbol_creators
|
train
|
def _generate_union_class_symbol_creators(self, data_type):
    """Emit post-class assignments instantiating each void variant.

    A union member with no value is represented by a singleton
    instance of the union class tagged with the member's name; those
    singletons can only be created after the class body is closed.
    """
    union_cls = fmt_class(data_type.name)
    start_line = self.lineno
    for member in data_type.fields:
        if not is_void_type(member.data_type):
            continue
        tag = fmt_func(member.name)
        self.emit("{0}.{1} = {0}('{1}')".format(union_cls, tag))
    # Blank separator only if at least one singleton was emitted.
    if start_line != self.lineno:
        self.emit()
|
python
|
{
"resource": ""
}
|
q24565
|
json_encode
|
train
|
def json_encode(data_type, obj, caller_permissions=None, alias_validators=None, old_style=False,
                should_redact=False):
    """Encode ``obj`` as a JSON string, guided by its ``data_type``.

    Args:
        data_type (Validator): Validator for obj.
        obj (object): Object to be serialized.
        caller_permissions (list): The list of raw-string caller
            permissions with which to serialize.
        alias_validators (Optional[Mapping[bv.Validator, Callable[[], None]]]):
            Custom validation functions. These must raise
            bv.ValidationError on failure.
        old_style (bool): Use the legacy union encoding.
        should_redact (bool): Apply field redactors while encoding.

    Returns:
        str: JSON-encoded object.

    Additional validation is performed here beyond what the objects do
    themselves:

    1. The passed in obj may not have been validated with data_type yet.
    2. If an object that should be a Struct was assigned to a field, its
       type has been validated, but the presence of all required fields
       hasn't been.
    3. If an object that should be a Union was assigned to a field, whether
       or not a tag has been set has not been validated.
    4. A list may have passed validation initially, but been mutated since.

    Example of serializing a struct to JSON:

    struct FileRef
       path String
       rev String

    > fr = FileRef()
    > fr.path = 'a/b/c'
    > fr.rev = '1234'
    > JsonEncoder.encode(fr)
    "{'path': 'a/b/c', 'rev': '1234'}"

    Example of serializing a union to JSON:

    union UploadMode
        add
        overwrite
        update FileRef

    > um = UploadMode()
    > um.set_add()
    > JsonEncoder.encode(um)
    '"add"'
    > um.update = fr
    > JsonEncoder.encode(um)
    "{'update': {'path': 'a/b/c', 'rev': '1234'}}"
    """
    # JSON (not msgpack) transport, so for_msgpack is always False here.
    return StoneToJsonSerializer(
        caller_permissions, alias_validators, False, old_style, should_redact,
    ).encode(data_type, obj)
|
python
|
{
"resource": ""
}
|
q24566
|
json_compat_obj_encode
|
train
|
def json_compat_obj_encode(data_type, obj, caller_permissions=None, alias_validators=None,
                           old_style=False, for_msgpack=False, should_redact=False):
    """Encode ``obj`` into a JSON-compatible Python structure.

    Args:
        data_type (Validator): Validator for obj.
        obj (object): Object to be serialized.
        caller_permissions (list): The list of raw-string caller
            permissions with which to serialize.
        for_msgpack (bool): Target msgpack instead of JSON.

    Returns:
        An object that when passed to json.dumps() will produce a string
        giving the JSON-encoded object.

    See json_encode() for additional information about validation.
    """
    return StoneToPythonPrimitiveSerializer(
        caller_permissions, alias_validators, for_msgpack, old_style,
        should_redact).encode(data_type, obj)
|
python
|
{
"resource": ""
}
|
q24567
|
json_decode
|
train
|
def json_decode(data_type, serialized_obj, caller_permissions=None,
                alias_validators=None, strict=True, old_style=False):
    """Decode a JSON string into a Python object — the reverse of
    json_encode.

    Args:
        data_type (Validator): Validator for serialized_obj.
        serialized_obj (str): The JSON string to deserialize.
        caller_permissions (list): The list of raw-string caller
            permissions with which to serialize.
        alias_validators (Optional[Mapping[bv.Validator, Callable[[], None]]]):
            Custom validation functions. These must raise
            bv.ValidationError on failure.
        strict (bool): If strict, then unknown struct fields will raise an
            error, and unknown union variants will raise an error even if
            a catch all field is specified. strict should only be used by
            a recipient of serialized JSON if it's guaranteed that its
            Stone specs are at least as recent as the senders it receives
            messages from.

    Returns:
        The returned object depends on the input data_type.
            - Boolean -> bool
            - Bytes -> bytes
            - Float -> float
            - Integer -> long
            - List -> list
            - Map -> dict
            - Nullable -> None or its wrapped type.
            - String -> unicode (PY2) or str (PY3)
            - Struct -> An instance of its definition attribute.
            - Timestamp -> datetime.datetime
            - Union -> An instance of its definition attribute.
    """
    try:
        decoded_obj = json.loads(serialized_obj)
    except ValueError:
        # Malformed JSON is reported through the same error type as any
        # other deserialization failure.
        raise bv.ValidationError('could not decode input as JSON')
    return json_compat_obj_decode(
        data_type, decoded_obj, caller_permissions=caller_permissions,
        alias_validators=alias_validators, strict=strict, old_style=old_style)
|
python
|
{
"resource": ""
}
|
q24568
|
json_compat_obj_decode
|
train
|
def json_compat_obj_decode(data_type, obj, caller_permissions=None,
                           alias_validators=None, strict=True,
                           old_style=False, for_msgpack=False):
    """
    Decodes a JSON-compatible object based on its data type into a
    representative Python object.

    Args:
        data_type (Validator): Validator for serialized_obj.
        obj: The JSON-compatible object to decode based on data_type.
        caller_permissions (list): The list of raw-string caller
            permissions with which to serialize.
        strict (bool): If strict, then unknown struct fields will raise an
            error, and unknown union variants will raise an error even if
            a catch all field is specified. See json_decode() for more.

    Returns:
        See json_decode().
    """
    decoder = PythonPrimitiveToStoneDecoder(caller_permissions,
                                            alias_validators, for_msgpack, old_style, strict)
    if not isinstance(data_type, bv.Primitive):
        return decoder.json_compat_obj_decode_helper(data_type, obj)
    # Primitives skip the recursive helper and convert directly.
    return decoder.make_stone_friendly(data_type, obj, True)
|
python
|
{
"resource": ""
}
|
q24569
|
StoneSerializerBase.encode_sub
|
train
|
def encode_sub(self, validator, value):
    # type: (bv.Validator, typing.Any) -> typing.Any
    """
    Callback intended to be called by other ``encode`` methods to
    delegate encoding of sub-values. Arguments have the same semantics
    as with the ``encode`` method.

    Picks a (validate, encode) pair based on the validator's concrete
    type, validates the value, then encodes it.
    """
    if isinstance(validator, bv.List):
        # Because Lists are mutable, we always validate them during
        # serialization
        validate_f = validator.validate # type: typing.Callable[[typing.Any], None]
        encode_f = self.encode_list # type: typing.Callable[[typing.Any, typing.Any], typing.Any] # noqa: E501
    elif isinstance(validator, bv.Map):
        # Also validate maps during serialization because they are also mutable
        validate_f = validator.validate
        encode_f = self.encode_map
    elif isinstance(validator, bv.Nullable):
        validate_f = validator.validate
        encode_f = self.encode_nullable
    elif isinstance(validator, bv.Primitive):
        validate_f = validator.validate
        encode_f = self.encode_primitive
    elif isinstance(validator, bv.Struct):
        if isinstance(validator, bv.StructTree):
            if self.caller_permissions.permissions:
                # Permission-aware validation when the caller supplied
                # permissions.
                def validate_with_permissions(val):
                    validator.validate_with_permissions(val, self.caller_permissions)
                validate_f = validate_with_permissions
            else:
                validate_f = validator.validate
            encode_f = self.encode_struct_tree
        else:
            # Fields are already validated on assignment
            if self.caller_permissions.permissions:
                def validate_with_permissions(val):
                    validator.validate_with_permissions(val, self.caller_permissions)
                validate_f = validate_with_permissions
            else:
                validate_f = validator.validate_type_only
            encode_f = self.encode_struct
    elif isinstance(validator, bv.Union):
        # Fields are already validated on assignment
        validate_f = validator.validate_type_only
        encode_f = self.encode_union
    else:
        raise bv.ValidationError('Unsupported data type {}'.format(type(validator).__name__))
    validate_f(value)
    return encode_f(validator, value)
|
python
|
{
"resource": ""
}
|
q24570
|
PythonPrimitiveToStoneDecoder.determine_struct_tree_subtype
|
train
|
def determine_struct_tree_subtype(self, data_type, obj):
    """
    Searches through the JSON-object-compatible dict using the data type
    definition to determine which of the enumerated subtypes `obj` is.

    Args:
        data_type (bv.StructTree): Validator for the base struct with
            enumerated subtypes.
        obj (dict): JSON-compatible dict carrying a '.tag' key.

    Returns:
        The validator for the matched leaf subtype, or ``data_type``
        itself when the tag is unknown, strict mode is off, and the
        base is a catch-all.

    Raises:
        bv.ValidationError: On a missing/non-string tag, a tag naming a
            non-leaf subtype, or an unknown tag that cannot fall back.
    """
    if '.tag' not in obj:
        raise bv.ValidationError("missing '.tag' key")
    if not isinstance(obj['.tag'], six.string_types):
        raise bv.ValidationError('expected string, got %s' %
                                 bv.generic_type_name(obj['.tag']),
                                 parent='.tag')
    # Find the subtype the tags refer to
    full_tags_tuple = (obj['.tag'],)
    if full_tags_tuple in data_type.definition._tag_to_subtype_:
        subtype = data_type.definition._tag_to_subtype_[full_tags_tuple]
        if isinstance(subtype, bv.StructTree):
            # Only leaf subtypes can be deserialized directly.
            raise bv.ValidationError("tag '%s' refers to non-leaf subtype" %
                                     ('.'.join(full_tags_tuple)))
        return subtype
    else:
        if self.strict:
            # In strict mode, the entirety of the tag hierarchy should
            # point to a known subtype.
            raise bv.ValidationError("unknown subtype '%s'" %
                                     '.'.join(full_tags_tuple))
        else:
            # If subtype was not found, use the base.
            if data_type.definition._is_catch_all_:
                return data_type
            else:
                raise bv.ValidationError(
                    "unknown subtype '%s' and '%s' is not a catch-all" %
                    ('.'.join(full_tags_tuple), data_type.definition.__name__))
|
python
|
{
"resource": ""
}
|
q24571
|
PythonPrimitiveToStoneDecoder.make_stone_friendly
|
train
|
def make_stone_friendly(self, data_type, val, validate):
    """
    Convert a Python object to a type that will pass validation by its
    validator.

    Validation by ``alias_validators`` is performed even if ``validate`` is
    false.

    Args:
        data_type (bv.Validator): Validator describing the target type.
        val: The JSON-compatible value to convert.
        validate (bool): Whether to run ``data_type``'s validator on the
            converted value (only applies to the generic branch).

    Returns:
        The converted value, or None for Void types.

    Raises:
        bv.ValidationError: If conversion or validation fails.
    """
    if isinstance(data_type, bv.Timestamp):
        try:
            ret = datetime.datetime.strptime(val, data_type.format)
        except (TypeError, ValueError) as e:
            raise bv.ValidationError(e.args[0])
    elif isinstance(data_type, bv.Bytes):
        if self.for_msgpack:
            # msgpack carries bytes natively; only re-encode text values.
            if isinstance(val, six.text_type):
                ret = val.encode('utf-8')
            else:
                ret = val
        else:
            try:
                ret = base64.b64decode(val)
            except (TypeError, ValueError):
                # Python 2 raises TypeError for malformed base64; Python 3
                # raises binascii.Error, which is a ValueError subclass.
                # Catch both so invalid input always surfaces as a
                # ValidationError rather than an unhandled exception.
                raise bv.ValidationError('invalid base64-encoded bytes')
    elif isinstance(data_type, bv.Void):
        if self.strict and val is not None:
            # In strict mode, void values must be transmitted as null.
            raise bv.ValidationError("expected null, got value")
        return None
    else:
        if validate:
            if self.caller_permissions.permissions:
                data_type.validate_with_permissions(val, self.caller_permissions)
            else:
                data_type.validate(val)
        ret = val
    if self.alias_validators is not None and data_type in self.alias_validators:
        self.alias_validators[data_type](ret)
    return ret
|
python
|
{
"resource": ""
}
|
q24572
|
fmt_camel
|
train
|
def fmt_camel(name):
    """Render ``name`` in lowerCamelCase.

    Word boundaries are detected by capitalization, dashes, and
    underscores (via ``split_words``).
    """
    parts = split_words(name)
    assert len(parts) > 0
    head, tail = parts[0], parts[1:]
    return head.lower() + ''.join(word.capitalize() for word in tail)
|
python
|
{
"resource": ""
}
|
q24573
|
check_route_name_conflict
|
train
|
def check_route_name_conflict(namespace):
    """Raise if two routes in ``namespace`` format to the same name.

    Route names are formatted (including versions) exactly as the
    backend emits them, so a collision here means the generated code
    would contain duplicate definitions.
    """
    seen = {}
    for route in namespace.routes:
        generated_name = fmt_func(route.name, version=route.version)
        previous = seen.get(generated_name)
        if previous is not None:
            raise RuntimeError(
                'There is a name conflict between {!r} and {!r}'.format(previous, route))
        seen[generated_name] = route
|
python
|
{
"resource": ""
}
|
q24574
|
generate_imports_for_referenced_namespaces
|
train
|
def generate_imports_for_referenced_namespaces(
        backend, namespace, insert_type_ignore=False):
    # type: (Backend, ApiNamespace, bool) -> None
    """
    Both the true Python backend and the Python PEP 484 Type Stub backend have
    to perform the same imports.

    Emits a try/except import block: a relative package import first,
    falling back to plain imports when the module is used outside a
    package.

    :param insert_type_ignore: add a MyPy type-ignore comment to the imports in
        the except: clause.
    """
    imported_namespaces = namespace.get_imported_namespaces(consider_annotation_types=True)
    if not imported_namespaces:
        # Nothing referenced; emit no import block at all.
        return
    type_ignore_comment = TYPE_IGNORE_COMMENT if insert_type_ignore else ""
    backend.emit('try:')
    with backend.indent():
        backend.emit('from . import (')
        with backend.indent():
            for ns in imported_namespaces:
                backend.emit(fmt_namespace(ns.name) + ',')
        backend.emit(')')
    backend.emit('except (ImportError, SystemError, ValueError):')
    # Fallback if imported from outside a package.
    with backend.indent():
        for ns in imported_namespaces:
            backend.emit('import {namespace_name}{type_ignore_comment}'.format(
                namespace_name=fmt_namespace(ns.name),
                type_ignore_comment=type_ignore_comment
            ))
    backend.emit()
|
python
|
{
"resource": ""
}
|
q24575
|
prefix_with_ns_if_necessary
|
train
|
def prefix_with_ns_if_necessary(name, name_ns, source_ns):
    # type: (typing.Text, ApiNamespace, ApiNamespace) -> typing.Text
    """Qualify ``name`` with its namespace when referenced externally.

    Within its own namespace (``source_ns == name_ns``) the bare name
    suffices; otherwise the result is ``<formatted name_ns>.<name>``.
    """
    if source_ns != name_ns:
        return '{}.{}'.format(fmt_namespace(name_ns.name), name)
    return name
|
python
|
{
"resource": ""
}
|
q24576
|
class_name_for_data_type
|
train
|
def class_name_for_data_type(data_type, ns=None):
    """Return the Python class name generated for a user-defined type.

    The class name mirrors the name in the spec. When ``ns`` is given
    and differs from the type's own namespace, the result is prefixed
    with the foreign namespace, e.g. ``foreign_ns.TypeName``.
    """
    assert is_user_defined_type(data_type) or is_alias(data_type), \
        'Expected composite type, got %r' % type(data_type)
    class_name = fmt_class(data_type.name)
    if not ns:
        return class_name
    return prefix_with_ns_if_necessary(class_name, data_type.namespace, ns)
|
python
|
{
"resource": ""
}
|
q24577
|
class_name_for_annotation_type
|
train
|
def class_name_for_annotation_type(annotation_type, ns=None):
    """Like ``class_name_for_data_type``, but for annotation types."""
    assert isinstance(annotation_type, AnnotationType)
    class_name = fmt_class(annotation_type.name)
    if not ns:
        return class_name
    return prefix_with_ns_if_necessary(
        class_name, annotation_type.namespace, ns)
|
python
|
{
"resource": ""
}
|
q24578
|
specs_to_ir
|
train
|
def specs_to_ir(specs, version='0.1b1', debug=False, route_whitelist_filter=None):
    """
    Converts a collection of Stone specifications into the intermediate
    representation used by Stone backends.

    The pipeline is Lexer -> Parser -> Semantic Analyzer -> IR Generator,
    structured as (1) Parser (Lexer embedded within) and (2) IR Generator
    (Semantic Analyzer embedded within).

    :type specs: List[Tuple[path: str, text: str]]
    :param specs: `path` is never accessed and is only used to report the
        location of a bad spec to the user. `spec` is the text contents of
        a spec (.stone) file.
    :raises: InvalidSpec
    :returns: stone.ir.Api
    """
    factory = ParserFactory(debug=debug)
    partial_asts = []
    for path, text in specs:
        logger.info('Parsing spec %s', path)
        parser = factory.get_parser()
        if debug:
            parser.test_lexing(text)
        partial_ast = parser.parse(text, path)
        if parser.got_errors_parsing():
            # TODO(kelkabany): Show more than one error at a time.
            msg, lineno, err_path = parser.get_errors()[0]
            raise InvalidSpec(msg, lineno, err_path)
        if len(partial_ast) == 0:
            logger.info('Empty spec: %s', path)
            continue
        partial_asts.append(partial_ast)
    generator = IRGenerator(partial_asts, version, debug=debug,
                            route_whitelist_filter=route_whitelist_filter)
    return generator.generate_IR()
|
python
|
{
"resource": ""
}
|
q24579
|
fmt_type
|
train
|
def fmt_type(data_type):
    """Return the JSDoc type annotation for ``data_type``.

    Structs with enumerated subtypes render as a JSDoc union covering
    every tagged subtype (plus the base type itself when it is a
    catch-all); everything else renders as a single type name.
    """
    if not (is_struct_type(data_type) and data_type.has_enumerated_subtypes()):
        return fmt_type_name(data_type)
    type_names = [fmt_type_name(subtype)
                  for _, subtype in data_type.get_all_subtypes_with_tags()]
    if data_type.is_catch_all():
        type_names.append(fmt_type_name(data_type))
    return fmt_jsdoc_union(type_names)
|
python
|
{
"resource": ""
}
|
q24580
|
TSDTypesBackend._parse_extra_args
|
train
|
def _parse_extra_args(self, api, extra_args_raw):
    """
    Parses raw --extra-arg JSON blobs supplied on the command line.

    Each blob must look like::

        {"match": ["<route attr key>", "<attr value>"],
         "arg_name": "...", "arg_type": "...", "arg_docstring": "..."}

    Args:
        api: The API description whose routes are scanned for matches.
        extra_args_raw: Iterable of raw JSON strings.

    Returns:
        Dict mapping route argument data types to lists of
        (arg_name, arg_type, arg_docstring) tuples for every route whose
        attributes match one of the blobs. Exits the process with an error
        message on malformed input.
    """
    extra_args = {}

    def invalid(msg, extra_arg_raw):
        # Report the malformed argument and abort; we cannot proceed with a
        # partially understood configuration.
        print('Invalid --extra-arg:%s: %s' % (msg, extra_arg_raw),
              file=sys.stderr)
        sys.exit(1)

    for extra_arg_raw in extra_args_raw:
        try:
            extra_arg = json.loads(extra_arg_raw)
        except ValueError as e:
            invalid(str(e), extra_arg_raw)

        # Validate the structure of the extra_arg JSON blob. Each failing
        # branch terminates the process via invalid().
        if 'match' not in extra_arg:
            invalid('No match key', extra_arg_raw)
        elif (not isinstance(extra_arg['match'], list) or
                len(extra_arg['match']) != 2):
            invalid('match key is not a list of two strings', extra_arg_raw)
        elif (not isinstance(extra_arg['match'][0], six.text_type) or
                not isinstance(extra_arg['match'][1], six.text_type)):
            # Bug fix: removed leftover debug print of the match value's
            # type that polluted stdout before exiting.
            invalid('match values are not strings', extra_arg_raw)
        elif 'arg_name' not in extra_arg:
            invalid('No arg_name key', extra_arg_raw)
        elif not isinstance(extra_arg['arg_name'], six.text_type):
            invalid('arg_name is not a string', extra_arg_raw)
        elif 'arg_type' not in extra_arg:
            invalid('No arg_type key', extra_arg_raw)
        elif not isinstance(extra_arg['arg_type'], six.text_type):
            invalid('arg_type is not a string', extra_arg_raw)
        elif ('arg_docstring' in extra_arg and
                not isinstance(extra_arg['arg_docstring'], six.text_type)):
            invalid('arg_docstring is not a string', extra_arg_raw)

        attr_key, attr_val = extra_arg['match'][0], extra_arg['match'][1]
        extra_args.setdefault(attr_key, {})[attr_val] = \
            (extra_arg['arg_name'], extra_arg['arg_type'],
             extra_arg.get('arg_docstring'))

    # Extra arguments, keyed on data type objects.
    extra_args_for_types = {}

    # Locate route argument data types whose route attributes match any
    # extra-arg rule.
    for namespace in api.namespaces.values():
        for route in namespace.routes:
            extra_parameters = []
            if is_user_defined_type(route.arg_data_type):
                for attr_key in route.attrs:
                    if attr_key not in extra_args:
                        continue
                    attr_val = route.attrs[attr_key]
                    if attr_val in extra_args[attr_key]:
                        extra_parameters.append(extra_args[attr_key][attr_val])
            if extra_parameters:
                extra_args_for_types[route.arg_data_type] = extra_parameters

    return extra_args_for_types
|
python
|
{
"resource": ""
}
|
q24581
|
TSDTypesBackend._generate_type
|
train
|
def _generate_type(self, data_type, indent_spaces, extra_args):
    """
    Emits the TypeScript declaration for a single stone type.

    Aliases, structs, and unions are dispatched to their dedicated
    generators; any other data type produces no output.
    """
    if is_alias(data_type):
        self._generate_alias_type(data_type)
        return
    if is_struct_type(data_type):
        self._generate_struct_type(data_type, indent_spaces, extra_args)
        return
    if is_union_type(data_type):
        self._generate_union_type(data_type, indent_spaces)
|
python
|
{
"resource": ""
}
|
q24582
|
TSDTypesBackend._generate_alias_type
|
train
|
def _generate_alias_type(self, alias_type):
    """
    Emits a TypeScript type alias for a stone alias.
    """
    ns = alias_type.namespace
    alias_name = fmt_type_name(alias_type, ns)
    target_name = fmt_type_name(alias_type.data_type, ns)
    self.emit('export type %s = %s;' % (alias_name, target_name))
    self.emit()
|
python
|
{
"resource": ""
}
|
q24583
|
TSDTypesBackend._generate_struct_type
|
train
|
def _generate_struct_type(self, struct_type, indent_spaces, extra_parameters):
    """
    Generates a TypeScript interface for a stone struct.

    Args:
        struct_type: The struct whose interface to emit.
        indent_spaces (int): Number of spaces per indentation level.
        extra_parameters: List of (name, type, docstring) tuples emitted as
            extra interface properties ahead of the struct's own fields.
    """
    namespace = struct_type.namespace
    if struct_type.doc:
        self._emit_tsdoc_header(struct_type.doc)
    parent_type = struct_type.parent_type
    extends_line = ' extends %s' % fmt_type_name(parent_type, namespace) if parent_type else ''
    self.emit('export interface %s%s {' % (fmt_type_name(struct_type, namespace), extends_line))
    with self.indent(dent=indent_spaces):
        for param_name, param_type, param_docstring in extra_parameters:
            if param_docstring:
                self._emit_tsdoc_header(param_docstring)
            self.emit('%s: %s;' % (param_name, param_type))

        for field in struct_type.fields:
            doc = field.doc
            field_type, nullable = unwrap_nullable(field.data_type)
            field_ts_type = fmt_type(field_type, namespace)
            # Translate nullable types and defaulted fields into optional
            # properties.
            optional = nullable or field.has_default
            if field.has_default:
                # doc may be None. If it is not empty, add newlines
                # before appending to it.
                doc = doc + '\n\n' if doc else ''
                # Bug fix: append the default-value note instead of
                # overwriting the field's own documentation.
                doc = doc + "Defaults to %s." % field.default

            if doc:
                self._emit_tsdoc_header(doc)
            field_name = '%s?' % field.name if optional else field.name
            self.emit('%s: %s;' % (field_name, field_ts_type))

    self.emit('}')
    self.emit()

    # Some structs can explicitly list their subtypes. These structs have a .tag field that
    # indicate which subtype they are, which is only present when a type reference is
    # ambiguous.
    # Emit a special interface that contains this extra field, and refer to it whenever we
    # encounter a reference to a type with enumerated subtypes.
    if struct_type.is_member_of_enumerated_subtypes_tree():
        if struct_type.has_enumerated_subtypes():
            # This struct is the parent to multiple subtypes. Determine all of the possible
            # values of the .tag property.
            tag_values = []
            for tags, _ in struct_type.get_all_subtypes_with_tags():
                for tag in tags:
                    tag_values.append('"%s"' % tag)

            tag_union = fmt_union(tag_values)
            self._emit_tsdoc_header('Reference to the %s polymorphic type. Contains a .tag '
                                    'property to let you discriminate between possible '
                                    'subtypes.' % fmt_type_name(struct_type, namespace))
            self.emit('export interface %s extends %s {' %
                      (fmt_polymorphic_type_reference(struct_type, namespace),
                       fmt_type_name(struct_type, namespace)))
            with self.indent(dent=indent_spaces):
                self._emit_tsdoc_header('Tag identifying the subtype variant.')
                self.emit('\'.tag\': %s;' % tag_union)
            self.emit('}')
            self.emit()
        else:
            # This struct is a particular subtype. Find the applicable .tag value from the
            # parent type, which may be an arbitrary number of steps up the inheritance
            # hierarchy.
            parent = struct_type.parent_type
            while not parent.has_enumerated_subtypes():
                parent = parent.parent_type

            # parent now contains the closest parent type in the inheritance hierarchy that has
            # enumerated subtypes. Determine which subtype this is.
            for subtype in parent.get_enumerated_subtypes():
                if subtype.data_type == struct_type:
                    self._emit_tsdoc_header('Reference to the %s type, identified by the '
                                            'value of the .tag property.' %
                                            fmt_type_name(struct_type, namespace))
                    self.emit('export interface %s extends %s {' %
                              (fmt_polymorphic_type_reference(struct_type, namespace),
                               fmt_type_name(struct_type, namespace)))
                    with self.indent(dent=indent_spaces):
                        self._emit_tsdoc_header('Tag identifying this subtype variant. This '
                                                'field is only present when needed to '
                                                'discriminate between multiple possible '
                                                'subtypes.')
                        self.emit_wrapped_text('\'.tag\': \'%s\';' % subtype.name)
                    self.emit('}')
                    self.emit()
                    break
|
python
|
{
"resource": ""
}
|
q24584
|
TSDTypesBackend._generate_union_type
|
train
|
def _generate_union_type(self, union_type, indent_spaces):
    """
    Generates a TypeScript interface for a stone union.

    Emits one interface per variant (each carrying a literal '.tag'
    discriminator property) followed by a type alias that unions all the
    variant interfaces, so TypeScript's tagged-union narrowing applies.
    """
    # Emit an interface for each variant. TypeScript 2.0 supports these tagged unions.
    # https://github.com/Microsoft/TypeScript/wiki/What%27s-new-in-TypeScript#tagged-union-types
    parent_type = union_type.parent_type
    namespace = union_type.namespace
    union_type_name = fmt_type_name(union_type, namespace)
    # Names of all variant interfaces; joined into the final type alias.
    variant_type_names = []
    if parent_type:
        # Extended unions also admit the parent union's variants.
        variant_type_names.append(fmt_type_name(parent_type, namespace))

    def _is_struct_without_enumerated_subtypes(data_type):
        """
        :param data_type: any data type.
        :return: True if the given data type is a struct which has no enumerated subtypes.
        """
        return is_struct_type(data_type) and (
            not data_type.has_enumerated_subtypes())

    for variant in union_type.fields:
        if variant.doc:
            self._emit_tsdoc_header(variant.doc)
        variant_name = '%s%s' % (union_type_name, fmt_pascal(variant.name))
        variant_type_names.append(variant_name)

        is_struct_without_enumerated_subtypes = _is_struct_without_enumerated_subtypes(
            variant.data_type)
        if is_struct_without_enumerated_subtypes:
            # Struct payloads without enumerated subtypes are flattened:
            # the variant interface extends the struct directly instead of
            # nesting it under a property.
            self.emit('export interface %s extends %s {' % (
                variant_name, fmt_type(variant.data_type, namespace)))
        else:
            self.emit('export interface %s {' % variant_name)

        with self.indent(dent=indent_spaces):
            # Since field contains non-alphanumeric character, we need to enclose
            # it in quotation marks.
            self.emit("'.tag': '%s';" % variant.name)
            if is_void_type(variant.data_type) is False and (
                not is_struct_without_enumerated_subtypes
            ):
                # Non-void, non-flattened payloads become a property named
                # after the variant.
                self.emit("%s: %s;" % (variant.name, fmt_type(variant.data_type, namespace)))
        self.emit('}')
        self.emit()

    if union_type.doc:
        self._emit_tsdoc_header(union_type.doc)
    self.emit('export type %s = %s;' % (union_type_name, ' | '.join(variant_type_names)))
    self.emit()
|
python
|
{
"resource": ""
}
|
q24585
|
CodeBackend.generate_multiline_list
|
train
|
def generate_multiline_list(
    self,
    items,               # type: typing.List[typing.Text]
    before='',           # type: typing.Text
    after='',            # type: typing.Text
    delim=('(', ')'),    # type: DelimTuple
    compact=True,        # type: bool
    sep=',',             # type: typing.Text
    skip_last_sep=False  # type: bool
):
    # type: (...) -> None
    """
    Given a list of items, emits one item per line.

    This is convenient for function prototypes and invocations, as well as
    for instantiating arrays, sets, and maps in some languages.

    TODO(kelkabany): A backend that uses tabs cannot be used with this
    if compact is false.

    Args:
        items (list[str]): Should contain the items to generate a list of.
        before (str): The string to come before the list of items.
        after (str): The string to follow the list of items.
        delim (str, str): The first element is added immediately following
            `before`. The second element is added prior to `after`.
        compact (bool): In compact mode, the enclosing parentheses are on
            the same lines as the first and last list item.
        sep (str): The string that follows each list item when compact is
            true. If compact is false, the separator is omitted for the
            last item.
        skip_last_sep (bool): When compact is false, whether the last line
            should have a trailing separator. Ignored when compact is true.
    """
    assert len(delim) == 2 and isinstance(delim[0], six.text_type) and \
        isinstance(delim[1], six.text_type), 'delim must be a tuple of two unicode strings.'

    # Degenerate cases fit on a single line.
    if len(items) == 0:
        self.emit(before + delim[0] + delim[1] + after)
        return
    if len(items) == 1:
        self.emit(before + delim[0] + items[0] + delim[1] + after)
        return

    if compact:
        # First item shares a line with `before` and the opening delimiter.
        self.emit(before + delim[0] + items[0] + sep)

        def emit_list(items):
            items = items[1:]
            for (i, item) in enumerate(items):
                if i == len(items) - 1:
                    self.emit(item + delim[1] + after)
                else:
                    self.emit(item + sep)

        if before or delim[0]:
            # Align continuation lines under the first item.
            with self.indent(len(before) + len(delim[0])):
                emit_list(items)
        else:
            emit_list(items)
    else:
        if before or delim[0]:
            self.emit(before + delim[0])
        with self.indent():
            for (i, item) in enumerate(items):
                if i == len(items) - 1 and skip_last_sep:
                    self.emit(item)
                else:
                    self.emit(item + sep)
        # Bug fix: the original had an unreachable `elif delim[1]:` branch
        # here -- `delim[1] or after` already covers every case where the
        # closing delimiter must be emitted.
        if delim[1] or after:
            self.emit(delim[1] + after)
|
python
|
{
"resource": ""
}
|
q24586
|
CodeBackend.block
|
train
|
def block(
    self,
    before='',  # type: typing.Text
    after='',  # type: typing.Text
    delim=('{', '}'),  # type: DelimTuple
    dent=None,  # type: typing.Optional[int]
    allman=False  # type: bool
):
    # type: (...) -> typing.Iterator[None]
    """
    A context manager that emits configurable lines before and after an
    indented block of text.

    This is convenient for class and function definitions in some
    languages.

    Args:
        before (str): The string to be output in the first line which is
            not indented..
        after (str): The string to be output in the last line which is
            not indented.
        delim (str, str): The first element is added immediately following
            `before` and a space. The second element is added prior to a
            space and then `after`.
        dent (int): The amount to indent the block. If none, the default
            indentation increment is used (four spaces or one tab).
        allman (bool): Indicates whether to use `Allman` style indentation,
            or the default `K&R` style. If there is no `before` string this
            is ignored. For more details about indent styles see
            http://en.wikipedia.org/wiki/Indent_style
    """
    # NOTE(review): this function yields once, so it is presumably wrapped
    # with @contextlib.contextmanager at the definition site -- confirm.
    assert len(delim) == 2, 'delim must be a tuple of length 2'
    assert (isinstance(delim[0], (six.text_type, type(None))) and
            isinstance(delim[1], (six.text_type, type(None)))), (
        'delim must be a tuple of two optional strings.')

    if before and not allman:
        # K&R style: the opening delimiter shares the line with `before`.
        if delim[0] is not None:
            self.emit('{} {}'.format(before, delim[0]))
        else:
            self.emit(before)
    else:
        # Allman style (or no `before`): opening delimiter on its own line.
        if before:
            self.emit(before)
        if delim[0] is not None:
            self.emit(delim[0])

    with self.indent(dent):
        # The caller emits the block body at the increased indentation.
        yield

    if delim[1] is not None:
        self.emit(delim[1] + after)
    else:
        self.emit(after)
|
python
|
{
"resource": ""
}
|
q24587
|
fmt_type_name
|
train
|
def fmt_type_name(data_type, inside_namespace=None):
    """
    Produces a TypeScript type name for the given data type.

    inside_namespace should be set to the namespace that the reference
    occurs in, or None if this parameter is not relevant.
    """
    if is_user_defined_type(data_type) or is_alias(data_type):
        # Qualify with the namespace name unless we are already inside it.
        if data_type.namespace == inside_namespace:
            return data_type.name
        return '%s.%s' % (data_type.namespace.name, data_type.name)
    name = _base_type_table.get(data_type.__class__, 'Object')
    if is_list_type(data_type):
        # Lists are parameterized by their element type.
        name += '<' + fmt_type(data_type.data_type, inside_namespace) + '>'
    return name
|
python
|
{
"resource": ""
}
|
q24588
|
fmt_type
|
train
|
def fmt_type(data_type, inside_namespace=None):
    """
    Returns a TypeScript type annotation for a data type.

    May contain a union of enumerated subtypes. inside_namespace should be
    set to the namespace that the type reference occurs in, or None if this
    parameter is not relevant.
    """
    if not (is_struct_type(data_type) and data_type.has_enumerated_subtypes()):
        return fmt_type_name(data_type, inside_namespace)
    # A struct with enumerated subtypes is referenced as the union of the
    # polymorphic references to each subtype, plus the parent itself when
    # it acts as a catch-all.
    refs = [fmt_polymorphic_type_reference(subtype, inside_namespace)
            for _, subtype in data_type.get_all_subtypes_with_tags()]
    if data_type.is_catch_all():
        refs.append(fmt_polymorphic_type_reference(data_type, inside_namespace))
    return fmt_union(refs)
|
python
|
{
"resource": ""
}
|
q24589
|
fmt_tag
|
train
|
def fmt_tag(cur_namespace, tag, val):
    """
    Processes a documentation reference and returns its TSDoc rendering.
    """
    if tag == 'type':
        # Fully qualify bare type names with the current namespace.
        if '.' in val or cur_namespace is None:
            return val
        return '%s.%s' % (cur_namespace.name, val)
    if tag == 'route':
        if ':' in val:
            name, version = val.split(':', 1)
            return fmt_func(name, int(version)) + "()"
        return fmt_func(val, 1) + "()"
    if tag == 'link':
        anchor, link = val.rsplit(' ', 1)
        # There's no way to have links in TSDoc, so simply use JSDoc's formatting.
        # It's entirely possible some editors support this.
        return '[%s]{@link %s}' % (anchor, link)
    if tag == 'val':
        # Value types seem to match JavaScript (true, false, null)
        return val
    if tag == 'field':
        return val
    raise RuntimeError('Unknown doc ref tag %r' % tag)
|
python
|
{
"resource": ""
}
|
q24590
|
parse_data_types_from_doc_ref
|
train
|
def parse_data_types_from_doc_ref(api, doc, namespace_context, ignore_missing_entries=False):
    """
    Given a documentation string, parse it and return all references to
    other data types. If there are references to routes, the data types of
    those routes are included as well.

    Args:
        - api: The API containing this doc ref.
        - doc: The documentation string to parse.
        - namespace_context: The namespace name relative to this documentation.
        - ignore_missing_entries: If set, references to nonexistent data
          types are skipped instead of raising an exception.

    Returns:
        - a list of referenced data types
    """
    data_types, routes_by_ns = parse_data_types_and_routes_from_doc_ref(
        api, doc, namespace_context, ignore_missing_entries=ignore_missing_entries)
    referenced = list(data_types)
    for ns_name, ns_routes in routes_by_ns.items():
        try:
            ns = api.namespaces[ns_name]
            for route in ns_routes:
                # Pull in the arg/result/error data types of each route.
                referenced.extend(ns.get_route_io_data_types_for_route(route))
        except KeyError:
            if not ignore_missing_entries:
                raise
    return referenced
|
python
|
{
"resource": ""
}
|
q24591
|
parse_route_name_and_version
|
train
|
def parse_route_name_and_version(route_repr):
    """
    Parse a route representation string and return the route name and
    version number.

    :param route_repr: Route representation string, e.g. "get_metadata"
        or "get_metadata:2".
    :return: A tuple containing route name and version number.
    :raises ValueError: If the version suffix is not an integer.
    """
    name, sep, raw_version = route_repr.partition(':')
    if not sep:
        # No explicit version; routes default to version 1.
        return route_repr, 1
    try:
        return name, int(raw_version)
    except ValueError:
        raise ValueError('Invalid route representation: {}'.format(route_repr))
|
python
|
{
"resource": ""
}
|
q24592
|
parse_data_types_and_routes_from_doc_ref
|
train
|
def parse_data_types_and_routes_from_doc_ref(
    api,
    doc,
    namespace_context,
    ignore_missing_entries=False
):
    """
    Given a documentation string, parse it and return all references to other
    data types and routes.

    Args:
        - api: The API containing this doc ref.
        - doc: The documentation string to parse.
        - namespace_context: The namespace name relative to this documentation.
        - ignore_missing_entries: If set, this will skip references to nonexistent data types instead
            of raising an exception.

    Returns:
        - a tuple of referenced data types and routes
    """
    assert doc is not None
    data_types = set()
    # Maps namespace name -> set of routes referenced within that namespace.
    routes = defaultdict(set)
    # doc_ref_re captures a 'tag' (type/route/field/...) and its 'val'.
    for match in doc_ref_re.finditer(doc):
        try:
            tag = match.group('tag')
            val = match.group('val')
            supplied_namespace = api.namespaces[namespace_context]
            if tag == 'field':
                if '.' in val:
                    # Qualified field reference: record its owning type.
                    type_name, __ = val.split('.', 1)
                    doc_type = supplied_namespace.data_type_by_name[type_name]
                    data_types.add(doc_type)
                else:
                    pass  # no action required, because we must be referencing the same object
            elif tag == 'route':
                if '.' in val:
                    # Reference into another namespace.
                    namespace_name, val = val.split('.', 1)
                    namespace = api.namespaces[namespace_name]
                else:
                    namespace = supplied_namespace
                try:
                    route_name, version = parse_route_name_and_version(val)
                except ValueError as ex:
                    # Re-raise as KeyError so the surrounding handler can
                    # honor ignore_missing_entries for bad route refs too.
                    raise KeyError(str(ex))
                route = namespace.routes_by_name[route_name].at_version[version]
                routes[namespace.name].add(route)
            elif tag == 'type':
                if '.' in val:
                    # Qualified type reference into another namespace.
                    namespace_name, val = val.split('.', 1)
                    doc_type = api.namespaces[namespace_name].data_type_by_name[val]
                    data_types.add(doc_type)
                else:
                    doc_type = supplied_namespace.data_type_by_name[val]
                    data_types.add(doc_type)
        except KeyError:
            # Reference to an unknown namespace/type/route.
            if not ignore_missing_entries:
                raise
    return data_types, routes
|
python
|
{
"resource": ""
}
|
q24593
|
IRGenerator.generate_IR
|
train
|
def generate_IR(self):
    """Parses the text of each spec and returns an API description. Returns
    None if an error was encountered during parsing."""
    raw_api = []
    # Phase 1: register each spec's namespace and its raw declarations.
    for partial_ast in self._partial_asts:
        namespace_ast_node = self._extract_namespace_ast_node(partial_ast)
        namespace = self.api.ensure_namespace(namespace_ast_node.name)
        base_name = self._get_base_name(namespace.name, namespace.name)
        self._item_by_canonical_name[base_name] = namespace_ast_node
        if namespace_ast_node.doc is not None:
            namespace.add_doc(namespace_ast_node.doc)
        raw_api.append((namespace, partial_ast))
        self._add_data_types_and_routes_to_api(namespace, partial_ast)
    # Phase 2: resolve cross-namespace imports and apply patch definitions.
    self._add_imports_to_env(raw_api)
    self._merge_patches()
    # Phase 3: semantic analysis -- fill in type/route details and examples.
    self._populate_type_attributes()
    self._populate_field_defaults()
    self._populate_enumerated_subtypes()
    self._populate_route_attributes()
    self._populate_examples()
    # Phase 4: validation of doc references and annotations.
    self._validate_doc_refs()
    self._validate_annotations()
    # Optionally restrict the output to a whitelist of routes.
    if self._routes is not None:
        self._filter_namespaces_by_route_whitelist()
    self.api.normalize()
    return self.api
|
python
|
{
"resource": ""
}
|
q24594
|
IRGenerator._extract_namespace_ast_node
|
train
|
def _extract_namespace_ast_node(self, desc):
    """
    Checks that the namespace is declared first in the spec, and that only
    one namespace is declared.

    Args:
        desc (List[stone.stone.parser.ASTNode]): All AST nodes in a spec
            file in the order they were defined.

    Return:
        stone.frontend.ast.AstNamespace: The namespace AST node.
    """
    if len(desc) == 0 or not isinstance(desc[0], AstNamespace):
        if self._debug:
            self._logger.info('Description: %r', desc)
        if len(desc) == 0:
            # Bug fix: an empty spec has no node to point the error at, so
            # the original `desc[0]` access raised IndexError here.
            raise InvalidSpec('First declaration in a stone must be '
                              'a namespace. Possibly caused by preceding '
                              'errors.', 1, None)
        raise InvalidSpec('First declaration in a stone must be '
                          'a namespace. Possibly caused by preceding '
                          'errors.', desc[0].lineno, desc[0].path)
    for item in desc[1:]:
        if isinstance(item, AstNamespace):
            # Bug fix: `item` is the AST node itself; the original indexed
            # it (`item[0].lineno`), which would fail at report time.
            raise InvalidSpec('Only one namespace declaration per file.',
                              item.lineno, item.path)
    return desc.pop(0)
|
python
|
{
"resource": ""
}
|
q24595
|
IRGenerator._add_imports_to_env
|
train
|
def _add_imports_to_env(self, raw_api):
"""
Scans raw parser output for import declarations. Checks if the imports
are valid, and then creates a reference to the namespace in the
environment.
Args:
raw_api (Tuple[Namespace, List[stone.stone.parser._Element]]):
Namespace paired with raw parser output.
"""
for namespace, desc in raw_api:
for item in desc:
if isinstance(item, AstImport):
if namespace.name == item.target:
raise InvalidSpec('Cannot import current namespace.',
item.lineno, item.path)
if item.target not in self.api.namespaces:
raise InvalidSpec(
'Namespace %s is not defined in any spec.' %
quote(item.target),
item.lineno, item.path)
env = self._get_or_create_env(namespace.name)
imported_env = self._get_or_create_env(item.target)
if namespace.name in imported_env:
# Block circular imports. The Python backend can't
# easily generate code for circular references.
raise InvalidSpec(
'Circular import of namespaces %s and %s '
'detected.' %
(quote(namespace.name), quote(item.target)),
item.lineno, item.path)
env[item.target] = imported_env
|
python
|
{
"resource": ""
}
|
q24596
|
IRGenerator._create_type
|
train
|
def _create_type(self, env, item):
    """Create a forward reference for a union or struct."""
    if item.name in env:
        # Duplicate symbol: point the user at the earlier definition.
        previous = env[item.name]
        raise InvalidSpec(
            'Symbol %s already defined (%s:%d).' %
            (quote(item.name), previous._ast_node.path,
             previous._ast_node.lineno), item.lineno, item.path)
    namespace = self.api.ensure_namespace(env.namespace_name)
    if isinstance(item, AstStructDef):
        try:
            api_type = Struct(name=item.name, namespace=namespace,
                              ast_node=item)
        except ParameterError as e:
            # Surface a bad struct declaration as a spec error with its
            # source location.
            raise InvalidSpec(
                'Bad declaration of %s: %s' % (quote(item.name), e.args[0]),
                item.lineno, item.path)
    elif isinstance(item, AstUnionDef):
        api_type = Union(
            name=item.name, namespace=namespace, ast_node=item,
            closed=item.closed)
    else:
        raise AssertionError('Unknown type definition %r' % type(item))
    env[item.name] = api_type
    return api_type
|
python
|
{
"resource": ""
}
|
q24597
|
IRGenerator._merge_patches
|
train
|
def _merge_patches(self):
"""Injects object patches into their original object definitions."""
for patched_item, patched_namespace in self._patch_data_by_canonical_name.values():
patched_item_base_name = self._get_base_name(patched_item.name, patched_namespace.name)
if patched_item_base_name not in self._item_by_canonical_name:
raise InvalidSpec('Patch {} must correspond to a pre-existing data_type.'.format(
quote(patched_item.name)), patched_item.lineno, patched_item.path)
existing_item = self._item_by_canonical_name[patched_item_base_name]
self._check_patch_type_mismatch(patched_item, existing_item)
if isinstance(patched_item, (AstStructPatch, AstUnionPatch)):
self._check_field_names_unique(existing_item, patched_item)
existing_item.fields += patched_item.fields
self._inject_patched_examples(existing_item, patched_item)
else:
raise AssertionError('Unknown Patch Object Type {}'.format(
patched_item.__class__.__name__))
|
python
|
{
"resource": ""
}
|
q24598
|
IRGenerator._check_patch_type_mismatch
|
train
|
def _check_patch_type_mismatch(self, patched_item, existing_item):
    """Enforces that each patch has a corresponding, already-defined data type."""
    def fail(data_type_name):
        # Point the user at both the patch and the mismatched original.
        error_msg = ('Type mismatch. Patch {} corresponds to pre-existing '
                     'data_type {} ({}:{}) that has type other than {}.')
        raise InvalidSpec(error_msg.format(
            quote(patched_item.name),
            quote(existing_item.name),
            existing_item.path,
            existing_item.lineno,
            quote(data_type_name)), patched_item.lineno, patched_item.path)

    if isinstance(patched_item, AstStructPatch):
        if not isinstance(existing_item, AstStructDef):
            fail('struct')
    elif isinstance(patched_item, AstUnionPatch):
        if not isinstance(existing_item, AstUnionDef):
            fail('union')
        elif existing_item.closed != patched_item.closed:
            # Open/closed mismatch between patch and original union.
            fail('union_closed' if existing_item.closed else 'union')
    else:
        raise AssertionError(
            'Unknown Patch Object Type {}'.format(patched_item.__class__.__name__))
|
python
|
{
"resource": ""
}
|
q24599
|
IRGenerator._check_field_names_unique
|
train
|
def _check_field_names_unique(self, existing_item, patched_item):
"""Enforces that patched fields don't already exist."""
existing_fields_by_name = {f.name: f for f in existing_item.fields}
for patched_field in patched_item.fields:
if patched_field.name in existing_fields_by_name.keys():
existing_field = existing_fields_by_name[patched_field.name]
raise InvalidSpec('Patched field {} overrides pre-existing field in {} ({}:{}).'
.format(quote(patched_field.name),
quote(patched_item.name),
existing_field.path,
existing_field.lineno), patched_field.lineno, patched_field.path)
|
python
|
{
"resource": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.