id
int32
0
252k
repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
75
19.8k
code_tokens
list
docstring
stringlengths
3
17.3k
docstring_tokens
list
sha
stringlengths
40
40
url
stringlengths
87
242
231,100
awslabs/aws-cfn-template-flip
cfn_tools/yaml_loader.py
multi_constructor
def multi_constructor(loader, tag_suffix, node):
    """
    Deal with !Ref style function format

    Short-form intrinsic function tags (e.g. ``!Ref``) are converted into
    their long-form mapping equivalents (e.g. ``{"Ref": ...}``), prefixing
    with ``Fn::`` unless the tag is in UNCONVERTED_SUFFIXES.
    """
    if tag_suffix not in UNCONVERTED_SUFFIXES:
        tag_suffix = FN_PREFIX + tag_suffix

    # Pick the value constructor based on the tag and node shape
    if tag_suffix == "Fn::GetAtt":
        value = construct_getatt(node)
    elif isinstance(node, yaml.ScalarNode):
        value = loader.construct_scalar(node)
    elif isinstance(node, yaml.SequenceNode):
        value = loader.construct_sequence(node)
    elif isinstance(node, yaml.MappingNode):
        value = loader.construct_mapping(node)
    else:
        raise Exception("Bad tag: !{}".format(tag_suffix))

    return ODict(((tag_suffix, value),))
python
def multi_constructor(loader, tag_suffix, node): """ Deal with !Ref style function format """ if tag_suffix not in UNCONVERTED_SUFFIXES: tag_suffix = "{}{}".format(FN_PREFIX, tag_suffix) constructor = None if tag_suffix == "Fn::GetAtt": constructor = construct_getatt elif isinstance(node, yaml.ScalarNode): constructor = loader.construct_scalar elif isinstance(node, yaml.SequenceNode): constructor = loader.construct_sequence elif isinstance(node, yaml.MappingNode): constructor = loader.construct_mapping else: raise Exception("Bad tag: !{}".format(tag_suffix)) return ODict(( (tag_suffix, constructor(node)), ))
[ "def", "multi_constructor", "(", "loader", ",", "tag_suffix", ",", "node", ")", ":", "if", "tag_suffix", "not", "in", "UNCONVERTED_SUFFIXES", ":", "tag_suffix", "=", "\"{}{}\"", ".", "format", "(", "FN_PREFIX", ",", "tag_suffix", ")", "constructor", "=", "None", "if", "tag_suffix", "==", "\"Fn::GetAtt\"", ":", "constructor", "=", "construct_getatt", "elif", "isinstance", "(", "node", ",", "yaml", ".", "ScalarNode", ")", ":", "constructor", "=", "loader", ".", "construct_scalar", "elif", "isinstance", "(", "node", ",", "yaml", ".", "SequenceNode", ")", ":", "constructor", "=", "loader", ".", "construct_sequence", "elif", "isinstance", "(", "node", ",", "yaml", ".", "MappingNode", ")", ":", "constructor", "=", "loader", ".", "construct_mapping", "else", ":", "raise", "Exception", "(", "\"Bad tag: !{}\"", ".", "format", "(", "tag_suffix", ")", ")", "return", "ODict", "(", "(", "(", "tag_suffix", ",", "constructor", "(", "node", ")", ")", ",", ")", ")" ]
Deal with !Ref style function format
[ "Deal", "with", "!Ref", "style", "function", "format" ]
837576bea243e3f5efb0a20b84802371272e2d33
https://github.com/awslabs/aws-cfn-template-flip/blob/837576bea243e3f5efb0a20b84802371272e2d33/cfn_tools/yaml_loader.py#L24-L47
231,101
awslabs/aws-cfn-template-flip
cfn_tools/yaml_loader.py
construct_getatt
def construct_getatt(node):
    """
    Reconstruct !GetAtt into a list

    A scalar node like "Resource.Attribute" is split on the FIRST dot only,
    since attribute names themselves may contain dots
    (e.g. "Resource.Outputs.Nested" -> ["Resource", "Outputs.Nested"]).
    A sequence node is returned as the list of its item values.

    :raises ValueError: if the node value is neither a string nor a list
    """
    # `six.text_type` is `str` on Python 3; using `str` directly removes
    # the dependency on six for this function.
    if isinstance(node.value, str):
        return node.value.split(".", 1)
    elif isinstance(node.value, list):
        return [s.value for s in node.value]
    else:
        raise ValueError("Unexpected node type: {}".format(type(node.value)))
python
def construct_getatt(node): """ Reconstruct !GetAtt into a list """ if isinstance(node.value, six.text_type): return node.value.split(".", 1) elif isinstance(node.value, list): return [s.value for s in node.value] else: raise ValueError("Unexpected node type: {}".format(type(node.value)))
[ "def", "construct_getatt", "(", "node", ")", ":", "if", "isinstance", "(", "node", ".", "value", ",", "six", ".", "text_type", ")", ":", "return", "node", ".", "value", ".", "split", "(", "\".\"", ",", "1", ")", "elif", "isinstance", "(", "node", ".", "value", ",", "list", ")", ":", "return", "[", "s", ".", "value", "for", "s", "in", "node", ".", "value", "]", "else", ":", "raise", "ValueError", "(", "\"Unexpected node type: {}\"", ".", "format", "(", "type", "(", "node", ".", "value", ")", ")", ")" ]
Reconstruct !GetAtt into a list
[ "Reconstruct", "!GetAtt", "into", "a", "list" ]
837576bea243e3f5efb0a20b84802371272e2d33
https://github.com/awslabs/aws-cfn-template-flip/blob/837576bea243e3f5efb0a20b84802371272e2d33/cfn_tools/yaml_loader.py#L50-L60
231,102
awslabs/aws-cfn-template-flip
cfn_tools/yaml_loader.py
construct_mapping
def construct_mapping(self, node, deep=False):
    """
    Use ODict for maps

    Builds each key and value via ``construct_object`` so nested
    structures are converted recursively, preserving key order.
    """
    result = ODict()
    for k_node, v_node in node.value:
        k = self.construct_object(k_node, deep=deep)
        result[k] = self.construct_object(v_node, deep=deep)
    return result
python
def construct_mapping(self, node, deep=False): """ Use ODict for maps """ mapping = ODict() for key_node, value_node in node.value: key = self.construct_object(key_node, deep=deep) value = self.construct_object(value_node, deep=deep) mapping[key] = value return mapping
[ "def", "construct_mapping", "(", "self", ",", "node", ",", "deep", "=", "False", ")", ":", "mapping", "=", "ODict", "(", ")", "for", "key_node", ",", "value_node", "in", "node", ".", "value", ":", "key", "=", "self", ".", "construct_object", "(", "key_node", ",", "deep", "=", "deep", ")", "value", "=", "self", ".", "construct_object", "(", "value_node", ",", "deep", "=", "deep", ")", "mapping", "[", "key", "]", "=", "value", "return", "mapping" ]
Use ODict for maps
[ "Use", "ODict", "for", "maps" ]
837576bea243e3f5efb0a20b84802371272e2d33
https://github.com/awslabs/aws-cfn-template-flip/blob/837576bea243e3f5efb0a20b84802371272e2d33/cfn_tools/yaml_loader.py#L63-L76
231,103
awslabs/aws-cfn-template-flip
cfn_flip/main.py
main
def main(ctx, **kwargs):
    """ AWS CloudFormation Template Flip is a tool that converts AWS CloudFormation templates between JSON and YAML formats, making use of the YAML format's short function syntax where possible. """
    # NOTE: the docstring doubles as the click help text, so it is kept verbatim.
    in_format = kwargs.pop('in_format')
    out_format = kwargs.pop('out_format') or kwargs.pop('out_flag')
    no_flip = kwargs.pop('no_flip')
    clean = kwargs.pop('clean')
    long_form = kwargs.pop('long')
    input_file = kwargs.pop('input')
    output_file = kwargs.pop('output')

    # Infer the input format from the file extension when not given explicitly
    if not in_format:
        source_name = input_file.name
        if source_name.endswith(".json"):
            in_format = "json"
        elif source_name.endswith((".yaml", ".yml")):
            in_format = "yaml"

    # Interactive invocation with no piped stdin: show help instead of blocking
    if input_file.name == "<stdin>" and sys.stdin.isatty():
        click.echo(ctx.get_help())
        ctx.exit()

    try:
        converted = flip(
            input_file.read(),
            in_format=in_format,
            out_format=out_format,
            clean_up=clean,
            no_flip=no_flip,
            long_form=long_form
        )
        output_file.write(converted)
    except Exception as e:
        # Surface any conversion failure as a clean CLI error
        raise click.ClickException("{}".format(e))
python
def main(ctx, **kwargs): """ AWS CloudFormation Template Flip is a tool that converts AWS CloudFormation templates between JSON and YAML formats, making use of the YAML format's short function syntax where possible. """ in_format = kwargs.pop('in_format') out_format = kwargs.pop('out_format') or kwargs.pop('out_flag') no_flip = kwargs.pop('no_flip') clean = kwargs.pop('clean') long_form = kwargs.pop('long') input_file = kwargs.pop('input') output_file = kwargs.pop('output') if not in_format: if input_file.name.endswith(".json"): in_format = "json" elif input_file.name.endswith(".yaml") or input_file.name.endswith(".yml"): in_format = "yaml" if input_file.name == "<stdin>" and sys.stdin.isatty(): click.echo(ctx.get_help()) ctx.exit() try: output_file.write(flip( input_file.read(), in_format=in_format, out_format=out_format, clean_up=clean, no_flip=no_flip, long_form=long_form )) except Exception as e: raise click.ClickException("{}".format(e))
[ "def", "main", "(", "ctx", ",", "*", "*", "kwargs", ")", ":", "in_format", "=", "kwargs", ".", "pop", "(", "'in_format'", ")", "out_format", "=", "kwargs", ".", "pop", "(", "'out_format'", ")", "or", "kwargs", ".", "pop", "(", "'out_flag'", ")", "no_flip", "=", "kwargs", ".", "pop", "(", "'no_flip'", ")", "clean", "=", "kwargs", ".", "pop", "(", "'clean'", ")", "long_form", "=", "kwargs", ".", "pop", "(", "'long'", ")", "input_file", "=", "kwargs", ".", "pop", "(", "'input'", ")", "output_file", "=", "kwargs", ".", "pop", "(", "'output'", ")", "if", "not", "in_format", ":", "if", "input_file", ".", "name", ".", "endswith", "(", "\".json\"", ")", ":", "in_format", "=", "\"json\"", "elif", "input_file", ".", "name", ".", "endswith", "(", "\".yaml\"", ")", "or", "input_file", ".", "name", ".", "endswith", "(", "\".yml\"", ")", ":", "in_format", "=", "\"yaml\"", "if", "input_file", ".", "name", "==", "\"<stdin>\"", "and", "sys", ".", "stdin", ".", "isatty", "(", ")", ":", "click", ".", "echo", "(", "ctx", ".", "get_help", "(", ")", ")", "ctx", ".", "exit", "(", ")", "try", ":", "output_file", ".", "write", "(", "flip", "(", "input_file", ".", "read", "(", ")", ",", "in_format", "=", "in_format", ",", "out_format", "=", "out_format", ",", "clean_up", "=", "clean", ",", "no_flip", "=", "no_flip", ",", "long_form", "=", "long_form", ")", ")", "except", "Exception", "as", "e", ":", "raise", "click", ".", "ClickException", "(", "\"{}\"", ".", "format", "(", "e", ")", ")" ]
AWS CloudFormation Template Flip is a tool that converts AWS CloudFormation templates between JSON and YAML formats, making use of the YAML format's short function syntax where possible.
[ "AWS", "CloudFormation", "Template", "Flip", "is", "a", "tool", "that", "converts", "AWS", "CloudFormation", "templates", "between", "JSON", "and", "YAML", "formats", "making", "use", "of", "the", "YAML", "format", "s", "short", "function", "syntax", "where", "possible", "." ]
837576bea243e3f5efb0a20b84802371272e2d33
https://github.com/awslabs/aws-cfn-template-flip/blob/837576bea243e3f5efb0a20b84802371272e2d33/cfn_flip/main.py#L31-L65
231,104
fugue/credstash
credstash-migrate-autoversion.py
updateVersions
def updateVersions(region="us-east-1", table="credential-store"):
    '''
    do a full-table scan of the credential-store, and update the version format of
    every credential if it is an integer

    Integer versions are rewritten as zero-padded strings (via
    ``credstash.paddedInt``); the re-keyed item is written before the old
    one is deleted so a failure between the two calls never loses data.
    '''
    dynamodb = boto3.resource('dynamodb', region_name=region)
    secrets = dynamodb.Table(table)

    # Paginate the scan: a single scan() call returns at most 1MB of items,
    # so follow LastEvaluatedKey until the whole table has been read.
    scan_params = dict(
        ProjectionExpression="#N, version, #K, contents, hmac",
        ExpressionAttributeNames={"#N": "name", "#K": "key"}
    )
    items = []
    response = secrets.scan(**scan_params)
    items.extend(response["Items"])
    while "LastEvaluatedKey" in response:
        response = secrets.scan(
            ExclusiveStartKey=response["LastEvaluatedKey"], **scan_params)
        items.extend(response["Items"])

    for old_item in items:
        if isInt(old_item['version']):
            new_item = copy.copy(old_item)
            new_item['version'] = credstash.paddedInt(new_item['version'])
            if new_item['version'] != old_item['version']:
                secrets.put_item(Item=new_item)
                secrets.delete_item(Key={'name': old_item['name'],
                                         'version': old_item['version']})
        else:
            # Fix: the original used a Python 2 print statement, which is a
            # SyntaxError on Python 3.
            print("Skipping item: %s, %s" % (old_item['name'], old_item['version']))
python
def updateVersions(region="us-east-1", table="credential-store"): ''' do a full-table scan of the credential-store, and update the version format of every credential if it is an integer ''' dynamodb = boto3.resource('dynamodb', region_name=region) secrets = dynamodb.Table(table) response = secrets.scan(ProjectionExpression="#N, version, #K, contents, hmac", ExpressionAttributeNames={"#N": "name", "#K": "key"}) items = response["Items"] for old_item in items: if isInt(old_item['version']): new_item = copy.copy(old_item) new_item['version'] = credstash.paddedInt(new_item['version']) if new_item['version'] != old_item['version']: secrets.put_item(Item=new_item) secrets.delete_item(Key={'name': old_item['name'], 'version': old_item['version']}) else: print "Skipping item: %s, %s" % (old_item['name'], old_item['version'])
[ "def", "updateVersions", "(", "region", "=", "\"us-east-1\"", ",", "table", "=", "\"credential-store\"", ")", ":", "dynamodb", "=", "boto3", ".", "resource", "(", "'dynamodb'", ",", "region_name", "=", "region", ")", "secrets", "=", "dynamodb", ".", "Table", "(", "table", ")", "response", "=", "secrets", ".", "scan", "(", "ProjectionExpression", "=", "\"#N, version, #K, contents, hmac\"", ",", "ExpressionAttributeNames", "=", "{", "\"#N\"", ":", "\"name\"", ",", "\"#K\"", ":", "\"key\"", "}", ")", "items", "=", "response", "[", "\"Items\"", "]", "for", "old_item", "in", "items", ":", "if", "isInt", "(", "old_item", "[", "'version'", "]", ")", ":", "new_item", "=", "copy", ".", "copy", "(", "old_item", ")", "new_item", "[", "'version'", "]", "=", "credstash", ".", "paddedInt", "(", "new_item", "[", "'version'", "]", ")", "if", "new_item", "[", "'version'", "]", "!=", "old_item", "[", "'version'", "]", ":", "secrets", ".", "put_item", "(", "Item", "=", "new_item", ")", "secrets", ".", "delete_item", "(", "Key", "=", "{", "'name'", ":", "old_item", "[", "'name'", "]", ",", "'version'", ":", "old_item", "[", "'version'", "]", "}", ")", "else", ":", "print", "\"Skipping item: %s, %s\"", "%", "(", "old_item", "[", "'name'", "]", ",", "old_item", "[", "'version'", "]", ")" ]
do a full-table scan of the credential-store, and update the version format of every credential if it is an integer
[ "do", "a", "full", "-", "table", "scan", "of", "the", "credential", "-", "store", "and", "update", "the", "version", "format", "of", "every", "credential", "if", "it", "is", "an", "integer" ]
56df8e051fc4c8d15d5e7e373e88bf5bc13f3346
https://github.com/fugue/credstash/blob/56df8e051fc4c8d15d5e7e373e88bf5bc13f3346/credstash-migrate-autoversion.py#L16-L37
231,105
fugue/credstash
credstash.py
paddedInt
def paddedInt(i):
    '''
    return a string that contains `i`, left-padded with 0's up to PAD_LEN digits

    Values already PAD_LEN digits or longer are returned unchanged
    (a negative pad count multiplies to the empty string).
    '''
    text = str(i)
    padding = "0" * (PAD_LEN - len(text))
    return padding + text
python
def paddedInt(i): ''' return a string that contains `i`, left-padded with 0's up to PAD_LEN digits ''' i_str = str(i) pad = PAD_LEN - len(i_str) return (pad * "0") + i_str
[ "def", "paddedInt", "(", "i", ")", ":", "i_str", "=", "str", "(", "i", ")", "pad", "=", "PAD_LEN", "-", "len", "(", "i_str", ")", "return", "(", "pad", "*", "\"0\"", ")", "+", "i_str" ]
return a string that contains `i`, left-padded with 0's up to PAD_LEN digits
[ "return", "a", "string", "that", "contains", "i", "left", "-", "padded", "with", "0", "s", "up", "to", "PAD_LEN", "digits" ]
56df8e051fc4c8d15d5e7e373e88bf5bc13f3346
https://github.com/fugue/credstash/blob/56df8e051fc4c8d15d5e7e373e88bf5bc13f3346/credstash.py#L207-L213
231,106
fugue/credstash
credstash.py
getHighestVersion
def getHighestVersion(name, region=None, table="credential-store", **kwargs):
    '''
    Return the highest version of `name` in the table

    Returns 0 when no credential with that name exists.
    '''
    session = get_session(**kwargs)
    secrets = session.resource('dynamodb', region_name=region).Table(table)

    # Newest-first query limited to one row yields the highest version;
    # ConsistentRead avoids stale reads right after a put.
    response = secrets.query(
        Limit=1,
        ScanIndexForward=False,
        ConsistentRead=True,
        KeyConditionExpression=boto3.dynamodb.conditions.Key("name").eq(name),
        ProjectionExpression="version",
    )

    if not response["Count"]:
        return 0
    return response["Items"][0]["version"]
python
def getHighestVersion(name, region=None, table="credential-store", **kwargs): ''' Return the highest version of `name` in the table ''' session = get_session(**kwargs) dynamodb = session.resource('dynamodb', region_name=region) secrets = dynamodb.Table(table) response = secrets.query(Limit=1, ScanIndexForward=False, ConsistentRead=True, KeyConditionExpression=boto3.dynamodb.conditions.Key( "name").eq(name), ProjectionExpression="version") if response["Count"] == 0: return 0 return response["Items"][0]["version"]
[ "def", "getHighestVersion", "(", "name", ",", "region", "=", "None", ",", "table", "=", "\"credential-store\"", ",", "*", "*", "kwargs", ")", ":", "session", "=", "get_session", "(", "*", "*", "kwargs", ")", "dynamodb", "=", "session", ".", "resource", "(", "'dynamodb'", ",", "region_name", "=", "region", ")", "secrets", "=", "dynamodb", ".", "Table", "(", "table", ")", "response", "=", "secrets", ".", "query", "(", "Limit", "=", "1", ",", "ScanIndexForward", "=", "False", ",", "ConsistentRead", "=", "True", ",", "KeyConditionExpression", "=", "boto3", ".", "dynamodb", ".", "conditions", ".", "Key", "(", "\"name\"", ")", ".", "eq", "(", "name", ")", ",", "ProjectionExpression", "=", "\"version\"", ")", "if", "response", "[", "\"Count\"", "]", "==", "0", ":", "return", "0", "return", "response", "[", "\"Items\"", "]", "[", "0", "]", "[", "\"version\"", "]" ]
Return the highest version of `name` in the table
[ "Return", "the", "highest", "version", "of", "name", "in", "the", "table" ]
56df8e051fc4c8d15d5e7e373e88bf5bc13f3346
https://github.com/fugue/credstash/blob/56df8e051fc4c8d15d5e7e373e88bf5bc13f3346/credstash.py#L216-L235
231,107
fugue/credstash
credstash.py
clean_fail
def clean_fail(func):
    '''
    A decorator to cleanly exit on a failed call to AWS.
    catch a `botocore.exceptions.ClientError` raised from an action.
    This sort of error is raised if you are targeting a region that
    isn't set up (see, `credstash setup`.
    '''
    from functools import wraps  # local import keeps the decorator self-contained

    # Fix: without functools.wraps the wrapper clobbered the decorated
    # function's __name__/__doc__, which breaks introspection and help text.
    @wraps(func)
    def func_wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except botocore.exceptions.ClientError as e:
            print(str(e), file=sys.stderr)
            sys.exit(1)
    return func_wrapper
python
def clean_fail(func): ''' A decorator to cleanly exit on a failed call to AWS. catch a `botocore.exceptions.ClientError` raised from an action. This sort of error is raised if you are targeting a region that isn't set up (see, `credstash setup`. ''' def func_wrapper(*args, **kwargs): try: return func(*args, **kwargs) except botocore.exceptions.ClientError as e: print(str(e), file=sys.stderr) sys.exit(1) return func_wrapper
[ "def", "clean_fail", "(", "func", ")", ":", "def", "func_wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "try", ":", "return", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "except", "botocore", ".", "exceptions", ".", "ClientError", "as", "e", ":", "print", "(", "str", "(", "e", ")", ",", "file", "=", "sys", ".", "stderr", ")", "sys", ".", "exit", "(", "1", ")", "return", "func_wrapper" ]
A decorator to cleanly exit on a failed call to AWS. catch a `botocore.exceptions.ClientError` raised from an action. This sort of error is raised if you are targeting a region that isn't set up (see, `credstash setup`.
[ "A", "decorator", "to", "cleanly", "exit", "on", "a", "failed", "call", "to", "AWS", ".", "catch", "a", "botocore", ".", "exceptions", ".", "ClientError", "raised", "from", "an", "action", ".", "This", "sort", "of", "error", "is", "raised", "if", "you", "are", "targeting", "a", "region", "that", "isn", "t", "set", "up", "(", "see", "credstash", "setup", "." ]
56df8e051fc4c8d15d5e7e373e88bf5bc13f3346
https://github.com/fugue/credstash/blob/56df8e051fc4c8d15d5e7e373e88bf5bc13f3346/credstash.py#L238-L251
231,108
fugue/credstash
credstash.py
listSecrets
def listSecrets(region=None, table="credential-store", **kwargs):
    '''
    do a full-table scan of the credential-store, and return the names and versions
    of every credential

    Follows LastEvaluatedKey so tables larger than one scan page (1MB)
    are read in full.
    '''
    session = get_session(**kwargs)
    secrets = session.resource('dynamodb', region_name=region).Table(table)

    items = []
    start_key = None
    while True:
        scan_args = {
            "ProjectionExpression": "#N, version, #C",
            "ExpressionAttributeNames": {"#N": "name", "#C": "comment"},
        }
        if start_key is not None:
            scan_args["ExclusiveStartKey"] = start_key
        response = secrets.scan(**scan_args)
        items.extend(response["Items"])
        start_key = response.get("LastEvaluatedKey")
        if not start_key:
            break
    return items
python
def listSecrets(region=None, table="credential-store", **kwargs): ''' do a full-table scan of the credential-store, and return the names and versions of every credential ''' session = get_session(**kwargs) dynamodb = session.resource('dynamodb', region_name=region) secrets = dynamodb.Table(table) last_evaluated_key = True items = [] while last_evaluated_key: params = dict( ProjectionExpression="#N, version, #C", ExpressionAttributeNames={"#N": "name", "#C": "comment"} ) if last_evaluated_key is not True: params['ExclusiveStartKey'] = last_evaluated_key response = secrets.scan(**params) last_evaluated_key = response.get('LastEvaluatedKey') # will set last evaluated key to a number items.extend(response['Items']) return items
[ "def", "listSecrets", "(", "region", "=", "None", ",", "table", "=", "\"credential-store\"", ",", "*", "*", "kwargs", ")", ":", "session", "=", "get_session", "(", "*", "*", "kwargs", ")", "dynamodb", "=", "session", ".", "resource", "(", "'dynamodb'", ",", "region_name", "=", "region", ")", "secrets", "=", "dynamodb", ".", "Table", "(", "table", ")", "last_evaluated_key", "=", "True", "items", "=", "[", "]", "while", "last_evaluated_key", ":", "params", "=", "dict", "(", "ProjectionExpression", "=", "\"#N, version, #C\"", ",", "ExpressionAttributeNames", "=", "{", "\"#N\"", ":", "\"name\"", ",", "\"#C\"", ":", "\"comment\"", "}", ")", "if", "last_evaluated_key", "is", "not", "True", ":", "params", "[", "'ExclusiveStartKey'", "]", "=", "last_evaluated_key", "response", "=", "secrets", ".", "scan", "(", "*", "*", "params", ")", "last_evaluated_key", "=", "response", ".", "get", "(", "'LastEvaluatedKey'", ")", "# will set last evaluated key to a number", "items", ".", "extend", "(", "response", "[", "'Items'", "]", ")", "return", "items" ]
do a full-table scan of the credential-store, and return the names and versions of every credential
[ "do", "a", "full", "-", "table", "scan", "of", "the", "credential", "-", "store", "and", "return", "the", "names", "and", "versions", "of", "every", "credential" ]
56df8e051fc4c8d15d5e7e373e88bf5bc13f3346
https://github.com/fugue/credstash/blob/56df8e051fc4c8d15d5e7e373e88bf5bc13f3346/credstash.py#L254-L280
231,109
fugue/credstash
credstash.py
putSecret
def putSecret(name, secret, version="", kms_key="alias/credstash",
              region=None, table="credential-store", context=None,
              digest=DEFAULT_DIGEST, comment="", **kwargs):
    '''
    put a secret called `name` into the secret-store, protected by the key kms_key

    The secret is sealed with the legacy AES-CTR scheme before storage;
    the conditional expression rejects overwriting an existing item.
    '''
    context = context or {}

    session = get_session(**kwargs)
    kms = session.client('kms', region_name=region)
    key_service = KeyService(kms, kms_key, context)

    sealed = seal_aes_ctr_legacy(key_service, secret, digest_method=digest)

    secrets = session.resource('dynamodb', region_name=region).Table(table)

    data = {'name': name, 'version': paddedInt(version)}
    if comment:
        data['comment'] = comment
    data.update(sealed)

    return secrets.put_item(Item=data,
                            ConditionExpression=Attr('name').not_exists())
python
def putSecret(name, secret, version="", kms_key="alias/credstash", region=None, table="credential-store", context=None, digest=DEFAULT_DIGEST, comment="", **kwargs): ''' put a secret called `name` into the secret-store, protected by the key kms_key ''' if not context: context = {} session = get_session(**kwargs) kms = session.client('kms', region_name=region) key_service = KeyService(kms, kms_key, context) sealed = seal_aes_ctr_legacy( key_service, secret, digest_method=digest, ) dynamodb = session.resource('dynamodb', region_name=region) secrets = dynamodb.Table(table) data = { 'name': name, 'version': paddedInt(version), } if comment: data['comment'] = comment data.update(sealed) return secrets.put_item(Item=data, ConditionExpression=Attr('name').not_exists())
[ "def", "putSecret", "(", "name", ",", "secret", ",", "version", "=", "\"\"", ",", "kms_key", "=", "\"alias/credstash\"", ",", "region", "=", "None", ",", "table", "=", "\"credential-store\"", ",", "context", "=", "None", ",", "digest", "=", "DEFAULT_DIGEST", ",", "comment", "=", "\"\"", ",", "*", "*", "kwargs", ")", ":", "if", "not", "context", ":", "context", "=", "{", "}", "session", "=", "get_session", "(", "*", "*", "kwargs", ")", "kms", "=", "session", ".", "client", "(", "'kms'", ",", "region_name", "=", "region", ")", "key_service", "=", "KeyService", "(", "kms", ",", "kms_key", ",", "context", ")", "sealed", "=", "seal_aes_ctr_legacy", "(", "key_service", ",", "secret", ",", "digest_method", "=", "digest", ",", ")", "dynamodb", "=", "session", ".", "resource", "(", "'dynamodb'", ",", "region_name", "=", "region", ")", "secrets", "=", "dynamodb", ".", "Table", "(", "table", ")", "data", "=", "{", "'name'", ":", "name", ",", "'version'", ":", "paddedInt", "(", "version", ")", ",", "}", "if", "comment", ":", "data", "[", "'comment'", "]", "=", "comment", "data", ".", "update", "(", "sealed", ")", "return", "secrets", ".", "put_item", "(", "Item", "=", "data", ",", "ConditionExpression", "=", "Attr", "(", "'name'", ")", ".", "not_exists", "(", ")", ")" ]
put a secret called `name` into the secret-store, protected by the key kms_key
[ "put", "a", "secret", "called", "name", "into", "the", "secret", "-", "store", "protected", "by", "the", "key", "kms_key" ]
56df8e051fc4c8d15d5e7e373e88bf5bc13f3346
https://github.com/fugue/credstash/blob/56df8e051fc4c8d15d5e7e373e88bf5bc13f3346/credstash.py#L283-L312
231,110
fugue/credstash
credstash.py
getAllSecrets
def getAllSecrets(version="", region=None, table="credential-store",
                  context=None, credential=None, session=None, **kwargs):
    '''
    fetch and decrypt all secrets

    When `credential` contains the wildcard character, only the names
    matching the pattern are fetched; otherwise every stored name is.
    Returns a dict mapping credential name to decrypted secret.
    '''
    if session is None:
        session = get_session(**kwargs)
    dynamodb = session.resource('dynamodb', region_name=region)
    kms = session.client('kms', region_name=region)
    secrets = listSecrets(region, table, **kwargs)

    # Only return the secrets that match the pattern in `credential`
    # This already works out of the box with the CLI get action,
    # but that action doesn't support wildcards when using as library
    if credential and WILDCARD_CHAR in credential:
        names = set(expand_wildcard(credential,
                                    [x["name"] for x in secrets]))
    else:
        names = set(x["name"] for x in secrets)

    # Fix: ThreadPool(0) raises ValueError (a Pool needs >= 1 workers), so
    # short-circuit when the store is empty or the wildcard matched nothing.
    if not names:
        return {}

    pool = ThreadPool(min(len(names), THREAD_POOL_MAX_SIZE))
    # Lambda parameter renamed from `credential`, which shadowed the
    # wildcard argument above.
    results = pool.map(
        lambda cred_name: getSecret(cred_name, version, region, table,
                                    context, dynamodb, kms, **kwargs),
        names)
    pool.close()
    pool.join()
    return dict(zip(names, results))
python
def getAllSecrets(version="", region=None, table="credential-store", context=None, credential=None, session=None, **kwargs): ''' fetch and decrypt all secrets ''' if session is None: session = get_session(**kwargs) dynamodb = session.resource('dynamodb', region_name=region) kms = session.client('kms', region_name=region) secrets = listSecrets(region, table, **kwargs) # Only return the secrets that match the pattern in `credential` # This already works out of the box with the CLI get action, # but that action doesn't support wildcards when using as library if credential and WILDCARD_CHAR in credential: names = set(expand_wildcard(credential, [x["name"] for x in secrets])) else: names = set(x["name"] for x in secrets) pool = ThreadPool(min(len(names), THREAD_POOL_MAX_SIZE)) results = pool.map( lambda credential: getSecret(credential, version, region, table, context, dynamodb, kms, **kwargs), names) pool.close() pool.join() return dict(zip(names, results))
[ "def", "getAllSecrets", "(", "version", "=", "\"\"", ",", "region", "=", "None", ",", "table", "=", "\"credential-store\"", ",", "context", "=", "None", ",", "credential", "=", "None", ",", "session", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "session", "is", "None", ":", "session", "=", "get_session", "(", "*", "*", "kwargs", ")", "dynamodb", "=", "session", ".", "resource", "(", "'dynamodb'", ",", "region_name", "=", "region", ")", "kms", "=", "session", ".", "client", "(", "'kms'", ",", "region_name", "=", "region", ")", "secrets", "=", "listSecrets", "(", "region", ",", "table", ",", "*", "*", "kwargs", ")", "# Only return the secrets that match the pattern in `credential`", "# This already works out of the box with the CLI get action,", "# but that action doesn't support wildcards when using as library", "if", "credential", "and", "WILDCARD_CHAR", "in", "credential", ":", "names", "=", "set", "(", "expand_wildcard", "(", "credential", ",", "[", "x", "[", "\"name\"", "]", "for", "x", "in", "secrets", "]", ")", ")", "else", ":", "names", "=", "set", "(", "x", "[", "\"name\"", "]", "for", "x", "in", "secrets", ")", "pool", "=", "ThreadPool", "(", "min", "(", "len", "(", "names", ")", ",", "THREAD_POOL_MAX_SIZE", ")", ")", "results", "=", "pool", ".", "map", "(", "lambda", "credential", ":", "getSecret", "(", "credential", ",", "version", ",", "region", ",", "table", ",", "context", ",", "dynamodb", ",", "kms", ",", "*", "*", "kwargs", ")", ",", "names", ")", "pool", ".", "close", "(", ")", "pool", ".", "join", "(", ")", "return", "dict", "(", "zip", "(", "names", ",", "results", ")", ")" ]
fetch and decrypt all secrets
[ "fetch", "and", "decrypt", "all", "secrets" ]
56df8e051fc4c8d15d5e7e373e88bf5bc13f3346
https://github.com/fugue/credstash/blob/56df8e051fc4c8d15d5e7e373e88bf5bc13f3346/credstash.py#L315-L342
231,111
fugue/credstash
credstash.py
getSecret
def getSecret(name, version="", region=None, table="credential-store",
              context=None, dynamodb=None, kms=None, **kwargs):
    '''
    fetch and decrypt the secret called `name`

    An empty `version` means "the highest stored version"; otherwise the
    exact (name, version) item is fetched.

    :raises ItemNotFound: if no matching item exists
    '''
    if not context:
        context = {}

    # Create AWS clients only when the caller didn't supply cached ones
    if dynamodb is None or kms is None:
        session = get_session(**kwargs)
        if dynamodb is None:
            dynamodb = session.resource('dynamodb', region_name=region)
        if kms is None:
            kms = session.client('kms', region_name=region)

    secrets = dynamodb.Table(table)

    if version == "":
        # do a consistent fetch of the credential with the highest version
        response = secrets.query(
            Limit=1,
            ScanIndexForward=False,
            ConsistentRead=True,
            KeyConditionExpression=boto3.dynamodb.conditions.Key("name").eq(name))
        if response["Count"] == 0:
            raise ItemNotFound("Item {'name': '%s'} couldn't be found." % name)
        material = response["Items"][0]
    else:
        response = secrets.get_item(Key={"name": name, "version": version})
        if "Item" not in response:
            raise ItemNotFound(
                "Item {'name': '%s', 'version': '%s'} couldn't be found." % (name, version))
        material = response["Item"]

    key_service = KeyService(kms, None, context)
    return open_aes_ctr_legacy(key_service, material)
python
def getSecret(name, version="", region=None, table="credential-store", context=None, dynamodb=None, kms=None, **kwargs): ''' fetch and decrypt the secret called `name` ''' if not context: context = {} # Can we cache if dynamodb is None or kms is None: session = get_session(**kwargs) if dynamodb is None: dynamodb = session.resource('dynamodb', region_name=region) if kms is None: kms = session.client('kms', region_name=region) secrets = dynamodb.Table(table) if version == "": # do a consistent fetch of the credential with the highest version response = secrets.query(Limit=1, ScanIndexForward=False, ConsistentRead=True, KeyConditionExpression=boto3.dynamodb.conditions.Key("name").eq(name)) if response["Count"] == 0: raise ItemNotFound("Item {'name': '%s'} couldn't be found." % name) material = response["Items"][0] else: response = secrets.get_item(Key={"name": name, "version": version}) if "Item" not in response: raise ItemNotFound( "Item {'name': '%s', 'version': '%s'} couldn't be found." % (name, version)) material = response["Item"] key_service = KeyService(kms, None, context) return open_aes_ctr_legacy(key_service, material)
[ "def", "getSecret", "(", "name", ",", "version", "=", "\"\"", ",", "region", "=", "None", ",", "table", "=", "\"credential-store\"", ",", "context", "=", "None", ",", "dynamodb", "=", "None", ",", "kms", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "not", "context", ":", "context", "=", "{", "}", "# Can we cache", "if", "dynamodb", "is", "None", "or", "kms", "is", "None", ":", "session", "=", "get_session", "(", "*", "*", "kwargs", ")", "if", "dynamodb", "is", "None", ":", "dynamodb", "=", "session", ".", "resource", "(", "'dynamodb'", ",", "region_name", "=", "region", ")", "if", "kms", "is", "None", ":", "kms", "=", "session", ".", "client", "(", "'kms'", ",", "region_name", "=", "region", ")", "secrets", "=", "dynamodb", ".", "Table", "(", "table", ")", "if", "version", "==", "\"\"", ":", "# do a consistent fetch of the credential with the highest version", "response", "=", "secrets", ".", "query", "(", "Limit", "=", "1", ",", "ScanIndexForward", "=", "False", ",", "ConsistentRead", "=", "True", ",", "KeyConditionExpression", "=", "boto3", ".", "dynamodb", ".", "conditions", ".", "Key", "(", "\"name\"", ")", ".", "eq", "(", "name", ")", ")", "if", "response", "[", "\"Count\"", "]", "==", "0", ":", "raise", "ItemNotFound", "(", "\"Item {'name': '%s'} couldn't be found.\"", "%", "name", ")", "material", "=", "response", "[", "\"Items\"", "]", "[", "0", "]", "else", ":", "response", "=", "secrets", ".", "get_item", "(", "Key", "=", "{", "\"name\"", ":", "name", ",", "\"version\"", ":", "version", "}", ")", "if", "\"Item\"", "not", "in", "response", ":", "raise", "ItemNotFound", "(", "\"Item {'name': '%s', 'version': '%s'} couldn't be found.\"", "%", "(", "name", ",", "version", ")", ")", "material", "=", "response", "[", "\"Item\"", "]", "key_service", "=", "KeyService", "(", "kms", ",", "None", ",", "context", ")", "return", "open_aes_ctr_legacy", "(", "key_service", ",", "material", ")" ]
fetch and decrypt the secret called `name`
[ "fetch", "and", "decrypt", "the", "secret", "called", "name" ]
56df8e051fc4c8d15d5e7e373e88bf5bc13f3346
https://github.com/fugue/credstash/blob/56df8e051fc4c8d15d5e7e373e88bf5bc13f3346/credstash.py#L468-L505
231,112
fugue/credstash
credstash.py
createDdbTable
def createDdbTable(region=None, table="credential-store", **kwargs): ''' create the secret store table in DDB in the specified region ''' session = get_session(**kwargs) dynamodb = session.resource("dynamodb", region_name=region) if table in (t.name for t in dynamodb.tables.all()): print("Credential Store table already exists") return print("Creating table...") dynamodb.create_table( TableName=table, KeySchema=[ { "AttributeName": "name", "KeyType": "HASH", }, { "AttributeName": "version", "KeyType": "RANGE", } ], AttributeDefinitions=[ { "AttributeName": "name", "AttributeType": "S", }, { "AttributeName": "version", "AttributeType": "S", }, ], ProvisionedThroughput={ "ReadCapacityUnits": 1, "WriteCapacityUnits": 1, } ) print("Waiting for table to be created...") client = session.client("dynamodb", region_name=region) response = client.describe_table(TableName=table) client.get_waiter("table_exists").wait(TableName=table) print("Adding tag...") client.tag_resource( ResourceArn=response["Table"]["TableArn"], Tags=[ { 'Key': "Name", 'Value': "credstash" }, ] ) print("Table has been created. " "Go read the README about how to create your KMS key")
python
def createDdbTable(region=None, table="credential-store", **kwargs): ''' create the secret store table in DDB in the specified region ''' session = get_session(**kwargs) dynamodb = session.resource("dynamodb", region_name=region) if table in (t.name for t in dynamodb.tables.all()): print("Credential Store table already exists") return print("Creating table...") dynamodb.create_table( TableName=table, KeySchema=[ { "AttributeName": "name", "KeyType": "HASH", }, { "AttributeName": "version", "KeyType": "RANGE", } ], AttributeDefinitions=[ { "AttributeName": "name", "AttributeType": "S", }, { "AttributeName": "version", "AttributeType": "S", }, ], ProvisionedThroughput={ "ReadCapacityUnits": 1, "WriteCapacityUnits": 1, } ) print("Waiting for table to be created...") client = session.client("dynamodb", region_name=region) response = client.describe_table(TableName=table) client.get_waiter("table_exists").wait(TableName=table) print("Adding tag...") client.tag_resource( ResourceArn=response["Table"]["TableArn"], Tags=[ { 'Key': "Name", 'Value': "credstash" }, ] ) print("Table has been created. " "Go read the README about how to create your KMS key")
[ "def", "createDdbTable", "(", "region", "=", "None", ",", "table", "=", "\"credential-store\"", ",", "*", "*", "kwargs", ")", ":", "session", "=", "get_session", "(", "*", "*", "kwargs", ")", "dynamodb", "=", "session", ".", "resource", "(", "\"dynamodb\"", ",", "region_name", "=", "region", ")", "if", "table", "in", "(", "t", ".", "name", "for", "t", "in", "dynamodb", ".", "tables", ".", "all", "(", ")", ")", ":", "print", "(", "\"Credential Store table already exists\"", ")", "return", "print", "(", "\"Creating table...\"", ")", "dynamodb", ".", "create_table", "(", "TableName", "=", "table", ",", "KeySchema", "=", "[", "{", "\"AttributeName\"", ":", "\"name\"", ",", "\"KeyType\"", ":", "\"HASH\"", ",", "}", ",", "{", "\"AttributeName\"", ":", "\"version\"", ",", "\"KeyType\"", ":", "\"RANGE\"", ",", "}", "]", ",", "AttributeDefinitions", "=", "[", "{", "\"AttributeName\"", ":", "\"name\"", ",", "\"AttributeType\"", ":", "\"S\"", ",", "}", ",", "{", "\"AttributeName\"", ":", "\"version\"", ",", "\"AttributeType\"", ":", "\"S\"", ",", "}", ",", "]", ",", "ProvisionedThroughput", "=", "{", "\"ReadCapacityUnits\"", ":", "1", ",", "\"WriteCapacityUnits\"", ":", "1", ",", "}", ")", "print", "(", "\"Waiting for table to be created...\"", ")", "client", "=", "session", ".", "client", "(", "\"dynamodb\"", ",", "region_name", "=", "region", ")", "response", "=", "client", ".", "describe_table", "(", "TableName", "=", "table", ")", "client", ".", "get_waiter", "(", "\"table_exists\"", ")", ".", "wait", "(", "TableName", "=", "table", ")", "print", "(", "\"Adding tag...\"", ")", "client", ".", "tag_resource", "(", "ResourceArn", "=", "response", "[", "\"Table\"", "]", "[", "\"TableArn\"", "]", ",", "Tags", "=", "[", "{", "'Key'", ":", "\"Name\"", ",", "'Value'", ":", "\"credstash\"", "}", ",", "]", ")", "print", "(", "\"Table has been created. \"", "\"Go read the README about how to create your KMS key\"", ")" ]
create the secret store table in DDB in the specified region
[ "create", "the", "secret", "store", "table", "in", "DDB", "in", "the", "specified", "region" ]
56df8e051fc4c8d15d5e7e373e88bf5bc13f3346
https://github.com/fugue/credstash/blob/56df8e051fc4c8d15d5e7e373e88bf5bc13f3346/credstash.py#L526-L585
231,113
fugue/credstash
credstash.py
seal_aes_ctr_legacy
def seal_aes_ctr_legacy(key_service, secret, digest_method=DEFAULT_DIGEST): """ Encrypts `secret` using the key service. You can decrypt with the companion method `open_aes_ctr_legacy`. """ # generate a a 64 byte key. # Half will be for data encryption, the other half for HMAC key, encoded_key = key_service.generate_key_data(64) ciphertext, hmac = _seal_aes_ctr( secret, key, LEGACY_NONCE, digest_method, ) return { 'key': b64encode(encoded_key).decode('utf-8'), 'contents': b64encode(ciphertext).decode('utf-8'), 'hmac': codecs.encode(hmac, "hex_codec"), 'digest': digest_method, }
python
def seal_aes_ctr_legacy(key_service, secret, digest_method=DEFAULT_DIGEST): """ Encrypts `secret` using the key service. You can decrypt with the companion method `open_aes_ctr_legacy`. """ # generate a a 64 byte key. # Half will be for data encryption, the other half for HMAC key, encoded_key = key_service.generate_key_data(64) ciphertext, hmac = _seal_aes_ctr( secret, key, LEGACY_NONCE, digest_method, ) return { 'key': b64encode(encoded_key).decode('utf-8'), 'contents': b64encode(ciphertext).decode('utf-8'), 'hmac': codecs.encode(hmac, "hex_codec"), 'digest': digest_method, }
[ "def", "seal_aes_ctr_legacy", "(", "key_service", ",", "secret", ",", "digest_method", "=", "DEFAULT_DIGEST", ")", ":", "# generate a a 64 byte key.", "# Half will be for data encryption, the other half for HMAC", "key", ",", "encoded_key", "=", "key_service", ".", "generate_key_data", "(", "64", ")", "ciphertext", ",", "hmac", "=", "_seal_aes_ctr", "(", "secret", ",", "key", ",", "LEGACY_NONCE", ",", "digest_method", ",", ")", "return", "{", "'key'", ":", "b64encode", "(", "encoded_key", ")", ".", "decode", "(", "'utf-8'", ")", ",", "'contents'", ":", "b64encode", "(", "ciphertext", ")", ".", "decode", "(", "'utf-8'", ")", ",", "'hmac'", ":", "codecs", ".", "encode", "(", "hmac", ",", "\"hex_codec\"", ")", ",", "'digest'", ":", "digest_method", ",", "}" ]
Encrypts `secret` using the key service. You can decrypt with the companion method `open_aes_ctr_legacy`.
[ "Encrypts", "secret", "using", "the", "key", "service", ".", "You", "can", "decrypt", "with", "the", "companion", "method", "open_aes_ctr_legacy", "." ]
56df8e051fc4c8d15d5e7e373e88bf5bc13f3346
https://github.com/fugue/credstash/blob/56df8e051fc4c8d15d5e7e373e88bf5bc13f3346/credstash.py#L625-L641
231,114
KristianOellegaard/django-health-check
health_check/contrib/rabbitmq/backends.py
RabbitMQHealthCheck.check_status
def check_status(self): """Check RabbitMQ service by opening and closing a broker channel.""" logger.debug("Checking for a broker_url on django settings...") broker_url = getattr(settings, "BROKER_URL", None) logger.debug("Got %s as the broker_url. Connecting to rabbit...", broker_url) logger.debug("Attempting to connect to rabbit...") try: # conn is used as a context to release opened resources later with Connection(broker_url) as conn: conn.connect() # exceptions may be raised upon calling connect except ConnectionRefusedError as e: self.add_error(ServiceUnavailable("Unable to connect to RabbitMQ: Connection was refused."), e) except AccessRefused as e: self.add_error(ServiceUnavailable("Unable to connect to RabbitMQ: Authentication error."), e) except IOError as e: self.add_error(ServiceUnavailable("IOError"), e) except BaseException as e: self.add_error(ServiceUnavailable("Unknown error"), e) else: logger.debug("Connection estabilished. RabbitMQ is healthy.")
python
def check_status(self): """Check RabbitMQ service by opening and closing a broker channel.""" logger.debug("Checking for a broker_url on django settings...") broker_url = getattr(settings, "BROKER_URL", None) logger.debug("Got %s as the broker_url. Connecting to rabbit...", broker_url) logger.debug("Attempting to connect to rabbit...") try: # conn is used as a context to release opened resources later with Connection(broker_url) as conn: conn.connect() # exceptions may be raised upon calling connect except ConnectionRefusedError as e: self.add_error(ServiceUnavailable("Unable to connect to RabbitMQ: Connection was refused."), e) except AccessRefused as e: self.add_error(ServiceUnavailable("Unable to connect to RabbitMQ: Authentication error."), e) except IOError as e: self.add_error(ServiceUnavailable("IOError"), e) except BaseException as e: self.add_error(ServiceUnavailable("Unknown error"), e) else: logger.debug("Connection estabilished. RabbitMQ is healthy.")
[ "def", "check_status", "(", "self", ")", ":", "logger", ".", "debug", "(", "\"Checking for a broker_url on django settings...\"", ")", "broker_url", "=", "getattr", "(", "settings", ",", "\"BROKER_URL\"", ",", "None", ")", "logger", ".", "debug", "(", "\"Got %s as the broker_url. Connecting to rabbit...\"", ",", "broker_url", ")", "logger", ".", "debug", "(", "\"Attempting to connect to rabbit...\"", ")", "try", ":", "# conn is used as a context to release opened resources later", "with", "Connection", "(", "broker_url", ")", "as", "conn", ":", "conn", ".", "connect", "(", ")", "# exceptions may be raised upon calling connect", "except", "ConnectionRefusedError", "as", "e", ":", "self", ".", "add_error", "(", "ServiceUnavailable", "(", "\"Unable to connect to RabbitMQ: Connection was refused.\"", ")", ",", "e", ")", "except", "AccessRefused", "as", "e", ":", "self", ".", "add_error", "(", "ServiceUnavailable", "(", "\"Unable to connect to RabbitMQ: Authentication error.\"", ")", ",", "e", ")", "except", "IOError", "as", "e", ":", "self", ".", "add_error", "(", "ServiceUnavailable", "(", "\"IOError\"", ")", ",", "e", ")", "except", "BaseException", "as", "e", ":", "self", ".", "add_error", "(", "ServiceUnavailable", "(", "\"Unknown error\"", ")", ",", "e", ")", "else", ":", "logger", ".", "debug", "(", "\"Connection estabilished. RabbitMQ is healthy.\"", ")" ]
Check RabbitMQ service by opening and closing a broker channel.
[ "Check", "RabbitMQ", "service", "by", "opening", "and", "closing", "a", "broker", "channel", "." ]
575f811b7224dba0ef5f113791ca6aab20711041
https://github.com/KristianOellegaard/django-health-check/blob/575f811b7224dba0ef5f113791ca6aab20711041/health_check/contrib/rabbitmq/backends.py#L16-L41
231,115
KristianOellegaard/django-health-check
health_check/views.py
MediaType.from_string
def from_string(cls, value): """Return single instance parsed from given accept header string.""" match = cls.pattern.search(value) if match is None: raise ValueError('"%s" is not a valid media type' % value) try: return cls(match.group('mime_type'), float(match.group('weight') or 1)) except ValueError: return cls(value)
python
def from_string(cls, value): """Return single instance parsed from given accept header string.""" match = cls.pattern.search(value) if match is None: raise ValueError('"%s" is not a valid media type' % value) try: return cls(match.group('mime_type'), float(match.group('weight') or 1)) except ValueError: return cls(value)
[ "def", "from_string", "(", "cls", ",", "value", ")", ":", "match", "=", "cls", ".", "pattern", ".", "search", "(", "value", ")", "if", "match", "is", "None", ":", "raise", "ValueError", "(", "'\"%s\" is not a valid media type'", "%", "value", ")", "try", ":", "return", "cls", "(", "match", ".", "group", "(", "'mime_type'", ")", ",", "float", "(", "match", ".", "group", "(", "'weight'", ")", "or", "1", ")", ")", "except", "ValueError", ":", "return", "cls", "(", "value", ")" ]
Return single instance parsed from given accept header string.
[ "Return", "single", "instance", "parsed", "from", "given", "accept", "header", "string", "." ]
575f811b7224dba0ef5f113791ca6aab20711041
https://github.com/KristianOellegaard/django-health-check/blob/575f811b7224dba0ef5f113791ca6aab20711041/health_check/views.py#L28-L36
231,116
KristianOellegaard/django-health-check
health_check/views.py
MediaType.parse_header
def parse_header(cls, value='*/*'): """Parse HTTP accept header and return instances sorted by weight.""" yield from sorted(( cls.from_string(token.strip()) for token in value.split(',') if token.strip() ), reverse=True)
python
def parse_header(cls, value='*/*'): """Parse HTTP accept header and return instances sorted by weight.""" yield from sorted(( cls.from_string(token.strip()) for token in value.split(',') if token.strip() ), reverse=True)
[ "def", "parse_header", "(", "cls", ",", "value", "=", "'*/*'", ")", ":", "yield", "from", "sorted", "(", "(", "cls", ".", "from_string", "(", "token", ".", "strip", "(", ")", ")", "for", "token", "in", "value", ".", "split", "(", "','", ")", "if", "token", ".", "strip", "(", ")", ")", ",", "reverse", "=", "True", ")" ]
Parse HTTP accept header and return instances sorted by weight.
[ "Parse", "HTTP", "accept", "header", "and", "return", "instances", "sorted", "by", "weight", "." ]
575f811b7224dba0ef5f113791ca6aab20711041
https://github.com/KristianOellegaard/django-health-check/blob/575f811b7224dba0ef5f113791ca6aab20711041/health_check/views.py#L39-L45
231,117
spulec/freezegun
freezegun/api.py
convert_to_timezone_naive
def convert_to_timezone_naive(time_to_freeze): """ Converts a potentially timezone-aware datetime to be a naive UTC datetime """ if time_to_freeze.tzinfo: time_to_freeze -= time_to_freeze.utcoffset() time_to_freeze = time_to_freeze.replace(tzinfo=None) return time_to_freeze
python
def convert_to_timezone_naive(time_to_freeze): """ Converts a potentially timezone-aware datetime to be a naive UTC datetime """ if time_to_freeze.tzinfo: time_to_freeze -= time_to_freeze.utcoffset() time_to_freeze = time_to_freeze.replace(tzinfo=None) return time_to_freeze
[ "def", "convert_to_timezone_naive", "(", "time_to_freeze", ")", ":", "if", "time_to_freeze", ".", "tzinfo", ":", "time_to_freeze", "-=", "time_to_freeze", ".", "utcoffset", "(", ")", "time_to_freeze", "=", "time_to_freeze", ".", "replace", "(", "tzinfo", "=", "None", ")", "return", "time_to_freeze" ]
Converts a potentially timezone-aware datetime to be a naive UTC datetime
[ "Converts", "a", "potentially", "timezone", "-", "aware", "datetime", "to", "be", "a", "naive", "UTC", "datetime" ]
9347d133f33f675c87bb0569d70d9d95abef737f
https://github.com/spulec/freezegun/blob/9347d133f33f675c87bb0569d70d9d95abef737f/freezegun/api.py#L364-L371
231,118
spulec/freezegun
freezegun/api.py
FrozenDateTimeFactory.move_to
def move_to(self, target_datetime): """Moves frozen date to the given ``target_datetime``""" target_datetime = _parse_time_to_freeze(target_datetime) delta = target_datetime - self.time_to_freeze self.tick(delta=delta)
python
def move_to(self, target_datetime): """Moves frozen date to the given ``target_datetime``""" target_datetime = _parse_time_to_freeze(target_datetime) delta = target_datetime - self.time_to_freeze self.tick(delta=delta)
[ "def", "move_to", "(", "self", ",", "target_datetime", ")", ":", "target_datetime", "=", "_parse_time_to_freeze", "(", "target_datetime", ")", "delta", "=", "target_datetime", "-", "self", ".", "time_to_freeze", "self", ".", "tick", "(", "delta", "=", "delta", ")" ]
Moves frozen date to the given ``target_datetime``
[ "Moves", "frozen", "date", "to", "the", "given", "target_datetime" ]
9347d133f33f675c87bb0569d70d9d95abef737f
https://github.com/spulec/freezegun/blob/9347d133f33f675c87bb0569d70d9d95abef737f/freezegun/api.py#L448-L452
231,119
mbj4668/pyang
pyang/plugins/jsonxsl.py
JsonXslPlugin.process_module
def process_module(self, yam): """Process data nodes, RPCs and notifications in a single module.""" for ann in yam.search(("ietf-yang-metadata", "annotation")): self.process_annotation(ann) for ch in yam.i_children[:]: if ch.keyword == "rpc": self.process_rpc(ch) elif ch.keyword == "notification": self.process_notification(ch) else: continue yam.i_children.remove(ch) self.process_children(yam, "//nc:*", 1)
python
def process_module(self, yam): """Process data nodes, RPCs and notifications in a single module.""" for ann in yam.search(("ietf-yang-metadata", "annotation")): self.process_annotation(ann) for ch in yam.i_children[:]: if ch.keyword == "rpc": self.process_rpc(ch) elif ch.keyword == "notification": self.process_notification(ch) else: continue yam.i_children.remove(ch) self.process_children(yam, "//nc:*", 1)
[ "def", "process_module", "(", "self", ",", "yam", ")", ":", "for", "ann", "in", "yam", ".", "search", "(", "(", "\"ietf-yang-metadata\"", ",", "\"annotation\"", ")", ")", ":", "self", ".", "process_annotation", "(", "ann", ")", "for", "ch", "in", "yam", ".", "i_children", "[", ":", "]", ":", "if", "ch", ".", "keyword", "==", "\"rpc\"", ":", "self", ".", "process_rpc", "(", "ch", ")", "elif", "ch", ".", "keyword", "==", "\"notification\"", ":", "self", ".", "process_notification", "(", "ch", ")", "else", ":", "continue", "yam", ".", "i_children", ".", "remove", "(", "ch", ")", "self", ".", "process_children", "(", "yam", ",", "\"//nc:*\"", ",", "1", ")" ]
Process data nodes, RPCs and notifications in a single module.
[ "Process", "data", "nodes", "RPCs", "and", "notifications", "in", "a", "single", "module", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/plugins/jsonxsl.py#L101-L113
231,120
mbj4668/pyang
pyang/plugins/jsonxsl.py
JsonXslPlugin.process_annotation
def process_annotation(self, ann): """Process metadata annotation.""" tmpl = self.xsl_template("@" + self.qname(ann)) ET.SubElement(tmpl, "param", name="level", select="0") ct = self.xsl_calltemplate("leaf", tmpl) ET.SubElement(ct, "with-param", name="level", select="$level") self.xsl_withparam("nsid", ann.i_module.i_modulename + ":", ct) self.type_param(ann, ct)
python
def process_annotation(self, ann): """Process metadata annotation.""" tmpl = self.xsl_template("@" + self.qname(ann)) ET.SubElement(tmpl, "param", name="level", select="0") ct = self.xsl_calltemplate("leaf", tmpl) ET.SubElement(ct, "with-param", name="level", select="$level") self.xsl_withparam("nsid", ann.i_module.i_modulename + ":", ct) self.type_param(ann, ct)
[ "def", "process_annotation", "(", "self", ",", "ann", ")", ":", "tmpl", "=", "self", ".", "xsl_template", "(", "\"@\"", "+", "self", ".", "qname", "(", "ann", ")", ")", "ET", ".", "SubElement", "(", "tmpl", ",", "\"param\"", ",", "name", "=", "\"level\"", ",", "select", "=", "\"0\"", ")", "ct", "=", "self", ".", "xsl_calltemplate", "(", "\"leaf\"", ",", "tmpl", ")", "ET", ".", "SubElement", "(", "ct", ",", "\"with-param\"", ",", "name", "=", "\"level\"", ",", "select", "=", "\"$level\"", ")", "self", ".", "xsl_withparam", "(", "\"nsid\"", ",", "ann", ".", "i_module", ".", "i_modulename", "+", "\":\"", ",", "ct", ")", "self", ".", "type_param", "(", "ann", ",", "ct", ")" ]
Process metadata annotation.
[ "Process", "metadata", "annotation", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/plugins/jsonxsl.py#L115-L122
231,121
mbj4668/pyang
pyang/plugins/jsonxsl.py
JsonXslPlugin.process_rpc
def process_rpc(self, rpc): """Process input and output parts of `rpc`.""" p = "/nc:rpc/" + self.qname(rpc) tmpl = self.xsl_template(p) inp = rpc.search_one("input") if inp is not None: ct = self.xsl_calltemplate("rpc-input", tmpl) self.xsl_withparam("nsid", rpc.i_module.i_modulename + ":", ct) self.process_children(inp, p, 2) outp = rpc.search_one("output") if outp is not None: self.process_children(outp, "/nc:rpc-reply", 1)
python
def process_rpc(self, rpc): """Process input and output parts of `rpc`.""" p = "/nc:rpc/" + self.qname(rpc) tmpl = self.xsl_template(p) inp = rpc.search_one("input") if inp is not None: ct = self.xsl_calltemplate("rpc-input", tmpl) self.xsl_withparam("nsid", rpc.i_module.i_modulename + ":", ct) self.process_children(inp, p, 2) outp = rpc.search_one("output") if outp is not None: self.process_children(outp, "/nc:rpc-reply", 1)
[ "def", "process_rpc", "(", "self", ",", "rpc", ")", ":", "p", "=", "\"/nc:rpc/\"", "+", "self", ".", "qname", "(", "rpc", ")", "tmpl", "=", "self", ".", "xsl_template", "(", "p", ")", "inp", "=", "rpc", ".", "search_one", "(", "\"input\"", ")", "if", "inp", "is", "not", "None", ":", "ct", "=", "self", ".", "xsl_calltemplate", "(", "\"rpc-input\"", ",", "tmpl", ")", "self", ".", "xsl_withparam", "(", "\"nsid\"", ",", "rpc", ".", "i_module", ".", "i_modulename", "+", "\":\"", ",", "ct", ")", "self", ".", "process_children", "(", "inp", ",", "p", ",", "2", ")", "outp", "=", "rpc", ".", "search_one", "(", "\"output\"", ")", "if", "outp", "is", "not", "None", ":", "self", ".", "process_children", "(", "outp", ",", "\"/nc:rpc-reply\"", ",", "1", ")" ]
Process input and output parts of `rpc`.
[ "Process", "input", "and", "output", "parts", "of", "rpc", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/plugins/jsonxsl.py#L124-L135
231,122
mbj4668/pyang
pyang/plugins/jsonxsl.py
JsonXslPlugin.process_notification
def process_notification(self, ntf): """Process event notification `ntf`.""" p = "/en:notification/" + self.qname(ntf) tmpl = self.xsl_template(p) ct = self.xsl_calltemplate("container", tmpl) self.xsl_withparam("level", "1", ct) if ntf.arg == "eventTime": # local name collision self.xsl_withparam("nsid", ntf.i_module.i_modulename + ":", ct) self.process_children(ntf, p, 2)
python
def process_notification(self, ntf): """Process event notification `ntf`.""" p = "/en:notification/" + self.qname(ntf) tmpl = self.xsl_template(p) ct = self.xsl_calltemplate("container", tmpl) self.xsl_withparam("level", "1", ct) if ntf.arg == "eventTime": # local name collision self.xsl_withparam("nsid", ntf.i_module.i_modulename + ":", ct) self.process_children(ntf, p, 2)
[ "def", "process_notification", "(", "self", ",", "ntf", ")", ":", "p", "=", "\"/en:notification/\"", "+", "self", ".", "qname", "(", "ntf", ")", "tmpl", "=", "self", ".", "xsl_template", "(", "p", ")", "ct", "=", "self", ".", "xsl_calltemplate", "(", "\"container\"", ",", "tmpl", ")", "self", ".", "xsl_withparam", "(", "\"level\"", ",", "\"1\"", ",", "ct", ")", "if", "ntf", ".", "arg", "==", "\"eventTime\"", ":", "# local name collision", "self", ".", "xsl_withparam", "(", "\"nsid\"", ",", "ntf", ".", "i_module", ".", "i_modulename", "+", "\":\"", ",", "ct", ")", "self", ".", "process_children", "(", "ntf", ",", "p", ",", "2", ")" ]
Process event notification `ntf`.
[ "Process", "event", "notification", "ntf", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/plugins/jsonxsl.py#L137-L145
231,123
mbj4668/pyang
pyang/plugins/jsonxsl.py
JsonXslPlugin.process_children
def process_children(self, node, path, level, parent=None): """Process all children of `node`. `path` is the Xpath of `node` which is used in the 'select' attribute of XSLT templates. """ data_parent = parent if parent else node chs = node.i_children for ch in chs: if ch.keyword in ["choice", "case"]: self.process_children(ch, path, level, node) continue p = path + "/" + self.qname(ch) tmpl = self.xsl_template(p) ct = self.xsl_calltemplate(ch.keyword, tmpl) self.xsl_withparam("level", "%d" % level, ct) if (data_parent.i_module is None or ch.i_module.i_modulename != data_parent.i_module.i_modulename): self.xsl_withparam("nsid", ch.i_module.i_modulename + ":", ct) if ch.keyword in ["leaf", "leaf-list"]: self.type_param(ch, ct) elif ch.keyword != "anyxml": offset = 2 if ch.keyword == "list" else 1 self.process_children(ch, p, level + offset)
python
def process_children(self, node, path, level, parent=None): """Process all children of `node`. `path` is the Xpath of `node` which is used in the 'select' attribute of XSLT templates. """ data_parent = parent if parent else node chs = node.i_children for ch in chs: if ch.keyword in ["choice", "case"]: self.process_children(ch, path, level, node) continue p = path + "/" + self.qname(ch) tmpl = self.xsl_template(p) ct = self.xsl_calltemplate(ch.keyword, tmpl) self.xsl_withparam("level", "%d" % level, ct) if (data_parent.i_module is None or ch.i_module.i_modulename != data_parent.i_module.i_modulename): self.xsl_withparam("nsid", ch.i_module.i_modulename + ":", ct) if ch.keyword in ["leaf", "leaf-list"]: self.type_param(ch, ct) elif ch.keyword != "anyxml": offset = 2 if ch.keyword == "list" else 1 self.process_children(ch, p, level + offset)
[ "def", "process_children", "(", "self", ",", "node", ",", "path", ",", "level", ",", "parent", "=", "None", ")", ":", "data_parent", "=", "parent", "if", "parent", "else", "node", "chs", "=", "node", ".", "i_children", "for", "ch", "in", "chs", ":", "if", "ch", ".", "keyword", "in", "[", "\"choice\"", ",", "\"case\"", "]", ":", "self", ".", "process_children", "(", "ch", ",", "path", ",", "level", ",", "node", ")", "continue", "p", "=", "path", "+", "\"/\"", "+", "self", ".", "qname", "(", "ch", ")", "tmpl", "=", "self", ".", "xsl_template", "(", "p", ")", "ct", "=", "self", ".", "xsl_calltemplate", "(", "ch", ".", "keyword", ",", "tmpl", ")", "self", ".", "xsl_withparam", "(", "\"level\"", ",", "\"%d\"", "%", "level", ",", "ct", ")", "if", "(", "data_parent", ".", "i_module", "is", "None", "or", "ch", ".", "i_module", ".", "i_modulename", "!=", "data_parent", ".", "i_module", ".", "i_modulename", ")", ":", "self", ".", "xsl_withparam", "(", "\"nsid\"", ",", "ch", ".", "i_module", ".", "i_modulename", "+", "\":\"", ",", "ct", ")", "if", "ch", ".", "keyword", "in", "[", "\"leaf\"", ",", "\"leaf-list\"", "]", ":", "self", ".", "type_param", "(", "ch", ",", "ct", ")", "elif", "ch", ".", "keyword", "!=", "\"anyxml\"", ":", "offset", "=", "2", "if", "ch", ".", "keyword", "==", "\"list\"", "else", "1", "self", ".", "process_children", "(", "ch", ",", "p", ",", "level", "+", "offset", ")" ]
Process all children of `node`. `path` is the Xpath of `node` which is used in the 'select' attribute of XSLT templates.
[ "Process", "all", "children", "of", "node", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/plugins/jsonxsl.py#L147-L170
231,124
mbj4668/pyang
pyang/plugins/jsonxsl.py
JsonXslPlugin.type_param
def type_param(self, node, ct): """Resolve the type of a leaf or leaf-list node for JSON. """ types = self.get_types(node) ftyp = types[0] if len(types) == 1: if ftyp in type_class: jtyp = type_class[ftyp] else: jtyp = "other" self.xsl_withparam("type", jtyp, ct) elif ftyp in ["string", "enumeration", "bits", "binary", "identityref", "instance-identifier"]: self.xsl_withparam("type", "string", ct) else: opts = [] for t in types: if t in union_class: ut = union_class[t] elif t in ["int64", "uint64"] or t.startswith("decimal@"): ut = t else: ut = "other" if ut not in opts: opts.append(ut) if ut == "other": break if ut == "decimal" and "integer" not in opts: opts.append("integer") self.xsl_withparam("type", "union", ct) self.xsl_withparam("options", ",".join(opts) + ",", ct)
python
def type_param(self, node, ct): """Resolve the type of a leaf or leaf-list node for JSON. """ types = self.get_types(node) ftyp = types[0] if len(types) == 1: if ftyp in type_class: jtyp = type_class[ftyp] else: jtyp = "other" self.xsl_withparam("type", jtyp, ct) elif ftyp in ["string", "enumeration", "bits", "binary", "identityref", "instance-identifier"]: self.xsl_withparam("type", "string", ct) else: opts = [] for t in types: if t in union_class: ut = union_class[t] elif t in ["int64", "uint64"] or t.startswith("decimal@"): ut = t else: ut = "other" if ut not in opts: opts.append(ut) if ut == "other": break if ut == "decimal" and "integer" not in opts: opts.append("integer") self.xsl_withparam("type", "union", ct) self.xsl_withparam("options", ",".join(opts) + ",", ct)
[ "def", "type_param", "(", "self", ",", "node", ",", "ct", ")", ":", "types", "=", "self", ".", "get_types", "(", "node", ")", "ftyp", "=", "types", "[", "0", "]", "if", "len", "(", "types", ")", "==", "1", ":", "if", "ftyp", "in", "type_class", ":", "jtyp", "=", "type_class", "[", "ftyp", "]", "else", ":", "jtyp", "=", "\"other\"", "self", ".", "xsl_withparam", "(", "\"type\"", ",", "jtyp", ",", "ct", ")", "elif", "ftyp", "in", "[", "\"string\"", ",", "\"enumeration\"", ",", "\"bits\"", ",", "\"binary\"", ",", "\"identityref\"", ",", "\"instance-identifier\"", "]", ":", "self", ".", "xsl_withparam", "(", "\"type\"", ",", "\"string\"", ",", "ct", ")", "else", ":", "opts", "=", "[", "]", "for", "t", "in", "types", ":", "if", "t", "in", "union_class", ":", "ut", "=", "union_class", "[", "t", "]", "elif", "t", "in", "[", "\"int64\"", ",", "\"uint64\"", "]", "or", "t", ".", "startswith", "(", "\"decimal@\"", ")", ":", "ut", "=", "t", "else", ":", "ut", "=", "\"other\"", "if", "ut", "not", "in", "opts", ":", "opts", ".", "append", "(", "ut", ")", "if", "ut", "==", "\"other\"", ":", "break", "if", "ut", "==", "\"decimal\"", "and", "\"integer\"", "not", "in", "opts", ":", "opts", ".", "append", "(", "\"integer\"", ")", "self", ".", "xsl_withparam", "(", "\"type\"", ",", "\"union\"", ",", "ct", ")", "self", ".", "xsl_withparam", "(", "\"options\"", ",", "\",\"", ".", "join", "(", "opts", ")", "+", "\",\"", ",", "ct", ")" ]
Resolve the type of a leaf or leaf-list node for JSON.
[ "Resolve", "the", "type", "of", "a", "leaf", "or", "leaf", "-", "list", "node", "for", "JSON", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/plugins/jsonxsl.py#L172-L201
231,125
mbj4668/pyang
pyang/plugins/jsonxsl.py
JsonXslPlugin.xsl_text
def xsl_text(self, text, parent): """Construct an XSLT 'text' element containing `text`. `parent` is this element's parent. """ res = ET.SubElement(parent, "text") res.text = text return res
python
def xsl_text(self, text, parent): """Construct an XSLT 'text' element containing `text`. `parent` is this element's parent. """ res = ET.SubElement(parent, "text") res.text = text return res
[ "def", "xsl_text", "(", "self", ",", "text", ",", "parent", ")", ":", "res", "=", "ET", ".", "SubElement", "(", "parent", ",", "\"text\"", ")", "res", ".", "text", "=", "text", "return", "res" ]
Construct an XSLT 'text' element containing `text`. `parent` is this element's parent.
[ "Construct", "an", "XSLT", "text", "element", "containing", "text", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/plugins/jsonxsl.py#L233-L240
231,126
mbj4668/pyang
pyang/plugins/jsonxsl.py
JsonXslPlugin.xsl_withparam
def xsl_withparam(self, name, value, parent): """Construct an XSLT 'with-param' element. `parent` is this element's parent. `name` is the parameter name. `value` is the parameter value. """ res = ET.SubElement(parent, "with-param", name=name) res.text = value return res
python
def xsl_withparam(self, name, value, parent): """Construct an XSLT 'with-param' element. `parent` is this element's parent. `name` is the parameter name. `value` is the parameter value. """ res = ET.SubElement(parent, "with-param", name=name) res.text = value return res
[ "def", "xsl_withparam", "(", "self", ",", "name", ",", "value", ",", "parent", ")", ":", "res", "=", "ET", ".", "SubElement", "(", "parent", ",", "\"with-param\"", ",", "name", "=", "name", ")", "res", ".", "text", "=", "value", "return", "res" ]
Construct an XSLT 'with-param' element. `parent` is this element's parent. `name` is the parameter name. `value` is the parameter value.
[ "Construct", "an", "XSLT", "with", "-", "param", "element", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/plugins/jsonxsl.py#L250-L259
231,127
mbj4668/pyang
pyang/translators/schemanode.py
SchemaNode.element
def element(cls, name, parent=None, interleave=None, occur=0): """Create an element node.""" node = cls("element", parent, interleave=interleave) node.attr["name"] = name node.occur = occur return node
python
def element(cls, name, parent=None, interleave=None, occur=0): """Create an element node.""" node = cls("element", parent, interleave=interleave) node.attr["name"] = name node.occur = occur return node
[ "def", "element", "(", "cls", ",", "name", ",", "parent", "=", "None", ",", "interleave", "=", "None", ",", "occur", "=", "0", ")", ":", "node", "=", "cls", "(", "\"element\"", ",", "parent", ",", "interleave", "=", "interleave", ")", "node", ".", "attr", "[", "\"name\"", "]", "=", "name", "node", ".", "occur", "=", "occur", "return", "node" ]
Create an element node.
[ "Create", "an", "element", "node", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/schemanode.py#L63-L68
231,128
mbj4668/pyang
pyang/translators/schemanode.py
SchemaNode.leaf_list
def leaf_list(cls, name, parent=None, interleave=None): """Create _list_ node for a leaf-list.""" node = cls("_list_", parent, interleave=interleave) node.attr["name"] = name node.keys = None node.minEl = "0" node.maxEl = None node.occur = 3 return node
python
def leaf_list(cls, name, parent=None, interleave=None): """Create _list_ node for a leaf-list.""" node = cls("_list_", parent, interleave=interleave) node.attr["name"] = name node.keys = None node.minEl = "0" node.maxEl = None node.occur = 3 return node
[ "def", "leaf_list", "(", "cls", ",", "name", ",", "parent", "=", "None", ",", "interleave", "=", "None", ")", ":", "node", "=", "cls", "(", "\"_list_\"", ",", "parent", ",", "interleave", "=", "interleave", ")", "node", ".", "attr", "[", "\"name\"", "]", "=", "name", "node", ".", "keys", "=", "None", "node", ".", "minEl", "=", "\"0\"", "node", ".", "maxEl", "=", "None", "node", ".", "occur", "=", "3", "return", "node" ]
Create _list_ node for a leaf-list.
[ "Create", "_list_", "node", "for", "a", "leaf", "-", "list", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/schemanode.py#L71-L79
231,129
mbj4668/pyang
pyang/translators/schemanode.py
SchemaNode.list
def list(cls, name, parent=None, interleave=None): """Create _list_ node for a list.""" node = cls.leaf_list(name, parent, interleave=interleave) node.keys = [] node.keymap = {} return node
python
def list(cls, name, parent=None, interleave=None): """Create _list_ node for a list.""" node = cls.leaf_list(name, parent, interleave=interleave) node.keys = [] node.keymap = {} return node
[ "def", "list", "(", "cls", ",", "name", ",", "parent", "=", "None", ",", "interleave", "=", "None", ")", ":", "node", "=", "cls", ".", "leaf_list", "(", "name", ",", "parent", ",", "interleave", "=", "interleave", ")", "node", ".", "keys", "=", "[", "]", "node", ".", "keymap", "=", "{", "}", "return", "node" ]
Create _list_ node for a list.
[ "Create", "_list_", "node", "for", "a", "list", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/schemanode.py#L82-L87
231,130
mbj4668/pyang
pyang/translators/schemanode.py
SchemaNode.choice
def choice(cls, parent=None, occur=0): """Create choice node.""" node = cls("choice", parent) node.occur = occur node.default_case = None return node
python
def choice(cls, parent=None, occur=0): """Create choice node.""" node = cls("choice", parent) node.occur = occur node.default_case = None return node
[ "def", "choice", "(", "cls", ",", "parent", "=", "None", ",", "occur", "=", "0", ")", ":", "node", "=", "cls", "(", "\"choice\"", ",", "parent", ")", "node", ".", "occur", "=", "occur", "node", ".", "default_case", "=", "None", "return", "node" ]
Create choice node.
[ "Create", "choice", "node", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/schemanode.py#L90-L95
231,131
mbj4668/pyang
pyang/translators/schemanode.py
SchemaNode.define
def define(cls, name, parent=None, interleave=False): """Create define node.""" node = cls("define", parent, interleave=interleave) node.occur = 0 node.attr["name"] = name return node
python
def define(cls, name, parent=None, interleave=False): """Create define node.""" node = cls("define", parent, interleave=interleave) node.occur = 0 node.attr["name"] = name return node
[ "def", "define", "(", "cls", ",", "name", ",", "parent", "=", "None", ",", "interleave", "=", "False", ")", ":", "node", "=", "cls", "(", "\"define\"", ",", "parent", ",", "interleave", "=", "interleave", ")", "node", ".", "occur", "=", "0", "node", ".", "attr", "[", "\"name\"", "]", "=", "name", "return", "node" ]
Create define node.
[ "Create", "define", "node", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/schemanode.py#L105-L110
231,132
mbj4668/pyang
pyang/translators/schemanode.py
SchemaNode.adjust_interleave
def adjust_interleave(self, interleave): """Inherit interleave status from parent if undefined.""" if interleave == None and self.parent: self.interleave = self.parent.interleave else: self.interleave = interleave
python
def adjust_interleave(self, interleave): """Inherit interleave status from parent if undefined.""" if interleave == None and self.parent: self.interleave = self.parent.interleave else: self.interleave = interleave
[ "def", "adjust_interleave", "(", "self", ",", "interleave", ")", ":", "if", "interleave", "==", "None", "and", "self", ".", "parent", ":", "self", ".", "interleave", "=", "self", ".", "parent", ".", "interleave", "else", ":", "self", ".", "interleave", "=", "interleave" ]
Inherit interleave status from parent if undefined.
[ "Inherit", "interleave", "status", "from", "parent", "if", "undefined", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/schemanode.py#L139-L144
231,133
mbj4668/pyang
pyang/translators/schemanode.py
SchemaNode.subnode
def subnode(self, node): """Make `node` receiver's child.""" self.children.append(node) node.parent = self node.adjust_interleave(node.interleave)
python
def subnode(self, node): """Make `node` receiver's child.""" self.children.append(node) node.parent = self node.adjust_interleave(node.interleave)
[ "def", "subnode", "(", "self", ",", "node", ")", ":", "self", ".", "children", ".", "append", "(", "node", ")", "node", ".", "parent", "=", "self", "node", ".", "adjust_interleave", "(", "node", ".", "interleave", ")" ]
Make `node` receiver's child.
[ "Make", "node", "receiver", "s", "child", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/schemanode.py#L146-L150
231,134
mbj4668/pyang
pyang/translators/schemanode.py
SchemaNode.annot
def annot(self, node): """Add `node` as an annotation of the receiver.""" self.annots.append(node) node.parent = self
python
def annot(self, node): """Add `node` as an annotation of the receiver.""" self.annots.append(node) node.parent = self
[ "def", "annot", "(", "self", ",", "node", ")", ":", "self", ".", "annots", ".", "append", "(", "node", ")", "node", ".", "parent", "=", "self" ]
Add `node` as an annotation of the receiver.
[ "Add", "node", "as", "an", "annotation", "of", "the", "receiver", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/schemanode.py#L152-L155
231,135
mbj4668/pyang
pyang/translators/schemanode.py
SchemaNode.start_tag
def start_tag(self, alt=None, empty=False): """Return XML start tag for the receiver.""" if alt: name = alt else: name = self.name result = "<" + name for it in self.attr: result += ' %s="%s"' % (it, escape(self.attr[it], {'"':"&quot;", '%': "%%"})) if empty: return result + "/>%s" else: return result + ">"
python
def start_tag(self, alt=None, empty=False): """Return XML start tag for the receiver.""" if alt: name = alt else: name = self.name result = "<" + name for it in self.attr: result += ' %s="%s"' % (it, escape(self.attr[it], {'"':"&quot;", '%': "%%"})) if empty: return result + "/>%s" else: return result + ">"
[ "def", "start_tag", "(", "self", ",", "alt", "=", "None", ",", "empty", "=", "False", ")", ":", "if", "alt", ":", "name", "=", "alt", "else", ":", "name", "=", "self", ".", "name", "result", "=", "\"<\"", "+", "name", "for", "it", "in", "self", ".", "attr", ":", "result", "+=", "' %s=\"%s\"'", "%", "(", "it", ",", "escape", "(", "self", ".", "attr", "[", "it", "]", ",", "{", "'\"'", ":", "\"&quot;\"", ",", "'%'", ":", "\"%%\"", "}", ")", ")", "if", "empty", ":", "return", "result", "+", "\"/>%s\"", "else", ":", "return", "result", "+", "\">\"" ]
Return XML start tag for the receiver.
[ "Return", "XML", "start", "tag", "for", "the", "receiver", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/schemanode.py#L162-L174
231,136
mbj4668/pyang
pyang/translators/schemanode.py
SchemaNode.end_tag
def end_tag(self, alt=None): """Return XML end tag for the receiver.""" if alt: name = alt else: name = self.name return "</" + name + ">"
python
def end_tag(self, alt=None): """Return XML end tag for the receiver.""" if alt: name = alt else: name = self.name return "</" + name + ">"
[ "def", "end_tag", "(", "self", ",", "alt", "=", "None", ")", ":", "if", "alt", ":", "name", "=", "alt", "else", ":", "name", "=", "self", ".", "name", "return", "\"</\"", "+", "name", "+", "\">\"" ]
Return XML end tag for the receiver.
[ "Return", "XML", "end", "tag", "for", "the", "receiver", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/schemanode.py#L176-L182
231,137
mbj4668/pyang
pyang/translators/schemanode.py
SchemaNode.serialize
def serialize(self, occur=None): """Return RELAX NG representation of the receiver and subtree. """ fmt = self.ser_format.get(self.name, SchemaNode._default_format) return fmt(self, occur) % (escape(self.text) + self.serialize_children())
python
def serialize(self, occur=None): """Return RELAX NG representation of the receiver and subtree. """ fmt = self.ser_format.get(self.name, SchemaNode._default_format) return fmt(self, occur) % (escape(self.text) + self.serialize_children())
[ "def", "serialize", "(", "self", ",", "occur", "=", "None", ")", ":", "fmt", "=", "self", ".", "ser_format", ".", "get", "(", "self", ".", "name", ",", "SchemaNode", ".", "_default_format", ")", "return", "fmt", "(", "self", ",", "occur", ")", "%", "(", "escape", "(", "self", ".", "text", ")", "+", "self", ".", "serialize_children", "(", ")", ")" ]
Return RELAX NG representation of the receiver and subtree.
[ "Return", "RELAX", "NG", "representation", "of", "the", "receiver", "and", "subtree", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/schemanode.py#L184-L189
231,138
mbj4668/pyang
pyang/translators/schemanode.py
SchemaNode._default_format
def _default_format(self, occur): """Return the default serialization format.""" if self.text or self.children: return self.start_tag() + "%s" + self.end_tag() return self.start_tag(empty=True)
python
def _default_format(self, occur): """Return the default serialization format.""" if self.text or self.children: return self.start_tag() + "%s" + self.end_tag() return self.start_tag(empty=True)
[ "def", "_default_format", "(", "self", ",", "occur", ")", ":", "if", "self", ".", "text", "or", "self", ".", "children", ":", "return", "self", ".", "start_tag", "(", ")", "+", "\"%s\"", "+", "self", ".", "end_tag", "(", ")", "return", "self", ".", "start_tag", "(", "empty", "=", "True", ")" ]
Return the default serialization format.
[ "Return", "the", "default", "serialization", "format", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/schemanode.py#L191-L195
231,139
mbj4668/pyang
pyang/translators/schemanode.py
SchemaNode._define_format
def _define_format(self, occur): """Return the serialization format for a define node.""" if hasattr(self, "default"): self.attr["nma:default"] = self.default middle = self._chorder() if self.rng_children() else "<empty/>%s" return (self.start_tag() + self.serialize_annots().replace("%", "%%") + middle + self.end_tag())
python
def _define_format(self, occur): """Return the serialization format for a define node.""" if hasattr(self, "default"): self.attr["nma:default"] = self.default middle = self._chorder() if self.rng_children() else "<empty/>%s" return (self.start_tag() + self.serialize_annots().replace("%", "%%") + middle + self.end_tag())
[ "def", "_define_format", "(", "self", ",", "occur", ")", ":", "if", "hasattr", "(", "self", ",", "\"default\"", ")", ":", "self", ".", "attr", "[", "\"nma:default\"", "]", "=", "self", ".", "default", "middle", "=", "self", ".", "_chorder", "(", ")", "if", "self", ".", "rng_children", "(", ")", "else", "\"<empty/>%s\"", "return", "(", "self", ".", "start_tag", "(", ")", "+", "self", ".", "serialize_annots", "(", ")", ".", "replace", "(", "\"%\"", ",", "\"%%\"", ")", "+", "middle", "+", "self", ".", "end_tag", "(", ")", ")" ]
Return the serialization format for a define node.
[ "Return", "the", "serialization", "format", "for", "a", "define", "node", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/schemanode.py#L201-L207
231,140
mbj4668/pyang
pyang/translators/schemanode.py
SchemaNode._element_format
def _element_format(self, occur): """Return the serialization format for an element node.""" if occur: occ = occur else: occ = self.occur if occ == 1: if hasattr(self, "default"): self.attr["nma:default"] = self.default else: self.attr["nma:implicit"] = "true" middle = self._chorder() if self.rng_children() else "<empty/>%s" fmt = (self.start_tag() + self.serialize_annots().replace("%", "%%") + middle + self.end_tag()) if (occ == 2 or self.parent.name == "choice" or self.parent.name == "case" and len(self.parent.children) == 1): return fmt else: return "<optional>" + fmt + "</optional>"
python
def _element_format(self, occur): """Return the serialization format for an element node.""" if occur: occ = occur else: occ = self.occur if occ == 1: if hasattr(self, "default"): self.attr["nma:default"] = self.default else: self.attr["nma:implicit"] = "true" middle = self._chorder() if self.rng_children() else "<empty/>%s" fmt = (self.start_tag() + self.serialize_annots().replace("%", "%%") + middle + self.end_tag()) if (occ == 2 or self.parent.name == "choice" or self.parent.name == "case" and len(self.parent.children) == 1): return fmt else: return "<optional>" + fmt + "</optional>"
[ "def", "_element_format", "(", "self", ",", "occur", ")", ":", "if", "occur", ":", "occ", "=", "occur", "else", ":", "occ", "=", "self", ".", "occur", "if", "occ", "==", "1", ":", "if", "hasattr", "(", "self", ",", "\"default\"", ")", ":", "self", ".", "attr", "[", "\"nma:default\"", "]", "=", "self", ".", "default", "else", ":", "self", ".", "attr", "[", "\"nma:implicit\"", "]", "=", "\"true\"", "middle", "=", "self", ".", "_chorder", "(", ")", "if", "self", ".", "rng_children", "(", ")", "else", "\"<empty/>%s\"", "fmt", "=", "(", "self", ".", "start_tag", "(", ")", "+", "self", ".", "serialize_annots", "(", ")", ".", "replace", "(", "\"%\"", ",", "\"%%\"", ")", "+", "middle", "+", "self", ".", "end_tag", "(", ")", ")", "if", "(", "occ", "==", "2", "or", "self", ".", "parent", ".", "name", "==", "\"choice\"", "or", "self", ".", "parent", ".", "name", "==", "\"case\"", "and", "len", "(", "self", ".", "parent", ".", "children", ")", "==", "1", ")", ":", "return", "fmt", "else", ":", "return", "\"<optional>\"", "+", "fmt", "+", "\"</optional>\"" ]
Return the serialization format for an element node.
[ "Return", "the", "serialization", "format", "for", "an", "element", "node", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/schemanode.py#L209-L227
231,141
mbj4668/pyang
pyang/translators/schemanode.py
SchemaNode._list_format
def _list_format(self, occur): """Return the serialization format for a _list_ node.""" if self.keys: self.attr["nma:key"] = " ".join(self.keys) keys = ''.join([self.keymap[k].serialize(occur=2) for k in self.keys]) else: keys = "" if self.maxEl: self.attr["nma:max-elements"] = self.maxEl if int(self.minEl) == 0: ord_ = "zeroOrMore" else: ord_ = "oneOrMore" if int(self.minEl) > 1: self.attr["nma:min-elements"] = self.minEl middle = self._chorder() if self.rng_children() else "<empty/>%s" return ("<" + ord_ + ">" + self.start_tag("element") + (self.serialize_annots() + keys).replace("%", "%%") + middle + self.end_tag("element") + "</" + ord_ + ">")
python
def _list_format(self, occur): """Return the serialization format for a _list_ node.""" if self.keys: self.attr["nma:key"] = " ".join(self.keys) keys = ''.join([self.keymap[k].serialize(occur=2) for k in self.keys]) else: keys = "" if self.maxEl: self.attr["nma:max-elements"] = self.maxEl if int(self.minEl) == 0: ord_ = "zeroOrMore" else: ord_ = "oneOrMore" if int(self.minEl) > 1: self.attr["nma:min-elements"] = self.minEl middle = self._chorder() if self.rng_children() else "<empty/>%s" return ("<" + ord_ + ">" + self.start_tag("element") + (self.serialize_annots() + keys).replace("%", "%%") + middle + self.end_tag("element") + "</" + ord_ + ">")
[ "def", "_list_format", "(", "self", ",", "occur", ")", ":", "if", "self", ".", "keys", ":", "self", ".", "attr", "[", "\"nma:key\"", "]", "=", "\" \"", ".", "join", "(", "self", ".", "keys", ")", "keys", "=", "''", ".", "join", "(", "[", "self", ".", "keymap", "[", "k", "]", ".", "serialize", "(", "occur", "=", "2", ")", "for", "k", "in", "self", ".", "keys", "]", ")", "else", ":", "keys", "=", "\"\"", "if", "self", ".", "maxEl", ":", "self", ".", "attr", "[", "\"nma:max-elements\"", "]", "=", "self", ".", "maxEl", "if", "int", "(", "self", ".", "minEl", ")", "==", "0", ":", "ord_", "=", "\"zeroOrMore\"", "else", ":", "ord_", "=", "\"oneOrMore\"", "if", "int", "(", "self", ".", "minEl", ")", ">", "1", ":", "self", ".", "attr", "[", "\"nma:min-elements\"", "]", "=", "self", ".", "minEl", "middle", "=", "self", ".", "_chorder", "(", ")", "if", "self", ".", "rng_children", "(", ")", "else", "\"<empty/>%s\"", "return", "(", "\"<\"", "+", "ord_", "+", "\">\"", "+", "self", ".", "start_tag", "(", "\"element\"", ")", "+", "(", "self", ".", "serialize_annots", "(", ")", "+", "keys", ")", ".", "replace", "(", "\"%\"", ",", "\"%%\"", ")", "+", "middle", "+", "self", ".", "end_tag", "(", "\"element\"", ")", "+", "\"</\"", "+", "ord_", "+", "\">\"", ")" ]
Return the serialization format for a _list_ node.
[ "Return", "the", "serialization", "format", "for", "a", "_list_", "node", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/schemanode.py#L236-L255
231,142
mbj4668/pyang
pyang/translators/schemanode.py
SchemaNode._choice_format
def _choice_format(self, occur): """Return the serialization format for a choice node.""" middle = "%s" if self.rng_children() else "<empty/>%s" fmt = self.start_tag() + middle + self.end_tag() if self.occur != 2: return "<optional>" + fmt + "</optional>" else: return fmt
python
def _choice_format(self, occur): """Return the serialization format for a choice node.""" middle = "%s" if self.rng_children() else "<empty/>%s" fmt = self.start_tag() + middle + self.end_tag() if self.occur != 2: return "<optional>" + fmt + "</optional>" else: return fmt
[ "def", "_choice_format", "(", "self", ",", "occur", ")", ":", "middle", "=", "\"%s\"", "if", "self", ".", "rng_children", "(", ")", "else", "\"<empty/>%s\"", "fmt", "=", "self", ".", "start_tag", "(", ")", "+", "middle", "+", "self", ".", "end_tag", "(", ")", "if", "self", ".", "occur", "!=", "2", ":", "return", "\"<optional>\"", "+", "fmt", "+", "\"</optional>\"", "else", ":", "return", "fmt" ]
Return the serialization format for a choice node.
[ "Return", "the", "serialization", "format", "for", "a", "choice", "node", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/schemanode.py#L257-L264
231,143
mbj4668/pyang
pyang/translators/schemanode.py
SchemaNode._case_format
def _case_format(self, occur): """Return the serialization format for a case node.""" if self.occur == 1: self.attr["nma:implicit"] = "true" ccnt = len(self.rng_children()) if ccnt == 0: return "<empty/>%s" if ccnt == 1 or not self.interleave: return self.start_tag("group") + "%s" + self.end_tag("group") return (self.start_tag("interleave") + "%s" + self.end_tag("interleave"))
python
def _case_format(self, occur): """Return the serialization format for a case node.""" if self.occur == 1: self.attr["nma:implicit"] = "true" ccnt = len(self.rng_children()) if ccnt == 0: return "<empty/>%s" if ccnt == 1 or not self.interleave: return self.start_tag("group") + "%s" + self.end_tag("group") return (self.start_tag("interleave") + "%s" + self.end_tag("interleave"))
[ "def", "_case_format", "(", "self", ",", "occur", ")", ":", "if", "self", ".", "occur", "==", "1", ":", "self", ".", "attr", "[", "\"nma:implicit\"", "]", "=", "\"true\"", "ccnt", "=", "len", "(", "self", ".", "rng_children", "(", ")", ")", "if", "ccnt", "==", "0", ":", "return", "\"<empty/>%s\"", "if", "ccnt", "==", "1", "or", "not", "self", ".", "interleave", ":", "return", "self", ".", "start_tag", "(", "\"group\"", ")", "+", "\"%s\"", "+", "self", ".", "end_tag", "(", "\"group\"", ")", "return", "(", "self", ".", "start_tag", "(", "\"interleave\"", ")", "+", "\"%s\"", "+", "self", ".", "end_tag", "(", "\"interleave\"", ")", ")" ]
Return the serialization format for a case node.
[ "Return", "the", "serialization", "format", "for", "a", "case", "node", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/schemanode.py#L266-L275
231,144
mbj4668/pyang
pyang/plugins/jtox.py
JtoXPlugin.process_children
def process_children(self, node, parent, pmod): """Process all children of `node`, except "rpc" and "notification". """ for ch in node.i_children: if ch.keyword in ["rpc", "notification"]: continue if ch.keyword in ["choice", "case"]: self.process_children(ch, parent, pmod) continue if ch.i_module.i_modulename == pmod: nmod = pmod nodename = ch.arg else: nmod = ch.i_module.i_modulename nodename = "%s:%s" % (nmod, ch.arg) ndata = [ch.keyword] if ch.keyword == "container": ndata.append({}) self.process_children(ch, ndata[1], nmod) elif ch.keyword == "list": ndata.append({}) self.process_children(ch, ndata[1], nmod) ndata.append([(k.i_module.i_modulename, k.arg) for k in ch.i_key]) elif ch.keyword in ["leaf", "leaf-list"]: ndata.append(self.base_type(ch.search_one("type"))) modname = ch.i_module.i_modulename parent[nodename] = ndata
python
def process_children(self, node, parent, pmod): """Process all children of `node`, except "rpc" and "notification". """ for ch in node.i_children: if ch.keyword in ["rpc", "notification"]: continue if ch.keyword in ["choice", "case"]: self.process_children(ch, parent, pmod) continue if ch.i_module.i_modulename == pmod: nmod = pmod nodename = ch.arg else: nmod = ch.i_module.i_modulename nodename = "%s:%s" % (nmod, ch.arg) ndata = [ch.keyword] if ch.keyword == "container": ndata.append({}) self.process_children(ch, ndata[1], nmod) elif ch.keyword == "list": ndata.append({}) self.process_children(ch, ndata[1], nmod) ndata.append([(k.i_module.i_modulename, k.arg) for k in ch.i_key]) elif ch.keyword in ["leaf", "leaf-list"]: ndata.append(self.base_type(ch.search_one("type"))) modname = ch.i_module.i_modulename parent[nodename] = ndata
[ "def", "process_children", "(", "self", ",", "node", ",", "parent", ",", "pmod", ")", ":", "for", "ch", "in", "node", ".", "i_children", ":", "if", "ch", ".", "keyword", "in", "[", "\"rpc\"", ",", "\"notification\"", "]", ":", "continue", "if", "ch", ".", "keyword", "in", "[", "\"choice\"", ",", "\"case\"", "]", ":", "self", ".", "process_children", "(", "ch", ",", "parent", ",", "pmod", ")", "continue", "if", "ch", ".", "i_module", ".", "i_modulename", "==", "pmod", ":", "nmod", "=", "pmod", "nodename", "=", "ch", ".", "arg", "else", ":", "nmod", "=", "ch", ".", "i_module", ".", "i_modulename", "nodename", "=", "\"%s:%s\"", "%", "(", "nmod", ",", "ch", ".", "arg", ")", "ndata", "=", "[", "ch", ".", "keyword", "]", "if", "ch", ".", "keyword", "==", "\"container\"", ":", "ndata", ".", "append", "(", "{", "}", ")", "self", ".", "process_children", "(", "ch", ",", "ndata", "[", "1", "]", ",", "nmod", ")", "elif", "ch", ".", "keyword", "==", "\"list\"", ":", "ndata", ".", "append", "(", "{", "}", ")", "self", ".", "process_children", "(", "ch", ",", "ndata", "[", "1", "]", ",", "nmod", ")", "ndata", ".", "append", "(", "[", "(", "k", ".", "i_module", ".", "i_modulename", ",", "k", ".", "arg", ")", "for", "k", "in", "ch", ".", "i_key", "]", ")", "elif", "ch", ".", "keyword", "in", "[", "\"leaf\"", ",", "\"leaf-list\"", "]", ":", "ndata", ".", "append", "(", "self", ".", "base_type", "(", "ch", ".", "search_one", "(", "\"type\"", ")", ")", ")", "modname", "=", "ch", ".", "i_module", ".", "i_modulename", "parent", "[", "nodename", "]", "=", "ndata" ]
Process all children of `node`, except "rpc" and "notification".
[ "Process", "all", "children", "of", "node", "except", "rpc", "and", "notification", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/plugins/jtox.py#L61-L87
231,145
mbj4668/pyang
pyang/plugins/jtox.py
JtoXPlugin.base_type
def base_type(self, type): """Return the base type of `type`.""" while 1: if type.arg == "leafref": node = type.i_type_spec.i_target_node elif type.i_typedef is None: break else: node = type.i_typedef type = node.search_one("type") if type.arg == "decimal64": return [type.arg, int(type.search_one("fraction-digits").arg)] elif type.arg == "union": return [type.arg, [self.base_type(x) for x in type.i_type_spec.types]] else: return type.arg
python
def base_type(self, type): """Return the base type of `type`.""" while 1: if type.arg == "leafref": node = type.i_type_spec.i_target_node elif type.i_typedef is None: break else: node = type.i_typedef type = node.search_one("type") if type.arg == "decimal64": return [type.arg, int(type.search_one("fraction-digits").arg)] elif type.arg == "union": return [type.arg, [self.base_type(x) for x in type.i_type_spec.types]] else: return type.arg
[ "def", "base_type", "(", "self", ",", "type", ")", ":", "while", "1", ":", "if", "type", ".", "arg", "==", "\"leafref\"", ":", "node", "=", "type", ".", "i_type_spec", ".", "i_target_node", "elif", "type", ".", "i_typedef", "is", "None", ":", "break", "else", ":", "node", "=", "type", ".", "i_typedef", "type", "=", "node", ".", "search_one", "(", "\"type\"", ")", "if", "type", ".", "arg", "==", "\"decimal64\"", ":", "return", "[", "type", ".", "arg", ",", "int", "(", "type", ".", "search_one", "(", "\"fraction-digits\"", ")", ".", "arg", ")", "]", "elif", "type", ".", "arg", "==", "\"union\"", ":", "return", "[", "type", ".", "arg", ",", "[", "self", ".", "base_type", "(", "x", ")", "for", "x", "in", "type", ".", "i_type_spec", ".", "types", "]", "]", "else", ":", "return", "type", ".", "arg" ]
Return the base type of `type`.
[ "Return", "the", "base", "type", "of", "type", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/plugins/jtox.py#L89-L104
231,146
mbj4668/pyang
pyang/yang_parser.py
YangTokenizer.skip
def skip(self): """Skip whitespace and count position""" buflen = len(self.buf) while True: self.buf = self.buf.lstrip() if self.buf == '': self.readline() buflen = len(self.buf) else: self.offset += (buflen - len(self.buf)) break # do not keep comments in the syntax tree if not self.keep_comments: # skip line comment if self.buf[0] == '/': if self.buf[1] == '/': self.readline() return self.skip() # skip block comment elif self.buf[1] == '*': i = self.buf.find('*/') while i == -1: self.readline() i = self.buf.find('*/') self.set_buf(i+2) return self.skip()
python
def skip(self): """Skip whitespace and count position""" buflen = len(self.buf) while True: self.buf = self.buf.lstrip() if self.buf == '': self.readline() buflen = len(self.buf) else: self.offset += (buflen - len(self.buf)) break # do not keep comments in the syntax tree if not self.keep_comments: # skip line comment if self.buf[0] == '/': if self.buf[1] == '/': self.readline() return self.skip() # skip block comment elif self.buf[1] == '*': i = self.buf.find('*/') while i == -1: self.readline() i = self.buf.find('*/') self.set_buf(i+2) return self.skip()
[ "def", "skip", "(", "self", ")", ":", "buflen", "=", "len", "(", "self", ".", "buf", ")", "while", "True", ":", "self", ".", "buf", "=", "self", ".", "buf", ".", "lstrip", "(", ")", "if", "self", ".", "buf", "==", "''", ":", "self", ".", "readline", "(", ")", "buflen", "=", "len", "(", "self", ".", "buf", ")", "else", ":", "self", ".", "offset", "+=", "(", "buflen", "-", "len", "(", "self", ".", "buf", ")", ")", "break", "# do not keep comments in the syntax tree", "if", "not", "self", ".", "keep_comments", ":", "# skip line comment", "if", "self", ".", "buf", "[", "0", "]", "==", "'/'", ":", "if", "self", ".", "buf", "[", "1", "]", "==", "'/'", ":", "self", ".", "readline", "(", ")", "return", "self", ".", "skip", "(", ")", "# skip block comment", "elif", "self", ".", "buf", "[", "1", "]", "==", "'*'", ":", "i", "=", "self", ".", "buf", ".", "find", "(", "'*/'", ")", "while", "i", "==", "-", "1", ":", "self", ".", "readline", "(", ")", "i", "=", "self", ".", "buf", ".", "find", "(", "'*/'", ")", "self", ".", "set_buf", "(", "i", "+", "2", ")", "return", "self", ".", "skip", "(", ")" ]
Skip whitespace and count position
[ "Skip", "whitespace", "and", "count", "position" ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/yang_parser.py#L51-L78
231,147
mbj4668/pyang
pyang/yang_parser.py
YangParser.parse
def parse(self, ctx, ref, text): """Parse the string `text` containing a YANG statement. Return a Statement on success or None on failure """ self.ctx = ctx self.pos = error.Position(ref) self.top = None try: self.tokenizer = YangTokenizer(text, self.pos, ctx.errors, ctx.max_line_len, ctx.keep_comments, not ctx.lax_quote_checks) stmt = self._parse_statement(None) except error.Abort: return None except error.Eof as e: error.err_add(self.ctx.errors, self.pos, 'EOF_ERROR', ()) return None try: # we expect a error.Eof at this point, everything else is an error self.tokenizer.peek() except error.Eof: return stmt except: pass error.err_add(self.ctx.errors, self.pos, 'TRAILING_GARBAGE', ()) return None
python
def parse(self, ctx, ref, text): """Parse the string `text` containing a YANG statement. Return a Statement on success or None on failure """ self.ctx = ctx self.pos = error.Position(ref) self.top = None try: self.tokenizer = YangTokenizer(text, self.pos, ctx.errors, ctx.max_line_len, ctx.keep_comments, not ctx.lax_quote_checks) stmt = self._parse_statement(None) except error.Abort: return None except error.Eof as e: error.err_add(self.ctx.errors, self.pos, 'EOF_ERROR', ()) return None try: # we expect a error.Eof at this point, everything else is an error self.tokenizer.peek() except error.Eof: return stmt except: pass error.err_add(self.ctx.errors, self.pos, 'TRAILING_GARBAGE', ()) return None
[ "def", "parse", "(", "self", ",", "ctx", ",", "ref", ",", "text", ")", ":", "self", ".", "ctx", "=", "ctx", "self", ".", "pos", "=", "error", ".", "Position", "(", "ref", ")", "self", ".", "top", "=", "None", "try", ":", "self", ".", "tokenizer", "=", "YangTokenizer", "(", "text", ",", "self", ".", "pos", ",", "ctx", ".", "errors", ",", "ctx", ".", "max_line_len", ",", "ctx", ".", "keep_comments", ",", "not", "ctx", ".", "lax_quote_checks", ")", "stmt", "=", "self", ".", "_parse_statement", "(", "None", ")", "except", "error", ".", "Abort", ":", "return", "None", "except", "error", ".", "Eof", "as", "e", ":", "error", ".", "err_add", "(", "self", ".", "ctx", ".", "errors", ",", "self", ".", "pos", ",", "'EOF_ERROR'", ",", "(", ")", ")", "return", "None", "try", ":", "# we expect a error.Eof at this point, everything else is an error", "self", ".", "tokenizer", ".", "peek", "(", ")", "except", "error", ".", "Eof", ":", "return", "stmt", "except", ":", "pass", "error", ".", "err_add", "(", "self", ".", "ctx", ".", "errors", ",", "self", ".", "pos", ",", "'TRAILING_GARBAGE'", ",", "(", ")", ")", "return", "None" ]
Parse the string `text` containing a YANG statement. Return a Statement on success or None on failure
[ "Parse", "the", "string", "text", "containing", "a", "YANG", "statement", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/yang_parser.py#L261-L288
231,148
mbj4668/pyang
pyang/statements.py
add_validation_phase
def add_validation_phase(phase, before=None, after=None): """Add a validation phase to the framework. Can be used by plugins to do special validation of extensions.""" idx = 0 for x in _validation_phases: if x == before: _validation_phases.insert(idx, phase) return elif x == after: _validation_phases.insert(idx+1, phase) return idx = idx + 1 # otherwise append at the end _validation_phases.append(phase)
python
def add_validation_phase(phase, before=None, after=None): """Add a validation phase to the framework. Can be used by plugins to do special validation of extensions.""" idx = 0 for x in _validation_phases: if x == before: _validation_phases.insert(idx, phase) return elif x == after: _validation_phases.insert(idx+1, phase) return idx = idx + 1 # otherwise append at the end _validation_phases.append(phase)
[ "def", "add_validation_phase", "(", "phase", ",", "before", "=", "None", ",", "after", "=", "None", ")", ":", "idx", "=", "0", "for", "x", "in", "_validation_phases", ":", "if", "x", "==", "before", ":", "_validation_phases", ".", "insert", "(", "idx", ",", "phase", ")", "return", "elif", "x", "==", "after", ":", "_validation_phases", ".", "insert", "(", "idx", "+", "1", ",", "phase", ")", "return", "idx", "=", "idx", "+", "1", "# otherwise append at the end", "_validation_phases", ".", "append", "(", "phase", ")" ]
Add a validation phase to the framework. Can be used by plugins to do special validation of extensions.
[ "Add", "a", "validation", "phase", "to", "the", "framework", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/statements.py#L16-L30
231,149
mbj4668/pyang
pyang/statements.py
add_validation_fun
def add_validation_fun(phase, keywords, f): """Add a validation function to some phase in the framework. Function `f` is called for each valid occurance of each keyword in `keywords`. Can be used by plugins to do special validation of extensions.""" for keyword in keywords: if (phase, keyword) in _validation_map: oldf = _validation_map[(phase, keyword)] def newf(ctx, s): oldf(ctx, s) f(ctx, s) _validation_map[(phase, keyword)] = newf else: _validation_map[(phase, keyword)] = f
python
def add_validation_fun(phase, keywords, f): """Add a validation function to some phase in the framework. Function `f` is called for each valid occurance of each keyword in `keywords`. Can be used by plugins to do special validation of extensions.""" for keyword in keywords: if (phase, keyword) in _validation_map: oldf = _validation_map[(phase, keyword)] def newf(ctx, s): oldf(ctx, s) f(ctx, s) _validation_map[(phase, keyword)] = newf else: _validation_map[(phase, keyword)] = f
[ "def", "add_validation_fun", "(", "phase", ",", "keywords", ",", "f", ")", ":", "for", "keyword", "in", "keywords", ":", "if", "(", "phase", ",", "keyword", ")", "in", "_validation_map", ":", "oldf", "=", "_validation_map", "[", "(", "phase", ",", "keyword", ")", "]", "def", "newf", "(", "ctx", ",", "s", ")", ":", "oldf", "(", "ctx", ",", "s", ")", "f", "(", "ctx", ",", "s", ")", "_validation_map", "[", "(", "phase", ",", "keyword", ")", "]", "=", "newf", "else", ":", "_validation_map", "[", "(", "phase", ",", "keyword", ")", "]", "=", "f" ]
Add a validation function to some phase in the framework. Function `f` is called for each valid occurance of each keyword in `keywords`. Can be used by plugins to do special validation of extensions.
[ "Add", "a", "validation", "function", "to", "some", "phase", "in", "the", "framework", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/statements.py#L32-L46
231,150
mbj4668/pyang
pyang/statements.py
v_init_extension
def v_init_extension(ctx, stmt): """find the modulename of the prefix, and set `stmt.keyword`""" (prefix, identifier) = stmt.raw_keyword (modname, revision) = \ prefix_to_modulename_and_revision(stmt.i_module, prefix, stmt.pos, ctx.errors) stmt.keyword = (modname, identifier) stmt.i_extension_modulename = modname stmt.i_extension_revision = revision stmt.i_extension = None
python
def v_init_extension(ctx, stmt): """find the modulename of the prefix, and set `stmt.keyword`""" (prefix, identifier) = stmt.raw_keyword (modname, revision) = \ prefix_to_modulename_and_revision(stmt.i_module, prefix, stmt.pos, ctx.errors) stmt.keyword = (modname, identifier) stmt.i_extension_modulename = modname stmt.i_extension_revision = revision stmt.i_extension = None
[ "def", "v_init_extension", "(", "ctx", ",", "stmt", ")", ":", "(", "prefix", ",", "identifier", ")", "=", "stmt", ".", "raw_keyword", "(", "modname", ",", "revision", ")", "=", "prefix_to_modulename_and_revision", "(", "stmt", ".", "i_module", ",", "prefix", ",", "stmt", ".", "pos", ",", "ctx", ".", "errors", ")", "stmt", ".", "keyword", "=", "(", "modname", ",", "identifier", ")", "stmt", ".", "i_extension_modulename", "=", "modname", "stmt", ".", "i_extension_revision", "=", "revision", "stmt", ".", "i_extension", "=", "None" ]
find the modulename of the prefix, and set `stmt.keyword`
[ "find", "the", "modulename", "of", "the", "prefix", "and", "set", "stmt", ".", "keyword" ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/statements.py#L491-L500
231,151
mbj4668/pyang
pyang/statements.py
v_grammar_unique_defs
def v_grammar_unique_defs(ctx, stmt): """Verify that all typedefs and groupings are unique Called for every statement. Stores all typedefs in stmt.i_typedef, groupings in stmt.i_grouping """ defs = [('typedef', 'TYPE_ALREADY_DEFINED', stmt.i_typedefs), ('grouping', 'GROUPING_ALREADY_DEFINED', stmt.i_groupings)] if stmt.parent is None: defs.extend( [('feature', 'FEATURE_ALREADY_DEFINED', stmt.i_features), ('identity', 'IDENTITY_ALREADY_DEFINED', stmt.i_identities), ('extension', 'EXTENSION_ALREADY_DEFINED', stmt.i_extensions)]) for (keyword, errcode, dict) in defs: for definition in stmt.search(keyword): if definition.arg in dict: other = dict[definition.arg] err_add(ctx.errors, definition.pos, errcode, (definition.arg, other.pos)) else: dict[definition.arg] = definition
python
def v_grammar_unique_defs(ctx, stmt): """Verify that all typedefs and groupings are unique Called for every statement. Stores all typedefs in stmt.i_typedef, groupings in stmt.i_grouping """ defs = [('typedef', 'TYPE_ALREADY_DEFINED', stmt.i_typedefs), ('grouping', 'GROUPING_ALREADY_DEFINED', stmt.i_groupings)] if stmt.parent is None: defs.extend( [('feature', 'FEATURE_ALREADY_DEFINED', stmt.i_features), ('identity', 'IDENTITY_ALREADY_DEFINED', stmt.i_identities), ('extension', 'EXTENSION_ALREADY_DEFINED', stmt.i_extensions)]) for (keyword, errcode, dict) in defs: for definition in stmt.search(keyword): if definition.arg in dict: other = dict[definition.arg] err_add(ctx.errors, definition.pos, errcode, (definition.arg, other.pos)) else: dict[definition.arg] = definition
[ "def", "v_grammar_unique_defs", "(", "ctx", ",", "stmt", ")", ":", "defs", "=", "[", "(", "'typedef'", ",", "'TYPE_ALREADY_DEFINED'", ",", "stmt", ".", "i_typedefs", ")", ",", "(", "'grouping'", ",", "'GROUPING_ALREADY_DEFINED'", ",", "stmt", ".", "i_groupings", ")", "]", "if", "stmt", ".", "parent", "is", "None", ":", "defs", ".", "extend", "(", "[", "(", "'feature'", ",", "'FEATURE_ALREADY_DEFINED'", ",", "stmt", ".", "i_features", ")", ",", "(", "'identity'", ",", "'IDENTITY_ALREADY_DEFINED'", ",", "stmt", ".", "i_identities", ")", ",", "(", "'extension'", ",", "'EXTENSION_ALREADY_DEFINED'", ",", "stmt", ".", "i_extensions", ")", "]", ")", "for", "(", "keyword", ",", "errcode", ",", "dict", ")", "in", "defs", ":", "for", "definition", "in", "stmt", ".", "search", "(", "keyword", ")", ":", "if", "definition", ".", "arg", "in", "dict", ":", "other", "=", "dict", "[", "definition", ".", "arg", "]", "err_add", "(", "ctx", ".", "errors", ",", "definition", ".", "pos", ",", "errcode", ",", "(", "definition", ".", "arg", ",", "other", ".", "pos", ")", ")", "else", ":", "dict", "[", "definition", ".", "arg", "]", "=", "definition" ]
Verify that all typedefs and groupings are unique Called for every statement. Stores all typedefs in stmt.i_typedef, groupings in stmt.i_grouping
[ "Verify", "that", "all", "typedefs", "and", "groupings", "are", "unique", "Called", "for", "every", "statement", ".", "Stores", "all", "typedefs", "in", "stmt", ".", "i_typedef", "groupings", "in", "stmt", ".", "i_grouping" ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/statements.py#L537-L556
231,152
mbj4668/pyang
pyang/statements.py
v_type_extension
def v_type_extension(ctx, stmt): """verify that the extension matches the extension definition""" (modulename, identifier) = stmt.keyword revision = stmt.i_extension_revision module = modulename_to_module(stmt.i_module, modulename, revision) if module is None: return if identifier not in module.i_extensions: if module.i_modulename == stmt.i_orig_module.i_modulename: # extension defined in current submodule if identifier not in stmt.i_orig_module.i_extensions: err_add(ctx.errors, stmt.pos, 'EXTENSION_NOT_DEFINED', (identifier, module.arg)) return else: stmt.i_extension = stmt.i_orig_module.i_extensions[identifier] else: err_add(ctx.errors, stmt.pos, 'EXTENSION_NOT_DEFINED', (identifier, module.arg)) return else: stmt.i_extension = module.i_extensions[identifier] ext_arg = stmt.i_extension.search_one('argument') if stmt.arg is not None and ext_arg is None: err_add(ctx.errors, stmt.pos, 'EXTENSION_ARGUMENT_PRESENT', identifier) elif stmt.arg is None and ext_arg is not None: err_add(ctx.errors, stmt.pos, 'EXTENSION_NO_ARGUMENT_PRESENT', identifier)
python
def v_type_extension(ctx, stmt): """verify that the extension matches the extension definition""" (modulename, identifier) = stmt.keyword revision = stmt.i_extension_revision module = modulename_to_module(stmt.i_module, modulename, revision) if module is None: return if identifier not in module.i_extensions: if module.i_modulename == stmt.i_orig_module.i_modulename: # extension defined in current submodule if identifier not in stmt.i_orig_module.i_extensions: err_add(ctx.errors, stmt.pos, 'EXTENSION_NOT_DEFINED', (identifier, module.arg)) return else: stmt.i_extension = stmt.i_orig_module.i_extensions[identifier] else: err_add(ctx.errors, stmt.pos, 'EXTENSION_NOT_DEFINED', (identifier, module.arg)) return else: stmt.i_extension = module.i_extensions[identifier] ext_arg = stmt.i_extension.search_one('argument') if stmt.arg is not None and ext_arg is None: err_add(ctx.errors, stmt.pos, 'EXTENSION_ARGUMENT_PRESENT', identifier) elif stmt.arg is None and ext_arg is not None: err_add(ctx.errors, stmt.pos, 'EXTENSION_NO_ARGUMENT_PRESENT', identifier)
[ "def", "v_type_extension", "(", "ctx", ",", "stmt", ")", ":", "(", "modulename", ",", "identifier", ")", "=", "stmt", ".", "keyword", "revision", "=", "stmt", ".", "i_extension_revision", "module", "=", "modulename_to_module", "(", "stmt", ".", "i_module", ",", "modulename", ",", "revision", ")", "if", "module", "is", "None", ":", "return", "if", "identifier", "not", "in", "module", ".", "i_extensions", ":", "if", "module", ".", "i_modulename", "==", "stmt", ".", "i_orig_module", ".", "i_modulename", ":", "# extension defined in current submodule", "if", "identifier", "not", "in", "stmt", ".", "i_orig_module", ".", "i_extensions", ":", "err_add", "(", "ctx", ".", "errors", ",", "stmt", ".", "pos", ",", "'EXTENSION_NOT_DEFINED'", ",", "(", "identifier", ",", "module", ".", "arg", ")", ")", "return", "else", ":", "stmt", ".", "i_extension", "=", "stmt", ".", "i_orig_module", ".", "i_extensions", "[", "identifier", "]", "else", ":", "err_add", "(", "ctx", ".", "errors", ",", "stmt", ".", "pos", ",", "'EXTENSION_NOT_DEFINED'", ",", "(", "identifier", ",", "module", ".", "arg", ")", ")", "return", "else", ":", "stmt", ".", "i_extension", "=", "module", ".", "i_extensions", "[", "identifier", "]", "ext_arg", "=", "stmt", ".", "i_extension", ".", "search_one", "(", "'argument'", ")", "if", "stmt", ".", "arg", "is", "not", "None", "and", "ext_arg", "is", "None", ":", "err_add", "(", "ctx", ".", "errors", ",", "stmt", ".", "pos", ",", "'EXTENSION_ARGUMENT_PRESENT'", ",", "identifier", ")", "elif", "stmt", ".", "arg", "is", "None", "and", "ext_arg", "is", "not", "None", ":", "err_add", "(", "ctx", ".", "errors", ",", "stmt", ".", "pos", ",", "'EXTENSION_NO_ARGUMENT_PRESENT'", ",", "identifier", ")" ]
verify that the extension matches the extension definition
[ "verify", "that", "the", "extension", "matches", "the", "extension", "definition" ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/statements.py#L1172-L1200
231,153
mbj4668/pyang
pyang/statements.py
v_type_if_feature
def v_type_if_feature(ctx, stmt, no_error_report=False): """verify that the referenced feature exists.""" stmt.i_feature = None # Verify the argument type expr = syntax.parse_if_feature_expr(stmt.arg) if stmt.i_module.i_version == '1': # version 1 allows only a single value as if-feature if type(expr) != type(''): err_add(ctx.errors, stmt.pos, 'BAD_VALUE', (stmt.arg, 'identifier-ref')) return def eval(expr): if type(expr) == type(''): return has_feature(expr) else: (op, op1, op2) = expr if op == 'not': return not eval(op1) elif op == 'and': return eval(op1) and eval(op2) elif op == 'or': return eval(op1) or eval(op2) def has_feature(name): # raises Abort if the feature is not defined # returns True if we compile with the feature # returns False if we compile without the feature found = None if name.find(":") == -1: prefix = None else: [prefix, name] = name.split(':', 1) if prefix is None or stmt.i_module.i_prefix == prefix: # check local features pmodule = stmt.i_module else: # this is a prefixed name, check the imported modules pmodule = prefix_to_module(stmt.i_module, prefix, stmt.pos, ctx.errors) if pmodule is None: raise Abort if name in pmodule.i_features: f = pmodule.i_features[name] if prefix is None and not is_submodule_included(stmt, f): pass else: found = pmodule.i_features[name] v_type_feature(ctx, found) if pmodule.i_modulename in ctx.features: if name not in ctx.features[pmodule.i_modulename]: return False if found is None and no_error_report == False: err_add(ctx.errors, stmt.pos, 'FEATURE_NOT_FOUND', (name, pmodule.arg)) raise Abort return found is not None # Evaluate the if-feature expression, and verify that all # referenced features exist. try: if eval(expr) == False: # prune the parent. # since the parent can have more than one if-feature # statement, we must check if the parent # already has been scheduled for removal if stmt.parent not in stmt.i_module.i_prune: stmt.i_module.i_prune.append(stmt.parent) except Abort: pass
python
def v_type_if_feature(ctx, stmt, no_error_report=False): """verify that the referenced feature exists.""" stmt.i_feature = None # Verify the argument type expr = syntax.parse_if_feature_expr(stmt.arg) if stmt.i_module.i_version == '1': # version 1 allows only a single value as if-feature if type(expr) != type(''): err_add(ctx.errors, stmt.pos, 'BAD_VALUE', (stmt.arg, 'identifier-ref')) return def eval(expr): if type(expr) == type(''): return has_feature(expr) else: (op, op1, op2) = expr if op == 'not': return not eval(op1) elif op == 'and': return eval(op1) and eval(op2) elif op == 'or': return eval(op1) or eval(op2) def has_feature(name): # raises Abort if the feature is not defined # returns True if we compile with the feature # returns False if we compile without the feature found = None if name.find(":") == -1: prefix = None else: [prefix, name] = name.split(':', 1) if prefix is None or stmt.i_module.i_prefix == prefix: # check local features pmodule = stmt.i_module else: # this is a prefixed name, check the imported modules pmodule = prefix_to_module(stmt.i_module, prefix, stmt.pos, ctx.errors) if pmodule is None: raise Abort if name in pmodule.i_features: f = pmodule.i_features[name] if prefix is None and not is_submodule_included(stmt, f): pass else: found = pmodule.i_features[name] v_type_feature(ctx, found) if pmodule.i_modulename in ctx.features: if name not in ctx.features[pmodule.i_modulename]: return False if found is None and no_error_report == False: err_add(ctx.errors, stmt.pos, 'FEATURE_NOT_FOUND', (name, pmodule.arg)) raise Abort return found is not None # Evaluate the if-feature expression, and verify that all # referenced features exist. try: if eval(expr) == False: # prune the parent. # since the parent can have more than one if-feature # statement, we must check if the parent # already has been scheduled for removal if stmt.parent not in stmt.i_module.i_prune: stmt.i_module.i_prune.append(stmt.parent) except Abort: pass
[ "def", "v_type_if_feature", "(", "ctx", ",", "stmt", ",", "no_error_report", "=", "False", ")", ":", "stmt", ".", "i_feature", "=", "None", "# Verify the argument type", "expr", "=", "syntax", ".", "parse_if_feature_expr", "(", "stmt", ".", "arg", ")", "if", "stmt", ".", "i_module", ".", "i_version", "==", "'1'", ":", "# version 1 allows only a single value as if-feature", "if", "type", "(", "expr", ")", "!=", "type", "(", "''", ")", ":", "err_add", "(", "ctx", ".", "errors", ",", "stmt", ".", "pos", ",", "'BAD_VALUE'", ",", "(", "stmt", ".", "arg", ",", "'identifier-ref'", ")", ")", "return", "def", "eval", "(", "expr", ")", ":", "if", "type", "(", "expr", ")", "==", "type", "(", "''", ")", ":", "return", "has_feature", "(", "expr", ")", "else", ":", "(", "op", ",", "op1", ",", "op2", ")", "=", "expr", "if", "op", "==", "'not'", ":", "return", "not", "eval", "(", "op1", ")", "elif", "op", "==", "'and'", ":", "return", "eval", "(", "op1", ")", "and", "eval", "(", "op2", ")", "elif", "op", "==", "'or'", ":", "return", "eval", "(", "op1", ")", "or", "eval", "(", "op2", ")", "def", "has_feature", "(", "name", ")", ":", "# raises Abort if the feature is not defined", "# returns True if we compile with the feature", "# returns False if we compile without the feature", "found", "=", "None", "if", "name", ".", "find", "(", "\":\"", ")", "==", "-", "1", ":", "prefix", "=", "None", "else", ":", "[", "prefix", ",", "name", "]", "=", "name", ".", "split", "(", "':'", ",", "1", ")", "if", "prefix", "is", "None", "or", "stmt", ".", "i_module", ".", "i_prefix", "==", "prefix", ":", "# check local features", "pmodule", "=", "stmt", ".", "i_module", "else", ":", "# this is a prefixed name, check the imported modules", "pmodule", "=", "prefix_to_module", "(", "stmt", ".", "i_module", ",", "prefix", ",", "stmt", ".", "pos", ",", "ctx", ".", "errors", ")", "if", "pmodule", "is", "None", ":", "raise", "Abort", "if", "name", "in", "pmodule", ".", "i_features", ":", "f", "=", 
"pmodule", ".", "i_features", "[", "name", "]", "if", "prefix", "is", "None", "and", "not", "is_submodule_included", "(", "stmt", ",", "f", ")", ":", "pass", "else", ":", "found", "=", "pmodule", ".", "i_features", "[", "name", "]", "v_type_feature", "(", "ctx", ",", "found", ")", "if", "pmodule", ".", "i_modulename", "in", "ctx", ".", "features", ":", "if", "name", "not", "in", "ctx", ".", "features", "[", "pmodule", ".", "i_modulename", "]", ":", "return", "False", "if", "found", "is", "None", "and", "no_error_report", "==", "False", ":", "err_add", "(", "ctx", ".", "errors", ",", "stmt", ".", "pos", ",", "'FEATURE_NOT_FOUND'", ",", "(", "name", ",", "pmodule", ".", "arg", ")", ")", "raise", "Abort", "return", "found", "is", "not", "None", "# Evaluate the if-feature expression, and verify that all", "# referenced features exist.", "try", ":", "if", "eval", "(", "expr", ")", "==", "False", ":", "# prune the parent.", "# since the parent can have more than one if-feature", "# statement, we must check if the parent", "# already has been scheduled for removal", "if", "stmt", ".", "parent", "not", "in", "stmt", ".", "i_module", ".", "i_prune", ":", "stmt", ".", "i_module", ".", "i_prune", ".", "append", "(", "stmt", ".", "parent", ")", "except", "Abort", ":", "pass" ]
verify that the referenced feature exists.
[ "verify", "that", "the", "referenced", "feature", "exists", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/statements.py#L1224-L1294
231,154
mbj4668/pyang
pyang/statements.py
v_type_base
def v_type_base(ctx, stmt, no_error_report=False): """verify that the referenced identity exists.""" # Find the identity name = stmt.arg stmt.i_identity = None if name.find(":") == -1: prefix = None else: [prefix, name] = name.split(':', 1) if prefix is None or stmt.i_module.i_prefix == prefix: # check local identities pmodule = stmt.i_module else: # this is a prefixed name, check the imported modules pmodule = prefix_to_module(stmt.i_module, prefix, stmt.pos, ctx.errors) if pmodule is None: return if name in pmodule.i_identities: i = pmodule.i_identities[name] if prefix is None and not is_submodule_included(stmt, i): pass else: stmt.i_identity = i v_type_identity(ctx, stmt.i_identity) if stmt.i_identity is None and no_error_report == False: err_add(ctx.errors, stmt.pos, 'IDENTITY_NOT_FOUND', (name, pmodule.arg))
python
def v_type_base(ctx, stmt, no_error_report=False): """verify that the referenced identity exists.""" # Find the identity name = stmt.arg stmt.i_identity = None if name.find(":") == -1: prefix = None else: [prefix, name] = name.split(':', 1) if prefix is None or stmt.i_module.i_prefix == prefix: # check local identities pmodule = stmt.i_module else: # this is a prefixed name, check the imported modules pmodule = prefix_to_module(stmt.i_module, prefix, stmt.pos, ctx.errors) if pmodule is None: return if name in pmodule.i_identities: i = pmodule.i_identities[name] if prefix is None and not is_submodule_included(stmt, i): pass else: stmt.i_identity = i v_type_identity(ctx, stmt.i_identity) if stmt.i_identity is None and no_error_report == False: err_add(ctx.errors, stmt.pos, 'IDENTITY_NOT_FOUND', (name, pmodule.arg))
[ "def", "v_type_base", "(", "ctx", ",", "stmt", ",", "no_error_report", "=", "False", ")", ":", "# Find the identity", "name", "=", "stmt", ".", "arg", "stmt", ".", "i_identity", "=", "None", "if", "name", ".", "find", "(", "\":\"", ")", "==", "-", "1", ":", "prefix", "=", "None", "else", ":", "[", "prefix", ",", "name", "]", "=", "name", ".", "split", "(", "':'", ",", "1", ")", "if", "prefix", "is", "None", "or", "stmt", ".", "i_module", ".", "i_prefix", "==", "prefix", ":", "# check local identities", "pmodule", "=", "stmt", ".", "i_module", "else", ":", "# this is a prefixed name, check the imported modules", "pmodule", "=", "prefix_to_module", "(", "stmt", ".", "i_module", ",", "prefix", ",", "stmt", ".", "pos", ",", "ctx", ".", "errors", ")", "if", "pmodule", "is", "None", ":", "return", "if", "name", "in", "pmodule", ".", "i_identities", ":", "i", "=", "pmodule", ".", "i_identities", "[", "name", "]", "if", "prefix", "is", "None", "and", "not", "is_submodule_included", "(", "stmt", ",", "i", ")", ":", "pass", "else", ":", "stmt", ".", "i_identity", "=", "i", "v_type_identity", "(", "ctx", ",", "stmt", ".", "i_identity", ")", "if", "stmt", ".", "i_identity", "is", "None", "and", "no_error_report", "==", "False", ":", "err_add", "(", "ctx", ".", "errors", ",", "stmt", ".", "pos", ",", "'IDENTITY_NOT_FOUND'", ",", "(", "name", ",", "pmodule", ".", "arg", ")", ")" ]
verify that the referenced identity exists.
[ "verify", "that", "the", "referenced", "identity", "exists", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/statements.py#L1333-L1359
231,155
mbj4668/pyang
pyang/statements.py
v_unique_name_defintions
def v_unique_name_defintions(ctx, stmt): """Make sure that all top-level definitions in a module are unique""" defs = [('typedef', 'TYPE_ALREADY_DEFINED', stmt.i_typedefs), ('grouping', 'GROUPING_ALREADY_DEFINED', stmt.i_groupings)] def f(s): for (keyword, errcode, dict) in defs: if s.keyword == keyword and s.arg in dict: err_add(ctx.errors, dict[s.arg].pos, errcode, (s.arg, s.pos)) for i in stmt.search('include'): submodulename = i.arg subm = ctx.get_module(submodulename) if subm is not None: for s in subm.substmts: for ss in s.substmts: iterate_stmt(ss, f)
python
def v_unique_name_defintions(ctx, stmt): """Make sure that all top-level definitions in a module are unique""" defs = [('typedef', 'TYPE_ALREADY_DEFINED', stmt.i_typedefs), ('grouping', 'GROUPING_ALREADY_DEFINED', stmt.i_groupings)] def f(s): for (keyword, errcode, dict) in defs: if s.keyword == keyword and s.arg in dict: err_add(ctx.errors, dict[s.arg].pos, errcode, (s.arg, s.pos)) for i in stmt.search('include'): submodulename = i.arg subm = ctx.get_module(submodulename) if subm is not None: for s in subm.substmts: for ss in s.substmts: iterate_stmt(ss, f)
[ "def", "v_unique_name_defintions", "(", "ctx", ",", "stmt", ")", ":", "defs", "=", "[", "(", "'typedef'", ",", "'TYPE_ALREADY_DEFINED'", ",", "stmt", ".", "i_typedefs", ")", ",", "(", "'grouping'", ",", "'GROUPING_ALREADY_DEFINED'", ",", "stmt", ".", "i_groupings", ")", "]", "def", "f", "(", "s", ")", ":", "for", "(", "keyword", ",", "errcode", ",", "dict", ")", "in", "defs", ":", "if", "s", ".", "keyword", "==", "keyword", "and", "s", ".", "arg", "in", "dict", ":", "err_add", "(", "ctx", ".", "errors", ",", "dict", "[", "s", ".", "arg", "]", ".", "pos", ",", "errcode", ",", "(", "s", ".", "arg", ",", "s", ".", "pos", ")", ")", "for", "i", "in", "stmt", ".", "search", "(", "'include'", ")", ":", "submodulename", "=", "i", ".", "arg", "subm", "=", "ctx", ".", "get_module", "(", "submodulename", ")", "if", "subm", "is", "not", "None", ":", "for", "s", "in", "subm", ".", "substmts", ":", "for", "ss", "in", "s", ".", "substmts", ":", "iterate_stmt", "(", "ss", ",", "f", ")" ]
Make sure that all top-level definitions in a module are unique
[ "Make", "sure", "that", "all", "top", "-", "level", "definitions", "in", "a", "module", "are", "unique" ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/statements.py#L1802-L1818
231,156
mbj4668/pyang
pyang/statements.py
v_unique_name_children
def v_unique_name_children(ctx, stmt): """Make sure that each child of stmt has a unique name""" def sort_pos(p1, p2): if p1.line < p2.line: return (p1,p2) else: return (p2,p1) dict = {} chs = stmt.i_children def check(c): key = (c.i_module.i_modulename, c.arg) if key in dict: dup = dict[key] (minpos, maxpos) = sort_pos(c.pos, dup.pos) pos = chk_uses_pos(c, maxpos) err_add(ctx.errors, pos, 'DUPLICATE_CHILD_NAME', (stmt.arg, stmt.pos, c.arg, minpos)) else: dict[key] = c # also check all data nodes in the cases if c.keyword == 'choice': for case in c.i_children: for cc in case.i_children: check(cc) for c in chs: check(c)
python
def v_unique_name_children(ctx, stmt): """Make sure that each child of stmt has a unique name""" def sort_pos(p1, p2): if p1.line < p2.line: return (p1,p2) else: return (p2,p1) dict = {} chs = stmt.i_children def check(c): key = (c.i_module.i_modulename, c.arg) if key in dict: dup = dict[key] (minpos, maxpos) = sort_pos(c.pos, dup.pos) pos = chk_uses_pos(c, maxpos) err_add(ctx.errors, pos, 'DUPLICATE_CHILD_NAME', (stmt.arg, stmt.pos, c.arg, minpos)) else: dict[key] = c # also check all data nodes in the cases if c.keyword == 'choice': for case in c.i_children: for cc in case.i_children: check(cc) for c in chs: check(c)
[ "def", "v_unique_name_children", "(", "ctx", ",", "stmt", ")", ":", "def", "sort_pos", "(", "p1", ",", "p2", ")", ":", "if", "p1", ".", "line", "<", "p2", ".", "line", ":", "return", "(", "p1", ",", "p2", ")", "else", ":", "return", "(", "p2", ",", "p1", ")", "dict", "=", "{", "}", "chs", "=", "stmt", ".", "i_children", "def", "check", "(", "c", ")", ":", "key", "=", "(", "c", ".", "i_module", ".", "i_modulename", ",", "c", ".", "arg", ")", "if", "key", "in", "dict", ":", "dup", "=", "dict", "[", "key", "]", "(", "minpos", ",", "maxpos", ")", "=", "sort_pos", "(", "c", ".", "pos", ",", "dup", ".", "pos", ")", "pos", "=", "chk_uses_pos", "(", "c", ",", "maxpos", ")", "err_add", "(", "ctx", ".", "errors", ",", "pos", ",", "'DUPLICATE_CHILD_NAME'", ",", "(", "stmt", ".", "arg", ",", "stmt", ".", "pos", ",", "c", ".", "arg", ",", "minpos", ")", ")", "else", ":", "dict", "[", "key", "]", "=", "c", "# also check all data nodes in the cases", "if", "c", ".", "keyword", "==", "'choice'", ":", "for", "case", "in", "c", ".", "i_children", ":", "for", "cc", "in", "case", ".", "i_children", ":", "check", "(", "cc", ")", "for", "c", "in", "chs", ":", "check", "(", "c", ")" ]
Make sure that each child of stmt has a unique name
[ "Make", "sure", "that", "each", "child", "of", "stmt", "has", "a", "unique", "name" ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/statements.py#L1821-L1850
231,157
mbj4668/pyang
pyang/statements.py
v_unique_name_leaf_list
def v_unique_name_leaf_list(ctx, stmt): """Make sure config true leaf-lists do nothave duplicate defaults""" if not stmt.i_config: return seen = [] for defval in stmt.i_default: if defval in seen: err_add(ctx.errors, stmt.pos, 'DUPLICATE_DEFAULT', (defval)) else: seen.append(defval)
python
def v_unique_name_leaf_list(ctx, stmt): """Make sure config true leaf-lists do nothave duplicate defaults""" if not stmt.i_config: return seen = [] for defval in stmt.i_default: if defval in seen: err_add(ctx.errors, stmt.pos, 'DUPLICATE_DEFAULT', (defval)) else: seen.append(defval)
[ "def", "v_unique_name_leaf_list", "(", "ctx", ",", "stmt", ")", ":", "if", "not", "stmt", ".", "i_config", ":", "return", "seen", "=", "[", "]", "for", "defval", "in", "stmt", ".", "i_default", ":", "if", "defval", "in", "seen", ":", "err_add", "(", "ctx", ".", "errors", ",", "stmt", ".", "pos", ",", "'DUPLICATE_DEFAULT'", ",", "(", "defval", ")", ")", "else", ":", "seen", ".", "append", "(", "defval", ")" ]
Make sure config true leaf-lists do nothave duplicate defaults
[ "Make", "sure", "config", "true", "leaf", "-", "lists", "do", "nothave", "duplicate", "defaults" ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/statements.py#L1852-L1862
231,158
mbj4668/pyang
pyang/statements.py
v_reference_choice
def v_reference_choice(ctx, stmt): """Make sure that the default case exists""" d = stmt.search_one('default') if d is not None: m = stmt.search_one('mandatory') if m is not None and m.arg == 'true': err_add(ctx.errors, stmt.pos, 'DEFAULT_AND_MANDATORY', ()) ptr = attrsearch(d.arg, 'arg', stmt.i_children) if ptr is None: err_add(ctx.errors, d.pos, 'DEFAULT_CASE_NOT_FOUND', d.arg) else: # make sure there are no mandatory nodes in the default case def chk_no_defaults(s): for c in s.i_children: if c.keyword in ('leaf', 'choice'): m = c.search_one('mandatory') if m is not None and m.arg == 'true': err_add(ctx.errors, c.pos, 'MANDATORY_NODE_IN_DEFAULT_CASE', ()) elif c.keyword in ('list', 'leaf-list'): m = c.search_one('min-elements') if m is not None and int(m.arg) > 0: err_add(ctx.errors, c.pos, 'MANDATORY_NODE_IN_DEFAULT_CASE', ()) elif c.keyword == 'container': p = c.search_one('presence') if p == None or p.arg == 'false': chk_no_defaults(c) chk_no_defaults(ptr)
python
def v_reference_choice(ctx, stmt): """Make sure that the default case exists""" d = stmt.search_one('default') if d is not None: m = stmt.search_one('mandatory') if m is not None and m.arg == 'true': err_add(ctx.errors, stmt.pos, 'DEFAULT_AND_MANDATORY', ()) ptr = attrsearch(d.arg, 'arg', stmt.i_children) if ptr is None: err_add(ctx.errors, d.pos, 'DEFAULT_CASE_NOT_FOUND', d.arg) else: # make sure there are no mandatory nodes in the default case def chk_no_defaults(s): for c in s.i_children: if c.keyword in ('leaf', 'choice'): m = c.search_one('mandatory') if m is not None and m.arg == 'true': err_add(ctx.errors, c.pos, 'MANDATORY_NODE_IN_DEFAULT_CASE', ()) elif c.keyword in ('list', 'leaf-list'): m = c.search_one('min-elements') if m is not None and int(m.arg) > 0: err_add(ctx.errors, c.pos, 'MANDATORY_NODE_IN_DEFAULT_CASE', ()) elif c.keyword == 'container': p = c.search_one('presence') if p == None or p.arg == 'false': chk_no_defaults(c) chk_no_defaults(ptr)
[ "def", "v_reference_choice", "(", "ctx", ",", "stmt", ")", ":", "d", "=", "stmt", ".", "search_one", "(", "'default'", ")", "if", "d", "is", "not", "None", ":", "m", "=", "stmt", ".", "search_one", "(", "'mandatory'", ")", "if", "m", "is", "not", "None", "and", "m", ".", "arg", "==", "'true'", ":", "err_add", "(", "ctx", ".", "errors", ",", "stmt", ".", "pos", ",", "'DEFAULT_AND_MANDATORY'", ",", "(", ")", ")", "ptr", "=", "attrsearch", "(", "d", ".", "arg", ",", "'arg'", ",", "stmt", ".", "i_children", ")", "if", "ptr", "is", "None", ":", "err_add", "(", "ctx", ".", "errors", ",", "d", ".", "pos", ",", "'DEFAULT_CASE_NOT_FOUND'", ",", "d", ".", "arg", ")", "else", ":", "# make sure there are no mandatory nodes in the default case", "def", "chk_no_defaults", "(", "s", ")", ":", "for", "c", "in", "s", ".", "i_children", ":", "if", "c", ".", "keyword", "in", "(", "'leaf'", ",", "'choice'", ")", ":", "m", "=", "c", ".", "search_one", "(", "'mandatory'", ")", "if", "m", "is", "not", "None", "and", "m", ".", "arg", "==", "'true'", ":", "err_add", "(", "ctx", ".", "errors", ",", "c", ".", "pos", ",", "'MANDATORY_NODE_IN_DEFAULT_CASE'", ",", "(", ")", ")", "elif", "c", ".", "keyword", "in", "(", "'list'", ",", "'leaf-list'", ")", ":", "m", "=", "c", ".", "search_one", "(", "'min-elements'", ")", "if", "m", "is", "not", "None", "and", "int", "(", "m", ".", "arg", ")", ">", "0", ":", "err_add", "(", "ctx", ".", "errors", ",", "c", ".", "pos", ",", "'MANDATORY_NODE_IN_DEFAULT_CASE'", ",", "(", ")", ")", "elif", "c", ".", "keyword", "==", "'container'", ":", "p", "=", "c", ".", "search_one", "(", "'presence'", ")", "if", "p", "==", "None", "or", "p", ".", "arg", "==", "'false'", ":", "chk_no_defaults", "(", "c", ")", "chk_no_defaults", "(", "ptr", ")" ]
Make sure that the default case exists
[ "Make", "sure", "that", "the", "default", "case", "exists" ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/statements.py#L1993-L2021
231,159
mbj4668/pyang
pyang/statements.py
v_reference_leaf_leafref
def v_reference_leaf_leafref(ctx, stmt): """Verify that all leafrefs in a leaf or leaf-list have correct path""" if (hasattr(stmt, 'i_leafref') and stmt.i_leafref is not None and stmt.i_leafref_expanded is False): path_type_spec = stmt.i_leafref not_req_inst = not(path_type_spec.require_instance) x = validate_leafref_path(ctx, stmt, path_type_spec.path_spec, path_type_spec.path_, accept_non_config_target=not_req_inst ) if x is None: return ptr, expanded_path, path_list = x path_type_spec.i_target_node = ptr path_type_spec.i_expanded_path = expanded_path path_type_spec.i_path_list = path_list stmt.i_leafref_expanded = True if ptr is not None: chk_status(ctx, stmt, ptr) stmt.i_leafref_ptr = (ptr, path_type_spec.pos)
python
def v_reference_leaf_leafref(ctx, stmt): """Verify that all leafrefs in a leaf or leaf-list have correct path""" if (hasattr(stmt, 'i_leafref') and stmt.i_leafref is not None and stmt.i_leafref_expanded is False): path_type_spec = stmt.i_leafref not_req_inst = not(path_type_spec.require_instance) x = validate_leafref_path(ctx, stmt, path_type_spec.path_spec, path_type_spec.path_, accept_non_config_target=not_req_inst ) if x is None: return ptr, expanded_path, path_list = x path_type_spec.i_target_node = ptr path_type_spec.i_expanded_path = expanded_path path_type_spec.i_path_list = path_list stmt.i_leafref_expanded = True if ptr is not None: chk_status(ctx, stmt, ptr) stmt.i_leafref_ptr = (ptr, path_type_spec.pos)
[ "def", "v_reference_leaf_leafref", "(", "ctx", ",", "stmt", ")", ":", "if", "(", "hasattr", "(", "stmt", ",", "'i_leafref'", ")", "and", "stmt", ".", "i_leafref", "is", "not", "None", "and", "stmt", ".", "i_leafref_expanded", "is", "False", ")", ":", "path_type_spec", "=", "stmt", ".", "i_leafref", "not_req_inst", "=", "not", "(", "path_type_spec", ".", "require_instance", ")", "x", "=", "validate_leafref_path", "(", "ctx", ",", "stmt", ",", "path_type_spec", ".", "path_spec", ",", "path_type_spec", ".", "path_", ",", "accept_non_config_target", "=", "not_req_inst", ")", "if", "x", "is", "None", ":", "return", "ptr", ",", "expanded_path", ",", "path_list", "=", "x", "path_type_spec", ".", "i_target_node", "=", "ptr", "path_type_spec", ".", "i_expanded_path", "=", "expanded_path", "path_type_spec", ".", "i_path_list", "=", "path_list", "stmt", ".", "i_leafref_expanded", "=", "True", "if", "ptr", "is", "not", "None", ":", "chk_status", "(", "ctx", ",", "stmt", ",", "ptr", ")", "stmt", ".", "i_leafref_ptr", "=", "(", "ptr", ",", "path_type_spec", ".", "pos", ")" ]
Verify that all leafrefs in a leaf or leaf-list have correct path
[ "Verify", "that", "all", "leafrefs", "in", "a", "leaf", "or", "leaf", "-", "list", "have", "correct", "path" ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/statements.py#L2023-L2045
231,160
mbj4668/pyang
pyang/statements.py
has_type
def has_type(type, names): """Return type with name if `type` has name as one of its base types, and name is in the `names` list. otherwise, return None.""" if type.arg in names: return type for t in type.search('type'): # check all union's member types r = has_type(t, names) if r is not None: return r if not hasattr(type, 'i_typedef'): return None if (type.i_typedef is not None and hasattr(type.i_typedef, 'i_is_circular') and type.i_typedef.i_is_circular == False): t = type.i_typedef.search_one('type') if t is not None: return has_type(t, names) return None
python
def has_type(type, names): """Return type with name if `type` has name as one of its base types, and name is in the `names` list. otherwise, return None.""" if type.arg in names: return type for t in type.search('type'): # check all union's member types r = has_type(t, names) if r is not None: return r if not hasattr(type, 'i_typedef'): return None if (type.i_typedef is not None and hasattr(type.i_typedef, 'i_is_circular') and type.i_typedef.i_is_circular == False): t = type.i_typedef.search_one('type') if t is not None: return has_type(t, names) return None
[ "def", "has_type", "(", "type", ",", "names", ")", ":", "if", "type", ".", "arg", "in", "names", ":", "return", "type", "for", "t", "in", "type", ".", "search", "(", "'type'", ")", ":", "# check all union's member types", "r", "=", "has_type", "(", "t", ",", "names", ")", "if", "r", "is", "not", "None", ":", "return", "r", "if", "not", "hasattr", "(", "type", ",", "'i_typedef'", ")", ":", "return", "None", "if", "(", "type", ".", "i_typedef", "is", "not", "None", "and", "hasattr", "(", "type", ".", "i_typedef", ",", "'i_is_circular'", ")", "and", "type", ".", "i_typedef", ".", "i_is_circular", "==", "False", ")", ":", "t", "=", "type", ".", "i_typedef", ".", "search_one", "(", "'type'", ")", "if", "t", "is", "not", "None", ":", "return", "has_type", "(", "t", ",", "names", ")", "return", "None" ]
Return type with name if `type` has name as one of its base types, and name is in the `names` list. otherwise, return None.
[ "Return", "type", "with", "name", "if", "type", "has", "name", "as", "one", "of", "its", "base", "types", "and", "name", "is", "in", "the", "names", "list", ".", "otherwise", "return", "None", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/statements.py#L2268-L2285
231,161
mbj4668/pyang
pyang/statements.py
search_typedef
def search_typedef(stmt, name): """Search for a typedef in scope First search the hierarchy, then the module and its submodules.""" mod = stmt.i_orig_module while stmt is not None: if name in stmt.i_typedefs: t = stmt.i_typedefs[name] if (mod is not None and mod != t.i_orig_module and t.i_orig_module.keyword == 'submodule'): # make sure this submodule is included if mod.search_one('include', t.i_orig_module.arg) is None: return None return t stmt = stmt.parent return None
python
def search_typedef(stmt, name): """Search for a typedef in scope First search the hierarchy, then the module and its submodules.""" mod = stmt.i_orig_module while stmt is not None: if name in stmt.i_typedefs: t = stmt.i_typedefs[name] if (mod is not None and mod != t.i_orig_module and t.i_orig_module.keyword == 'submodule'): # make sure this submodule is included if mod.search_one('include', t.i_orig_module.arg) is None: return None return t stmt = stmt.parent return None
[ "def", "search_typedef", "(", "stmt", ",", "name", ")", ":", "mod", "=", "stmt", ".", "i_orig_module", "while", "stmt", "is", "not", "None", ":", "if", "name", "in", "stmt", ".", "i_typedefs", ":", "t", "=", "stmt", ".", "i_typedefs", "[", "name", "]", "if", "(", "mod", "is", "not", "None", "and", "mod", "!=", "t", ".", "i_orig_module", "and", "t", ".", "i_orig_module", ".", "keyword", "==", "'submodule'", ")", ":", "# make sure this submodule is included", "if", "mod", ".", "search_one", "(", "'include'", ",", "t", ".", "i_orig_module", ".", "arg", ")", "is", "None", ":", "return", "None", "return", "t", "stmt", "=", "stmt", ".", "parent", "return", "None" ]
Search for a typedef in scope First search the hierarchy, then the module and its submodules.
[ "Search", "for", "a", "typedef", "in", "scope", "First", "search", "the", "hierarchy", "then", "the", "module", "and", "its", "submodules", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/statements.py#L2318-L2333
231,162
mbj4668/pyang
pyang/statements.py
search_grouping
def search_grouping(stmt, name): """Search for a grouping in scope First search the hierarchy, then the module and its submodules.""" mod = stmt.i_orig_module while stmt is not None: if name in stmt.i_groupings: g = stmt.i_groupings[name] if (mod is not None and mod != g.i_orig_module and g.i_orig_module.keyword == 'submodule'): # make sure this submodule is included if mod.search_one('include', g.i_orig_module.arg) is None: return None return g stmt = stmt.parent return None
python
def search_grouping(stmt, name): """Search for a grouping in scope First search the hierarchy, then the module and its submodules.""" mod = stmt.i_orig_module while stmt is not None: if name in stmt.i_groupings: g = stmt.i_groupings[name] if (mod is not None and mod != g.i_orig_module and g.i_orig_module.keyword == 'submodule'): # make sure this submodule is included if mod.search_one('include', g.i_orig_module.arg) is None: return None return g stmt = stmt.parent return None
[ "def", "search_grouping", "(", "stmt", ",", "name", ")", ":", "mod", "=", "stmt", ".", "i_orig_module", "while", "stmt", "is", "not", "None", ":", "if", "name", "in", "stmt", ".", "i_groupings", ":", "g", "=", "stmt", ".", "i_groupings", "[", "name", "]", "if", "(", "mod", "is", "not", "None", "and", "mod", "!=", "g", ".", "i_orig_module", "and", "g", ".", "i_orig_module", ".", "keyword", "==", "'submodule'", ")", ":", "# make sure this submodule is included", "if", "mod", ".", "search_one", "(", "'include'", ",", "g", ".", "i_orig_module", ".", "arg", ")", "is", "None", ":", "return", "None", "return", "g", "stmt", "=", "stmt", ".", "parent", "return", "None" ]
Search for a grouping in scope First search the hierarchy, then the module and its submodules.
[ "Search", "for", "a", "grouping", "in", "scope", "First", "search", "the", "hierarchy", "then", "the", "module", "and", "its", "submodules", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/statements.py#L2335-L2350
231,163
mbj4668/pyang
pyang/statements.py
is_submodule_included
def is_submodule_included(src, tgt): """Check that the tgt's submodule is included by src, if they belong to the same module.""" if tgt is None or not hasattr(tgt, 'i_orig_module'): return True if (tgt.i_orig_module.keyword == 'submodule' and src.i_orig_module != tgt.i_orig_module and src.i_orig_module.i_modulename == tgt.i_orig_module.i_modulename): if src.i_orig_module.search_one('include', tgt.i_orig_module.arg) is None: return False return True
python
def is_submodule_included(src, tgt): """Check that the tgt's submodule is included by src, if they belong to the same module.""" if tgt is None or not hasattr(tgt, 'i_orig_module'): return True if (tgt.i_orig_module.keyword == 'submodule' and src.i_orig_module != tgt.i_orig_module and src.i_orig_module.i_modulename == tgt.i_orig_module.i_modulename): if src.i_orig_module.search_one('include', tgt.i_orig_module.arg) is None: return False return True
[ "def", "is_submodule_included", "(", "src", ",", "tgt", ")", ":", "if", "tgt", "is", "None", "or", "not", "hasattr", "(", "tgt", ",", "'i_orig_module'", ")", ":", "return", "True", "if", "(", "tgt", ".", "i_orig_module", ".", "keyword", "==", "'submodule'", "and", "src", ".", "i_orig_module", "!=", "tgt", ".", "i_orig_module", "and", "src", ".", "i_orig_module", ".", "i_modulename", "==", "tgt", ".", "i_orig_module", ".", "i_modulename", ")", ":", "if", "src", ".", "i_orig_module", ".", "search_one", "(", "'include'", ",", "tgt", ".", "i_orig_module", ".", "arg", ")", "is", "None", ":", "return", "False", "return", "True" ]
Check that the tgt's submodule is included by src, if they belong to the same module.
[ "Check", "that", "the", "tgt", "s", "submodule", "is", "included", "by", "src", "if", "they", "belong", "to", "the", "same", "module", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/statements.py#L2466-L2477
231,164
mbj4668/pyang
pyang/statements.py
mk_path_str
def mk_path_str(stmt, with_prefixes=False, prefix_onchange=False, prefix_to_module=False, resolve_top_prefix_to_module=False): """Returns the XPath path of the node. with_prefixes indicates whether or not to prefix every node. prefix_onchange modifies the behavior of with_prefixes and only adds prefixes when the prefix changes mid-XPath. prefix_to_module replaces prefixes with the module name of the prefix. resolve_top_prefix_to_module resolves the module-level prefix to the module name. Prefixes may be included in the path if the prefix changes mid-path. """ resolved_names = mk_path_list(stmt) xpath_elements = [] last_prefix = None for index, resolved_name in enumerate(resolved_names): module_name, prefix, node_name = resolved_name xpath_element = node_name if with_prefixes or (prefix_onchange and prefix != last_prefix): new_prefix = prefix if (prefix_to_module or (index == 0 and resolve_top_prefix_to_module)): new_prefix = module_name xpath_element = '%s:%s' % (new_prefix, node_name) xpath_elements.append(xpath_element) last_prefix = prefix return '/%s' % '/'.join(xpath_elements)
python
def mk_path_str(stmt, with_prefixes=False, prefix_onchange=False, prefix_to_module=False, resolve_top_prefix_to_module=False): """Returns the XPath path of the node. with_prefixes indicates whether or not to prefix every node. prefix_onchange modifies the behavior of with_prefixes and only adds prefixes when the prefix changes mid-XPath. prefix_to_module replaces prefixes with the module name of the prefix. resolve_top_prefix_to_module resolves the module-level prefix to the module name. Prefixes may be included in the path if the prefix changes mid-path. """ resolved_names = mk_path_list(stmt) xpath_elements = [] last_prefix = None for index, resolved_name in enumerate(resolved_names): module_name, prefix, node_name = resolved_name xpath_element = node_name if with_prefixes or (prefix_onchange and prefix != last_prefix): new_prefix = prefix if (prefix_to_module or (index == 0 and resolve_top_prefix_to_module)): new_prefix = module_name xpath_element = '%s:%s' % (new_prefix, node_name) xpath_elements.append(xpath_element) last_prefix = prefix return '/%s' % '/'.join(xpath_elements)
[ "def", "mk_path_str", "(", "stmt", ",", "with_prefixes", "=", "False", ",", "prefix_onchange", "=", "False", ",", "prefix_to_module", "=", "False", ",", "resolve_top_prefix_to_module", "=", "False", ")", ":", "resolved_names", "=", "mk_path_list", "(", "stmt", ")", "xpath_elements", "=", "[", "]", "last_prefix", "=", "None", "for", "index", ",", "resolved_name", "in", "enumerate", "(", "resolved_names", ")", ":", "module_name", ",", "prefix", ",", "node_name", "=", "resolved_name", "xpath_element", "=", "node_name", "if", "with_prefixes", "or", "(", "prefix_onchange", "and", "prefix", "!=", "last_prefix", ")", ":", "new_prefix", "=", "prefix", "if", "(", "prefix_to_module", "or", "(", "index", "==", "0", "and", "resolve_top_prefix_to_module", ")", ")", ":", "new_prefix", "=", "module_name", "xpath_element", "=", "'%s:%s'", "%", "(", "new_prefix", ",", "node_name", ")", "xpath_elements", ".", "append", "(", "xpath_element", ")", "last_prefix", "=", "prefix", "return", "'/%s'", "%", "'/'", ".", "join", "(", "xpath_elements", ")" ]
Returns the XPath path of the node. with_prefixes indicates whether or not to prefix every node. prefix_onchange modifies the behavior of with_prefixes and only adds prefixes when the prefix changes mid-XPath. prefix_to_module replaces prefixes with the module name of the prefix. resolve_top_prefix_to_module resolves the module-level prefix to the module name. Prefixes may be included in the path if the prefix changes mid-path.
[ "Returns", "the", "XPath", "path", "of", "the", "node", ".", "with_prefixes", "indicates", "whether", "or", "not", "to", "prefix", "every", "node", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/statements.py#L3107-L3139
231,165
mbj4668/pyang
pyang/statements.py
get_xpath
def get_xpath(stmt, qualified=False, prefix_to_module=False): """Gets the XPath of the statement. Unless qualified=True, does not include prefixes unless the prefix changes mid-XPath. qualified will add a prefix to each node. prefix_to_module will resolve prefixes to module names instead. For RFC 8040, set prefix_to_module=True: /prefix:root/node/prefix:node/... qualified=True: /prefix:root/prefix:node/prefix:node/... qualified=True, prefix_to_module=True: /module:root/module:node/module:node/... prefix_to_module=True: /module:root/node/module:node/... """ return mk_path_str(stmt, with_prefixes=qualified, prefix_onchange=True, prefix_to_module=prefix_to_module)
python
def get_xpath(stmt, qualified=False, prefix_to_module=False): """Gets the XPath of the statement. Unless qualified=True, does not include prefixes unless the prefix changes mid-XPath. qualified will add a prefix to each node. prefix_to_module will resolve prefixes to module names instead. For RFC 8040, set prefix_to_module=True: /prefix:root/node/prefix:node/... qualified=True: /prefix:root/prefix:node/prefix:node/... qualified=True, prefix_to_module=True: /module:root/module:node/module:node/... prefix_to_module=True: /module:root/node/module:node/... """ return mk_path_str(stmt, with_prefixes=qualified, prefix_onchange=True, prefix_to_module=prefix_to_module)
[ "def", "get_xpath", "(", "stmt", ",", "qualified", "=", "False", ",", "prefix_to_module", "=", "False", ")", ":", "return", "mk_path_str", "(", "stmt", ",", "with_prefixes", "=", "qualified", ",", "prefix_onchange", "=", "True", ",", "prefix_to_module", "=", "prefix_to_module", ")" ]
Gets the XPath of the statement. Unless qualified=True, does not include prefixes unless the prefix changes mid-XPath. qualified will add a prefix to each node. prefix_to_module will resolve prefixes to module names instead. For RFC 8040, set prefix_to_module=True: /prefix:root/node/prefix:node/... qualified=True: /prefix:root/prefix:node/prefix:node/... qualified=True, prefix_to_module=True: /module:root/module:node/module:node/... prefix_to_module=True: /module:root/node/module:node/...
[ "Gets", "the", "XPath", "of", "the", "statement", ".", "Unless", "qualified", "=", "True", "does", "not", "include", "prefixes", "unless", "the", "prefix", "changes", "mid", "-", "XPath", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/statements.py#L3141-L3161
231,166
mbj4668/pyang
pyang/statements.py
get_qualified_type
def get_qualified_type(stmt): """Gets the qualified, top-level type of the node. This enters the typedef if defined instead of using the prefix to ensure absolute distinction. """ type_obj = stmt.search_one('type') fq_type_name = None if type_obj: if getattr(type_obj, 'i_typedef', None): # If type_obj has typedef, substitute. # Absolute module:type instead of prefix:type type_obj = type_obj.i_typedef type_name = type_obj.arg if check_primitive_type(type_obj): # Doesn't make sense to qualify a primitive..I think. fq_type_name = type_name else: type_module = type_obj.i_orig_module.arg fq_type_name = '%s:%s' % (type_module, type_name) return fq_type_name
python
def get_qualified_type(stmt): """Gets the qualified, top-level type of the node. This enters the typedef if defined instead of using the prefix to ensure absolute distinction. """ type_obj = stmt.search_one('type') fq_type_name = None if type_obj: if getattr(type_obj, 'i_typedef', None): # If type_obj has typedef, substitute. # Absolute module:type instead of prefix:type type_obj = type_obj.i_typedef type_name = type_obj.arg if check_primitive_type(type_obj): # Doesn't make sense to qualify a primitive..I think. fq_type_name = type_name else: type_module = type_obj.i_orig_module.arg fq_type_name = '%s:%s' % (type_module, type_name) return fq_type_name
[ "def", "get_qualified_type", "(", "stmt", ")", ":", "type_obj", "=", "stmt", ".", "search_one", "(", "'type'", ")", "fq_type_name", "=", "None", "if", "type_obj", ":", "if", "getattr", "(", "type_obj", ",", "'i_typedef'", ",", "None", ")", ":", "# If type_obj has typedef, substitute.", "# Absolute module:type instead of prefix:type", "type_obj", "=", "type_obj", ".", "i_typedef", "type_name", "=", "type_obj", ".", "arg", "if", "check_primitive_type", "(", "type_obj", ")", ":", "# Doesn't make sense to qualify a primitive..I think.", "fq_type_name", "=", "type_name", "else", ":", "type_module", "=", "type_obj", ".", "i_orig_module", ".", "arg", "fq_type_name", "=", "'%s:%s'", "%", "(", "type_module", ",", "type_name", ")", "return", "fq_type_name" ]
Gets the qualified, top-level type of the node. This enters the typedef if defined instead of using the prefix to ensure absolute distinction.
[ "Gets", "the", "qualified", "top", "-", "level", "type", "of", "the", "node", ".", "This", "enters", "the", "typedef", "if", "defined", "instead", "of", "using", "the", "prefix", "to", "ensure", "absolute", "distinction", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/statements.py#L3171-L3190
231,167
mbj4668/pyang
pyang/statements.py
get_primitive_type
def get_primitive_type(stmt): """Recurses through the typedefs and returns the most primitive YANG type defined. """ type_obj = stmt.search_one('type') type_name = getattr(type_obj, 'arg', None) typedef_obj = getattr(type_obj, 'i_typedef', None) if typedef_obj: type_name = get_primitive_type(typedef_obj) elif type_obj and not check_primitive_type(type_obj): raise Exception('%s is not a primitive! Incomplete parse tree?' % type_name) return type_name
python
def get_primitive_type(stmt): """Recurses through the typedefs and returns the most primitive YANG type defined. """ type_obj = stmt.search_one('type') type_name = getattr(type_obj, 'arg', None) typedef_obj = getattr(type_obj, 'i_typedef', None) if typedef_obj: type_name = get_primitive_type(typedef_obj) elif type_obj and not check_primitive_type(type_obj): raise Exception('%s is not a primitive! Incomplete parse tree?' % type_name) return type_name
[ "def", "get_primitive_type", "(", "stmt", ")", ":", "type_obj", "=", "stmt", ".", "search_one", "(", "'type'", ")", "type_name", "=", "getattr", "(", "type_obj", ",", "'arg'", ",", "None", ")", "typedef_obj", "=", "getattr", "(", "type_obj", ",", "'i_typedef'", ",", "None", ")", "if", "typedef_obj", ":", "type_name", "=", "get_primitive_type", "(", "typedef_obj", ")", "elif", "type_obj", "and", "not", "check_primitive_type", "(", "type_obj", ")", ":", "raise", "Exception", "(", "'%s is not a primitive! Incomplete parse tree?'", "%", "type_name", ")", "return", "type_name" ]
Recurses through the typedefs and returns the most primitive YANG type defined.
[ "Recurses", "through", "the", "typedefs", "and", "returns", "the", "most", "primitive", "YANG", "type", "defined", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/statements.py#L3192-L3204
231,168
mbj4668/pyang
pyang/statements.py
Statement.search
def search(self, keyword, children=None, arg=None): """Return list of receiver's substmts with `keyword`. """ if children is None: children = self.substmts return [ ch for ch in children if (ch.keyword == keyword and (arg is None or ch.arg == arg))]
python
def search(self, keyword, children=None, arg=None): """Return list of receiver's substmts with `keyword`. """ if children is None: children = self.substmts return [ ch for ch in children if (ch.keyword == keyword and (arg is None or ch.arg == arg))]
[ "def", "search", "(", "self", ",", "keyword", ",", "children", "=", "None", ",", "arg", "=", "None", ")", ":", "if", "children", "is", "None", ":", "children", "=", "self", ".", "substmts", "return", "[", "ch", "for", "ch", "in", "children", "if", "(", "ch", ".", "keyword", "==", "keyword", "and", "(", "arg", "is", "None", "or", "ch", ".", "arg", "==", "arg", ")", ")", "]" ]
Return list of receiver's substmts with `keyword`.
[ "Return", "list", "of", "receiver", "s", "substmts", "with", "keyword", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/statements.py#L2819-L2826
231,169
mbj4668/pyang
pyang/statements.py
Statement.search_one
def search_one(self, keyword, arg=None, children=None): """Return receiver's substmt with `keyword` and optionally `arg`. """ if children is None: children = self.substmts for ch in children: if ch.keyword == keyword and (arg is None or ch.arg == arg): return ch return None
python
def search_one(self, keyword, arg=None, children=None): """Return receiver's substmt with `keyword` and optionally `arg`. """ if children is None: children = self.substmts for ch in children: if ch.keyword == keyword and (arg is None or ch.arg == arg): return ch return None
[ "def", "search_one", "(", "self", ",", "keyword", ",", "arg", "=", "None", ",", "children", "=", "None", ")", ":", "if", "children", "is", "None", ":", "children", "=", "self", ".", "substmts", "for", "ch", "in", "children", ":", "if", "ch", ".", "keyword", "==", "keyword", "and", "(", "arg", "is", "None", "or", "ch", ".", "arg", "==", "arg", ")", ":", "return", "ch", "return", "None" ]
Return receiver's substmt with `keyword` and optionally `arg`.
[ "Return", "receiver", "s", "substmt", "with", "keyword", "and", "optionally", "arg", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/statements.py#L2828-L2836
231,170
mbj4668/pyang
pyang/statements.py
Statement.main_module
def main_module(self): """Return the main module to which the receiver belongs.""" if self.i_module.keyword == "submodule": return self.i_module.i_ctx.get_module( self.i_module.i_including_modulename) return self.i_module
python
def main_module(self): """Return the main module to which the receiver belongs.""" if self.i_module.keyword == "submodule": return self.i_module.i_ctx.get_module( self.i_module.i_including_modulename) return self.i_module
[ "def", "main_module", "(", "self", ")", ":", "if", "self", ".", "i_module", ".", "keyword", "==", "\"submodule\"", ":", "return", "self", ".", "i_module", ".", "i_ctx", ".", "get_module", "(", "self", ".", "i_module", ".", "i_including_modulename", ")", "return", "self", ".", "i_module" ]
Return the main module to which the receiver belongs.
[ "Return", "the", "main", "module", "to", "which", "the", "receiver", "belongs", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/statements.py#L2866-L2871
231,171
mbj4668/pyang
pyang/xpath.py
add_prefix
def add_prefix(prefix, s): "Add `prefix` to all unprefixed names in `s`" # tokenize the XPath expression toks = xpath_lexer.scan(s) # add default prefix to unprefixed names toks2 = [_add_prefix(prefix, tok) for tok in toks] # build a string of the patched expression ls = [x.value for x in toks2] return ''.join(ls)
python
def add_prefix(prefix, s): "Add `prefix` to all unprefixed names in `s`" # tokenize the XPath expression toks = xpath_lexer.scan(s) # add default prefix to unprefixed names toks2 = [_add_prefix(prefix, tok) for tok in toks] # build a string of the patched expression ls = [x.value for x in toks2] return ''.join(ls)
[ "def", "add_prefix", "(", "prefix", ",", "s", ")", ":", "# tokenize the XPath expression", "toks", "=", "xpath_lexer", ".", "scan", "(", "s", ")", "# add default prefix to unprefixed names", "toks2", "=", "[", "_add_prefix", "(", "prefix", ",", "tok", ")", "for", "tok", "in", "toks", "]", "# build a string of the patched expression", "ls", "=", "[", "x", ".", "value", "for", "x", "in", "toks2", "]", "return", "''", ".", "join", "(", "ls", ")" ]
Add `prefix` to all unprefixed names in `s`
[ "Add", "prefix", "to", "all", "unprefixed", "names", "in", "s" ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/xpath.py#L56-L64
231,172
mbj4668/pyang
pyang/syntax.py
chk_date_arg
def chk_date_arg(s): """Checks if the string `s` is a valid date string. Return True of False.""" if re_date.search(s) is None: return False comp = s.split('-') try: dt = datetime.date(int(comp[0]), int(comp[1]), int(comp[2])) return True except Exception as e: return False
python
def chk_date_arg(s): """Checks if the string `s` is a valid date string. Return True of False.""" if re_date.search(s) is None: return False comp = s.split('-') try: dt = datetime.date(int(comp[0]), int(comp[1]), int(comp[2])) return True except Exception as e: return False
[ "def", "chk_date_arg", "(", "s", ")", ":", "if", "re_date", ".", "search", "(", "s", ")", "is", "None", ":", "return", "False", "comp", "=", "s", ".", "split", "(", "'-'", ")", "try", ":", "dt", "=", "datetime", ".", "date", "(", "int", "(", "comp", "[", "0", "]", ")", ",", "int", "(", "comp", "[", "1", "]", ")", ",", "int", "(", "comp", "[", "2", "]", ")", ")", "return", "True", "except", "Exception", "as", "e", ":", "return", "False" ]
Checks if the string `s` is a valid date string. Return True of False.
[ "Checks", "if", "the", "string", "s", "is", "a", "valid", "date", "string", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/syntax.py#L173-L184
231,173
mbj4668/pyang
pyang/syntax.py
chk_enum_arg
def chk_enum_arg(s): """Checks if the string `s` is a valid enum string. Return True or False.""" if len(s) == 0 or s[0].isspace() or s[-1].isspace(): return False else: return True
python
def chk_enum_arg(s): """Checks if the string `s` is a valid enum string. Return True or False.""" if len(s) == 0 or s[0].isspace() or s[-1].isspace(): return False else: return True
[ "def", "chk_enum_arg", "(", "s", ")", ":", "if", "len", "(", "s", ")", "==", "0", "or", "s", "[", "0", "]", ".", "isspace", "(", ")", "or", "s", "[", "-", "1", "]", ".", "isspace", "(", ")", ":", "return", "False", "else", ":", "return", "True" ]
Checks if the string `s` is a valid enum string. Return True or False.
[ "Checks", "if", "the", "string", "s", "is", "a", "valid", "enum", "string", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/syntax.py#L186-L194
231,174
mbj4668/pyang
pyang/syntax.py
chk_fraction_digits_arg
def chk_fraction_digits_arg(s): """Checks if the string `s` is a valid fraction-digits argument. Return True or False.""" try: v = int(s) if v >= 1 and v <= 18: return True else: return False except ValueError: return False
python
def chk_fraction_digits_arg(s): """Checks if the string `s` is a valid fraction-digits argument. Return True or False.""" try: v = int(s) if v >= 1 and v <= 18: return True else: return False except ValueError: return False
[ "def", "chk_fraction_digits_arg", "(", "s", ")", ":", "try", ":", "v", "=", "int", "(", "s", ")", "if", "v", ">=", "1", "and", "v", "<=", "18", ":", "return", "True", "else", ":", "return", "False", "except", "ValueError", ":", "return", "False" ]
Checks if the string `s` is a valid fraction-digits argument. Return True or False.
[ "Checks", "if", "the", "string", "s", "is", "a", "valid", "fraction", "-", "digits", "argument", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/syntax.py#L196-L207
231,175
mbj4668/pyang
pyang/translators/dsdl.py
Patch.combine
def combine(self, patch): """Add `patch.plist` to `self.plist`.""" exclusive = set(["config", "default", "mandatory", "presence", "min-elements", "max-elements"]) kws = set([s.keyword for s in self.plist]) & exclusive add = [n for n in patch.plist if n.keyword not in kws] self.plist.extend(add)
python
def combine(self, patch): """Add `patch.plist` to `self.plist`.""" exclusive = set(["config", "default", "mandatory", "presence", "min-elements", "max-elements"]) kws = set([s.keyword for s in self.plist]) & exclusive add = [n for n in patch.plist if n.keyword not in kws] self.plist.extend(add)
[ "def", "combine", "(", "self", ",", "patch", ")", ":", "exclusive", "=", "set", "(", "[", "\"config\"", ",", "\"default\"", ",", "\"mandatory\"", ",", "\"presence\"", ",", "\"min-elements\"", ",", "\"max-elements\"", "]", ")", "kws", "=", "set", "(", "[", "s", ".", "keyword", "for", "s", "in", "self", ".", "plist", "]", ")", "&", "exclusive", "add", "=", "[", "n", "for", "n", "in", "patch", ".", "plist", "if", "n", ".", "keyword", "not", "in", "kws", "]", "self", ".", "plist", ".", "extend", "(", "add", ")" ]
Add `patch.plist` to `self.plist`.
[ "Add", "patch", ".", "plist", "to", "self", ".", "plist", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/dsdl.py#L136-L142
231,176
mbj4668/pyang
pyang/translators/dsdl.py
HybridDSDLSchema.serialize
def serialize(self): """Return the string representation of the receiver.""" res = '<?xml version="1.0" encoding="UTF-8"?>' for ns in self.namespaces: self.top_grammar.attr["xmlns:" + self.namespaces[ns]] = ns res += self.top_grammar.start_tag() for ch in self.top_grammar.children: res += ch.serialize() res += self.tree.serialize() for d in self.global_defs: res += self.global_defs[d].serialize() for i in self.identities: res += self.identities[i].serialize() return res + self.top_grammar.end_tag()
python
def serialize(self): """Return the string representation of the receiver.""" res = '<?xml version="1.0" encoding="UTF-8"?>' for ns in self.namespaces: self.top_grammar.attr["xmlns:" + self.namespaces[ns]] = ns res += self.top_grammar.start_tag() for ch in self.top_grammar.children: res += ch.serialize() res += self.tree.serialize() for d in self.global_defs: res += self.global_defs[d].serialize() for i in self.identities: res += self.identities[i].serialize() return res + self.top_grammar.end_tag()
[ "def", "serialize", "(", "self", ")", ":", "res", "=", "'<?xml version=\"1.0\" encoding=\"UTF-8\"?>'", "for", "ns", "in", "self", ".", "namespaces", ":", "self", ".", "top_grammar", ".", "attr", "[", "\"xmlns:\"", "+", "self", ".", "namespaces", "[", "ns", "]", "]", "=", "ns", "res", "+=", "self", ".", "top_grammar", ".", "start_tag", "(", ")", "for", "ch", "in", "self", ".", "top_grammar", ".", "children", ":", "res", "+=", "ch", ".", "serialize", "(", ")", "res", "+=", "self", ".", "tree", ".", "serialize", "(", ")", "for", "d", "in", "self", ".", "global_defs", ":", "res", "+=", "self", ".", "global_defs", "[", "d", "]", ".", "serialize", "(", ")", "for", "i", "in", "self", ".", "identities", ":", "res", "+=", "self", ".", "identities", "[", "i", "]", ".", "serialize", "(", ")", "return", "res", "+", "self", ".", "top_grammar", ".", "end_tag", "(", ")" ]
Return the string representation of the receiver.
[ "Return", "the", "string", "representation", "of", "the", "receiver", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/dsdl.py#L340-L353
231,177
mbj4668/pyang
pyang/translators/dsdl.py
HybridDSDLSchema.setup_top
def setup_top(self): """Create top-level elements of the hybrid schema.""" self.top_grammar = SchemaNode("grammar") self.top_grammar.attr = { "xmlns": "http://relaxng.org/ns/structure/1.0", "datatypeLibrary": "http://www.w3.org/2001/XMLSchema-datatypes"} self.tree = SchemaNode("start")
python
def setup_top(self): """Create top-level elements of the hybrid schema.""" self.top_grammar = SchemaNode("grammar") self.top_grammar.attr = { "xmlns": "http://relaxng.org/ns/structure/1.0", "datatypeLibrary": "http://www.w3.org/2001/XMLSchema-datatypes"} self.tree = SchemaNode("start")
[ "def", "setup_top", "(", "self", ")", ":", "self", ".", "top_grammar", "=", "SchemaNode", "(", "\"grammar\"", ")", "self", ".", "top_grammar", ".", "attr", "=", "{", "\"xmlns\"", ":", "\"http://relaxng.org/ns/structure/1.0\"", ",", "\"datatypeLibrary\"", ":", "\"http://www.w3.org/2001/XMLSchema-datatypes\"", "}", "self", ".", "tree", "=", "SchemaNode", "(", "\"start\"", ")" ]
Create top-level elements of the hybrid schema.
[ "Create", "top", "-", "level", "elements", "of", "the", "hybrid", "schema", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/dsdl.py#L424-L430
231,178
mbj4668/pyang
pyang/translators/dsdl.py
HybridDSDLSchema.create_roots
def create_roots(self, yam): """Create the top-level structure for module `yam`.""" self.local_grammar = SchemaNode("grammar") self.local_grammar.attr = { "ns": yam.search_one("namespace").arg, "nma:module": self.module.arg} src_text = "YANG module '%s'" % yam.arg revs = yam.search("revision") if len(revs) > 0: src_text += " revision %s" % self.current_revision(revs) self.dc_element(self.local_grammar, "source", src_text) start = SchemaNode("start", self.local_grammar) self.data = SchemaNode("nma:data", start, interleave=True) self.data.occur = 2 self.rpcs = SchemaNode("nma:rpcs", start, interleave=False) self.notifications = SchemaNode("nma:notifications", start, interleave=False)
python
def create_roots(self, yam): """Create the top-level structure for module `yam`.""" self.local_grammar = SchemaNode("grammar") self.local_grammar.attr = { "ns": yam.search_one("namespace").arg, "nma:module": self.module.arg} src_text = "YANG module '%s'" % yam.arg revs = yam.search("revision") if len(revs) > 0: src_text += " revision %s" % self.current_revision(revs) self.dc_element(self.local_grammar, "source", src_text) start = SchemaNode("start", self.local_grammar) self.data = SchemaNode("nma:data", start, interleave=True) self.data.occur = 2 self.rpcs = SchemaNode("nma:rpcs", start, interleave=False) self.notifications = SchemaNode("nma:notifications", start, interleave=False)
[ "def", "create_roots", "(", "self", ",", "yam", ")", ":", "self", ".", "local_grammar", "=", "SchemaNode", "(", "\"grammar\"", ")", "self", ".", "local_grammar", ".", "attr", "=", "{", "\"ns\"", ":", "yam", ".", "search_one", "(", "\"namespace\"", ")", ".", "arg", ",", "\"nma:module\"", ":", "self", ".", "module", ".", "arg", "}", "src_text", "=", "\"YANG module '%s'\"", "%", "yam", ".", "arg", "revs", "=", "yam", ".", "search", "(", "\"revision\"", ")", "if", "len", "(", "revs", ")", ">", "0", ":", "src_text", "+=", "\" revision %s\"", "%", "self", ".", "current_revision", "(", "revs", ")", "self", ".", "dc_element", "(", "self", ".", "local_grammar", ",", "\"source\"", ",", "src_text", ")", "start", "=", "SchemaNode", "(", "\"start\"", ",", "self", ".", "local_grammar", ")", "self", ".", "data", "=", "SchemaNode", "(", "\"nma:data\"", ",", "start", ",", "interleave", "=", "True", ")", "self", ".", "data", ".", "occur", "=", "2", "self", ".", "rpcs", "=", "SchemaNode", "(", "\"nma:rpcs\"", ",", "start", ",", "interleave", "=", "False", ")", "self", ".", "notifications", "=", "SchemaNode", "(", "\"nma:notifications\"", ",", "start", ",", "interleave", "=", "False", ")" ]
Create the top-level structure for module `yam`.
[ "Create", "the", "top", "-", "level", "structure", "for", "module", "yam", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/dsdl.py#L432-L448
231,179
mbj4668/pyang
pyang/translators/dsdl.py
HybridDSDLSchema.yang_to_xpath
def yang_to_xpath(self, xpe): """Transform YANG's `xpath` to a form suitable for Schematron. 1. Prefixes are added to unprefixed local names. Inside global groupings, the prefix is represented as the variable '$pref' which is substituted via Schematron abstract patterns. 2. '$root' is prepended to every absolute location path. """ if self.gg_level: pref = "$pref:" else: pref = self.prefix_stack[-1] + ":" toks = xpath_lexer.scan(xpe) prev = None res = "" for tok in toks: if (tok.type == "SLASH" and prev not in ("DOT", "DOTDOT", "RPAREN", "RBRACKET", "name", "wildcard", "prefix_test")): res += "$root" elif tok.type == "name" and ":" not in tok.value: res += pref res += tok.value if tok.type != "_whitespace": prev = tok.type return res
python
def yang_to_xpath(self, xpe): """Transform YANG's `xpath` to a form suitable for Schematron. 1. Prefixes are added to unprefixed local names. Inside global groupings, the prefix is represented as the variable '$pref' which is substituted via Schematron abstract patterns. 2. '$root' is prepended to every absolute location path. """ if self.gg_level: pref = "$pref:" else: pref = self.prefix_stack[-1] + ":" toks = xpath_lexer.scan(xpe) prev = None res = "" for tok in toks: if (tok.type == "SLASH" and prev not in ("DOT", "DOTDOT", "RPAREN", "RBRACKET", "name", "wildcard", "prefix_test")): res += "$root" elif tok.type == "name" and ":" not in tok.value: res += pref res += tok.value if tok.type != "_whitespace": prev = tok.type return res
[ "def", "yang_to_xpath", "(", "self", ",", "xpe", ")", ":", "if", "self", ".", "gg_level", ":", "pref", "=", "\"$pref:\"", "else", ":", "pref", "=", "self", ".", "prefix_stack", "[", "-", "1", "]", "+", "\":\"", "toks", "=", "xpath_lexer", ".", "scan", "(", "xpe", ")", "prev", "=", "None", "res", "=", "\"\"", "for", "tok", "in", "toks", ":", "if", "(", "tok", ".", "type", "==", "\"SLASH\"", "and", "prev", "not", "in", "(", "\"DOT\"", ",", "\"DOTDOT\"", ",", "\"RPAREN\"", ",", "\"RBRACKET\"", ",", "\"name\"", ",", "\"wildcard\"", ",", "\"prefix_test\"", ")", ")", ":", "res", "+=", "\"$root\"", "elif", "tok", ".", "type", "==", "\"name\"", "and", "\":\"", "not", "in", "tok", ".", "value", ":", "res", "+=", "pref", "res", "+=", "tok", ".", "value", "if", "tok", ".", "type", "!=", "\"_whitespace\"", ":", "prev", "=", "tok", ".", "type", "return", "res" ]
Transform YANG's `xpath` to a form suitable for Schematron. 1. Prefixes are added to unprefixed local names. Inside global groupings, the prefix is represented as the variable '$pref' which is substituted via Schematron abstract patterns. 2. '$root' is prepended to every absolute location path.
[ "Transform", "YANG", "s", "xpath", "to", "a", "form", "suitable", "for", "Schematron", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/dsdl.py#L450-L475
231,180
mbj4668/pyang
pyang/translators/dsdl.py
HybridDSDLSchema.register_identity
def register_identity(self, id_stmt): """Register `id_stmt` with its base identity, if any. """ bst = id_stmt.search_one("base") if bst: bder = self.identity_deps.setdefault(bst.i_identity, []) bder.append(id_stmt)
python
def register_identity(self, id_stmt): """Register `id_stmt` with its base identity, if any. """ bst = id_stmt.search_one("base") if bst: bder = self.identity_deps.setdefault(bst.i_identity, []) bder.append(id_stmt)
[ "def", "register_identity", "(", "self", ",", "id_stmt", ")", ":", "bst", "=", "id_stmt", ".", "search_one", "(", "\"base\"", ")", "if", "bst", ":", "bder", "=", "self", ".", "identity_deps", ".", "setdefault", "(", "bst", ".", "i_identity", ",", "[", "]", ")", "bder", ".", "append", "(", "id_stmt", ")" ]
Register `id_stmt` with its base identity, if any.
[ "Register", "id_stmt", "with", "its", "base", "identity", "if", "any", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/dsdl.py#L498-L504
231,181
mbj4668/pyang
pyang/translators/dsdl.py
HybridDSDLSchema.add_derived_identity
def add_derived_identity(self, id_stmt): """Add pattern def for `id_stmt` and all derived identities. The corresponding "ref" pattern is returned. """ p = self.add_namespace(id_stmt.main_module()) if id_stmt not in self.identities: # add named pattern def self.identities[id_stmt] = SchemaNode.define("__%s_%s" % (p, id_stmt.arg)) parent = self.identities[id_stmt] if id_stmt in self.identity_deps: parent = SchemaNode.choice(parent, occur=2) for i in self.identity_deps[id_stmt]: parent.subnode(self.add_derived_identity(i)) idval = SchemaNode("value", parent, p+":"+id_stmt.arg) idval.attr["type"] = "QName" res = SchemaNode("ref") res.attr["name"] = self.identities[id_stmt].attr["name"] return res
python
def add_derived_identity(self, id_stmt): """Add pattern def for `id_stmt` and all derived identities. The corresponding "ref" pattern is returned. """ p = self.add_namespace(id_stmt.main_module()) if id_stmt not in self.identities: # add named pattern def self.identities[id_stmt] = SchemaNode.define("__%s_%s" % (p, id_stmt.arg)) parent = self.identities[id_stmt] if id_stmt in self.identity_deps: parent = SchemaNode.choice(parent, occur=2) for i in self.identity_deps[id_stmt]: parent.subnode(self.add_derived_identity(i)) idval = SchemaNode("value", parent, p+":"+id_stmt.arg) idval.attr["type"] = "QName" res = SchemaNode("ref") res.attr["name"] = self.identities[id_stmt].attr["name"] return res
[ "def", "add_derived_identity", "(", "self", ",", "id_stmt", ")", ":", "p", "=", "self", ".", "add_namespace", "(", "id_stmt", ".", "main_module", "(", ")", ")", "if", "id_stmt", "not", "in", "self", ".", "identities", ":", "# add named pattern def", "self", ".", "identities", "[", "id_stmt", "]", "=", "SchemaNode", ".", "define", "(", "\"__%s_%s\"", "%", "(", "p", ",", "id_stmt", ".", "arg", ")", ")", "parent", "=", "self", ".", "identities", "[", "id_stmt", "]", "if", "id_stmt", "in", "self", ".", "identity_deps", ":", "parent", "=", "SchemaNode", ".", "choice", "(", "parent", ",", "occur", "=", "2", ")", "for", "i", "in", "self", ".", "identity_deps", "[", "id_stmt", "]", ":", "parent", ".", "subnode", "(", "self", ".", "add_derived_identity", "(", "i", ")", ")", "idval", "=", "SchemaNode", "(", "\"value\"", ",", "parent", ",", "p", "+", "\":\"", "+", "id_stmt", ".", "arg", ")", "idval", ".", "attr", "[", "\"type\"", "]", "=", "\"QName\"", "res", "=", "SchemaNode", "(", "\"ref\"", ")", "res", ".", "attr", "[", "\"name\"", "]", "=", "self", ".", "identities", "[", "id_stmt", "]", ".", "attr", "[", "\"name\"", "]", "return", "res" ]
Add pattern def for `id_stmt` and all derived identities. The corresponding "ref" pattern is returned.
[ "Add", "pattern", "def", "for", "id_stmt", "and", "all", "derived", "identities", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/dsdl.py#L506-L524
231,182
mbj4668/pyang
pyang/translators/dsdl.py
HybridDSDLSchema.preload_defs
def preload_defs(self): """Preload all top-level definitions.""" for d in (self.module.search("grouping") + self.module.search("typedef")): uname, dic = self.unique_def_name(d) self.install_def(uname, d, dic)
python
def preload_defs(self): """Preload all top-level definitions.""" for d in (self.module.search("grouping") + self.module.search("typedef")): uname, dic = self.unique_def_name(d) self.install_def(uname, d, dic)
[ "def", "preload_defs", "(", "self", ")", ":", "for", "d", "in", "(", "self", ".", "module", ".", "search", "(", "\"grouping\"", ")", "+", "self", ".", "module", ".", "search", "(", "\"typedef\"", ")", ")", ":", "uname", ",", "dic", "=", "self", ".", "unique_def_name", "(", "d", ")", "self", ".", "install_def", "(", "uname", ",", "d", ",", "dic", ")" ]
Preload all top-level definitions.
[ "Preload", "all", "top", "-", "level", "definitions", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/dsdl.py#L526-L531
231,183
mbj4668/pyang
pyang/translators/dsdl.py
HybridDSDLSchema.add_prefix
def add_prefix(self, name, stmt): """Return `name` prepended with correct prefix. If the name is already prefixed, the prefix may be translated to the value obtained from `self.module_prefixes`. Unmodified `name` is returned if we are inside a global grouping. """ if self.gg_level: return name pref, colon, local = name.partition(":") if colon: return (self.module_prefixes[stmt.i_module.i_prefixes[pref][0]] + ":" + local) else: return self.prefix_stack[-1] + ":" + pref
python
def add_prefix(self, name, stmt): """Return `name` prepended with correct prefix. If the name is already prefixed, the prefix may be translated to the value obtained from `self.module_prefixes`. Unmodified `name` is returned if we are inside a global grouping. """ if self.gg_level: return name pref, colon, local = name.partition(":") if colon: return (self.module_prefixes[stmt.i_module.i_prefixes[pref][0]] + ":" + local) else: return self.prefix_stack[-1] + ":" + pref
[ "def", "add_prefix", "(", "self", ",", "name", ",", "stmt", ")", ":", "if", "self", ".", "gg_level", ":", "return", "name", "pref", ",", "colon", ",", "local", "=", "name", ".", "partition", "(", "\":\"", ")", "if", "colon", ":", "return", "(", "self", ".", "module_prefixes", "[", "stmt", ".", "i_module", ".", "i_prefixes", "[", "pref", "]", "[", "0", "]", "]", "+", "\":\"", "+", "local", ")", "else", ":", "return", "self", ".", "prefix_stack", "[", "-", "1", "]", "+", "\":\"", "+", "pref" ]
Return `name` prepended with correct prefix. If the name is already prefixed, the prefix may be translated to the value obtained from `self.module_prefixes`. Unmodified `name` is returned if we are inside a global grouping.
[ "Return", "name", "prepended", "with", "correct", "prefix", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/dsdl.py#L533-L546
231,184
mbj4668/pyang
pyang/translators/dsdl.py
HybridDSDLSchema.dc_element
def dc_element(self, parent, name, text): """Add DC element `name` containing `text` to `parent`.""" if self.dc_uri in self.namespaces: dcel = SchemaNode(self.namespaces[self.dc_uri] + ":" + name, text=text) parent.children.insert(0,dcel)
python
def dc_element(self, parent, name, text): """Add DC element `name` containing `text` to `parent`.""" if self.dc_uri in self.namespaces: dcel = SchemaNode(self.namespaces[self.dc_uri] + ":" + name, text=text) parent.children.insert(0,dcel)
[ "def", "dc_element", "(", "self", ",", "parent", ",", "name", ",", "text", ")", ":", "if", "self", ".", "dc_uri", "in", "self", ".", "namespaces", ":", "dcel", "=", "SchemaNode", "(", "self", ".", "namespaces", "[", "self", ".", "dc_uri", "]", "+", "\":\"", "+", "name", ",", "text", "=", "text", ")", "parent", ".", "children", ".", "insert", "(", "0", ",", "dcel", ")" ]
Add DC element `name` containing `text` to `parent`.
[ "Add", "DC", "element", "name", "containing", "text", "to", "parent", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/dsdl.py#L557-L562
231,185
mbj4668/pyang
pyang/translators/dsdl.py
HybridDSDLSchema.get_default
def get_default(self, stmt, refd): """Return default value for `stmt` node. `refd` is a dictionary of applicable refinements that is constructed in the `process_patches` method. """ if refd["default"]: return refd["default"] defst = stmt.search_one("default") if defst: return defst.arg return None
python
def get_default(self, stmt, refd): """Return default value for `stmt` node. `refd` is a dictionary of applicable refinements that is constructed in the `process_patches` method. """ if refd["default"]: return refd["default"] defst = stmt.search_one("default") if defst: return defst.arg return None
[ "def", "get_default", "(", "self", ",", "stmt", ",", "refd", ")", ":", "if", "refd", "[", "\"default\"", "]", ":", "return", "refd", "[", "\"default\"", "]", "defst", "=", "stmt", ".", "search_one", "(", "\"default\"", ")", "if", "defst", ":", "return", "defst", ".", "arg", "return", "None" ]
Return default value for `stmt` node. `refd` is a dictionary of applicable refinements that is constructed in the `process_patches` method.
[ "Return", "default", "value", "for", "stmt", "node", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/dsdl.py#L564-L575
231,186
mbj4668/pyang
pyang/translators/dsdl.py
HybridDSDLSchema.add_patch
def add_patch(self, pset, augref): """Add patch corresponding to `augref` to `pset`. `augref` must be either 'augment' or 'refine' statement. """ try: path = [ self.add_prefix(c, augref) for c in augref.arg.split("/") if c ] except KeyError: # augment of a module that's not among input modules return car = path[0] patch = Patch(path[1:], augref) if car in pset: sel = [ x for x in pset[car] if patch.path == x.path ] if sel: sel[0].combine(patch) else: pset[car].append(patch) else: pset[car] = [patch]
python
def add_patch(self, pset, augref): """Add patch corresponding to `augref` to `pset`. `augref` must be either 'augment' or 'refine' statement. """ try: path = [ self.add_prefix(c, augref) for c in augref.arg.split("/") if c ] except KeyError: # augment of a module that's not among input modules return car = path[0] patch = Patch(path[1:], augref) if car in pset: sel = [ x for x in pset[car] if patch.path == x.path ] if sel: sel[0].combine(patch) else: pset[car].append(patch) else: pset[car] = [patch]
[ "def", "add_patch", "(", "self", ",", "pset", ",", "augref", ")", ":", "try", ":", "path", "=", "[", "self", ".", "add_prefix", "(", "c", ",", "augref", ")", "for", "c", "in", "augref", ".", "arg", ".", "split", "(", "\"/\"", ")", "if", "c", "]", "except", "KeyError", ":", "# augment of a module that's not among input modules", "return", "car", "=", "path", "[", "0", "]", "patch", "=", "Patch", "(", "path", "[", "1", ":", "]", ",", "augref", ")", "if", "car", "in", "pset", ":", "sel", "=", "[", "x", "for", "x", "in", "pset", "[", "car", "]", "if", "patch", ".", "path", "==", "x", ".", "path", "]", "if", "sel", ":", "sel", "[", "0", "]", ".", "combine", "(", "patch", ")", "else", ":", "pset", "[", "car", "]", ".", "append", "(", "patch", ")", "else", ":", "pset", "[", "car", "]", "=", "[", "patch", "]" ]
Add patch corresponding to `augref` to `pset`. `augref` must be either 'augment' or 'refine' statement.
[ "Add", "patch", "corresponding", "to", "augref", "to", "pset", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/dsdl.py#L598-L618
231,187
mbj4668/pyang
pyang/translators/dsdl.py
HybridDSDLSchema.apply_augments
def apply_augments(self, auglist, p_elem, pset): """Handle substatements of augments from `auglist`. The augments are applied in the context of `p_elem`. `pset` is a patch set containing patches that may be applicable to descendants. """ for a in auglist: par = a.parent if a.search_one("when") is None: wel = p_elem else: if p_elem.interleave: kw = "interleave" else: kw = "group" wel = SchemaNode(kw, p_elem, interleave=p_elem.interleave) wel.occur = p_elem.occur if par.keyword == "uses": self.handle_substmts(a, wel, pset) continue if par.keyword == "submodule": mnam = par.i_including_modulename else: mnam = par.arg if self.prefix_stack[-1] == self.module_prefixes[mnam]: self.handle_substmts(a, wel, pset) else: self.prefix_stack.append(self.module_prefixes[mnam]) self.handle_substmts(a, wel, pset) self.prefix_stack.pop()
python
def apply_augments(self, auglist, p_elem, pset): """Handle substatements of augments from `auglist`. The augments are applied in the context of `p_elem`. `pset` is a patch set containing patches that may be applicable to descendants. """ for a in auglist: par = a.parent if a.search_one("when") is None: wel = p_elem else: if p_elem.interleave: kw = "interleave" else: kw = "group" wel = SchemaNode(kw, p_elem, interleave=p_elem.interleave) wel.occur = p_elem.occur if par.keyword == "uses": self.handle_substmts(a, wel, pset) continue if par.keyword == "submodule": mnam = par.i_including_modulename else: mnam = par.arg if self.prefix_stack[-1] == self.module_prefixes[mnam]: self.handle_substmts(a, wel, pset) else: self.prefix_stack.append(self.module_prefixes[mnam]) self.handle_substmts(a, wel, pset) self.prefix_stack.pop()
[ "def", "apply_augments", "(", "self", ",", "auglist", ",", "p_elem", ",", "pset", ")", ":", "for", "a", "in", "auglist", ":", "par", "=", "a", ".", "parent", "if", "a", ".", "search_one", "(", "\"when\"", ")", "is", "None", ":", "wel", "=", "p_elem", "else", ":", "if", "p_elem", ".", "interleave", ":", "kw", "=", "\"interleave\"", "else", ":", "kw", "=", "\"group\"", "wel", "=", "SchemaNode", "(", "kw", ",", "p_elem", ",", "interleave", "=", "p_elem", ".", "interleave", ")", "wel", ".", "occur", "=", "p_elem", ".", "occur", "if", "par", ".", "keyword", "==", "\"uses\"", ":", "self", ".", "handle_substmts", "(", "a", ",", "wel", ",", "pset", ")", "continue", "if", "par", ".", "keyword", "==", "\"submodule\"", ":", "mnam", "=", "par", ".", "i_including_modulename", "else", ":", "mnam", "=", "par", ".", "arg", "if", "self", ".", "prefix_stack", "[", "-", "1", "]", "==", "self", ".", "module_prefixes", "[", "mnam", "]", ":", "self", ".", "handle_substmts", "(", "a", ",", "wel", ",", "pset", ")", "else", ":", "self", ".", "prefix_stack", ".", "append", "(", "self", ".", "module_prefixes", "[", "mnam", "]", ")", "self", ".", "handle_substmts", "(", "a", ",", "wel", ",", "pset", ")", "self", ".", "prefix_stack", ".", "pop", "(", ")" ]
Handle substatements of augments from `auglist`. The augments are applied in the context of `p_elem`. `pset` is a patch set containing patches that may be applicable to descendants.
[ "Handle", "substatements", "of", "augments", "from", "auglist", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/dsdl.py#L620-L650
231,188
mbj4668/pyang
pyang/translators/dsdl.py
HybridDSDLSchema.current_revision
def current_revision(self, r_stmts): """Pick the most recent revision date. `r_stmts` is a list of 'revision' statements. """ cur = max([[int(p) for p in r.arg.split("-")] for r in r_stmts]) return "%4d-%02d-%02d" % tuple(cur)
python
def current_revision(self, r_stmts): """Pick the most recent revision date. `r_stmts` is a list of 'revision' statements. """ cur = max([[int(p) for p in r.arg.split("-")] for r in r_stmts]) return "%4d-%02d-%02d" % tuple(cur)
[ "def", "current_revision", "(", "self", ",", "r_stmts", ")", ":", "cur", "=", "max", "(", "[", "[", "int", "(", "p", ")", "for", "p", "in", "r", ".", "arg", ".", "split", "(", "\"-\"", ")", "]", "for", "r", "in", "r_stmts", "]", ")", "return", "\"%4d-%02d-%02d\"", "%", "tuple", "(", "cur", ")" ]
Pick the most recent revision date. `r_stmts` is a list of 'revision' statements.
[ "Pick", "the", "most", "recent", "revision", "date", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/dsdl.py#L652-L658
231,189
mbj4668/pyang
pyang/translators/dsdl.py
HybridDSDLSchema.install_def
def install_def(self, name, dstmt, def_map, interleave=False): """Install definition `name` into the appropriate dictionary. `dstmt` is the definition statement ('typedef' or 'grouping') that is to be mapped to a RELAX NG named pattern '<define name="`name`">'. `def_map` must be either `self.local_defs` or `self.global_defs`. `interleave` determines the interleave status inside the definition. """ delem = SchemaNode.define(name, interleave=interleave) delem.attr["name"] = name def_map[name] = delem if def_map is self.global_defs: self.gg_level += 1 self.handle_substmts(dstmt, delem) if def_map is self.global_defs: self.gg_level -= 1
python
def install_def(self, name, dstmt, def_map, interleave=False): """Install definition `name` into the appropriate dictionary. `dstmt` is the definition statement ('typedef' or 'grouping') that is to be mapped to a RELAX NG named pattern '<define name="`name`">'. `def_map` must be either `self.local_defs` or `self.global_defs`. `interleave` determines the interleave status inside the definition. """ delem = SchemaNode.define(name, interleave=interleave) delem.attr["name"] = name def_map[name] = delem if def_map is self.global_defs: self.gg_level += 1 self.handle_substmts(dstmt, delem) if def_map is self.global_defs: self.gg_level -= 1
[ "def", "install_def", "(", "self", ",", "name", ",", "dstmt", ",", "def_map", ",", "interleave", "=", "False", ")", ":", "delem", "=", "SchemaNode", ".", "define", "(", "name", ",", "interleave", "=", "interleave", ")", "delem", ".", "attr", "[", "\"name\"", "]", "=", "name", "def_map", "[", "name", "]", "=", "delem", "if", "def_map", "is", "self", ".", "global_defs", ":", "self", ".", "gg_level", "+=", "1", "self", ".", "handle_substmts", "(", "dstmt", ",", "delem", ")", "if", "def_map", "is", "self", ".", "global_defs", ":", "self", ".", "gg_level", "-=", "1" ]
Install definition `name` into the appropriate dictionary. `dstmt` is the definition statement ('typedef' or 'grouping') that is to be mapped to a RELAX NG named pattern '<define name="`name`">'. `def_map` must be either `self.local_defs` or `self.global_defs`. `interleave` determines the interleave status inside the definition.
[ "Install", "definition", "name", "into", "the", "appropriate", "dictionary", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/dsdl.py#L666-L680
231,190
mbj4668/pyang
pyang/translators/dsdl.py
HybridDSDLSchema.rng_annotation
def rng_annotation(self, stmt, p_elem): """Append YIN representation of extension statement `stmt`.""" ext = stmt.i_extension prf, extkw = stmt.raw_keyword (modname,rev)=stmt.i_module.i_prefixes[prf] prefix = self.add_namespace( statements.modulename_to_module(self.module,modname,rev)) eel = SchemaNode(prefix + ":" + extkw, p_elem) argst = ext.search_one("argument") if argst: if argst.search_one("yin-element", "true"): SchemaNode(prefix + ":" + argst.arg, eel, stmt.arg) else: eel.attr[argst.arg] = stmt.arg self.handle_substmts(stmt, eel)
python
def rng_annotation(self, stmt, p_elem): """Append YIN representation of extension statement `stmt`.""" ext = stmt.i_extension prf, extkw = stmt.raw_keyword (modname,rev)=stmt.i_module.i_prefixes[prf] prefix = self.add_namespace( statements.modulename_to_module(self.module,modname,rev)) eel = SchemaNode(prefix + ":" + extkw, p_elem) argst = ext.search_one("argument") if argst: if argst.search_one("yin-element", "true"): SchemaNode(prefix + ":" + argst.arg, eel, stmt.arg) else: eel.attr[argst.arg] = stmt.arg self.handle_substmts(stmt, eel)
[ "def", "rng_annotation", "(", "self", ",", "stmt", ",", "p_elem", ")", ":", "ext", "=", "stmt", ".", "i_extension", "prf", ",", "extkw", "=", "stmt", ".", "raw_keyword", "(", "modname", ",", "rev", ")", "=", "stmt", ".", "i_module", ".", "i_prefixes", "[", "prf", "]", "prefix", "=", "self", ".", "add_namespace", "(", "statements", ".", "modulename_to_module", "(", "self", ".", "module", ",", "modname", ",", "rev", ")", ")", "eel", "=", "SchemaNode", "(", "prefix", "+", "\":\"", "+", "extkw", ",", "p_elem", ")", "argst", "=", "ext", ".", "search_one", "(", "\"argument\"", ")", "if", "argst", ":", "if", "argst", ".", "search_one", "(", "\"yin-element\"", ",", "\"true\"", ")", ":", "SchemaNode", "(", "prefix", "+", "\":\"", "+", "argst", ".", "arg", ",", "eel", ",", "stmt", ".", "arg", ")", "else", ":", "eel", ".", "attr", "[", "argst", ".", "arg", "]", "=", "stmt", ".", "arg", "self", ".", "handle_substmts", "(", "stmt", ",", "eel", ")" ]
Append YIN representation of extension statement `stmt`.
[ "Append", "YIN", "representation", "of", "extension", "statement", "stmt", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/dsdl.py#L682-L696
231,191
mbj4668/pyang
pyang/translators/dsdl.py
HybridDSDLSchema.propagate_occur
def propagate_occur(self, node, value): """Propagate occurence `value` to `node` and its ancestors. Occurence values are defined and explained in the SchemaNode class. """ while node.occur < value: node.occur = value if node.name == "define": break node = node.parent
python
def propagate_occur(self, node, value): """Propagate occurence `value` to `node` and its ancestors. Occurence values are defined and explained in the SchemaNode class. """ while node.occur < value: node.occur = value if node.name == "define": break node = node.parent
[ "def", "propagate_occur", "(", "self", ",", "node", ",", "value", ")", ":", "while", "node", ".", "occur", "<", "value", ":", "node", ".", "occur", "=", "value", "if", "node", ".", "name", "==", "\"define\"", ":", "break", "node", "=", "node", ".", "parent" ]
Propagate occurence `value` to `node` and its ancestors. Occurence values are defined and explained in the SchemaNode class.
[ "Propagate", "occurence", "value", "to", "node", "and", "its", "ancestors", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/dsdl.py#L698-L708
231,192
mbj4668/pyang
pyang/translators/dsdl.py
HybridDSDLSchema.process_patches
def process_patches(self, pset, stmt, elem, altname=None): """Process patches for data node `name` from `pset`. `stmt` provides the context in YANG and `elem` is the parent element in the output schema. Refinements adding documentation and changing the config status are immediately applied. The returned tuple consists of: - a dictionary of refinements, in which keys are the keywords of the refinement statements and values are the new values of refined parameters. - a list of 'augment' statements that are to be applied directly under `elem`. - a new patch set containing patches applicable to substatements of `stmt`. """ if altname: name = altname else: name = stmt.arg new_pset = {} augments = [] refine_dict = dict.fromkeys(("presence", "default", "mandatory", "min-elements", "max-elements")) for p in pset.pop(self.add_prefix(name, stmt), []): if p.path: head = p.pop() if head in new_pset: new_pset[head].append(p) else: new_pset[head] = [p] else: for refaug in p.plist: if refaug.keyword == "augment": augments.append(refaug) else: for s in refaug.substmts: if s.keyword == "description": self.description_stmt(s, elem, None) elif s.keyword == "reference": self.reference_stmt(s, elem, None) elif s.keyword == "must": self.must_stmt(s, elem, None) elif s.keyword == "config": self.nma_attribute(s, elem) elif refine_dict.get(s.keyword, False) is None: refine_dict[s.keyword] = s.arg return (refine_dict, augments, new_pset)
python
def process_patches(self, pset, stmt, elem, altname=None): """Process patches for data node `name` from `pset`. `stmt` provides the context in YANG and `elem` is the parent element in the output schema. Refinements adding documentation and changing the config status are immediately applied. The returned tuple consists of: - a dictionary of refinements, in which keys are the keywords of the refinement statements and values are the new values of refined parameters. - a list of 'augment' statements that are to be applied directly under `elem`. - a new patch set containing patches applicable to substatements of `stmt`. """ if altname: name = altname else: name = stmt.arg new_pset = {} augments = [] refine_dict = dict.fromkeys(("presence", "default", "mandatory", "min-elements", "max-elements")) for p in pset.pop(self.add_prefix(name, stmt), []): if p.path: head = p.pop() if head in new_pset: new_pset[head].append(p) else: new_pset[head] = [p] else: for refaug in p.plist: if refaug.keyword == "augment": augments.append(refaug) else: for s in refaug.substmts: if s.keyword == "description": self.description_stmt(s, elem, None) elif s.keyword == "reference": self.reference_stmt(s, elem, None) elif s.keyword == "must": self.must_stmt(s, elem, None) elif s.keyword == "config": self.nma_attribute(s, elem) elif refine_dict.get(s.keyword, False) is None: refine_dict[s.keyword] = s.arg return (refine_dict, augments, new_pset)
[ "def", "process_patches", "(", "self", ",", "pset", ",", "stmt", ",", "elem", ",", "altname", "=", "None", ")", ":", "if", "altname", ":", "name", "=", "altname", "else", ":", "name", "=", "stmt", ".", "arg", "new_pset", "=", "{", "}", "augments", "=", "[", "]", "refine_dict", "=", "dict", ".", "fromkeys", "(", "(", "\"presence\"", ",", "\"default\"", ",", "\"mandatory\"", ",", "\"min-elements\"", ",", "\"max-elements\"", ")", ")", "for", "p", "in", "pset", ".", "pop", "(", "self", ".", "add_prefix", "(", "name", ",", "stmt", ")", ",", "[", "]", ")", ":", "if", "p", ".", "path", ":", "head", "=", "p", ".", "pop", "(", ")", "if", "head", "in", "new_pset", ":", "new_pset", "[", "head", "]", ".", "append", "(", "p", ")", "else", ":", "new_pset", "[", "head", "]", "=", "[", "p", "]", "else", ":", "for", "refaug", "in", "p", ".", "plist", ":", "if", "refaug", ".", "keyword", "==", "\"augment\"", ":", "augments", ".", "append", "(", "refaug", ")", "else", ":", "for", "s", "in", "refaug", ".", "substmts", ":", "if", "s", ".", "keyword", "==", "\"description\"", ":", "self", ".", "description_stmt", "(", "s", ",", "elem", ",", "None", ")", "elif", "s", ".", "keyword", "==", "\"reference\"", ":", "self", ".", "reference_stmt", "(", "s", ",", "elem", ",", "None", ")", "elif", "s", ".", "keyword", "==", "\"must\"", ":", "self", ".", "must_stmt", "(", "s", ",", "elem", ",", "None", ")", "elif", "s", ".", "keyword", "==", "\"config\"", ":", "self", ".", "nma_attribute", "(", "s", ",", "elem", ")", "elif", "refine_dict", ".", "get", "(", "s", ".", "keyword", ",", "False", ")", "is", "None", ":", "refine_dict", "[", "s", ".", "keyword", "]", "=", "s", ".", "arg", "return", "(", "refine_dict", ",", "augments", ",", "new_pset", ")" ]
Process patches for data node `name` from `pset`. `stmt` provides the context in YANG and `elem` is the parent element in the output schema. Refinements adding documentation and changing the config status are immediately applied. The returned tuple consists of: - a dictionary of refinements, in which keys are the keywords of the refinement statements and values are the new values of refined parameters. - a list of 'augment' statements that are to be applied directly under `elem`. - a new patch set containing patches applicable to substatements of `stmt`.
[ "Process", "patches", "for", "data", "node", "name", "from", "pset", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/dsdl.py#L710-L757
231,193
mbj4668/pyang
pyang/translators/dsdl.py
HybridDSDLSchema.lookup_expand
def lookup_expand(self, stmt, names): """Find schema nodes under `stmt`, also in used groupings. `names` is a list with qualified names of the schema nodes to look up. All 'uses'/'grouping' pairs between `stmt` and found schema nodes are marked for expansion. """ if not names: return [] todo = [stmt] while todo: pst = todo.pop() for sub in pst.substmts: if sub.keyword in self.schema_nodes: qname = self.qname(sub) if qname in names: names.remove(qname) par = sub.parent while hasattr(par,"d_ref"): # par must be grouping par.d_ref.d_expand = True par = par.d_ref.parent if not names: return [] # all found elif sub.keyword == "uses": g = sub.i_grouping g.d_ref = sub todo.append(g) return names
python
def lookup_expand(self, stmt, names): """Find schema nodes under `stmt`, also in used groupings. `names` is a list with qualified names of the schema nodes to look up. All 'uses'/'grouping' pairs between `stmt` and found schema nodes are marked for expansion. """ if not names: return [] todo = [stmt] while todo: pst = todo.pop() for sub in pst.substmts: if sub.keyword in self.schema_nodes: qname = self.qname(sub) if qname in names: names.remove(qname) par = sub.parent while hasattr(par,"d_ref"): # par must be grouping par.d_ref.d_expand = True par = par.d_ref.parent if not names: return [] # all found elif sub.keyword == "uses": g = sub.i_grouping g.d_ref = sub todo.append(g) return names
[ "def", "lookup_expand", "(", "self", ",", "stmt", ",", "names", ")", ":", "if", "not", "names", ":", "return", "[", "]", "todo", "=", "[", "stmt", "]", "while", "todo", ":", "pst", "=", "todo", ".", "pop", "(", ")", "for", "sub", "in", "pst", ".", "substmts", ":", "if", "sub", ".", "keyword", "in", "self", ".", "schema_nodes", ":", "qname", "=", "self", ".", "qname", "(", "sub", ")", "if", "qname", "in", "names", ":", "names", ".", "remove", "(", "qname", ")", "par", "=", "sub", ".", "parent", "while", "hasattr", "(", "par", ",", "\"d_ref\"", ")", ":", "# par must be grouping", "par", ".", "d_ref", ".", "d_expand", "=", "True", "par", "=", "par", ".", "d_ref", ".", "parent", "if", "not", "names", ":", "return", "[", "]", "# all found", "elif", "sub", ".", "keyword", "==", "\"uses\"", ":", "g", "=", "sub", ".", "i_grouping", "g", ".", "d_ref", "=", "sub", "todo", ".", "append", "(", "g", ")", "return", "names" ]
Find schema nodes under `stmt`, also in used groupings. `names` is a list with qualified names of the schema nodes to look up. All 'uses'/'grouping' pairs between `stmt` and found schema nodes are marked for expansion.
[ "Find", "schema", "nodes", "under", "stmt", "also", "in", "used", "groupings", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/dsdl.py#L780-L805
231,194
mbj4668/pyang
pyang/translators/dsdl.py
HybridDSDLSchema.type_with_ranges
def type_with_ranges(self, tchain, p_elem, rangekw, gen_data): """Handle types with 'range' or 'length' restrictions. `tchain` is the chain of type definitions from which the ranges may need to be extracted. `rangekw` is the statement keyword determining the range type (either 'range' or 'length'). `gen_data` is a function that generates the output schema node (a RELAX NG <data> pattern). """ ranges = self.get_ranges(tchain, rangekw) if not ranges: return p_elem.subnode(gen_data()) if len(ranges) > 1: p_elem = SchemaNode.choice(p_elem) p_elem.occur = 2 for r in ranges: d_elem = gen_data() for p in self.range_params(r, rangekw): d_elem.subnode(p) p_elem.subnode(d_elem)
python
def type_with_ranges(self, tchain, p_elem, rangekw, gen_data): """Handle types with 'range' or 'length' restrictions. `tchain` is the chain of type definitions from which the ranges may need to be extracted. `rangekw` is the statement keyword determining the range type (either 'range' or 'length'). `gen_data` is a function that generates the output schema node (a RELAX NG <data> pattern). """ ranges = self.get_ranges(tchain, rangekw) if not ranges: return p_elem.subnode(gen_data()) if len(ranges) > 1: p_elem = SchemaNode.choice(p_elem) p_elem.occur = 2 for r in ranges: d_elem = gen_data() for p in self.range_params(r, rangekw): d_elem.subnode(p) p_elem.subnode(d_elem)
[ "def", "type_with_ranges", "(", "self", ",", "tchain", ",", "p_elem", ",", "rangekw", ",", "gen_data", ")", ":", "ranges", "=", "self", ".", "get_ranges", "(", "tchain", ",", "rangekw", ")", "if", "not", "ranges", ":", "return", "p_elem", ".", "subnode", "(", "gen_data", "(", ")", ")", "if", "len", "(", "ranges", ")", ">", "1", ":", "p_elem", "=", "SchemaNode", ".", "choice", "(", "p_elem", ")", "p_elem", ".", "occur", "=", "2", "for", "r", "in", "ranges", ":", "d_elem", "=", "gen_data", "(", ")", "for", "p", "in", "self", ".", "range_params", "(", "r", ",", "rangekw", ")", ":", "d_elem", ".", "subnode", "(", "p", ")", "p_elem", ".", "subnode", "(", "d_elem", ")" ]
Handle types with 'range' or 'length' restrictions. `tchain` is the chain of type definitions from which the ranges may need to be extracted. `rangekw` is the statement keyword determining the range type (either 'range' or 'length'). `gen_data` is a function that generates the output schema node (a RELAX NG <data> pattern).
[ "Handle", "types", "with", "range", "or", "length", "restrictions", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/dsdl.py#L807-L825
231,195
mbj4668/pyang
pyang/translators/dsdl.py
HybridDSDLSchema.get_ranges
def get_ranges(self, tchain, kw): """Return list of ranges defined in `tchain`. `kw` is the statement keyword determining the type of the range, i.e. 'range' or 'length'. `tchain` is the chain of type definitions from which the resulting range is obtained. The returned value is a list of tuples containing the segments of the resulting range. """ (lo, hi) = ("min", "max") ran = None for t in tchain: rstmt = t.search_one(kw) if rstmt is None: continue parts = [ p.strip() for p in rstmt.arg.split("|") ] ran = [ [ i.strip() for i in p.split("..") ] for p in parts ] if ran[0][0] != 'min': lo = ran[0][0] if ran[-1][-1] != 'max': hi = ran[-1][-1] if ran is None: return None if len(ran) == 1: return [(lo, hi)] else: return [(lo, ran[0][-1])] + ran[1:-1] + [(ran[-1][0], hi)]
python
def get_ranges(self, tchain, kw): """Return list of ranges defined in `tchain`. `kw` is the statement keyword determining the type of the range, i.e. 'range' or 'length'. `tchain` is the chain of type definitions from which the resulting range is obtained. The returned value is a list of tuples containing the segments of the resulting range. """ (lo, hi) = ("min", "max") ran = None for t in tchain: rstmt = t.search_one(kw) if rstmt is None: continue parts = [ p.strip() for p in rstmt.arg.split("|") ] ran = [ [ i.strip() for i in p.split("..") ] for p in parts ] if ran[0][0] != 'min': lo = ran[0][0] if ran[-1][-1] != 'max': hi = ran[-1][-1] if ran is None: return None if len(ran) == 1: return [(lo, hi)] else: return [(lo, ran[0][-1])] + ran[1:-1] + [(ran[-1][0], hi)]
[ "def", "get_ranges", "(", "self", ",", "tchain", ",", "kw", ")", ":", "(", "lo", ",", "hi", ")", "=", "(", "\"min\"", ",", "\"max\"", ")", "ran", "=", "None", "for", "t", "in", "tchain", ":", "rstmt", "=", "t", ".", "search_one", "(", "kw", ")", "if", "rstmt", "is", "None", ":", "continue", "parts", "=", "[", "p", ".", "strip", "(", ")", "for", "p", "in", "rstmt", ".", "arg", ".", "split", "(", "\"|\"", ")", "]", "ran", "=", "[", "[", "i", ".", "strip", "(", ")", "for", "i", "in", "p", ".", "split", "(", "\"..\"", ")", "]", "for", "p", "in", "parts", "]", "if", "ran", "[", "0", "]", "[", "0", "]", "!=", "'min'", ":", "lo", "=", "ran", "[", "0", "]", "[", "0", "]", "if", "ran", "[", "-", "1", "]", "[", "-", "1", "]", "!=", "'max'", ":", "hi", "=", "ran", "[", "-", "1", "]", "[", "-", "1", "]", "if", "ran", "is", "None", ":", "return", "None", "if", "len", "(", "ran", ")", "==", "1", ":", "return", "[", "(", "lo", ",", "hi", ")", "]", "else", ":", "return", "[", "(", "lo", ",", "ran", "[", "0", "]", "[", "-", "1", "]", ")", "]", "+", "ran", "[", "1", ":", "-", "1", "]", "+", "[", "(", "ran", "[", "-", "1", "]", "[", "0", "]", ",", "hi", ")", "]" ]
Return list of ranges defined in `tchain`. `kw` is the statement keyword determining the type of the range, i.e. 'range' or 'length'. `tchain` is the chain of type definitions from which the resulting range is obtained. The returned value is a list of tuples containing the segments of the resulting range.
[ "Return", "list", "of", "ranges", "defined", "in", "tchain", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/dsdl.py#L827-L850
231,196
mbj4668/pyang
pyang/translators/dsdl.py
HybridDSDLSchema.handle_stmt
def handle_stmt(self, stmt, p_elem, pset={}): """ Run handler method for statement `stmt`. `p_elem` is the parent node in the output schema. `pset` is the current "patch set" - a dictionary with keys being QNames of schema nodes at the current level of hierarchy for which (or descendants thereof) any pending patches exist. The values are instances of the Patch class. All handler methods are defined below and must have the same arguments as this method. They should create the output schema fragment corresponding to `stmt`, apply all patches from `pset` belonging to `stmt`, insert the fragment under `p_elem` and perform all side effects as necessary. """ if self.debug > 0: sys.stderr.write("Handling '%s %s'\n" % (util.keyword_to_str(stmt.raw_keyword), stmt.arg)) try: method = self.stmt_handler[stmt.keyword] except KeyError: if isinstance(stmt.keyword, tuple): try: method = self.ext_handler[stmt.keyword[0]][stmt.keyword[1]] except KeyError: method = self.rng_annotation method(stmt, p_elem) return else: raise error.EmitError( "Unknown keyword %s - this should not happen.\n" % stmt.keyword) method(stmt, p_elem, pset)
python
def handle_stmt(self, stmt, p_elem, pset={}): """ Run handler method for statement `stmt`. `p_elem` is the parent node in the output schema. `pset` is the current "patch set" - a dictionary with keys being QNames of schema nodes at the current level of hierarchy for which (or descendants thereof) any pending patches exist. The values are instances of the Patch class. All handler methods are defined below and must have the same arguments as this method. They should create the output schema fragment corresponding to `stmt`, apply all patches from `pset` belonging to `stmt`, insert the fragment under `p_elem` and perform all side effects as necessary. """ if self.debug > 0: sys.stderr.write("Handling '%s %s'\n" % (util.keyword_to_str(stmt.raw_keyword), stmt.arg)) try: method = self.stmt_handler[stmt.keyword] except KeyError: if isinstance(stmt.keyword, tuple): try: method = self.ext_handler[stmt.keyword[0]][stmt.keyword[1]] except KeyError: method = self.rng_annotation method(stmt, p_elem) return else: raise error.EmitError( "Unknown keyword %s - this should not happen.\n" % stmt.keyword) method(stmt, p_elem, pset)
[ "def", "handle_stmt", "(", "self", ",", "stmt", ",", "p_elem", ",", "pset", "=", "{", "}", ")", ":", "if", "self", ".", "debug", ">", "0", ":", "sys", ".", "stderr", ".", "write", "(", "\"Handling '%s %s'\\n\"", "%", "(", "util", ".", "keyword_to_str", "(", "stmt", ".", "raw_keyword", ")", ",", "stmt", ".", "arg", ")", ")", "try", ":", "method", "=", "self", ".", "stmt_handler", "[", "stmt", ".", "keyword", "]", "except", "KeyError", ":", "if", "isinstance", "(", "stmt", ".", "keyword", ",", "tuple", ")", ":", "try", ":", "method", "=", "self", ".", "ext_handler", "[", "stmt", ".", "keyword", "[", "0", "]", "]", "[", "stmt", ".", "keyword", "[", "1", "]", "]", "except", "KeyError", ":", "method", "=", "self", ".", "rng_annotation", "method", "(", "stmt", ",", "p_elem", ")", "return", "else", ":", "raise", "error", ".", "EmitError", "(", "\"Unknown keyword %s - this should not happen.\\n\"", "%", "stmt", ".", "keyword", ")", "method", "(", "stmt", ",", "p_elem", ",", "pset", ")" ]
Run handler method for statement `stmt`. `p_elem` is the parent node in the output schema. `pset` is the current "patch set" - a dictionary with keys being QNames of schema nodes at the current level of hierarchy for which (or descendants thereof) any pending patches exist. The values are instances of the Patch class. All handler methods are defined below and must have the same arguments as this method. They should create the output schema fragment corresponding to `stmt`, apply all patches from `pset` belonging to `stmt`, insert the fragment under `p_elem` and perform all side effects as necessary.
[ "Run", "handler", "method", "for", "statement", "stmt", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/dsdl.py#L882-L915
231,197
mbj4668/pyang
pyang/translators/dsdl.py
HybridDSDLSchema.handle_substmts
def handle_substmts(self, stmt, p_elem, pset={}): """Handle all substatements of `stmt`.""" for sub in stmt.substmts: self.handle_stmt(sub, p_elem, pset)
python
def handle_substmts(self, stmt, p_elem, pset={}): """Handle all substatements of `stmt`.""" for sub in stmt.substmts: self.handle_stmt(sub, p_elem, pset)
[ "def", "handle_substmts", "(", "self", ",", "stmt", ",", "p_elem", ",", "pset", "=", "{", "}", ")", ":", "for", "sub", "in", "stmt", ".", "substmts", ":", "self", ".", "handle_stmt", "(", "sub", ",", "p_elem", ",", "pset", ")" ]
Handle all substatements of `stmt`.
[ "Handle", "all", "substatements", "of", "stmt", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/dsdl.py#L917-L920
231,198
mbj4668/pyang
pyang/translators/dsdl.py
HybridDSDLSchema.nma_attribute
def nma_attribute(self, stmt, p_elem, pset=None): """Map `stmt` to a NETMOD-specific attribute. The name of the attribute is the same as the 'keyword' of `stmt`. """ att = "nma:" + stmt.keyword if att not in p_elem.attr: p_elem.attr[att] = stmt.arg
python
def nma_attribute(self, stmt, p_elem, pset=None): """Map `stmt` to a NETMOD-specific attribute. The name of the attribute is the same as the 'keyword' of `stmt`. """ att = "nma:" + stmt.keyword if att not in p_elem.attr: p_elem.attr[att] = stmt.arg
[ "def", "nma_attribute", "(", "self", ",", "stmt", ",", "p_elem", ",", "pset", "=", "None", ")", ":", "att", "=", "\"nma:\"", "+", "stmt", ".", "keyword", "if", "att", "not", "in", "p_elem", ".", "attr", ":", "p_elem", ".", "attr", "[", "att", "]", "=", "stmt", ".", "arg" ]
Map `stmt` to a NETMOD-specific attribute. The name of the attribute is the same as the 'keyword' of `stmt`.
[ "Map", "stmt", "to", "a", "NETMOD", "-", "specific", "attribute", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/dsdl.py#L942-L950
231,199
mbj4668/pyang
pyang/translators/dsdl.py
HybridDSDLSchema.type_stmt
def type_stmt(self, stmt, p_elem, pset): """Handle ``type`` statement. Built-in types are handled by one of the specific type callback methods defined below. """ typedef = stmt.i_typedef if typedef and not stmt.i_is_derived: # just ref uname, dic = self.unique_def_name(typedef) if uname not in dic: self.install_def(uname, typedef, dic) SchemaNode("ref", p_elem).set_attr("name", uname) defst = typedef.search_one("default") if defst: dic[uname].default = defst.arg occur = 1 else: occur = dic[uname].occur if occur > 0: self.propagate_occur(p_elem, occur) return chain = [stmt] tdefault = None while typedef: type_ = typedef.search_one("type") chain.insert(0, type_) if tdefault is None: tdef = typedef.search_one("default") if tdef: tdefault = tdef.arg typedef = type_.i_typedef if tdefault and p_elem.occur == 0: p_elem.default = tdefault self.propagate_occur(p_elem, 1) self.type_handler[chain[0].arg](chain, p_elem)
python
def type_stmt(self, stmt, p_elem, pset): """Handle ``type`` statement. Built-in types are handled by one of the specific type callback methods defined below. """ typedef = stmt.i_typedef if typedef and not stmt.i_is_derived: # just ref uname, dic = self.unique_def_name(typedef) if uname not in dic: self.install_def(uname, typedef, dic) SchemaNode("ref", p_elem).set_attr("name", uname) defst = typedef.search_one("default") if defst: dic[uname].default = defst.arg occur = 1 else: occur = dic[uname].occur if occur > 0: self.propagate_occur(p_elem, occur) return chain = [stmt] tdefault = None while typedef: type_ = typedef.search_one("type") chain.insert(0, type_) if tdefault is None: tdef = typedef.search_one("default") if tdef: tdefault = tdef.arg typedef = type_.i_typedef if tdefault and p_elem.occur == 0: p_elem.default = tdefault self.propagate_occur(p_elem, 1) self.type_handler[chain[0].arg](chain, p_elem)
[ "def", "type_stmt", "(", "self", ",", "stmt", ",", "p_elem", ",", "pset", ")", ":", "typedef", "=", "stmt", ".", "i_typedef", "if", "typedef", "and", "not", "stmt", ".", "i_is_derived", ":", "# just ref", "uname", ",", "dic", "=", "self", ".", "unique_def_name", "(", "typedef", ")", "if", "uname", "not", "in", "dic", ":", "self", ".", "install_def", "(", "uname", ",", "typedef", ",", "dic", ")", "SchemaNode", "(", "\"ref\"", ",", "p_elem", ")", ".", "set_attr", "(", "\"name\"", ",", "uname", ")", "defst", "=", "typedef", ".", "search_one", "(", "\"default\"", ")", "if", "defst", ":", "dic", "[", "uname", "]", ".", "default", "=", "defst", ".", "arg", "occur", "=", "1", "else", ":", "occur", "=", "dic", "[", "uname", "]", ".", "occur", "if", "occur", ">", "0", ":", "self", ".", "propagate_occur", "(", "p_elem", ",", "occur", ")", "return", "chain", "=", "[", "stmt", "]", "tdefault", "=", "None", "while", "typedef", ":", "type_", "=", "typedef", ".", "search_one", "(", "\"type\"", ")", "chain", ".", "insert", "(", "0", ",", "type_", ")", "if", "tdefault", "is", "None", ":", "tdef", "=", "typedef", ".", "search_one", "(", "\"default\"", ")", "if", "tdef", ":", "tdefault", "=", "tdef", ".", "arg", "typedef", "=", "type_", ".", "i_typedef", "if", "tdefault", "and", "p_elem", ".", "occur", "==", "0", ":", "p_elem", ".", "default", "=", "tdefault", "self", ".", "propagate_occur", "(", "p_elem", ",", "1", ")", "self", ".", "type_handler", "[", "chain", "[", "0", "]", ".", "arg", "]", "(", "chain", ",", "p_elem", ")" ]
Handle ``type`` statement. Built-in types are handled by one of the specific type callback methods defined below.
[ "Handle", "type", "statement", "." ]
f2a5cc3142162e5b9ee4e18d154568d939ff63dd
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/translators/dsdl.py#L1119-L1152