from copy import deepcopy
from .spec import Tokenizer
from .tokenizer_part import TokenizerPart
def get_tokenizer(**kwargs) -> Tokenizer:
    """Instantiate a concrete Tokenizer from a keyword-argument config.

    The config must contain a `__target__` key naming the tokenizer kind;
    the remaining keys are deep-copied (so the caller's config is never
    mutated by `.parse`) and forwarded to that class's `parse` constructor.

    Raises:
        KeyError: if `__target__` is missing from the config, or names
            an unknown tokenizer kind.
    """
    # pop() validates presence and removes the key in one step.  The
    # original used `assert`, which is silently stripped under `python -O`
    # and therefore must not guard input validation.
    try:
        target = kwargs.pop('__target__')
    except KeyError:
        raise KeyError("do not find `__target__` in tokenizer config") from None
    # Registry mapping config names to concrete tokenizer classes.
    registry = {
        'tokenizer_part': TokenizerPart,
    }
    return registry[target].parse(**deepcopy(kwargs))