Dataset columns: signature (string, length 8 to 3.44k), body (string, length 0 to 1.41M), docstring (string, length 1 to 122k), id (string, length 5 to 17).
@property<EOL><INDENT>def dtype(self):<DEDENT>
return self._data.dtype<EOL>
numpy.dtype: Describes the layout of each element of the data.
f8497:c3:m6
@property<EOL><INDENT>def datatype(self):<DEDENT>
return self._data.dtype<EOL>
numpy.dtype: Describes the layout of each element of the data.
f8497:c3:m7
@property<EOL><INDENT>def dimensions(self):<DEDENT>
return self._dimensions<EOL>
tuple[str]: all the names of :class:`Dimension` used by this :class:`Variable`.
f8497:c3:m8
def __setitem__(self, ind, value):
self._data[ind] = value<EOL>
Handle setting values on the Variable.
f8497:c3:m9
def __getitem__(self, ind):
return self._data[ind]<EOL>
Handle getting values from the Variable.
f8497:c3:m10
def __str__(self):
groups = [str(type(self))<EOL>+ '<STR_LIT>'.format(self, '<STR_LIT:U+002CU+0020>'.join(self.dimensions))]<EOL>for att in self.ncattrs():<EOL><INDENT>groups.append('<STR_LIT>'.format(att, getattr(self, att)))<EOL><DEDENT>if self.ndim:<EOL><INDENT>shape = tuple(int(s) for s in self.shape)<EOL>if self.ndim > <NUM_LIT:1>:<EOL><INDENT>shape_str = str(shape)<EOL><DEDENT>else:<EOL><INDENT>shape_str = str(shape[<NUM_LIT:0>])<EOL><DEDENT>groups.append('<STR_LIT>' + shape_str)<EOL><DEDENT>return '<STR_LIT:\n>'.join(groups)<EOL>
Return a string representation of the Variable.
f8497:c3:m11
def __init__(self, group, name, size=None):
self._group = group<EOL>self.name = name<EOL>self.size = size<EOL>
Initialize a Dimension. Instead of constructing a Dimension directly, you should use ``Group.createDimension``. Parameters ---------- group : Group The parent Group that owns this Dimension. name : str The name of this Dimension. size : int or None, optional The size of the Dimension. Defaults to None, which implies an empty dimension. See Also -------- Group.createDimension
f8497:c4:m0
def group(self):
return self._group<EOL>
Get the Group that owns this Dimension. Returns ------- Group The parent Group.
f8497:c4:m1
def __len__(self):
return self.size<EOL>
Return the length of this Dimension.
f8497:c4:m2
def __str__(self):
return '<STR_LIT>'.format(type(self), self)<EOL>
Return a string representation of this Dimension.
f8497:c4:m3
def register_processor(num):
def inner(func):<EOL><INDENT>"""<STR_LIT>"""<EOL>processors[num] = func<EOL>return func<EOL><DEDENT>return inner<EOL>
Register functions to handle particular message numbers.
f8500:m0
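The body above keeps the masked literals from this dump, so here is a minimal, self-contained sketch of the decorator-registry technique it implements: a parameterized decorator that stores each handler in a dict keyed by message number. The handler name below is illustrative, not the original code.

processors = {}

def register_processor(num):
    """Return a decorator that registers ``func`` as the handler for message ``num``."""
    def inner(func):
        processors[num] = func
        return func
    return inner

@register_processor(3)
def process_msg3_example(fname):
    # Placeholder handler; the real handler parses the message-type-3 table file.
    return fname

assert processors[3] is process_msg3_example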
@register_processor(<NUM_LIT:3>)<EOL>def process_msg3(fname):
with open(fname, '<STR_LIT:r>') as infile:<EOL><INDENT>info = []<EOL>for lineno, line in enumerate(infile):<EOL><INDENT>parts = line.split('<STR_LIT:U+0020>')<EOL>try:<EOL><INDENT>var_name, desc, typ, units = parts[:<NUM_LIT:4>]<EOL>size_hw = parts[-<NUM_LIT:1>]<EOL>if '<STR_LIT:->' in size_hw:<EOL><INDENT>start, end = map(int, size_hw.split('<STR_LIT:->'))<EOL>size = (end - start + <NUM_LIT:1>) * <NUM_LIT:2><EOL><DEDENT>else:<EOL><INDENT>size = <NUM_LIT:2><EOL><DEDENT>assert size >= <NUM_LIT:2><EOL>fmt = fix_type(typ, size)<EOL>var_name = fix_var_name(var_name)<EOL>full_desc = fix_desc(desc, units)<EOL>info.append({'<STR_LIT:name>': var_name, '<STR_LIT>': full_desc, '<STR_LIT>': fmt})<EOL>if ignored_item(info[-<NUM_LIT:1>]) and var_name != '<STR_LIT>':<EOL><INDENT>warnings.warn('<STR_LIT>'.format(var_name, typ))<EOL><DEDENT><DEDENT>except (ValueError, AssertionError):<EOL><INDENT>warnings.warn('<STR_LIT>'.format(lineno + <NUM_LIT:1>, '<STR_LIT::>'.join(parts)))<EOL>raise<EOL><DEDENT><DEDENT>return info<EOL><DEDENT>
Handle information for message type 3.
f8500:m1
@register_processor(<NUM_LIT>)<EOL>def process_msg18(fname):
with open(fname, '<STR_LIT:r>') as infile:<EOL><INDENT>info = []<EOL>for lineno, line in enumerate(infile):<EOL><INDENT>parts = line.split('<STR_LIT:U+0020>')<EOL>try:<EOL><INDENT>if len(parts) == <NUM_LIT:8>:<EOL><INDENT>parts = parts[:<NUM_LIT:6>] + [parts[<NUM_LIT:6>] + parts[<NUM_LIT:7>]]<EOL><DEDENT>var_name, desc, typ, units, rng, prec, byte_range = parts<EOL>start, end = map(int, byte_range.split('<STR_LIT:->'))<EOL>size = end - start + <NUM_LIT:1><EOL>assert size >= <NUM_LIT:4><EOL>fmt = fix_type(typ, size,<EOL>additional=[('<STR_LIT>', ('<STR_LIT>', <NUM_LIT>))])<EOL>if '<STR_LIT:U+0020>' in var_name:<EOL><INDENT>warnings.warn('<STR_LIT>'.format(var_name))<EOL><DEDENT>if not desc:<EOL><INDENT>warnings.warn('<STR_LIT>'.format(var_name))<EOL><DEDENT>var_name = fix_var_name(var_name)<EOL>full_desc = fix_desc(desc, units)<EOL>info.append({'<STR_LIT:name>': var_name, '<STR_LIT>': full_desc, '<STR_LIT>': fmt})<EOL>if (ignored_item(info[-<NUM_LIT:1>]) and var_name != '<STR_LIT>'<EOL>and '<STR_LIT>' not in full_desc):<EOL><INDENT>warnings.warn('<STR_LIT>'.format(var_name, typ))<EOL><DEDENT><DEDENT>except (ValueError, AssertionError):<EOL><INDENT>warnings.warn('<STR_LIT>'.format(lineno + <NUM_LIT:1>, '<STR_LIT::>'.join(parts)))<EOL>raise<EOL><DEDENT><DEDENT>return info<EOL><DEDENT>
Handle information for message type 18.
f8500:m2
def fix_type(typ, size, additional=None):
if additional is not None:<EOL><INDENT>my_types = types + additional<EOL><DEDENT>else:<EOL><INDENT>my_types = types<EOL><DEDENT>for t, info in my_types:<EOL><INDENT>if callable(t):<EOL><INDENT>matches = t(typ)<EOL><DEDENT>else:<EOL><INDENT>matches = t == typ<EOL><DEDENT>if matches:<EOL><INDENT>if callable(info):<EOL><INDENT>fmt_str, true_size = info(size)<EOL><DEDENT>else:<EOL><INDENT>fmt_str, true_size = info<EOL><DEDENT>assert size == true_size, ('<STR_LIT>'.format(typ, size,<EOL>true_size))<EOL>return fmt_str.format(size=size)<EOL><DEDENT><DEDENT>raise ValueError('<STR_LIT>'.format(typ))<EOL>
Create the appropriate struct type, based on the type and size information in the column.
f8500:m3
def fix_var_name(var_name):
name = var_name.strip()<EOL>for char in '<STR_LIT>':<EOL><INDENT>name = name.replace(char, '<STR_LIT:_>')<EOL><DEDENT>name = name.replace('<STR_LIT:+>', '<STR_LIT>')<EOL>name = name.replace('<STR_LIT:->', '<STR_LIT>')<EOL>if name.endswith('<STR_LIT:_>'):<EOL><INDENT>name = name[:-<NUM_LIT:1>]<EOL><DEDENT>return name<EOL>
Clean up and apply standard formatting to variable names.
f8500:m4
def fix_desc(desc, units=None):
full_desc = desc.strip()<EOL>if units and units != '<STR_LIT>':<EOL><INDENT>if full_desc:<EOL><INDENT>full_desc += '<STR_LIT>' + units + '<STR_LIT:)>'<EOL><DEDENT>else:<EOL><INDENT>full_desc = units<EOL><DEDENT><DEDENT>return full_desc<EOL>
Clean up description column.
f8500:m5
def ignored_item(item):
return item['<STR_LIT:name>'].upper() == '<STR_LIT>' or '<STR_LIT:x>' in item['<STR_LIT>']<EOL>
Determine whether this item should be ignored.
f8500:m6
def need_desc(item):
return item['<STR_LIT>'] and not ignored_item(item)<EOL>
Determine whether we need a description for this item.
f8500:m7
def field_name(item):
return '<STR_LIT>'.format(item['<STR_LIT:name>']) if not ignored_item(item) else None<EOL>
Return the field name if appropriate.
f8500:m8
def field_fmt(item):
return '<STR_LIT>'.format(item['<STR_LIT>']) if '<STR_LIT:">' not in item['<STR_LIT>'] else item['<STR_LIT>']<EOL>
Return the field format if appropriate.
f8500:m9
def write_file(fname, info):
with open(fname, '<STR_LIT:w>') as outfile:<EOL><INDENT>outfile.write('<STR_LIT>')<EOL>outfile.write('<STR_LIT>')<EOL>outfile.write('<STR_LIT>')<EOL>outfile.write('<STR_LIT>')<EOL>outfile.write('<STR_LIT>')<EOL>outfile.write('<STR_LIT>')<EOL>outdata = '<STR_LIT>'.join('<STR_LIT>'.format(<EOL>**i) for i in info if need_desc(i))<EOL>outfile.write(outdata)<EOL>outfile.write('<STR_LIT>')<EOL>outfile.write('<STR_LIT>')<EOL>outdata = '<STR_LIT>'.join('<STR_LIT>'.format(<EOL>fname=field_name(i), **i) for i in info)<EOL>outfile.write(outdata)<EOL>outfile.write('<STR_LIT>')<EOL><DEDENT>
Write out the generated Python code.
f8500:m10
def warn_deprecated(since, message='<STR_LIT>', name='<STR_LIT>', alternative='<STR_LIT>', pending=False,<EOL>obj_type='<STR_LIT>', addendum='<STR_LIT>'):
message = _generate_deprecation_message(since, message, name, alternative,<EOL>pending, obj_type)<EOL>warnings.warn(message, metpyDeprecation, stacklevel=<NUM_LIT:1>)<EOL>
Display deprecation warning in a standard way. Parameters ---------- since : str The release at which this API became deprecated. message : str, optional Override the default deprecation message. The format specifier `%(name)s` may be used for the name of the function, and `%(alternative)s` may be used in the deprecation message to insert the name of an alternative to the deprecated function. `%(obj_type)s` may be used to insert a friendly name for the type of object being deprecated. name : str, optional The name of the deprecated object. alternative : str, optional An alternative function that the user may use in place of the deprecated function. The deprecation warning will tell the user about this alternative if provided. pending : bool, optional If True, uses a PendingDeprecationWarning instead of a DeprecationWarning. obj_type : str, optional The object type being deprecated. addendum : str, optional Additional text appended directly to the final message. Examples -------- Basic example:: # To warn of the deprecation of "metpy.name_of_module" warn_deprecated('0.6.0', name='metpy.name_of_module', obj_type='module')
f8504:m1
def deprecated(since, message='<STR_LIT>', name='<STR_LIT>', alternative='<STR_LIT>', pending=False,<EOL>obj_type=None, addendum='<STR_LIT>'):
def deprecate(obj, message=message, name=name, alternative=alternative,<EOL>pending=pending, addendum=addendum):<EOL><INDENT>import textwrap<EOL>if not name:<EOL><INDENT>name = obj.__name__<EOL><DEDENT>if isinstance(obj, type):<EOL><INDENT>obj_type = '<STR_LIT:class>'<EOL>old_doc = obj.__doc__<EOL>func = obj.__init__<EOL>def finalize(wrapper, new_doc):<EOL><INDENT>obj.__init__ = wrapper<EOL>return obj<EOL><DEDENT><DEDENT>else:<EOL><INDENT>obj_type = '<STR_LIT>'<EOL>func = obj<EOL>old_doc = func.__doc__<EOL>def finalize(wrapper, new_doc):<EOL><INDENT>wrapper = functools.wraps(func)(wrapper)<EOL>return wrapper<EOL><DEDENT><DEDENT>message = _generate_deprecation_message(since, message, name,<EOL>alternative, pending,<EOL>obj_type, addendum)<EOL>def wrapper(*args, **kwargs):<EOL><INDENT>warnings.warn(message, metpyDeprecation, stacklevel=<NUM_LIT:2>)<EOL>return func(*args, **kwargs)<EOL><DEDENT>old_doc = textwrap.dedent(old_doc or '<STR_LIT>').strip('<STR_LIT:\n>')<EOL>message = message.strip()<EOL>new_doc = ('<STR_LIT>'<EOL>'<STR_LIT>'.format(since, message) + old_doc)<EOL>if not old_doc:<EOL><INDENT>new_doc += r'<STR_LIT>'<EOL><DEDENT>return finalize(wrapper, new_doc)<EOL><DEDENT>return deprecate<EOL>
Mark a function or a class as deprecated. Parameters ---------- since : str The release at which this API became deprecated. This is required. message : str, optional Override the default deprecation message. The format specifier `%(name)s` may be used for the name of the object, and `%(alternative)s` may be used in the deprecation message to insert the name of an alternative to the deprecated object. `%(obj_type)s` may be used to insert a friendly name for the type of object being deprecated. name : str, optional The name of the deprecated object; if not provided the name is automatically determined from the passed in object, though this is useful in the case of renamed functions, where the new function is just assigned to the name of the deprecated function. For example:: def new_function(): ... oldFunction = new_function alternative : str, optional An alternative object that the user may use in place of the deprecated object. The deprecation warning will tell the user about this alternative if provided. pending : bool, optional If True, uses a PendingDeprecationWarning instead of a DeprecationWarning. addendum : str, optional Additional text appended directly to the final message. Examples -------- Basic example:: @deprecated('1.4.0') def the_function_to_deprecate(): pass
f8504:m2
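Because the string literals above are masked, the following is a minimal sketch of how such a deprecation decorator typically works: build the message once, emit a warning at call time, and prepend a note to the wrapped object's docstring. The helper name and message wording are assumptions, not MetPy's exact text.

import functools
import warnings

def simple_deprecated(since, alternative=''):
    # Hypothetical, simplified analogue of the ``deprecated`` decorator above.
    def deprecate(func):
        msg = 'The {} function was deprecated in version {}.'.format(func.__name__, since)
        if alternative:
            msg += ' Use {} instead.'.format(alternative)

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            warnings.warn(msg, DeprecationWarning, stacklevel=2)
            return func(*args, **kwargs)

        wrapper.__doc__ = 'Deprecated since {}.\n\n'.format(since) + (func.__doc__ or '')
        return wrapper
    return deprecate

@simple_deprecated('1.4.0', alternative='new_function')
def the_function_to_deprecate():
    pass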
def get_keywords():
<EOL>git_refnames = "<STR_LIT>"<EOL>git_full = "<STR_LIT>"<EOL>git_date = "<STR_LIT>"<EOL>keywords = {"<STR_LIT>": git_refnames, "<STR_LIT>": git_full, "<STR_LIT:date>": git_date}<EOL>return keywords<EOL>
Get the keywords needed to look up the version information.
f8505:m0
def get_config():
<EOL>cfg = VersioneerConfig()<EOL>cfg.VCS = "<STR_LIT>"<EOL>cfg.style = "<STR_LIT>"<EOL>cfg.tag_prefix = "<STR_LIT:v>"<EOL>cfg.parentdir_prefix = "<STR_LIT>"<EOL>cfg.versionfile_source = "<STR_LIT>"<EOL>cfg.verbose = False<EOL>return cfg<EOL>
Create, populate and return the VersioneerConfig() object.
f8505:m1
def register_vcs_handler(vcs, method):
def decorate(f):<EOL><INDENT>"""<STR_LIT>"""<EOL>if vcs not in HANDLERS:<EOL><INDENT>HANDLERS[vcs] = {}<EOL><DEDENT>HANDLERS[vcs][method] = f<EOL>return f<EOL><DEDENT>return decorate<EOL>
Decorator to mark a method as the handler for a particular VCS.
f8505:m2
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,<EOL>env=None):
assert isinstance(commands, list)<EOL>p = None<EOL>for c in commands:<EOL><INDENT>try:<EOL><INDENT>dispcmd = str([c] + args)<EOL>p = subprocess.Popen([c] + args, cwd=cwd, env=env,<EOL>stdout=subprocess.PIPE,<EOL>stderr=(subprocess.PIPE if hide_stderr<EOL>else None))<EOL>break<EOL><DEDENT>except EnvironmentError:<EOL><INDENT>e = sys.exc_info()[<NUM_LIT:1>]<EOL>if e.errno == errno.ENOENT:<EOL><INDENT>continue<EOL><DEDENT>if verbose:<EOL><INDENT>print("<STR_LIT>" % dispcmd)<EOL>print(e)<EOL><DEDENT>return None, None<EOL><DEDENT><DEDENT>else:<EOL><INDENT>if verbose:<EOL><INDENT>print("<STR_LIT>" % (commands,))<EOL><DEDENT>return None, None<EOL><DEDENT>stdout = p.communicate()[<NUM_LIT:0>].strip()<EOL>if sys.version_info[<NUM_LIT:0>] >= <NUM_LIT:3>:<EOL><INDENT>stdout = stdout.decode()<EOL><DEDENT>if p.returncode != <NUM_LIT:0>:<EOL><INDENT>if verbose:<EOL><INDENT>print("<STR_LIT>" % dispcmd)<EOL>print("<STR_LIT>" % stdout)<EOL><DEDENT>return None, p.returncode<EOL><DEDENT>return stdout, p.returncode<EOL>
Call the given command(s).
f8505:m3
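A minimal sketch of the fallback strategy run_command implements: try each candidate executable name in turn, skip the ones that are not installed, and return the decoded stdout plus return code of the first one that launches. The function name and example invocation are illustrative.

import subprocess

def try_commands(commands, args, cwd=None):
    # Run the first available command from ``commands`` with ``args``.
    for cmd in commands:
        try:
            proc = subprocess.Popen([cmd] + args, cwd=cwd,
                                    stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            break
        except OSError:
            continue  # executable not found; try the next candidate
    else:
        return None, None
    stdout, _ = proc.communicate()
    return stdout.decode().strip(), proc.returncode

# try_commands(['git', 'git.cmd'], ['rev-parse', 'HEAD'])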
def versions_from_parentdir(parentdir_prefix, root, verbose):
rootdirs = []<EOL>for i in range(<NUM_LIT:3>):<EOL><INDENT>dirname = os.path.basename(root)<EOL>if dirname.startswith(parentdir_prefix):<EOL><INDENT>return {"<STR_LIT:version>": dirname[len(parentdir_prefix):],<EOL>"<STR_LIT>": None,<EOL>"<STR_LIT>": False, "<STR_LIT:error>": None, "<STR_LIT:date>": None}<EOL><DEDENT>else:<EOL><INDENT>rootdirs.append(root)<EOL>root = os.path.dirname(root) <EOL><DEDENT><DEDENT>if verbose:<EOL><INDENT>print("<STR_LIT>" %<EOL>(str(rootdirs), parentdir_prefix))<EOL><DEDENT>raise NotThisMethod("<STR_LIT>")<EOL>
Try to determine the version from the parent directory name. Source tarballs conventionally unpack into a directory that includes both the project name and a version string. We will also support searching up two directory levels for an appropriately named parent directory.
f8505:m4
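Compactly, the search above walks at most two directory levels upward looking for a directory whose name starts with the configured prefix; a sketch with a hypothetical 'MetPy-' prefix:

import os

def version_from_parentdir(parentdir_prefix, root):
    # Return the version suffix of the first matching parent directory, else None.
    for _ in range(3):
        dirname = os.path.basename(root)
        if dirname.startswith(parentdir_prefix):
            return dirname[len(parentdir_prefix):]
        root = os.path.dirname(root)
    return None

# version_from_parentdir('MetPy-', '/tmp/MetPy-0.9.0/src') -> '0.9.0'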
@register_vcs_handler("<STR_LIT>", "<STR_LIT>")<EOL>def git_get_keywords(versionfile_abs):
<EOL>keywords = {}<EOL>try:<EOL><INDENT>f = open(versionfile_abs, "<STR_LIT:r>")<EOL>for line in f.readlines():<EOL><INDENT>if line.strip().startswith("<STR_LIT>"):<EOL><INDENT>mo = re.search(r'<STR_LIT>', line)<EOL>if mo:<EOL><INDENT>keywords["<STR_LIT>"] = mo.group(<NUM_LIT:1>)<EOL><DEDENT><DEDENT>if line.strip().startswith("<STR_LIT>"):<EOL><INDENT>mo = re.search(r'<STR_LIT>', line)<EOL>if mo:<EOL><INDENT>keywords["<STR_LIT>"] = mo.group(<NUM_LIT:1>)<EOL><DEDENT><DEDENT>if line.strip().startswith("<STR_LIT>"):<EOL><INDENT>mo = re.search(r'<STR_LIT>', line)<EOL>if mo:<EOL><INDENT>keywords["<STR_LIT:date>"] = mo.group(<NUM_LIT:1>)<EOL><DEDENT><DEDENT><DEDENT>f.close()<EOL><DEDENT>except EnvironmentError:<EOL><INDENT>pass<EOL><DEDENT>return keywords<EOL>
Extract version information from the given file.
f8505:m5
@register_vcs_handler("<STR_LIT>", "<STR_LIT>")<EOL>def git_versions_from_keywords(keywords, tag_prefix, verbose):
if not keywords:<EOL><INDENT>raise NotThisMethod("<STR_LIT>")<EOL><DEDENT>date = keywords.get("<STR_LIT:date>")<EOL>if date is not None:<EOL><INDENT>date = date.strip().replace("<STR_LIT:U+0020>", "<STR_LIT:T>", <NUM_LIT:1>).replace("<STR_LIT:U+0020>", "<STR_LIT>", <NUM_LIT:1>)<EOL><DEDENT>refnames = keywords["<STR_LIT>"].strip()<EOL>if refnames.startswith("<STR_LIT>"):<EOL><INDENT>if verbose:<EOL><INDENT>print("<STR_LIT>")<EOL><DEDENT>raise NotThisMethod("<STR_LIT>")<EOL><DEDENT>refs = set([r.strip() for r in refnames.strip("<STR_LIT>").split("<STR_LIT:U+002C>")])<EOL>TAG = "<STR_LIT>"<EOL>tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])<EOL>if not tags:<EOL><INDENT>tags = set([r for r in refs if re.search(r'<STR_LIT>', r)])<EOL>if verbose:<EOL><INDENT>print("<STR_LIT>" % "<STR_LIT:U+002C>".join(refs - tags))<EOL><DEDENT><DEDENT>if verbose:<EOL><INDENT>print("<STR_LIT>" % "<STR_LIT:U+002C>".join(sorted(tags)))<EOL><DEDENT>for ref in sorted(tags):<EOL><INDENT>if ref.startswith(tag_prefix):<EOL><INDENT>r = ref[len(tag_prefix):]<EOL>if verbose:<EOL><INDENT>print("<STR_LIT>" % r)<EOL><DEDENT>return {"<STR_LIT:version>": r,<EOL>"<STR_LIT>": keywords["<STR_LIT>"].strip(),<EOL>"<STR_LIT>": False, "<STR_LIT:error>": None,<EOL>"<STR_LIT:date>": date}<EOL><DEDENT><DEDENT>if verbose:<EOL><INDENT>print("<STR_LIT>")<EOL><DEDENT>return {"<STR_LIT:version>": "<STR_LIT>",<EOL>"<STR_LIT>": keywords["<STR_LIT>"].strip(),<EOL>"<STR_LIT>": False, "<STR_LIT:error>": "<STR_LIT>", "<STR_LIT:date>": None}<EOL>
Get version information from git keywords.
f8505:m6
@register_vcs_handler("<STR_LIT>", "<STR_LIT>")<EOL>def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
GITS = ["<STR_LIT>"]<EOL>if sys.platform == "<STR_LIT:win32>":<EOL><INDENT>GITS = ["<STR_LIT>", "<STR_LIT>"]<EOL><DEDENT>out, rc = run_command(GITS, ["<STR_LIT>", "<STR_LIT>"], cwd=root,<EOL>hide_stderr=True)<EOL>if rc != <NUM_LIT:0>:<EOL><INDENT>if verbose:<EOL><INDENT>print("<STR_LIT>" % root)<EOL><DEDENT>raise NotThisMethod("<STR_LIT>")<EOL><DEDENT>describe_out, rc = run_command(GITS, ["<STR_LIT>", "<STR_LIT>", "<STR_LIT>",<EOL>"<STR_LIT>", "<STR_LIT>",<EOL>"<STR_LIT>", "<STR_LIT>" % tag_prefix],<EOL>cwd=root)<EOL>if describe_out is None:<EOL><INDENT>raise NotThisMethod("<STR_LIT>")<EOL><DEDENT>describe_out = describe_out.strip()<EOL>full_out, rc = run_command(GITS, ["<STR_LIT>", "<STR_LIT>"], cwd=root)<EOL>if full_out is None:<EOL><INDENT>raise NotThisMethod("<STR_LIT>")<EOL><DEDENT>full_out = full_out.strip()<EOL>pieces = {}<EOL>pieces["<STR_LIT>"] = full_out<EOL>pieces["<STR_LIT>"] = full_out[:<NUM_LIT:7>] <EOL>pieces["<STR_LIT:error>"] = None<EOL>git_describe = describe_out<EOL>dirty = git_describe.endswith("<STR_LIT>")<EOL>pieces["<STR_LIT>"] = dirty<EOL>if dirty:<EOL><INDENT>git_describe = git_describe[:git_describe.rindex("<STR_LIT>")]<EOL><DEDENT>if "<STR_LIT:->" in git_describe:<EOL><INDENT>mo = re.search(r'<STR_LIT>', git_describe)<EOL>if not mo:<EOL><INDENT>pieces["<STR_LIT:error>"] = ("<STR_LIT>"<EOL>% describe_out)<EOL>return pieces<EOL><DEDENT>full_tag = mo.group(<NUM_LIT:1>)<EOL>if not full_tag.startswith(tag_prefix):<EOL><INDENT>if verbose:<EOL><INDENT>fmt = "<STR_LIT>"<EOL>print(fmt % (full_tag, tag_prefix))<EOL><DEDENT>pieces["<STR_LIT:error>"] = ("<STR_LIT>"<EOL>% (full_tag, tag_prefix))<EOL>return pieces<EOL><DEDENT>pieces["<STR_LIT>"] = full_tag[len(tag_prefix):]<EOL>pieces["<STR_LIT>"] = int(mo.group(<NUM_LIT:2>))<EOL>pieces["<STR_LIT>"] = mo.group(<NUM_LIT:3>)<EOL><DEDENT>else:<EOL><INDENT>pieces["<STR_LIT>"] = None<EOL>count_out, rc = run_command(GITS, ["<STR_LIT>", "<STR_LIT>", "<STR_LIT>"],<EOL>cwd=root)<EOL>pieces["<STR_LIT>"] = int(count_out) <EOL><DEDENT>date = run_command(GITS, ["<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>"],<EOL>cwd=root)[<NUM_LIT:0>].strip()<EOL>pieces["<STR_LIT:date>"] = date.strip().replace("<STR_LIT:U+0020>", "<STR_LIT:T>", <NUM_LIT:1>).replace("<STR_LIT:U+0020>", "<STR_LIT>", <NUM_LIT:1>)<EOL>return pieces<EOL>
Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* expanded, and _version.py hasn't already been rewritten with a short version string, meaning we're inside a checked out source tree.
f8505:m7
def plus_or_dot(pieces):
if "<STR_LIT:+>" in pieces.get("<STR_LIT>", "<STR_LIT>"):<EOL><INDENT>return "<STR_LIT:.>"<EOL><DEDENT>return "<STR_LIT:+>"<EOL>
Return a + if we don't already have one, else return a .
f8505:m8
def render_pep440(pieces):
if pieces["<STR_LIT>"]:<EOL><INDENT>rendered = pieces["<STR_LIT>"]<EOL>if pieces["<STR_LIT>"] or pieces["<STR_LIT>"]:<EOL><INDENT>rendered += plus_or_dot(pieces)<EOL>rendered += "<STR_LIT>" % (pieces["<STR_LIT>"], pieces["<STR_LIT>"])<EOL>if pieces["<STR_LIT>"]:<EOL><INDENT>rendered += "<STR_LIT>"<EOL><DEDENT><DEDENT><DEDENT>else:<EOL><INDENT>rendered = "<STR_LIT>" % (pieces["<STR_LIT>"],<EOL>pieces["<STR_LIT>"])<EOL>if pieces["<STR_LIT>"]:<EOL><INDENT>rendered += "<STR_LIT>"<EOL><DEDENT><DEDENT>return rendered<EOL>
Build up version string, with post-release "local version identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty Exceptions: 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
f8505:m9
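Since the dictionary keys above are masked, here is a sketch of the standard versioneer pieces-to-PEP 440 mapping that the docstring describes; the key names ('closest-tag', 'distance', 'short', 'dirty') are the conventional ones and are assumed rather than read from this dump.

def render_pep440_sketch(pieces):
    # e.g. {'closest-tag': 'v0.9.0', 'distance': 3, 'short': 'abc1234', 'dirty': True}
    # renders as 'v0.9.0+3.gabc1234.dirty'.
    if pieces['closest-tag']:
        rendered = pieces['closest-tag']
        if pieces['distance'] or pieces['dirty']:
            rendered += '.' if '+' in rendered else '+'
            rendered += '{}.g{}'.format(pieces['distance'], pieces['short'])
            if pieces['dirty']:
                rendered += '.dirty'
    else:
        rendered = '0+untagged.{}.g{}'.format(pieces['distance'], pieces['short'])
        if pieces['dirty']:
            rendered += '.dirty'
    return rendered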
def render_pep440_pre(pieces):
if pieces["<STR_LIT>"]:<EOL><INDENT>rendered = pieces["<STR_LIT>"]<EOL>if pieces["<STR_LIT>"]:<EOL><INDENT>rendered += "<STR_LIT>" % pieces["<STR_LIT>"]<EOL><DEDENT><DEDENT>else:<EOL><INDENT>rendered = "<STR_LIT>" % pieces["<STR_LIT>"]<EOL><DEDENT>return rendered<EOL>
TAG[.post.devDISTANCE] -- No -dirty. Exceptions: 1: no tags. 0.post.devDISTANCE
f8505:m10
def render_pep440_post(pieces):
if pieces["<STR_LIT>"]:<EOL><INDENT>rendered = pieces["<STR_LIT>"]<EOL>if pieces["<STR_LIT>"] or pieces["<STR_LIT>"]:<EOL><INDENT>rendered += "<STR_LIT>" % pieces["<STR_LIT>"]<EOL>if pieces["<STR_LIT>"]:<EOL><INDENT>rendered += "<STR_LIT>"<EOL><DEDENT>rendered += plus_or_dot(pieces)<EOL>rendered += "<STR_LIT>" % pieces["<STR_LIT>"]<EOL><DEDENT><DEDENT>else:<EOL><INDENT>rendered = "<STR_LIT>" % pieces["<STR_LIT>"]<EOL>if pieces["<STR_LIT>"]:<EOL><INDENT>rendered += "<STR_LIT>"<EOL><DEDENT>rendered += "<STR_LIT>" % pieces["<STR_LIT>"]<EOL><DEDENT>return rendered<EOL>
TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that .dev0 sorts backwards (a dirty tree will appear "older" than the corresponding clean one), but you shouldn't be releasing software with -dirty anyways. Exceptions: 1: no tags. 0.postDISTANCE[.dev0]
f8505:m11
def render_pep440_old(pieces):
if pieces["<STR_LIT>"]:<EOL><INDENT>rendered = pieces["<STR_LIT>"]<EOL>if pieces["<STR_LIT>"] or pieces["<STR_LIT>"]:<EOL><INDENT>rendered += "<STR_LIT>" % pieces["<STR_LIT>"]<EOL>if pieces["<STR_LIT>"]:<EOL><INDENT>rendered += "<STR_LIT>"<EOL><DEDENT><DEDENT><DEDENT>else:<EOL><INDENT>rendered = "<STR_LIT>" % pieces["<STR_LIT>"]<EOL>if pieces["<STR_LIT>"]:<EOL><INDENT>rendered += "<STR_LIT>"<EOL><DEDENT><DEDENT>return rendered<EOL>
TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. Exceptions: 1: no tags. 0.postDISTANCE[.dev0]
f8505:m12
def render_git_describe(pieces):
if pieces["<STR_LIT>"]:<EOL><INDENT>rendered = pieces["<STR_LIT>"]<EOL>if pieces["<STR_LIT>"]:<EOL><INDENT>rendered += "<STR_LIT>" % (pieces["<STR_LIT>"], pieces["<STR_LIT>"])<EOL><DEDENT><DEDENT>else:<EOL><INDENT>rendered = pieces["<STR_LIT>"]<EOL><DEDENT>if pieces["<STR_LIT>"]:<EOL><INDENT>rendered += "<STR_LIT>"<EOL><DEDENT>return rendered<EOL>
TAG[-DISTANCE-gHEX][-dirty]. Like 'git describe --tags --dirty --always'. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix)
f8505:m13
def render_git_describe_long(pieces):
if pieces["<STR_LIT>"]:<EOL><INDENT>rendered = pieces["<STR_LIT>"]<EOL>rendered += "<STR_LIT>" % (pieces["<STR_LIT>"], pieces["<STR_LIT>"])<EOL><DEDENT>else:<EOL><INDENT>rendered = pieces["<STR_LIT>"]<EOL><DEDENT>if pieces["<STR_LIT>"]:<EOL><INDENT>rendered += "<STR_LIT>"<EOL><DEDENT>return rendered<EOL>
TAG-DISTANCE-gHEX[-dirty]. Like 'git describe --tags --dirty --always --long'. The distance/hash is unconditional. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix)
f8505:m14
def render(pieces, style):
if pieces["<STR_LIT:error>"]:<EOL><INDENT>return {"<STR_LIT:version>": "<STR_LIT>",<EOL>"<STR_LIT>": pieces.get("<STR_LIT>"),<EOL>"<STR_LIT>": None,<EOL>"<STR_LIT:error>": pieces["<STR_LIT:error>"],<EOL>"<STR_LIT:date>": None}<EOL><DEDENT>if not style or style == "<STR_LIT:default>":<EOL><INDENT>style = "<STR_LIT>" <EOL><DEDENT>if style == "<STR_LIT>":<EOL><INDENT>rendered = render_pep440(pieces)<EOL><DEDENT>elif style == "<STR_LIT>":<EOL><INDENT>rendered = render_pep440_pre(pieces)<EOL><DEDENT>elif style == "<STR_LIT>":<EOL><INDENT>rendered = render_pep440_post(pieces)<EOL><DEDENT>elif style == "<STR_LIT>":<EOL><INDENT>rendered = render_pep440_old(pieces)<EOL><DEDENT>elif style == "<STR_LIT>":<EOL><INDENT>rendered = render_git_describe(pieces)<EOL><DEDENT>elif style == "<STR_LIT>":<EOL><INDENT>rendered = render_git_describe_long(pieces)<EOL><DEDENT>else:<EOL><INDENT>raise ValueError("<STR_LIT>" % style)<EOL><DEDENT>return {"<STR_LIT:version>": rendered, "<STR_LIT>": pieces["<STR_LIT>"],<EOL>"<STR_LIT>": pieces["<STR_LIT>"], "<STR_LIT:error>": None,<EOL>"<STR_LIT:date>": pieces.get("<STR_LIT:date>")}<EOL>
Render the given version pieces into the requested style.
f8505:m15
def get_versions():
<EOL>cfg = get_config()<EOL>verbose = cfg.verbose<EOL>try:<EOL><INDENT>return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,<EOL>verbose)<EOL><DEDENT>except NotThisMethod:<EOL><INDENT>pass<EOL><DEDENT>try:<EOL><INDENT>root = os.path.realpath(__file__)<EOL>for i in cfg.versionfile_source.split('<STR_LIT:/>'):<EOL><INDENT>root = os.path.dirname(root)<EOL><DEDENT><DEDENT>except NameError:<EOL><INDENT>return {"<STR_LIT:version>": "<STR_LIT>", "<STR_LIT>": None,<EOL>"<STR_LIT>": None,<EOL>"<STR_LIT:error>": "<STR_LIT>",<EOL>"<STR_LIT:date>": None}<EOL><DEDENT>try:<EOL><INDENT>pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)<EOL>return render(pieces, cfg.style)<EOL><DEDENT>except NotThisMethod:<EOL><INDENT>pass<EOL><DEDENT>try:<EOL><INDENT>if cfg.parentdir_prefix:<EOL><INDENT>return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)<EOL><DEDENT><DEDENT>except NotThisMethod:<EOL><INDENT>pass<EOL><DEDENT>return {"<STR_LIT:version>": "<STR_LIT>", "<STR_LIT>": None,<EOL>"<STR_LIT>": None,<EOL>"<STR_LIT:error>": "<STR_LIT>", "<STR_LIT:date>": None}<EOL>
Get version information or return default if unable to do so.
f8505:m16
def get_upper_air_data(date, station):
sounding_key = '<STR_LIT>'.format(date, station)<EOL>sounding_files = {'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': '<STR_LIT>'}<EOL>fname = sounding_files[sounding_key]<EOL>fobj = get_test_data(fname)<EOL>def to_float(s):<EOL><INDENT>if not s.strip():<EOL><INDENT>s = '<STR_LIT>'<EOL><DEDENT>return float(s)<EOL><DEDENT>for _ in range(<NUM_LIT:4>):<EOL><INDENT>fobj.readline()<EOL><DEDENT>arr_data = []<EOL>for row in fobj:<EOL><INDENT>level = to_float(row[<NUM_LIT:0>:<NUM_LIT:7>])<EOL>values = (to_float(row[<NUM_LIT:7>:<NUM_LIT>]), to_float(row[<NUM_LIT>:<NUM_LIT>]), to_float(row[<NUM_LIT>:<NUM_LIT>]),<EOL>to_float(row[<NUM_LIT>:<NUM_LIT>]), to_float(row[<NUM_LIT>:<NUM_LIT>]))<EOL>if any(np.invert(np.isnan(values[<NUM_LIT:1>:]))):<EOL><INDENT>arr_data.append((level,) + values)<EOL><DEDENT><DEDENT>p, z, t, td, direc, spd = np.array(arr_data).T<EOL>p = p * units.hPa<EOL>z = z * units.meters<EOL>t = t * units.degC<EOL>td = td * units.degC<EOL>direc = direc * units.degrees<EOL>spd = spd * units.knots<EOL>u, v = wind_components(spd, direc)<EOL>return {'<STR_LIT>': p, '<STR_LIT>': z, '<STR_LIT>': t,<EOL>'<STR_LIT>': td, '<STR_LIT>': direc, '<STR_LIT>': spd, '<STR_LIT>': u, '<STR_LIT>': v}<EOL>
Get upper air observations from the test data cache. Parameters ---------- date : datetime The date and time of the desired observation. station : str The three letter ICAO identifier of the station for which data should be downloaded. Returns ------- dict : upper air data
f8506:m0
def check_and_drop_units(actual, desired):
try:<EOL><INDENT>if hasattr(desired, '<STR_LIT>'):<EOL><INDENT>if not hasattr(actual, '<STR_LIT>'):<EOL><INDENT>actual = units.Quantity(actual, '<STR_LIT>')<EOL><DEDENT>actual = actual.to(desired.units)<EOL><DEDENT>else:<EOL><INDENT>if hasattr(actual, '<STR_LIT>'):<EOL><INDENT>actual = actual.to('<STR_LIT>')<EOL><DEDENT><DEDENT><DEDENT>except DimensionalityError:<EOL><INDENT>raise AssertionError('<STR_LIT>'.format(<EOL>actual.units, getattr(desired, '<STR_LIT>', '<STR_LIT>')))<EOL><DEDENT>except AttributeError:<EOL><INDENT>pass<EOL><DEDENT>if hasattr(actual, '<STR_LIT>'):<EOL><INDENT>actual = actual.magnitude<EOL><DEDENT>if hasattr(desired, '<STR_LIT>'):<EOL><INDENT>desired = desired.magnitude<EOL><DEDENT>return actual, desired<EOL>
r"""Check that the units on the passed in arrays are compatible; return the magnitudes. Parameters ---------- actual : `pint.Quantity` or array-like desired : `pint.Quantity` or array-like Returns ------- actual, desired array-like versions of `actual` and `desired` once they have been coerced to compatible units. Raises ------ AssertionError If the units on the passed in objects are not compatible.
f8506:m1
def assert_nan(value, units):
if not np.isnan(value):<EOL><INDENT>pytest.fail('<STR_LIT>'.format(value))<EOL><DEDENT>check_and_drop_units(value, np.nan * units)<EOL>return True<EOL>
Check for nan with proper units.
f8506:m2
def assert_almost_equal(actual, desired, decimal=<NUM_LIT:7>):
actual, desired = check_and_drop_units(actual, desired)<EOL>numpy.testing.assert_almost_equal(actual, desired, decimal)<EOL>
Check that values are almost equal, including units. Wrapper around :func:`numpy.testing.assert_almost_equal`
f8506:m3
def assert_array_almost_equal(actual, desired, decimal=<NUM_LIT:7>):
actual, desired = check_and_drop_units(actual, desired)<EOL>numpy.testing.assert_array_almost_equal(actual, desired, decimal)<EOL>
Check that arrays are almost equal, including units. Wrapper around :func:`numpy.testing.assert_array_almost_equal`
f8506:m4
def assert_array_equal(actual, desired):
actual, desired = check_and_drop_units(actual, desired)<EOL>numpy.testing.assert_array_equal(actual, desired)<EOL>
Check that arrays are equal, including units. Wrapper around :func:`numpy.testing.assert_array_equal`
f8506:m5
def assert_xarray_allclose(actual, desired):
xr.testing.assert_allclose(actual, desired)<EOL>assert desired.metpy.coordinates_identical(actual)<EOL>assert desired.attrs == actual.attrs<EOL>
Check that the xarrays are almost equal, including coordinates and attributes.
f8506:m6
def ignore_deprecation(func):
@functools.wraps(func)<EOL>def wrapper(*args, **kwargs):<EOL><INDENT>with pytest.warns(MetpyDeprecationWarning):<EOL><INDENT>return func(*args, **kwargs)<EOL><DEDENT><DEDENT>return wrapper<EOL>
Decorate a function to swallow metpy deprecation warnings, making sure they are present. This should be used on deprecation function tests to make sure the deprecation warnings are not failing the tests, but still allow testing of those functions.
f8506:m9
def pandas_dataframe_to_unit_arrays(df, column_units=None):
if not column_units:<EOL><INDENT>try:<EOL><INDENT>column_units = df.units<EOL><DEDENT>except AttributeError:<EOL><INDENT>raise ValueError('<STR_LIT>'<EOL>'<STR_LIT>')<EOL><DEDENT><DEDENT>res = {}<EOL>for column in df:<EOL><INDENT>if column in column_units and column_units[column]:<EOL><INDENT>res[column] = df[column].values * units(column_units[column])<EOL><DEDENT>else:<EOL><INDENT>res[column] = df[column].values<EOL><DEDENT><DEDENT>return res<EOL>
Attach units to data in pandas dataframes and return united arrays. Parameters ---------- df : `pandas.DataFrame` Data in pandas dataframe. column_units : dict Dictionary of units to attach to columns of the dataframe. Overrides the units attribute if it is attached to the dataframe. Returns ------- Dictionary containing united arrays with keys corresponding to the dataframe column names.
f8507:m0
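A short usage sketch, assuming MetPy and pandas are installed; the column names and units below are made up for illustration.

import pandas as pd
from metpy.units import pandas_dataframe_to_unit_arrays

df = pd.DataFrame({'pressure': [1000., 925., 850.], 'temperature': [25., 20., 15.]})
data = pandas_dataframe_to_unit_arrays(
    df, column_units={'pressure': 'hPa', 'temperature': 'degC'})
print(data['pressure'])  # numpy array with hPa attached as a pint quantity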
def concatenate(arrs, axis=<NUM_LIT:0>):
dest = '<STR_LIT>'<EOL>for a in arrs:<EOL><INDENT>if hasattr(a, '<STR_LIT>'):<EOL><INDENT>dest = a.units<EOL>break<EOL><DEDENT><DEDENT>data = []<EOL>for a in arrs:<EOL><INDENT>if hasattr(a, '<STR_LIT:to>'):<EOL><INDENT>a = a.to(dest).magnitude<EOL><DEDENT>data.append(np.atleast_1d(a))<EOL><DEDENT>data = np.ma.concatenate(data, axis=axis)<EOL>if not np.any(data.mask):<EOL><INDENT>data = np.asarray(data)<EOL><DEDENT>return units.Quantity(data, dest)<EOL>
r"""Concatenate multiple values into a new unitized object. This is essentially a unit-aware version of `numpy.concatenate`. All items must be able to be converted to the same units. If an item has no units, it will be given those of the rest of the collection, without conversion. The first units found in the arguments is used as the final output units. Parameters ---------- arrs : Sequence of arrays The items to be joined together axis : integer, optional The array axis along which to join the arrays. Defaults to 0 (the first dimension) Returns ------- `pint.Quantity` New container with the value passed in and units corresponding to the first item.
f8507:m1
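The technique here: find the first argument that carries units, convert every convertible item to those units, concatenate the bare magnitudes, and reattach the units. A self-contained sketch using pint directly (assuming pint is installed; not MetPy's exact implementation):

import numpy as np
import pint

ureg = pint.UnitRegistry()

def unit_concatenate(arrs, axis=0):
    # Use the units of the first united item as the output units.
    dest = next((a.units for a in arrs if hasattr(a, 'units')), ureg.dimensionless)
    mags = [np.atleast_1d(a.to(dest).magnitude if hasattr(a, 'to') else a) for a in arrs]
    return ureg.Quantity(np.concatenate(mags, axis=axis), dest)

# unit_concatenate([1 * ureg.km, np.array([200., 300.]) * ureg.m])
#   -> <Quantity([1.  0.2 0.3], 'kilometer')>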
def diff(x, **kwargs):
ret = np.diff(x, **kwargs)<EOL>if hasattr(x, '<STR_LIT>'):<EOL><INDENT>it = x.flat<EOL>true_units = (next(it) - next(it)).units<EOL>ret = ret * true_units<EOL><DEDENT>return ret<EOL>
Calculate the n-th discrete difference along given axis. Wraps :func:`numpy.diff` to handle units. Parameters ---------- x : array-like Input data n : int, optional The number of times values are differenced. axis : int, optional The axis along which the difference is taken, default is the last axis. Returns ------- diff : ndarray The n-th differences. The shape of the output is the same as `x` except along `axis` where the dimension is smaller by `n`. The type of the output is the same as that of the input. See Also -------- numpy.diff
f8507:m2
def atleast_1d(*arrs):
mags = [a.magnitude if hasattr(a, '<STR_LIT>') else a for a in arrs]<EOL>orig_units = [a.units if hasattr(a, '<STR_LIT>') else None for a in arrs]<EOL>ret = np.atleast_1d(*mags)<EOL>if len(mags) == <NUM_LIT:1>:<EOL><INDENT>if orig_units[<NUM_LIT:0>] is not None:<EOL><INDENT>return units.Quantity(ret, orig_units[<NUM_LIT:0>])<EOL><DEDENT>else:<EOL><INDENT>return ret<EOL><DEDENT><DEDENT>return [units.Quantity(m, u) if u is not None else m for m, u in zip(ret, orig_units)]<EOL>
r"""Convert inputs to arrays with at least one dimension. Scalars are converted to 1-dimensional arrays, whilst other higher-dimensional inputs are preserved. This is a thin wrapper around `numpy.atleast_1d` to preserve units. Parameters ---------- arrs : arbitrary positional arguments Input arrays to be converted if necessary Returns ------- `pint.Quantity` A single quantity or a list of quantities, matching the number of inputs.
f8507:m3
def atleast_2d(*arrs):
mags = [a.magnitude if hasattr(a, '<STR_LIT>') else a for a in arrs]<EOL>orig_units = [a.units if hasattr(a, '<STR_LIT>') else None for a in arrs]<EOL>ret = np.atleast_2d(*mags)<EOL>if len(mags) == <NUM_LIT:1>:<EOL><INDENT>if orig_units[<NUM_LIT:0>] is not None:<EOL><INDENT>return units.Quantity(ret, orig_units[<NUM_LIT:0>])<EOL><DEDENT>else:<EOL><INDENT>return ret<EOL><DEDENT><DEDENT>return [units.Quantity(m, u) if u is not None else m for m, u in zip(ret, orig_units)]<EOL>
r"""Convert inputs to arrays with at least two dimensions. Scalars and 1-dimensional arrays are converted to 2-dimensional arrays, whilst other higher-dimensional inputs are preserved. This is a thin wrapper around `numpy.atleast_2d` to preserve units. Parameters ---------- arrs : arbitrary positional arguments Input arrays to be converted if necessary Returns ------- `pint.Quantity` A single quantity or a list of quantities, matching the number of inputs.
f8507:m4
def masked_array(data, data_units=None, **kwargs):
if data_units is None:<EOL><INDENT>data_units = data.units<EOL><DEDENT>return units.Quantity(np.ma.masked_array(data, **kwargs), data_units)<EOL>
Create a :class:`numpy.ma.MaskedArray` with units attached. This is a thin wrapper around :func:`numpy.ma.masked_array` that ensures that units are properly attached to the result (otherwise units are silently lost). Units are taken from the ``data_units`` argument, or if this is ``None``, the units on ``data`` are used. Parameters ---------- data : array_like The source data. If ``data_units`` is `None`, this should be a `pint.Quantity` with the desired units. data_units : str or `pint.Unit` The units for the resulting `pint.Quantity` **kwargs : Arbitrary keyword arguments passed to `numpy.ma.masked_array` Returns ------- `pint.Quantity`
f8507:m5
def _check_argument_units(args, dimensionality):
for arg, val in args.items():<EOL><INDENT>try:<EOL><INDENT>need, parsed = dimensionality[arg]<EOL><DEDENT>except KeyError:<EOL><INDENT>continue<EOL><DEDENT>try:<EOL><INDENT>if val.dimensionality != parsed:<EOL><INDENT>yield arg, val.units, need<EOL><DEDENT><DEDENT>except AttributeError:<EOL><INDENT>if parsed != '<STR_LIT>':<EOL><INDENT>yield arg, '<STR_LIT:none>', need<EOL><DEDENT><DEDENT><DEDENT>
Yield arguments with improper dimensionality.
f8507:m6
def check_units(*units_by_pos, **units_by_name):
try:<EOL><INDENT>from inspect import signature<EOL>def dec(func):<EOL><INDENT>sig = signature(func)<EOL>bound_units = sig.bind_partial(*units_by_pos, **units_by_name)<EOL>dims = {name: (orig, units.get_dimensionality(orig.replace('<STR_LIT>', '<STR_LIT>')))<EOL>for name, orig in bound_units.arguments.items()}<EOL>@functools.wraps(func)<EOL>def wrapper(*args, **kwargs):<EOL><INDENT>bound_args = sig.bind(*args, **kwargs)<EOL>bad = list(_check_argument_units(bound_args.arguments, dims))<EOL>if bad:<EOL><INDENT>msg = '<STR_LIT>'.format(<EOL>func.__name__,<EOL>'<STR_LIT:U+002CU+0020>'.join('<STR_LIT>'.format(arg, req, given)<EOL>for arg, given, req in bad))<EOL>if '<STR_LIT:none>' in msg:<EOL><INDENT>msg += ('<STR_LIT>'<EOL>'<STR_LIT>'<EOL>'<STR_LIT>')<EOL><DEDENT>raise ValueError(msg)<EOL><DEDENT>return func(*args, **kwargs)<EOL><DEDENT>return wrapper<EOL><DEDENT><DEDENT>except ImportError:<EOL><INDENT>def dec(func):<EOL><INDENT>return func<EOL><DEDENT><DEDENT>return dec<EOL>
Create a decorator to check units of function arguments.
f8507:m7
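A compact sketch of the dimensionality-checking decorator pattern described above: resolve the expected dimensionalities once from the declared unit strings, then at call time bind the arguments and compare each one's dimensionality, raising a readable error on mismatch. This uses pint directly and illustrative names; it is not MetPy's internal code.

import functools
from inspect import signature
import pint

ureg = pint.UnitRegistry()

def check_units_sketch(**unit_specs):
    # Decorator: require the named arguments to have the given dimensionality.
    def dec(func):
        sig = signature(func)
        dims = {name: ureg.get_dimensionality(spec) for name, spec in unit_specs.items()}

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            bound = sig.bind(*args, **kwargs)
            for name, need in dims.items():
                if name not in bound.arguments:
                    continue  # argument left at its default; skip the check
                val = bound.arguments[name]
                if getattr(val, 'dimensionality', None) != need:
                    raise ValueError('`{}` must have dimensionality {}'.format(name, need))
            return func(*args, **kwargs)
        return wrapper
    return dec

@check_units_sketch(pressure='[pressure]')
def identity(pressure):
    return pressure

identity(1000 * ureg.hPa)  # OK; identity(1000.) raises ValueError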
def is_string_like(s):
return isinstance(s, string_type)<EOL>
Check if an object is a string.
f8509:m0
def broadcast_indices(x, minv, ndim, axis):
ret = []<EOL>for dim in range(ndim):<EOL><INDENT>if dim == axis:<EOL><INDENT>ret.append(minv)<EOL><DEDENT>else:<EOL><INDENT>broadcast_slice = [np.newaxis] * ndim<EOL>broadcast_slice[dim] = slice(None)<EOL>dim_inds = np.arange(x.shape[dim])<EOL>ret.append(dim_inds[tuple(broadcast_slice)])<EOL><DEDENT><DEDENT>return tuple(ret)<EOL>
Calculate index values to properly broadcast index array within data array. See usage in interp.
f8509:m2
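What the index broadcasting above achieves: build a tuple of index arrays where the target ``axis`` uses a precomputed index array and every other dimension uses a broadcastable ``arange``, so that fancy indexing selects one element per position. A tiny numpy demonstration of the same idea:

import numpy as np

data = np.arange(12).reshape(3, 4)               # shape (3, 4)
min_idx = data.argmin(axis=0)                    # index along axis 0 for each column
cols = np.arange(data.shape[1])[np.newaxis, :]   # broadcastable indices for the other axis
picked = data[min_idx[np.newaxis, :], cols]      # fancy indexing with broadcast index arrays
assert (picked == data.min(axis=0)).all()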
def __init__(self):
self._registry = {}<EOL>
Initialize an empty registry.
f8509:c0:m0
def register(self, name):
def dec(func):<EOL><INDENT>self._registry[name] = func<EOL>return func<EOL><DEDENT>return dec<EOL>
Register a callable with the registry under a particular name. Parameters ---------- name : str The name under which to register a function Returns ------- dec : callable A decorator that takes a function and will register it under the name.
f8509:c0:m1
def __getitem__(self, name):
return self._registry[name]<EOL>
Return any callable registered under name.
f8509:c0:m2
def __init__(self, globls):
self.globls = globls<EOL>self.exports = globls.setdefault('<STR_LIT>', [])<EOL>
Initialize the Exporter.
f8510:c0:m0
def export(self, defn):
self.exports.append(defn.__name__)<EOL>return defn<EOL>
Declare a function or class as exported.
f8510:c0:m1
def __enter__(self):
self.start_vars = set(self.globls)<EOL>
Start a block tracking all instances created at global scope.
f8510:c0:m2
def __exit__(self, exc_type, exc_val, exc_tb):
self.exports.extend(set(self.globls) - self.start_vars)<EOL>del self.start_vars<EOL>
Exit the instance tracking block.
f8510:c0:m3
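Taken together, the four methods above implement an ``__all__`` exporter: names can be exported explicitly with a decorator, or implicitly by tracking what gets defined inside a ``with`` block. A self-contained illustrative version (the masked literal is assumed to be '__all__'):

class ExporterSketch(object):
    # Track which module-level names should go into ``__all__``.

    def __init__(self, globls):
        self.globls = globls
        self.exports = globls.setdefault('__all__', [])

    def export(self, defn):
        self.exports.append(defn.__name__)
        return defn

    def __enter__(self):
        self.start_vars = set(self.globls)

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.exports.extend(set(self.globls) - self.start_vars)
        del self.start_vars

# Typical module-level use:
# exporter = ExporterSketch(globals())
#
# @exporter.export
# def public_function():
#     pass
#
# with exporter:
#     PUBLIC_CONSTANT = 10   # picked up automatically on __exit__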
def add_timestamp(ax, time=None, x=<NUM_LIT>, y=-<NUM_LIT>, ha='<STR_LIT:right>', high_contrast=False,<EOL>pretext='<STR_LIT>', time_format='<STR_LIT>', **kwargs):
if high_contrast:<EOL><INDENT>text_args = {'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>':<EOL>[mpatheffects.withStroke(linewidth=<NUM_LIT:2>, foreground='<STR_LIT>')]}<EOL><DEDENT>else:<EOL><INDENT>text_args = {}<EOL><DEDENT>text_args.update(**kwargs)<EOL>if not time:<EOL><INDENT>time = datetime.utcnow()<EOL><DEDENT>timestr = pretext + time.strftime(time_format)<EOL>return ax.text(x, y, timestr, ha=ha, transform=ax.transAxes, **text_args)<EOL>
Add a timestamp to a plot. Adds a timestamp to a plot, defaulting to the time of plot creation in ISO format. Parameters ---------- ax : `matplotlib.axes.Axes` The `Axes` instance used for plotting time : `datetime.datetime` Specific time to be plotted - datetime.utcnow will be used if not specified x : float Relative x position on the axes of the timestamp y : float Relative y position on the axes of the timestamp ha : str Horizontal alignment of the time stamp string high_contrast : bool Outline text for increased contrast pretext : str Text to appear before the timestamp, optional. Defaults to 'Created: ' time_format : str Display format of time, optional. Defaults to ISO format. Returns ------- `matplotlib.text.Text` The `matplotlib.text.Text` instance created
f8511:m0
def _add_logo(fig, x=<NUM_LIT:10>, y=<NUM_LIT>, zorder=<NUM_LIT:100>, which='<STR_LIT>', size='<STR_LIT>', **kwargs):
fname_suffix = {'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': '<STR_LIT>'}<EOL>fname_prefix = {'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': '<STR_LIT>'}<EOL>try:<EOL><INDENT>fname = fname_prefix[which] + fname_suffix[size]<EOL>fpath = posixpath.join('<STR_LIT>', fname)<EOL><DEDENT>except KeyError:<EOL><INDENT>raise ValueError('<STR_LIT>')<EOL><DEDENT>logo = imread(pkg_resources.resource_stream('<STR_LIT>', fpath))<EOL>return fig.figimage(logo, x, y, zorder=zorder, **kwargs)<EOL>
Add the MetPy or Unidata logo to a figure. Adds an image to the figure. Parameters ---------- fig : `matplotlib.figure` The `figure` instance used for plotting x : int x position padding in pixels y : float y position padding in pixels zorder : int The zorder of the logo which : str Which logo to plot 'metpy' or 'unidata' size : str Size of logo to be used. Can be 'small' for 75 px square or 'large' for 150 px square. Returns ------- `matplotlib.image.FigureImage` The `matplotlib.image.FigureImage` instance created
f8511:m1
def add_metpy_logo(fig, x=<NUM_LIT:10>, y=<NUM_LIT>, zorder=<NUM_LIT:100>, size='<STR_LIT>', **kwargs):
return _add_logo(fig, x=x, y=y, zorder=zorder, which='<STR_LIT>', size=size, **kwargs)<EOL>
Add the MetPy logo to a figure. Adds an image of the MetPy logo to the figure. Parameters ---------- fig : `matplotlib.figure` The `figure` instance used for plotting x : int x position padding in pixels y : float y position padding in pixels zorder : int The zorder of the logo size : str Size of logo to be used. Can be 'small' for 75 px square or 'large' for 150 px square. Returns ------- `matplotlib.image.FigureImage` The `matplotlib.image.FigureImage` instance created
f8511:m2
def add_unidata_logo(fig, x=<NUM_LIT:10>, y=<NUM_LIT>, zorder=<NUM_LIT:100>, size='<STR_LIT>', **kwargs):
return _add_logo(fig, x=x, y=y, zorder=zorder, which='<STR_LIT>', size=size, **kwargs)<EOL>
Add the Unidata logo to a figure. Adds an image of the Unidata logo to the figure. Parameters ---------- fig : `matplotlib.figure` The `figure` instance used for plotting x : int x position padding in pixels y : float y position padding in pixels zorder : int The zorder of the logo size : str Size of logo to be used. Can be 'small' for 75 px square or 'large' for 150 px square. Returns ------- `matplotlib.image.FigureImage` The `matplotlib.image.FigureImage` instance created
f8511:m3
def colored_line(x, y, c, **kwargs):
<EOL>nan_mask = ~(np.isnan(x) | np.isnan(y) | np.isnan(c))<EOL>x = x[nan_mask]<EOL>y = y[nan_mask]<EOL>c = c[nan_mask]<EOL>points = concatenate([x, y])<EOL>num_pts = points.size // <NUM_LIT:2><EOL>final_shape = (num_pts - <NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:2>)<EOL>final_strides = (points.itemsize, points.itemsize, num_pts * points.itemsize)<EOL>segments = np.lib.stride_tricks.as_strided(points, shape=final_shape,<EOL>strides=final_strides)<EOL>lc = LineCollection(segments, **kwargs)<EOL>lc.set_array(c)<EOL>return lc<EOL>
Create a multi-colored line. Takes a set of points and turns them into a collection of lines colored by another array. Parameters ---------- x : array-like x-axis coordinates y : array-like y-axis coordinates c : array-like values used for color-mapping kwargs : dict Other keyword arguments passed to :class:`matplotlib.collections.LineCollection` Returns ------- The created :class:`matplotlib.collections.LineCollection` instance.
f8511:m4
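The stride trick above reshapes the point array into overlapping (n - 1, 2, 2) line segments without copying. An equivalent, easier-to-read construction of a multicolored LineCollection (assuming matplotlib and numpy; this is not the exact MetPy code):

import numpy as np
from matplotlib.collections import LineCollection

def colored_line_sketch(x, y, c, **kwargs):
    # Build a LineCollection whose segments are colored by ``c``.
    x, y, c = (np.asarray(a, dtype=float) for a in (x, y, c))
    good = ~(np.isnan(x) | np.isnan(y) | np.isnan(c))
    x, y, c = x[good], y[good], c[good]
    points = np.array([x, y]).T.reshape(-1, 1, 2)                  # (n, 1, 2)
    segments = np.concatenate([points[:-1], points[1:]], axis=1)   # (n - 1, 2, 2)
    lc = LineCollection(segments, **kwargs)
    lc.set_array(c)  # values used for color mapping
    return lc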
def convert_gempak_color(c, style='<STR_LIT>'):
def normalize(x):<EOL><INDENT>"""<STR_LIT>"""<EOL>x = int(x)<EOL>if x < <NUM_LIT:0> or x == <NUM_LIT>:<EOL><INDENT>x = <NUM_LIT:0><EOL><DEDENT>else:<EOL><INDENT>x = x % <NUM_LIT:32><EOL><DEDENT>return x<EOL><DEDENT>cols = ['<STR_LIT>', <EOL>'<STR_LIT>', <EOL>'<STR_LIT>', <EOL>'<STR_LIT>', <EOL>'<STR_LIT>', <EOL>'<STR_LIT>', <EOL>'<STR_LIT>', <EOL>'<STR_LIT>', <EOL>'<STR_LIT>', <EOL>'<STR_LIT>', <EOL>'<STR_LIT>', <EOL>'<STR_LIT>', <EOL>'<STR_LIT>', <EOL>'<STR_LIT>', <EOL>'<STR_LIT>', <EOL>'<STR_LIT>', <EOL>'<STR_LIT>', <EOL>'<STR_LIT>', <EOL>'<STR_LIT>', <EOL>'<STR_LIT>', <EOL>'<STR_LIT>', <EOL>'<STR_LIT>', <EOL>'<STR_LIT>', <EOL>'<STR_LIT>', <EOL>'<STR_LIT>', <EOL>'<STR_LIT>', <EOL>'<STR_LIT>', <EOL>'<STR_LIT>', <EOL>'<STR_LIT>', <EOL>'<STR_LIT>', <EOL>'<STR_LIT>', <EOL>'<STR_LIT>'] <EOL>if style != '<STR_LIT>':<EOL><INDENT>if style == '<STR_LIT>':<EOL><INDENT>cols[<NUM_LIT:0>] = '<STR_LIT>'<EOL>cols[<NUM_LIT:1>] = '<STR_LIT>'<EOL>cols[<NUM_LIT>] = '<STR_LIT>'<EOL><DEDENT>else:<EOL><INDENT>raise ValueError('<STR_LIT>')<EOL><DEDENT><DEDENT>try:<EOL><INDENT>c_list = list(c)<EOL>res = [cols[normalize(x)] for x in c_list]<EOL><DEDENT>except TypeError:<EOL><INDENT>res = cols[normalize(c)]<EOL><DEDENT>return res<EOL>
Convert GEMPAK color numbers into corresponding Matplotlib colors. Takes a sequence of GEMPAK color numbers and turns them into equivalent Matplotlib colors. Various GEMPAK quirks are respected, such as treating negative values as equivalent to 0. Parameters ---------- c : int or sequence of ints GEMPAK color number(s) style : str, optional The GEMPAK 'device' to use to interpret color numbers. May be 'psc' (the default; best for a white background) or 'xw' (best for a black background). Returns ------- List of strings of Matplotlib colors, or a single string if only one color requested.
f8511:m5
def update_position(self, loc):
<EOL>self._loc = loc<EOL>super(SkewXTick, self).update_position(loc)<EOL>
Set the location of tick in data coords with scalar *loc*.
f8522:c0:m0
@property<EOL><INDENT>def gridOn(self): <DEDENT>
return (self._gridOn and (self._has_default_loc()<EOL>or transforms.interval_contains(self.get_view_interval(), self.get_loc())))<EOL>
Control whether the gridline is drawn for this tick.
f8522:c0:m4
@property<EOL><INDENT>def tick1On(self): <DEDENT>
return self._tick1On and self._need_lower()<EOL>
Control whether the lower tick mark is drawn for this tick.
f8522:c0:m6
@property<EOL><INDENT>def label1On(self): <DEDENT>
return self._label1On and self._need_lower()<EOL>
Control whether the lower tick label is drawn for this tick.
f8522:c0:m8
@property<EOL><INDENT>def tick2On(self): <DEDENT>
return self._tick2On and self._need_upper()<EOL>
Control whether the upper tick mark is drawn for this tick.
f8522:c0:m10
@property<EOL><INDENT>def label2On(self): <DEDENT>
return self._label2On and self._need_upper()<EOL>
Control whether the upper tick label is drawn for this tick.
f8522:c0:m12
def get_view_interval(self):
return self.axes.xaxis.get_view_interval()<EOL>
Get the view interval.
f8522:c0:m14
def get_view_interval(self):
return self.axes.upper_xlim[<NUM_LIT:0>], self.axes.lower_xlim[<NUM_LIT:1>]<EOL>
Get the view interval.
f8522:c1:m1
def __init__(self, *args, **kwargs):
<EOL>self.rot = kwargs.pop('<STR_LIT>', <NUM_LIT:30>)<EOL>Axes.__init__(self, *args, **kwargs)<EOL>
r"""Initialize `SkewXAxes`. Parameters ---------- args : Arbitrary positional arguments Passed to :class:`matplotlib.axes.Axes` position: int, optional The rotation of the x-axis against the y-axis, in degrees. kwargs : Arbitrary keyword arguments Passed to :class:`matplotlib.axes.Axes`
f8522:c3:m0
def _set_lim_and_transforms(self):
<EOL>Axes._set_lim_and_transforms(self)<EOL>self.transDataToAxes = (self.transScale<EOL>+ (self.transLimits<EOL>+ transforms.Affine2D().skew_deg(self.rot, <NUM_LIT:0>)))<EOL>self.transData = self.transDataToAxes + self.transAxes<EOL>self._xaxis_transform = (<EOL>transforms.blended_transform_factory(self.transScale + self.transLimits,<EOL>transforms.IdentityTransform())<EOL>+ transforms.Affine2D().skew_deg(self.rot, <NUM_LIT:0>)) + self.transAxes<EOL>
Set limits and transforms. This is called once when the plot is created to set up all the transforms for the data, text and grids.
f8522:c3:m3
@property<EOL><INDENT>def lower_xlim(self):<DEDENT>
return self.axes.viewLim.intervalx<EOL>
Get the data limits for the x-axis along the bottom of the axes.
f8522:c3:m4
@property<EOL><INDENT>def upper_xlim(self):<DEDENT>
return self.transDataToAxes.inverted().transform([[<NUM_LIT:0.>, <NUM_LIT:1.>], [<NUM_LIT:1.>, <NUM_LIT:1.>]])[:, <NUM_LIT:0>]<EOL>
Get the data limits for the x-axis along the top of the axes.
f8522:c3:m5
def __init__(self, fig=None, rotation=<NUM_LIT:30>, subplot=None, rect=None):
if fig is None:<EOL><INDENT>import matplotlib.pyplot as plt<EOL>figsize = plt.rcParams.get('<STR_LIT>', (<NUM_LIT:7>, <NUM_LIT:7>))<EOL>fig = plt.figure(figsize=figsize)<EOL><DEDENT>self._fig = fig<EOL>if rect and subplot:<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>elif rect:<EOL><INDENT>self.ax = fig.add_axes(rect, projection='<STR_LIT>', rotation=rotation)<EOL><DEDENT>else:<EOL><INDENT>if subplot is not None:<EOL><INDENT>try:<EOL><INDENT>len(subplot)<EOL><DEDENT>except TypeError:<EOL><INDENT>subplot = (subplot,)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>subplot = (<NUM_LIT:1>, <NUM_LIT:1>, <NUM_LIT:1>)<EOL><DEDENT>self.ax = fig.add_subplot(*subplot, projection='<STR_LIT>', rotation=rotation)<EOL><DEDENT>self.ax.grid(True)<EOL>
r"""Create SkewT - logP plots. Parameters ---------- fig : matplotlib.figure.Figure, optional Source figure to use for plotting. If none is given, a new :class:`matplotlib.figure.Figure` instance will be created. rotation : float or int, optional Controls the rotation of temperature relative to horizontal. Given in degrees counterclockwise from x-axis. Defaults to 30 degrees. subplot : tuple[int, int, int] or `matplotlib.gridspec.SubplotSpec` instance, optional Controls the size/position of the created subplot. This allows creating the skewT as part of a collection of subplots. If subplot is a tuple, it should conform to the specification used for :meth:`matplotlib.figure.Figure.add_subplot`. The :class:`matplotlib.gridspec.SubplotSpec` can be created by using :class:`matplotlib.gridspec.GridSpec`. rect : tuple[float, float, float, float], optional Rectangle (left, bottom, width, height) in which to place the axes. This allows the user to place the axes at an arbitrary point on the figure.
f8522:c4:m0
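A short usage sketch of the class, assuming MetPy and matplotlib are installed; the sounding values below are made up.

import matplotlib.pyplot as plt
import numpy as np
from metpy.plots import SkewT
from metpy.units import units

p = np.array([1000., 925., 850., 700., 500.]) * units.hPa
t = np.array([25., 20., 15., 5., -15.]) * units.degC

skew = SkewT(rotation=45)   # creates a new Figure when none is passed
skew.plot(p, t, 'r')        # pressure is the first (independent) argument
skew.plot_dry_adiabats()
skew.plot_mixing_lines()
plt.show()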
def plot(self, p, t, *args, **kwargs):
<EOL>t, p = _delete_masked_points(t, p)<EOL>lines = self.ax.semilogy(t, p, *args, **kwargs)<EOL>self.ax.yaxis.set_major_formatter(ScalarFormatter())<EOL>self.ax.yaxis.set_major_locator(MultipleLocator(<NUM_LIT:100>))<EOL>self.ax.yaxis.set_minor_formatter(NullFormatter())<EOL>if not self.ax.yaxis_inverted():<EOL><INDENT>self.ax.invert_yaxis()<EOL><DEDENT>self.ax.xaxis.set_major_locator(MultipleLocator(<NUM_LIT:10>))<EOL>return lines<EOL>
r"""Plot data. Simple wrapper around plot so that pressure is the first (independent) input. This is essentially a wrapper around `semilogy`. It also sets some appropriate ticking and plot ranges. Parameters ---------- p : array_like pressure values t : array_like temperature values, can also be used for things like dew point args Other positional arguments to pass to :func:`~matplotlib.pyplot.semilogy` kwargs Other keyword arguments to pass to :func:`~matplotlib.pyplot.semilogy` Returns ------- list[matplotlib.lines.Line2D] lines plotted See Also -------- :func:`matplotlib.pyplot.semilogy`
f8522:c4:m1
def plot_barbs(self, p, u, v, c=None, xloc=<NUM_LIT:1.0>, x_clip_radius=<NUM_LIT:0.1>,<EOL>y_clip_radius=<NUM_LIT>, **kwargs):
<EOL>plotting_units = kwargs.pop('<STR_LIT>', None)<EOL>if plotting_units:<EOL><INDENT>if hasattr(u, '<STR_LIT>') and hasattr(v, '<STR_LIT>'):<EOL><INDENT>u = u.to(plotting_units)<EOL>v = v.to(plotting_units)<EOL><DEDENT>else:<EOL><INDENT>raise ValueError('<STR_LIT>'<EOL>'<STR_LIT>')<EOL><DEDENT><DEDENT>x = np.empty_like(p)<EOL>x.fill(xloc)<EOL>if c is not None:<EOL><INDENT>b = self.ax.barbs(x, p, u, v, c,<EOL>transform=self.ax.get_yaxis_transform(which='<STR_LIT>'),<EOL>clip_on=True, **kwargs)<EOL><DEDENT>else:<EOL><INDENT>b = self.ax.barbs(x, p, u, v,<EOL>transform=self.ax.get_yaxis_transform(which='<STR_LIT>'),<EOL>clip_on=True, **kwargs)<EOL><DEDENT>ax_bbox = transforms.Bbox([[xloc - x_clip_radius, -y_clip_radius],<EOL>[xloc + x_clip_radius, <NUM_LIT:1.0> + y_clip_radius]])<EOL>b.set_clip_box(transforms.TransformedBbox(ax_bbox, self.ax.transAxes))<EOL>return b<EOL>
r"""Plot wind barbs. Adds wind barbs to the skew-T plot. This is a wrapper around the `barbs` command that adds to appropriate transform to place the barbs in a vertical line, located as a function of pressure. Parameters ---------- p : array_like pressure values u : array_like U (East-West) component of wind v : array_like V (North-South) component of wind c: An optional array used to map colors to the barbs xloc : float, optional Position for the barbs, in normalized axes coordinates, where 0.0 denotes far left and 1.0 denotes far right. Defaults to far right. x_clip_radius : float, optional Space, in normalized axes coordinates, to leave before clipping wind barbs in the x-direction. Defaults to 0.1. y_clip_radius : float, optional Space, in normalized axes coordinates, to leave above/below plot before clipping wind barbs in the y-direction. Defaults to 0.08. plot_units: `pint.unit` Units to plot in (performing conversion if necessary). Defaults to given units. kwargs Other keyword arguments to pass to :func:`~matplotlib.pyplot.barbs` Returns ------- matplotlib.quiver.Barbs instance created See Also -------- :func:`matplotlib.pyplot.barbs`
f8522:c4:m2
def plot_dry_adiabats(self, t0=None, p=None, **kwargs):
<EOL>if t0 is None:<EOL><INDENT>xmin, xmax = self.ax.get_xlim()<EOL>t0 = np.arange(xmin, xmax + <NUM_LIT:1>, <NUM_LIT:10>) * units.degC<EOL><DEDENT>if p is None:<EOL><INDENT>p = np.linspace(*self.ax.get_ylim()) * units.mbar<EOL><DEDENT>t = dry_lapse(p, t0[:, np.newaxis], <NUM_LIT> * units.mbar).to(units.degC)<EOL>linedata = [np.vstack((ti, p)).T for ti in t]<EOL>kwargs.setdefault('<STR_LIT>', '<STR_LIT:r>')<EOL>kwargs.setdefault('<STR_LIT>', '<STR_LIT>')<EOL>kwargs.setdefault('<STR_LIT>', <NUM_LIT:0.5>)<EOL>return self.ax.add_collection(LineCollection(linedata, **kwargs))<EOL>
r"""Plot dry adiabats. Adds dry adiabats (lines of constant potential temperature) to the plot. The default style of these lines is dashed red lines with an alpha value of 0.5. These can be overridden using keyword arguments. Parameters ---------- t0 : array_like, optional Starting temperature values in Kelvin. If none are given, they will be generated using the current temperature range at the bottom of the plot. p : array_like, optional Pressure values to be included in the dry adiabats. If not specified, they will be linearly distributed across the current plotted pressure range. kwargs Other keyword arguments to pass to :class:`matplotlib.collections.LineCollection` Returns ------- matplotlib.collections.LineCollection instance created See Also -------- :func:`~metpy.calc.thermo.dry_lapse` :meth:`plot_moist_adiabats` :class:`matplotlib.collections.LineCollection`
f8522:c4:m3
def plot_moist_adiabats(self, t0=None, p=None, **kwargs):
<EOL>if t0 is None:<EOL><INDENT>xmin, xmax = self.ax.get_xlim()<EOL>t0 = np.concatenate((np.arange(xmin, <NUM_LIT:0>, <NUM_LIT:10>),<EOL>np.arange(<NUM_LIT:0>, xmax + <NUM_LIT:1>, <NUM_LIT:5>))) * units.degC<EOL><DEDENT>if p is None:<EOL><INDENT>p = np.linspace(*self.ax.get_ylim()) * units.mbar<EOL><DEDENT>t = moist_lapse(p, t0[:, np.newaxis], <NUM_LIT> * units.mbar).to(units.degC)<EOL>linedata = [np.vstack((ti, p)).T for ti in t]<EOL>kwargs.setdefault('<STR_LIT>', '<STR_LIT:b>')<EOL>kwargs.setdefault('<STR_LIT>', '<STR_LIT>')<EOL>kwargs.setdefault('<STR_LIT>', <NUM_LIT:0.5>)<EOL>return self.ax.add_collection(LineCollection(linedata, **kwargs))<EOL>
r"""Plot moist adiabats. Adds saturated pseudo-adiabats (lines of constant equivalent potential temperature) to the plot. The default style of these lines is dashed blue lines with an alpha value of 0.5. These can be overridden using keyword arguments. Parameters ---------- t0 : array_like, optional Starting temperature values in Kelvin. If none are given, they will be generated using the current temperature range at the bottom of the plot. p : array_like, optional Pressure values to be included in the moist adiabats. If not specified, they will be linearly distributed across the current plotted pressure range. kwargs Other keyword arguments to pass to :class:`matplotlib.collections.LineCollection` Returns ------- matplotlib.collections.LineCollection instance created See Also -------- :func:`~metpy.calc.thermo.moist_lapse` :meth:`plot_dry_adiabats` :class:`matplotlib.collections.LineCollection`
f8522:c4:m4
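A companion sketch for the moist-adiabat helper; the explicit starting temperatures are arbitrary and the import path is assumed as above.

    import numpy as np
    from metpy.plots import SkewT
    from metpy.units import units

    skew = SkewT()
    # Either rely on the defaults or supply explicit starting temperatures.
    skew.plot_moist_adiabats(t0=np.arange(-40, 45, 5) * units.degC)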
def plot_mixing_lines(self, w=None, p=None, **kwargs):
<EOL>if w is None:<EOL><INDENT>w = np.array([<NUM_LIT>, <NUM_LIT>, <NUM_LIT>, <NUM_LIT>, <NUM_LIT>, <NUM_LIT>,<EOL><NUM_LIT>, <NUM_LIT>, <NUM_LIT>]).reshape(-<NUM_LIT:1>, <NUM_LIT:1>)<EOL><DEDENT>if p is None:<EOL><INDENT>p = np.linspace(<NUM_LIT>, max(self.ax.get_ylim())) * units.mbar<EOL><DEDENT>td = dewpoint(vapor_pressure(p, w))<EOL>linedata = [np.vstack((t, p)).T for t in td]<EOL>kwargs.setdefault('<STR_LIT>', '<STR_LIT:g>')<EOL>kwargs.setdefault('<STR_LIT>', '<STR_LIT>')<EOL>kwargs.setdefault('<STR_LIT>', <NUM_LIT>)<EOL>return self.ax.add_collection(LineCollection(linedata, **kwargs))<EOL>
r"""Plot lines of constant mixing ratio. Adds lines of constant mixing ratio (isohumes) to the plot. The default style of these lines is dashed green lines with an alpha value of 0.8. These can be overridden using keyword arguments. Parameters ---------- w : array_like, optional Unitless mixing ratio values to plot. If none are given, default values are used. p : array_like, optional Pressure values to be included in the isohumes. If not specified, they will be linearly distributed across the current plotted pressure range up to 600 mb. kwargs Other keyword arguments to pass to :class:`matplotlib.collections.LineCollection` Returns ------- matplotlib.collections.LineCollection instance created See Also -------- :class:`matplotlib.collections.LineCollection`
f8522:c4:m5
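A sketch of calling the isohume helper with explicit values; note the column-vector shape for the mixing ratios, mirroring the default built in the body above. All values are illustrative.

    import numpy as np
    from metpy.plots import SkewT
    from metpy.units import units

    skew = SkewT()
    # Dimensionless mixing ratios as a column vector, plus an explicit pressure range.
    w = np.array([0.001, 0.002, 0.004, 0.008, 0.016]).reshape(-1, 1)
    p = np.linspace(1000, 600) * units.hPa
    skew.plot_mixing_lines(w=w, p=p)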
def shade_area(self, y, x1, x2=<NUM_LIT:0>, which='<STR_LIT>', **kwargs):
fill_properties = {'<STR_LIT>':<EOL>{'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': x1 > x2},<EOL>'<STR_LIT>':<EOL>{'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': x1 < x2},<EOL>'<STR_LIT>':<EOL>{'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': None}}<EOL>try:<EOL><INDENT>fill_args = fill_properties[which]<EOL>fill_args.update(kwargs)<EOL><DEDENT>except KeyError:<EOL><INDENT>raise ValueError('<STR_LIT>'.format(str(which)))<EOL><DEDENT>arrs = y, x1, x2<EOL>if fill_args['<STR_LIT>'] is not None:<EOL><INDENT>arrs = arrs + (fill_args['<STR_LIT>'],)<EOL>fill_args.pop('<STR_LIT>', None)<EOL><DEDENT>if matplotlib.__version__ >= '<STR_LIT>':<EOL><INDENT>fill_args['<STR_LIT>'] = True<EOL><DEDENT>arrs = _delete_masked_points(*arrs)<EOL>return self.ax.fill_betweenx(*arrs, **fill_args)<EOL>
r"""Shade area between two curves. Shades areas between curves. Area can be where one is greater or less than the other or all areas shaded. Parameters ---------- y : array_like 1-dimensional array of numeric y-values x1 : array_like 1-dimensional array of numeric x-values x2 : array_like 1-dimensional array of numeric x-values which : string Specifies if `positive`, `negative`, or `both` areas are being shaded. Will be overridden by where. kwargs Other keyword arguments to pass to :class:`matplotlib.collections.PolyCollection` Returns ------- :class:`matplotlib.collections.PolyCollection` See Also -------- :class:`matplotlib.collections.PolyCollection` :func:`matplotlib.axes.Axes.fill_betweenx`
f8522:c4:m6
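A hedged sketch of the generic shading method; the environmental and parcel temperature arrays are fabricated so the example is self-contained.

    import numpy as np
    from metpy.plots import SkewT
    from metpy.units import units

    skew = SkewT()
    p = np.linspace(1000, 100, 50) * units.hPa
    t_env = np.linspace(25, -60, 50) * units.degC     # made-up environment
    t_parcel = np.linspace(30, -65, 50) * units.degC  # made-up parcel path

    # Shade only where the first curve (the parcel) is warmer than the second.
    skew.shade_area(p, t_parcel, t_env, which='positive', alpha=0.3)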
def shade_cape(self, p, t, t_parcel, **kwargs):
return self.shade_area(p, t_parcel, t, which='<STR_LIT>', **kwargs)<EOL>
r"""Shade areas of CAPE. Shades areas where the parcel is warmer than the environment (areas of positive buoyancy. Parameters ---------- p : array_like Pressure values t : array_like Temperature values t_parcel : array_like Parcel path temperature values kwargs Other keyword arguments to pass to :class:`matplotlib.collections.PolyCollection` Returns ------- :class:`matplotlib.collections.PolyCollection` See Also -------- :class:`matplotlib.collections.PolyCollection` :func:`matplotlib.axes.Axes.fill_betweenx`
f8522:c4:m7
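A sketch of shading CAPE against a parcel path; metpy.calc.parcel_profile is assumed available for building that path, and the sounding itself is fabricated.

    import numpy as np
    import metpy.calc as mpcalc
    from metpy.plots import SkewT
    from metpy.units import units

    # Made-up sounding; real use would read an observed profile.
    p = np.linspace(1000, 100, 50) * units.hPa
    T = np.linspace(25, -55, 50) * units.degC
    Td = T - 5 * units.delta_degC

    prof = mpcalc.parcel_profile(p, T[0], Td[0]).to('degC')
    skew = SkewT()
    skew.plot(p, T, 'r')
    skew.plot(p, prof, 'k')
    skew.shade_cape(p, T, prof)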
def shade_cin(self, p, t, t_parcel, **kwargs):
return self.shade_area(p, t_parcel, t, which='<STR_LIT>', **kwargs)<EOL>
r"""Shade areas of CIN. Shades areas where the parcel is cooler than the environment (areas of negative buoyancy. Parameters ---------- p : array_like Pressure values t : array_like Temperature values t_parcel : array_like Parcel path temperature values kwargs Other keyword arguments to pass to :class:`matplotlib.collections.PolyCollection` Returns ------- :class:`matplotlib.collections.PolyCollection` See Also -------- :class:`matplotlib.collections.PolyCollection` :func:`matplotlib.axes.Axes.fill_betweenx`
f8522:c4:m8
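Shading CIN works the same way; continuing directly from the shade_cape sketch above (same assumed p, T, and prof):

    # Shades where the parcel path is cooler than the environment.
    skew.shade_cin(p, T, prof)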
def __init__(self, ax=None, component_range=<NUM_LIT>):
if ax is None:<EOL><INDENT>import matplotlib.pyplot as plt<EOL>self.ax = plt.figure().add_subplot(<NUM_LIT:1>, <NUM_LIT:1>, <NUM_LIT:1>)<EOL><DEDENT>else:<EOL><INDENT>self.ax = ax<EOL><DEDENT>self.ax.set_aspect('<STR_LIT>', '<STR_LIT>')<EOL>self.ax.set_xlim(-component_range, component_range)<EOL>self.ax.set_ylim(-component_range, component_range)<EOL>self.max_range = <NUM_LIT> * component_range<EOL>
r"""Create a Hodograph instance. Parameters ---------- ax : `matplotlib.axes.Axes`, optional The `Axes` instance used for plotting component_range : value The maximum range of the plot. Used to set plot bounds and control the maximum number of grid rings needed.
f8522:c5:m0
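A small sketch of constructing the hodograph on an existing Axes; Hodograph is assumed importable from metpy.plots and the figure size is arbitrary.

    import matplotlib.pyplot as plt
    from metpy.plots import Hodograph

    fig, ax = plt.subplots(figsize=(6, 6))
    # component_range sets symmetric x/y limits in whatever wind-component
    # units are later plotted (e.g. knots or m/s).
    h = Hodograph(ax, component_range=60.)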
def add_grid(self, increment=<NUM_LIT>, **kwargs):
<EOL>grid_args = {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': '<STR_LIT>'}<EOL>if kwargs:<EOL><INDENT>grid_args.update(kwargs)<EOL><DEDENT>circle_args = grid_args.copy()<EOL>color = circle_args.pop('<STR_LIT>', None)<EOL>circle_args['<STR_LIT>'] = color<EOL>circle_args['<STR_LIT>'] = False<EOL>self.rings = []<EOL>for r in np.arange(increment, self.max_range, increment):<EOL><INDENT>c = Circle((<NUM_LIT:0>, <NUM_LIT:0>), radius=r, **circle_args)<EOL>self.ax.add_patch(c)<EOL>self.rings.append(c)<EOL><DEDENT>self.yaxis = self.ax.axvline(<NUM_LIT:0>, **grid_args)<EOL>self.xaxis = self.ax.axhline(<NUM_LIT:0>, **grid_args)<EOL>
r"""Add grid lines to hodograph. Creates lines for the x- and y-axes, as well as circles denoting wind speed values. Parameters ---------- increment : value, optional The value increment between rings kwargs Other kwargs to control appearance of lines See Also -------- :class:`matplotlib.patches.Circle` :meth:`matplotlib.axes.Axes.axhline` :meth:`matplotlib.axes.Axes.axvline`
f8522:c5:m1
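Adding the rings and axis lines is usually the next step after construction; a sketch, with the increment chosen arbitrarily:

    from metpy.plots import Hodograph

    h = Hodograph(component_range=60.)
    h.add_grid(increment=20)  # speed rings every 20 units, plus the x/y zero lines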
@staticmethod<EOL><INDENT>def _form_line_args(kwargs):<DEDENT>
def_args = {'<STR_LIT>': <NUM_LIT:3>}<EOL>def_args.update(kwargs)<EOL>return def_args<EOL>
Take the default line style and extend it with any user-supplied kwargs.
f8522:c5:m2
def plot(self, u, v, **kwargs):
line_args = self._form_line_args(kwargs)<EOL>u, v = _delete_masked_points(u, v)<EOL>return self.ax.plot(u, v, **line_args)<EOL>
r"""Plot u, v data. Plots the wind data on the hodograph. Parameters ---------- u : array_like u-component of wind v : array_like v-component of wind kwargs Other keyword arguments to pass to :meth:`matplotlib.axes.Axes.plot` Returns ------- list[matplotlib.lines.Line2D] lines plotted See Also -------- :meth:`Hodograph.plot_colormapped`
f8522:c5:m3
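A sketch of the plain line plot; the wind profile is made up and any Line2D keyword (here color) passes straight through.

    import numpy as np
    from metpy.plots import Hodograph
    from metpy.units import units

    u = np.array([2, 8, 15, 25, 30]) * units.knots  # made-up profile
    v = np.array([5, 12, 18, 20, 10]) * units.knots

    h = Hodograph(component_range=40.)
    h.add_grid(increment=10)
    h.plot(u, v, color='tab:red')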
def wind_vectors(self, u, v, **kwargs):
quiver_args = {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': <NUM_LIT:1>}<EOL>quiver_args.update(**kwargs)<EOL>center_position = np.zeros_like(u)<EOL>return self.ax.quiver(center_position, center_position,<EOL>u, v, **quiver_args)<EOL>
r"""Plot u, v data as wind vectors. Plot the wind data as vectors for each level, beginning at the origin. Parameters ---------- u : array_like u-component of wind v : array_like v-component of wind kwargs Other keyword arguments to pass to :meth:`matplotlib.axes.Axes.quiver` Returns ------- matplotlib.quiver.Quiver arrows plotted
f8522:c5:m4
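The vector form draws one arrow per level from the origin; a short sketch with fabricated winds:

    import numpy as np
    from metpy.plots import Hodograph
    from metpy.units import units

    h = Hodograph(component_range=40.)
    u = np.array([2, 8, 15, 25]) * units.knots
    v = np.array([5, 12, 18, 20]) * units.knots
    h.wind_vectors(u, v)  # each arrow starts at (0, 0)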
def plot_colormapped(self, u, v, c, bounds=None, colors=None, **kwargs):
u, v, c = _delete_masked_points(u, v, c)<EOL>if colors:<EOL><INDENT>cmap = mcolors.ListedColormap(colors)<EOL>if bounds.dimensionality == {'<STR_LIT>': <NUM_LIT:1.0>}:<EOL><INDENT>interpolation_heights = [bound.m for bound in bounds if bound not in c]<EOL>interpolation_heights = np.array(interpolation_heights) * bounds.units<EOL>interpolation_heights = (np.sort(interpolation_heights)<EOL>* interpolation_heights.units)<EOL>(interpolated_heights, interpolated_u,<EOL>interpolated_v) = interpolate_1d(interpolation_heights, c, c, u, v)<EOL>c = concatenate([c, interpolated_heights])<EOL>u = concatenate([u, interpolated_u])<EOL>v = concatenate([v, interpolated_v])<EOL>sort_inds = np.argsort(c)<EOL>c = c[sort_inds]<EOL>u = u[sort_inds]<EOL>v = v[sort_inds]<EOL>c = c.to_base_units() <EOL>bounds = bounds.to_base_units()<EOL><DEDENT>else:<EOL><INDENT>bounds = np.asarray(bounds) * bounds.units<EOL><DEDENT>norm = mcolors.BoundaryNorm(bounds.magnitude, cmap.N)<EOL>cmap.set_over('<STR_LIT:none>')<EOL>cmap.set_under('<STR_LIT:none>')<EOL>kwargs['<STR_LIT>'] = cmap<EOL>kwargs['<STR_LIT>'] = norm<EOL>line_args = self._form_line_args(kwargs)<EOL><DEDENT>else:<EOL><INDENT>line_args = self._form_line_args(kwargs)<EOL><DEDENT>lc = colored_line(u, v, c, **line_args)<EOL>self.ax.add_collection(lc)<EOL>return lc<EOL>
r"""Plot u, v data, with line colored based on a third set of data. Plots the wind data on the hodograph, but with a colormapped line. Takes a third variable besides the winds and either a colormap to color it with or a series of bounds and colors to create a colormap and norm to control colormapping. The bounds must always be in increasing order. For using custom bounds with height data, the function will automatically interpolate to the actual bounds from the height and wind data, as well as convert the input bounds from height AGL to height above MSL to work with the provided heights. Simple wrapper around plot so that pressure is the first (independent) input. This is essentially a wrapper around `semilogy`. It also sets some appropriate ticking and plot ranges. Parameters ---------- u : array_like u-component of wind v : array_like v-component of wind c : array_like data to use for colormapping bounds: array-like, optional Array of bounds for c to use in coloring the hodograph. colors: list, optional Array of strings representing colors for the hodograph segments. kwargs Other keyword arguments to pass to :class:`matplotlib.collections.LineCollection` Returns ------- matplotlib.collections.LineCollection instance created See Also -------- :meth:`Hodograph.plot`
f8522:c5:m5
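A sketch of the bounds-and-colors path described in the docstring, coloring the trace by height; the layer bounds must increase, and any bound not already a data level is interpolated from the height and wind arrays. All values are illustrative.

    import numpy as np
    from metpy.plots import Hodograph
    from metpy.units import units

    u = np.array([2, 8, 15, 25, 30]) * units.knots
    v = np.array([5, 12, 18, 20, 10]) * units.knots
    heights = np.array([0, 1, 3, 6, 9]) * units.km

    h = Hodograph(component_range=40.)
    h.add_grid(increment=10)
    bounds = np.array([0, 1, 3, 5, 8]) * units.km               # increasing layer bounds
    colors = ['tab:red', 'tab:green', 'tab:blue', 'tab:olive']  # one color per layer
    h.plot_colormapped(u, v, heights, bounds=bounds, colors=colors)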
def __init__(self, name, scale, **kwargs):
super(MetPyMapFeature, self).__init__('<STR_LIT>', name, scale, **kwargs)<EOL>
Create MetPyMapFeature instance.
f8524:c0:m0
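This feature class plugs into cartopy's add_feature mechanism; a hedged sketch, assuming the package also exposes a pre-built counties instance of it (referred to here as USCOUNTIES) and that cartopy is installed:

    import matplotlib.pyplot as plt
    import cartopy.crs as ccrs
    from metpy.plots import USCOUNTIES  # assumed pre-built instance of this feature class

    ax = plt.axes(projection=ccrs.LambertConformal())
    ax.set_extent([-105, -93, 33, 41], crs=ccrs.PlateCarree())
    ax.add_feature(USCOUNTIES, edgecolor='grey', linewidth=0.5)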
def geometries(self):
<EOL>fname = '<STR_LIT>'.format(self.name, self.scale)<EOL>for extension in ['<STR_LIT>', '<STR_LIT>']:<EOL><INDENT>get_test_data(fname + extension)<EOL><DEDENT>path = get_test_data(fname + '<STR_LIT>', as_file_obj=False)<EOL>return iter(tuple(shpreader.Reader(path).geometries()))<EOL>
Return an iterator of (shapely) geometries for this feature.
f8524:c0:m1