diff --git a/continuous_integration/build_docs.sh b/continuous_integration/build_docs.sh index 6894ec7834..1ba47e81e6 100644 --- a/continuous_integration/build_docs.sh +++ b/continuous_integration/build_docs.sh @@ -3,20 +3,20 @@ # https://github.com/pydata/pandas set -e -cd "$TRAVIS_BUILD_DIR" - echo "Building Docs" -conda install -q sphinx pillow +conda install -q sphinx pillow ipython +pip install sphinx_rtd_theme +pip install sphinx_gallery +pip install sphinx-copybutton -mv "$TRAVIS_BUILD_DIR"/doc /tmp -cd /tmp/doc -mv -f source/index.ci source/index.rst +cd doc +make clean make html # upload to pyart-docs-travis repo is this is not a pull request and # secure token is available (aka in the ARM-DOE repository. if [ "$TRAVIS_PULL_REQUEST" == "false" ] && [ $TRAVIS_SECURE_ENV_VARS == 'true' ]; then - cd /tmp/doc/build/html + cd build/html git config --global user.email "pyart-docs-bot@example.com" git config --global user.name "pyart-docs-bot" diff --git a/doc/rebuild_docs.sh b/doc/rebuild_docs.sh index 0979d56d08..0a503786bc 100755 --- a/doc/rebuild_docs.sh +++ b/doc/rebuild_docs.sh @@ -1,5 +1,4 @@ # script to rebuild documentation after removing intermediates rm -r build -rm source/user_reference/generated/* -rm source/dev_reference/generated/* +rm source/API/generated/* make html diff --git a/doc/rebuild_examples.sh b/doc/rebuild_examples.sh index 139f907470..8f3b3edea1 100755 --- a/doc/rebuild_examples.sh +++ b/doc/rebuild_examples.sh @@ -1,5 +1,5 @@ # script to rebuild Py-ART example after removing intermediates rm -r build -rm -r source/auto_examples/* +rm -r source/source/auto_examples/* BUILD_PYART_EXAMPLES=1 make html diff --git a/doc/rebuild_full_docs.sh b/doc/rebuild_full_docs.sh index 9b03fc7e14..6dc7f79641 100755 --- a/doc/rebuild_full_docs.sh +++ b/doc/rebuild_full_docs.sh @@ -1,7 +1,6 @@ # script to rebuild complete documentation include examples after removing # intermediates rm -r build -rm source/user_reference/generated/* -rm source/dev_reference/generated/* -rm -r source/auto_examples/* +rm source/API/generated/* +rm -r source/source/auto_examples/* BUILD_PYART_EXAMPLES=1 make html diff --git a/doc/source/API/index.rst b/doc/source/API/index.rst new file mode 100644 index 0000000000..e6acdb1e3b --- /dev/null +++ b/doc/source/API/index.rst @@ -0,0 +1,34 @@ +.. _API: + +#################### +API Reference Manual +#################### + +:Release: |version| +:Date: |today| + +This guide provides documentation for all modules, function, methods, +and classes within Py-ART for those in the public API. + +Documentation is broken down by directory and module. + +.. currentmodule:: pyart + +.. autosummary:: + :toctree: generated/ + + pyart.core + pyart.io + pyart.aux_io + pyart.config + pyart.correct + pyart.exceptions + pyart.retrieve + pyart.graph + pyart.filters + pyart.lazydict + pyart.map + pyart.util + pyart.bridge + pyart.testing + pyart._debug_info diff --git a/doc/source/INSTALL.rst b/doc/source/INSTALL.rst new file mode 100644 index 0000000000..a53ab5aca9 --- /dev/null +++ b/doc/source/INSTALL.rst @@ -0,0 +1,156 @@ +============ +Installation +============ + +Required Dependencies +===================== + +Py-ART requires the following software. 
+
+* Python__ 3.6.x, 3.7.x or 3.8.x
+
+__ http://www.python.org
+
+* NumPy__
+
+__ http://www.scipy.org
+
+* SciPy__
+
+__ http://www.scipy.org
+
+* matplotlib__
+
+__ http://matplotlib.org/
+
+* netCDF4__
+
+__ https://github.com/Unidata/netcdf4-python
+
+
+Optional Dependencies
+=====================
+
+The following packages are recommended for a fully-functional Py-ART
+installation, but Py-ART will install and work with reduced functionality
+without these packages.
+
+* `TRMM RSL `_
+
+* `CyLP `_ or
+  `PyGLPK `_ or
+  `CVXOPT `_ and their dependencies.
+
+* `Cartopy `_ or
+* `Basemap `_, but Cartopy is recommended as
+  Basemap is no longer supported.
+
+* `xarray `_
+* `pyproj `_
+
+* `pytest `_
+
+Obtaining the latest source
+===========================
+
+The latest source code for Py-ART can be obtained from the GitHub repository,
+https://github.com/ARM-DOE/pyart.
+
+The latest source can be checked out using
+
+::
+
+    $ git clone https://github.com/ARM-DOE/pyart.git
+
+
+Installing from Source
+======================
+
+The path to the TRMM RSL library must be provided during install. This can
+be done by setting the ``RSL_PATH`` environment variable. In bash
+this can be done using ``export RSL_PATH=/path/to/rsl/``. If this location is
+not specified, some common locations will be searched. Note that the location
+provided should be the root TRMM RSL path, under which both a `lib` and an
+`include` directory are contained; the default location is ``/usr/local/trmm``.
+If using CyLP, a path to the coincbc directory is needed. This can be set
+using ``export COIN_INSTALL_DIR=/path/to/coincbc/``. When using CyLP, on some
+systems, installing the Anaconda compilers is needed. These can be found here:
+https://docs.conda.io/projects/conda-build/en/latest/resources/compiler-tools.html
+
+After specifying the TRMM RSL path, Py-ART can be installed globally using
+
+::
+
+    $ python setup.py install
+
+or locally using
+
+::
+
+    $ python setup.py install --user
+
+If you prefer to use Py-ART without installing, simply add this path to
+your ``PYTHONPATH`` (directly or with a .pth file) and compile the extension
+in-place.
+
+::
+
+    $ python setup.py build_ext -i
+
+You can also install Py-ART in development mode by using
+
+::
+
+    $ pip install -e .
+
+Frequently asked questions
+==========================
+
+* I'm getting an error about a missing 'io' module after installing pyart with pip.
+
+  The 'pyart' package on pip is a different, unrelated package. Make sure to do::
+
+    pip install arm_pyart
+
+  and not::
+
+    pip install pyart
+
+* I'm getting a segfault or compile error with CyLP in newer Python versions
+  when installing in an environment.
+
+  Anaconda now has its own compilers on conda-forge. These can be found here:
+  https://docs.conda.io/projects/conda-build/en/latest/resources/compiler-tools.html
+  Once the proper compilers are installed, reinstall CyLP.
+
+* I'm getting a segfault or another error in Python when using
+  ``pyart.io.read_rsl()`` with IRIS/other files.
+
+  This is due to a bug in RSL, and can be remedied by adding
+  ``-fno-stack-protector -D_FORTIFY_SOURCE=0`` to the CFLAGS parameter of the
+  RSL makefile. This issue has been fixed with the release of rsl-v1.44.
+
+* I'm having trouble getting PyGLPK to compile on my 64-bit operating system.
+ + Change the line in the setup.py file from + + :: + + define_macros = macros, extra_compile_args=['-m32'], extra_link_args=['-m32'], + + to + + :: + + define_macros = macros, extra_compile_args=['-m64'], extra_link_args=['-m64'], + + Then build and install PyGLPK as recommended in the PYGLPK README.txt file. + +* When running basemap, I get an error 'KeyError: PROJ_LIB'. + + Basemap is not being supported beyond 2020, some of these errors relate + to it not playing nicely with newer versions of packages. We recommend using + Cartopy instead, but some users have been able to use: + import os + os.environ['PROJ_LIB'] = 'C:/Users/xx Username xxx/Anaconda3/Lib/site-packages/mpl_toolkits/basemap' + To get basemap working, but again Cartopy should be used instead of Basemap. diff --git a/doc/source/_static/ppi.png b/doc/source/_static/ppi.png new file mode 100644 index 0000000000..8d80389349 Binary files /dev/null and b/doc/source/_static/ppi.png differ diff --git a/doc/source/_static/rhi.png b/doc/source/_static/rhi.png new file mode 100644 index 0000000000..6fe90d19b6 Binary files /dev/null and b/doc/source/_static/rhi.png differ diff --git a/doc/source/_static/scipy.css b/doc/source/_static/scipy.css deleted file mode 100644 index b331375e54..0000000000 --- a/doc/source/_static/scipy.css +++ /dev/null @@ -1,206 +0,0 @@ -@import "classic.css"; - -/** - * Spacing fixes - */ - -div.body p, div.body dd, div.body li { - line-height: 125%; -} - -ul.simple { - margin-top: 0; - margin-bottom: 0; - padding-top: 0; - padding-bottom: 0; -} - -/* spacing around blockquoted fields in parameters/attributes/returns */ -td.field-body > blockquote { - margin-top: 0.1em; - margin-bottom: 0.5em; -} - -/* spacing around example code */ -div.highlight > pre { - padding: 2px 5px 2px 5px; -} - -/* spacing in see also definition lists */ -dl.last > dd { - margin-top: 1px; - margin-bottom: 5px; - margin-left: 30px; -} - -/* hide overflowing content in the sidebar */ -div.sphinxsidebarwrapper p.topless { - overflow: hidden; -} - -/** - * Hide dummy toctrees - */ - -ul { - padding-top: 0; - padding-bottom: 0; - margin-top: 0; - margin-bottom: 0; -} -ul li { - padding-top: 0; - padding-bottom: 0; - margin-top: 0; - margin-bottom: 0; -} -ul li a.reference { - padding-top: 0; - padding-bottom: 0; - margin-top: 0; - margin-bottom: 0; -} - -/** - * Make high-level subsections easier to distinguish from top-level ones - */ -div.body h3 { - background-color: transparent; -} - -div.body h4 { - border: none; - background-color: transparent; -} - -/** - * Scipy colors - */ - -body { - background-color: rgb(100,135,220); -} - -div.document { - background-color: rgb(230,230,230); -} - -div.sphinxsidebar { - background-color: rgb(230,230,230); -} - -div.related { - background-color: rgb(100,135,220); -} - -div.sphinxsidebar h3 { - color: rgb(0,102,204); -} - -div.sphinxsidebar h3 a { - color: rgb(0,102,204); -} - -div.sphinxsidebar h4 { - color: rgb(0,82,194); -} - -div.sphinxsidebar p { - color: black; -} - -div.sphinxsidebar a { - color: #355f7c; -} - -div.sphinxsidebar ul.want-points { - list-style: disc; -} - -.field-list th { - color: rgb(0,102,204); - white-space: nowrap; -} - -/** - * Extra admonitions - */ - -div.tip { - background-color: #ffffe4; - border: 1px solid #ee6; -} - -div.plot-output { - clear-after: both; -} - -div.plot-output .figure { - float: left; - text-align: center; - margin-bottom: 0; - padding-bottom: 0; -} - -div.plot-output .caption { - margin-top: 2; - padding-top: 0; -} - -div.plot-output 
p.admonition-title { - display: none; -} - -div.plot-output:after { - content: ""; - display: block; - height: 0; - clear: both; -} - - -/* -div.admonition-example { - background-color: #e4ffe4; - border: 1px solid #ccc; -}*/ - - -/** - * Styling for field lists - */ - -table.field-list th { - border-left: 1px solid #aaa !important; - padding-left: 5px; -} - -table.field-list { - border-collapse: separate; - border-spacing: 10px; -} - -/** - * Styling for footnotes - */ - -table.footnote td, table.footnote th { - border: none; -} - -/** - * Styling for Examples - */ -div#examples.section .figure img { - max-width: 160px; - max-height: 160px; - display: block; - margin: auto; -} - -div#examples.section .thumbnailContainer { - float: left; - width: 182px; - height: 242px; - overflow: auto; -} diff --git a/doc/source/_templates/autosummary/base.rst b/doc/source/_templates/autosummary/base.rst new file mode 100644 index 0000000000..b7556ebf7b --- /dev/null +++ b/doc/source/_templates/autosummary/base.rst @@ -0,0 +1,5 @@ +{{ fullname | escape | underline}} + +.. currentmodule:: {{ module }} + +.. auto{{ objtype }}:: {{ objname }} diff --git a/doc/source/_templates/autosummary/class.rst b/doc/source/_templates/autosummary/class.rst index d707c38f1c..536acfe122 100644 --- a/doc/source/_templates/autosummary/class.rst +++ b/doc/source/_templates/autosummary/class.rst @@ -2,18 +2,32 @@ {% block methods %} {% if methods %} - .. HACK -- the point here is that we don't want this to appear in the output, but the autosummary should still generate the pages. - .. autosummary:: - :toctree: - {% for item in all_methods %} - {%- if not item.startswith('_') or item in ['__call__'] %} - {{ name }}.{{ item }} - {%- endif -%} - {%- endfor %} +.. HACK -- the point here is that we don't want this to appear in the output, but the autosummary should still generate the pages. + +.. autosummary:: + :toctree: + +{% for item in all_methods %} +{%- if not item.startswith('_') or item in ['__call__'] %} + ~{{ name }}.{{ item }} +{%- endif -%} +{%- endfor %} + {% endif %} {% endblock %} {% block attributes %} {% if attributes %} +.. HACK -- the point here is that we don't want this to appear in the output, but the autosummary should still generate the pages. + +.. autosummary:: + :toctree: + +{% for item in all_attributes %} +{%- if not item.startswith('_') %} + ~{{ name }}.{{ item }} +{%- endif -%} + +{%- endfor %} {% endif %} {% endblock %} diff --git a/doc/source/_templates/autosummary/module.rst b/doc/source/_templates/autosummary/module.rst new file mode 100644 index 0000000000..0957773d08 --- /dev/null +++ b/doc/source/_templates/autosummary/module.rst @@ -0,0 +1,29 @@ +{{ fullname | escape | underline }} + +.. rubric:: Description + +.. automodule:: {{ fullname }} + +.. currentmodule:: {{ fullname }} + +{% if classes %} +.. rubric:: Classes + +.. autosummary:: + :toctree: . + {% for class in classes %} + {{ class }} + {% endfor %} + +{% endif %} + +{% if functions %} +.. rubric:: Functions + +.. autosummary:: + :toctree: . 
+ {% for function in functions %} + {{ function }} + {% endfor %} + +{% endif %} diff --git a/doc/source/conf.py b/doc/source/conf.py index 13e924d95a..f372eac2d1 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -1,52 +1,101 @@ -# Py-ART documentation configuration file - -import sys, os, re - -# Check Sphinx version -import sphinx -if sphinx.__version__ < "1.0.1": - raise RuntimeError("Sphinx 1.0.1 or newer required") +#!/usr/bin/env python3 +# +# Python ARM Radar Toolkit documentation build configuration file, created by +# sphinx-quickstart on Thu Jun 28 12:35:56 2018. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +# import os +# import sys +# sys.path.insert(0, os.path.abspath('.')) + + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +# +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +import os +import re +import sys + +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.autosummary', + 'sphinx.ext.githubpages', + 'sphinx.ext.intersphinx', + 'sphinx.ext.mathjax', + 'sphinx.ext.viewcode', + 'IPython.sphinxext.ipython_directive', + 'IPython.sphinxext.ipython_console_highlighting', + 'matplotlib.sphinxext.plot_directive', + 'sphinx.ext.napoleon', + 'sphinx_copybutton', +] -needs_sphinx = '1.0' +# only include examples if the BUILD_PYART_EXAMPLES env. variable is set +if 'BUILD_PYART_EXAMPLES' in os.environ: + extensions.append('sphinx_gallery.gen_gallery') + sphinx_gallery_conf = { + 'examples_dirs': '../../examples', + 'gallery_dirs': 'source/auto_examples' +} -#---------------------------------------------------------------------------- -# General configuration -#---------------------------------------------------------------------------- -# Add any Sphinx extension module names here, as strings. They can be extensions -# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. +# Configuration options for plot_directive. See: +# https://github.com/matplotlib/matplotlib/blob/f3ed922d935751e08494e5fb5311d3050a3b637b/lib/matplotlib/sphinxext/plot_directive.py#L81 +plot_html_show_source_link = False +plot_html_show_formats = False -sys.path.insert(0, os.path.abspath('../sphinxext')) +# Generate the API documentation when building +autoclass_content = "both" -# Try to override the matplotlib configuration -try: - import gen_rst -except: - pass +autosummary_generate = True +autosummary_imported_members = True -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.mathjax', - 'sphinx.ext.autosummary', 'numpydoc'] -# only include examples if the BUILD_PYART_EXAMPLES env. 
variable is set -if 'BUILD_PYART_EXAMPLES' in os.environ: - extensions.append('gen_rst') +# Otherwise, the Return parameter list looks different from the Parameters list +napoleon_use_rtype = False +# Otherwise, the Attributes parameter list looks different from the Parameters list +napoleon_use_ivar = True +napoleon_include_init_with_doc = False +napoleon_use_param = False # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] -# The suffix of source filenames. +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# +# source_suffix = ['.rst', '.md'] source_suffix = '.rst' # The master toctree document. master_doc = 'index' # General information about the project. -project = u'Py-ART' -copyright = u'2013-2019, Py-ART developers' +project = 'Py-ART' +copyright = '2013-2020, Py-ART developers' +author = 'Py-ART developers' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. - +# import pyart # The short X.Y version (including the .devXXXX suffix if present) version = re.sub(r'^(\d+\.\d+)\.\d+(.*)', r'\1\2', pyart.__version__) @@ -61,221 +110,114 @@ # full Py-ART version in CI built docs if 'CI' in os.environ and os.environ['CI'] == 'true': version = release - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -#today = '' -# Else, today_fmt is used as the format for a strftime call. -today_fmt = '%B %d, %Y' - -# The reST default role (used for this markup: `text`) to use for all documents. -#default_role = None +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ['_templates/*'] - -# If true, '()' will be appended to :func: etc. cross-reference text. -add_function_parentheses = False - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -#add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -show_authors = False +# This patterns also effect to html_static_path and html_extra_path +exclude_patterns = [] # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' -# --------------------------------------------------------------------------- -# HTML output -# --------------------------------------------------------------------------- +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = False + -# The style sheet to use for HTML and HTML Help pages. A file of that name -# must exist either in Sphinx' static/ path, or in one of the custom paths -# given in html_static_path. -html_theme = 'classic' -html_style = 'scipy.css' +# -- Options for HTML output ---------------------------------------------- -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -html_title = "Py-ART Documentation" +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. 
+# -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -#html_logo = None +html_theme = 'sphinx_rtd_theme' +import sphinx_rtd_theme +html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# +# html_theme_options = {} # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -#html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -#html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -#html_additional_pages = {} - -# If false, no module index is generated. -html_domain_indices = True - -# If false, no index is generated. -#html_use_index = True - -# If true, the index is split into individual pages for each letter. -#html_split_index = False - -# If true, links to the reST sources are added to the pages. -html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -html_show_copyright = True +# Custom sidebar templates, must be a dictionary that maps document names +# to template names. +# +# This is required for the alabaster theme +# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars +html_sidebars = { + '**': [ + 'relations.html', # needs 'show_related': True theme option to display + 'searchbox.html', + ] +} -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' -# This is the file name suffix for HTML files (e.g. ".xhtml"). -html_file_suffix = '.html' +# -- Options for HTMLHelp output ------------------------------------------ # Output file base name for HTML help builder. htmlhelp_basename = 'pyart' -# --------------------------------------------------------------------------- -# LaTeX output -#---------------------------------------------------------------------------- + +# -- Options for LaTeX output --------------------------------------------- latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', -# Additional stuff for the LaTeX preamble. -#'preamble': '', + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, author, documentclass [howto/manual]). 
+# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). latex_documents = [ - ('index', 'Py-ART.tex', u'Py-ART documentation', - u'Py-ART developers', 'manual'), + (master_doc, 'pyart.tex', 'Py-ART Documentation', + 'Contributors', 'manual'), ] -# The name of an image file (relative to this directory) to place at the top of -# the title page. -#latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -#latex_use_parts = False -# If true, show page references after internal links. -#latex_show_pagerefs = False +# -- Options for manual page output --------------------------------------- -# If true, show URL addresses after external links. -#latex_show_urls = False - -# Documents to append as an appendix to all manuals. -#latex_appendices = [] - -# If false, no module index is generated. -#latex_domain_indices = True - -#---------------------------------------------------------------------------- -# Numpydoc extension -#---------------------------------------------------------------------------- - -# Numpy autodoc attributes -numpydoc_show_class_members = True - -#---------------------------------------------------------------------------- -# Autosummary -#---------------------------------------------------------------------------- - -if sphinx.__version__ >= "0.7": - import glob - #autosummary_generate = glob.glob("*.rst") - autosummary_generate = True +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, 'pyart', 'The Python ARM Radar Toolkit', + [author], 1) +] -#---------------------------------------------------------------------------- -# Source code links -#---------------------------------------------------------------------------- +# -- Options for Texinfo output ------------------------------------------- -# these functions borrowed from the scipy project -import inspect -from os.path import relpath, dirname -import pyart +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) -for name in ['sphinx.ext.linkcode', 'linkcode', 'numpydoc.linkcode']: - try: - __import__(name) - extensions.append(name) - break - except ImportError: - pass -else: - print "NOTE: linkcode extension not found -- no links to source generated" - -def linkcode_resolve(domain, info): - """ - Determine the URL corresponding to Python object - """ - if domain != 'py': - return None - - modname = info['module'] - fullname = info['fullname'] - - submod = sys.modules.get(modname) - if submod is None: - return None - - obj = submod - for part in fullname.split('.'): - try: - obj = getattr(obj, part) - except: - return None - - try: - fn = inspect.getsourcefile(obj) - except: - fn = None - if not fn: - try: - fn = inspect.getsourcefile(sys.modules[obj.__module__]) - except: - fn = None - if not fn: - return None - - try: - source, lineno = inspect.findsource(obj) - except: - lineno = None - - if lineno: - linespec = "#L%d" % (lineno + 1) - else: - linespec = "" - - fn = relpath(fn, start=dirname(pyart.__file__)) - - return "http://github.com/ARM-DOE/pyart/blob/master/pyart/%s%s" % (fn, linespec) +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + 'python': ('https://docs.python.org/3/', None), + 'numpy': ('https://docs.scipy.org/doc/numpy/', None), + 'scipy': ('https://docs.scipy.org/doc/scipy/reference/', None), + 'pandas': ('https://pandas.pydata.org/pandas-docs/stable', None), + 'matplotlib': ('https://matplotlib.org', None), + } diff --git a/doc/source/contributors_guide.rst b/doc/source/contributors_guide.rst new file mode 100644 index 0000000000..3ccb4318e1 --- /dev/null +++ b/doc/source/contributors_guide.rst @@ -0,0 +1,434 @@ +Contributor's Guide +=================== + + +The Python ARM Radar Toolkit (Py-ART) +------------------------------------- + +The Python ARM Radar Toolkit, Py-ART, is an open source Python module +containing a growing collection of weather radar algorithms and utilities +build on top of the Scientific Python stack and distributed under the +3-Clause BSD license. Py-ART is used by the +`Atmospheric Radiation Measurement (ARM) Climate Research Facility +`_ for working with data from a number of precipitation +and cloud radars, but has been designed so that it can be used by others in +the radar and atmospheric communities to examine, processes, and analyze +data from many types of weather radars. + + +Important Links +--------------- + +- Official source code repository: https://github.com/ARM-DOE/pyart +- HTML documentation: http://arm-doe.github.io/pyart-docs-travis/ +- Examples: http://arm-doe.github.io/pyart/dev/auto_examples/index.html +- Mailing List: http://groups.google.com/group/pyart-users/ +- Issue Tracker: https://github.com/ARM-DOE/pyart/issues + + +Citing +------ + +If you use the Python ARM Radar Toolkit (Py-ART) to prepare a publication +please cite: + + Helmus, J.J. & Collis, S.M., (2016). The Python ARM Radar Toolkit + (Py-ART), a Library for Working with Weather Radar Data in the Python + Programming Language. Journal of Open Research Software. 4(1), p.e25. + DOI: http://doi.org/10.5334/jors.119 + +Py-ART implements many published scientific methods which should *also* be +cited if you make use of them. Refer to the **References** section in the +documentation of the functions used for information on these citations. + + +Install +------- + +The easiest method for installing Py-ART is to use the conda packages from +the latest release. To do this you must download and install +`Anaconda `_ or +`Miniconda `_. +Then use the following command in a terminal or command prompt to install +the latest version of Py-ART:: + + conda install -c conda-forge arm_pyart + +To update an older version of Py-ART to the latest release use:: + + conda update -c conda-forge arm_pyart + + +Resources +--------- + +Pyart: + +- https://github.com/openradar/AMS-Short-Course-on-Open-Source-Radar-Software +- https://github.com/EVS-ATMOS/pyart_short_course +- https://www.youtube.com/watch?v=diiP-Q3bKZw +- http://arm-doe.github.io/pyart/dev/auto_examples/index.html + +Git: + +- https://git-scm.com/book/en/v2 + + +Code Style +---------- + +Py-ART follows pep8 coding standards. To make sure your code follows the +pep8 style, you can use a variety of tools that can check for you. Two +popular pep8 check modules are pycodestyle and pylint. 
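+
+As a small, made-up illustration of the kind of issues these tools flag (the
+function below is purely hypothetical and not part of Py-ART):
+
+.. code-block:: python
+
+    # pycodestyle flags the missing space after the comma (E231);
+    # pylint additionally flags the camelCase function name.
+    def addTwo(x,y):
+        return x + y
+
+    # The pep8-style equivalent.
+    def add_two(x, y):
+        return x + y
+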
+ +For more on pep8 style: + +- https://www.python.org/dev/peps/pep-0008/ + +To install pycodestyle:: + + conda install pycodestyle + +To use pycodestyle:: + + pycodestyle filename + +To install pylint:: + + conda install pylint + +To get a detailed pylint report:: + + pylint filename + +If you want to just see what line number and the issue, just use:: + + pylint -r n filename + +Both of these tools are highly configurable to suit a user's taste. Refer to +the tools documentation for details on this process. + +- https://pycodestyle.readthedocs.io/en/latest/ +- https://www.pylint.org/ + + +Python File Setup +----------------- + +In a new .py file, the top of the code should have the function or class +location, sphinx comments for template configuration, and the public and +private functions and classes within the .py file. Public functions and +classes are listed first and then private functions and classes. Private +functions and classes should have a underscore in front of the name. A space +is needed between the last function or class and the closing docstring +quotation. + +An example: + +.. code-block:: python + + """ + pyart.retrieve.velocity_azimuth_display + ======================================= + + Retrieval of VADs from a radar object. + + .. autosummary:: + :toctree generated/ + + velocity_azimuth_display + _inverse_dist_squared + _Average1D + + """ + +Following the introduction code, modules are then added. To follow pep8 +standards, modules should be added in the order of: + + 1. Standard library imports. + 2. Related third party imports. + 3. Local application/library specific imports. + +For example: + +.. code-block:: python + + import glob + import os + + import numpy as np + import numpy.ma as ma + from scipy.interpolate import interp1d + + from ..core import HorizontalWindProfile + +Following the main function def line, but before the code within it, a doc +string is needed to explain arguments, returns, references if needed, and +other helpful information. These documentation standards follow the NumPy +documentation style. + +For more on the NumPy documentation style: + +- https://github.com/numpy/numpy/blob/master/doc/HOWTO_DOCUMENT.rst.txt + +An example: + +.. code-block:: python + + def velocity_azimuth_display( + radar, velocity=None, z_want=None, valid_ray_min=16, + gatefilter=False, window=2): + + """ + Velocity azimuth display. + + Parameters + ---------- + radar : Radar + Radar object used. + velocity : string + Velocity field to use for VAD calculation. + If None, the default velocity field will be used. + + Other Parameters + ---------------- + z_want : array + Height array user would like for the VAD + calculation. None will result in a z_want of + np.linspace and use of _inverse_dist_squared + and _Average1D functions. Note, height must have + same shape as expected u_wind and v_wind if user + provides z_want. + valid_ray_min : int + Amount of rays required to include that level in + the VAD calculation. + gatefilter : GateFilter + Used to correct the velocity field before its use + in the VAD calculation. Uses Py-ART's region dealiaser. + window : int + Value to use for window calculation in _Averag1D + function. + + Returns + ------- + height : array + Heights in meters above sea level at which horizontal + winds were sampled. + speed : array + Horizontal wind speed in meters per second at each height. + direction : array + Horizontal wind direction in degrees at each height. + u_wind : array + U-wind mean in meters per second. 
+ v_wind : array + V-wind mean in meters per second. + + Reference + ---------- + K. A. Browning and R. Wexler, 1968: The Determination + of Kinematic Properties of a Wind Field Using Doppler + Radar. J. Appl. Meteor., 7, 105–113 + + """ + +As seen, each argument has what type of object it is, an explanation of +what it is, mention of units, and if an argument has a default value, a +statement of what that default value is and why. + +Private or smaller functions and classes can have a single line explanation. + +An example: + +.. code-block:: python + + def u_wind(self): + """ U component of horizontal wind in meters per second. """ + + +Testing +------- + +When adding a new function to pyart it is important to add your function to +the __init__.py file under the corresponding pyart folder. + +Create a test for your function and have assert from numpy testing test the +known values to the calculated values. If changes are made in the future to +pyart, pytest will use the test created to see if the function is still valid and +produces the same values. It works that, it takes known values that are +obtained from the function, and when pytest is ran, it takes the test +function and reruns the function and compares the results to the original. + +An example: + +.. code-block:: python + + def test_vad(): + test_radar = pyart.testing.make_target_radar() + height = np.arange(0, 1000, 200) + speed = np.ones_like(height) * 5 + direction = np.array([0, 90, 180, 270, 45]) + profile = pyart.core.HorizontalWindProfile( + height, speed, direction) + sim_vel = pyart.util.simulated_vel_from_profile( + test_radar, profile) + + test_radar.add_field('velocity', sim_vel, + replace_existing=True) + + velocity = 'velocity' + z_start = 0 + z_end = 10 + z_count = 5 + + vad_height = ([0., 2.5, 5., 7.5, 10.]) + vad_speed = ([4.98665725, 4.94020686, 4.88107152, + 4.81939374, 4.75851962]) + vad_direction = ([359.84659496, 359.30240553, 358.58658589, + 357.81073051, 357.01353486]) + u_wind = ([0.01335138, 0.06014712, 0.12039762, + 0.18410404, 0.24791911]) + v_wind = ([-4.98663937, -4.9398407, -4.87958641, + -4.81587601, -4.75205693]) + + vad = pyart.retrieve.velocity_azimuth_display(test_radar, + velocity, + z_start, z_end, + z_count) + + assert_almost_equal(vad.height, vad_height, 3) + assert_almost_equal(vad.speed, vad_speed, 3) + assert_almost_equal(vad.direction, vad_direction, 3) + assert_almost_equal(vad.u_wind, u_wind, 3) + assert_almost_equal(vad.v_wind, v_wind, 3) + +Pytest is used to run unit tests in pyart. + +It is recommended to install pyart in “editable” mode for pytest testing. +From within the main pyart directory:: + + pip install -e . + +This lets you change your source code and rerun tests at will. + +To install pytest:: + + conda install pytest + +To run all tests in pyart with pytest from outside the pyart directory:: + + pytest --pyargs pyart + +All test with increase verbosity:: + + pytest -v + +Just one file:: + + pytest filename + +Note: When an example shows filename as such:: + + pytest filename + +filename is the filename and location, such as:: + + pytest /home/user/pyart/pyart/io/tests/test_cfradial.py + +Relative paths can also be used:: + + cd pyart + pytest ./pyart/retrieve/tests/test_vad.py + +For more on pytest: + +- https://docs.pytest.org/en/latest/ + + +GitHub +------ + +When contributing to pyart, the changes created should be in a new branch +under your forked repository. Let's say the user is adding a new map display. 
+Instead of creating that new function in your master branch, create a new
+branch called ‘cartopy_map’. If everything checks out and the admin
+accepts the pull request, you can then merge the master branch and the
+cartopy_map branch.
+
+To delete a branch both locally and remotely once you are done with it::
+
+    git push origin --delete <branch name>
+    git branch -d <branch name>
+
+or in this case::
+
+    git push origin --delete cartopy_map
+    git branch -d cartopy_map
+
+
+To create a new branch::
+
+    git checkout -b <branch name>
+
+If you have a branch with changes that have not been added to a pull request
+but you would like to start a new branch with a different task in mind, it
+is recommended that your new branch be based on your master. First::
+
+    git checkout master
+
+Then::
+
+    git checkout -b <branch name>
+
+This way, your new branch is not a combination of your other task branch and
+the new task branch, but is based on the original master branch.
+
+Typing `git status` will not only inform the user of which files have been
+modified or are untracked, it will also inform the user of which branch they
+are currently on.
+
+To switch between branches, simply type::
+
+    git checkout <branch name>
+
+When committing to GitHub, start the commit message with an acronym that
+reflects what you are committing, such as ‘ADD:’, ‘MAINT:’ or
+‘BUG:’. This should be followed by a short statement such as
+“ADD: Adding cartopy map display.”; after the short statement, before
+closing the quotation, hit enter and in your terminal you can then type
+a more in-depth description of what you are committing.
+
+A set of recommended acronyms can be found at:
+
+- https://docs.scipy.org/doc/numpy/dev/gitwash/development_workflow.html
+
+If you would like to type your commit in the terminal and skip the default
+editor::
+
+    git commit -m "STY: Removing whitespace from vad.py pep8."
+
+To use the default editor (on Linux, usually VIM), simply type::
+
+    git commit
+
+One thing to keep in mind: before opening a pull request, update your
+branches with the original upstream repository.
+
+This can be done by::
+
+    git fetch upstream
+
+After fetching, a git merge is needed to pull in the changes.
+
+This is done by::
+
+    git merge upstream/master
+
+To prevent a merge commit::
+
+    git merge --ff-only upstream/master
+
+After creating a pull request through GitHub, two outside checkers,
+AppVeyor and TravisCI, will determine whether the code passes all checks. If
+the code fails either check while the pull request sits, make changes to fix
+the code; when they are pushed to GitHub, the pull request will automatically
+update and TravisCI and AppVeyor will automatically rerun.
diff --git a/doc/source/dev_reference/_debug_info.rst b/doc/source/dev_reference/_debug_info.rst
deleted file mode 100644
index 6f05fb0320..0000000000
--- a/doc/source/dev_reference/_debug_info.rst
+++ /dev/null
@@ -1,5 +0,0 @@
-=================
-pyart._debug_info
-=================
-
-.. autofunction:: pyart._debug_info
diff --git a/doc/source/dev_reference/aux_io.rst b/doc/source/dev_reference/aux_io.rst
deleted file mode 100644
index 22e534e787..0000000000
--- a/doc/source/dev_reference/aux_io.rst
+++ /dev/null
@@ -1,16 +0,0 @@
-============
-pyart.aux_io
-============
-
-Input/Output routines.
-
-.. automodule:: pyart.aux_io.sinarame_h5
-.. automodule:: pyart.aux_io.d3r_gcpex_nc
-.. automodule:: pyart.aux_io.edge_netcdf
-.. automodule:: pyart.aux_io.gamic_hdf5
-.. automodule:: pyart.aux_io.gamicfile
-.. automodule:: pyart.aux_io.odim_h5
-.. automodule:: pyart.aux_io.pattern
-.. automodule:: pyart.aux_io.radx
-..
automodule:: pyart.aux_io.noxp_iphex_nc -.. automodule:: pyart.aux_io.rainbow_wrl diff --git a/doc/source/dev_reference/bridge.rst b/doc/source/dev_reference/bridge.rst deleted file mode 100644 index 056b4d2423..0000000000 --- a/doc/source/dev_reference/bridge.rst +++ /dev/null @@ -1,7 +0,0 @@ -============ -pyart.bridge -============ - -Bridges to other community software projects. - -.. automodule:: pyart.bridge.wradlib_bridge diff --git a/doc/source/dev_reference/config.rst b/doc/source/dev_reference/config.rst deleted file mode 100644 index f62f9699a5..0000000000 --- a/doc/source/dev_reference/config.rst +++ /dev/null @@ -1,7 +0,0 @@ -============ -pyart.config -============ - -Py-ART configuration. - -.. automodule:: pyart.config diff --git a/doc/source/dev_reference/core.rst b/doc/source/dev_reference/core.rst deleted file mode 100644 index a545088b97..0000000000 --- a/doc/source/dev_reference/core.rst +++ /dev/null @@ -1,10 +0,0 @@ -========== -pyart.core -========== - -Core classes and functions. - -.. automodule:: pyart.core.grid -.. automodule:: pyart.core.radar -.. automodule:: pyart.core.transforms -.. automodule:: pyart.core.wind_profile diff --git a/doc/source/dev_reference/correct.rst b/doc/source/dev_reference/correct.rst deleted file mode 100644 index a7484b665e..0000000000 --- a/doc/source/dev_reference/correct.rst +++ /dev/null @@ -1,18 +0,0 @@ -============= -pyart.correct -============= - -Radar Moment correction routines. - -.. automodule:: pyart.correct.attenuation -.. automodule:: pyart.correct.dealias -.. automodule:: pyart.correct.despeckle -.. automodule:: pyart.correct.phase_proc -.. automodule:: pyart.correct.region_dealias -.. automodule:: pyart.correct.unwrap -.. automodule:: pyart.correct._common_dealias -.. automodule:: pyart.correct._fourdd_interface -.. automodule:: pyart.correct._fast_edge_finder -.. automodule:: pyart.correct._unwrap_1d -.. automodule:: pyart.correct._unwrap_2d -.. automodule:: pyart.correct._unwrap_3d diff --git a/doc/source/dev_reference/exceptions.rst b/doc/source/dev_reference/exceptions.rst deleted file mode 100644 index 1f3799b683..0000000000 --- a/doc/source/dev_reference/exceptions.rst +++ /dev/null @@ -1,7 +0,0 @@ -================ -pyart.exceptions -================ - -Custom Py-ART exceptions. - -.. automodule:: pyart.exceptions diff --git a/doc/source/dev_reference/filters.rst b/doc/source/dev_reference/filters.rst deleted file mode 100644 index 6853f6fa2b..0000000000 --- a/doc/source/dev_reference/filters.rst +++ /dev/null @@ -1,8 +0,0 @@ -============= -pyart.filters -============= - -Classes and routines for specifying which gates are included and excluded from -routines. - -.. automodule:: pyart.filters.gatefilter diff --git a/doc/source/dev_reference/graph.rst b/doc/source/dev_reference/graph.rst deleted file mode 100644 index 910bf02c37..0000000000 --- a/doc/source/dev_reference/graph.rst +++ /dev/null @@ -1,13 +0,0 @@ -=========== -pyart.graph -=========== - -Radar data graphing routines. - -.. automodule:: pyart.graph.cm -.. automodule:: pyart.graph.common -.. automodule:: pyart.graph.gridmapdisplay -.. automodule:: pyart.graph.radardisplay_airborne -.. automodule:: pyart.graph.radardisplay -.. automodule:: pyart.graph.radarmapdisplay -.. automodule:: pyart.graph._cm diff --git a/doc/source/dev_reference/index.rst b/doc/source/dev_reference/index.rst deleted file mode 100644 index 5ce933890b..0000000000 --- a/doc/source/dev_reference/index.rst +++ /dev/null @@ -1,36 +0,0 @@ -.. 
_developer: - -########################## -Developer Reference Manual -########################## - -:Release: |version| -:Date: |today| - - -The intended audience of this guide is developers who use Py-ART. For a more -general introduction to Py-ART aimed at users see the :ref:`user`. - -This guide provides documentation for all modules, function, methods, -and classes within Py-ART, both those in the public API and private members. - -Documentation is broken down by directory and module. - -.. toctree:: - :maxdepth: 2 - - core - io - aux_io - config - correct - exceptions - retrieve - graph - filters - lazydict - map - util - bridge - testing - _debug_info diff --git a/doc/source/dev_reference/io.rst b/doc/source/dev_reference/io.rst deleted file mode 100644 index eb26753c17..0000000000 --- a/doc/source/dev_reference/io.rst +++ /dev/null @@ -1,31 +0,0 @@ -======== -pyart.io -======== - -Input/Output routines. - -.. automodule:: pyart.io.arm_sonde -.. automodule:: pyart.io.auto_read -.. automodule:: pyart.io.cfradial -.. automodule:: pyart.io.chl -.. automodule:: pyart.io.common -.. automodule:: pyart.io.grid_io -.. automodule:: pyart.io.mdv_common -.. automodule:: pyart.io.mdv_radar -.. automodule:: pyart.io.mdv_grid -.. automodule:: pyart.io.nexradl3_read -.. automodule:: pyart.io.nexrad_archive -.. automodule:: pyart.io.nexrad_cdm -.. automodule:: pyart.io.nexrad_common -.. automodule:: pyart.io.nexrad_interpolate -.. automodule:: pyart.io.nexrad_level2 -.. automodule:: pyart.io.nexrad_level3 -.. automodule:: pyart.io.rsl -.. automodule:: pyart.io.sigmet -.. automodule:: pyart.io.uf -.. automodule:: pyart.io.uffile -.. automodule:: pyart.io.uf_write -.. automodule:: pyart.io.output_to_geotiff -.. automodule:: pyart.io._rsl_interface -.. automodule:: pyart.io._sigmet_noaa_hh -.. automodule:: pyart.io._sigmetfile diff --git a/doc/source/dev_reference/lazydict.rst b/doc/source/dev_reference/lazydict.rst deleted file mode 100644 index 479dff34d1..0000000000 --- a/doc/source/dev_reference/lazydict.rst +++ /dev/null @@ -1,7 +0,0 @@ -============== -pyart.lazydict -============== - -Lazy load dictionary. - -.. automodule:: pyart.lazydict diff --git a/doc/source/dev_reference/map.rst b/doc/source/dev_reference/map.rst deleted file mode 100644 index 282a99eca2..0000000000 --- a/doc/source/dev_reference/map.rst +++ /dev/null @@ -1,9 +0,0 @@ -========= -pyart.map -========= - -Radar mapping routines. - -.. automodule:: pyart.map.gates_to_grid -.. automodule:: pyart.map.grid_mapper -.. automodule:: pyart.map._gate_to_grid_map diff --git a/doc/source/dev_reference/retrieve.rst b/doc/source/dev_reference/retrieve.rst deleted file mode 100644 index 7fd157231a..0000000000 --- a/doc/source/dev_reference/retrieve.rst +++ /dev/null @@ -1,12 +0,0 @@ -============== -pyart.retrieve -============== - -Functions for performing radar retrievals. - -.. automodule:: pyart.retrieve.kdp_proc -.. automodule:: pyart.retrieve._kdp_proc -.. automodule:: pyart.retrieve.echo_class -.. automodule:: pyart.retrieve.gate_id -.. automodule:: pyart.retrieve.simple_moment_calculations -.. .. automodule:: pyart.retrieve._echo_steiner F2PY module do not document. diff --git a/doc/source/dev_reference/testing.rst b/doc/source/dev_reference/testing.rst deleted file mode 100644 index 48fdb825a4..0000000000 --- a/doc/source/dev_reference/testing.rst +++ /dev/null @@ -1,9 +0,0 @@ -============= -pyart.testing -============= - -Testing functions and files. - -.. automodule:: pyart.testing.sample_files -.. 
automodule:: pyart.testing.sample_objects -.. automodule:: pyart.testing.tmpdirs diff --git a/doc/source/dev_reference/util.rst b/doc/source/dev_reference/util.rst deleted file mode 100644 index 8c0ddea5a2..0000000000 --- a/doc/source/dev_reference/util.rst +++ /dev/null @@ -1,11 +0,0 @@ -========== -pyart.util -========== - -Miscellaneous utility functions. - -.. automodule:: pyart.util.circular_stats -.. automodule:: pyart.util.hildebrand_sekhon -.. automodule:: pyart.util.xsect -.. automodule:: pyart.util.radar_utils -.. automodule:: pyart.util.simulated_vel diff --git a/doc/source/index.ci b/doc/source/index.ci deleted file mode 100644 index 1636793bdc..0000000000 --- a/doc/source/index.ci +++ /dev/null @@ -1,45 +0,0 @@ -==================== -Py-ART documentation -==================== - -Welcome, this is the documentation for the Python ARM Radar Toolkit (Py-ART). - -Bleeding Edge Documentation -=========================== - -This documentation was build automatically from the -`Py-ART GitHub repository `_ and may contain -errors which will be fixed before the next release. See the section below for -links to documentation and examples from the latest release. - -.. toctree:: - :maxdepth: 1 - - user_reference/index - dev_reference/index - -Short Courses -============= - -Various short courses on Py-ART and open source radar software have been given -which contain tutorial like materials and additional examples. - -* `2015 AMS, Open Source Radar Short Course `_ -* `2015 ARM/ASR Meeting, PyART, the Python ARM Radar Toolkit `_ -* `2014 ARM/ASM Meeting, Py-ART tutorial `_ -* `2014 ERAD, Open Source Radar Short Course `_ - -Latest Release -============== - -* `User Reference Manual`_ -* `Developer Reference Manual`_ -* `Examples`_ - -.. _User Reference Manual: http://arm-doe.github.io/pyart/dev/user_reference/index.html -.. _Developer Reference Manual: http://arm-doe.github.io/pyart/dev/dev_reference/index.html -.. _Examples: http://arm-doe.github.io/pyart/dev/auto_examples/index.html - -The files used in the examples are available for download_. - -.. _download: https://engineering.arm.gov/~jhelmus/pyart_example_data/ diff --git a/doc/source/index.rst b/doc/source/index.rst index 12cf6c664d..625be3ab8e 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -1,19 +1,227 @@ -==================== -Py-ART documentation -==================== +===================================== +The Python ARM Radar Toolkit - Py-ART +===================================== + +.. toctree:: + :maxdepth: 1 + :hidden: + :caption: Documentation + + API/index.rst + source/auto_examples/index.rst + INSTALL + setting_up_an_environment + contributors_guide + +.. toctree:: + :maxdepth: 1 + :hidden: + :caption: Downloads -Welcome, this is the documentation for the Python ARM Radar Toolkit (Py-ART). + Anaconda Cloud + GitHub Repo + Zip File of Repository -Latest Release -============== +.. toctree:: + :maxdepth: 1 + :hidden: + :caption: Getting Help + + GitHub Issue Tracker + Mailing List .. toctree:: - :maxdepth: 1 + :maxdepth: 1 + :hidden: + :caption: Science Lead + + Scott Collis + +What is Py-ART? +=============== +The Python ARM Radar Toolkit, Py-ART, is a Python module containing a +collection of weather radar algorithms and utilities. 
Py-ART is used by the
+`Atmospheric Radiation Measurement (ARM) Climate Research Facility `_ for working
+with data from a number of its `precipitation and cloud radars `_, but has been
+designed so that it can be used by others in the radar and atmospheric
+communities to examine, process, and analyze data from many types of
+weather radars.
+
+Citing Py-ART
+=============
+If you use Py-ART in your work, please cite it in your paper. While the
+developers appreciate mentions in the text and acknowledgements, citing the
+paper helps more.
+
+For Py-ART, cite our paper in the `Journal of Open Research Software `_:
+
+Helmus, J.J. & Collis, S.M., (2016). The Python ARM Radar Toolkit (Py-ART), a
+Library for Working with Weather Radar Data in the Python Programming Language.
+Journal of Open Research Software. 4(1), p.e25. DOI: http://doi.org/10.5334/jors.119
+
+For a general citation on open source radar software, please cite Maik Heistermann in
+`BAMS `_:
+
+M. Heistermann, S. Collis, M. J. Dixon, S. Giangrande, J. J. Helmus, B. Kelley,
+J. Koistinen, D. B. Michelson, M. Peura, T. Pfaff, and D. B. Wolff, 2015: The
+Emergence of Open-Source Software for the Weather Radar Community. Bull. Amer.
+Meteor. Soc. 96, 117–128, doi: 10.1175/BAMS-D-13-00240.1.
+
+What can Py-ART do?
+===================
+Py-ART has the ability to ingest (read) data from a number of common weather radar
+formats including Sigmet/IRIS, MDV, CF/Radial, UF, and NEXRAD Level II archive
+files. Radar data can be written to NetCDF files which conform to the CF/Radial
+convention.
+
+Py-ART also contains routines which can produce common radar plots including
+PPIs and RHIs.
+
+|PPI|
+
+|RHI|
+
+.. |PPI| image:: _static/ppi.png
+
+.. |RHI| image:: _static/rhi.png
+
+Algorithms in the module are able to perform a number of corrections on the
+radar moment data in antenna coordinates, including attenuation correction of
+the reflectivity, velocity dealiasing, and correction of the specific (Kdp)
+and differential (PhiDP) phases.
+
+A sophisticated mapping routine is able to efficiently create uniform
+Cartesian grids of radar fields from one or more radars. Routines exist in
+Py-ART for plotting these grids as well as saving them to NetCDF files.
+
+Short Courses
+=============
+
+Various short courses on Py-ART and open source radar software have been given
+which contain tutorial-like materials and additional examples.
+
+* `2015 AMS, Open Source Radar Short Course `_
+* `2015 ARM/ASR Meeting, PyART, the Python ARM Radar Toolkit `_
+* `2014 ARM/ASM Meeting, Py-ART tutorial `_
+* `2014 ERAD, Open Source Radar Short Course `_
+
+Install
+=======
+
+The easiest method for installing Py-ART is to use the conda packages from
+the latest release and use Python 3, as Python 2 support ended on January 1st,
+2020 and many packages, including Py-ART, no longer support Python 2.
+To do this you must download and install
+`Anaconda `_ or
+`Miniconda `_.
+With Anaconda or Miniconda installed, it is recommended to create a new conda
+environment when using Py-ART or even other packages.
To create a new +environment based on the `environment.yml `_:: + + conda env create -f environment.yml + +Or for a basic environment and downloading optional dependencies as needed:: + + conda create -n pyart_env -c conda-forge python=3.8 arm_pyart + +Basic command in a terminal or command prompt to install the latest version of +Py-ART:: + + conda install -c conda-forge arm_pyart + +To update an older version of Py-ART to the latest release use:: + + conda update -c conda-forge arm_pyart + +If you do not wish to use Anaconda or Miniconda as a Python environment or want +to use the latest, unreleased version of Py-ART clone the git repository or +download the repositories zip file and extract the file. Then run: + +$ python setup.py install + +Additional detail on installing Py-ART can be found in the installation section. + +Dependencies +============ + +Py-ART is tested to work under Python 3.6, 3.7 and 3.8 + +The required dependencies to install Py-ART in addition to Python are: + +* `NumPy `_ +* `SciPy `_ +* `matplotlib `_ +* `netCDF4 `_ + +A working C/C++ compiler is required for some optional modules. An easy method +to install these dependencies is by using a +`Scientific Python distributions `_. +`Anaconda Compilers `_ will install +all of the above packages by default on Windows, Linux and Mac computers and is +provided free of charge by Anaconda. Anaconda also has their own compilers, +which may be required for optional dependencies such as CyLP. These compilers +can be found here: +https://docs.conda.io/projects/conda-build/en/latest/resources/compiler-tools.html + +Optional Dependences +==================== + +The above Python modules are require before installing Py-ART, additional +functionality is available of the following modules are installed. + +* `TRMM Radar Software Library (RSL) + `_. + If installed Py-ART will be able t`o read in radar data in a number of + additional formats (Lassen, McGill, Universal Format, and RADTEC) and + perform automatic dealiasing of Doppler velocities. RSL should be + install prior to installing Py-ART. The environmental variable `RSL_PATH` + should point to the location where RSL was installed if RSL was not + installed in the default location (/usr/local/trmm), such as a anaconda path + (/usr/anaconda3/envs/pyart_env/. + +* In order to read files which are stored in HDF5 files the + `h5py `_ package and related libraries must be + installed. + +* A linear programming solver and Python wrapper to use the LP phase + processing method. `CyLP `_ is recommended as + it gives the fastest results, but + `PyGLPK `_ and + `CVXOPT `_ are also supported. The underlying LP + solvers `CBC `_ or + `GLPK `_ will also be required depending + on which wrapper is used. When using `CyLP `_ + a path to coincbc is needed by setting the `COIN_INSTALL_DIR` path, such as + (/usr/anaconda3/envs/pyart_env/). + +* `Cartopy `_. If installed, + the ability to plot grids on geographic maps is available. + +* `xarray `_. If installed, gives the + ability to work with the grid dataset used in grid plotting. + +* `Basemap `_. If installed, also gives the + ability to plot grids on geographic maps, but Cartopy is recommended over + Basemap. + +* `wradlib `_. Needed to calculate the texture + of a differential phase field. + +* `pytest `_. + Required to run the Py-ART unit tests. - user_reference/index - dev_reference/index - auto_examples/index +* `gdal `_. + Required to output GeoTIFFs from `Grid` objects. -The files used in the examples are available for download_. 
Getting help
============
Py-ART has a `mailing list `_ where you can ask questions and request help.

-.. _download: https://engineering.arm.gov/~jhelmus/pyart_example_data/

Contributing
============
Py-ART is an open source software package distributed under the
`New BSD License `_. Source code for the package is available on
`GitHub `_. Feature requests and bug reports can be submitted to the
`Issue tracker `_ or by posting to the pyart-users `mailing list `_.
Contributions of source code, documentation or additional examples are always
appreciated from both developers and users. To learn more about contributing
to Py-ART, see the contributor's guide.
diff --git a/doc/source/setting_up_an_environment.rst b/doc/source/setting_up_an_environment.rst
new file mode 100644
index 0000000000..814d32ab14
--- /dev/null
+++ b/doc/source/setting_up_an_environment.rst
@@ -0,0 +1,133 @@
Setting up an Environment
=========================


Anaconda
--------

Creating environments using Anaconda is recommended because you can create
more than one environment and keep dependencies that might conflict with one
another separate, rather than having them all in your root environment. For
example, if you had all the dependencies for a Pandas environment and all the
dependencies for a Py-ART environment in your root environment, there might
be conflicts between channels and packages. Anaconda allows you to create
multiple environments to avoid these issues.

First, download and install `Anaconda `_.

While Anaconda is installing, it will ask if you want to set a path to it, or
let Anaconda set a default path. After choosing, Anaconda should finish
installing. After it is done, exit the terminal and open a new one to make
sure the environment path is set. If the conda command is not found, help on
running conda and fixing the environment path can be found here:

* `How to Run Conda `_

Setting a Channel
-----------------

Anaconda has a cloud repository that stores many of its packages. It is
recommended, at times, to use the conda-forge channel instead. Conda-Forge is
a community-led collection of packages, and typically contains the most
recent versions of the packages required for Py-ART. Py-ART itself is
available on Conda-Forge. Keeping the packages in an environment within the
same channel helps avoid conflict issues. To add conda-forge as the priority
channel, simply do::

    conda config --add channels conda-forge

You can also just flag the channel when conda installing packages, such as::

    conda install -c conda-forge numpy

More on managing channels can be found here:

* `Managing Channels `_

Creating an Environment
-----------------------

There are a few ways to create a conda environment for using Py-ART or other
packages. One way is to use the environment file, found here:

* https://github.com/ARM-DOE/pyart/blob/master/environment.yml

To create an environment using this file, use the command::

    conda env create -f environment.yml

This will then create an environment called pyart_env that can be activated
by::

    source activate pyart_env

or deactivated after use::

    source deactivate pyart_env

Once the environment is created and activated, you can install more packages
into the environment by simply conda installing them. For example, if you
want Jupyter Notebook to run in that environment with those packages::

    conda install -c conda-forge jupyter notebook

while that environment is activated.
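Once Py-ART is installed in the activated environment, a quick sanity check
from Python confirms the install. This is a minimal, illustrative check;
``pyart._debug_info()`` prints the versions of the required and optional
dependencies that Py-ART can find::

    # Run inside the activated pyart_env environment.
    import pyart

    print(pyart.__version__)  # installed Py-ART version
    pyart._debug_info()       # report detected required/optional dependencies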
Another way to create a conda environment
is by doing it from scratch using the conda create command. An example of
this is::

    conda create -n pyart_env -c conda-forge python=3.6 arm_pyart netCDF4
    cartopy scipy numpy matplotlib

This will also create an environment called pyart_env that can be activated
the same way as mentioned above. To then run your coding editor within the
environment, run in the command line::

    python

or::

    ipython

or::

    jupyter notebook

or even::

    spyder

depending on what you installed in your environment and want to use for
coding.

Adding Optional Dependencies and Setting Paths
----------------------------------------------

There are other optional dependencies that can enhance the use of Py-ART. One
such package is `CyLP `_. To get CyLP to work, the package
`coincbc `_ needs to be installed as a dependency for CyLP.
Simply do::

    conda install -c conda-forge coincbc

within your pyart_env. After that, the coincbc path needs to be exported so
CyLP knows where to find it during its install. To do this::

    export COIN_INSTALL_DIR=/Users/yourusername/youranacondadir/envs/pyart_env

or, as a real example on a Linux machine::

    export COIN_INSTALL_DIR=/home/zsherman/anaconda3/envs/pyart_env

CyLP was adapted by Jonathan Helmus to be Python 3 compatible, so we will
install a specific CyLP branch after doing the export path step above. GitHub
repositories can be pip installed within your environment, so to install the
CyLP version we want::

    pip install git+https://github.com/jjhelmus/CyLP.git@py3

This will install a Python 3 compatible version of CyLP found on GitHub.

More Information
----------------

For more on conda and help with conda:

* https://conda.io/docs/
* https://gitter.im/conda/conda
diff --git a/doc/source/user_reference/aux_io.rst b/doc/source/user_reference/aux_io.rst
deleted file mode 100644
index 21afd42c34..0000000000
--- a/doc/source/user_reference/aux_io.rst
+++ /dev/null
@@ -1 +0,0 @@
-.. automodule:: pyart.aux_io
diff --git a/doc/source/user_reference/bridge.rst b/doc/source/user_reference/bridge.rst
deleted file mode 100644
index 4e18c0280d..0000000000
--- a/doc/source/user_reference/bridge.rst
+++ /dev/null
@@ -1 +0,0 @@
-.. automodule:: pyart.bridge
diff --git a/doc/source/user_reference/core.rst b/doc/source/user_reference/core.rst
deleted file mode 100644
index 4260dd88ff..0000000000
--- a/doc/source/user_reference/core.rst
+++ /dev/null
@@ -1 +0,0 @@
-.. automodule:: pyart.core
diff --git a/doc/source/user_reference/correct.rst b/doc/source/user_reference/correct.rst
deleted file mode 100644
index 7878708420..0000000000
--- a/doc/source/user_reference/correct.rst
+++ /dev/null
@@ -1 +0,0 @@
-.. automodule:: pyart.correct
diff --git a/doc/source/user_reference/filters.rst b/doc/source/user_reference/filters.rst
deleted file mode 100644
index 53ab0cb488..0000000000
--- a/doc/source/user_reference/filters.rst
+++ /dev/null
@@ -1 +0,0 @@
-.. automodule:: pyart.filters
diff --git a/doc/source/user_reference/graph.rst b/doc/source/user_reference/graph.rst
deleted file mode 100644
index 8377d404df..0000000000
--- a/doc/source/user_reference/graph.rst
+++ /dev/null
@@ -1 +0,0 @@
-.. automodule:: pyart.graph
diff --git a/doc/source/user_reference/index.rst b/doc/source/user_reference/index.rst
deleted file mode 100644
index 4ed145742b..0000000000
--- a/doc/source/user_reference/index.rst
+++ /dev/null
@@ -1,38 +0,0 @@
-..
_user: - -##################### -User Reference Manual -##################### - -:Release: |version| -:Date: |today| - - -This is the Py-ART user community’s reference guide, and it covers the most -commonly used public functions, modules and classes. -If more detail is required, the :ref:`developer` is an exhaustive listing of -all private and public functions, modules and classes. - -.. toctree:: - :maxdepth: 1 - - core - io - aux_io - correct - retrieve - graph - map - filters - util - bridge - testing - -Packages level functions: - -.. currentmodule:: pyart -.. autosummary:: - :toctree: generated/ - - load_config - test diff --git a/doc/source/user_reference/io.rst b/doc/source/user_reference/io.rst deleted file mode 100644 index 9a04fce848..0000000000 --- a/doc/source/user_reference/io.rst +++ /dev/null @@ -1 +0,0 @@ -.. automodule:: pyart.io diff --git a/doc/source/user_reference/map.rst b/doc/source/user_reference/map.rst deleted file mode 100644 index f524a56a3d..0000000000 --- a/doc/source/user_reference/map.rst +++ /dev/null @@ -1 +0,0 @@ -.. automodule:: pyart.map diff --git a/doc/source/user_reference/retrieve.rst b/doc/source/user_reference/retrieve.rst deleted file mode 100644 index 206c28d14f..0000000000 --- a/doc/source/user_reference/retrieve.rst +++ /dev/null @@ -1 +0,0 @@ -.. automodule:: pyart.retrieve diff --git a/doc/source/user_reference/testing.rst b/doc/source/user_reference/testing.rst deleted file mode 100644 index 663341842b..0000000000 --- a/doc/source/user_reference/testing.rst +++ /dev/null @@ -1 +0,0 @@ -.. automodule:: pyart.testing diff --git a/doc/source/user_reference/util.rst b/doc/source/user_reference/util.rst deleted file mode 100644 index f9b0342ee2..0000000000 --- a/doc/source/user_reference/util.rst +++ /dev/null @@ -1 +0,0 @@ -.. automodule:: pyart.util diff --git a/doc/sphinxext/LICENSE.txt b/doc/sphinxext/LICENSE.txt deleted file mode 100644 index b15c699dce..0000000000 --- a/doc/sphinxext/LICENSE.txt +++ /dev/null @@ -1,94 +0,0 @@ -------------------------------------------------------------------------------- - The files - - numpydoc.py - - docscrape.py - - docscrape_sphinx.py - - phantom_import.py - have the following license: - -Copyright (C) 2008 Stefan van der Walt , Pauli Virtanen - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - 1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - 2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in - the documentation and/or other materials provided with the - distribution. - -THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR -IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, -INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) -HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, -STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING -IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -POSSIBILITY OF SUCH DAMAGE. 
- -------------------------------------------------------------------------------- - The files - - compiler_unparse.py - - comment_eater.py - - traitsdoc.py - have the following license: - -This software is OSI Certified Open Source Software. -OSI Certified is a certification mark of the Open Source Initiative. - -Copyright (c) 2006, Enthought, Inc. -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - * Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - * Neither the name of Enthought, Inc. nor the names of its contributors may - be used to endorse or promote products derived from this software without - specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR -ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON -ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - - -------------------------------------------------------------------------------- - The file - - plot_directive.py - originates from Matplotlib (http://matplotlib.sf.net/) which has - the following license: - -Copyright (c) 2002-2008 John D. Hunter; All Rights Reserved. - -1. This LICENSE AGREEMENT is between John D. Hunter (“JDH”), and the Individual or Organization (“Licensee”) accessing and otherwise using matplotlib software in source or binary form and its associated documentation. - -2. Subject to the terms and conditions of this License Agreement, JDH hereby grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use matplotlib 0.98.3 alone or in any derivative version, provided, however, that JDH’s License Agreement and JDH’s notice of copyright, i.e., “Copyright (c) 2002-2008 John D. Hunter; All Rights Reserved” are retained in matplotlib 0.98.3 alone or in any derivative version prepared by Licensee. - -3. In the event Licensee prepares a derivative work that is based on or incorporates matplotlib 0.98.3 or any part thereof, and wants to make the derivative work available to others as provided herein, then Licensee hereby agrees to include in any such work a brief summary of the changes made to matplotlib 0.98.3. - -4. JDH is making matplotlib 0.98.3 available to Licensee on an “AS IS” basis. JDH MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. 
BY WAY OF EXAMPLE, BUT NOT LIMITATION, JDH MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF MATPLOTLIB 0.98.3 WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. - -5. JDH SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF MATPLOTLIB 0.98.3 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING MATPLOTLIB 0.98.3, OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material breach of its terms and conditions. - -7. Nothing in this License Agreement shall be deemed to create any relationship of agency, partnership, or joint venture between JDH and Licensee. This License Agreement does not grant permission to use JDH trademarks or trade name in a trademark sense to endorse or promote products or services of Licensee, or any third party. - -8. By copying, installing or otherwise using matplotlib 0.98.3, Licensee agrees to be bound by the terms and conditions of this License Agreement. - diff --git a/doc/sphinxext/MANIFEST.in b/doc/sphinxext/MANIFEST.in deleted file mode 100644 index 5176d485b8..0000000000 --- a/doc/sphinxext/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include numpydoc/tests *.py -include *.txt diff --git a/doc/sphinxext/README.txt b/doc/sphinxext/README.txt deleted file mode 100644 index 6ba63e6d85..0000000000 --- a/doc/sphinxext/README.txt +++ /dev/null @@ -1,45 +0,0 @@ -===================================== -numpydoc -- Numpy's Sphinx extensions -===================================== - -Numpy's documentation uses several custom extensions to Sphinx. These -are shipped in this ``numpydoc`` package, in case you want to make use -of them in third-party projects. - -The following extensions are available: - - - ``numpydoc``: support for the Numpy docstring format in Sphinx, and add - the code description directives ``np:function``, ``np-c:function``, etc. - that support the Numpy docstring syntax. - - - ``numpydoc.traitsdoc``: For gathering documentation about Traits attributes. - - - ``numpydoc.plot_directive``: Adaptation of Matplotlib's ``plot::`` - directive. Note that this implementation may still undergo severe - changes or eventually be deprecated. - - -numpydoc -======== - -Numpydoc inserts a hook into Sphinx's autodoc that converts docstrings -following the Numpy/Scipy format to a form palatable to Sphinx. - -Options -------- - -The following options can be set in conf.py: - -- numpydoc_use_plots: bool - - Whether to produce ``plot::`` directives for Examples sections that - contain ``import matplotlib``. - -- numpydoc_show_class_members: bool - - Whether to show all members of a class in the Methods and Attributes - sections automatically. - -- numpydoc_edit_link: bool (DEPRECATED -- edit your HTML template instead) - - Whether to insert an edit link after docstrings. diff --git a/doc/sphinxext/gen_rst.py b/doc/sphinxext/gen_rst.py deleted file mode 100644 index a02ab93471..0000000000 --- a/doc/sphinxext/gen_rst.py +++ /dev/null @@ -1,1108 +0,0 @@ -""" -Example generation for Py-ART - -Generate the rst files for the examples by iterating over the Python -example files. 
- -Files that generate images should state with 'plot' - -""" - -# This files was adapted from the scikit-learn file of the same name: -# https://github.com/scikit-learn/scikit-learn/blob/master/doc/sphinxext/gen_rst.py -# -# The scikit-learn project uses this following License which applies to this -# file. -# -# New BSD License -# -# Copyright (c) 2007-2013 The scikit-learn developers. -# All rights reserved. -# -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# -# a. Redistributions of source code must retain the above copyright notice, -# this list of conditions and the following disclaimer. -# b. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# c. Neither the name of the Scikit-learn Developers nor the names of -# its contributors may be used to endorse or promote products -# derived from this software without specific prior written -# permission. -# -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR -# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY -# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH -# DAMAGE. 
- -from __future__ import division, print_function - -from time import time -import os -import re -import shutil -import traceback -import glob -import sys -import gzip -import posixpath - -# Try Python 2 first, otherwise load from Python 3 -try: - from StringIO import StringIO - import cPickle as pickle - import urllib2 as urllib - from urllib2 import HTTPError, URLError -except: - from io import StringIO - import pickle - import urllib.request - import urllib.error - import urllib.parse - from urllib.error import HTTPError, URLError - -try: - # Python 2 built-in - execfile -except NameError: - def execfile(filename, global_vars=None, local_vars=None): - with open(filename, encoding='utf-8') as f: - code = compile(f.read(), filename, 'exec') - exec(code, global_vars, local_vars) - -try: - basestring -except NameError: - basestring = str - -try: - from PIL import Image -except: - import Image - -import matplotlib -matplotlib.use('Agg') - -import token -import tokenize -import numpy as np - - -############################################################################### -# A tee object to redict streams to multiple outputs - -class Tee(object): - - def __init__(self, file1, file2): - self.file1 = file1 - self.file2 = file2 - - def write(self, data): - self.file1.write(data) - self.file2.write(data) - - def flush(self): - self.file1.flush() - self.file2.flush() - -############################################################################### -# Documentation link resolver objects - - -def get_data(url): - """Helper function to get data over http or from a local file""" - if url.startswith('http://'): - resp = urllib.urlopen(url) - encoding = resp.headers.dict.get('content-encoding', 'plain') - data = resp.read() - if encoding == 'plain': - pass - elif encoding == 'gzip': - data = StringIO(data) - data = gzip.GzipFile(fileobj=data).read() - else: - raise RuntimeError('unknown encoding') - else: - with open(url, 'r') as fid: - data = fid.read() - fid.close() - - return data - - -def parse_sphinx_searchindex(searchindex): - """Parse a Sphinx search index - - Parameters - ---------- - searchindex : str - The Sphinx search index (contents of searchindex.js) - - Returns - ------- - filenames : list of str - The file names parsed from the search index. - objects : dict - The objects parsed from the search index. 
- """ - def _select_block(str_in, start_tag, end_tag): - """Select first block delimited by start_tag and end_tag""" - start_pos = str_in.find(start_tag) - if start_pos < 0: - raise ValueError('start_tag not found') - depth = 0 - for pos in range(start_pos, len(str_in)): - if str_in[pos] == start_tag: - depth += 1 - elif str_in[pos] == end_tag: - depth -= 1 - - if depth == 0: - break - sel = str_in[start_pos + 1:pos] - return sel - - def _parse_dict_recursive(dict_str): - """Parse a dictionary from the search index""" - dict_out = dict() - pos_last = 0 - pos = dict_str.find(':') - while pos >= 0: - key = dict_str[pos_last:pos] - if dict_str[pos + 1] == '[': - # value is a list - pos_tmp = dict_str.find(']', pos + 1) - if pos_tmp < 0: - raise RuntimeError('error when parsing dict') - value = dict_str[pos + 2: pos_tmp].split(',') - # try to convert elements to int - for i in range(len(value)): - try: - value[i] = int(value[i]) - except ValueError: - pass - elif dict_str[pos + 1] == '{': - # value is another dictionary - subdict_str = _select_block(dict_str[pos:], '{', '}') - value = _parse_dict_recursive(subdict_str) - pos_tmp = pos + len(subdict_str) - else: - raise ValueError('error when parsing dict: unknown elem') - - key = key.strip('"') - if len(key) > 0: - dict_out[key] = value - - pos_last = dict_str.find(',', pos_tmp) - if pos_last < 0: - break - pos_last += 1 - pos = dict_str.find(':', pos_last) - - return dict_out - - # parse objects - query = 'objects:' - pos = searchindex.find(query) - if pos < 0: - raise ValueError('"objects:" not found in search index') - - sel = _select_block(searchindex[pos:], '{', '}') - objects = _parse_dict_recursive(sel) - - # parse filenames - query = 'filenames:' - pos = searchindex.find(query) - if pos < 0: - raise ValueError('"filenames:" not found in search index') - filenames = searchindex[pos + len(query) + 1:] - filenames = filenames[:filenames.find(']')] - filenames = [f.strip('"') for f in filenames.split(',')] - - return filenames, objects - - -class SphinxDocLinkResolver(object): - """ Resolve documentation links using searchindex.js generated by Sphinx - - Parameters - ---------- - doc_url : str - The base URL of the project website. - searchindex : str - Filename of searchindex, relative to doc_url. - extra_modules_test : list of str - List of extra module names to test. - relative : bool - Return relative links (only useful for links to documentation of this - package). 
- """ - - def __init__(self, doc_url, searchindex='searchindex.js', - extra_modules_test=None, relative=False): - self.doc_url = doc_url - self.relative = relative - self._link_cache = {} - - self.extra_modules_test = extra_modules_test - self._page_cache = {} - if doc_url.startswith('http://'): - if relative: - raise ValueError('Relative links are only supported for local ' - 'URLs (doc_url cannot start with "http://)"') - searchindex_url = doc_url + '/' + searchindex - else: - searchindex_url = os.path.join(doc_url, searchindex) - - # detect if we are using relative links on a Windows system - if os.name.lower() == 'nt' and not doc_url.startswith('http://'): - if not relative: - raise ValueError('You have to use relative=True for the local' - 'package on a Windows system.') - self._is_windows = True - else: - self._is_windows = False - - # download and initialize the search index - sindex = get_data(searchindex_url) - filenames, objects = parse_sphinx_searchindex(sindex) - - self._searchindex = dict(filenames=filenames, objects=objects) - - def _get_link(self, cobj): - """Get a valid link, False if not found""" - - fname_idx = None - full_name = cobj['module_short'] + '.' + cobj['name'] - if full_name in self._searchindex['objects']: - value = self._searchindex['objects'][full_name] - if isinstance(value, dict): - value = value[value.keys()[0]] - fname_idx = value[0] - elif cobj['module_short'] in self._searchindex['objects']: - value = self._searchindex['objects'][cobj['module_short']] - if cobj['name'] in value.keys(): - fname_idx = value[cobj['name']][0] - - if fname_idx is not None: - fname = self._searchindex['filenames'][fname_idx] + '.html' - - if self._is_windows: - fname = fname.replace('/', '\\') - link = os.path.join(self.doc_url, fname) - else: - link = posixpath.join(self.doc_url, fname) - - if link in self._page_cache: - html = self._page_cache[link] - else: - html = get_data(link) - self._page_cache[link] = html - - # test if cobj appears in page - comb_names = [cobj['module_short'] + '.' + cobj['name']] - if self.extra_modules_test is not None: - for mod in self.extra_modules_test: - comb_names.append(mod + '.' + cobj['name']) - url = False - for comb_name in comb_names: - if html.find(comb_name) >= 0: - url = link + '#' + comb_name - link = url - else: - link = False - - return link - - def resolve(self, cobj, this_url): - """Resolve the link to the documentation, returns None if not found - - Parameters - ---------- - cobj : dict - Dict with information about the "code object" for which we are - resolving a link. - cobi['name'] : function or class name (str) - cobj['module_short'] : shortened module name (str) - cobj['module'] : module name (str) - this_url: str - URL of the current page. Needed to construct relative URLs - (only used if relative=True in constructor). - - Returns - ------- - link : str | None - The link (URL) to the documentation. - """ - full_name = cobj['module_short'] + '.' 
+ cobj['name'] - link = self._link_cache.get(full_name, None) - if link is None: - # we don't have it cached - link = self._get_link(cobj) - # cache it for the future - self._link_cache[full_name] = link - - if link is False or link is None: - # failed to resolve - return None - - if self.relative: - link = os.path.relpath(link, start=this_url) - if self._is_windows: - # replace '\' with '/' so it on the web - link = link.replace('\\', '/') - - # for some reason, the relative link goes one directory too high up - link = link[3:] - - return link - - -############################################################################### -rst_template = """ - -.. _example_%(short_fname)s: - -%(docstring)s - -**Python source code:** :download:`%(fname)s <%(fname)s>` - -.. literalinclude:: %(fname)s - :lines: %(end_row)s- - """ - -plot_rst_template = """ - -.. _example_%(short_fname)s: - -%(docstring)s - -%(image_list)s - -%(stdout)s - -**Python source code:** :download:`%(fname)s <%(fname)s>` - -.. literalinclude:: %(fname)s - :lines: %(end_row)s- - -**Total running time of the example:** %(time_elapsed) .2f seconds - """ - -# The following strings are used when we have several pictures: we use -# an html div tag that our CSS uses to turn the lists into horizontal -# lists. -HLIST_HEADER = """ -.. rst-class:: horizontal - -""" - -HLIST_IMAGE_TEMPLATE = """ - * - - .. image:: images/%s - :scale: 47 -""" - -SINGLE_IMAGE = """ -.. image:: images/%s - :align: center -""" - - -def extract_docstring(filename, ignore_heading=False): - """ Extract a module-level docstring, if any - """ - if sys.version_info >= (3, 0): - lines = open(filename, encoding='utf-8').readlines() - else: - lines = open(filename).readlines() - start_row = 0 - if lines[0].startswith('#!'): - lines.pop(0) - start_row = 1 - docstring = '' - first_par = '' - line_iterator = iter(lines) - tokens = tokenize.generate_tokens(lambda: next(line_iterator)) - for tok_type, tok_content, _, (erow, _), _ in tokens: - tok_type = token.tok_name[tok_type] - if tok_type in ('NEWLINE', 'COMMENT', 'NL', 'INDENT', 'DEDENT'): - continue - elif tok_type == 'STRING': - docstring = eval(tok_content) - # If the docstring is formatted with several paragraphs, extract - # the first one: - paragraphs = '\n'.join( - line.rstrip() for line - in docstring.split('\n')).split('\n\n') - if paragraphs: - if ignore_heading: - if len(paragraphs) > 1: - first_par = re.sub('\n', ' ', paragraphs[1]) - first_par = ((first_par[:95] + '...') - if len(first_par) > 95 else first_par) - else: - raise ValueError("Docstring not found by gallery", - "Please check your example's layout", - " and make sure it's correct") - else: - first_par = paragraphs[0] - - break - return docstring, first_par, erow + 1 + start_row - - -def generate_example_rst(app): - """ Generate the list of examples, as well as the contents of - examples. - """ - root_dir = os.path.join(app.builder.srcdir, 'auto_examples') - example_dir = os.path.abspath(app.builder.srcdir + '/../../' + 'examples') - try: - plot_gallery = eval(app.builder.config.plot_gallery) - except TypeError: - plot_gallery = bool(app.builder.config.plot_gallery) - if not os.path.exists(example_dir): - os.makedirs(example_dir) - if not os.path.exists(root_dir): - os.makedirs(root_dir) - - # we create an index.rst with all examples - fhindex = open(os.path.join(root_dir, 'index.rst'), 'w') - fhindex.write("""\ - - - -.. raw:: html - - - - - -.. raw:: html - - - - - - -Examples -======== - -.. 
_examples-index: -""") - # Here we don't use an os.walk, but we recurse only twice: flat is - # better than nested. - generate_dir_rst('.', fhindex, example_dir, root_dir, plot_gallery) - for dir in sorted(os.listdir(example_dir)): - if os.path.isdir(os.path.join(example_dir, dir)): - generate_dir_rst(dir, fhindex, example_dir, root_dir, plot_gallery) - fhindex.flush() - - -def extract_line_count(filename, target_dir): - # Extract the line count of a file - example_file = os.path.join(target_dir, filename) - if sys.version_info >= (3, 0): - lines = open(example_file, encoding='utf-8').readlines() - else: - lines = open(example_file).readlines() - start_row = 0 - if lines and lines[0].startswith('#!'): - lines.pop(0) - start_row = 1 - line_iterator = iter(lines) - tokens = tokenize.generate_tokens(lambda: next(line_iterator)) - check_docstring = True - erow_docstring = 0 - for tok_type, _, _, (erow, _), _ in tokens: - tok_type = token.tok_name[tok_type] - if tok_type in ('NEWLINE', 'COMMENT', 'NL', 'INDENT', 'DEDENT'): - continue - elif ((tok_type == 'STRING') and check_docstring): - erow_docstring = erow - check_docstring = False - return erow_docstring+1+start_row, erow+1+start_row - - -def line_count_sort(file_list, target_dir): - # Sort the list of examples by line-count - new_list = [x for x in file_list if x.endswith('.py')] - unsorted = np.zeros(shape=(len(new_list), 2)) - unsorted = unsorted.astype(np.object) - for count, exmpl in enumerate(new_list): - docstr_lines, total_lines = extract_line_count(exmpl, target_dir) - unsorted[count][1] = total_lines - docstr_lines - unsorted[count][0] = exmpl - index = np.lexsort((unsorted[:, 0].astype(np.str), - unsorted[:, 1].astype(np.float))) - if not len(unsorted): - return [] - return np.array(unsorted[index][:, 0]).tolist() - - -def generate_dir_rst(dir, fhindex, example_dir, root_dir, plot_gallery): - """ Generate the rst file for an example directory. - """ - if not dir == '.': - target_dir = os.path.join(root_dir, dir) - src_dir = os.path.join(example_dir, dir) - else: - target_dir = root_dir - src_dir = example_dir - if not os.path.exists(os.path.join(src_dir, 'README.txt')): - print(80 * '_') - print('Example directory %s does not have a README.txt file' % - src_dir) - print('Skipping this directory') - print(80 * '_') - return - fhindex.write(""" - - -%s - - -""" % open(os.path.join(src_dir, 'README.txt')).read()) - if not os.path.exists(target_dir): - os.makedirs(target_dir) - sorted_listdir = line_count_sort(os.listdir(src_dir), - src_dir) - for fname in sorted_listdir: - if fname.endswith('py'): - generate_file_rst(fname, target_dir, src_dir, plot_gallery) - new_fname = os.path.join(src_dir, fname) - _, fdocstring, _ = extract_docstring(new_fname, True) - thumb = os.path.join(dir, 'images', 'thumb', fname[:-3] + '.png') - link_name = os.path.join(dir, fname).replace(os.path.sep, '_') - fhindex.write(""" - -.. raw:: html - - -
-
- - -""") - - fhindex.write('.. figure:: %s\n' % thumb) - if link_name.startswith('._'): - link_name = link_name[2:] - if dir != '.': - fhindex.write(' :target: ./%s/%s.html\n\n' % (dir, - fname[:-3])) - else: - fhindex.write(' :target: ./%s.html\n\n' % link_name[:-3]) - fhindex.write(""" :ref:`example_%s` - - -.. raw:: html - - -

%s -

-
- - -.. toctree:: - :hidden: - - %s/%s - -""" % (link_name, fdocstring, dir, fname[:-3])) - fhindex.write(""" -.. raw:: html - -
- """) # clear at the end of the section - -# modules for which we embed links into example code -DOCMODULES = ['pyart', 'matplotlib', 'numpy', 'scipy'] - - -def make_thumbnail(in_fname, out_fname, width, height): - """Make a thumbnail with the same aspect ratio centered in an - image with a given width and height - """ - img = Image.open(in_fname) - width_in, height_in = img.size - scale_w = width / float(width_in) - scale_h = height / float(height_in) - - if height_in * scale_w <= height: - scale = scale_w - else: - scale = scale_h - - width_sc = int(round(scale * width_in)) - height_sc = int(round(scale * height_in)) - - # resize the image - img.thumbnail((width_sc, height_sc), Image.ANTIALIAS) - - # insert centered - thumb = Image.new('RGB', (width, height), (255, 255, 255)) - pos_insert = ((width - width_sc) // 2, (height - height_sc) // 2) - thumb.paste(img, pos_insert) - - thumb.save(out_fname) - - -def get_short_module_name(module_name, obj_name): - """ Get the shortest possible module name """ - parts = module_name.split('.') - short_name = module_name - for i in range(len(parts) - 1, 0, -1): - short_name = '.'.join(parts[:i]) - try: - exec('from %s import %s' % (short_name, obj_name)) - except ImportError: - # get the last working module name - short_name = '.'.join(parts[:(i + 1)]) - break - return short_name - - -def generate_file_rst(fname, target_dir, src_dir, plot_gallery): - """ Generate the rst file for a given example. - """ - base_image_name = os.path.splitext(fname)[0] - image_fname = '%s_%%s.png' % base_image_name - - this_template = rst_template - last_dir = os.path.split(src_dir)[-1] - # to avoid leading . in file names, and wrong names in links - if last_dir == '.' or last_dir == 'examples': - last_dir = '' - else: - last_dir += '_' - short_fname = last_dir + fname - src_file = os.path.join(src_dir, fname) - example_file = os.path.join(target_dir, fname) - shutil.copyfile(src_file, example_file) - - # The following is a list containing all the figure names - figure_list = [] - - image_dir = os.path.join(target_dir, 'images') - thumb_dir = os.path.join(image_dir, 'thumb') - if not os.path.exists(image_dir): - os.makedirs(image_dir) - if not os.path.exists(thumb_dir): - os.makedirs(thumb_dir) - image_path = os.path.join(image_dir, image_fname) - stdout_path = os.path.join(image_dir, - 'stdout_%s.txt' % base_image_name) - time_path = os.path.join(image_dir, - 'time_%s.txt' % base_image_name) - thumb_file = os.path.join(thumb_dir, fname[:-3] + '.png') - time_elapsed = 0 - if plot_gallery and fname.startswith('plot'): - # generate the plot as png image if file name - # starts with plot and if it is more recent than an - # existing image. 
- first_image_file = image_path % 1 - if os.path.exists(stdout_path): - stdout = open(stdout_path).read() - else: - stdout = '' - if os.path.exists(time_path): - time_elapsed = float(open(time_path).read()) - - if not os.path.exists(first_image_file) or \ - os.stat(first_image_file).st_mtime <= os.stat(src_file).st_mtime: - # We need to execute the code - print('plotting %s' % fname) - t0 = time() - import matplotlib.pyplot as plt - plt.close('all') - cwd = os.getcwd() - try: - # First CD in the original example dir, so that any file - # created by the example get created in this directory - orig_stdout = sys.stdout - os.chdir(os.path.dirname(src_file)) - my_buffer = StringIO() - my_stdout = Tee(sys.stdout, my_buffer) - sys.stdout = my_stdout - my_globals = {'pl': plt} - execfile(os.path.basename(src_file), my_globals) - time_elapsed = time() - t0 - sys.stdout = orig_stdout - my_stdout = my_buffer.getvalue() - - # get variables so we can later add links to the documentation - example_code_obj = {} - for var_name, var in my_globals.items(): - if not hasattr(var, '__module__'): - continue - if not isinstance(var.__module__, basestring): - continue - if var.__module__.split('.')[0] not in DOCMODULES: - continue - - # get the type as a string with other things stripped - tstr = str(type(var)) - tstr = (tstr[tstr.find('\'') - + 1:tstr.rfind('\'')].split('.')[-1]) - # get shortened module name - module_short = get_short_module_name(var.__module__, - tstr) - cobj = {'name': tstr, 'module': var.__module__, - 'module_short': module_short, - 'obj_type': 'object'} - example_code_obj[var_name] = cobj - - # find functions so we can later add links to the documentation - funregex = re.compile('[\w.]+\(') - with open(src_file, 'rt') as fid: - for line in fid.readlines(): - if line.startswith('#'): - continue - for match in funregex.findall(line): - fun_name = match[:-1] - - try: - exec('this_fun = %s' % fun_name, my_globals) - except Exception as err: - # Here, we were not able to execute the - # previous statement, either because the - # fun_name was not a function but a statement - # (print), or because the regexp didn't - # catch the whole function name : - # eg: - # X = something().blah() - # will work for something, but not blah. 
- - continue - this_fun = my_globals['this_fun'] - if not callable(this_fun): - continue - if not hasattr(this_fun, '__module__'): - continue - if not isinstance(this_fun.__module__, basestring): - continue - if (this_fun.__module__.split('.')[0] - not in DOCMODULES): - continue - - # get shortened module name - fun_name_short = fun_name.split('.')[-1] - module_short = get_short_module_name( - this_fun.__module__, fun_name_short) - cobj = {'name': fun_name_short, - 'module': this_fun.__module__, - 'module_short': module_short, - 'obj_type': 'function'} - example_code_obj[fun_name] = cobj - fid.close() - - if len(example_code_obj) > 0: - # save the dictionary, so we can later add hyperlinks - codeobj_fname = example_file[:-3] + '_codeobj.pickle' - with open(codeobj_fname, 'wb') as fid: - pickle.dump(example_code_obj, fid, - pickle.HIGHEST_PROTOCOL) - fid.close() - - if '__doc__' in my_globals: - # The __doc__ is often printed in the example, we - # don't with to echo it - my_stdout = my_stdout.replace( - my_globals['__doc__'], - '') - my_stdout = my_stdout.strip() - if my_stdout: - stdout = '**Script output**::\n\n %s\n\n' % ( - '\n '.join(my_stdout.split('\n'))) - open(stdout_path, 'w').write(stdout) - open(time_path, 'w').write('%f' % time_elapsed) - os.chdir(cwd) - - # In order to save every figure we have two solutions : - # * iterate from 1 to infinity and call plt.fignum_exists(n) - # (this requires the figures to be numbered - # incrementally: 1, 2, 3 and not 1, 2, 5) - # * iterate over [fig_mngr.num for fig_mngr in - # matplotlib._pylab_helpers.Gcf.get_all_fig_managers()] - for fig_num in (fig_mngr.num for fig_mngr in - matplotlib._pylab_helpers.Gcf.get_all_fig_managers()): - # Set the fig_num figure as the current figure as we can't - # save a figure that's not the current figure. - plt.figure(fig_num) - plt.savefig(image_path % fig_num) - figure_list.append(image_fname % fig_num) - except: - print(80 * '_') - print('%s is not compiling:' % fname) - traceback.print_exc() - print(80 * '_') - finally: - os.chdir(cwd) - sys.stdout = orig_stdout - - print(" - time elapsed : %.2g sec" % time_elapsed) - else: - figure_list = [f[len(image_dir):] - for f in glob.glob(image_path % '[1-9]')] - #for f in glob.glob(image_path % '*')] - - # generate thumb file - this_template = plot_rst_template - if os.path.exists(first_image_file): - make_thumbnail(first_image_file, thumb_file, 400, 280) - - if not os.path.exists(thumb_file): - # create something to replace the thumbnail - make_thumbnail('images/no_image.png', thumb_file, 200, 140) - - docstring, short_desc, end_row = extract_docstring(example_file) - - # Depending on whether we have one or more figures, we're using a - # horizontal list or a single rst call to 'image'. 
- if len(figure_list) == 1: - figure_name = figure_list[0] - image_list = SINGLE_IMAGE % figure_name.lstrip('/') - else: - image_list = HLIST_HEADER - for figure_name in figure_list: - image_list += HLIST_IMAGE_TEMPLATE % figure_name.lstrip('/') - - f = open(os.path.join(target_dir, fname[:-2] + 'rst'), 'w') - f.write(this_template % locals()) - f.flush() - - -def embed_code_links(app, exception): - """Embed hyperlinks to documentation into example code""" - try: - if exception is not None: - return - print('Embedding documentation hyperlinks in examples..') - - # Add resolvers for the packages for which we want to show links - doc_resolvers = {} - doc_resolvers['pyart'] = SphinxDocLinkResolver(app.builder.outdir, - relative=True) - - doc_resolvers['matplotlib'] = SphinxDocLinkResolver( - 'http://matplotlib.org') - - doc_resolvers['numpy'] = SphinxDocLinkResolver( - 'http://docs.scipy.org/doc/numpy-1.6.0') - - doc_resolvers['scipy'] = SphinxDocLinkResolver( - 'http://docs.scipy.org/doc/scipy-0.11.0/reference') - - example_dir = os.path.join(app.builder.srcdir, 'auto_examples') - html_example_dir = os.path.abspath(os.path.join(app.builder.outdir, - 'auto_examples')) - - # patterns for replacement - link_pattern = '%s' - orig_pattern = '%s' - period = '.' - - for dirpath, _, filenames in os.walk(html_example_dir): - for fname in filenames: - print('\tprocessing: %s' % fname) - full_fname = os.path.join(html_example_dir, dirpath, fname) - subpath = dirpath[len(html_example_dir) + 1:] - pickle_fname = os.path.join(example_dir, subpath, - fname[:-5] + '_codeobj.pickle') - - if os.path.exists(pickle_fname): - # we have a pickle file with the objects to embed links for - with open(pickle_fname, 'rb') as fid: - example_code_obj = pickle.load(fid) - fid.close() - str_repl = {} - # generate replacement strings with the links - for name, cobj in example_code_obj.iteritems(): - this_module = cobj['module'].split('.')[0] - - if this_module not in doc_resolvers: - continue - - link = doc_resolvers[this_module].resolve(cobj, - full_fname) - if link is not None: - parts = name.split('.') - name_html = orig_pattern % parts[0] - for part in parts[1:]: - name_html += period + orig_pattern % part - str_repl[name_html] = link_pattern % (link, name_html) - # do the replacement in the html file - if len(str_repl) > 0: - with open(full_fname, 'rb') as fid: - lines_in = fid.readlines() - with open(full_fname, 'wb') as fid: - for line in lines_in: - line = line.decode('utf-8') - for name, link in str_repl.iteritems(): - line = line.replace(name, link) - fid.write(line.encode('utf-8')) - except HTTPError as e: - print("The following HTTP Error has occurred:\n") - print(e.code) - except URLError as e: - print("\n...\n" - "Warning: Embedding the documentation hyperlinks requires " - "internet access.\nPlease check your network connection.\n" - "Unable to continue embedding due to a URL Error: \n") - print(e.args) - print('[done]') - - -def setup(app): - app.connect('builder-inited', generate_example_rst) - app.add_config_value('plot_gallery', True, 'html') - - # embed links after build is finished - # XXX disable documentation hyperlinks in examples for now - #app.connect('build-finished', embed_code_links) - - # Sphinx hack: sphinx copies generated images to the build directory - # each time the docs are made. If the desired image name already - # exists, it appends a digit to prevent overwrites. The problem is, - # the directory is never cleared. 
This means that each time you build - # the docs, the number of images in the directory grows. - # - # This question has been asked on the sphinx development list, but there - # was no response: http://osdir.com/ml/sphinx-dev/2011-02/msg00123.html - # - # The following is a hack that prevents this behavior by clearing the - # image build directory each time the docs are built. If sphinx - # changes their layout between versions, this will not work (though - # it should probably not cause a crash). Tested successfully - # on Sphinx 1.0.7 - build_image_dir = 'build/html/_images' - if os.path.exists(build_image_dir): - filelist = os.listdir(build_image_dir) - for filename in filelist: - if filename.endswith('png'): - os.remove(os.path.join(build_image_dir, filename)) diff --git a/doc/sphinxext/numpydoc/__init__.py b/doc/sphinxext/numpydoc/__init__.py deleted file mode 100644 index 0fce2cf747..0000000000 --- a/doc/sphinxext/numpydoc/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from __future__ import division, absolute_import, print_function - -from .numpydoc import setup diff --git a/doc/sphinxext/numpydoc/comment_eater.py b/doc/sphinxext/numpydoc/comment_eater.py deleted file mode 100644 index 8cddd3305f..0000000000 --- a/doc/sphinxext/numpydoc/comment_eater.py +++ /dev/null @@ -1,169 +0,0 @@ -from __future__ import division, absolute_import, print_function - -import sys -if sys.version_info[0] >= 3: - from io import StringIO -else: - from io import StringIO - -import compiler -import inspect -import textwrap -import tokenize - -from .compiler_unparse import unparse - - -class Comment(object): - """ A comment block. - """ - is_comment = True - def __init__(self, start_lineno, end_lineno, text): - # int : The first line number in the block. 1-indexed. - self.start_lineno = start_lineno - # int : The last line number. Inclusive! - self.end_lineno = end_lineno - # str : The text block including '#' character but not any leading spaces. - self.text = text - - def add(self, string, start, end, line): - """ Add a new comment line. - """ - self.start_lineno = min(self.start_lineno, start[0]) - self.end_lineno = max(self.end_lineno, end[0]) - self.text += string - - def __repr__(self): - return '%s(%r, %r, %r)' % (self.__class__.__name__, self.start_lineno, - self.end_lineno, self.text) - - -class NonComment(object): - """ A non-comment block of code. - """ - is_comment = False - def __init__(self, start_lineno, end_lineno): - self.start_lineno = start_lineno - self.end_lineno = end_lineno - - def add(self, string, start, end, line): - """ Add lines to the block. - """ - if string.strip(): - # Only add if not entirely whitespace. - self.start_lineno = min(self.start_lineno, start[0]) - self.end_lineno = max(self.end_lineno, end[0]) - - def __repr__(self): - return '%s(%r, %r)' % (self.__class__.__name__, self.start_lineno, - self.end_lineno) - - -class CommentBlocker(object): - """ Pull out contiguous comment blocks. - """ - def __init__(self): - # Start with a dummy. - self.current_block = NonComment(0, 0) - - # All of the blocks seen so far. - self.blocks = [] - - # The index mapping lines of code to their associated comment blocks. - self.index = {} - - def process_file(self, file): - """ Process a file object. - """ - if sys.version_info[0] >= 3: - nxt = file.__next__ - else: - nxt = file.next - for token in tokenize.generate_tokens(nxt): - self.process_token(*token) - self.make_index() - - def process_token(self, kind, string, start, end, line): - """ Process a single token. 
- """ - if self.current_block.is_comment: - if kind == tokenize.COMMENT: - self.current_block.add(string, start, end, line) - else: - self.new_noncomment(start[0], end[0]) - else: - if kind == tokenize.COMMENT: - self.new_comment(string, start, end, line) - else: - self.current_block.add(string, start, end, line) - - def new_noncomment(self, start_lineno, end_lineno): - """ We are transitioning from a noncomment to a comment. - """ - block = NonComment(start_lineno, end_lineno) - self.blocks.append(block) - self.current_block = block - - def new_comment(self, string, start, end, line): - """ Possibly add a new comment. - - Only adds a new comment if this comment is the only thing on the line. - Otherwise, it extends the noncomment block. - """ - prefix = line[:start[1]] - if prefix.strip(): - # Oops! Trailing comment, not a comment block. - self.current_block.add(string, start, end, line) - else: - # A comment block. - block = Comment(start[0], end[0], string) - self.blocks.append(block) - self.current_block = block - - def make_index(self): - """ Make the index mapping lines of actual code to their associated - prefix comments. - """ - for prev, block in zip(self.blocks[:-1], self.blocks[1:]): - if not block.is_comment: - self.index[block.start_lineno] = prev - - def search_for_comment(self, lineno, default=None): - """ Find the comment block just before the given line number. - - Returns None (or the specified default) if there is no such block. - """ - if not self.index: - self.make_index() - block = self.index.get(lineno, None) - text = getattr(block, 'text', default) - return text - - -def strip_comment_marker(text): - """ Strip # markers at the front of a block of comment text. - """ - lines = [] - for line in text.splitlines(): - lines.append(line.lstrip('#')) - text = textwrap.dedent('\n'.join(lines)) - return text - - -def get_class_traits(klass): - """ Yield all of the documentation for trait definitions on a class object. - """ - # FIXME: gracefully handle errors here or in the caller? - source = inspect.getsource(klass) - cb = CommentBlocker() - cb.process_file(StringIO(source)) - mod_ast = compiler.parse(source) - class_ast = mod_ast.node.nodes[0] - for node in class_ast.code.nodes: - # FIXME: handle other kinds of assignments? - if isinstance(node, compiler.ast.Assign): - name = node.nodes[0].name - rhs = unparse(node.expr).strip() - doc = strip_comment_marker(cb.search_for_comment(node.lineno, default='')) - yield name, rhs, doc - diff --git a/doc/sphinxext/numpydoc/compiler_unparse.py b/doc/sphinxext/numpydoc/compiler_unparse.py deleted file mode 100644 index 8933a83db3..0000000000 --- a/doc/sphinxext/numpydoc/compiler_unparse.py +++ /dev/null @@ -1,865 +0,0 @@ -""" Turn compiler.ast structures back into executable python code. - - The unparse method takes a compiler.ast tree and transforms it back into - valid python code. It is incomplete and currently only works for - import statements, function calls, function definitions, assignments, and - basic expressions. - - Inspired by python-2.5-svn/Demo/parser/unparse.py - - fixme: We may want to move to using _ast trees because the compiler for - them is about 6 times faster than compiler.compile. 
-""" -from __future__ import division, absolute_import, print_function - -import sys -from compiler.ast import Const, Name, Tuple, Div, Mul, Sub, Add - -if sys.version_info[0] >= 3: - from io import StringIO -else: - from StringIO import StringIO - -def unparse(ast, single_line_functions=False): - s = StringIO() - UnparseCompilerAst(ast, s, single_line_functions) - return s.getvalue().lstrip() - -op_precedence = { 'compiler.ast.Power':3, 'compiler.ast.Mul':2, 'compiler.ast.Div':2, - 'compiler.ast.Add':1, 'compiler.ast.Sub':1 } - -class UnparseCompilerAst: - """ Methods in this class recursively traverse an AST and - output source code for the abstract syntax; original formatting - is disregarged. - """ - - ######################################################################### - # object interface. - ######################################################################### - - def __init__(self, tree, file = sys.stdout, single_line_functions=False): - """ Unparser(tree, file=sys.stdout) -> None. - - Print the source for tree to file. - """ - self.f = file - self._single_func = single_line_functions - self._do_indent = True - self._indent = 0 - self._dispatch(tree) - self._write("\n") - self.f.flush() - - ######################################################################### - # Unparser private interface. - ######################################################################### - - ### format, output, and dispatch methods ################################ - - def _fill(self, text = ""): - "Indent a piece of text, according to the current indentation level" - if self._do_indent: - self._write("\n"+" "*self._indent + text) - else: - self._write(text) - - def _write(self, text): - "Append a piece of text to the current line." - self.f.write(text) - - def _enter(self): - "Print ':', and increase the indentation." - self._write(": ") - self._indent += 1 - - def _leave(self): - "Decrease the indentation level." - self._indent -= 1 - - def _dispatch(self, tree): - "_dispatcher function, _dispatching tree type T to method _T." - if isinstance(tree, list): - for t in tree: - self._dispatch(t) - return - meth = getattr(self, "_"+tree.__class__.__name__) - if tree.__class__.__name__ == 'NoneType' and not self._do_indent: - return - meth(tree) - - - ######################################################################### - # compiler.ast unparsing methods. - # - # There should be one method per concrete grammar type. They are - # organized in alphabetical order. - ######################################################################### - - def _Add(self, t): - self.__binary_op(t, '+') - - def _And(self, t): - self._write(" (") - for i, node in enumerate(t.nodes): - self._dispatch(node) - if i != len(t.nodes)-1: - self._write(") and (") - self._write(")") - - def _AssAttr(self, t): - """ Handle assigning an attribute of an object - """ - self._dispatch(t.expr) - self._write('.'+t.attrname) - - def _Assign(self, t): - """ Expression Assignment such as "a = 1". - - This only handles assignment in expressions. Keyword assignment - is handled separately. - """ - self._fill() - for target in t.nodes: - self._dispatch(target) - self._write(" = ") - self._dispatch(t.expr) - if not self._do_indent: - self._write('; ') - - def _AssName(self, t): - """ Name on left hand side of expression. - - Treat just like a name on the right side of an expression. - """ - self._Name(t) - - def _AssTuple(self, t): - """ Tuple on left hand side of an expression. - """ - - # _write each elements, separated by a comma. 
- for element in t.nodes[:-1]: - self._dispatch(element) - self._write(", ") - - # Handle the last one without writing comma - last_element = t.nodes[-1] - self._dispatch(last_element) - - def _AugAssign(self, t): - """ +=,-=,*=,/=,**=, etc. operations - """ - - self._fill() - self._dispatch(t.node) - self._write(' '+t.op+' ') - self._dispatch(t.expr) - if not self._do_indent: - self._write(';') - - def _Bitand(self, t): - """ Bit and operation. - """ - - for i, node in enumerate(t.nodes): - self._write("(") - self._dispatch(node) - self._write(")") - if i != len(t.nodes)-1: - self._write(" & ") - - def _Bitor(self, t): - """ Bit or operation - """ - - for i, node in enumerate(t.nodes): - self._write("(") - self._dispatch(node) - self._write(")") - if i != len(t.nodes)-1: - self._write(" | ") - - def _CallFunc(self, t): - """ Function call. - """ - self._dispatch(t.node) - self._write("(") - comma = False - for e in t.args: - if comma: self._write(", ") - else: comma = True - self._dispatch(e) - if t.star_args: - if comma: self._write(", ") - else: comma = True - self._write("*") - self._dispatch(t.star_args) - if t.dstar_args: - if comma: self._write(", ") - else: comma = True - self._write("**") - self._dispatch(t.dstar_args) - self._write(")") - - def _Compare(self, t): - self._dispatch(t.expr) - for op, expr in t.ops: - self._write(" " + op + " ") - self._dispatch(expr) - - def _Const(self, t): - """ A constant value such as an integer value, 3, or a string, "hello". - """ - self._dispatch(t.value) - - def _Decorators(self, t): - """ Handle function decorators (eg. @has_units) - """ - for node in t.nodes: - self._dispatch(node) - - def _Dict(self, t): - self._write("{") - for i, (k, v) in enumerate(t.items): - self._dispatch(k) - self._write(": ") - self._dispatch(v) - if i < len(t.items)-1: - self._write(", ") - self._write("}") - - def _Discard(self, t): - """ Node for when return value is ignored such as in "foo(a)". - """ - self._fill() - self._dispatch(t.expr) - - def _Div(self, t): - self.__binary_op(t, '/') - - def _Ellipsis(self, t): - self._write("...") - - def _From(self, t): - """ Handle "from xyz import foo, bar as baz". - """ - # fixme: Are From and ImportFrom handled differently? 
- self._fill("from ") - self._write(t.modname) - self._write(" import ") - for i, (name,asname) in enumerate(t.names): - if i != 0: - self._write(", ") - self._write(name) - if asname is not None: - self._write(" as "+asname) - - def _Function(self, t): - """ Handle function definitions - """ - if t.decorators is not None: - self._fill("@") - self._dispatch(t.decorators) - self._fill("def "+t.name + "(") - defaults = [None] * (len(t.argnames) - len(t.defaults)) + list(t.defaults) - for i, arg in enumerate(zip(t.argnames, defaults)): - self._write(arg[0]) - if arg[1] is not None: - self._write('=') - self._dispatch(arg[1]) - if i < len(t.argnames)-1: - self._write(', ') - self._write(")") - if self._single_func: - self._do_indent = False - self._enter() - self._dispatch(t.code) - self._leave() - self._do_indent = True - - def _Getattr(self, t): - """ Handle getting an attribute of an object - """ - if isinstance(t.expr, (Div, Mul, Sub, Add)): - self._write('(') - self._dispatch(t.expr) - self._write(')') - else: - self._dispatch(t.expr) - - self._write('.'+t.attrname) - - def _If(self, t): - self._fill() - - for i, (compare,code) in enumerate(t.tests): - if i == 0: - self._write("if ") - else: - self._write("elif ") - self._dispatch(compare) - self._enter() - self._fill() - self._dispatch(code) - self._leave() - self._write("\n") - - if t.else_ is not None: - self._write("else") - self._enter() - self._fill() - self._dispatch(t.else_) - self._leave() - self._write("\n") - - def _IfExp(self, t): - self._dispatch(t.then) - self._write(" if ") - self._dispatch(t.test) - - if t.else_ is not None: - self._write(" else (") - self._dispatch(t.else_) - self._write(")") - - def _Import(self, t): - """ Handle "import xyz.foo". - """ - self._fill("import ") - - for i, (name,asname) in enumerate(t.names): - if i != 0: - self._write(", ") - self._write(name) - if asname is not None: - self._write(" as "+asname) - - def _Keyword(self, t): - """ Keyword value assignment within function calls and definitions. 
- """ - self._write(t.name) - self._write("=") - self._dispatch(t.expr) - - def _List(self, t): - self._write("[") - for i,node in enumerate(t.nodes): - self._dispatch(node) - if i < len(t.nodes)-1: - self._write(", ") - self._write("]") - - def _Module(self, t): - if t.doc is not None: - self._dispatch(t.doc) - self._dispatch(t.node) - - def _Mul(self, t): - self.__binary_op(t, '*') - - def _Name(self, t): - self._write(t.name) - - def _NoneType(self, t): - self._write("None") - - def _Not(self, t): - self._write('not (') - self._dispatch(t.expr) - self._write(')') - - def _Or(self, t): - self._write(" (") - for i, node in enumerate(t.nodes): - self._dispatch(node) - if i != len(t.nodes)-1: - self._write(") or (") - self._write(")") - - def _Pass(self, t): - self._write("pass\n") - - def _Printnl(self, t): - self._fill("print ") - if t.dest: - self._write(">> ") - self._dispatch(t.dest) - self._write(", ") - comma = False - for node in t.nodes: - if comma: self._write(', ') - else: comma = True - self._dispatch(node) - - def _Power(self, t): - self.__binary_op(t, '**') - - def _Return(self, t): - self._fill("return ") - if t.value: - if isinstance(t.value, Tuple): - text = ', '.join([ name.name for name in t.value.asList() ]) - self._write(text) - else: - self._dispatch(t.value) - if not self._do_indent: - self._write('; ') - - def _Slice(self, t): - self._dispatch(t.expr) - self._write("[") - if t.lower: - self._dispatch(t.lower) - self._write(":") - if t.upper: - self._dispatch(t.upper) - #if t.step: - # self._write(":") - # self._dispatch(t.step) - self._write("]") - - def _Sliceobj(self, t): - for i, node in enumerate(t.nodes): - if i != 0: - self._write(":") - if not (isinstance(node, Const) and node.value is None): - self._dispatch(node) - - def _Stmt(self, tree): - for node in tree.nodes: - self._dispatch(node) - - def _Sub(self, t): - self.__binary_op(t, '-') - - def _Subscript(self, t): - self._dispatch(t.expr) - self._write("[") - for i, value in enumerate(t.subs): - if i != 0: - self._write(",") - self._dispatch(value) - self._write("]") - - def _TryExcept(self, t): - self._fill("try") - self._enter() - self._dispatch(t.body) - self._leave() - - for handler in t.handlers: - self._fill('except ') - self._dispatch(handler[0]) - if handler[1] is not None: - self._write(', ') - self._dispatch(handler[1]) - self._enter() - self._dispatch(handler[2]) - self._leave() - - if t.else_: - self._fill("else") - self._enter() - self._dispatch(t.else_) - self._leave() - - def _Tuple(self, t): - - if not t.nodes: - # Empty tuple. - self._write("()") - else: - self._write("(") - - # _write each elements, separated by a comma. 
- for element in t.nodes[:-1]: - self._dispatch(element) - self._write(", ") - - # Handle the last one without writing comma - last_element = t.nodes[-1] - self._dispatch(last_element) - - self._write(")") - - def _UnaryAdd(self, t): - self._write("+") - self._dispatch(t.expr) - - def _UnarySub(self, t): - self._write("-") - self._dispatch(t.expr) - - def _With(self, t): - self._fill('with ') - self._dispatch(t.expr) - if t.vars: - self._write(' as ') - self._dispatch(t.vars.name) - self._enter() - self._dispatch(t.body) - self._leave() - self._write('\n') - - def _int(self, t): - self._write(repr(t)) - - def __binary_op(self, t, symbol): - # Check if parenthesis are needed on left side and then dispatch - has_paren = False - left_class = str(t.left.__class__) - if (left_class in op_precedence.keys() and - op_precedence[left_class] < op_precedence[str(t.__class__)]): - has_paren = True - if has_paren: - self._write('(') - self._dispatch(t.left) - if has_paren: - self._write(')') - # Write the appropriate symbol for operator - self._write(symbol) - # Check if parenthesis are needed on the right side and then dispatch - has_paren = False - right_class = str(t.right.__class__) - if (right_class in op_precedence.keys() and - op_precedence[right_class] < op_precedence[str(t.__class__)]): - has_paren = True - if has_paren: - self._write('(') - self._dispatch(t.right) - if has_paren: - self._write(')') - - def _float(self, t): - # if t is 0.1, str(t)->'0.1' while repr(t)->'0.1000000000001' - # We prefer str here. - self._write(str(t)) - - def _str(self, t): - self._write(repr(t)) - - def _tuple(self, t): - self._write(str(t)) - - ######################################################################### - # These are the methods from the _ast modules unparse. - # - # As our needs to handle more advanced code increase, we may want to - # modify some of the methods below so that they work for compiler.ast. - ######################################################################### - -# # stmt -# def _Expr(self, tree): -# self._fill() -# self._dispatch(tree.value) -# -# def _Import(self, t): -# self._fill("import ") -# first = True -# for a in t.names: -# if first: -# first = False -# else: -# self._write(", ") -# self._write(a.name) -# if a.asname: -# self._write(" as "+a.asname) -# -## def _ImportFrom(self, t): -## self._fill("from ") -## self._write(t.module) -## self._write(" import ") -## for i, a in enumerate(t.names): -## if i == 0: -## self._write(", ") -## self._write(a.name) -## if a.asname: -## self._write(" as "+a.asname) -## # XXX(jpe) what is level for? 
-## -# -# def _Break(self, t): -# self._fill("break") -# -# def _Continue(self, t): -# self._fill("continue") -# -# def _Delete(self, t): -# self._fill("del ") -# self._dispatch(t.targets) -# -# def _Assert(self, t): -# self._fill("assert ") -# self._dispatch(t.test) -# if t.msg: -# self._write(", ") -# self._dispatch(t.msg) -# -# def _Exec(self, t): -# self._fill("exec ") -# self._dispatch(t.body) -# if t.globals: -# self._write(" in ") -# self._dispatch(t.globals) -# if t.locals: -# self._write(", ") -# self._dispatch(t.locals) -# -# def _Print(self, t): -# self._fill("print ") -# do_comma = False -# if t.dest: -# self._write(">>") -# self._dispatch(t.dest) -# do_comma = True -# for e in t.values: -# if do_comma:self._write(", ") -# else:do_comma=True -# self._dispatch(e) -# if not t.nl: -# self._write(",") -# -# def _Global(self, t): -# self._fill("global") -# for i, n in enumerate(t.names): -# if i != 0: -# self._write(",") -# self._write(" " + n) -# -# def _Yield(self, t): -# self._fill("yield") -# if t.value: -# self._write(" (") -# self._dispatch(t.value) -# self._write(")") -# -# def _Raise(self, t): -# self._fill('raise ') -# if t.type: -# self._dispatch(t.type) -# if t.inst: -# self._write(", ") -# self._dispatch(t.inst) -# if t.tback: -# self._write(", ") -# self._dispatch(t.tback) -# -# -# def _TryFinally(self, t): -# self._fill("try") -# self._enter() -# self._dispatch(t.body) -# self._leave() -# -# self._fill("finally") -# self._enter() -# self._dispatch(t.finalbody) -# self._leave() -# -# def _excepthandler(self, t): -# self._fill("except ") -# if t.type: -# self._dispatch(t.type) -# if t.name: -# self._write(", ") -# self._dispatch(t.name) -# self._enter() -# self._dispatch(t.body) -# self._leave() -# -# def _ClassDef(self, t): -# self._write("\n") -# self._fill("class "+t.name) -# if t.bases: -# self._write("(") -# for a in t.bases: -# self._dispatch(a) -# self._write(", ") -# self._write(")") -# self._enter() -# self._dispatch(t.body) -# self._leave() -# -# def _FunctionDef(self, t): -# self._write("\n") -# for deco in t.decorators: -# self._fill("@") -# self._dispatch(deco) -# self._fill("def "+t.name + "(") -# self._dispatch(t.args) -# self._write(")") -# self._enter() -# self._dispatch(t.body) -# self._leave() -# -# def _For(self, t): -# self._fill("for ") -# self._dispatch(t.target) -# self._write(" in ") -# self._dispatch(t.iter) -# self._enter() -# self._dispatch(t.body) -# self._leave() -# if t.orelse: -# self._fill("else") -# self._enter() -# self._dispatch(t.orelse) -# self._leave -# -# def _While(self, t): -# self._fill("while ") -# self._dispatch(t.test) -# self._enter() -# self._dispatch(t.body) -# self._leave() -# if t.orelse: -# self._fill("else") -# self._enter() -# self._dispatch(t.orelse) -# self._leave -# -# # expr -# def _Str(self, tree): -# self._write(repr(tree.s)) -## -# def _Repr(self, t): -# self._write("`") -# self._dispatch(t.value) -# self._write("`") -# -# def _Num(self, t): -# self._write(repr(t.n)) -# -# def _ListComp(self, t): -# self._write("[") -# self._dispatch(t.elt) -# for gen in t.generators: -# self._dispatch(gen) -# self._write("]") -# -# def _GeneratorExp(self, t): -# self._write("(") -# self._dispatch(t.elt) -# for gen in t.generators: -# self._dispatch(gen) -# self._write(")") -# -# def _comprehension(self, t): -# self._write(" for ") -# self._dispatch(t.target) -# self._write(" in ") -# self._dispatch(t.iter) -# for if_clause in t.ifs: -# self._write(" if ") -# self._dispatch(if_clause) -# -# def _IfExp(self, t): -# 
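The unparser being removed keys its parenthesization off the `op_precedence` table: a child expression is wrapped in parentheses only when its operator binds less tightly than its parent's. A minimal sketch of the same idea written against the Python 3 `ast` module (the `compiler` package used by this file exists only on Python 2); the `PREC`, `SYM`, and `unparse_expr` names are illustrative, not Py-ART code::

    import ast

    # Only the operators the removed unparser knows about are handled here.
    PREC = {ast.Pow: 3, ast.Mult: 2, ast.Div: 2, ast.Add: 1, ast.Sub: 1}
    SYM = {ast.Pow: '**', ast.Mult: '*', ast.Div: '/', ast.Add: '+', ast.Sub: '-'}

    def unparse_expr(node):
        """Rebuild source text, parenthesizing a child only when its
        operator binds less tightly than its parent's."""
        if isinstance(node, ast.Expression):
            return unparse_expr(node.body)
        if isinstance(node, ast.BinOp):
            parent_prec = PREC.get(type(node.op), 9)
            def side(child):
                text = unparse_expr(child)
                if (isinstance(child, ast.BinOp)
                        and PREC.get(type(child.op), 9) < parent_prec):
                    text = '(%s)' % text
                return text
            return side(node.left) + SYM[type(node.op)] + side(node.right)
        if isinstance(node, ast.Constant):
            return repr(node.value)
        if isinstance(node, ast.Name):
            return node.id
        raise NotImplementedError(type(node).__name__)

    print(unparse_expr(ast.parse('(a + b) * c - d', mode='eval')))  # (a+b)*c-d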
self._dispatch(t.body) -# self._write(" if ") -# self._dispatch(t.test) -# if t.orelse: -# self._write(" else ") -# self._dispatch(t.orelse) -# -# unop = {"Invert":"~", "Not": "not", "UAdd":"+", "USub":"-"} -# def _UnaryOp(self, t): -# self._write(self.unop[t.op.__class__.__name__]) -# self._write("(") -# self._dispatch(t.operand) -# self._write(")") -# -# binop = { "Add":"+", "Sub":"-", "Mult":"*", "Div":"/", "Mod":"%", -# "LShift":">>", "RShift":"<<", "BitOr":"|", "BitXor":"^", "BitAnd":"&", -# "FloorDiv":"//", "Pow": "**"} -# def _BinOp(self, t): -# self._write("(") -# self._dispatch(t.left) -# self._write(")" + self.binop[t.op.__class__.__name__] + "(") -# self._dispatch(t.right) -# self._write(")") -# -# boolops = {_ast.And: 'and', _ast.Or: 'or'} -# def _BoolOp(self, t): -# self._write("(") -# self._dispatch(t.values[0]) -# for v in t.values[1:]: -# self._write(" %s " % self.boolops[t.op.__class__]) -# self._dispatch(v) -# self._write(")") -# -# def _Attribute(self,t): -# self._dispatch(t.value) -# self._write(".") -# self._write(t.attr) -# -## def _Call(self, t): -## self._dispatch(t.func) -## self._write("(") -## comma = False -## for e in t.args: -## if comma: self._write(", ") -## else: comma = True -## self._dispatch(e) -## for e in t.keywords: -## if comma: self._write(", ") -## else: comma = True -## self._dispatch(e) -## if t.starargs: -## if comma: self._write(", ") -## else: comma = True -## self._write("*") -## self._dispatch(t.starargs) -## if t.kwargs: -## if comma: self._write(", ") -## else: comma = True -## self._write("**") -## self._dispatch(t.kwargs) -## self._write(")") -# -# # slice -# def _Index(self, t): -# self._dispatch(t.value) -# -# def _ExtSlice(self, t): -# for i, d in enumerate(t.dims): -# if i != 0: -# self._write(': ') -# self._dispatch(d) -# -# # others -# def _arguments(self, t): -# first = True -# nonDef = len(t.args)-len(t.defaults) -# for a in t.args[0:nonDef]: -# if first:first = False -# else: self._write(", ") -# self._dispatch(a) -# for a,d in zip(t.args[nonDef:], t.defaults): -# if first:first = False -# else: self._write(", ") -# self._dispatch(a), -# self._write("=") -# self._dispatch(d) -# if t.vararg: -# if first:first = False -# else: self._write(", ") -# self._write("*"+t.vararg) -# if t.kwarg: -# if first:first = False -# else: self._write(", ") -# self._write("**"+t.kwarg) -# -## def _keyword(self, t): -## self._write(t.arg) -## self._write("=") -## self._dispatch(t.value) -# -# def _Lambda(self, t): -# self._write("lambda ") -# self._dispatch(t.args) -# self._write(": ") -# self._dispatch(t.body) - - - diff --git a/doc/sphinxext/numpydoc/docscrape.py b/doc/sphinxext/numpydoc/docscrape.py deleted file mode 100644 index ed1760b0cc..0000000000 --- a/doc/sphinxext/numpydoc/docscrape.py +++ /dev/null @@ -1,585 +0,0 @@ -"""Extract reference documentation from the NumPy source tree. - -""" -from __future__ import division, absolute_import, print_function - -import inspect -import textwrap -import re -import pydoc -from warnings import warn -import collections -import sys - - -class Reader(object): - """A line-based string reader. - - """ - def __init__(self, data): - """ - Parameters - ---------- - data : str - String with lines separated by '\n'. 
- - """ - if isinstance(data, list): - self._str = data - else: - self._str = data.split('\n') # store string as list of lines - - self.reset() - - def __getitem__(self, n): - return self._str[n] - - def reset(self): - self._l = 0 # current line nr - - def read(self): - if not self.eof(): - out = self[self._l] - self._l += 1 - return out - else: - return '' - - def seek_next_non_empty_line(self): - for l in self[self._l:]: - if l.strip(): - break - else: - self._l += 1 - - def eof(self): - return self._l >= len(self._str) - - def read_to_condition(self, condition_func): - start = self._l - for line in self[start:]: - if condition_func(line): - return self[start:self._l] - self._l += 1 - if self.eof(): - return self[start:self._l+1] - return [] - - def read_to_next_empty_line(self): - self.seek_next_non_empty_line() - - def is_empty(line): - return not line.strip() - - return self.read_to_condition(is_empty) - - def read_to_next_unindented_line(self): - def is_unindented(line): - return (line.strip() and (len(line.lstrip()) == len(line))) - return self.read_to_condition(is_unindented) - - def peek(self, n=0): - if self._l + n < len(self._str): - return self[self._l + n] - else: - return '' - - def is_empty(self): - return not ''.join(self._str).strip() - - -class ParseError(Exception): - def __str__(self): - message = self.message - if hasattr(self, 'docstring'): - message = "%s in %r" % (message, self.docstring) - return message - - -class NumpyDocString(collections.Mapping): - def __init__(self, docstring, config={}): - orig_docstring = docstring - docstring = textwrap.dedent(docstring).split('\n') - - self._doc = Reader(docstring) - self._parsed_data = { - 'Signature': '', - 'Summary': [''], - 'Extended Summary': [], - 'Parameters': [], - 'Returns': [], - 'Yields': [], - 'Raises': [], - 'Warns': [], - 'Other Parameters': [], - 'Attributes': [], - 'Methods': [], - 'See Also': [], - 'Notes': [], - 'Warnings': [], - 'References': '', - 'Examples': '', - 'index': {} - } - - try: - self._parse() - except ParseError as e: - e.docstring = orig_docstring - raise - - def __getitem__(self, key): - return self._parsed_data[key] - - def __setitem__(self, key, val): - if key not in self._parsed_data: - warn("Unknown section %s" % key) - else: - self._parsed_data[key] = val - - def __iter__(self): - return iter(self._parsed_data) - - def __len__(self): - return len(self._parsed_data) - - def _is_at_section(self): - self._doc.seek_next_non_empty_line() - - if self._doc.eof(): - return False - - l1 = self._doc.peek().strip() # e.g. Parameters - - if l1.startswith('.. 
index::'): - return True - - l2 = self._doc.peek(1).strip() # ---------- or ========== - return l2.startswith('-'*len(l1)) or l2.startswith('='*len(l1)) - - def _strip(self, doc): - i = 0 - j = 0 - for i, line in enumerate(doc): - if line.strip(): - break - - for j, line in enumerate(doc[::-1]): - if line.strip(): - break - - return doc[i:len(doc)-j] - - def _read_to_next_section(self): - section = self._doc.read_to_next_empty_line() - - while not self._is_at_section() and not self._doc.eof(): - if not self._doc.peek(-1).strip(): # previous line was empty - section += [''] - - section += self._doc.read_to_next_empty_line() - - return section - - def _read_sections(self): - while not self._doc.eof(): - data = self._read_to_next_section() - name = data[0].strip() - - if name.startswith('..'): # index section - yield name, data[1:] - elif len(data) < 2: - yield StopIteration - else: - yield name, self._strip(data[2:]) - - def _parse_param_list(self, content): - r = Reader(content) - params = [] - while not r.eof(): - header = r.read().strip() - if ' : ' in header: - arg_name, arg_type = header.split(' : ')[:2] - else: - arg_name, arg_type = header, '' - - desc = r.read_to_next_unindented_line() - desc = dedent_lines(desc) - - params.append((arg_name, arg_type, desc)) - - return params - - _name_rgx = re.compile(r"^\s*(:(?P\w+):`(?P[a-zA-Z0-9_.-]+)`|" - r" (?P[a-zA-Z0-9_.-]+))\s*", re.X) - - def _parse_see_also(self, content): - """ - func_name : Descriptive text - continued text - another_func_name : Descriptive text - func_name1, func_name2, :meth:`func_name`, func_name3 - - """ - items = [] - - def parse_item_name(text): - """Match ':role:`name`' or 'name'""" - m = self._name_rgx.match(text) - if m: - g = m.groups() - if g[1] is None: - return g[3], None - else: - return g[2], g[1] - raise ParseError("%s is not a item name" % text) - - def push_item(name, rest): - if not name: - return - name, role = parse_item_name(name) - items.append((name, list(rest), role)) - del rest[:] - - current_func = None - rest = [] - - for line in content: - if not line.strip(): - continue - - m = self._name_rgx.match(line) - if m and line[m.end():].strip().startswith(':'): - push_item(current_func, rest) - current_func, line = line[:m.end()], line[m.end():] - rest = [line.split(':', 1)[1].strip()] - if not rest[0]: - rest = [] - elif not line.startswith(' '): - push_item(current_func, rest) - current_func = None - if ',' in line: - for func in line.split(','): - if func.strip(): - push_item(func, []) - elif line.strip(): - current_func = line - elif current_func is not None: - rest.append(line.strip()) - push_item(current_func, rest) - return items - - def _parse_index(self, section, content): - """ - .. 
index: default - :refguide: something, else, and more - - """ - def strip_each_in(lst): - return [s.strip() for s in lst] - - out = {} - section = section.split('::') - if len(section) > 1: - out['default'] = strip_each_in(section[1].split(','))[0] - for line in content: - line = line.split(':') - if len(line) > 2: - out[line[1]] = strip_each_in(line[2].split(',')) - return out - - def _parse_summary(self): - """Grab signature (if given) and summary""" - if self._is_at_section(): - return - - # If several signatures present, take the last one - while True: - summary = self._doc.read_to_next_empty_line() - summary_str = " ".join([s.strip() for s in summary]).strip() - if re.compile('^([\w., ]+=)?\s*[\w\.]+\(.*\)$').match(summary_str): - self['Signature'] = summary_str - if not self._is_at_section(): - continue - break - - if summary is not None: - self['Summary'] = summary - - if not self._is_at_section(): - self['Extended Summary'] = self._read_to_next_section() - - def _parse(self): - self._doc.reset() - self._parse_summary() - - sections = list(self._read_sections()) - section_names = set([section for section, content in sections]) - - has_returns = 'Returns' in section_names - has_yields = 'Yields' in section_names - # We could do more tests, but we are not. Arbitrarily. - if has_returns and has_yields: - msg = 'Docstring contains both a Returns and Yields section.' - raise ValueError(msg) - - for (section, content) in sections: - if not section.startswith('..'): - section = (s.capitalize() for s in section.split(' ')) - section = ' '.join(section) - if section in ('Parameters', 'Returns', 'Yields', 'Raises', - 'Warns', 'Other Parameters', 'Attributes', - 'Methods'): - self[section] = self._parse_param_list(content) - elif section.startswith('.. 
index::'): - self['index'] = self._parse_index(section, content) - elif section == 'See Also': - self['See Also'] = self._parse_see_also(content) - else: - self[section] = content - - # string conversion routines - - def _str_header(self, name, symbol='-'): - return [name, len(name)*symbol] - - def _str_indent(self, doc, indent=4): - out = [] - for line in doc: - out += [' '*indent + line] - return out - - def _str_signature(self): - if self['Signature']: - return [self['Signature'].replace('*', '\*')] + [''] - else: - return [''] - - def _str_summary(self): - if self['Summary']: - return self['Summary'] + [''] - else: - return [] - - def _str_extended_summary(self): - if self['Extended Summary']: - return self['Extended Summary'] + [''] - else: - return [] - - def _str_param_list(self, name): - out = [] - if self[name]: - out += self._str_header(name) - for param, param_type, desc in self[name]: - if param_type: - out += ['%s : %s' % (param, param_type)] - else: - out += [param] - out += self._str_indent(desc) - out += [''] - return out - - def _str_section(self, name): - out = [] - if self[name]: - out += self._str_header(name) - out += self[name] - out += [''] - return out - - def _str_see_also(self, func_role): - if not self['See Also']: - return [] - out = [] - out += self._str_header("See Also") - last_had_desc = True - for func, desc, role in self['See Also']: - if role: - link = ':%s:`%s`' % (role, func) - elif func_role: - link = ':%s:`%s`' % (func_role, func) - else: - link = "`%s`_" % func - if desc or last_had_desc: - out += [''] - out += [link] - else: - out[-1] += ", %s" % link - if desc: - out += self._str_indent([' '.join(desc)]) - last_had_desc = True - else: - last_had_desc = False - out += [''] - return out - - def _str_index(self): - idx = self['index'] - out = [] - out += ['.. index:: %s' % idx.get('default', '')] - for section, references in idx.items(): - if section == 'default': - continue - out += [' :%s: %s' % (section, ', '.join(references))] - return out - - def __str__(self, func_role=''): - out = [] - out += self._str_signature() - out += self._str_summary() - out += self._str_extended_summary() - for param_list in ('Parameters', 'Returns', 'Yields', - 'Other Parameters', 'Raises', 'Warns'): - out += self._str_param_list(param_list) - out += self._str_section('Warnings') - out += self._str_see_also(func_role) - for s in ('Notes', 'References', 'Examples'): - out += self._str_section(s) - for param_list in ('Attributes', 'Methods'): - out += self._str_param_list(param_list) - out += self._str_index() - return '\n'.join(out) - - -def indent(str, indent=4): - indent_str = ' '*indent - if str is None: - return indent_str - lines = str.split('\n') - return '\n'.join(indent_str + l for l in lines) - - -def dedent_lines(lines): - """Deindent a list of lines maximally""" - return textwrap.dedent("\n".join(lines)).split("\n") - - -def header(text, style='-'): - return text + '\n' + style*len(text) + '\n' - - -class FunctionDoc(NumpyDocString): - def __init__(self, func, role='func', doc=None, config={}): - self._f = func - self._role = role # e.g. 
"func" or "meth" - - if doc is None: - if func is None: - raise ValueError("No function or docstring given") - doc = inspect.getdoc(func) or '' - NumpyDocString.__init__(self, doc) - - if not self['Signature'] and func is not None: - func, func_name = self.get_func() - try: - try: - signature = str(inspect.signature(func)) - except (AttributeError, ValueError): - # try to read signature, backward compat for older Python - if sys.version_info[0] >= 3: - argspec = inspect.getfullargspec(func) - else: - argspec = inspect.getargspec(func) - signature = inspect.formatargspec(*argspec) - signature = '%s%s' % (func_name, signature.replace('*', '\*')) - except TypeError: - signature = '%s()' % func_name - self['Signature'] = signature - - def get_func(self): - func_name = getattr(self._f, '__name__', self.__class__.__name__) - if inspect.isclass(self._f): - func = getattr(self._f, '__call__', self._f.__init__) - else: - func = self._f - return func, func_name - - def __str__(self): - out = '' - - func, func_name = self.get_func() - signature = self['Signature'].replace('*', '\*') - - roles = {'func': 'function', - 'meth': 'method'} - - if self._role: - if self._role not in roles: - print("Warning: invalid role %s" % self._role) - out += '.. %s:: %s\n \n\n' % (roles.get(self._role, ''), - func_name) - - out += super(FunctionDoc, self).__str__(func_role=self._role) - return out - - -class ClassDoc(NumpyDocString): - - extra_public_methods = ['__call__'] - - def __init__(self, cls, doc=None, modulename='', func_doc=FunctionDoc, - config={}): - if not inspect.isclass(cls) and cls is not None: - raise ValueError("Expected a class or None, but got %r" % cls) - self._cls = cls - - self.show_inherited_members = config.get( - 'show_inherited_class_members', True) - - if modulename and not modulename.endswith('.'): - modulename += '.' 
- self._mod = modulename - - if doc is None: - if cls is None: - raise ValueError("No class or documentation string given") - doc = pydoc.getdoc(cls) - - NumpyDocString.__init__(self, doc) - - if config.get('show_class_members', True): - def splitlines_x(s): - if not s: - return [] - else: - return s.splitlines() - - for field, items in [('Methods', self.methods), - ('Attributes', self.properties)]: - if not self[field]: - doc_list = [] - for name in sorted(items): - try: - doc_item = pydoc.getdoc(getattr(self._cls, name)) - doc_list.append((name, '', splitlines_x(doc_item))) - except AttributeError: - pass # method doesn't exist - self[field] = doc_list - - @property - def methods(self): - if self._cls is None: - return [] - return [name for name, func in inspect.getmembers(self._cls) - if ((not name.startswith('_') - or name in self.extra_public_methods) - and isinstance(func, collections.Callable) - and self._is_show_member(name))] - - @property - def properties(self): - if self._cls is None: - return [] - return [name for name, func in inspect.getmembers(self._cls) - if (not name.startswith('_') and - (func is None or isinstance(func, property) or - inspect.isgetsetdescriptor(func)) - and self._is_show_member(name))] - - def _is_show_member(self, name): - if self.show_inherited_members: - return True # show all class members - if name not in self._cls.__dict__: - return False # class member is inherited, we do not show it - return True diff --git a/doc/sphinxext/numpydoc/docscrape_sphinx.py b/doc/sphinxext/numpydoc/docscrape_sphinx.py deleted file mode 100644 index 1eaa6a9b25..0000000000 --- a/doc/sphinxext/numpydoc/docscrape_sphinx.py +++ /dev/null @@ -1,284 +0,0 @@ -from __future__ import division, absolute_import, print_function - -import sys -import re -import inspect -import textwrap -import pydoc -import sphinx -import collections - -from .docscrape import NumpyDocString, FunctionDoc, ClassDoc - -if sys.version_info[0] >= 3: - sixu = lambda s: s -else: - sixu = lambda s: unicode(s, 'unicode_escape') - - -class SphinxDocString(NumpyDocString): - def __init__(self, docstring, config={}): - NumpyDocString.__init__(self, docstring, config=config) - self.load_config(config) - - def load_config(self, config): - self.use_plots = config.get('use_plots', False) - self.class_members_toctree = config.get('class_members_toctree', True) - - # string conversion routines - def _str_header(self, name, symbol='`'): - return ['.. 
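For reference, the scraper deleted here is the same `NumpyDocString`/`FunctionDoc`/`ClassDoc` machinery shipped by the standalone `numpydoc` package: it parses a NumPy-format docstring into named sections that can be read like a mapping. A minimal usage sketch, assuming `numpydoc` is installed from PyPI or conda rather than vendored::

    from numpydoc.docscrape import NumpyDocString

    doc = NumpyDocString("""\
    Add two numbers.

    Parameters
    ----------
    a : float
        First operand.
    b : float
        Second operand.

    Returns
    -------
    float
        The sum ``a + b``.
    """)

    # Each Parameters entry carries (name, type, description lines).
    for name, type_, desc in doc['Parameters']:
        print(name, ':', type_)        # a : float, then b : float
    print(doc['Summary'])              # ['Add two numbers.']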
rubric:: ' + name, ''] - - def _str_field_list(self, name): - return [':' + name + ':'] - - def _str_indent(self, doc, indent=4): - out = [] - for line in doc: - out += [' '*indent + line] - return out - - def _str_signature(self): - return [''] - if self['Signature']: - return ['``%s``' % self['Signature']] + [''] - else: - return [''] - - def _str_summary(self): - return self['Summary'] + [''] - - def _str_extended_summary(self): - return self['Extended Summary'] + [''] - - def _str_returns(self, name='Returns'): - out = [] - if self[name]: - out += self._str_field_list(name) - out += [''] - for param, param_type, desc in self[name]: - if param_type: - out += self._str_indent(['**%s** : %s' % (param.strip(), - param_type)]) - else: - out += self._str_indent([param.strip()]) - if desc: - out += [''] - out += self._str_indent(desc, 8) - out += [''] - return out - - def _str_param_list(self, name): - out = [] - if self[name]: - out += self._str_field_list(name) - out += [''] - for param, param_type, desc in self[name]: - if param_type: - out += self._str_indent(['**%s** : %s' % (param.strip(), - param_type)]) - else: - out += self._str_indent(['**%s**' % param.strip()]) - if desc: - out += [''] - out += self._str_indent(desc, 8) - out += [''] - return out - - @property - def _obj(self): - if hasattr(self, '_cls'): - return self._cls - elif hasattr(self, '_f'): - return self._f - return None - - def _str_member_list(self, name): - """ - Generate a member listing, autosummary:: table where possible, - and a table where not. - - """ - out = [] - if self[name]: - out += ['.. rubric:: %s' % name, ''] - prefix = getattr(self, '_name', '') - - if prefix: - prefix = '~%s.' % prefix - - autosum = [] - others = [] - for param, param_type, desc in self[name]: - param = param.strip() - - # Check if the referenced member can have a docstring or not - param_obj = getattr(self._obj, param, None) - if not (callable(param_obj) - or isinstance(param_obj, property) - or inspect.isgetsetdescriptor(param_obj)): - param_obj = None - - if param_obj and (pydoc.getdoc(param_obj) or not desc): - # Referenced object has a docstring - autosum += [" %s%s" % (prefix, param)] - else: - others.append((param, param_type, desc)) - - if autosum: - out += ['.. autosummary::'] - if self.class_members_toctree: - out += [' :toctree:'] - out += [''] + autosum - - if others: - maxlen_0 = max(3, max([len(x[0]) for x in others])) - hdr = sixu("=")*maxlen_0 + sixu(" ") + sixu("=")*10 - fmt = sixu('%%%ds %%s ') % (maxlen_0,) - out += ['', '', hdr] - for param, param_type, desc in others: - desc = sixu(" ").join(x.strip() for x in desc).strip() - if param_type: - desc = "(%s) %s" % (param_type, desc) - out += [fmt % (param.strip(), desc)] - out += [hdr] - out += [''] - return out - - def _str_section(self, name): - out = [] - if self[name]: - out += self._str_header(name) - out += [''] - content = textwrap.dedent("\n".join(self[name])).split("\n") - out += content - out += [''] - return out - - def _str_see_also(self, func_role): - out = [] - if self['See Also']: - see_also = super(SphinxDocString, self)._str_see_also(func_role) - out = ['.. seealso::', ''] - out += self._str_indent(see_also[2:]) - return out - - def _str_warnings(self): - out = [] - if self['Warnings']: - out = ['.. warning::', ''] - out += self._str_indent(self['Warnings']) - return out - - def _str_index(self): - idx = self['index'] - out = [] - if len(idx) == 0: - return out - - out += ['.. 
index:: %s' % idx.get('default', '')] - for section, references in idx.items(): - if section == 'default': - continue - elif section == 'refguide': - out += [' single: %s' % (', '.join(references))] - else: - out += [' %s: %s' % (section, ','.join(references))] - return out - - def _str_references(self): - out = [] - if self['References']: - out += self._str_header('References') - if isinstance(self['References'], str): - self['References'] = [self['References']] - out.extend(self['References']) - out += [''] - # Latex collects all references to a separate bibliography, - # so we need to insert links to it - if sphinx.__version__ >= "0.6": - out += ['.. only:: latex', ''] - else: - out += ['.. latexonly::', ''] - items = [] - for line in self['References']: - m = re.match(r'.. \[([a-z0-9._-]+)\]', line, re.I) - if m: - items.append(m.group(1)) - out += [' ' + ", ".join(["[%s]_" % item for item in items]), ''] - return out - - def _str_examples(self): - examples_str = "\n".join(self['Examples']) - - if (self.use_plots and 'import matplotlib' in examples_str - and 'plot::' not in examples_str): - out = [] - out += self._str_header('Examples') - out += ['.. plot::', ''] - out += self._str_indent(self['Examples']) - out += [''] - return out - else: - return self._str_section('Examples') - - def __str__(self, indent=0, func_role="obj"): - out = [] - out += self._str_signature() - out += self._str_index() + [''] - out += self._str_summary() - out += self._str_extended_summary() - out += self._str_param_list('Parameters') - out += self._str_returns('Returns') - out += self._str_returns('Yields') - for param_list in ('Other Parameters', 'Raises', 'Warns'): - out += self._str_param_list(param_list) - out += self._str_warnings() - out += self._str_see_also(func_role) - out += self._str_section('Notes') - out += self._str_references() - out += self._str_examples() - for param_list in ('Attributes', 'Methods'): - out += self._str_member_list(param_list) - out = self._str_indent(out, indent) - return '\n'.join(out) - - -class SphinxFunctionDoc(SphinxDocString, FunctionDoc): - def __init__(self, obj, doc=None, config={}): - self.load_config(config) - FunctionDoc.__init__(self, obj, doc=doc, config=config) - - -class SphinxClassDoc(SphinxDocString, ClassDoc): - def __init__(self, obj, doc=None, func_doc=None, config={}): - self.load_config(config) - ClassDoc.__init__(self, obj, doc=doc, func_doc=None, config=config) - - -class SphinxObjDoc(SphinxDocString): - def __init__(self, obj, doc=None, config={}): - self._f = obj - self.load_config(config) - SphinxDocString.__init__(self, doc, config=config) - - -def get_doc_object(obj, what=None, doc=None, config={}): - if what is None: - if inspect.isclass(obj): - what = 'class' - elif inspect.ismodule(obj): - what = 'module' - elif isinstance(obj, collections.Callable): - what = 'function' - else: - what = 'object' - if what == 'class': - return SphinxClassDoc(obj, func_doc=SphinxFunctionDoc, doc=doc, - config=config) - elif what in ('function', 'method'): - return SphinxFunctionDoc(obj, doc=doc, config=config) - else: - if doc is None: - doc = pydoc.getdoc(obj) - return SphinxObjDoc(obj, doc, config=config) diff --git a/doc/sphinxext/numpydoc/linkcode.py b/doc/sphinxext/numpydoc/linkcode.py deleted file mode 100644 index 1ad3ab82cb..0000000000 --- a/doc/sphinxext/numpydoc/linkcode.py +++ /dev/null @@ -1,83 +0,0 @@ -# -*- coding: utf-8 -*- -""" - linkcode - ~~~~~~~~ - - Add external links to module code in Python object descriptions. 
- - :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS. - :license: BSD, see LICENSE for details. - -""" -from __future__ import division, absolute_import, print_function - -import warnings -import collections - -warnings.warn("This extension has been accepted to Sphinx upstream. " - "Use the version from there (Sphinx >= 1.2) " - "https://bitbucket.org/birkenfeld/sphinx/pull-request/47/sphinxextlinkcode", - FutureWarning, stacklevel=1) - - -from docutils import nodes - -from sphinx import addnodes -from sphinx.locale import _ -from sphinx.errors import SphinxError - -class LinkcodeError(SphinxError): - category = "linkcode error" - -def doctree_read(app, doctree): - env = app.builder.env - - resolve_target = getattr(env.config, 'linkcode_resolve', None) - if not isinstance(env.config.linkcode_resolve, collections.Callable): - raise LinkcodeError( - "Function `linkcode_resolve` is not given in conf.py") - - domain_keys = dict( - py=['module', 'fullname'], - c=['names'], - cpp=['names'], - js=['object', 'fullname'], - ) - - for objnode in doctree.traverse(addnodes.desc): - domain = objnode.get('domain') - uris = set() - for signode in objnode: - if not isinstance(signode, addnodes.desc_signature): - continue - - # Convert signode to a specified format - info = {} - for key in domain_keys.get(domain, []): - value = signode.get(key) - if not value: - value = '' - info[key] = value - if not info: - continue - - # Call user code to resolve the link - uri = resolve_target(domain, info) - if not uri: - # no source - continue - - if uri in uris or not uri: - # only one link per name, please - continue - uris.add(uri) - - onlynode = addnodes.only(expr='html') - onlynode += nodes.reference('', '', internal=False, refuri=uri) - onlynode[0] += nodes.inline('', _('[source]'), - classes=['viewcode-link']) - signode += onlynode - -def setup(app): - app.connect('doctree-read', doctree_read) - app.add_config_value('linkcode_resolve', None, '') diff --git a/doc/sphinxext/numpydoc/numpydoc.py b/doc/sphinxext/numpydoc/numpydoc.py deleted file mode 100644 index 6aefe0c258..0000000000 --- a/doc/sphinxext/numpydoc/numpydoc.py +++ /dev/null @@ -1,205 +0,0 @@ -""" -======== -numpydoc -======== - -Sphinx extension that handles docstrings in the Numpy standard format. [1] - -It will: - -- Convert Parameters etc. sections to field lists. -- Convert See Also section to a See also entry. -- Renumber references. -- Extract the signature from the docstring, if it can't be determined - otherwise. - -.. 
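As the warning in the removed file notes, this `linkcode` copy was accepted into Sphinx itself, so a project normally enables ``sphinx.ext.linkcode`` and supplies the ``linkcode_resolve(domain, info)`` hook it expects in ``conf.py``, returning a URL (or ``None``) for each documented object. A hedged sketch; the GitHub URL and the module-to-path mapping are illustrative only::

    # conf.py sketch -- use Sphinx's maintained copy of the extension.
    extensions = ['sphinx.ext.linkcode']

    def linkcode_resolve(domain, info):
        """Return the URL for an object's [source] link, or None to skip it."""
        if domain != 'py' or not info.get('module'):
            return None
        # Illustrative mapping of a dotted module name to a GitHub blob URL.
        path = info['module'].replace('.', '/')
        return 'https://github.com/ARM-DOE/pyart/blob/master/%s.py' % path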
[1] https://github.com/numpy/numpy/blob/master/doc/HOWTO_DOCUMENT.rst.txt - -""" -from __future__ import division, absolute_import, print_function - -import sys -import re -import pydoc -import sphinx -import inspect -import collections - -if sphinx.__version__ < '1.0.1': - raise RuntimeError("Sphinx 1.0.1 or newer is required") - -from .docscrape_sphinx import get_doc_object, SphinxDocString - -if sys.version_info[0] >= 3: - sixu = lambda s: s -else: - sixu = lambda s: unicode(s, 'unicode_escape') - - -def mangle_docstrings(app, what, name, obj, options, lines, - reference_offset=[0]): - - cfg = {'use_plots': app.config.numpydoc_use_plots, - 'show_class_members': app.config.numpydoc_show_class_members, - 'show_inherited_class_members': - app.config.numpydoc_show_inherited_class_members, - 'class_members_toctree': app.config.numpydoc_class_members_toctree} - - u_NL = sixu('\n') - if what == 'module': - # Strip top title - pattern = '^\\s*[#*=]{4,}\\n[a-z0-9 -]+\\n[#*=]{4,}\\s*' - title_re = re.compile(sixu(pattern), re.I | re.S) - lines[:] = title_re.sub(sixu(''), u_NL.join(lines)).split(u_NL) - else: - doc = get_doc_object(obj, what, u_NL.join(lines), config=cfg) - if sys.version_info[0] >= 3: - doc = str(doc) - else: - doc = unicode(doc) - lines[:] = doc.split(u_NL) - - if (app.config.numpydoc_edit_link and hasattr(obj, '__name__') and - obj.__name__): - if hasattr(obj, '__module__'): - v = dict(full_name=sixu("%s.%s") % (obj.__module__, obj.__name__)) - else: - v = dict(full_name=obj.__name__) - lines += [sixu(''), sixu('.. htmlonly::'), sixu('')] - lines += [sixu(' %s') % x for x in - (app.config.numpydoc_edit_link % v).split("\n")] - - # replace reference numbers so that there are no duplicates - references = [] - for line in lines: - line = line.strip() - m = re.match(sixu('^.. \\[([a-z0-9_.-])\\]'), line, re.I) - if m: - references.append(m.group(1)) - - # start renaming from the longest string, to avoid overwriting parts - references.sort(key=lambda x: -len(x)) - if references: - for i, line in enumerate(lines): - for r in references: - if re.match(sixu('^\\d+$'), r): - new_r = sixu("R%d") % (reference_offset[0] + int(r)) - else: - new_r = sixu("%s%d") % (r, reference_offset[0]) - lines[i] = lines[i].replace(sixu('[%s]_') % r, - sixu('[%s]_') % new_r) - lines[i] = lines[i].replace(sixu('.. [%s]') % r, - sixu('.. 
[%s]') % new_r) - - reference_offset[0] += len(references) - - -def mangle_signature(app, what, name, obj, options, sig, retann): - # Do not try to inspect classes that don't define `__init__` - if (inspect.isclass(obj) and - (not hasattr(obj, '__init__') or - 'initializes x; see ' in pydoc.getdoc(obj.__init__))): - return '', '' - - if not (isinstance(obj, collections.Callable) or - hasattr(obj, '__argspec_is_invalid_')): - return - - if not hasattr(obj, '__doc__'): - return - - doc = SphinxDocString(pydoc.getdoc(obj)) - if doc['Signature']: - sig = re.sub(sixu("^[^(]*"), sixu(""), doc['Signature']) - return sig, sixu('') - - -def setup(app, get_doc_object_=get_doc_object): - if not hasattr(app, 'add_config_value'): - return # probably called by nose, better bail out - - global get_doc_object - get_doc_object = get_doc_object_ - - app.connect('autodoc-process-docstring', mangle_docstrings) - app.connect('autodoc-process-signature', mangle_signature) - app.add_config_value('numpydoc_edit_link', None, False) - app.add_config_value('numpydoc_use_plots', None, False) - app.add_config_value('numpydoc_show_class_members', True, True) - app.add_config_value('numpydoc_show_inherited_class_members', True, True) - app.add_config_value('numpydoc_class_members_toctree', True, True) - - # Extra mangling domains - app.add_domain(NumpyPythonDomain) - app.add_domain(NumpyCDomain) - - metadata = {'parallel_read_safe': True} - return metadata - -# ------------------------------------------------------------------------------ -# Docstring-mangling domains -# ------------------------------------------------------------------------------ - -from docutils.statemachine import ViewList -from sphinx.domains.c import CDomain -from sphinx.domains.python import PythonDomain - - -class ManglingDomainBase(object): - directive_mangling_map = {} - - def __init__(self, *a, **kw): - super(ManglingDomainBase, self).__init__(*a, **kw) - self.wrap_mangling_directives() - - def wrap_mangling_directives(self): - for name, objtype in list(self.directive_mangling_map.items()): - self.directives[name] = wrap_mangling_directive( - self.directives[name], objtype) - - -class NumpyPythonDomain(ManglingDomainBase, PythonDomain): - name = 'np' - directive_mangling_map = { - 'function': 'function', - 'class': 'class', - 'exception': 'class', - 'method': 'function', - 'classmethod': 'function', - 'staticmethod': 'function', - 'attribute': 'attribute', - } - indices = [] - - -class NumpyCDomain(ManglingDomainBase, CDomain): - name = 'np-c' - directive_mangling_map = { - 'function': 'function', - 'member': 'attribute', - 'macro': 'function', - 'type': 'class', - 'var': 'object', - } - - -def wrap_mangling_directive(base_directive, objtype): - class directive(base_directive): - def run(self): - env = self.state.document.settings.env - - name = None - if self.arguments: - m = re.match(r'^(.*\s+)?(.*?)(\(.*)?', self.arguments[0]) - name = m.group(2).strip() - - if not name: - name = self.arguments[0] - - lines = list(self.content) - mangle_docstrings(env.app, objtype, name, None, None, lines) - self.content = ViewList(lines, self.content.parent) - - return base_directive.run(self) - - return directive diff --git a/doc/sphinxext/numpydoc/phantom_import.py b/doc/sphinxext/numpydoc/phantom_import.py deleted file mode 100644 index 9a60b4a35b..0000000000 --- a/doc/sphinxext/numpydoc/phantom_import.py +++ /dev/null @@ -1,167 +0,0 @@ -""" -============== -phantom_import -============== - -Sphinx extension to make directives from 
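With the vendored extension gone, the same docstring mangling is available from the `numpydoc` package on PyPI/conda, and the configuration values it registers are the ones added by the ``setup()`` shown above. A minimal ``conf.py`` sketch with example (not default) values::

    extensions = [
        'sphinx.ext.autodoc',
        'numpydoc',        # installed package, not the removed vendored copy
    ]

    # Options registered by the extension's setup() above.
    numpydoc_use_plots = True
    numpydoc_show_class_members = True
    numpydoc_show_inherited_class_members = True
    numpydoc_class_members_toctree = True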
``sphinx.ext.autodoc`` and similar -extensions to use docstrings loaded from an XML file. - -This extension loads an XML file in the Pydocweb format [1] and -creates a dummy module that contains the specified docstrings. This -can be used to get the current docstrings from a Pydocweb instance -without needing to rebuild the documented module. - -.. [1] http://code.google.com/p/pydocweb - -""" -from __future__ import division, absolute_import, print_function - -import imp, sys, compiler, types, os, inspect, re - -def setup(app): - app.connect('builder-inited', initialize) - app.add_config_value('phantom_import_file', None, True) - -def initialize(app): - fn = app.config.phantom_import_file - if (fn and os.path.isfile(fn)): - print("[numpydoc] Phantom importing modules from", fn, "...") - import_phantom_module(fn) - -#------------------------------------------------------------------------------ -# Creating 'phantom' modules from an XML description -#------------------------------------------------------------------------------ -def import_phantom_module(xml_file): - """ - Insert a fake Python module to sys.modules, based on a XML file. - - The XML file is expected to conform to Pydocweb DTD. The fake - module will contain dummy objects, which guarantee the following: - - - Docstrings are correct. - - Class inheritance relationships are correct (if present in XML). - - Function argspec is *NOT* correct (even if present in XML). - Instead, the function signature is prepended to the function docstring. - - Class attributes are *NOT* correct; instead, they are dummy objects. - - Parameters - ---------- - xml_file : str - Name of an XML file to read - - """ - import lxml.etree as etree - - object_cache = {} - - tree = etree.parse(xml_file) - root = tree.getroot() - - # Sort items so that - # - Base classes come before classes inherited from them - # - Modules come before their contents - all_nodes = dict([(n.attrib['id'], n) for n in root]) - - def _get_bases(node, recurse=False): - bases = [x.attrib['ref'] for x in node.findall('base')] - if recurse: - j = 0 - while True: - try: - b = bases[j] - except IndexError: break - if b in all_nodes: - bases.extend(_get_bases(all_nodes[b])) - j += 1 - return bases - - type_index = ['module', 'class', 'callable', 'object'] - - def base_cmp(a, b): - x = cmp(type_index.index(a.tag), type_index.index(b.tag)) - if x != 0: return x - - if a.tag == 'class' and b.tag == 'class': - a_bases = _get_bases(a, recurse=True) - b_bases = _get_bases(b, recurse=True) - x = cmp(len(a_bases), len(b_bases)) - if x != 0: return x - if a.attrib['id'] in b_bases: return -1 - if b.attrib['id'] in a_bases: return 1 - - return cmp(a.attrib['id'].count('.'), b.attrib['id'].count('.')) - - nodes = root.getchildren() - nodes.sort(base_cmp) - - # Create phantom items - for node in nodes: - name = node.attrib['id'] - doc = (node.text or '').decode('string-escape') + "\n" - if doc == "\n": doc = "" - - # create parent, if missing - parent = name - while True: - parent = '.'.join(parent.split('.')[:-1]) - if not parent: break - if parent in object_cache: break - obj = imp.new_module(parent) - object_cache[parent] = obj - sys.modules[parent] = obj - - # create object - if node.tag == 'module': - obj = imp.new_module(name) - obj.__doc__ = doc - sys.modules[name] = obj - elif node.tag == 'class': - bases = [object_cache[b] for b in _get_bases(node) - if b in object_cache] - bases.append(object) - init = lambda self: None - init.__doc__ = doc - obj = type(name, tuple(bases), {'__doc__': doc, 
'__init__': init}) - obj.__name__ = name.split('.')[-1] - elif node.tag == 'callable': - funcname = node.attrib['id'].split('.')[-1] - argspec = node.attrib.get('argspec') - if argspec: - argspec = re.sub('^[^(]*', '', argspec) - doc = "%s%s\n\n%s" % (funcname, argspec, doc) - obj = lambda: 0 - obj.__argspec_is_invalid_ = True - if sys.version_info[0] >= 3: - obj.__name__ = funcname - else: - obj.func_name = funcname - obj.__name__ = name - obj.__doc__ = doc - if inspect.isclass(object_cache[parent]): - obj.__objclass__ = object_cache[parent] - else: - class Dummy(object): pass - obj = Dummy() - obj.__name__ = name - obj.__doc__ = doc - if inspect.isclass(object_cache[parent]): - obj.__get__ = lambda: None - object_cache[name] = obj - - if parent: - if inspect.ismodule(object_cache[parent]): - obj.__module__ = parent - setattr(object_cache[parent], name.split('.')[-1], obj) - - # Populate items - for node in root: - obj = object_cache.get(node.attrib['id']) - if obj is None: continue - for ref in node.findall('ref'): - if node.tag == 'class': - if ref.attrib['ref'].startswith(node.attrib['id'] + '.'): - setattr(obj, ref.attrib['name'], - object_cache.get(ref.attrib['ref'])) - else: - setattr(obj, ref.attrib['name'], - object_cache.get(ref.attrib['ref'])) diff --git a/doc/sphinxext/numpydoc/plot_directive.py b/doc/sphinxext/numpydoc/plot_directive.py deleted file mode 100644 index 2014f85707..0000000000 --- a/doc/sphinxext/numpydoc/plot_directive.py +++ /dev/null @@ -1,642 +0,0 @@ -""" -A special directive for generating a matplotlib plot. - -.. warning:: - - This is a hacked version of plot_directive.py from Matplotlib. - It's very much subject to change! - - -Usage ------ - -Can be used like this:: - - .. plot:: examples/example.py - - .. plot:: - - import matplotlib.pyplot as plt - plt.plot([1,2,3], [4,5,6]) - - .. plot:: - - A plotting example: - - >>> import matplotlib.pyplot as plt - >>> plt.plot([1,2,3], [4,5,6]) - -The content is interpreted as doctest formatted if it has a line starting -with ``>>>``. - -The ``plot`` directive supports the options - - format : {'python', 'doctest'} - Specify the format of the input - - include-source : bool - Whether to display the source code. Default can be changed in conf.py - -and the ``image`` directive options ``alt``, ``height``, ``width``, -``scale``, ``align``, ``class``. - -Configuration options ---------------------- - -The plot directive has the following configuration options: - - plot_include_source - Default value for the include-source option - - plot_pre_code - Code that should be executed before each plot. - - plot_basedir - Base directory, to which plot:: file names are relative to. - (If None or empty, file names are relative to the directoly where - the file containing the directive is.) - - plot_formats - File formats to generate. List of tuples or strings:: - - [(suffix, dpi), suffix, ...] - - that determine the file format and the DPI. For entries whose - DPI was omitted, sensible defaults are chosen. - - plot_html_show_formats - Whether to show links to the files in HTML. - -TODO ----- - -* Refactor Latex output; now it's plain images, but it would be nice - to make them appear side-by-side, or in floats. 
- -""" -from __future__ import division, absolute_import, print_function - -import sys, os, glob, shutil, imp, warnings, re, textwrap, traceback -import sphinx - -if sys.version_info[0] >= 3: - from io import StringIO -else: - from io import StringIO - -import warnings -warnings.warn("A plot_directive module is also available under " - "matplotlib.sphinxext; expect this numpydoc.plot_directive " - "module to be deprecated after relevant features have been " - "integrated there.", - FutureWarning, stacklevel=2) - - -#------------------------------------------------------------------------------ -# Registration hook -#------------------------------------------------------------------------------ - -def setup(app): - setup.app = app - setup.config = app.config - setup.confdir = app.confdir - - app.add_config_value('plot_pre_code', '', True) - app.add_config_value('plot_include_source', False, True) - app.add_config_value('plot_formats', ['png', 'hires.png', 'pdf'], True) - app.add_config_value('plot_basedir', None, True) - app.add_config_value('plot_html_show_formats', True, True) - - app.add_directive('plot', plot_directive, True, (0, 1, False), - **plot_directive_options) - -#------------------------------------------------------------------------------ -# plot:: directive -#------------------------------------------------------------------------------ -from docutils.parsers.rst import directives -from docutils import nodes - -def plot_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - return run(arguments, content, options, state_machine, state, lineno) -plot_directive.__doc__ = __doc__ - -def _option_boolean(arg): - if not arg or not arg.strip(): - # no argument given, assume used as a flag - return True - elif arg.strip().lower() in ('no', '0', 'false'): - return False - elif arg.strip().lower() in ('yes', '1', 'true'): - return True - else: - raise ValueError('"%s" unknown boolean' % arg) - -def _option_format(arg): - return directives.choice(arg, ('python', 'lisp')) - -def _option_align(arg): - return directives.choice(arg, ("top", "middle", "bottom", "left", "center", - "right")) - -plot_directive_options = {'alt': directives.unchanged, - 'height': directives.length_or_unitless, - 'width': directives.length_or_percentage_or_unitless, - 'scale': directives.nonnegative_int, - 'align': _option_align, - 'class': directives.class_option, - 'include-source': _option_boolean, - 'format': _option_format, - } - -#------------------------------------------------------------------------------ -# Generating output -#------------------------------------------------------------------------------ - -from docutils import nodes, utils - -try: - # Sphinx depends on either Jinja or Jinja2 - import jinja2 - def format_template(template, **kw): - return jinja2.Template(template).render(**kw) -except ImportError: - import jinja - def format_template(template, **kw): - return jinja.from_string(template, **kw) - -TEMPLATE = """ -{{ source_code }} - -{{ only_html }} - - {% if source_link or (html_show_formats and not multi_image) %} - ( - {%- if source_link -%} - `Source code <{{ source_link }}>`__ - {%- endif -%} - {%- if html_show_formats and not multi_image -%} - {%- for img in images -%} - {%- for fmt in img.formats -%} - {%- if source_link or not loop.first -%}, {% endif -%} - `{{ fmt }} <{{ dest_dir }}/{{ img.basename }}.{{ fmt }}>`__ - {%- endfor -%} - {%- endfor -%} - {%- endif -%} - ) - {% endif %} - - {% for img in images %} - .. 
figure:: {{ build_dir }}/{{ img.basename }}.png - {%- for option in options %} - {{ option }} - {% endfor %} - - {% if html_show_formats and multi_image -%} - ( - {%- for fmt in img.formats -%} - {%- if not loop.first -%}, {% endif -%} - `{{ fmt }} <{{ dest_dir }}/{{ img.basename }}.{{ fmt }}>`__ - {%- endfor -%} - ) - {%- endif -%} - {% endfor %} - -{{ only_latex }} - - {% for img in images %} - .. image:: {{ build_dir }}/{{ img.basename }}.pdf - {% endfor %} - -""" - -class ImageFile(object): - def __init__(self, basename, dirname): - self.basename = basename - self.dirname = dirname - self.formats = [] - - def filename(self, format): - return os.path.join(self.dirname, "%s.%s" % (self.basename, format)) - - def filenames(self): - return [self.filename(fmt) for fmt in self.formats] - -def run(arguments, content, options, state_machine, state, lineno): - if arguments and content: - raise RuntimeError("plot:: directive can't have both args and content") - - document = state_machine.document - config = document.settings.env.config - - options.setdefault('include-source', config.plot_include_source) - - # determine input - rst_file = document.attributes['source'] - rst_dir = os.path.dirname(rst_file) - - if arguments: - if not config.plot_basedir: - source_file_name = os.path.join(rst_dir, - directives.uri(arguments[0])) - else: - source_file_name = os.path.join(setup.confdir, config.plot_basedir, - directives.uri(arguments[0])) - code = open(source_file_name, 'r').read() - output_base = os.path.basename(source_file_name) - else: - source_file_name = rst_file - code = textwrap.dedent("\n".join(map(str, content))) - counter = document.attributes.get('_plot_counter', 0) + 1 - document.attributes['_plot_counter'] = counter - base, ext = os.path.splitext(os.path.basename(source_file_name)) - output_base = '%s-%d.py' % (base, counter) - - base, source_ext = os.path.splitext(output_base) - if source_ext in ('.py', '.rst', '.txt'): - output_base = base - else: - source_ext = '' - - # ensure that LaTeX includegraphics doesn't choke in foo.bar.pdf filenames - output_base = output_base.replace('.', '-') - - # is it in doctest format? 
- is_doctest = contains_doctest(code) - if 'format' in options: - if options['format'] == 'python': - is_doctest = False - else: - is_doctest = True - - # determine output directory name fragment - source_rel_name = relpath(source_file_name, setup.confdir) - source_rel_dir = os.path.dirname(source_rel_name) - while source_rel_dir.startswith(os.path.sep): - source_rel_dir = source_rel_dir[1:] - - # build_dir: where to place output files (temporarily) - build_dir = os.path.join(os.path.dirname(setup.app.doctreedir), - 'plot_directive', - source_rel_dir) - if not os.path.exists(build_dir): - os.makedirs(build_dir) - - # output_dir: final location in the builder's directory - dest_dir = os.path.abspath(os.path.join(setup.app.builder.outdir, - source_rel_dir)) - - # how to link to files from the RST file - dest_dir_link = os.path.join(relpath(setup.confdir, rst_dir), - source_rel_dir).replace(os.path.sep, '/') - build_dir_link = relpath(build_dir, rst_dir).replace(os.path.sep, '/') - source_link = dest_dir_link + '/' + output_base + source_ext - - # make figures - try: - results = makefig(code, source_file_name, build_dir, output_base, - config) - errors = [] - except PlotError as err: - reporter = state.memo.reporter - sm = reporter.system_message( - 2, "Exception occurred in plotting %s: %s" % (output_base, err), - line=lineno) - results = [(code, [])] - errors = [sm] - - # generate output restructuredtext - total_lines = [] - for j, (code_piece, images) in enumerate(results): - if options['include-source']: - if is_doctest: - lines = [''] - lines += [row.rstrip() for row in code_piece.split('\n')] - else: - lines = ['.. code-block:: python', ''] - lines += [' %s' % row.rstrip() - for row in code_piece.split('\n')] - source_code = "\n".join(lines) - else: - source_code = "" - - opts = [':%s: %s' % (key, val) for key, val in list(options.items()) - if key in ('alt', 'height', 'width', 'scale', 'align', 'class')] - - only_html = ".. only:: html" - only_latex = ".. 
only:: latex" - - if j == 0: - src_link = source_link - else: - src_link = None - - result = format_template( - TEMPLATE, - dest_dir=dest_dir_link, - build_dir=build_dir_link, - source_link=src_link, - multi_image=len(images) > 1, - only_html=only_html, - only_latex=only_latex, - options=opts, - images=images, - source_code=source_code, - html_show_formats=config.plot_html_show_formats) - - total_lines.extend(result.split("\n")) - total_lines.extend("\n") - - if total_lines: - state_machine.insert_input(total_lines, source=source_file_name) - - # copy image files to builder's output directory - if not os.path.exists(dest_dir): - os.makedirs(dest_dir) - - for code_piece, images in results: - for img in images: - for fn in img.filenames(): - shutil.copyfile(fn, os.path.join(dest_dir, - os.path.basename(fn))) - - # copy script (if necessary) - if source_file_name == rst_file: - target_name = os.path.join(dest_dir, output_base + source_ext) - f = open(target_name, 'w') - f.write(unescape_doctest(code)) - f.close() - - return errors - - -#------------------------------------------------------------------------------ -# Run code and capture figures -#------------------------------------------------------------------------------ - -import matplotlib -matplotlib.use('Agg') -import matplotlib.pyplot as plt -import matplotlib.image as image -from matplotlib import _pylab_helpers - -import exceptions - -def contains_doctest(text): - try: - # check if it's valid Python as-is - compile(text, '', 'exec') - return False - except SyntaxError: - pass - r = re.compile(r'^\s*>>>', re.M) - m = r.search(text) - return bool(m) - -def unescape_doctest(text): - """ - Extract code from a piece of text, which contains either Python code - or doctests. - - """ - if not contains_doctest(text): - return text - - code = "" - for line in text.split("\n"): - m = re.match(r'^\s*(>>>|\.\.\.) (.*)$', line) - if m: - code += m.group(2) + "\n" - elif line.strip(): - code += "# " + line.strip() + "\n" - else: - code += "\n" - return code - -def split_code_at_show(text): - """ - Split code at plt.show() - - """ - - parts = [] - is_doctest = contains_doctest(text) - - part = [] - for line in text.split("\n"): - if (not is_doctest and line.strip() == 'plt.show()') or \ - (is_doctest and line.strip() == '>>> plt.show()'): - part.append(line) - parts.append("\n".join(part)) - part = [] - else: - part.append(line) - if "\n".join(part).strip(): - parts.append("\n".join(part)) - return parts - -class PlotError(RuntimeError): - pass - -def run_code(code, code_path, ns=None): - # Change the working directory to the directory of the example, so - # it can get at its data files, if any. 
- pwd = os.getcwd() - old_sys_path = list(sys.path) - if code_path is not None: - dirname = os.path.abspath(os.path.dirname(code_path)) - os.chdir(dirname) - sys.path.insert(0, dirname) - - # Redirect stdout - stdout = sys.stdout - sys.stdout = StringIO() - - # Reset sys.argv - old_sys_argv = sys.argv - sys.argv = [code_path] - - try: - try: - code = unescape_doctest(code) - if ns is None: - ns = {} - if not ns: - exec(setup.config.plot_pre_code, ns) - exec(code, ns) - except (Exception, SystemExit) as err: - raise PlotError(traceback.format_exc()) - finally: - os.chdir(pwd) - sys.argv = old_sys_argv - sys.path[:] = old_sys_path - sys.stdout = stdout - return ns - - -#------------------------------------------------------------------------------ -# Generating figures -#------------------------------------------------------------------------------ - -def out_of_date(original, derived): - """ - Returns True if derivative is out-of-date wrt original, - both of which are full file paths. - """ - return (not os.path.exists(derived) - or os.stat(derived).st_mtime < os.stat(original).st_mtime) - - -def makefig(code, code_path, output_dir, output_base, config): - """ - Run a pyplot script *code* and save the images under *output_dir* - with file names derived from *output_base* - - """ - - # -- Parse format list - default_dpi = {'png': 80, 'hires.png': 200, 'pdf': 50} - formats = [] - for fmt in config.plot_formats: - if isinstance(fmt, str): - formats.append((fmt, default_dpi.get(fmt, 80))) - elif type(fmt) in (tuple, list) and len(fmt)==2: - formats.append((str(fmt[0]), int(fmt[1]))) - else: - raise PlotError('invalid image format "%r" in plot_formats' % fmt) - - # -- Try to determine if all images already exist - - code_pieces = split_code_at_show(code) - - # Look for single-figure output files first - all_exists = True - img = ImageFile(output_base, output_dir) - for format, dpi in formats: - if out_of_date(code_path, img.filename(format)): - all_exists = False - break - img.formats.append(format) - - if all_exists: - return [(code, [img])] - - # Then look for multi-figure output files - results = [] - all_exists = True - for i, code_piece in enumerate(code_pieces): - images = [] - for j in range(1000): - img = ImageFile('%s_%02d_%02d' % (output_base, i, j), output_dir) - for format, dpi in formats: - if out_of_date(code_path, img.filename(format)): - all_exists = False - break - img.formats.append(format) - - # assume that if we have one, we have them all - if not all_exists: - all_exists = (j > 0) - break - images.append(img) - if not all_exists: - break - results.append((code_piece, images)) - - if all_exists: - return results - - # -- We didn't find the files, so build them - - results = [] - ns = {} - - for i, code_piece in enumerate(code_pieces): - # Clear between runs - plt.close('all') - - # Run code - run_code(code_piece, code_path, ns) - - # Collect images - images = [] - fig_managers = _pylab_helpers.Gcf.get_all_fig_managers() - for j, figman in enumerate(fig_managers): - if len(fig_managers) == 1 and len(code_pieces) == 1: - img = ImageFile(output_base, output_dir) - else: - img = ImageFile("%s_%02d_%02d" % (output_base, i, j), - output_dir) - images.append(img) - for format, dpi in formats: - try: - figman.canvas.figure.savefig(img.filename(format), dpi=dpi) - except exceptions.BaseException as err: - raise PlotError(traceback.format_exc()) - img.formats.append(format) - - # Results - results.append((code_piece, images)) - - return results - - 
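The ``makefig`` helper above only regenerates a figure when ``out_of_date`` reports that the rendered image is missing or older than the script that produces it. A minimal standalone sketch of that mtime comparison, restating the three-line helper shown above and using hypothetical file names rather than real directive output::

    import os
    import tempfile
    import time

    def out_of_date(original, derived):
        # Same test as the helper above: rebuild when the derived file is
        # missing or older than the file it was generated from.
        return (not os.path.exists(derived)
                or os.stat(derived).st_mtime < os.stat(original).st_mtime)

    with tempfile.TemporaryDirectory() as tmp:
        script = os.path.join(tmp, 'plot_example.py')   # hypothetical names
        image = os.path.join(tmp, 'plot_example.png')

        open(script, 'w').close()
        print(out_of_date(script, image))   # True: no image has been built yet

        open(image, 'w').close()
        print(out_of_date(script, image))   # False: image is at least as new

        time.sleep(1)
        os.utime(script, None)              # touch the script
        print(out_of_date(script, image))   # True again: the source is newer

Keying the rebuild on modification times is what lets repeated documentation builds skip plots whose source has not changed instead of re-executing every example on each run.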
-#------------------------------------------------------------------------------ -# Relative pathnames -#------------------------------------------------------------------------------ - -try: - from os.path import relpath -except ImportError: - # Copied from Python 2.7 - if 'posix' in sys.builtin_module_names: - def relpath(path, start=os.path.curdir): - """Return a relative version of a path""" - from os.path import sep, curdir, join, abspath, commonprefix, \ - pardir - - if not path: - raise ValueError("no path specified") - - start_list = abspath(start).split(sep) - path_list = abspath(path).split(sep) - - # Work out how much of the filepath is shared by start and path. - i = len(commonprefix([start_list, path_list])) - - rel_list = [pardir] * (len(start_list)-i) + path_list[i:] - if not rel_list: - return curdir - return join(*rel_list) - elif 'nt' in sys.builtin_module_names: - def relpath(path, start=os.path.curdir): - """Return a relative version of a path""" - from os.path import sep, curdir, join, abspath, commonprefix, \ - pardir, splitunc - - if not path: - raise ValueError("no path specified") - start_list = abspath(start).split(sep) - path_list = abspath(path).split(sep) - if start_list[0].lower() != path_list[0].lower(): - unc_path, rest = splitunc(path) - unc_start, rest = splitunc(start) - if bool(unc_path) ^ bool(unc_start): - raise ValueError("Cannot mix UNC and non-UNC paths (%s and %s)" - % (path, start)) - else: - raise ValueError("path is on drive %s, start on drive %s" - % (path_list[0], start_list[0])) - # Work out how much of the filepath is shared by start and path. - for i in range(min(len(start_list), len(path_list))): - if start_list[i].lower() != path_list[i].lower(): - break - else: - i += 1 - - rel_list = [pardir] * (len(start_list)-i) + path_list[i:] - if not rel_list: - return curdir - return join(*rel_list) - else: - raise RuntimeError("Unsupported platform (no relpath available!)") diff --git a/doc/sphinxext/numpydoc/tests/test_docscrape.py b/doc/sphinxext/numpydoc/tests/test_docscrape.py deleted file mode 100644 index 634bef444f..0000000000 --- a/doc/sphinxext/numpydoc/tests/test_docscrape.py +++ /dev/null @@ -1,913 +0,0 @@ -# -*- encoding:utf-8 -*- -from __future__ import division, absolute_import, print_function - -import sys, textwrap - -from numpydoc.docscrape import NumpyDocString, FunctionDoc, ClassDoc -from numpydoc.docscrape_sphinx import SphinxDocString, SphinxClassDoc -from nose.tools import * - -if sys.version_info[0] >= 3: - sixu = lambda s: s -else: - sixu = lambda s: unicode(s, 'unicode_escape') - - -doc_txt = '''\ - numpy.multivariate_normal(mean, cov, shape=None, spam=None) - - Draw values from a multivariate normal distribution with specified - mean and covariance. - - The multivariate normal or Gaussian distribution is a generalisation - of the one-dimensional normal distribution to higher dimensions. - - Parameters - ---------- - mean : (N,) ndarray - Mean of the N-dimensional distribution. - - .. math:: - - (1+2+3)/3 - - cov : (N, N) ndarray - Covariance matrix of the distribution. - shape : tuple of ints - Given a shape of, for example, (m,n,k), m*n*k samples are - generated, and packed in an m-by-n-by-k arrangement. Because - each sample is N-dimensional, the output shape is (m,n,k,N). - - Returns - ------- - out : ndarray - The drawn samples, arranged according to `shape`. If the - shape given is (m,n,...), then the shape of `out` is is - (m,n,...,N). 
- - In other words, each entry ``out[i,j,...,:]`` is an N-dimensional - value drawn from the distribution. - list of str - This is not a real return value. It exists to test - anonymous return values. - - Other Parameters - ---------------- - spam : parrot - A parrot off its mortal coil. - - Raises - ------ - RuntimeError - Some error - - Warns - ----- - RuntimeWarning - Some warning - - Warnings - -------- - Certain warnings apply. - - Notes - ----- - Instead of specifying the full covariance matrix, popular - approximations include: - - - Spherical covariance (`cov` is a multiple of the identity matrix) - - Diagonal covariance (`cov` has non-negative elements only on the diagonal) - - This geometrical property can be seen in two dimensions by plotting - generated data-points: - - >>> mean = [0,0] - >>> cov = [[1,0],[0,100]] # diagonal covariance, points lie on x or y-axis - - >>> x,y = multivariate_normal(mean,cov,5000).T - >>> plt.plot(x,y,'x'); plt.axis('equal'); plt.show() - - Note that the covariance matrix must be symmetric and non-negative - definite. - - References - ---------- - .. [1] A. Papoulis, "Probability, Random Variables, and Stochastic - Processes," 3rd ed., McGraw-Hill Companies, 1991 - .. [2] R.O. Duda, P.E. Hart, and D.G. Stork, "Pattern Classification," - 2nd ed., Wiley, 2001. - - See Also - -------- - some, other, funcs - otherfunc : relationship - - Examples - -------- - >>> mean = (1,2) - >>> cov = [[1,0],[1,0]] - >>> x = multivariate_normal(mean,cov,(3,3)) - >>> print x.shape - (3, 3, 2) - - The following is probably true, given that 0.6 is roughly twice the - standard deviation: - - >>> print list( (x[0,0,:] - mean) < 0.6 ) - [True, True] - - .. index:: random - :refguide: random;distributions, random;gauss - - ''' -doc = NumpyDocString(doc_txt) - -doc_yields_txt = """ -Test generator - -Yields ------- -a : int - The number of apples. -b : int - The number of bananas. -int - The number of unknowns. 
-""" -doc_yields = NumpyDocString(doc_yields_txt) - - -def test_signature(): - assert doc['Signature'].startswith('numpy.multivariate_normal(') - assert doc['Signature'].endswith('spam=None)') - -def test_summary(): - assert doc['Summary'][0].startswith('Draw values') - assert doc['Summary'][-1].endswith('covariance.') - -def test_extended_summary(): - assert doc['Extended Summary'][0].startswith('The multivariate normal') - -def test_parameters(): - assert_equal(len(doc['Parameters']), 3) - assert_equal([n for n,_,_ in doc['Parameters']], ['mean','cov','shape']) - - arg, arg_type, desc = doc['Parameters'][1] - assert_equal(arg_type, '(N, N) ndarray') - assert desc[0].startswith('Covariance matrix') - assert doc['Parameters'][0][-1][-2] == ' (1+2+3)/3' - -def test_other_parameters(): - assert_equal(len(doc['Other Parameters']), 1) - assert_equal([n for n,_,_ in doc['Other Parameters']], ['spam']) - arg, arg_type, desc = doc['Other Parameters'][0] - assert_equal(arg_type, 'parrot') - assert desc[0].startswith('A parrot off its mortal coil') - -def test_returns(): - assert_equal(len(doc['Returns']), 2) - arg, arg_type, desc = doc['Returns'][0] - assert_equal(arg, 'out') - assert_equal(arg_type, 'ndarray') - assert desc[0].startswith('The drawn samples') - assert desc[-1].endswith('distribution.') - - arg, arg_type, desc = doc['Returns'][1] - assert_equal(arg, 'list of str') - assert_equal(arg_type, '') - assert desc[0].startswith('This is not a real') - assert desc[-1].endswith('anonymous return values.') - -def test_yields(): - section = doc_yields['Yields'] - assert_equal(len(section), 3) - truth = [('a', 'int', 'apples.'), - ('b', 'int', 'bananas.'), - ('int', '', 'unknowns.')] - for (arg, arg_type, desc), (arg_, arg_type_, end) in zip(section, truth): - assert_equal(arg, arg_) - assert_equal(arg_type, arg_type_) - assert desc[0].startswith('The number of') - assert desc[0].endswith(end) - -def test_returnyield(): - doc_text = """ -Test having returns and yields. - -Returns -------- -int - The number of apples. - -Yields ------- -a : int - The number of apples. -b : int - The number of bananas. - -""" - assert_raises(ValueError, NumpyDocString, doc_text) - -def test_notes(): - assert doc['Notes'][0].startswith('Instead') - assert doc['Notes'][-1].endswith('definite.') - assert_equal(len(doc['Notes']), 17) - -def test_references(): - assert doc['References'][0].startswith('..') - assert doc['References'][-1].endswith('2001.') - -def test_examples(): - assert doc['Examples'][0].startswith('>>>') - assert doc['Examples'][-1].endswith('True]') - -def test_index(): - assert_equal(doc['index']['default'], 'random') - assert_equal(len(doc['index']), 2) - assert_equal(len(doc['index']['refguide']), 2) - -def non_blank_line_by_line_compare(a,b): - a = textwrap.dedent(a) - b = textwrap.dedent(b) - a = [l.rstrip() for l in a.split('\n') if l.strip()] - b = [l.rstrip() for l in b.split('\n') if l.strip()] - for n,line in enumerate(a): - if not line == b[n]: - raise AssertionError("Lines %s of a and b differ: " - "\n>>> %s\n<<< %s\n" % - (n,line,b[n])) -def test_str(): - # doc_txt has the order of Notes and See Also sections flipped. - # This should be handled automatically, and so, one thing this test does - # is to make sure that See Also precedes Notes in the output. - non_blank_line_by_line_compare(str(doc), -"""numpy.multivariate_normal(mean, cov, shape=None, spam=None) - -Draw values from a multivariate normal distribution with specified -mean and covariance. 
- -The multivariate normal or Gaussian distribution is a generalisation -of the one-dimensional normal distribution to higher dimensions. - -Parameters ----------- -mean : (N,) ndarray - Mean of the N-dimensional distribution. - - .. math:: - - (1+2+3)/3 - -cov : (N, N) ndarray - Covariance matrix of the distribution. -shape : tuple of ints - Given a shape of, for example, (m,n,k), m*n*k samples are - generated, and packed in an m-by-n-by-k arrangement. Because - each sample is N-dimensional, the output shape is (m,n,k,N). - -Returns -------- -out : ndarray - The drawn samples, arranged according to `shape`. If the - shape given is (m,n,...), then the shape of `out` is is - (m,n,...,N). - - In other words, each entry ``out[i,j,...,:]`` is an N-dimensional - value drawn from the distribution. -list of str - This is not a real return value. It exists to test - anonymous return values. - -Other Parameters ----------------- -spam : parrot - A parrot off its mortal coil. - -Raises ------- -RuntimeError - Some error - -Warns ------ -RuntimeWarning - Some warning - -Warnings --------- -Certain warnings apply. - -See Also --------- -`some`_, `other`_, `funcs`_ - -`otherfunc`_ - relationship - -Notes ------ -Instead of specifying the full covariance matrix, popular -approximations include: - - - Spherical covariance (`cov` is a multiple of the identity matrix) - - Diagonal covariance (`cov` has non-negative elements only on the diagonal) - -This geometrical property can be seen in two dimensions by plotting -generated data-points: - ->>> mean = [0,0] ->>> cov = [[1,0],[0,100]] # diagonal covariance, points lie on x or y-axis - ->>> x,y = multivariate_normal(mean,cov,5000).T ->>> plt.plot(x,y,'x'); plt.axis('equal'); plt.show() - -Note that the covariance matrix must be symmetric and non-negative -definite. - -References ----------- -.. [1] A. Papoulis, "Probability, Random Variables, and Stochastic - Processes," 3rd ed., McGraw-Hill Companies, 1991 -.. [2] R.O. Duda, P.E. Hart, and D.G. Stork, "Pattern Classification," - 2nd ed., Wiley, 2001. - -Examples --------- ->>> mean = (1,2) ->>> cov = [[1,0],[1,0]] ->>> x = multivariate_normal(mean,cov,(3,3)) ->>> print x.shape -(3, 3, 2) - -The following is probably true, given that 0.6 is roughly twice the -standard deviation: - ->>> print list( (x[0,0,:] - mean) < 0.6 ) -[True, True] - -.. index:: random - :refguide: random;distributions, random;gauss""") - - -def test_yield_str(): - non_blank_line_by_line_compare(str(doc_yields), -"""Test generator - -Yields ------- -a : int - The number of apples. -b : int - The number of bananas. -int - The number of unknowns. - -.. index:: """) - - -def test_sphinx_str(): - sphinx_doc = SphinxDocString(doc_txt) - non_blank_line_by_line_compare(str(sphinx_doc), -""" -.. index:: random - single: random;distributions, random;gauss - -Draw values from a multivariate normal distribution with specified -mean and covariance. - -The multivariate normal or Gaussian distribution is a generalisation -of the one-dimensional normal distribution to higher dimensions. - -:Parameters: - - **mean** : (N,) ndarray - - Mean of the N-dimensional distribution. - - .. math:: - - (1+2+3)/3 - - **cov** : (N, N) ndarray - - Covariance matrix of the distribution. - - **shape** : tuple of ints - - Given a shape of, for example, (m,n,k), m*n*k samples are - generated, and packed in an m-by-n-by-k arrangement. Because - each sample is N-dimensional, the output shape is (m,n,k,N). 
- -:Returns: - - **out** : ndarray - - The drawn samples, arranged according to `shape`. If the - shape given is (m,n,...), then the shape of `out` is is - (m,n,...,N). - - In other words, each entry ``out[i,j,...,:]`` is an N-dimensional - value drawn from the distribution. - - list of str - - This is not a real return value. It exists to test - anonymous return values. - -:Other Parameters: - - **spam** : parrot - - A parrot off its mortal coil. - -:Raises: - - **RuntimeError** - - Some error - -:Warns: - - **RuntimeWarning** - - Some warning - -.. warning:: - - Certain warnings apply. - -.. seealso:: - - :obj:`some`, :obj:`other`, :obj:`funcs` - - :obj:`otherfunc` - relationship - -.. rubric:: Notes - -Instead of specifying the full covariance matrix, popular -approximations include: - - - Spherical covariance (`cov` is a multiple of the identity matrix) - - Diagonal covariance (`cov` has non-negative elements only on the diagonal) - -This geometrical property can be seen in two dimensions by plotting -generated data-points: - ->>> mean = [0,0] ->>> cov = [[1,0],[0,100]] # diagonal covariance, points lie on x or y-axis - ->>> x,y = multivariate_normal(mean,cov,5000).T ->>> plt.plot(x,y,'x'); plt.axis('equal'); plt.show() - -Note that the covariance matrix must be symmetric and non-negative -definite. - -.. rubric:: References - -.. [1] A. Papoulis, "Probability, Random Variables, and Stochastic - Processes," 3rd ed., McGraw-Hill Companies, 1991 -.. [2] R.O. Duda, P.E. Hart, and D.G. Stork, "Pattern Classification," - 2nd ed., Wiley, 2001. - -.. only:: latex - - [1]_, [2]_ - -.. rubric:: Examples - ->>> mean = (1,2) ->>> cov = [[1,0],[1,0]] ->>> x = multivariate_normal(mean,cov,(3,3)) ->>> print x.shape -(3, 3, 2) - -The following is probably true, given that 0.6 is roughly twice the -standard deviation: - ->>> print list( (x[0,0,:] - mean) < 0.6 ) -[True, True] -""") - - -def test_sphinx_yields_str(): - sphinx_doc = SphinxDocString(doc_yields_txt) - non_blank_line_by_line_compare(str(sphinx_doc), -"""Test generator - -:Yields: - - **a** : int - - The number of apples. - - **b** : int - - The number of bananas. - - int - - The number of unknowns. -""") - - -doc2 = NumpyDocString(""" - Returns array of indices of the maximum values of along the given axis. - - Parameters - ---------- - a : {array_like} - Array to look in. - axis : {None, integer} - If None, the index is into the flattened array, otherwise along - the specified axis""") - -def test_parameters_without_extended_description(): - assert_equal(len(doc2['Parameters']), 2) - -doc3 = NumpyDocString(""" - my_signature(*params, **kwds) - - Return this and that. - """) - -def test_escape_stars(): - signature = str(doc3).split('\n')[0] - assert_equal(signature, 'my_signature(\*params, \*\*kwds)') - - def my_func(a, b, **kwargs): - pass - - fdoc = FunctionDoc(func=my_func) - assert_equal(fdoc['Signature'], 'my_func(a, b, \*\*kwargs)') - -doc4 = NumpyDocString( - """a.conj() - - Return an array with all complex-valued elements conjugated.""") - -def test_empty_extended_summary(): - assert_equal(doc4['Extended Summary'], []) - -doc5 = NumpyDocString( - """ - a.something() - - Raises - ------ - LinAlgException - If array is singular. 
- - Warns - ----- - SomeWarning - If needed - """) - -def test_raises(): - assert_equal(len(doc5['Raises']), 1) - name,_,desc = doc5['Raises'][0] - assert_equal(name,'LinAlgException') - assert_equal(desc,['If array is singular.']) - -def test_warns(): - assert_equal(len(doc5['Warns']), 1) - name,_,desc = doc5['Warns'][0] - assert_equal(name,'SomeWarning') - assert_equal(desc,['If needed']) - -def test_see_also(): - doc6 = NumpyDocString( - """ - z(x,theta) - - See Also - -------- - func_a, func_b, func_c - func_d : some equivalent func - foo.func_e : some other func over - multiple lines - func_f, func_g, :meth:`func_h`, func_j, - func_k - :obj:`baz.obj_q` - :class:`class_j`: fubar - foobar - """) - - assert len(doc6['See Also']) == 12 - for func, desc, role in doc6['See Also']: - if func in ('func_a', 'func_b', 'func_c', 'func_f', - 'func_g', 'func_h', 'func_j', 'func_k', 'baz.obj_q'): - assert(not desc) - else: - assert(desc) - - if func == 'func_h': - assert role == 'meth' - elif func == 'baz.obj_q': - assert role == 'obj' - elif func == 'class_j': - assert role == 'class' - else: - assert role is None - - if func == 'func_d': - assert desc == ['some equivalent func'] - elif func == 'foo.func_e': - assert desc == ['some other func over', 'multiple lines'] - elif func == 'class_j': - assert desc == ['fubar', 'foobar'] - -def test_see_also_print(): - class Dummy(object): - """ - See Also - -------- - func_a, func_b - func_c : some relationship - goes here - func_d - """ - pass - - obj = Dummy() - s = str(FunctionDoc(obj, role='func')) - assert(':func:`func_a`, :func:`func_b`' in s) - assert(' some relationship' in s) - assert(':func:`func_d`' in s) - -doc7 = NumpyDocString(""" - - Doc starts on second line. - - """) - -def test_empty_first_line(): - assert doc7['Summary'][0].startswith('Doc starts') - - -def test_no_summary(): - str(SphinxDocString(""" - Parameters - ----------""")) - - -def test_unicode(): - doc = SphinxDocString(""" - öäöäöäöäöåååå - - öäöäöäööäååå - - Parameters - ---------- - ååå : äää - ööö - - Returns - ------- - ååå : ööö - äää - - """) - assert isinstance(doc['Summary'][0], str) - assert doc['Summary'][0] == 'öäöäöäöäöåååå' - -def test_plot_examples(): - cfg = dict(use_plots=True) - - doc = SphinxDocString(""" - Examples - -------- - >>> import matplotlib.pyplot as plt - >>> plt.plot([1,2,3],[4,5,6]) - >>> plt.show() - """, config=cfg) - assert 'plot::' in str(doc), str(doc) - - doc = SphinxDocString(""" - Examples - -------- - .. plot:: - - import matplotlib.pyplot as plt - plt.plot([1,2,3],[4,5,6]) - plt.show() - """, config=cfg) - assert str(doc).count('plot::') == 1, str(doc) - -def test_class_members(): - - class Dummy(object): - """ - Dummy class. 
- - """ - def spam(self, a, b): - """Spam\n\nSpam spam.""" - pass - def ham(self, c, d): - """Cheese\n\nNo cheese.""" - pass - @property - def spammity(self): - """Spammity index""" - return 0.95 - - class Ignorable(object): - """local class, to be ignored""" - pass - - for cls in (ClassDoc, SphinxClassDoc): - doc = cls(Dummy, config=dict(show_class_members=False)) - assert 'Methods' not in str(doc), (cls, str(doc)) - assert 'spam' not in str(doc), (cls, str(doc)) - assert 'ham' not in str(doc), (cls, str(doc)) - assert 'spammity' not in str(doc), (cls, str(doc)) - assert 'Spammity index' not in str(doc), (cls, str(doc)) - - doc = cls(Dummy, config=dict(show_class_members=True)) - assert 'Methods' in str(doc), (cls, str(doc)) - assert 'spam' in str(doc), (cls, str(doc)) - assert 'ham' in str(doc), (cls, str(doc)) - assert 'spammity' in str(doc), (cls, str(doc)) - - if cls is SphinxClassDoc: - assert '.. autosummary::' in str(doc), str(doc) - else: - assert 'Spammity index' in str(doc), str(doc) - - class SubDummy(Dummy): - """ - Subclass of Dummy class. - - """ - def ham(self, c, d): - """Cheese\n\nNo cheese.\nOverloaded Dummy.ham""" - pass - - def bar(self, a, b): - """Bar\n\nNo bar""" - pass - - for cls in (ClassDoc, SphinxClassDoc): - doc = cls(SubDummy, config=dict(show_class_members=True, - show_inherited_class_members=False)) - assert 'Methods' in str(doc), (cls, str(doc)) - assert 'spam' not in str(doc), (cls, str(doc)) - assert 'ham' in str(doc), (cls, str(doc)) - assert 'bar' in str(doc), (cls, str(doc)) - assert 'spammity' not in str(doc), (cls, str(doc)) - - if cls is SphinxClassDoc: - assert '.. autosummary::' in str(doc), str(doc) - else: - assert 'Spammity index' not in str(doc), str(doc) - - doc = cls(SubDummy, config=dict(show_class_members=True, - show_inherited_class_members=True)) - assert 'Methods' in str(doc), (cls, str(doc)) - assert 'spam' in str(doc), (cls, str(doc)) - assert 'ham' in str(doc), (cls, str(doc)) - assert 'bar' in str(doc), (cls, str(doc)) - assert 'spammity' in str(doc), (cls, str(doc)) - - if cls is SphinxClassDoc: - assert '.. autosummary::' in str(doc), str(doc) - else: - assert 'Spammity index' in str(doc), str(doc) - -def test_duplicate_signature(): - # Duplicate function signatures occur e.g. in ufuncs, when the - # automatic mechanism adds one, and a more detailed comes from the - # docstring itself. - - doc = NumpyDocString( - """ - z(x1, x2) - - z(a, theta) - """) - - assert doc['Signature'].strip() == 'z(a, theta)' - - -class_doc_txt = """ - Foo - - Parameters - ---------- - f : callable ``f(t, y, *f_args)`` - Aaa. - jac : callable ``jac(t, y, *jac_args)`` - Bbb. - - Attributes - ---------- - t : float - Current time. - y : ndarray - Current variable values. - x : float - Some parameter - - Methods - ------- - a - b - c - - Examples - -------- - For usage examples, see `ode`. -""" - -def test_class_members_doc(): - doc = ClassDoc(None, class_doc_txt) - non_blank_line_by_line_compare(str(doc), - """ - Foo - - Parameters - ---------- - f : callable ``f(t, y, *f_args)`` - Aaa. - jac : callable ``jac(t, y, *jac_args)`` - Bbb. - - Examples - -------- - For usage examples, see `ode`. - - Attributes - ---------- - t : float - Current time. - y : ndarray - Current variable values. - x : float - Some parameter - - Methods - ------- - a - - b - - c - - .. 
index:: - - """) - -def test_class_members_doc_sphinx(): - class Foo: - @property - def x(self): - """Test attribute""" - return None - - doc = SphinxClassDoc(Foo, class_doc_txt) - non_blank_line_by_line_compare(str(doc), - """ - Foo - - :Parameters: - - **f** : callable ``f(t, y, *f_args)`` - - Aaa. - - **jac** : callable ``jac(t, y, *jac_args)`` - - Bbb. - - .. rubric:: Examples - - For usage examples, see `ode`. - - .. rubric:: Attributes - - .. autosummary:: - :toctree: - - x - - === ========== - t (float) Current time. - y (ndarray) Current variable values. - === ========== - - .. rubric:: Methods - - === ========== - a - b - c - === ========== - - """) - -if __name__ == "__main__": - import nose - nose.run() diff --git a/doc/sphinxext/numpydoc/tests/test_linkcode.py b/doc/sphinxext/numpydoc/tests/test_linkcode.py deleted file mode 100644 index 340166a485..0000000000 --- a/doc/sphinxext/numpydoc/tests/test_linkcode.py +++ /dev/null @@ -1,5 +0,0 @@ -from __future__ import division, absolute_import, print_function - -import numpydoc.linkcode - -# No tests at the moment... diff --git a/doc/sphinxext/numpydoc/tests/test_phantom_import.py b/doc/sphinxext/numpydoc/tests/test_phantom_import.py deleted file mode 100644 index 80fae08f4e..0000000000 --- a/doc/sphinxext/numpydoc/tests/test_phantom_import.py +++ /dev/null @@ -1,12 +0,0 @@ -from __future__ import division, absolute_import, print_function - -import sys -from nose import SkipTest - -def test_import(): - if sys.version_info[0] >= 3: - raise SkipTest("phantom_import not ported to Py3") - - import numpydoc.phantom_import - -# No tests at the moment... diff --git a/doc/sphinxext/numpydoc/tests/test_plot_directive.py b/doc/sphinxext/numpydoc/tests/test_plot_directive.py deleted file mode 100644 index 1ea1076947..0000000000 --- a/doc/sphinxext/numpydoc/tests/test_plot_directive.py +++ /dev/null @@ -1,11 +0,0 @@ -from __future__ import division, absolute_import, print_function - -import sys -from nose import SkipTest - -def test_import(): - if sys.version_info[0] >= 3: - raise SkipTest("plot_directive not ported to Python 3 (use the one from Matplotlib instead)") - import numpydoc.plot_directive - -# No tests at the moment... diff --git a/doc/sphinxext/numpydoc/tests/test_traitsdoc.py b/doc/sphinxext/numpydoc/tests/test_traitsdoc.py deleted file mode 100644 index fe5078c496..0000000000 --- a/doc/sphinxext/numpydoc/tests/test_traitsdoc.py +++ /dev/null @@ -1,11 +0,0 @@ -from __future__ import division, absolute_import, print_function - -import sys -from nose import SkipTest - -def test_import(): - if sys.version_info[0] >= 3: - raise SkipTest("traitsdoc not ported to Python3") - import numpydoc.traitsdoc - -# No tests at the moment... diff --git a/doc/sphinxext/numpydoc/traitsdoc.py b/doc/sphinxext/numpydoc/traitsdoc.py deleted file mode 100644 index 2468565a6c..0000000000 --- a/doc/sphinxext/numpydoc/traitsdoc.py +++ /dev/null @@ -1,143 +0,0 @@ -""" -========= -traitsdoc -========= - -Sphinx extension that handles docstrings in the Numpy standard format, [1] -and support Traits [2]. - -This extension can be used as a replacement for ``numpydoc`` when support -for Traits is required. - -.. [1] http://projects.scipy.org/numpy/wiki/CodingStyleGuidelines#docstring-standard -.. [2] http://code.enthought.com/projects/traits/ - -""" -from __future__ import division, absolute_import, print_function - -import inspect -import os -import pydoc -import collections - -from . import docscrape -from . 
import docscrape_sphinx -from .docscrape_sphinx import SphinxClassDoc, SphinxFunctionDoc, SphinxDocString - -from . import numpydoc - -from . import comment_eater - -class SphinxTraitsDoc(SphinxClassDoc): - def __init__(self, cls, modulename='', func_doc=SphinxFunctionDoc): - if not inspect.isclass(cls): - raise ValueError("Initialise using a class. Got %r" % cls) - self._cls = cls - - if modulename and not modulename.endswith('.'): - modulename += '.' - self._mod = modulename - self._name = cls.__name__ - self._func_doc = func_doc - - docstring = pydoc.getdoc(cls) - docstring = docstring.split('\n') - - # De-indent paragraph - try: - indent = min(len(s) - len(s.lstrip()) for s in docstring - if s.strip()) - except ValueError: - indent = 0 - - for n,line in enumerate(docstring): - docstring[n] = docstring[n][indent:] - - self._doc = docscrape.Reader(docstring) - self._parsed_data = { - 'Signature': '', - 'Summary': '', - 'Description': [], - 'Extended Summary': [], - 'Parameters': [], - 'Returns': [], - 'Yields': [], - 'Raises': [], - 'Warns': [], - 'Other Parameters': [], - 'Traits': [], - 'Methods': [], - 'See Also': [], - 'Notes': [], - 'References': '', - 'Example': '', - 'Examples': '', - 'index': {} - } - - self._parse() - - def _str_summary(self): - return self['Summary'] + [''] - - def _str_extended_summary(self): - return self['Description'] + self['Extended Summary'] + [''] - - def __str__(self, indent=0, func_role="func"): - out = [] - out += self._str_signature() - out += self._str_index() + [''] - out += self._str_summary() - out += self._str_extended_summary() - for param_list in ('Parameters', 'Traits', 'Methods', - 'Returns', 'Yields', 'Raises'): - out += self._str_param_list(param_list) - out += self._str_see_also("obj") - out += self._str_section('Notes') - out += self._str_references() - out += self._str_section('Example') - out += self._str_section('Examples') - out = self._str_indent(out,indent) - return '\n'.join(out) - -def looks_like_issubclass(obj, classname): - """ Return True if the object has a class or superclass with the given class - name. - - Ignores old-style classes. - """ - t = obj - if t.__name__ == classname: - return True - for klass in t.__mro__: - if klass.__name__ == classname: - return True - return False - -def get_doc_object(obj, what=None, config=None): - if what is None: - if inspect.isclass(obj): - what = 'class' - elif inspect.ismodule(obj): - what = 'module' - elif isinstance(obj, collections.Callable): - what = 'function' - else: - what = 'object' - if what == 'class': - doc = SphinxTraitsDoc(obj, '', func_doc=SphinxFunctionDoc, config=config) - if looks_like_issubclass(obj, 'HasTraits'): - for name, trait, comment in comment_eater.get_class_traits(obj): - # Exclude private traits. 
- if not name.startswith('_'): - doc['Traits'].append((name, trait, comment.splitlines())) - return doc - elif what in ('function', 'method'): - return SphinxFunctionDoc(obj, '', config=config) - else: - return SphinxDocString(pydoc.getdoc(obj), config=config) - -def setup(app): - # init numpydoc - numpydoc.setup(app, get_doc_object) - diff --git a/doc/sphinxext/setup.py b/doc/sphinxext/setup.py deleted file mode 100644 index 5b3454b3a7..0000000000 --- a/doc/sphinxext/setup.py +++ /dev/null @@ -1,31 +0,0 @@ -from __future__ import division, print_function - -import setuptools -from distutils.core import setup - -import sys -if sys.version_info[0] >= 3 and sys.version_info[1] < 3 or \ - sys.version_info[0] <= 2 and sys.version_info[1] < 6: - raise RuntimeError("Python version 2.6, 2.7 or >= 3.3 required.") - -version = "0.4.dev" - -setup( - name="numpydoc", - packages=["numpydoc"], - version=version, - description="Sphinx extension to support docstrings in Numpy format", - # classifiers from http://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=["Development Status :: 3 - Alpha", - "Environment :: Plugins", - "License :: OSI Approved :: BSD License", - "Topic :: Documentation"], - keywords="sphinx numpy", - author="Pauli Virtanen and others", - author_email="pav@iki.fi", - url="http://github.com/numpy/numpy/tree/master/doc/sphinxext", - license="BSD", - requires=["sphinx (>= 1.0.1)"], - package_data={'numpydoc': ['tests/test_*.py']}, - test_suite = 'nose.collector', -) diff --git a/examples/plotting/plot_grid_data_overlay.py b/examples/plotting/plot_grid_data_overlay.py deleted file mode 100644 index 74e96a643a..0000000000 --- a/examples/plotting/plot_grid_data_overlay.py +++ /dev/null @@ -1,79 +0,0 @@ -""" -==================================== -Create a grid plot with data overlay -==================================== - -An example which creates a plot of a gridded NEXRAD radar on a map -with latitude and NCEP North American regional reanalysis (NARR) pressure -is plotted on top of the grid. - -""" -print(__doc__) - -# Author Jonathan J. 
Helmus, Cory Weber -# License: BSD 3 clause - -import numpy as np -import matplotlib -import matplotlib.pyplot as plt -from netCDF4 import num2date, date2num, Dataset -import pyart - - -# read in the NEXRAD data, create the display -fname = '20110520100000_nexrad_grid.nc' -grid = pyart.io.read_grid(fname) -display = pyart.graph.GridMapDisplayBasemap(grid) - -# create the figure -font = {'size': 10} -matplotlib.rc('font', **font) -fig = plt.figure(figsize=[10, 8]) - -# Add Basic Title -title = 'Basic Plot with Overlay Example Title' -# Xleft%, ybot% -fig.text(0.5, 0.9, title, horizontalalignment='center', fontsize=24) - -# panel sizes xleft%, ybot%, xright% ,ytop% -map_panel_axes = [0.05, 0.15, 0.9, 0.7] -colorbar_panel_axes = [0.15, 0.09, 0.7, .010] - -# parameters -level = 5 -vmin = -8 -vmax = 64 -lat = 36.5 -lon = -98.5 - -# panel 1, basemap, radar reflectivity and NARR overlay -ax1 = fig.add_axes(map_panel_axes) -display.plot_grid('REF', level=level, vmin=vmin, vmax=vmax, title_flag=False, - colorbar_flag=False) - -# load overlay data -url = 'narr-a_221_20110520_0000_000.nc' -data = Dataset(url) - -# extract data at correct time -grid_date = num2date(grid.time['data'], grid.time['units'])[0] -data_time = data.variables['time'] -t_idx = abs(data_time[:] - date2num(grid_date, data_time.units)).argmin() -prmsl = 0.01 * data.variables['prmsl'][t_idx] - -# plot the reanalysis on the basemap -lons, lats = np.meshgrid(data.variables['lon'], data.variables['lat'][:]) - -x, y = display.basemap(lons, lats) -clevs = np.arange(900, 1100., 1.) - -display.basemap.contour(x, y, prmsl, clevs, colors='k', linewidths=1.) - -# colorbar -cbax = fig.add_axes(colorbar_panel_axes) -display.plot_colorbar(cax=cbax) - -# enable below to add cross hairs -# display.plot_crosshairs(lon=lon, lat=lat) - -plt.show() diff --git a/examples/plotting/plot_grid_three_panel.py b/examples/plotting/plot_grid_three_panel.py deleted file mode 100644 index 651ebfef52..0000000000 --- a/examples/plotting/plot_grid_three_panel.py +++ /dev/null @@ -1,102 +0,0 @@ -""" -============================== -Create a three panel grid plot -============================== - -An example which creates a three-panel plot of a gridded NEXRAD radar on a map -with latitude and longitude slices of the reflectivity. The NCEP -North American regional reanalysis (NARR) pressure is plotted on top of the -grid. - -""" -print(__doc__) - -# Author Jonathan J. 
Helmus -# License: BSD 3 clause - -import numpy as np -import matplotlib -import matplotlib.pyplot as plt -from netCDF4 import num2date, date2num, Dataset -import pyart - - -# read in the NEXRAD data, create the display -fname = '20110520100000_nexrad_grid.nc' -grid = pyart.io.read_grid(fname) -display = pyart.graph.GridMapDisplayBasemap(grid) - -# create the figure -font = {'size': 16} -matplotlib.rc('font', **font) -fig = plt.figure(figsize=[15, 8]) - -# panel sizes -map_panel_axes = [0.05, 0.05, .4, .80] -x_cut_panel_axes = [0.55, 0.10, .4, .30] -y_cut_panel_axes = [0.55, 0.50, .4, .30] -colorbar_panel_axes = [0.05, 0.90, .4, .03] - -# parameters -level = 5 -vmin = -8 -vmax = 64 -lat = 36.5 -lon = -98.5 - -# panel 1, basemap, radar reflectivity and NARR overlay -ax1 = fig.add_axes(map_panel_axes) -display.plot_basemap() -display.plot_grid('REF', level=level, vmin=vmin, vmax=vmax, title_flag=False, - colorbar_flag=False) -display.plot_crosshairs(lon=lon, lat=lat) - -# fetch NCEP NARR data -grid_date = num2date(grid.time['data'], grid.time['units'])[0] -y_m_d = grid_date.strftime('%Y%m%d') -y_m = grid_date.strftime('%Y%m') -url = ('http://nomads.ncdc.noaa.gov/dods/NCEP_NARR_DAILY/' + y_m + '/' + - y_m_d + '/narr-a_221_' + y_m_d + '_0000_000') -# Use a local copy of the online NCEP NARR data, this file can be created with -# the command: -# nccopy http://nomads.ncdc.noaa.gov/dods/NCEP_NARR_DAILY/201105/20110520/narr-a_221_20110520_0000_000?lon,lat,time,prmsl narr-a_221_20110520_0000_000.nc -# comment out the next line to retrieve the data from the OPeNDAP server. -url = 'narr-a_221_20110520_0000_000.nc' -data = Dataset(url) - -# extract data at correct time -data_time = data.variables['time'] -t_idx = abs(data_time[:] - date2num(grid_date, data_time.units)).argmin() -prmsl = 0.01 * data.variables['prmsl'][t_idx] - -# plot the reanalysis on the basemap -lons, lats = np.meshgrid(data.variables['lon'], data.variables['lat'][:]) -x, y = display.basemap(lons, lats) -clevs = np.arange(900, 1100., 1.) -display.basemap.contour(x, y, prmsl, clevs, colors='k', linewidths=1.) - -# colorbar -cbax = fig.add_axes(colorbar_panel_axes) -display.plot_colorbar(cax=cbax) - -# panel 2, longitude slice. 
-ax2 = fig.add_axes(x_cut_panel_axes) -display.plot_longitude_slice( - 'REF', lon=lon, lat=lat, vmin=vmin, vmax=vmax, title_flag=False, - colorbar_flag=False, edges=False, - axislabels=('Distance from SGP CF (km)', 'Height (km)')) - -# panel 3, latitude slice -ax3 = fig.add_axes(y_cut_panel_axes) -display.plot_latitude_slice( - 'REF', lon=lon, lat=lat, vmin=vmin, vmax=vmax, title_flag=False, - colorbar_flag=False, edges=False, - axislabels=('', 'Height (km)')) - -# add a title -slc_height = grid.z['data'][level] -dts = num2date(grid.time['data'], grid.time['units']) -datestr = dts[0].strftime('%H:%M Z on %Y-%m-%d') -title = 'Sliced at ' + str(slc_height) + ' meters at ' + datestr -fig.text(0.5, 0.9, title) -plt.show() diff --git a/examples/plotting/plot_ppi_basemap_with_rings.py b/examples/plotting/plot_ppi_basemap_with_rings.py deleted file mode 100644 index 96cea1aa27..0000000000 --- a/examples/plotting/plot_ppi_basemap_with_rings.py +++ /dev/null @@ -1,47 +0,0 @@ -""" -============================== -Create a PPI plot on a basemap -============================== - -An example which creates a PPI plot of a file with a basemap background -and range rings - -""" -print(__doc__) - -# Author: Scott Collis (scollis@anl.gov) -# License: BSD 3 clause - -import numpy as np -import matplotlib.pyplot as plt -import pyart - -# read in the file, create a RadarMapDisplayBasemap object -filename = 'nsaxsaprppiC1.a1.20140201.184802.nc' -radar = pyart.io.read(filename) -display = pyart.graph.RadarMapDisplayBasemap(radar) - -# plot the second tilt -display.plot_ppi_map('reflectivity_horizontal', 1, vmin=-20, vmax=20, - min_lon=-157.1, max_lon=-156, min_lat=71.2, max_lat=71.6, - lon_lines=np.arange(-158, -154, .2), projection='lcc', - lat_lines=np.arange(69, 72, .1), resolution='h', - lat_0=radar.latitude['data'][0], - lon_0=radar.longitude['data'][0]) - -# plot range rings at 10, 20, 30 and 40km -display.plot_range_ring(10., line_style='k-') -display.plot_range_ring(20., line_style='k--') -display.plot_range_ring(30., line_style='k-') -display.plot_range_ring(40., line_style='k--') - -# plots cross hairs -display.plot_line_xy(np.array([-40000.0, 40000.0]), np.array([0.0, 0.0]), - line_style='k-') -display.plot_line_xy(np.array([0.0, 0.0]), np.array([-20000.0, 200000.0]), - line_style='k-') - -# Indicate the radar location with a point -display.plot_point(radar.longitude['data'][0], radar.latitude['data'][0]) - -plt.show() diff --git a/examples/plotting/plot_ppi_cartopy_with_rings.py b/examples/plotting/plot_ppi_with_rings.py similarity index 100% rename from examples/plotting/plot_ppi_cartopy_with_rings.py rename to examples/plotting/plot_ppi_with_rings.py diff --git a/pyart/aux_io/__init__.py b/pyart/aux_io/__init__.py index b9e8ced995..f2bda8524e 100644 --- a/pyart/aux_io/__init__.py +++ b/pyart/aux_io/__init__.py @@ -1,15 +1,9 @@ """ -================================================ -Auxiliary input and output (:mod:`pyart.aux_io`) -================================================ - -.. currentmodule:: pyart.aux_io - Additional classes and functions for reading and writing data from a number of file formats. These auxiliary input/output routines are not as well polished as those in -:mod:`pyart.io`. They may require addition dependencies beyond those required +:mod:`pyart.io`. 
They may require addition dependencies beyond those required for a standard Py-ART install, use non-standard function parameter and naming, are not supported by the :py:func:`pyart.io.read` function and are not fully tested if tested at all. Please use these at your own risk. @@ -17,21 +11,6 @@ Bugs in these function should be reported but fixing them may not be a priority. -Reading radar data -================== - -.. autosummary:: - :toctree: generated/ - - read_d3r_gcpex_nc - read_gamic - read_kazr - read_noxp_iphex_nc - read_odim_h5 - read_pattern - read_radx - read_rainbow_wrl - """ from .pattern import read_pattern diff --git a/pyart/aux_io/arm_vpt.py b/pyart/aux_io/arm_vpt.py index 5d59bd787d..841d97a3c4 100644 --- a/pyart/aux_io/arm_vpt.py +++ b/pyart/aux_io/arm_vpt.py @@ -1,7 +1,4 @@ """ -pyart.aux_io.arm_vpt -==================== - Routines for reading ARM vertically-pointing radar ingest (e.g., a1) files. These files are characterized by being NetCDF files that do not fully conform to the CF/Radial convention. Nonetheless this module borrows heavily from the diff --git a/pyart/aux_io/d3r_gcpex_nc.py b/pyart/aux_io/d3r_gcpex_nc.py index 345d9333a9..c52592d842 100644 --- a/pyart/aux_io/d3r_gcpex_nc.py +++ b/pyart/aux_io/d3r_gcpex_nc.py @@ -1,15 +1,6 @@ """ -pyart.aux_io.d3r_gcpex_nc -========================= - Routines for reading GCPEX D3R files. -.. autosummary:: - :toctree: generated/ - - read_d3r_gcpex_nc - _ncvar_to_dict - """ import datetime diff --git a/pyart/aux_io/edge_netcdf.py b/pyart/aux_io/edge_netcdf.py index 37f13bda6a..0289b97614 100644 --- a/pyart/aux_io/edge_netcdf.py +++ b/pyart/aux_io/edge_netcdf.py @@ -1,14 +1,6 @@ """ -pyart.aux_io.edge_necdf -======================= - Utilities for reading EDGE NetCDF files. -.. autosummary:: - :toctree: generated/ - - read_edge_netcdf - """ import datetime diff --git a/pyart/aux_io/gamic_hdf5.py b/pyart/aux_io/gamic_hdf5.py index 2a9406f705..91bab53ce7 100644 --- a/pyart/aux_io/gamic_hdf5.py +++ b/pyart/aux_io/gamic_hdf5.py @@ -1,17 +1,6 @@ """ -pyart.aux_io.read_gamic -======================= - Utilities for reading gamic hdf5 files. -.. autosummary:: - :toctree: generated/ - - read_gamic - _get_instrument_params - _avg_radial_angles - _prt_mode_from_unfolding - """ # TODO to move out of aux_io namespace: diff --git a/pyart/aux_io/gamicfile.py b/pyart/aux_io/gamicfile.py index 975ef15a9b..5541f70a86 100644 --- a/pyart/aux_io/gamicfile.py +++ b/pyart/aux_io/gamicfile.py @@ -1,21 +1,6 @@ """ -pyart.aux_io.gamicfile -====================== - GAMICFile class and utility functions. -.. autosummary:: - :toctree: generated/ - :template: dev_template.rst - - GAMICFile - -.. autosummary:: - :toctree: generated/ - - _get_gamic_sweep_data - - """ import numpy as np diff --git a/pyart/aux_io/noxp_iphex_nc.py b/pyart/aux_io/noxp_iphex_nc.py index 8c023b970e..4a45f563fa 100644 --- a/pyart/aux_io/noxp_iphex_nc.py +++ b/pyart/aux_io/noxp_iphex_nc.py @@ -1,15 +1,6 @@ """ -pyart.aux_io.noxp_iphex_nc -========================== - Routines for reading IPHEx NOXP files. -.. autosummary:: - :toctree: generated/ - - read_noxp_iphex_nc - _ncvar_to_dict - """ import datetime diff --git a/pyart/aux_io/odim_h5.py b/pyart/aux_io/odim_h5.py index 2c37e5e70e..d9c05a6ca7 100644 --- a/pyart/aux_io/odim_h5.py +++ b/pyart/aux_io/odim_h5.py @@ -1,16 +1,6 @@ """ -pyart.aux_io.odim_h5 -==================== - Routines for reading ODIM_H5 files. -.. 
autosummary:: - :toctree: generated/ - - read_odim_h5 - _to_str - _get_odim_h5_sweep_data - """ import datetime diff --git a/pyart/aux_io/pattern.py b/pyart/aux_io/pattern.py index 6b21f81fc3..7f7717efbc 100644 --- a/pyart/aux_io/pattern.py +++ b/pyart/aux_io/pattern.py @@ -1,17 +1,8 @@ """ -pyart.aux_io.pattern -==================== - Routines for reading files from the X-band radar from the PATTERN_ project. .. _PATTERN: http://www.mi.uni-hamburg.de/PATTERN-Pre.6763.0.html - -.. autosummary:: - :toctree: generated/ - - read_pattern - """ import datetime diff --git a/pyart/aux_io/radx.py b/pyart/aux_io/radx.py index 5c24bd5467..d4867a2f43 100644 --- a/pyart/aux_io/radx.py +++ b/pyart/aux_io/radx.py @@ -1,14 +1,6 @@ """ -pyart.aux_io.radx -================= - Reading files using Radx to first convert the file to Cf.Radial format -.. autosummary:: - :toctree: generated/ - - read_radx - """ import os diff --git a/pyart/aux_io/rainbow_wrl.py b/pyart/aux_io/rainbow_wrl.py index 8ce15f7ac1..e701b4e478 100755 --- a/pyart/aux_io/rainbow_wrl.py +++ b/pyart/aux_io/rainbow_wrl.py @@ -1,17 +1,6 @@ """ -pyart.aux_io.rainbow -==================== - Routines for reading RAINBOW files (Used by SELEX) using the wradlib library -.. autosummary:: - :toctree: generated/ - - read_rainbow_wrl - _get_angle - _get_data - _get_time - """ # specific modules for this function @@ -126,7 +115,6 @@ def read_rainbow_wrl(filename, field_names=None, additional_metadata=None, after the `file_field_names` and `field_names` parameters. Set to None to include all fields not specified by exclude_fields. - Returns ------- radar : Radar diff --git a/pyart/aux_io/sinarame_h5.py b/pyart/aux_io/sinarame_h5.py index 719d69a450..6430f5a7c2 100644 --- a/pyart/aux_io/sinarame_h5.py +++ b/pyart/aux_io/sinarame_h5.py @@ -1,17 +1,6 @@ """ -pyart.aux_io.sinarame_h5 -======================== - Routines for reading sinarame_H5 files. -.. autosummary:: - :toctree: generated/ - - read_sinarame_h5 - write_sinarame_cfradial - _to_str - _get_SINARAME_h5_sweep_data - """ from datetime import datetime diff --git a/pyart/bridge/__init__.py b/pyart/bridge/__init__.py index 4dd30e2011..53d489f4d9 100644 --- a/pyart/bridge/__init__.py +++ b/pyart/bridge/__init__.py @@ -1,21 +1,12 @@ """ -================================================ -Bridging to other toolkits (:mod:`pyart.bridge`) -================================================ - -.. currentmodule:: pyart.bridge - Py-ART can act as bridge to other community software projects. The functionality in this namespace is available in other pyart namespaces. -Phase functions -=============== +Current extensions: + * wradlib https://wradlib.org/ -.. autosummary:: - :toctree: generated/ - texture_of_complex_phase """ diff --git a/pyart/bridge/wradlib_bridge.py b/pyart/bridge/wradlib_bridge.py index 7b9e9392a8..274bfd04c2 100644 --- a/pyart/bridge/wradlib_bridge.py +++ b/pyart/bridge/wradlib_bridge.py @@ -1,14 +1,6 @@ """ -pyart.bridge.wradlib -==================== - Py-ART methods linking to wradlib functions, http://wradlib.org/ -.. autosummary:: - :toctree: generated/ - - texture_of_complex_phase - """ try: diff --git a/pyart/config.py b/pyart/config.py index 63e122bca9..e07a729ec1 100644 --- a/pyart/config.py +++ b/pyart/config.py @@ -1,21 +1,6 @@ """ -pyart.config -============ - Py-ART configuration. -.. 
autosummary:: - :toctree: generated/ - - load_config - get_metadata - get_fillvalue - get_field_name - get_field_colormap - get_field_limits - get_field_mapping - FileMetadata - """ import os diff --git a/pyart/core/__init__.py b/pyart/core/__init__.py index ab86f23957..14836a943d 100644 --- a/pyart/core/__init__.py +++ b/pyart/core/__init__.py @@ -1,36 +1,6 @@ """ -======================== -Core (:mod:`pyart.core`) -======================== - -.. currentmodule:: pyart.core - Core Py-ART classes and function for interacting with weather radar data. -Core classes -============ - -.. autosummary:: - :toctree: generated/ - - Radar - Grid - HorizontalWindProfile - -Coordinate transformations -========================== - -.. autosummary:: - :toctree: generated/ - - antenna_to_cartesian - antenna_vectors_to_cartesian - cartesian_to_geographic - cartesian_vectors_to_geographic - cartesian_to_geographic_aeqd - geographic_to_cartesian - geographic_to_cartesian_aeqd - """ from .radar import Radar diff --git a/pyart/core/grid.py b/pyart/core/grid.py index f17b00712e..7f500ddb6e 100644 --- a/pyart/core/grid.py +++ b/pyart/core/grid.py @@ -1,22 +1,6 @@ """ -pyart.core.grid -=============== - An class for holding gridded Radar data. -.. autosummary:: - :toctree: generated/ - :template: dev_template.rst - - Grid - -.. autosummary:: - :toctree: generated/ - - _point_data_factory - _point_lon_lat_data_factory - _point_altitude_data_factory - """ import numpy as np diff --git a/pyart/core/radar.py b/pyart/core/radar.py index ddcd0d693a..4d99a83bd7 100644 --- a/pyart/core/radar.py +++ b/pyart/core/radar.py @@ -1,24 +1,6 @@ """ -pyart.core.radar -================ - A general central radial scanning (or dwelling) instrument class. -.. autosummary:: - :toctree: generated/ - - _rays_per_sweep_data_factory - _gate_data_factory - _gate_lon_lat_data_factory - _gate_altitude_data_factory - -.. autosummary:: - :toctree: generated/ - :template: dev_template.rst - - Radar - - """ import copy diff --git a/pyart/core/transforms.py b/pyart/core/transforms.py index 20d20ea725..046e9de8e0 100644 --- a/pyart/core/transforms.py +++ b/pyart/core/transforms.py @@ -1,33 +1,8 @@ """ -pyart.core.transforms -===================== - Transformations between coordinate systems. Routines for converting between Cartesian/Cartographic (x, y, z), Geographic (latitude, longitude, altitude) and antenna (azimuth, elevation, range) coordinate systems. -.. autosummary:: - :toctree: generated/ - - antenna_to_cartesian - antenna_vectors_to_cartesian - antenna_to_cartesian_track_relative - antenna_to_cartesian_earth_relative - antenna_to_cartesian_aircraft_relative - - cartesian_to_geographic - cartesian_vectors_to_geographic - geographic_to_cartesian - cartesian_to_geographic_aeqd - geographic_to_cartesian_aeqd - - _interpolate_axes_edges - _interpolate_azimuth_edges - _interpolate_elevation_edges - _interpolate_range_edges - _half_angle_complex - - """ import warnings @@ -413,7 +388,7 @@ def geographic_to_cartesian_aeqd(lon, lat, lon_0, lat_0, R=6370997.): Transform a set of geographic coordinates (lat, lon) to Cartesian/Cartographic coordinates (x, y) using a azimuthal equidistant - map projection [1]. + map projection [1]_. .. math:: @@ -580,7 +555,7 @@ def cartesian_to_geographic_aeqd(x, y, lon_0, lat_0, R=6370997.): Transform a set of Cartesian/Cartographic coordinates (x, y) to geographic coordinate system (lat, lon) using a azimuthal equidistant - map projection [1]. + map projection [1]_. .. 
math:: diff --git a/pyart/core/wind_profile.py b/pyart/core/wind_profile.py index ee032649ca..7a0d88f9a9 100644 --- a/pyart/core/wind_profile.py +++ b/pyart/core/wind_profile.py @@ -1,14 +1,6 @@ """ -pyart.core.wind_profile -======================= - Storage of wind profiles. -.. autosummary:: - :toctree: generated/ - - HorizontalWindProfile - """ diff --git a/pyart/correct/__init__.py b/pyart/correct/__init__.py index 7ecaf6678d..3ff2846449 100644 --- a/pyart/correct/__init__.py +++ b/pyart/correct/__init__.py @@ -1,45 +1,6 @@ """ -======================================== -Radar Corrections (:mod:`pyart.correct`) -======================================== - -.. currentmodule:: pyart.correct - Correct radar fields. -Velocity unfolding -================== - -.. autosummary:: - :toctree: generated/ - - dealias_fourdd - dealias_unwrap_phase - dealias_region_based - -Other corrections -================= - -.. autosummary:: - :toctree: generated/ - - calculate_attenuation - calculate_attenuation_zphi - calculate_attenuation_philinear - phase_proc_lp - despeckle_field - correct_noise_rhohv - correct_bias - phase_proc_lp_gf - -Helper functions -================ - -.. autosummary:: - :toctree: generated/ - - find_objects - """ from .dealias import dealias_fourdd diff --git a/pyart/correct/_common_dealias.py b/pyart/correct/_common_dealias.py index 943ef2bf57..e60f5556ce 100644 --- a/pyart/correct/_common_dealias.py +++ b/pyart/correct/_common_dealias.py @@ -1,18 +1,6 @@ """ -pyart.correct._common_dealias -============================= - Routines used by multiple dealiasing functions. -.. autosummary:: - :toctree: generated/ - - _parse_fields - _parse_nyquist_vel - _parse_gatefilter - _parse_rays_wrap_around - _set_limits - """ import numpy as np diff --git a/pyart/correct/_fast_edge_finder.pyx b/pyart/correct/_fast_edge_finder.pyx index 3fd104251a..e108dd15a2 100644 --- a/pyart/correct/_fast_edge_finder.pyx +++ b/pyart/correct/_fast_edge_finder.pyx @@ -1,14 +1,6 @@ """ -pyart.correct._fast_edge_finder -=============================== - Cython routine for quickly finding edges between connected regions. -.. autosummary:: - :toctree: generated/ - - _fast_edge_finder - """ import numpy as np diff --git a/pyart/correct/_fourdd_interface.pyx b/pyart/correct/_fourdd_interface.pyx index f03cfab58d..0b0e51cb4c 100644 --- a/pyart/correct/_fourdd_interface.pyx +++ b/pyart/correct/_fourdd_interface.pyx @@ -1,15 +1,6 @@ """ -pyart.correct._fourdd_interface -=============================== - Cython wrapper around the University of Washington FourDD algorithm. -.. autosummary:: - :toctree: generated/ - - create_soundvolume - fourdd_dealias - """ cimport _fourdd_h @@ -33,20 +24,20 @@ cpdef create_soundvolume(radialVelVolume, Radial velocities which will be dealiased, shape used to create soundvolume. hc : ndarray - Sounding heights in meters. Must be a contiguous one-dimensional + Sounding heights in meters. Must be a contiguous one-dimensional float32 array. sc : ndarray - Sounding wind speed in m/s. Must be a contiguous one-dimensional + Sounding wind speed in m/s. Must be a contiguous one-dimensional float32 array. dc : ndarray - Sounding wind direction in degrees. Must be a contiguous + Sounding wind direction in degrees. Must be a contiguous one-dimensional float32 array. maxshear : float Maximum vertical shear which will be incorperated into the created volume. sign : int Sign convention which the radial velocities in the created volume - will follow. 
A value of 1 represents when positive values + will follow. A value of 1 represents when positive values velocities are towards the radar, -1 represents when negative velocities are towards the radar. @@ -96,7 +87,7 @@ cpdef fourdd_dealias( results, this radar should represent the previous volume scan in time. If the last velocity volume is unavailable, set this to None. soundVolume : _RslVolume or None - Volume created from sounding data. If unavailable, set this to None. + Volume created from sounding data. If unavailable, set this to None. soundVolume and lastVelVolume cannot both be None. filt : int Flag controlling Bergen and Albers filter, 1 = yes, 0 = no. @@ -105,16 +96,16 @@ cpdef fourdd_dealias( ---------------- compthresh : float Fraction of the Nyquist velocity to use as a threshold when performing - continity (initial) dealiasing. Velocities differences above this + continity (initial) dealiasing. Velocities differences above this threshold will not be marked as gate from which to begin unfolding during spatial dealiasing. compthresh2 : float The same as compthresh but the value used during the second pass of - dealasing. This second pass is only performed in both a sounding + dealasing. This second pass is only performed in both a sounding and last volume are provided. thresh : float Fraction of the Nyquist velocity to use as a threshold when performing - spatial dealiasing. Horizontally adjacent gates with velocities above + spatial dealiasing. Horizontally adjacent gates with velocities above this theshold will count against assigning the gate in question the velocity value being tested. ckval : float @@ -135,13 +126,13 @@ cpdef fourdd_dealias( both a sounding volume and last volume are provided. rm : int Determines what should be done with gates that are left unfolded - after the first pass of dealiasing. A value of 1 will remove these + after the first pass of dealiasing. A value of 1 will remove these gates, a value of 0 sets these gates to their initial velocity. If both a sounding volume and last volume are provided this parameter is ignored. proximity : int Number of gates and rays to include of either side of the current gate - during window dealiasing. This value may be doubled in cases where + during window dealiasing. This value may be doubled in cases where a standard sized window does not capture a sufficient number of good valued gates. mingood : int diff --git a/pyart/correct/_unwrap_1d.pyx b/pyart/correct/_unwrap_1d.pyx index be433b4165..9e0a2a1d8b 100644 --- a/pyart/correct/_unwrap_1d.pyx +++ b/pyart/correct/_unwrap_1d.pyx @@ -2,22 +2,12 @@ #cython: boundscheck=False #cython: nonecheck=False #cython: wraparound=False -""" -pyart.correct._unwrap_1d -======================== - -.. autosummary:: - :toctree: generated/ - - unwrap_1d - -""" from libc.math cimport M_PI def unwrap_1d(double[::1] image, double[::1] unwrapped_image): - '''Phase unwrapping using the naive approach.''' + """ Phase unwrapping using the naive approach. """ cdef: Py_ssize_t i double difference diff --git a/pyart/correct/_unwrap_2d.pyx b/pyart/correct/_unwrap_2d.pyx index ffe3dcf6d8..8e8fa1023a 100644 --- a/pyart/correct/_unwrap_2d.pyx +++ b/pyart/correct/_unwrap_2d.pyx @@ -1,14 +1,3 @@ -""" -pyart.correct._unwrap_2d -======================== - -.. 
autosummary:: - :toctree: generated/ - - unwrap_2d - -""" - cdef extern void unwrap2D(double* wrapped_image, double* unwrapped_image, unsigned char* input_mask, @@ -19,7 +8,7 @@ def unwrap_2d(double[:, ::1] image, unsigned char[:, ::1] mask, double[:, ::1] unwrapped_image, wrap_around): - """ 2D phase unwrapping. """ + # 2D phase unwrapping. unwrap2D(&image[0, 0], &unwrapped_image[0, 0], &mask[0, 0], diff --git a/pyart/correct/_unwrap_3d.pyx b/pyart/correct/_unwrap_3d.pyx index 58915a7f03..9d5ee8302b 100644 --- a/pyart/correct/_unwrap_3d.pyx +++ b/pyart/correct/_unwrap_3d.pyx @@ -1,14 +1,3 @@ -""" -pyart.correct._unwrap_3d -======================== - -.. autosummary:: - :toctree: generated/ - - unwrap_3d - -""" - cdef extern void unwrap3D(double* wrapped_volume, double* unwrapped_volume, unsigned char* input_mask, @@ -19,7 +8,7 @@ def unwrap_3d(double[:, :, ::1] image, unsigned char[:, :, ::1] mask, double[:, :, ::1] unwrapped_image, wrap_around): - """ 3D phase unwrapping. """ + # 3D phase unwrapping. unwrap3D(&image[0, 0, 0], &unwrapped_image[0, 0, 0], &mask[0, 0, 0], diff --git a/pyart/correct/attenuation.py b/pyart/correct/attenuation.py index fd4084e5de..4e8fda5264 100644 --- a/pyart/correct/attenuation.py +++ b/pyart/correct/attenuation.py @@ -1,25 +1,9 @@ """ -pyart.correct.attenuation -========================= - Attenuation correction from polarimetric radars. Code adapted from method in Gu et al, JAMC 2011, 50, 39. Adapted by Scott Collis and Scott Giangrande, refactored by Jonathan Helmus. New code added by Meteo Swiss and inserted into Py-ART by Robert Jackson. -.. autosummary:: - :toctree: generated/ - - calculate_attenuation - calculate_attenuation_zphi - calculate_attenuation_philinear - get_mask_fzl - _prepare_phidp - _get_param_attzphi - _param_attzphi_table - _get_param_attphilinear - _param_attphilinear_table - """ from copy import deepcopy diff --git a/pyart/correct/bias_and_noise.py b/pyart/correct/bias_and_noise.py index 8b3bc10b4f..1ea875f40e 100755 --- a/pyart/correct/bias_and_noise.py +++ b/pyart/correct/bias_and_noise.py @@ -1,15 +1,6 @@ """ -pyart.correct.bias_and_noise -=================== - Corrects polarimetric variables for noise -.. autosummary:: - :toctree: generated/ - - correct_noise_rhohv - correct_bias - """ import numpy as np diff --git a/pyart/correct/dealias.py b/pyart/correct/dealias.py index 3448b5aae0..8896ef41c2 100644 --- a/pyart/correct/dealias.py +++ b/pyart/correct/dealias.py @@ -1,15 +1,6 @@ """ -pyart.correct.dealias -===================== - Front end to the University of Washington 4DD code for Doppler dealiasing. -.. autosummary:: - :toctree: generated/ - - dealias_fourdd - _create_rsl_volume - """ import numpy as np diff --git a/pyart/correct/despeckle.py b/pyart/correct/despeckle.py index c58b3adae3..9b2ce499ba 100644 --- a/pyart/correct/despeckle.py +++ b/pyart/correct/despeckle.py @@ -1,24 +1,6 @@ """ -pyart.correct.despeckle -======================= - Find contiguous objects in scans and despeckle away ones that are too small. -.. 
autosummary:: - :toctree: generated/ - - despeckle_field - find_objects - _adjust_for_periodic_boundary - _append_labels - _check_for_360 - _check_sweeps - _check_threshold - _generate_dict - _get_data - _get_labels - _smooth_data - """ import numpy as np diff --git a/pyart/correct/phase_proc.py b/pyart/correct/phase_proc.py index 759e4f612b..ea3dcc581a 100644 --- a/pyart/correct/phase_proc.py +++ b/pyart/correct/phase_proc.py @@ -1,7 +1,4 @@ """ -pyart.correct.phase_proc -======================== - Utilities for working with phase data. Code based upon algorithm descriped in: @@ -9,33 +6,6 @@ Adapted by Scott Collis and Scott Giangrande, refactored by Jonathan Helmus. -.. autosummary:: - :toctree: generated/ - - det_sys_phase - _det_sys_phase - fzl_index - det_process_range - snr - unwrap_masked - smooth_masked - smooth_and_trim - smooth_and_trim_scan - noise - get_phidp_unf - construct_A_matrix - construct_B_vectors - LP_solver_cvxopt - LP_solver_pyglpk - solve_cylp - LP_solver_cylp_mp - LP_solver_cylp - phase_proc_lp - phase_proc_lp_gf - get_phidp_unf_gf - det_sys_phase_gf - _det_sys_phase_gf - """ import copy diff --git a/pyart/correct/region_dealias.py b/pyart/correct/region_dealias.py index d69e10a36b..8cf472f26d 100644 --- a/pyart/correct/region_dealias.py +++ b/pyart/correct/region_dealias.py @@ -1,27 +1,6 @@ """ -pyart.correct.region_dealias -============================ - Region based dealiasing using a dynamic network reduction for region joining. -.. autosummary:: - :toctree: generated/ - - dealias_region_based - _find_regions - _find_sweep_interval_splits - _combine_regions - _edge_sum_and_count - _cost_function - _gradient - -.. autosummary:: - :toctree: generated/ - :template: dev_template.rst - - _RegionTracker - _EdgeTracker - """ import warnings diff --git a/pyart/correct/unwrap.py b/pyart/correct/unwrap.py index e2f1fe587f..25fb9511a4 100644 --- a/pyart/correct/unwrap.py +++ b/pyart/correct/unwrap.py @@ -1,22 +1,6 @@ """ -pyart.correct.unwrap -==================== - Dealias using multidimensional phase unwrapping algorithms. -.. autosummary:: - :toctree: generated/ - - dealias_unwrap_phase - _dealias_unwrap_3d - _dealias_unwrap_2d - _dealias_unwrap_1d - _verify_unwrap_unit - _is_radar_cubic - _is_radar_sweep_aligned - _is_radar_sequential - _is_sweep_sequential - """ import numpy as np @@ -36,7 +20,8 @@ def dealias_unwrap_phase( rays_wrap_around=None, keep_original=False, set_limits=True, vel_field=None, corr_vel_field=None, skip_checks=False, **kwargs): """ - Dealias Doppler velocities using multi-dimensional phase unwrapping. + Dealias Doppler velocities using multi-dimensional phase unwrapping + [1]_ and [2]_. Parameters ---------- @@ -101,14 +86,14 @@ def dealias_unwrap_phase( References ---------- .. [1] Miguel Arevallilo Herraez, David R. Burton, Michael J. Lalor, - and Munther A. Gdeisat, "Fast two-dimensional phase-unwrapping - algorithm based on sorting by reliability following a noncontinuous - path", Journal Applied Optics, Vol. 41, No. 35 (2002) 7437, + and Munther A. Gdeisat, "Fast two-dimensional phase-unwrapping + algorithm based on sorting by reliability following a noncontinuous + path", Journal Applied Optics, Vol. 41, No. 35 (2002) 7437, .. [2] Abdul-Rahman, H., Gdeisat, M., Burton, D., & Lalor, M., "Fast - three-dimensional phase-unwrapping algorithm based on sorting by - reliability following a non-continuous path. In W. Osten, - C. Gorecki, & E. L. Novak (Eds.), Optical Metrology (2005) 32--40, - International Society for Optics and Photonics. 
+ three-dimensional phase-unwrapping algorithm based on sorting by + reliability following a non-continuous path. In W. Osten, + C. Gorecki, & E. L. Novak (Eds.), Optical Metrology (2005) 32--40, + International Society for Optics and Photonics. """ vel_field, corr_vel_field = _parse_fields(vel_field, corr_vel_field) diff --git a/pyart/exceptions.py b/pyart/exceptions.py index 510b337d9e..d5e1ac6bbb 100644 --- a/pyart/exceptions.py +++ b/pyart/exceptions.py @@ -1,17 +1,6 @@ """ -pyart.exceptions -================ - Custom Py-ART exceptions. -.. autosummary:: - :toctree: generated/ - - MissingOptionalDependency - DeprecatedAttribute - DeprecatedFunctionName - _deprecated_alias - """ import warnings diff --git a/pyart/filters/__init__.py b/pyart/filters/__init__.py index 55893eea34..644eed6c03 100644 --- a/pyart/filters/__init__.py +++ b/pyart/filters/__init__.py @@ -1,25 +1,6 @@ """ -============================== -Filters (:mod:`pyart.filters`) -============================== - -.. currentmodule:: pyart.filters - Classes for specifying what gates are included and excluded from routines. -Filtering radar data -==================== - -.. autosummary:: - :toctree: generated/ - - GateFilter - moment_based_gate_filter - moment_and_texture_based_gate_filter - temp_based_gate_filter - iso0_based_gate_filter - - """ from .gatefilter import GateFilter, moment_based_gate_filter diff --git a/pyart/filters/gatefilter.py b/pyart/filters/gatefilter.py index d66977bc05..9479f7b0a9 100644 --- a/pyart/filters/gatefilter.py +++ b/pyart/filters/gatefilter.py @@ -1,24 +1,7 @@ """ -pyart.correct.filters -===================== - Functions for creating gate filters (masks) which can be used it various corrections routines in Py-ART. -.. autosummary:: - :toctree: generated/ - - moment_based_gate_filter - moment_and_texture_based_gate_filter - temp_based_gate_filter - iso0_based_gate_filter - -.. autosummary:: - :toctree: generated/ - :template: dev_template.rst - - GateFilter - """ from copy import deepcopy @@ -457,23 +440,6 @@ class GateFilter(object): conditions. False will begin with all gates excluded from which a set of gates to include should be set using the include methods. - Attributes - ---------- - gate_excluded : array, dtype=bool - Boolean array indicating if a gate should be excluded from a - calculation. Elements marked True indicate the corresponding gate - should be excluded. Those marked False should be included. - This is read-only attribute, any changes to the array will NOT - be reflected in gate_included and will be lost when the attribute is - accessed again. - gate_included : array, dtype=bool - Boolean array indicating if a gate should be included in a - calculation. Elements marked True indicate the corresponding gate - should be include. Those marked False should be excluded. - This is read-only attribute, any changes to the array will NOT - be reflected in gate_excluded and will be lost when the attribute is - accessed again. - Examples -------- >>> import pyart @@ -509,10 +475,26 @@ def copy(self): @property def gate_included(self): + """ + Boolean array indicating if a gate should be included in a + calculation. Elements marked True indicate the corresponding gate + should be include. Those marked False should be excluded. + This is read-only attribute, any changes to the array will NOT + be reflected in gate_excluded and will be lost when the attribute is + accessed again. 
+ """ return ~self._gate_excluded.copy() @property def gate_excluded(self): + """ + Boolean array indicating if a gate should be excluded from a + calculation. Elements marked True indicate the corresponding gate + should be excluded. Those marked False should be included. + This is read-only attribute, any changes to the array will NOT + be reflected in gate_included and will be lost when the attribute is + accessed again. + """ return self._gate_excluded.copy() def _get_fdata(self, field): @@ -713,7 +695,7 @@ def exclude_gates(self, mask, exclude_masked=True, op='or'): meet any of the conditions. 'and', the default for include methods, is typically desired when building up a set of conditions where the desired effect is to include gates which meet any of the - conditions. Note that the 'and' method MAY results in including + conditions. Note that the 'and' method MAY results in including gates which have previously been excluded because they were masked or invalid. @@ -759,7 +741,7 @@ def include_not_transition( meet any of the conditions. 'and', the default for include methods, is typically desired when building up a set of conditions where the desired effect is to include gates which meet any of the - conditions. Note that the 'or' method MAY results in excluding + conditions. Note that the 'or' method MAY results in excluding gates which have previously been included. """ @@ -866,7 +848,7 @@ def include_gates(self, mask, exclude_masked=True, op='and'): meet any of the conditions. 'and', the default for include methods, is typically desired when building up a set of conditions where the desired effect is to include gates which meet any of the - conditions. Note that the 'or' method MAY results in excluding + conditions. Note that the 'or' method MAY results in excluding gates which have previously been included. """ diff --git a/pyart/graph/__init__.py b/pyart/graph/__init__.py index e29072ce18..48df69eb71 100644 --- a/pyart/graph/__init__.py +++ b/pyart/graph/__init__.py @@ -1,31 +1,54 @@ """ -============================= -Graphing (:mod:`pyart.graph`) -============================= - -.. currentmodule:: pyart.graph - Creating plots of Radar and Grid fields. -Plotting radar data -=================== - -.. autosummary:: - :toctree: generated/ +There are also Radar related colormaps and colorblind friendly radar +colormaps for plotting. - RadarDisplay - RadarMapDisplay - AirborneRadarDisplay - RadarMapDisplayBasemap +Available colormaps, reversed versions (_r) are also provided, these +colormaps are available within matplotlib with names 'pyart_COLORMAP': -Plotting grid data -================== + * BlueBrown10 + * BlueBrown11 + * BrBu10 + * BrBu12 + * Bu10 + * Bu7 + * BuDOr12 + * BuDOr18 + * BuDRd12 + * BuDRd18 + * BuGr14 + * BuGy8 + * BuOr10 + * BuOr12 + * BuOr8 + * BuOrR14 + * Carbone11 + * Carbone17 + * Carbone42 + * Cat12 + * EWilson17 + * GrMg16 + * Gray5 + * Gray9 + * NWSRef + * NWSVel + * NWS_SPW + * PD17 + * RRate11 + * RdYlBu11b + * RefDiff + * SCook18 + * StepSeq25 + * SymGray12 + * Theodore16 + * Wild25 -.. autosummary:: - :toctree: generated/ +Colorblind friendly - GridMapDisplay - GridMapDisplayBasemap + * LangRainbow12 + * HomeyerRainbow + * balance """ diff --git a/pyart/graph/_cm.py b/pyart/graph/_cm.py index 17c71d0e41..9f9433327e 100644 --- a/pyart/graph/_cm.py +++ b/pyart/graph/_cm.py @@ -1,7 +1,4 @@ """ -pyart.graph._cm -=============== - Data for radar related colormaps. 
""" diff --git a/pyart/graph/_cm_colorblind.py b/pyart/graph/_cm_colorblind.py index c8e91992cc..c413d17743 100644 --- a/pyart/graph/_cm_colorblind.py +++ b/pyart/graph/_cm_colorblind.py @@ -1,6 +1,4 @@ """ -pyart.graph._cm_colorblind - Data for colorblind friendly radar colormaps """ diff --git a/pyart/graph/cm.py b/pyart/graph/cm.py index a899636344..591eae247e 100644 --- a/pyart/graph/cm.py +++ b/pyart/graph/cm.py @@ -1,18 +1,6 @@ """ -pyart.graph.cm -============== - Radar related colormaps. -.. autosummary:: - :toctree: generated/ - - revcmap - _reverser - _reverse_cmap_spec - _generate_cmap - - Available colormaps, reversed versions (_r) are also provided, these colormaps are available within matplotlib with names 'pyart_COLORMAP': diff --git a/pyart/graph/cm_colorblind.py b/pyart/graph/cm_colorblind.py index 2bcedac803..ff763b884c 100644 --- a/pyart/graph/cm_colorblind.py +++ b/pyart/graph/cm_colorblind.py @@ -1,14 +1,6 @@ """ -pyart.graph.cm_colorblind -========================= - Colorblind friendly radar colormaps -.. autosummary:: - :toctree: generated/ - - _generate_cmap - Available colormaps, reversed versions are also provided, these colormaps are available within matplotlib with names pyart_COLORMAP': diff --git a/pyart/graph/common.py b/pyart/graph/common.py index 4858834f4b..dfec1f8d68 100644 --- a/pyart/graph/common.py +++ b/pyart/graph/common.py @@ -1,34 +1,6 @@ """ -pyart.graph.common -================== - Common graphing routines. -.. autosummary:: - :toctree: generated/ - - parse_ax - parse_ax_fig - parse_cmap - parse_vmin_vmax - parse_lon_lat - generate_colorbar_label - generate_field_name - generate_radar_name - generate_grid_name - generate_radar_time_begin - generate_radar_time_sweep - generate_grid_time_begin - generate_filename - generate_grid_filename - generate_title - generate_grid_title - generate_longitudinal_level_title - generate_latitudinal_level_title - generate_vpt_title - generate_ray_title - set_limits - """ import matplotlib.pyplot as plt diff --git a/pyart/graph/gridmapdisplay.py b/pyart/graph/gridmapdisplay.py index a8cec4428d..a9146d1ef6 100644 --- a/pyart/graph/gridmapdisplay.py +++ b/pyart/graph/gridmapdisplay.py @@ -1,16 +1,7 @@ """ -pyart.graph.gridmapdisplay -========================== - A class for plotting grid objects using xarray plotting and cartopy. -.. autosummary:: - :toctree: generated/ - :template: dev_template.rst - - GridMapDisplay - """ import warnings diff --git a/pyart/graph/gridmapdisplay_basemap.py b/pyart/graph/gridmapdisplay_basemap.py index 4a306c70bf..aa45492263 100644 --- a/pyart/graph/gridmapdisplay_basemap.py +++ b/pyart/graph/gridmapdisplay_basemap.py @@ -1,15 +1,6 @@ """ -pyart.graph.gridmapdisplay_basemap -================================== - A class for plotting grid objects with a basemap. -.. autosummary:: - :toctree: generated/ - :template: dev_template.rst - - GridMapDisplayBasemap - """ import warnings diff --git a/pyart/graph/radardisplay.py b/pyart/graph/radardisplay.py index e777e69707..fc91f40e5e 100644 --- a/pyart/graph/radardisplay.py +++ b/pyart/graph/radardisplay.py @@ -1,15 +1,6 @@ """ -pyart.graph.radardisplay -========================= - Class for creating plots from Radar objects. -.. 
autosummary:: - :toctree: generated/ - :template: dev_template.rst - - RadarDisplay - """ import warnings diff --git a/pyart/graph/radardisplay_airborne.py b/pyart/graph/radardisplay_airborne.py index 584431c978..d7083694f3 100644 --- a/pyart/graph/radardisplay_airborne.py +++ b/pyart/graph/radardisplay_airborne.py @@ -1,15 +1,6 @@ """ -pyart.graph.radardisplay_airborne -================================= - Class for creating plots from Airborne Radar objects. -.. autosummary:: - :toctree: generated/ - :template: dev_template.rst - - AirborneRadarDisplay - """ import numpy as np diff --git a/pyart/graph/radarmapdisplay.py b/pyart/graph/radarmapdisplay.py index e62584c880..720c8a294a 100644 --- a/pyart/graph/radarmapdisplay.py +++ b/pyart/graph/radarmapdisplay.py @@ -1,16 +1,7 @@ """ -pyart.graph.radarmapdisplay -=========================== - Class for creating plots on a geographic map using a Radar object using Cartopy for drawing maps. -.. autosummary:: - :toctree: generated/ - :template: dev_template.rst - - RadarMapDisplay - """ import warnings diff --git a/pyart/graph/radarmapdisplay_basemap.py b/pyart/graph/radarmapdisplay_basemap.py index ed629ee650..d8a322dbbd 100644 --- a/pyart/graph/radarmapdisplay_basemap.py +++ b/pyart/graph/radarmapdisplay_basemap.py @@ -1,15 +1,6 @@ """ -pyart.graph.radarmapdisplay_basemap -=================================== - Class for creating plots on a geographic map using a Radar object and Basemap. -.. autosummary:: - :toctree: generated/ - :template: dev_template.rst - - RadarMapDisplayBasemap - """ import warnings diff --git a/pyart/io/__init__.py b/pyart/io/__init__.py index d4f58c0de7..c6efc76544 100644 --- a/pyart/io/__init__.py +++ b/pyart/io/__init__.py @@ -1,79 +1,11 @@ """ -================================== -Input and output (:mod:`pyart.io`) -================================== - -.. currentmodule:: pyart.io - Functions to read and write radar and grid data to and from a number of file formats. -Reading radar data -================== - In most cases the :py:func:`pyart.io.read` function should be used to read in radar data from a file. In certain cases the function the read function for the format in question should be used. -.. autosummary:: - :toctree: generated/ - - read - read_rsl - read_mdv - read_sigmet - read_cfradial - read_chl - read_nexrad_archive - read_nexrad_cdm - read_nexrad_level3 - read_uf - -Writing radar data -================== - -.. autosummary:: - :toctree: generated/ - - write_cfradial - write_uf - -Reading grid data -================= - -.. autosummary:: - :toctree: generated/ - - read_grid - read_grid_mdv - -Writing grid data -================= - -.. autosummary:: - :toctree: generated/ - - write_grid - write_grid_mdv - write_grid_geotiff - -Reading Sonde data -================== - -.. autosummary:: - :toctree: generated/ - - read_arm_sonde - read_arm_sonde_vap - -Special use -=========== - -.. autosummary:: - :toctree: generated/ - - prepare_for_read - """ from .rsl import read_rsl diff --git a/pyart/io/_rsl_interface.pyx b/pyart/io/_rsl_interface.pyx index 8523a2d5c8..a5049449c0 100644 --- a/pyart/io/_rsl_interface.pyx +++ b/pyart/io/_rsl_interface.pyx @@ -1,26 +1,6 @@ """ -pyart.io._rsl_interface -======================= - Cython wrapper around the NASA TRMM RSL library. -.. autosummary:: - :toctree: generated/ - - copy_volume - create_volume - _label_volume - -.. 
autosummary:: - :toctree: generated/ - :template: dev_template.rst - - RslFile - _RslVolume - _RslSweep - _RslRay - - """ cimport _rsl_h diff --git a/pyart/io/_sigmet_noaa_hh.py b/pyart/io/_sigmet_noaa_hh.py index 64f3ebd09d..67b642a161 100644 --- a/pyart/io/_sigmet_noaa_hh.py +++ b/pyart/io/_sigmet_noaa_hh.py @@ -1,16 +1,7 @@ """ -pyart.io._sigmet_noaa_hh -======================== - Functions needed for reading Sigmet files from the airborne radar located on NOAA's Hurricane Hunter aircraft. -.. autosummary:: - :toctree: generated/ - - _decode_noaa_hh_hdr - _georeference_yprime - """ import numpy as np diff --git a/pyart/io/_sigmetfile.pyx b/pyart/io/_sigmetfile.pyx index 414531b91f..4d2084f321 100644 --- a/pyart/io/_sigmetfile.pyx +++ b/pyart/io/_sigmetfile.pyx @@ -1,27 +1,6 @@ """ -pyart.io._sigmetfile -==================== - A class and supporting functions for reading Sigmet (raw format) files. -.. autosummary:: - :toctree: generated/ - - SigmetFile - convert_sigmet_data - bin2_to_angle - bin4_to_angle - _data_types_from_mask - _is_bit_set - _parse_ray_headers - _unpack_structure - _unpack_key - _unpack_ingest_data_headers - _unpack_ingest_data_header - _unpack_raw_prod_bhdr - _unpack_product_hdr - _unpack_ingest_header - """ import struct import datetime diff --git a/pyart/io/arm_sonde.py b/pyart/io/arm_sonde.py index 2e0e3b212a..cf1ef0ff45 100644 --- a/pyart/io/arm_sonde.py +++ b/pyart/io/arm_sonde.py @@ -1,15 +1,6 @@ """ -pyart.io.arm_sonde -================== - Utilities for ARM sonde NetCDF files. -.. autosummary:: - :toctree: generated/ - - read_arm_sonde - read_arm_sonde_vap - """ import netCDF4 diff --git a/pyart/io/auto_read.py b/pyart/io/auto_read.py index 105d0782d1..96bd6c1793 100644 --- a/pyart/io/auto_read.py +++ b/pyart/io/auto_read.py @@ -1,15 +1,6 @@ """ -pyart.io.auto_read -================== - Automatic reading of radar files by detecting format. -.. autosummary:: - :toctree: generated/ - - read - determine_filetype - """ import bz2 diff --git a/pyart/io/cfradial.py b/pyart/io/cfradial.py index b593c6712a..cdc9121a6e 100644 --- a/pyart/io/cfradial.py +++ b/pyart/io/cfradial.py @@ -1,25 +1,6 @@ """ -pyart.io.cfradial -================= - Utilities for reading CF/Radial files. -.. autosummary:: - :toctree: generated/ - :template: dev_template.rst - - _NetCDFVariableDataExtractor - -.. autosummary:: - :toctree: generated/ - - read_cfradial - write_cfradial - _find_all_meta_group_vars - _ncvar_to_dict - _unpack_variable_gate_field_dic - _create_ncvar - """ import datetime diff --git a/pyart/io/chl.py b/pyart/io/chl.py index f70592d539..5b069a1409 100644 --- a/pyart/io/chl.py +++ b/pyart/io/chl.py @@ -1,21 +1,6 @@ """ -pyart.io.chl -============ - Utilities for reading CSU-CHILL CHL files. -.. autosummary:: - :toctree: generated/ - :template: dev_template.rst - - ChlFile - -.. autosummary:: - :toctree: generated/ - - read_chl - _unpack_structure - """ from datetime import datetime diff --git a/pyart/io/common.py b/pyart/io/common.py index d7459b0e8f..453fe6ee02 100644 --- a/pyart/io/common.py +++ b/pyart/io/common.py @@ -1,17 +1,6 @@ """ -pyart.io.common -=============== - Input/output routines common to many file formats. -.. 
autosummary:: - :toctree: generated/ - - prepare_for_read - stringarray_to_chararray - _test_arguments - make_time_unit_str - """ import bz2 diff --git a/pyart/io/grid_io.py b/pyart/io/grid_io.py index 3788cf4454..71a73b8010 100644 --- a/pyart/io/grid_io.py +++ b/pyart/io/grid_io.py @@ -1,16 +1,6 @@ """ -pyart.io.grid_io -================ - Reading and writing Grid objects. -.. autosummary:: - :toctree: generated/ - - read_grid - write_grid - _make_coordinatesystem_dict - """ import datetime diff --git a/pyart/io/mdv_common.py b/pyart/io/mdv_common.py index 0ed83c36f2..eb462c1bd8 100644 --- a/pyart/io/mdv_common.py +++ b/pyart/io/mdv_common.py @@ -1,16 +1,6 @@ """ -pyart.io.mdv_common -=================== - Functions and classes common between MDV grid and radar files. -.. autosummary:: - :toctree: generated/ - :template: dev_template.rst - - MdvFile - _MdvVolumeDataExtractor - """ # Code is adapted from Nitin Bharadwaj's Matlab code diff --git a/pyart/io/mdv_grid.py b/pyart/io/mdv_grid.py index 4dc5cb6825..b148677482 100644 --- a/pyart/io/mdv_grid.py +++ b/pyart/io/mdv_grid.py @@ -1,16 +1,6 @@ """ -pyart.io.mdv_grid -================== - Utilities for reading and writing of MDV grid files. -.. autosummary:: - :toctree: generated/ - - write_grid_mdv - read_grid_mdv - _time_dic_to_datetime - """ diff --git a/pyart/io/mdv_radar.py b/pyart/io/mdv_radar.py index 1cac5c259b..43e730b900 100644 --- a/pyart/io/mdv_radar.py +++ b/pyart/io/mdv_radar.py @@ -1,14 +1,6 @@ """ -pyart.io.mdv_radar -================== - Utilities for reading of MDV radar files. -.. autosummary:: - :toctree: generated/ - - read_mdv - """ import numpy as np diff --git a/pyart/io/nexrad_archive.py b/pyart/io/nexrad_archive.py index 5051cdf199..a32c799e60 100644 --- a/pyart/io/nexrad_archive.py +++ b/pyart/io/nexrad_archive.py @@ -1,23 +1,6 @@ """ -pyart.io.nexrad_archive -======================= - Functions for reading NEXRAD Level II Archive files. -.. autosummary:: - :toctree: generated/ - :template: dev_template.rst - - _NEXRADLevel2StagedField - -.. autosummary:: - :toctree: generated/ - - read_nexrad_archive - _find_range_params - _find_scans_to_interp - _interpolate_scan - """ import warnings diff --git a/pyart/io/nexrad_cdm.py b/pyart/io/nexrad_cdm.py index da03bfe062..c7a600fcbf 100644 --- a/pyart/io/nexrad_cdm.py +++ b/pyart/io/nexrad_cdm.py @@ -1,17 +1,6 @@ """ -pyart.io.nexrad_cdm -=================== - Functions for accessing Common Data Model (CDM) NEXRAD Level 2 files. -.. autosummary:: - :toctree: generated/ - - read_nexrad_cdm - _scan_info - _populate_scan_dic - _get_moment_data - """ from datetime import datetime, timedelta diff --git a/pyart/io/nexrad_common.py b/pyart/io/nexrad_common.py index c78bd205c8..830de89bad 100644 --- a/pyart/io/nexrad_common.py +++ b/pyart/io/nexrad_common.py @@ -1,14 +1,6 @@ """ -pyart.io.nexrad_common -====================== - Data and functions common to all types of NEXRAD files. -.. autosummary:: - :toctree: generated/ - - get_nexrad_location - """ # The functions in this module are intended to be used in other # nexrad related modules. The functions are not and should not be diff --git a/pyart/io/nexrad_interpolate.pyx b/pyart/io/nexrad_interpolate.pyx index b771fba4b9..de948366e8 100644 --- a/pyart/io/nexrad_interpolate.pyx +++ b/pyart/io/nexrad_interpolate.pyx @@ -1,14 +1,6 @@ """ -pyart.io.nexrad_interpolate -=========================== - Interpolation of NEXRAD moments from 1000 meter to 250 meter gate spacing. -.. 
autosummary:: - :toctree: generated/ - - _fast_interpolate_scan - """ def _fast_interpolate_scan( diff --git a/pyart/io/nexrad_level2.py b/pyart/io/nexrad_level2.py index 9b675e1247..6dc2b451d6 100644 --- a/pyart/io/nexrad_level2.py +++ b/pyart/io/nexrad_level2.py @@ -1,24 +1,5 @@ """ -pyart.io.nexrad_level2 -====================== - -.. autosummary:: - :toctree: generated/ - :template: dev_template.rst - - NEXRADLevel2File - -.. autosummary:: - :toctree: generated/ - - _decompress_records - _get_record_from_buf - _get_msg31_data_block - _structure_size - _unpack_from_buf - _unpack_structure - - +Functions for reading NEXRAD level 2 files. """ diff --git a/pyart/io/nexrad_level3.py b/pyart/io/nexrad_level3.py index c6c4b406f4..141d4a39f9 100644 --- a/pyart/io/nexrad_level3.py +++ b/pyart/io/nexrad_level3.py @@ -1,26 +1,6 @@ """ -pyart.io.nexrad_level3 -====================== - Class for reading data from NEXRAD Level 3 files. -.. autosummary:: - :toctree: generated/ - :template: dev_template.rst - - NEXRADLevel3File - -.. autosummary:: - :toctree: generated/ - - nexrad_level3_message_code - _datetime_from_mdate_mtime - _structure_size - _unpack_from_buf - _unpack_structure - _int16_to_float16 - - """ # This file is part of the Py-ART, the Python ARM Radar Toolkit diff --git a/pyart/io/nexradl3_read.py b/pyart/io/nexradl3_read.py index 09d2689eb9..9b6126d7c0 100644 --- a/pyart/io/nexradl3_read.py +++ b/pyart/io/nexradl3_read.py @@ -1,14 +1,6 @@ """ -pyart.io.nexradl3_read -====================== - Functions for reading NEXRAD Level 3 products. -.. autosummary:: - :toctree: generated/ - - read_nexrad_level3 - """ import numpy as np @@ -33,7 +25,7 @@ def read_nexrad_level3(filename, field_names=None, additional_metadata=None, NWS WSR-88D Level III Data Collection and Distribution Network have been tests. Other NEXRAD Level 3 files may or may not work. A file-like object pointing to the beginning of such a file is also - supported. + supported [2]_. field_names : dict, optional Dictionary mapping NEXRAD level 3 product number to radar field names. If the product number of the file does not appear in this dictionary diff --git a/pyart/io/output_to_geotiff.py b/pyart/io/output_to_geotiff.py index f9a939feee..f5658ee820 100644 --- a/pyart/io/output_to_geotiff.py +++ b/pyart/io/output_to_geotiff.py @@ -1,16 +1,6 @@ """ -pyart.io.write_grid_geotiff -=========================== - Write a Py-ART Grid object to a GeoTIFF file. -.. autosummary:: - :toctree: generated/ - - write_grid_geotiff - _get_rgb_values - _create_sld - """ import os diff --git a/pyart/io/rsl.py b/pyart/io/rsl.py index 720374757e..3dd5b36c41 100644 --- a/pyart/io/rsl.py +++ b/pyart/io/rsl.py @@ -1,22 +1,6 @@ """ -pyart.io.rsl -============ - Python wrapper around the RSL library. -.. autosummary:: - :toctree: generated/ - :template: dev_template.rst - - _RslVolumeDataExtractor - -.. autosummary:: - :toctree: generated/ - - read_rsl - VOLUMENUM2RSLNAME - RSLNAME2VOLUMENUM - """ import datetime diff --git a/pyart/io/sigmet.py b/pyart/io/sigmet.py index e1a6edcc38..a0798067f5 100644 --- a/pyart/io/sigmet.py +++ b/pyart/io/sigmet.py @@ -1,21 +1,6 @@ """ -pyart.io.sigmet -=============== - Reading and writing of Sigmet (raw format) files -.. 
autosummary:: - :toctree: generated/ - - read_sigmet - ymds_time_to_datetime - _is_time_ordered_by_reversal - _is_time_ordered_by_roll - _is_time_ordered_by_reverse_roll - _time_order_data_and_metadata_roll - _time_order_data_and_metadata_reverse - _time_order_data_and_metadata_full - """ import datetime diff --git a/pyart/io/uf.py b/pyart/io/uf.py index f468828ca2..1889f8428c 100644 --- a/pyart/io/uf.py +++ b/pyart/io/uf.py @@ -1,16 +1,6 @@ """ -pyart.io.uf -=========== - Reading of Universal format (UF) files. -.. autosummary:: - :toctree: generated/ - - read_uf - _get_scan_type - _get_instrument_parameters - """ import warnings diff --git a/pyart/io/uf_write.py b/pyart/io/uf_write.py index c55b4e726f..7bd689dc2f 100644 --- a/pyart/io/uf_write.py +++ b/pyart/io/uf_write.py @@ -1,22 +1,6 @@ """ -pyart.io.uf_write -================= - Functions for writing UF files. -.. autosummary:: - :toctree: generated/ - :template: dev_template.rst - - UFRayCreator - -.. autosummary:: - :toctree: generated/ - - write_uf - _d_to_dms - _pack_structure - """ import math diff --git a/pyart/io/uffile.py b/pyart/io/uffile.py index 38c5a9c57d..4294ce3851 100644 --- a/pyart/io/uffile.py +++ b/pyart/io/uffile.py @@ -1,23 +1,6 @@ """ -pyart.io.uffile -=============== - Low level class for reading Universal Format (UF) files. -.. autosummary:: - :toctree: generated/ - :template: dev_template.rst - - UFFile - UFRay - -.. autosummary:: - :toctree: generated/ - - _structure_size - _unpack_from_buf - _unpack_structure - """ # This file is part of the Py-ART, the Python ARM Radar Toolkit diff --git a/pyart/lazydict.py b/pyart/lazydict.py index 7ed602c38a..67853909a5 100644 --- a/pyart/lazydict.py +++ b/pyart/lazydict.py @@ -1,15 +1,6 @@ """ -pyart.lazydict -============== - A dictionary-like class supporting lazy loading of specified keys. -.. autosummary:: - :toctree: generated/ - :template: dev_template.rst - - LazyLoadDict - """ from collections.abc import MutableMapping diff --git a/pyart/map/__init__.py b/pyart/map/__init__.py index 38b51cb06b..aba5e7d1a2 100644 --- a/pyart/map/__init__.py +++ b/pyart/map/__init__.py @@ -1,23 +1,7 @@ """ -========================== -Mapping (:mod:`pyart.map`) -========================== - -.. current modules:: pyart.map - Py-ART has a robust function for mapping radar data from the collected radar coordinates to Cartesian coordinates. -.. autosummary:: - :toctree: generated/ - - grid_from_radars - map_to_grid - map_gates_to_grid - example_roi_func_constant - example_roi_func_dist - example_roi_func_dist_beam - """ from .grid_mapper import map_to_grid, grid_from_radars diff --git a/pyart/map/_gate_to_grid_map.pyx b/pyart/map/_gate_to_grid_map.pyx index 765d0a46bf..f7d5bf5a56 100644 --- a/pyart/map/_gate_to_grid_map.pyx +++ b/pyart/map/_gate_to_grid_map.pyx @@ -1,20 +1,7 @@ """ -pyart.map._gate_to_grid_map -=========================== - Cython classes and functions for efficient mapping of radar gates to a uniform grid. -.. autosummary:: - :toctree: generated/ - :template: dev_template.rst - - GateToGridMapper - RoIFunction - ConstantRoI - DistRoI - DistBeamRoI - """ from libc.math cimport sqrt, exp, ceil, floor, sin, cos, tan, asin diff --git a/pyart/map/gates_to_grid.py b/pyart/map/gates_to_grid.py index 9ca602911b..27a686a235 100644 --- a/pyart/map/gates_to_grid.py +++ b/pyart/map/gates_to_grid.py @@ -1,21 +1,6 @@ """ -pyart.map.gates_to_grid -======================= - Generate a Cartesian grid by mapping from radar gates onto the grid. -.. 
autosummary:: - :toctree: generated/ - - map_gates_to_grid - _detemine_cy_weighting_func - _find_projparams - _parse_gatefilters - _determine_fields - _find_offsets - _find_grid_params - _parse_roi_func - """ import warnings diff --git a/pyart/map/grid_mapper.py b/pyart/map/grid_mapper.py index e6bc1f8988..f1794149f3 100644 --- a/pyart/map/grid_mapper.py +++ b/pyart/map/grid_mapper.py @@ -1,28 +1,6 @@ """ -pyart.map.grid_mapper -===================== - Utilities for mapping radar objects to Cartesian grids. -.. autosummary:: - :toctree: generated/ - - grid_from_radars - map_to_grid - example_roi_func_constant - example_roi_func_dist - _unify_times_for_radars - _load_nn_field_data - _gen_roi_func_constant - _gen_roi_func_dist - _gen_roi_func_dist_beam - -.. autosummary:: - :toctree: generated/ - :template: dev_template.rst - - NNLocator - """ import warnings diff --git a/pyart/retrieve/__init__.py b/pyart/retrieve/__init__.py index eebf2337d0..836c1c89d2 100644 --- a/pyart/retrieve/__init__.py +++ b/pyart/retrieve/__init__.py @@ -1,43 +1,6 @@ """ -======================================== -Radar Retrievals (:mod:`pyart.retrieve`) -======================================== - -.. currentmodule:: pyart.retrieve - Radar retrievals. -Radar retrievals -================ - -.. autosummary:: - :toctree: generated/ - - kdp_maesaka - calculate_snr_from_reflectivity - calculate_velocity_texture - compute_snr - compute_l - compute_cdr - compute_noisedBZ - fetch_radar_time_profile - map_profile_to_gates - steiner_conv_strat - hydroclass_semisupervised - get_freq_band - texture_of_complex_phase - grid_displacement_pc - grid_shift - est_rain_rate_zpoly - est_rain_rate_z - est_rain_rate_kdp - est_rain_rate_a - est_rain_rate_zkdp - est_rain_rate_za - est_rain_rate_hydro - velocity_azimuth_display - quasi_vertical_profile - """ from .kdp_proc import kdp_maesaka, kdp_schneebeli, kdp_vulpiani diff --git a/pyart/retrieve/_kdp_proc.pyx b/pyart/retrieve/_kdp_proc.pyx index 866313615c..1257daf3f3 100644 --- a/pyart/retrieve/_kdp_proc.pyx +++ b/pyart/retrieve/_kdp_proc.pyx @@ -1,15 +1,6 @@ """ -pyart.retrieve._kdp_proc -======================== - Cython routines for specific differential phase retrievals. -.. autosummary:: - :toctree: generated/ - - lowpass_maesaka_term - lowpass_maesaka_jac - """ # Necessary and/or potential future improvements to this module: diff --git a/pyart/retrieve/advection.py b/pyart/retrieve/advection.py index c45fb05797..6397ed9724 100644 --- a/pyart/retrieve/advection.py +++ b/pyart/retrieve/advection.py @@ -1,15 +1,6 @@ """ -pyart.retrieve.advection -======================== - Advection calculations. -.. autosummary:: - :toctree: generated/ - - grid_displacement_pc - grid_shift - """ import copy diff --git a/pyart/retrieve/convv.py b/pyart/retrieve/convv.py index c0c6989a4a..2591be4828 100644 --- a/pyart/retrieve/convv.py +++ b/pyart/retrieve/convv.py @@ -1,7 +1,5 @@ """ -pyart.retrieve.convv -==================== - +Convv Class """ import numpy as np diff --git a/pyart/retrieve/echo_class.py b/pyart/retrieve/echo_class.py index 946bd0cb66..597a71c3f4 100644 --- a/pyart/retrieve/echo_class.py +++ b/pyart/retrieve/echo_class.py @@ -1,21 +1,6 @@ """ -pyart.retrieve.echo_class -========================= - Functions for echo classification. -.. 
autosummary:: - :toctree: generated/ - - steiner_conv_strat - hydroclass_semisupervised - get_freq_band - _standardize - _assign_to_class - _get_mass_centers - _mass_centers_table - _data_limits_table - """ import numpy as np diff --git a/pyart/retrieve/gate_id.py b/pyart/retrieve/gate_id.py index 372a809916..2937163e71 100644 --- a/pyart/retrieve/gate_id.py +++ b/pyart/retrieve/gate_id.py @@ -1,12 +1,6 @@ """ -pyart.retrieve.gate_id -====================== - -.. autosummary:: - :toctree: generated/ - - map_profile_to_gates - fetch_radar_time_profile +Functions that retrieve height of the gates and the profile interpolated +onto the radar gates. """ diff --git a/pyart/retrieve/kdp_proc.py b/pyart/retrieve/kdp_proc.py index 2c188e8fd4..95b1e6ee53 100755 --- a/pyart/retrieve/kdp_proc.py +++ b/pyart/retrieve/kdp_proc.py @@ -1,30 +1,9 @@ """ -pyart.retrieve.kdp_proc -======================= - Module for retrieving specific differential phase (KDP) from radar total differential phase (PSIDP) measurements. Total differential phase is a function of propagation differential phase (PHIDP), backscatter differential phase (DELTAHV), and the system phase offset. -.. autosummary:: - :toctree: generated/ - - kdp_schneebeli - kdp_vulpiani - kdp_maesaka - filter_psidp - boundary_conditions_maesaka - - _kdp_estimation_backward_fixed - _kdp_estimation_forward_fixed - _kdp_kalman_profile - _kdp_vulpiani_profile - _cost_maesaka - _jac_maesaka - _forward_reverse_phidp - _parse_range_resolution - """ from functools import partial diff --git a/pyart/retrieve/qpe.py b/pyart/retrieve/qpe.py index 7a95628be4..46ce85035b 100644 --- a/pyart/retrieve/qpe.py +++ b/pyart/retrieve/qpe.py @@ -1,24 +1,6 @@ """ -pyart.retrieve.qpe -================== - Functions for rainfall rate estimation. -.. autosummary:: - :toctree: generated/ - - est_rain_rate_zpoly - est_rain_rate_z - est_rain_rate_kdp - est_rain_rate_a - est_rain_rate_zkdp - est_rain_rate_za - est_rain_rate_hydro - _get_coeff_rkdp - _coeff_rkdp_table - _get_coeff_ra - _coeff_ra_table - """ from warnings import warn diff --git a/pyart/retrieve/qvp.py b/pyart/retrieve/qvp.py index 3bd100fd3d..a22d5d49bf 100644 --- a/pyart/retrieve/qvp.py +++ b/pyart/retrieve/qvp.py @@ -1,14 +1,6 @@ """ -pyart.retrieve.quasi_vertical_profile -===================================== - Retrieval of QVPs from a radar object -.. autosummary:: - :toctree: generated/ - - quasi_vertical_profile - """ import numpy as np diff --git a/pyart/retrieve/simple_moment_calculations.py b/pyart/retrieve/simple_moment_calculations.py index fd7fe571cb..96f022efb5 100644 --- a/pyart/retrieve/simple_moment_calculations.py +++ b/pyart/retrieve/simple_moment_calculations.py @@ -1,19 +1,6 @@ """ -pyart.retrieve.simple_moment_calculations -========================================= - Simple moment calculations. -.. autosummary:: - :toctree: generated/ - - calculate_snr_from_reflectivity - compute_noisedBZ - compute_snr - compute_l - compute_cdr - calculate_velocity_texture - """ import numpy as np diff --git a/pyart/retrieve/vad.py b/pyart/retrieve/vad.py index c25bfc03ba..009e4c76bb 100644 --- a/pyart/retrieve/vad.py +++ b/pyart/retrieve/vad.py @@ -1,18 +1,6 @@ """ -pyart.retrieve.velocity_azimuth_display -======================================= - Retrieval of VADs from a radar object. -.. 
autosummary:: - :toctreeL generated/ - :template: dev_template.rst - - velocity_azimuth_display - _vad_calculation - _interval_mean - _sd_to_uv - """ import numpy as np diff --git a/pyart/testing/__init__.py b/pyart/testing/__init__.py index e43336f442..2ec310dc93 100644 --- a/pyart/testing/__init__.py +++ b/pyart/testing/__init__.py @@ -1,34 +1,6 @@ """ -======================================== -Testing Utilities (:mod:`pyart.testing`) -======================================== - -.. currentmodule:: pyart.testing - -Utilities helpful when writing and running unit tests. - -Testing functions -================= - -.. autosummary:: - :toctree: generated/ - - make_empty_ppi_radar - make_target_radar - make_single_ray_radar - make_velocity_aliased_radar - make_empty_grid - make_target_grid - make_storm_grid - make_normal_storm - -Testing classes -=============== - -.. autosummary:: - :toctree: generated/ - - InTemporaryDirectory +Utilities helpful when writing and running unit tests such as sample files +and sample objects. """ diff --git a/pyart/testing/sample_files.py b/pyart/testing/sample_files.py index 0ba0846beb..b38d3ecd05 100644 --- a/pyart/testing/sample_files.py +++ b/pyart/testing/sample_files.py @@ -1,28 +1,22 @@ """ -pyart.testing.sample_files -========================== - Sample radar files in a number of formats. Many of these files are incomplete, they should only be used for testing, not production. -.. autosummary:: - :toctree: generated/ - - MDV_PPI_FILE - MDV_RHI_FILE - CFRADIAL_PPI_FILE - CFRADIAL_RHI_FILE - CHL_RHI_FILE - SIGMET_PPI_FILE - SIGMET_RHI_FILE - NEXRAD_ARCHIVE_MSG31_FILE - NEXRAD_ARCHIVE_MSG31_COMPRESSED_FILE - NEXRAD_ARCHIVE_MSG1_FILE - NEXRAD_LEVEL3_MSG19 - NEXRAD_LEVEL3_MSG163 - NEXRAD_CDM_FILE - UF_FILE - INTERP_SOUNDE_FILE +MDV_PPI_FILE +MDV_RHI_FILE +CFRADIAL_PPI_FILE +CFRADIAL_RHI_FILE +CHL_RHI_FILE +SIGMET_PPI_FILE +SIGMET_RHI_FILE +NEXRAD_ARCHIVE_MSG31_FILE +NEXRAD_ARCHIVE_MSG31_COMPRESSED_FILE +NEXRAD_ARCHIVE_MSG1_FILE +NEXRAD_LEVEL3_MSG19 +NEXRAD_LEVEL3_MSG163 +NEXRAD_CDM_FILE +UF_FILE +INTERP_SOUNDE_FILE """ diff --git a/pyart/testing/sample_objects.py b/pyart/testing/sample_objects.py index bebd34911c..13c4c36c35 100644 --- a/pyart/testing/sample_objects.py +++ b/pyart/testing/sample_objects.py @@ -1,21 +1,6 @@ """ -pyart.testing.sample_objects -============================ - Functions for creating sample Radar and Grid objects. -.. autosummary:: - :toctree: generated/ - - make_empty_ppi_radar - make_target_radar - make_velocity_aliased_radar - make_single_ray_radar - make_empty_grid - make_target_grid - make_storm_grid - make_normal_storm - """ import numpy as np diff --git a/pyart/testing/tmpdirs.py b/pyart/testing/tmpdirs.py index f80c16710e..3fe9587c1f 100644 --- a/pyart/testing/tmpdirs.py +++ b/pyart/testing/tmpdirs.py @@ -1,17 +1,6 @@ """ -pyart.testing.tmpdirs -===================== - Classes for creating and cleaning temporary directories in unit tests. -.. autosummary:: - :toctree: generated/ - - TemporaryDirectory - InTemporaryDirectory - InGivenDirectory - - This module is taken from the nibable project. The following license applies: :: diff --git a/pyart/util/__init__.py b/pyart/util/__init__.py index f302fd078f..ae148544b8 100644 --- a/pyart/util/__init__.py +++ b/pyart/util/__init__.py @@ -1,52 +1,8 @@ """ -============================= -Utilities (:mod:`pyart.util`) -============================= - Miscellaneous utility functions. 
The location and names of these functions within Py-ART may change between -versions without depeciation, use with caution. - -.. currentmodule:: pyart.util - -Direction statistics -==================== - -.. autosummary:: - :toctree: generated/ - - angular_mean - angular_std - angular_mean_deg - angular_std_deg - interval_mean - interval_std - mean_of_two_angles - mean_of_two_angles_deg - -Miscellaneous functions -======================= - -.. autosummary:: - :toctree: generated/ - - cross_section_ppi - cross_section_rhi - datetime_from_radar - datetimes_from_radar - datetime_from_dataset - datetimes_from_dataset - datetime_from_grid - estimate_noise_hs74 - is_vpt - to_vpt - join_radar - simulated_vel_from_profile - texture_along_ray - texture - rolling_window - angular_texture_2d +versions without depreciation, use with caution. """ diff --git a/pyart/util/circular_stats.py b/pyart/util/circular_stats.py index ace0f73fc0..8affb4c272 100644 --- a/pyart/util/circular_stats.py +++ b/pyart/util/circular_stats.py @@ -1,21 +1,6 @@ """ -pyart.util.circular_stats -========================= - Functions for computing statistics on circular (directional) distributions. -.. autosummary:: - :toctree: generated/ - - angular_mean - angular_std - angular_mean_deg - angular_std_deg - interval_mean - interval_std - mean_of_two_angles - mean_of_two_angles_deg - """ import numpy as np diff --git a/pyart/util/datetime_utils.py b/pyart/util/datetime_utils.py index 40e7ca0996..7a732e8391 100644 --- a/pyart/util/datetime_utils.py +++ b/pyart/util/datetime_utils.py @@ -1,17 +1,6 @@ """ -pyart.util.datetime_utils -========================= - Functions for converting date and time between various forms. -.. autosummary:: - :toctree: generated/ - datetime_from_radar - datetimes_from_radar - datetime_from_dataset - datetimes_from_dataset - datetime_from_grid - """ try: diff --git a/pyart/util/hildebrand_sekhon.py b/pyart/util/hildebrand_sekhon.py index c2312c71c4..f5d3136c3c 100644 --- a/pyart/util/hildebrand_sekhon.py +++ b/pyart/util/hildebrand_sekhon.py @@ -1,14 +1,6 @@ """ -pyart.util.hildebrand_sekhon -============================ - Estimation of noise in Doppler spectra using the Hildebrand Sekhon method. -.. autosummary:: - :toctree: generated/ - - estimate_noise_hs74 - """ import numpy as np diff --git a/pyart/util/radar_utils.py b/pyart/util/radar_utils.py index 03024cbbe7..afb2467087 100644 --- a/pyart/util/radar_utils.py +++ b/pyart/util/radar_utils.py @@ -1,16 +1,6 @@ """ -pyart.util.radar_utils -====================== - Functions for working radar instances. -.. autosummary:: - :toctree: generated/ - - is_vpt - to_vpt - join_radar - """ import copy diff --git a/pyart/util/sigmath.py b/pyart/util/sigmath.py index 4570348080..7e10a38194 100644 --- a/pyart/util/sigmath.py +++ b/pyart/util/sigmath.py @@ -1,17 +1,6 @@ """ -pyart.util.sigmath -================== - Function for mathematical, signal processing and numerical routines. -.. autosummary:: - :toctree: generated/ - - angular_texture_2d - rolling_window - texture - texture_along_ray - """ import numpy as np diff --git a/pyart/util/simulated_vel.py b/pyart/util/simulated_vel.py index c393c5ee1c..2218b474cc 100644 --- a/pyart/util/simulated_vel.py +++ b/pyart/util/simulated_vel.py @@ -1,14 +1,6 @@ """ -pyart.util.simulated_vel -======================== - Function for creating simulated velocity fields. -.. 
autosummary:: - :toctree: generated/ - - simulated_vel_from_profile - """ import numpy as np diff --git a/pyart/util/xsect.py b/pyart/util/xsect.py index 36d5b32c89..3bc6f3c09a 100644 --- a/pyart/util/xsect.py +++ b/pyart/util/xsect.py @@ -1,17 +1,6 @@ """ -pyart.util.xsect -================ - Function for extracting cross sections from radar volumes. -.. autosummary:: - :toctree: generated/ - - cross_section_ppi - cross_section_rhi - _construct_xsect_radar - _copy_dic - """ from copy import copy
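The correct/ hunks above describe the three dealiasing front ends (dealias_fourdd, dealias_unwrap_phase, dealias_region_based). A minimal sketch of the region-based path on the synthetic aliased radar from pyart.testing; the 'velocity' and 'corrected_velocity' field names and the add_field call follow the usual Py-ART defaults and are assumptions, not part of this diff:

::

    import pyart

    # Synthetic radar whose 'velocity' field is deliberately aliased.
    radar = pyart.testing.make_velocity_aliased_radar()

    # Region-based dealiasing returns a corrected velocity field dictionary.
    corrected = pyart.correct.dealias_region_based(radar, vel_field='velocity')

    # Attach the corrected field to the radar (name assumed here).
    radar.add_field('corrected_velocity', corrected, replace_existing=True)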
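The gatefilter.py hunk documents the GateFilter class, its include/exclude methods and the read-only gate_included / gate_excluded properties. A minimal sketch of that pattern, using the sample radar generator listed under pyart.testing:

::

    import pyart

    # Synthetic PPI radar with a 'reflectivity' field.
    radar = pyart.testing.make_target_radar()

    # Start with all gates included, then exclude weak echoes.
    gatefilter = pyart.correct.GateFilter(radar)
    gatefilter.exclude_below('reflectivity', 10.0)

    # gate_excluded and gate_included return boolean copies; edits to them
    # are not written back to the filter.
    print(gatefilter.gate_excluded.sum(), 'gates excluded')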
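The graph/__init__.py hunk notes that every listed colormap is registered with matplotlib under a 'pyart_' prefix. A quick sketch using one of the colorblind-friendly maps with RadarDisplay; the plot call and its sweep/ax/cmap keywords are the usual Py-ART plotting interface, assumed here rather than shown in the diff:

::

    import matplotlib.pyplot as plt
    import pyart

    radar = pyart.testing.make_target_radar()
    display = pyart.graph.RadarDisplay(radar)

    fig, ax = plt.subplots()
    # Colormaps from pyart.graph are available as 'pyart_<name>'.
    display.plot('reflectivity', sweep=0, ax=ax, cmap='pyart_HomeyerRainbow')
    plt.show()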
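The io/__init__.py hunk keeps the advice that pyart.io.read should handle most files, and the map/ hunks document grid_from_radars for mapping gates onto a Cartesian grid. A sketch combining the two; the package-level sample-file constant and the grid_shape/grid_limits keywords are assumptions based on the names listed above, not guaranteed by this diff:

::

    import pyart

    # Sample Sigmet PPI file shipped with pyart.testing (assumed to be
    # exported at the package level, per the sample_files listing above).
    radar = pyart.io.read(pyart.testing.SIGMET_PPI_FILE)

    # Map the gates onto a 1 x 201 x 201 Cartesian grid; limits in meters
    # are given as (z, y, x) pairs.
    grid = pyart.map.grid_from_radars(
        (radar,),
        grid_shape=(1, 201, 201),
        grid_limits=((0.0, 10000.0), (-50000.0, 50000.0), (-50000.0, 50000.0)))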
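The util/ hunks list, among other helpers, datetime_from_radar and is_vpt. A tiny sketch of those two, again on a synthetic radar:

::

    import pyart

    radar = pyart.testing.make_target_radar()

    # Datetime of the first ray in the volume.
    print(pyart.util.datetime_from_radar(radar))

    # True only for vertically pointing (VPT) scans, so False here.
    print(pyart.util.is_vpt(radar))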