diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index 01f4cef22..000000000 --- a/.coveragerc +++ /dev/null @@ -1,20 +0,0 @@ -[run] -branch = True -concurrency = - multiprocessing - threading - subprocess -source = - peas - pocs -omit = - pocs/utils/data.py -parallel = True - -[report] -exclude_lines = - pragma: no cover - -ignore_errors = True -omit = - tests/* diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 000000000..bcb02874b --- /dev/null +++ b/.dockerignore @@ -0,0 +1,16 @@ +!.git +docs/* +.eggs +.idea +.venv +venv +*.egg-info +.github + +*.md +!README*.md + +*.log +*.pdf +**/*.pyc +**/__pycache__ \ No newline at end of file diff --git a/.github/workflows/pythontest.yaml b/.github/workflows/pythontest.yaml index 058dae299..e257653d5 100644 --- a/.github/workflows/pythontest.yaml +++ b/.github/workflows/pythontest.yaml @@ -1,4 +1,4 @@ -name: Test Python Package +name: Test Python on: [push, pull_request] jobs: @@ -8,37 +8,54 @@ jobs: matrix: python-version: [3.7] steps: - - uses: actions/checkout@v2 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 - with: - python-version: ${{ matrix.python-version }} - - name: Lint with flake8 - run: | - pip install flake8 - # stop the build if there are Python syntax errors or undefined names - flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics - # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide - flake8 . --count --exit-zero --max-complexity=10 --max-line-length=100 --statistics + - name: Checkout code + uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + - name: Lint with flake8 + run: | + pip install flake8 + # stop the build if there are Python syntax errors or undefined names + flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics + # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide + flake8 . --count --exit-zero --max-complexity=10 --max-line-length=100 --statistics test: runs-on: ubuntu-latest strategy: matrix: python-version: [3.7] steps: - - uses: actions/checkout@v2 - - name: Fetch all history for all tags and branches for versioneer - run: git fetch --prune --unshallow - - name: Pull pocs image - run: | - docker pull gcr.io/panoptes-exp/pocs:latest - - name: Test with pytest in pocs container - run: | - ci_env=`bash <(curl -s https://codecov.io/env)` - docker run -i \ - $ci_env \ - -e LOCAL_USER_ID=$(id -u) \ - -e TRAVIS=true \ - -v $(pwd):/var/panoptes/POCS \ - gcr.io/panoptes-exp/pocs:latest \ - scripts/testing/run-tests.sh + - name: Checkout code + uses: actions/checkout@v2 + - name: Fetch all history for all tags and branches + run: git fetch --prune --unshallow + - name: Build pocs image + run: | + docker build -t pocs:testing -f docker/latest.Dockerfile . 
+ - name: Test with pytest in pocs container + run: | + mkdir -p coverage_dir && chmod 777 coverage_dir + ci_env=`bash <(curl -s https://codecov.io/env)` + docker run -i \ + $ci_env \ + -e REPORT_FILE="/tmp/coverage/coverage.xml" \ + --network "host" \ + -v $PWD:/var/panoptes/logs \ + -v $PWD/coverage_dir:/tmp/coverage \ + pocs:testing \ + scripts/testing/run-tests.sh + - name: Upload coverage report to codecov.io + uses: codecov/codecov-action@v1 + if: success() + with: + name: codecov-upload + file: coverage_dir/coverage.xml + fail_ci_if_error: true + - name: Create log file artifact + uses: actions/upload-artifact@v1 + if: always() + with: + name: log-files + path: panoptes-testing.log diff --git a/.gitignore b/.gitignore index 006c5c5f3..bcbbc904e 100644 --- a/.gitignore +++ b/.gitignore @@ -1,89 +1,51 @@ -# PANOPTES specific files -conf_files/*_local.yaml -examples/notebooks/*.fits -examples/notebooks/*.jpeg - -# Development support -sftp-config.json - -# emacs backups +# Temporary and binary files *~ -\#*\# - -# TeX products -*.aux -*.log -*.pdf -*.toc - -# Compiled files -*.py[co] -*.a -*.o +*.py[cod] *.so -__pycache__ - -# Ignore .c files by default to avoid including generated code. If you want to -# add a non-generated .c extension, use `git add -f filename.c`. -*.c - -# Other generated files -_build -*/cython_version.py -htmlcov -.coverage -.coverage.* -MANIFEST - -# Sphinx -docs/api -docs/_build -docs/_static +*.cfg +!.isort.cfg +!setup.cfg +*.orig +*.log +*.pot +__pycache__/* +.cache/* +.*.swp +*/.ipynb_checkpoints/* +.DS_Store -# Eclipse editor project files +# Project files +.ropeproject .project .pydevproject .settings - -# Pycharm editor project files .idea +tags -# Packages/installer info +# Package files *.egg -*.eggs -*.cache -*.egg-info -dist -build -eggs -parts -var -sdist -develop-eggs +*.eggs/ .installed.cfg -distribute-*.tar.gz - -# Other -.*.swp - -# Mac OSX -.DS_Store - -# Eclipse project files -.settings -.project -.pydevproject - -# Ignore IPython notebook (Jupyter) checkpoints. -.ipynb_checkpoints -notebooks/* - -# Ignore link to weather_plots in web/static/ -weather_plots +*.egg-info -# Ignore pytest.ini file in root of project. Especially useful -# on a unit to skip tests that interact with hardware. -/pytest.ini +# Unittest and coverage +htmlcov/* +.coverage +.tox +junit.xml +coverage.xml +.pytest_cache/ + +# Build and docs folder/files +build/* +dist/* +sdist/* +docs/api/* +docs/_rst/* +docs/_build/* +cover/* +MANIFEST -# Ignore pytest's cache of data across tests. -/.pytest_cache +# Per-project virtualenvs +.venv*/ +**/.ipynb_checkpoints/** diff --git a/.pycodestyle.cfg b/.pycodestyle.cfg deleted file mode 100644 index 3a7f877fa..000000000 --- a/.pycodestyle.cfg +++ /dev/null @@ -1,16 +0,0 @@ -[pycodestyle] -; Pycodestyle config options are: -; exclude, filename, select, ignore, max-line-length, max-doc-length, -; hang-closing, count, format, quiet, show-pep8, show-source, -; statistics, verbose - -; Style violations to ignore. 
-; -; E501: line too long (82 > 79 characters) -; -; For the full list, see: -; https://pycodestyle.readthedocs.io/en/latest/intro.html#error-codes - -ignore = E501 - -max-line-length = 99 diff --git a/.readthedocs.yml b/.readthedocs.yml index 12c7dd25c..f4a658576 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -1,12 +1,20 @@ # .readthedocs.yml -build: - image: latest +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details -formats: - - htmlzip +# Required +version: 2 -requirements_file: docs/requirements.txt +# Build documentation in the docs/ directory with Sphinx +sphinx: + configuration: docs/conf.py + +formats: all python: - version: 3.6 - setup_py_install: true + version: 3.7 + install: + - requirements: docs/requirements.txt + - method: pip + path: . + system_packages: true diff --git a/.travis.yml b/.travis.yml index c7889c400..1f010372e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,94 +1,18 @@ dist: xenial sudo: required language: python -services: - - mongodb python: - "3.6" -env: - - PANDIR=$HOME POCS=$TRAVIS_BUILD_DIR PANUSER=$USER ARDUINO_VERSION=1.8.1 +services: +- docker before_install: - - mkdir -p $PANDIR/logs - - mkdir -p $PANDIR/astrometry/data - - ln -s $POCS $PANDIR/POCS - - pip install -U pip - - pip install coveralls - - # Install arudino files - - cd $PANDIR - - export DISPLAY=:1.0 - - export - - wget http://downloads.arduino.cc/arduino-${ARDUINO_VERSION}-linux64.tar.xz - - tar xf arduino-${ARDUINO_VERSION}-linux64.tar.xz - - sudo mv arduino-${ARDUINO_VERSION} /usr/local/share/arduino - - sudo ln -s /usr/local/share/arduino/arduino /usr/local/bin/arduino - - # Install miniconda - - wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh; - - bash miniconda.sh -b -p $HOME/miniconda - - export PATH="$HOME/miniconda/bin:$PANDIR/astrometry/bin:$PATH" - - hash -r - - - conda config --set always_yes yes --set changeps1 no - - conda update -q conda - - conda info -a # Useful for debugging any issues with conda - - conda create -q -n test-environment python=$TRAVIS_PYTHON_VERSION - - source activate test-environment - - conda install numpy scipy - - # Install astrometry.net - - wget https://github.com/dstndstn/astrometry.net/releases/download/0.78/astrometry.net-0.78.tar.gz - - tar zxvf astrometry.net-0.78.tar.gz && cd astrometry.net-0.78 - - make && make py && make install INSTALL_DIR=$PANDIR/astrometry - - echo 'add_path $PANDIR/astrometry/data' | sudo tee --append $PANDIR/astrometry/etc/astrometry.cfg -addons: - apt: - packages: - - gphoto2 - - libcairo2-dev - - libnetpbm10-dev - - netpbm - - libpng12-dev - - libjpeg-dev - - python-numpy - - python-pyfits - - python-dev - - zlib1g-dev - - libbz2-dev - - swig - - libcfitsio-bin - - libcfitsio-dev -install: - - cd $PANDIR - # install POCS and requirements - - cd $POCS - - pip install -r requirements.txt - - pip install -r docs/requirements.txt - - pip install -e . - # Force creation of config file. - - python -c "import astroplan" - # Custom IERS urls in config file. - - | - echo "\ - - [utils.iers.iers] - iers_auto_url = https://storage.googleapis.com/panoptes-resources/iers/ser7.dat - iers_auto_url_mirror = https://storage.googleapis.com/panoptes-resources/iers/ser7.dat - " >> $HOME/.astropy/config/astropy.cfg - # Download IERS and astroetry.net files. 
- - python pocs/utils/data.py --folder $PANDIR/astrometry/data +- docker pull gcr.io/panoptes-exp/pocs:latest +- ci_env=`bash <(curl -s https://codecov.io/env)` +install: true script: - - export BOARD="arduino:avr:micro" - - arduino --verify --board $BOARD resources/arduino_files/camera_board/camera_board.ino - - arduino --verify --board $BOARD resources/arduino_files/power_board/power_board.ino - - arduino --verify --board $BOARD resources/arduino_files/telemetry_board/telemetry_board.ino - - export PYTHONPATH="$PYTHONPATH:$POCS/scripts/coverage" - - export COVERAGE_PROCESS_START=.coveragerc - - coverage run $(which pytest) -v --test-databases all - - coverage combine -cache: - pip: true - directories: - - $PANDIR/astrometry/ -after_success: - - bash <(curl -s https://codecov.io/bash) +- docker run -it + $ci_env + -e LOCAL_USER_ID=0 + -v $TRAVIS_BUILD_DIR:/var/panoptes/POCS + gcr.io/panoptes-exp/pocs:latest + scripts/testing/run-tests.sh diff --git a/AUTHORS.rst b/AUTHORS.rst new file mode 100644 index 000000000..0895d288c --- /dev/null +++ b/AUTHORS.rst @@ -0,0 +1,26 @@ +============ +Contributors +============ + +* Wilfred Tyler Gee +* Josh Walawender +* James Synge +* Demezhan Marikov +* Anthony Horton +* Brendan Orenstein +* Mike Butterfield +* TaylahB +* James Synge +* jermainegug <32515601+jermainegug@users.noreply.github.com> +* blackflip14 +* danjampro +* Sushant Mehta +* kmeagle1515 <46345142+kmeagle1515@users.noreply.github.com> +* Dan Proole +* Jenny Tong +* Kate Storey-Fisher +* Lee Spitler +* Luca +* Sean Marquez +* lucasholucasho +* megwill4268 diff --git a/CHANGELOG.rst b/CHANGELOG.rst new file mode 100644 index 000000000..261e78a58 --- /dev/null +++ b/CHANGELOG.rst @@ -0,0 +1,287 @@ +CHANGELOG +========= + +All notable changes to this project will be documented in this file. + +The format is based on `Keep a Changelog `__, and this project +adheres to `Semantic Versioning `__. + +[0.7.0dev] +-------------------- + +If you thought 9 months between releases was a long time, how about 18 +months! :) This version has a lot of breaking changes and is not +backwards compatible with previous versions. The release is a (big) stepping +stone on the way to ``0.8.0`` and (eventually!) a ``1.0.0``. + +The entire repo has been redesigned to support docker images. This comes +with a number of changes, including the refactoring of many items into +the `panoptes-utils `__ repo. + +There are a lot of changes included in this release, highlights below: + +Added +~~~~~ + +* Storing an explicit ``safety`` collection in the database. +* Configuration file specific for testing rather than relying on ``pocs.yaml``. +* Convenience scripts for running tests inside docker container: + ``scripts/testing/test-software.sh`` +* GitHub Actions for testing and coverage upload. + +Changed +~~~~~~~ + +* Docker as default. (#951). +* Weather items have moved to `aag-weather `__. + * Two docker containers run from the ``aag-weather`` image and have a ``docker/docker-compose-aag.yaml`` file to start. +* Config items related to the configuration system have been moved to the `Config Server `__ in ``panoptes-utils`` repo. + * The main interface for POCS related items is through ``self.get_config``, which can take a key and a default, e.g. ``self.get_config('mount.horizon', default='30 deg')``. + * Test writing is affected and is currently more difficult than would be ideal. An updated test writing document will be following this release. 
+ +* Logging has changed to `loguru `__ and has been greatly simplified: + * ``get_root_logger`` has been replaced by ``get_logger``. +* The ``per-run`` logs have been removed and have been replaced by two logs files: + * ``$PANDIR/logs/panoptes.log``: Log file meant for watching on the + command line (via ``tail``) or for otherwise human-readable logs. + Rotated daily at 11:30 am. Only the previous days' log is + retained. + * ``$PANDIR/logs/panoptes_YYYYMMDD.log``: Log file meant for archive + or information gathering. Stored in JSON format for ingestion into + log analysis service. Rotated daily at 11:30 and stored in a + compressed file for 7 days. Future updates will add option to + upload to google servers. + +* ``loguru`` provides two new log levels + + * ``trace``: one level below ``debug``. + * ``success``: one level above ``info``. + +* **Breaking** Mount: unparking has been moved from the + ``ready`` to the ``slewing`` state. This fixes a problem where after + waiting 10 minutes for observation check, the mount would move from + park to home to park without checking weather safety. +* Documentation updates. +* Lots of conversions to ``f-strings``. +* Renamed codecov configuration file to be compliant. +* Switch to pyscaffold for package maintenance. +* "Waiting" method changes: + * `sleep` has been renamed to `wait`. +* All `status()` methods have been converted to properties that return a useful dict. +* Making proper abstractmethods. +* Documentation updates where found. +* Many log and f-string fixes. +* `pocs.config_port` property available publicly. +* horizon check for state happens directly in `run`. + +Removed +~~~~~~~ + +* Cleanup of any stale or unused code. +* All ``mongo`` related code. +* Consolidate configration files: ``.pycodestyle.cfg``, ``.coveragerc`` + into ``setup.cfg``. +* Weather related items. These have been moved to + ```aag-weather`` `__. +* All notebook tutorials in favor of + ```panoptes-tutorials`` `__. +* Remove all old install and startup scripts. + +[0.6.2] - 2018-09-27 +-------------------- + +One week between releases is a lot better than 9 months! ;) Some small +but important changes mark this release including faster testing times +on local machines. Also a quick release to remove some of the CloudSQL +features (but see the shiny new Cloud Functions over in the +`panoptes-network `__ +repo!). + +Fixed +~~~~~ + +* Cameras +* Use unit\_id for sequence and image ids. Important for processing + consistency [#613]. +* State Machine + +Changed +~~~~~~~ + +* Camera +* Remove camera creation from Observatory [#612]. +* Smarter event waiting [#625]. +* More cleanup, especially path names and pretty images [#610, #613, + #614, #620]. +* Mount +* Testing +* Caching some of the build dirs [#611]. +* Only use Mongo DB type during local testing - Local testing with + 1/3rd the wait! [#616]. +* Google Cloud [#599] +* Storage improvements [#601]. + +Added +~~~~~ + +* Misc +* CountdownTimer utility [#625]. + +Removed +~~~~~~~ + +* Google Cloud [#599] +* Reverted some of the CloudSQL connectivity [#652] +* Cameras +* Remove spline smoothing focus [#621]. + +[0.6.1] - 2018-09-20 +-------------------- + +| Lots of changes in this release. In particular we've pushed through a +lot of changes +| (especially with the help of @jamessynge) to make the development +process a lot +| smoother. This has in turn contribute to the quality of the codebase. + +Too long between releases but even more exciting improvements to come! 
+Next up is tackling the events notification system, which will let us +start having some vastly improved UI features. + +Below is a list of some of the changes. + +Thanks to first-time contributors: @jermainegug @jeremylan as well as +contributions from many folks over at +https://github.com/AstroHuntsman/huntsman-pocs. + +Fixed +~~~~~ + +* Cameras +* Fix for DATE-OBS fits header [#589]. +* Better property settings for DSLRs [#589]. +* Pretty image improvements [#589]. +* Autofocus improvements for SBIG/Focuser [#535]. +* Primary camera updates [#614, 620]. +* Many bug fixes [#457, #589]. +* State Machine +* Many fixes [#509, #518]. + +Changed +~~~~~~~ + +* Mount +* POCS Shell: Hitting ``Ctrl-c`` will complete movement through states + [#590]. +* Pointing updates, including ``auto_correct`` [#580]. +* Tracking mode updates (**fixes for Northern Hemisphere only!**) + [#549]. +* Serial interaction improvements [#388, #403]. +* Shutdown improvements [#407, #421]. +* Dome +* Changes from May Huntsman commissioning run [#535] +* Messaging +* Better and consistent topic terminology [#593, #605]. +* Anticipation of coming events. +* Misc +* Default to rereading the fields file for targets [#488]. +* Timelapse updates [#523, #591]. + +Added +~~~~~ + +* Cameras +* Basic scripts for bias and dark frames. +* Add support for Optec FocusLynx based focus controllers [#512]. +* Pretty images from FITS files. Thanks @jermainegug! [#538]. +* Testing +* pyflakes testing support for bug squashing! :bettle: [#596]. +* pycodestyle for better code! [#594]. +* Threads instead of process [#468]. +* Fix coverage & Travis config for concurrency [#566]. +* Google Cloud [#599] +* Added instructions for authentication [#600]. +* Add a ``pan_id`` to units for GCE interaction[#595]. +* Adding Google CloudDB interaction [#602]. +* Sensors +* Much work on arduinos and sensors [#422]. +* Misc +* Startup scripts for easier setup [#475]. +* Install scripts for Ubuntu 18.04 [#585]. +* New database type: mongo, file, memory [#414]. +* Twitter! Slack! Social median interactions. Hooray! Thanks + @jeremylan! [#522] + +[0.6.0] - 2017-12-30 +-------------------- + +Changed +~~~~~~~ + +* Enforce 100 character limit for code + `159 `__. +* Using root-relative module imports + `252 `__. +* ``Observatory`` is now a parameter for a POCS instance + `195 `__. +* Better handling of simulator types + `200 `__. +* Log improvements: +* Separate files for each level and new naming scheme + `165 `__. +* Reduced log format + `254 `__. +* Better reusing of logger + `192 `__. +* Single shared MongoClient connection + `228 `__. +* Improvements to build process + `176 `__, + `166 `__. +* State machine location more flexible + `209 `__, + `219 `__ +* Testing improvments + `249 `__. +* Updates to many wiki pages. +* Misc bug fixes and improvements. + +Added +~~~~~ + +* Merge PEAS into POCS + `169 `__. +* Merge PACE into POCS + `167 `__. +* Support added for testing of serial devices + `164 `__, + `180 `__. +* Basic dome support + `231 `__, + `248 `__. +* Polar alignment helper functions moved from PIAA + `265 `__. + +Removed +~~~~~~~ + +* Remove threading support from rs232.SerialData + `148 `__. + +[0.5.1] - 2017-12-02 +-------------------- + +Added +~~~~~ + +* First real release! 
+* Working POCS features: +* mount (iOptron) +* cameras (DSLR, SBIG) +* focuer (Birger) +* scheduler (simple) +* Relies on separate repositories PEAS and PACE +* Automated testing with travis-ci.org +* Code coverage via codecov.io +* Basic install scripts + diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md deleted file mode 100644 index 41934e278..000000000 --- a/CONTRIBUTING.md +++ /dev/null @@ -1,146 +0,0 @@ -Please see the -[code of conduct](https://github.com/panoptes/POCS/blob/develop/CODE_OF_CONDUCT.md) -for our playground rules and follow them during all your contributions. - -# Getting Started - -We prefer that all changes to POCS have an associated -[GitHub Issue in the project](https://github.com/panoptes/POCS/issues) -that explains why it is needed. This allows us to debate the best -approach to address the issue before folks spend a lot of time -writing code. If you are unsure about a possible contribution to -the project, please contact the project owners about your idea; -of course, an [issue](https://github.com/panoptes/POCS/issues) is a -good way to do this. - -# Pull Request Process -_This is a summary of the process. See -[the POCS wiki](https://github.com/panoptes/POCS/wiki/PANOPTES-Feature-Development-Process) -for more info._ - -* Pre-requisites - - Ensure you have a [github account.](https://github.com/join) - - [Setup ssh access for github](https://help.github.com/en/github/authenticating-to-github/connecting-to-github-with-ssh). - - If the change you wish to make is not already an - [Issue in the project](https://github.com/panoptes/POCS/issues), - please create one specifying the need. -* Process - - Create a fork of the repository via github (button in top-right). - - Clone your fork to your local system: - - `git clone git@github.com:YOUR-GITHUB-NAME/POCS.git` - - Set the "upstream" branch to `panoptes`: - - `cd POCS` - - `git remote add upstream https://github.com/panoptes/POCS.git` - - `git fetch upstream` - - Use a topic branch within your fork to make changes. All of our repositories have a - default branch of `develop` when you first clone them, but your work should be in a - separate branch (see note below). Your branch should almost always be based off of - the `upstream/develop` branch: - - Create a branch with a descriptive name, e.g.: - - `git checkout -b new-camera-simulator upstream/develop` - - `git checkout -b issue-28 upstream/develop` - - Ensure that your code meets this project's standards (see Testing and Code Formatting below). - - Run `python setup.py test` from the `$POCS` directory before pushing to github - - Submit a pull request to the repository, be sure to reference the issue number it addresses. - - - > Note: See ["A successful Git branching model"](https://nvie.com/posts/a-successful-git-branching-model/) for details - on how the repository is structured. - - -# Setting up Local Environment - - Follow instructions in the [README](https://github.com/panoptes/POCS/blob/develop/README.md) - as well as the [Coding in PANOPTES](https://github.com/panoptes/POCS/wiki/Coding-in-PANOPTES) - document. - - -# Testing - - All changes should have corresponding tests and existing tests should pass after - your changes. - - For more on testing see the - [Coding in PANOPTES](https://github.com/panoptes/POCS/wiki/Coding-in-PANOPTES) page. 
- -# Code Formatting - -- All Python should use [PEP 8 Standards](https://www.python.org/dev/peps/pep-0008/) - - Line length is set at 100 characters instead of 80. - - It is recommended to have your editor auto-format code whenever you save a file - rather than attempt to go back and change an entire file all at once. There are - many plugins that exist for this. - - You can also use - [yapf (Yet Another Python Formatter)](https://github.com/google/yapf) - for which POCS includes a style file (.style.yapf). For example: - ```bash - # cd to the root of your workspace. - cd $(git rev-parse --show-toplevel) - # Format the modified python files in your workspace. - yapf -i $(git diff --name-only | egrep '\.py$') - ``` -- Do not leave in commented-out code or unnecessary whitespace. -- Variable/function/class and file names should be meaningful and descriptive. -- File names should be lower case and underscored, not contain spaces. For example, `my_file.py` -instead of `My File.py`. -- Define any project specific terminology or abbreviations you use in the file you use them. -- Test imports are slightly different because `pocs/tests` and `peas/tests` are not Python - packages (those directories don't contain an `__init__.py` file). For imports of `pocs` or - `peas` code, use root-relative imports as described above. For importing test packages and - modules, assume the test doing the imports is in the root directory. - -# Log Messages - -Use appropriate logging: -- Log level: - - DEBUG (i.e. `self.logger.debug()`) should attempt to capture all run-time - information. - - INFO (i.e. `self.logger.info()`) should be used sparingly and meant to convey - information to a person actively watching a running unit. - - WARNING (i.e. `self.logger.warning()`) should alert when something does not - go as expected but operation of unit can continue. - - ERROR (i.e. `self.logger.error()`) should be used at critical levels when - operation cannot continue. -- The logger supports variable information without the use of the `format` method. -- There is a `say` method available on the main `POCS` class that is meant to be -used in friendly manner to convey information to a user. This should be used only -for personable output and is typically displayed in the "chat box"of the PAWS -website. These messages are also sent to the INFO level logger. - -#### Logging examples: - -_Note: These are meant to illustrate the logging calls and are not necessarily indicative of real -operation_ - -``` -self.logger.info("PANOPTES unit initialized: {}", self.config['name']) - -self.say("I'm all ready to go, first checking the weather") - -self.logger.debug("Setting up weather station") - -self.logger.warning('Problem getting wind safety: {}'.format(e)) - -self.logger.debug("Rain: {} Clouds: {} Dark: {} Temp: {:.02f}", - is_raining, - is_cloudy, - is_dark, - temp_celsius -) - -self.logger.error('Unable to connect to AAG Cloud Sensor, cannot continue') -``` - -#### Viewing log files - -- You typically want to follow an active log file by using `tail -F` on the command line. -- The [`grc`](https://github.com/garabik/grc) (generic colouriser) can be used with -`tail` to get pretty log files. - -``` -(panoptes-env) $ grc tail -F $PANDIR/logs/pocs_shell.log -``` - -The following screenshot shows commands entered into a `jupyter-console` in the top -panel and the log file in the bottom panel. - -

-[screenshot: jupyter-console session (top panel) and log file (bottom panel)]

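Both the Markdown contributing guide removed above and the reStructuredText guide added below ask contributors to run the same checks locally that CI runs before pushing. A minimal pre-push sketch, assuming `flake8` is installed, `$POCS` and `$PANDIR` are set as described in these guides, and Docker is available for the containerized test script (the commands themselves are taken from the workflow and guides in this diff):

```bash
# Lint with the same flake8 invocations as the GitHub Actions workflow.
pip install flake8
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=100 --statistics

# Run the software test suite inside the Docker image.
cd $POCS
scripts/testing/test-software.sh

# In another terminal, follow the log output while the tests run.
tail -F $PANDIR/logs/panoptes.log
```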
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst new file mode 100644 index 000000000..e95bb6ca8 --- /dev/null +++ b/CONTRIBUTING.rst @@ -0,0 +1,268 @@ +================== +CONTRIBUTING GUIDE +================== + +Please see the `code of +conduct `__ +for our playground rules and follow them during all your contributions. + +Getting Started +=============== + +We prefer that all changes to POCS have an associated `GitHub Issue in +the project `__ that explains +why it is needed. This allows us to debate the best approach to address +the issue before folks spend a lot of time writing code. If you are +unsure about a possible contribution to the project, please contact the +project owners about your idea; of course, an +`issue `__ is a good way to do +this. + +Pull Request Process +==================== + +.. note:: + + This is a summary of the process. See the `POCS wiki `_ for more info. + +- Pre-requisites +- Ensure you have a `github account. `__ +- `Setup ssh access for + github `__. +- If the change you wish to make is not already an `Issue in the + project `__, please create + one specifying the need. + +Process +^^^^^^^ + +1. Create a fork of the repository via github (button in top-right). +2. Clone your fork to your local system: + + .. code-block:: + bash + + cd $PANDIR + git clone git@github.com:YOUR-GITHUB-NAME/POCS.git + +3. Set the "upstream" branch to ``panoptes`` and fetch the upstream changes: + + .. code-block:: + bash + + cd POCS + git remote add upstream https://github.com/panoptes/POCS.git + git fetch upstream + +4. Use a topic branch within your fork to make changes. All of our repositories + have a default branch of ``develop`` when you first clone them, but your work + should be in a separate branch (see note below). Your branch should be based + off of the ``upstream/develop`` branch. + + Create a branch with a descriptive name, e.g.: + + .. code-block:: + bash + + git checkout -b new-camera-simulator upstream/develop + git checkout -b issue-28 upstream/develop + +5. Ensure that your code meets this project's standards (see Testing and Code + Formatting below). + +6. Run the testing suite locally to ensure that all tests are passing. See Testing below. + +7. Submit a pull request to the repository, be sure to reference the issue number it addresses. + +.. note:: + + See `"A successful Git branching model" `__ + for details on how the repository is structured. + + +Code Formatting +=============== + +- All Python should use `PEP 8 + Standards `__ +- Line length is set at 100 characters instead of 80. +- It is recommended to have your editor auto-format code whenever you + save a file rather than attempt to go back and change an entire file + all at once. There are many plugins that exist for this. +- You can also use `yapf (Yet Another Python + Formatter) `__ for which POCS + includes a style file (.style.yapf). For example:: + + # cd to the root of your workspace. + cd $(git rev-parse --show-toplevel) + # Format the modified python files in your workspace. + yapf -i $(git diff --name-only | egrep '\.py$')`` + +- Do not leave in commented-out code or unnecessary whitespace. +- Variable/function/class and file names should be meaningful and + descriptive. +- File names should be lower case and underscored, not contain spaces. + For example, ``my_file.py`` instead of ``My File.py``. +- Define any project specific terminology or abbreviations you use in + the file you use them. + +Log Messages +============ + +Use appropriate logging: + +* DEBUG (i.e. 
``self.logger.debug()``) should attempt to capture all run*time information. + +* INFO (i.e. ``self.logger.info()``) should be used sparingly and meant to convey information to a person actively watching a running unit. + +* WARNING (i.e. ``self.logger.warning()``) should alert when something does not go as expected but operation of unit can continue. + +* ERROR (i.e. ``self.logger.error()``) should be used at critical levels when operation cannot continue. + +* The logger supports variable information without the use of the ``format`` method. + +* There is a ``say`` method available on the main ``POCS`` class that is meant to be used in friendly manner to convey information to a user. This should be used only for personable output and is typically displayed in the "chat box"of the PAWS website. These messages are also sent to the INFO level logger. + +Logging examples: +^^^^^^^^^^^^^^^^^ + +*Note: These are meant to illustrate the logging calls and are not +necessarily indicative of real operation* + +.. code-block:: + python + + self.say("I'm all ready to go, first checking the weather") + + self.logger.info(f'PANOPTES unit initialized: {self.name}') + + self.logger.debug("Setting up weather station") + + self.logger.warning(f'Problem getting wind safety: {e!r}') + + self.logger.debug(f'Rain: {is_raining} Clouds: {is_cloudy} Dark: {is_dark} Temp: {temp:.02f}') + + self.logger.error('Unable to connect to AAG Cloud Sensor, cannot continue') + +Viewing log files +^^^^^^^^^^^^^^^^^ + +- You typically want to follow an active log file by using ``tail -F`` + on the command line. + +.. code-block:: bash + + tail -F $PANDIR/logs/panoptes.log + + +Test POCS +========= + +POCS comes with a testing suite that allows it to test that all of the software +works and is installed correctly. Running the test suite by default will use simulators for all of the hardware and is meant to test that +the software works correctly. Additionally, the testing suite can be run +with various flags to test that attached hardware is working properly. + +Software Testing +^^^^^^^^^^^^^^^^ + +There are a few scenarios where you want to run the test suite: + +#. You are getting your unit ready and want to test software is + installed correctly. +#. You are upgrading to a new release of software (POCS, its + dependencies or the operating system). +#. You are helping develop code for POCS and want test your code doesn't + break something. + +Testing your installation +^^^^^^^^^^^^^^^^^^^^^^^^^ + +In order to test your installation you should have followed all of the steps above +for getting your unit ready. To run the test suite, you will need to open a terminal +and navigate to the ``$POCS`` directory. + +.. code:: bash + + cd $POCS + + # Run the software testing + scripts/testing/test-software.sh + +.. note:: + + The test suite will give you some warnings about what is going + on and give you a chance to cancel the tests (via ``Ctrl-c``). + +It is often helpful to view the log output in another terminal window +while the test suite is running: + +.. code:: bash + + # Follow the log file + tail -F $PANDIR/logs/panoptes.log + +Testing your code changes +^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. note:: + + This step is meant for people helping with software development. + +The testing suite will automatically be run against any code committed to our github +repositories. However, the test suite should also be run locally before pushing +to github. 
This can be done either by running the entire test suite as above or +by running an individual test related to the code you are changing. For instance, +to test the code related to the cameras one can run: + +.. code:: bash + + pytest -xv pocs/tests/test_camera.py + +Here the ``-x`` option will stop the tests upon the first failure and the ``-v`` makes +the testing verbose. +Note that some tests might require additional software. This software is +installed in the docker image, which is used by the ``test-software.sh`` +script above), but is **not** used when calling ``pytest`` directly. For +instance, anything requiring plate solving needs ``astrometry.net`` +installed. + +Any new code should also include proper tests. See below for details. + +Writing tests +^^^^^^^^^^^^^ + +All code changes should include tests. We strive to maintain a high code coverage +and new code should necessarily maintain or increase code coverage. +For more details see the `Writing +Tests `__ +page. + +Hardware Testing +~~~~~~~~~~~~~~~~ + +Hardware testing uses the same testing suite as the software testing but with +additional options passed on the command line to signify what hardware should be +tested. + +The options to pass to ``pytest`` is ``--with-hardware``, which accepts a list of +possible hardware items that are connected. This list includes ``camera``, ``mount``, +and ``weather``. Optionally you can use ``all`` to test a fully connected unit. + +.. warning:: + + The hardware tests do not perform safety checking of the weather or + dark sky. The ``weather`` test mentioned above tests if a weather station is + connected but does not test the safety conditions. It is assumed that hardware + testing is always done with direct supervision. + +.. code:: bash + + # Test an attached camera + pytest --with-hardware=camera + + # Test an attached camera and mount + pytest --with-hardware=camera,mount + + # Test a fully connected unit + pytest --with-hardware=all \ No newline at end of file diff --git a/Changelog.md b/Changelog.md deleted file mode 100644 index f95e373f6..000000000 --- a/Changelog.md +++ /dev/null @@ -1,131 +0,0 @@ -## [0.6.2] - 2018-09-27 - -One week between releases is a lot better than 9 months! ;) Some small but important changes mark this release including faster testing times on local machines. Also a quick release to remove some of the CloudSQL features (but see the shiny new Cloud Functions over in the [panoptes-network](https://github.com/panoptes/panoptes-network) repo!). - -### Fixed -* Cameras - * Use unit_id for sequence and image ids. Important for processing consistency [#613]. -* State Machine - -### Changed -* Camera - * Remove camera creation from Observatory [#612]. - * Smarter event waiting [#625]. - * More cleanup, especially path names and pretty images [#610, #613, #614, #620]. -* Mount -* Testing - * Caching some of the build dirs [#611]. - * Only use Mongo DB type during local testing - Local testing with 1/3rd the wait! [#616]. -* Google Cloud [#599] - * Storage improvements [#601]. - -### Added -* Misc - * CountdownTimer utility [#625]. - -### Removed -* Google Cloud [#599] - * Reverted some of the CloudSQL connectivity [#652] -* Cameras - * Remove spline smoothing focus [#621]. - -## [0.6.1] - 2018-09-20 - -Lots of changes in this release. In particular we've pushed through a lot of changes -(especially with the help of @jamessynge) to make the development process a lot -smoother. This has in turn contribute to the quality of the codebase. 
- -Too long between releases but even more exciting improvements to come! Next up is tackling the events notification system, which will let us start having some vastly improved UI features. - -Below is a list of some of the changes. - -Thanks to first-time contributors: @jermainegug @jeremylan as well as contributions from many folks over at https://github.com/AstroHuntsman/huntsman-pocs. - -### Fixed -* Cameras - * Fix for DATE-OBS fits header [#589]. - * Better property settings for DSLRs [#589]. - * Pretty image improvements [#589]. - * Autofocus improvements for SBIG/Focuser [#535]. - * Primary camera updates [#614, 620]. - * Many bug fixes [#457, #589]. -* State Machine - * Many fixes [#509, #518]. - -### Changed -* Mount - * POCS Shell: Hitting `Ctrl-c` will complete movement through states [#590]. - * Pointing updates, including `auto_correct` [#580]. - * Tracking mode updates (**fixes for Northern Hemisphere only!**) [#549]. - * Serial interaction improvements [#388, #403]. - * Shutdown improvements [#407, #421]. -* Dome - * Changes from May Huntsman commissioning run [#535] -* Messaging - * Better and consistent topic terminology [#593, #605]. - * Anticipation of coming events. -* Misc - * Default to rereading the fields file for targets [#488]. - * Timelapse updates [#523, #591]. - -### Added -* Cameras - * Basic scripts for bias and dark frames. - * Add support for Optec FocusLynx based focus controllers [#512]. - * Pretty images from FITS files. Thanks @jermainegug! [#538]. -* Testing - * pyflakes testing support for bug squashing! :bettle: [#596]. - * pycodestyle for better code! [#594]. - * Threads instead of process [#468]. - * Fix coverage & Travis config for concurrency [#566]. -* Google Cloud [#599] - * Added instructions for authentication [#600]. - * Add a `pan_id` to units for GCE interaction[#595]. - * Adding Google CloudDB interaction [#602]. -* Sensors - * Much work on arduinos and sensors [#422]. -* Misc - * Startup scripts for easier setup [#475]. - * Install scripts for Ubuntu 18.04 [#585]. - * New database type: mongo, file, memory [#414]. - * Twitter! Slack! Social median interactions. Hooray! Thanks @jeremylan! [#522] - -## [0.6.0] - 2017-12-30 -### Changed -- Enforce 100 character limit for code [159](https://github.com/panoptes/POCS/pull/159). -- Using root-relative module imports [252](https://github.com/panoptes/POCS/pull/252). -- `Observatory` is now a parameter for a POCS instance [195](https://github.com/panoptes/POCS/pull/195). -- Better handling of simulator types [200](https://github.com/panoptes/POCS/pull/200). -- Log improvements: - - Separate files for each level and new naming scheme [165](https://github.com/panoptes/POCS/pull/165). - - Reduced log format [254](https://github.com/panoptes/POCS/pull/254). - - Better reusing of logger [192](https://github.com/panoptes/POCS/pull/192). -- Single shared MongoClient connection [228](https://github.com/panoptes/POCS/pull/228). -- Improvements to build process [176](https://github.com/panoptes/POCS/pull/176), [166](https://github.com/panoptes/POCS/pull/166). -- State machine location more flexible [209](https://github.com/panoptes/POCS/pull/209), [219](https://github.com/panoptes/POCS/pull/219) -- Testing improvments [249](https://github.com/panoptes/POCS/pull/249). -- Updates to many wiki pages. -- Misc bug fixes and improvements. 
- -### Added -- Merge PEAS into POCS [169](https://github.com/panoptes/POCS/pull/169). -- Merge PACE into POCS [167](https://github.com/panoptes/POCS/pull/167). -- Support added for testing of serial devices [164](https://github.com/panoptes/POCS/pull/164), [180](https://github.com/panoptes/POCS/pull/180). -- Basic dome support [231](https://github.com/panoptes/POCS/pull/231), [248](https://github.com/panoptes/POCS/pull/248). -- Polar alignment helper functions moved from PIAA [265](https://github.com/panoptes/POCS/pull/265). - -### Removed -- Remove threading support from rs232.SerialData [148](https://github.com/panoptes/POCS/pull/148). - -## [0.5.1] - 2017-12-02 -### Added -- First real release! -- Working POCS features: - + mount (iOptron) - + cameras (DSLR, SBIG) - + focuer (Birger) - + scheduler (simple) -- Relies on separate repositories PEAS and PACE -- Automated testing with travis-ci.org -- Code coverage via codecov.io -- Basic install scripts diff --git a/Dockerfile b/Dockerfile deleted file mode 100644 index a9e77c9d6..000000000 --- a/Dockerfile +++ /dev/null @@ -1,51 +0,0 @@ -# PANOPTES development container - -FROM ubuntu as build-env -MAINTAINER Developers for PANOPTES project - -ARG pan_dir=/var/panoptes - -ENV LANG=C.UTF-8 LC_ALL=C.UTF-8 -ENV ENV /root/.bashrc -ENV SHELL /bin/bash -ENV PANDIR $pan_dir -ENV PANLOG $PANDIR/logs -ENV POCS $PANDIR/POCS -ENV PAWS $PANDIR/PAWS -ENV PANUSER root -ENV SOLVE_FIELD=/usr/bin/solve-field - -# Use "bash" as replacement for "sh" -# Note: I don't think this is the preferred way to do this anymore -RUN rm /bin/sh && ln -s /bin/bash /bin/sh \ - && apt-get update --fix-missing \ - && apt-get -y full-upgrade \ - && apt-get -y install wget build-essential zlib1g-dev bzip2 ca-certificates astrometry.net git \ - && rm -rf /var/lib/apt/lists/* \ - && wget --quiet https://raw.githubusercontent.com/panoptes/POCS/develop/scripts/install/install-dependencies.sh -O ~/install-pocs-dependencies.sh \ - && wget --quiet https://raw.githubusercontent.com/panoptes/POCS/develop/scripts/install/apt-packages-list.txt -O ~/apt-packages-list.txt \ - && wget --quiet https://raw.githubusercontent.com/panoptes/POCS/develop/scripts/install/conda-packages-list.txt -O ~/conda-packages-list.txt \ - && /bin/bash ~/install-pocs-dependencies.sh --no-conda --no-mongodb \ - && rm ~/install-pocs-dependencies.sh \ - && rm ~/conda-packages-list.txt \ - && rm ~/apt-packages-list.txt \ - && echo 'export PATH=/opt/conda/bin:$PATH' > /root/.bashrc \ - && mkdir -p $POCS \ - && mkdir -p $PAWS \ - && mkdir -p $PANLOG \ - && wget --quiet https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O ~/anaconda.sh \ - && /bin/bash ~/anaconda.sh -b -p /opt/conda \ - && rm ~/anaconda.sh \ - && cd $pan_dir \ - && wget --quiet https://github.com/panoptes/POCS/archive/develop.tar.gz -O POCS.tar.gz \ - && tar zxf POCS.tar.gz -C $POCS --strip-components=1 \ - && rm POCS.tar.gz \ - && cd $POCS && /opt/conda/bin/pip install -Ur requirements.txt \ - && /opt/conda/bin/pip install -U setuptools \ - && /opt/conda/bin/python setup.py install \ - && cd $PANDIR \ - && /opt/conda/bin/conda clean --all --yes - -WORKDIR ${POCS} - -CMD ["/bin/bash"] \ No newline at end of file diff --git a/LICENSE.txt b/LICENSE.txt index 6786f7642..0a9739dd4 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -1,6 +1,7 @@ The MIT License (MIT) -Copyright (c) 2014-2016 PANOPTES +Copyright (c) 2014-2020 Project PANOPTES + Copyright 2016 Google Inc. 
Permission is hereby granted, free of charge, to any person obtaining a copy diff --git a/README.md b/README.md deleted file mode 100644 index bb8b7d380..000000000 --- a/README.md +++ /dev/null @@ -1,329 +0,0 @@ -Welcome to POCS documentation! -================================ -

-[PANOPTES logo image]
- -[![Build Status](https://travis-ci.org/panoptes/POCS.svg?branch=develop)](https://travis-ci.org/panoptes/POCS) -[![codecov](https://codecov.io/gh/panoptes/POCS/branch/develop/graph/badge.svg)](https://codecov.io/gh/panoptes/POCS) -[![astropy](http://img.shields.io/badge/powered%20by-AstroPy-orange.svg?style=flat)](http://www.astropy.org/) - - - -## :warning: :warning: - -> (Feb. 2020) There is currently an [open PR](https://github.com/panoptes/POCS/pull/951) that will be a mostly backwards-incompatible change with current POCS and the information below. - -> If you are here shopping for [GSoC 2020](https://summerofcode.withgoogle.com/) please make sure to contact the PANOPTES team before doing any work. Also check the [GSoC 2020 Project Page](https://projectpanoptes.org/gsoc-2020/) for updates. - -> See also the [issues in our panoptes-utils](https://github.com/panoptes/panoptes-utils/issues) repository for some tasks that can be worked on immediately without too much interference from this merge process. - -Milestones Roadmap: - -`v0.7.0` Items related to preparing the docker branch for merge. Basically work being done right now. To be completed Feb/March 2020. -`v0.8.0` Merge of `docker` into `develop` and backwards-incompatible breaking changes to POCS. To be completed March 2020. -`v1.0.0` The real deal. Merge of `develop` into `master`. Summer 2020? - -Things are labelled as `v1.0.0` if they are at some unspecified point in the future but ideally within about 6 months. Issues labelled with this milestone should either be dealt with or removed as stale by the due date. - -The distinction between `0.7.0` and `0.8.0` is a little fuzzy right now since it's all happening rapidly. The more important target is probably `v0.8.0`. - -Basically, "things that need to happen now to get stuff working" should be `v0.7.0` or `v0.8.0` depending on how quickly that needs to happen. - -"Things that will/can happen after the docker migration" should be `v1.0.0`. - -"Things that don't really have a due date and are probably minor (but might be major, they are just not critical for now)" should be `v1.0.0` - -## :warning: :warning: - - -# Overview - -[PANOPTES](http://projectpanoptes.org) is an open source citizen science project -that is designed to find exoplanets with digital cameras. The goal of PANOPTES is -to establish a global network of of robotic cameras run by amateur astronomers -and schools in order to monitor, as continuously as possible, a very large number -of stars. For more general information about the project, including the science -case and resources for interested individuals, see the -[project overview](http://projectpanoptes.org/v1/overview/). - -POCS (PANOPTES Observatory Control System) is the main software driver for the -PANOPTES unit, responsible for high-level control of the unit. There are also -files for a one-time upload to the arduino hardware, as well as various scripts -to read information from the environmental sensors. - -# Getting Started - -POCS is designed to control a fully constructed PANOPTES unit. Additionally, -POCS can be run with simulators when hardware is not present or when the system -is being developed. - -For information on building a PANOPTES unit, see the main [PANOPTES](http://projectpanoptes.org) website. - -To get started with POCS there are three easy steps: - -1. **Setup** POCS on the computer you will be using for your unit or for development. -2. **Test** your POCS setup by running our testing script -3. 
**Start using POCS!** - -See below for more details. - -## Setup - -### Manual install - -* [Computer setup](https://github.com/panoptes/POCS/wiki/Panoptes-Computer-Setup) -* While logged in as user panoptes: - * Create /var/panoptes, owned by user panoptes (for a computer that will be - controlling a PANOPTES unit), or as yourself for development of the - PANOPTES software: - ```bash - sudo mkdir -p /var/panoptes - sudo chown panoptes /var/panoptes - chmod 755 /var/panoptes - mkdir /var/panoptes/logs - ``` - * Define these environment variables, both in your current shell and in - `$HOME/.bash_profile` (to only apply to user panoptes) or in `/etc/profile` - (to apply to all users). - ```bash - export PANDIR=/var/panoptes # Main Dir - export PANLOG=${PANDIR}/logs # Log files - export POCS=${PANDIR}/POCS # Observatory Control - export PAWS=${PANDIR}/PAWS # Web Interface - export PIAA=${PANDIR}/PIAA # Image Analysis - export PANUSER=panoptes # PANOPTES linux user - ``` - * Clone the PANOPTES software repositories into /var/panoptes: - ```bash - cd ${PANDIR} - git clone https://github.com/panoptes/POCS.git - git clone https://github.com/panoptes/PAWS.git - git clone https://github.com/panoptes/PIAA.git - ``` - * Install the software dependencies of the PANOPTES software: - ```bash - ${POCS}/scripts/install/install-dependencies.sh - ``` - * To pickup the changes to PATH, etc., log out and log back in. - * Run setup.py to install the software. - * If you'll be doing development of the software, use these commands: - ```bash - python ${POCS}/setup.py develop - python ${PIAA}/setup.py develop - ``` - * If the computer is for controlling a PANOPTES unit, use these commands: - ```bash - python ${POCS}/setup.py install - python ${PIAA}/setup.py install - ``` - -### Docker - -[Docker](https://www.docker.com/what-docker) is an application that lets you run existing -services, in this case POCS, on a kind of virtual machine. By running via Docker you -are guranteeing you are using a setup that works, saving you time on setup and -other issues that you might run into doing a manual install. - -Of course, this also means that you need to set up Docker. Additionally, you will -need to be able to log into our Google Docker container storage area so you can pull -down the existing image. The steps below should help you to get going. - -#### Install Docker - -Depending on what operating system you are using there are different ways of getting -Docker on your system. The Docker [installation page](https://www.docker.com/community-edition) -should have all the answers you need. - -#### Install gcloud - -`gcloud` is a command line utility that lets you interact with many of the Google -cloud services. We will primarily use this to authenticate your account but this -is also used, for example, to upload images your PANOPTES unit takes. - -See the gcloud [installation page](https://cloud.google.com/sdk/docs/#install_the_latest_cloud_tools_version_cloudsdk_current_version) -for easy install instructions. - -#### Let Docker use gcloud - -Docker needs to be able to use your `gcloud` login to pull the PANOPTES images. 
There -are some helper scripts to make this easier (from [here](https://cloud.google.com/container-registry/docs/advanced-authentication)): - -``` -gcloud components install docker-credential-gcr -docker-credential-gcr configure-docker -docker-credential-gcr gcr-login -``` - -#### Pull POCS container - -``` - docker pull gcr.io/panoptes-survey/pocs:latest -``` - -#### Start the POCS image - -``` -docker run -it -p 9000:9000 --name pocs gcr.io/panoptes-survey/pocs -``` - -The POCS image will automatically start [Jupyter Lab](https://jupyter.org/) running -on port 9000 of your local browser. The above command should display a link that you -copy and paste into your browser to get you started. - -## Test POCS - -POCS comes with a testing suite that allows it to test that all of the software -works and is installed correctly. Running the test suite by default will use simulators -for all of the hardware and is meant to test that the software works correctly. -Additionally, the testing suite can be run with various flags to test that attached -hardware is working properly. - -All of the test files live in `$POCS/pocs/tests`. - -### Software Testing - -There are a few scenarios where you want to run the test suite: - -1. You are getting your unit ready and want to test software is installed correctly. -2. You are upgrading to a new release of software (POCS, its dependencies or the operating system). -2. You are helping develop code for POCS and want test your code doesn't break something. - -#### Testing your installation - -In order to test your installation you should have followed all of the steps above -for getting your unit ready. To run the test suite, you will need to open a terminal -and navigate to the `$POCS` directory. - -```bash -# Change to $POCS directory -(panoptes-env) $ cd $POCS - -# Run the software testing -(panoptes-env) $ pytest -``` - -> :bulb: NOTE: The test suite can take a while to run and often appears to be stalled. -> Check the log files to ensure activity is happening. The tests can be cancelled by -> pressing `Ctrl-c` (sometimes entering this command multiple times is required). - -It is often helpful to view the log output in another terminal window while the test suite is running: - -```bash -# Follow the log file -$ tail -f $PANDIR/logs/panoptes.log -``` - - -The output from this will look something like: - -```bash -(panoptes-env) $ pytest -=========================== test session starts ====================================== -platform linux -- Python 3.5.2, pytest-3.2.3, py-1.4.34, pluggy-0.4.0 -rootdir: /storage/panoptes/POCS, inifile: -plugins: cov-2.4.0 - -collected 260 items -pocs/tests/test_base_scheduler.py ............... -pocs/tests/test_camera.py ........s..ssssss..................ssssssssssssssssssssssssss -pocs/tests/test_codestyle.py . -pocs/tests/test_config.py ............. -pocs/tests/test_constraints.py .............. -pocs/tests/test_database.py ... -pocs/tests/test_dispatch_scheduler.py ........ -pocs/tests/test_field.py .... -pocs/tests/test_focuser.py .......sssssss.. -pocs/tests/test_images.py .......... -pocs/tests/test_ioptron.py . -pocs/tests/test_messaging.py .... -pocs/tests/test_mount_simulator.py .............. -pocs/tests/test_observation.py ................. -pocs/tests/test_observatory.py ................s....... -pocs/tests/test_pocs.py .......................... -pocs/tests/test_utils.py ............. 
-pocs/tests/bisque/test_dome.py ssss -pocs/tests/bisque/test_mount.py sssssssssss -pocs/tests/bisque/test_run.py s - -=========================== 203 passed, 57 skipped, 6 warnings in 435.76 seconds =================================== - -``` - -Here you can see that certain tests were skipped (`s`) for various reasons while -the others passed. Skipped tests are skipped on purpose and thus are not considered -failures. Usually tests are skipped because there is no attached hardware -(see below for running tests with hardware attached). All passing tests are represented -by a single period (`.`) and any failures would show as a `F`. If there are any failures -while running the tests the output from those failures will be displayed. - -#### Testing your code changes - -> :bulb: NOTE: This step is meant for people helping with software development - -The testing suite will automatically be run against any code committed to our github -repositories. However, the test suite should also be run locally before pushing -to github. This can be done either by running the entire test suite as above or -by running an individual test related to the code you are changing. For instance, -to test the code related to the cameras one can run: - -```bash -(panoptes-env) $ pytest -xv pocs/tests/test_camera.py -``` - -Here the `-x` option will stop the tests upon the first failure and the `-v` makes -the testing verbose. - -Any new code should also include proper tests. See below for details. - -#### Writing tests - -All code changes should include tests. We strive to maintain a high code coverage -and new code should necessarily maintain or increase code coverage. - -For more details see the [Writing Tests](https://github.com/panoptes/POCS/wiki/Writing-Tests-for-POCS) page. - -### Hardware Testing - -Hardware testing uses the same testing suite as the software testing but with -additional options passed on the command line to signify what hardware should be -tested. - -The options to pass to `pytest` is `--with-hardware`, which accepts a list of -possible hardware items that are connected. This list includes `camera`, `mount`, -and `weather`. Optionally you can use `all` to test a fully connected unit. - -> :warning: The hardware tests do not perform safety checking of the weather or -> dark sky. The `weather` test mentioned above tests if a weather station is -> connected but does not test the safety conditions. It is assumed that hardware -> testing is always done with direct supervision. - -```bash -# Test an attached camera -pytest --with-hardware=camera - -# Test an attached camera and mount -pytest --with-hardware=camera,mount - -# Test a fully connected unit -pytest --with-hardware=all -``` - -**In Progress** - -## Use POCS - -### For running a unit - -* [Polar alignment test](https://github.com/panoptes/POCS/wiki/Polar-Alignment-Test) - -### For helping develop POCS software - -See [Coding in PANOPTES](https://github.com/panoptes/POCS/wiki/Coding-in-PANOPTES) - -Links ------ - -- PANOPTES Homepage: http://projectpanoptes.org -- Source Code: http://github.com/panoptes/POCS diff --git a/README.rst b/README.rst new file mode 100644 index 000000000..f2cbb85a2 --- /dev/null +++ b/README.rst @@ -0,0 +1,126 @@ +PANOPTES Observatory Control System +=================================== + +.. raw:: html + +

+   PANOPTES logo

+
+|PyPI version| |Build Status| |codecov| |Documentation Status|
+
+- `PANOPTES Observatory Control
+  System <#panoptes-observatory-control-system>`__
+- `Overview <#overview>`__
+- `Getting Started <#getting-started>`__
+- `Install <#install-script>`__
+- `Test POCS <#test-pocs>`__
+- `Links <#links>`__
+
+
+.. warning::
+
+   The recent `v0.7.0` release of POCS is not backwards compatible. If you
+   are one of the folks running that software, please either do a reinstall
+   of your system using the instructions below or see our `forum <https://forum.projectpanoptes.org>`__
+   for advice.
+
+
+Overview
+--------
+
+`PANOPTES <https://projectpanoptes.org>`__ is an open source citizen science project
+that is designed to find transiting exoplanets with digital cameras. The goal of
+PANOPTES is to establish a global network of robotic cameras run by amateur
+astronomers and schools in order to monitor, as continuously as possible, a very large
+number of stars. For more general information about the project, including the
+science case and resources for interested individuals, see the `about page `__.
+
+POCS (PANOPTES Observatory Control System) is the main software driver for the
+PANOPTES unit, responsible for high-level control of the unit. This repository
+also contains a number of scripts for running a full instance of POCS.
+
+Getting Started
+---------------
+
+POCS is designed to control a fully constructed PANOPTES unit. Additionally,
+POCS can be run with simulators when hardware is not present or when the system
+is being developed.
+
+For information on building a PANOPTES unit, see the main `PANOPTES <https://projectpanoptes.org>`__ website and join the
+`community forum <https://forum.projectpanoptes.org>`__.
+
+To get started with POCS there are three easy steps:
+
+#. **Setup** POCS on the computer you will be using for your unit or for
+   development.
+#. **Test** your POCS setup by running our testing script.
+#. **Start using POCS!**
+
+See below for more details.
+
+Install
+-------
+
+POCS Environment
+^^^^^^^^^^^^^^^^
+
+If you are running a PANOPTES unit then you will most likely want the entire
+PANOPTES environment.
+
+There is a bash shell script that will attempt to install an entire working POCS
+system on your computer. Some folks even report that it works on a Mac.
+
+To run the script, open a terminal and enter:
+
+.. code-block:: bash
+
+   curl -L https://install.projectpanoptes.org | bash
+
+Or using `wget`:
+
+.. code-block:: bash
+
+   wget -O - https://install.projectpanoptes.org | bash
+
+POCS Module
+^^^^^^^^^^^
+
+If you want just the POCS module, for instance if you want to override it in
+your own OCS (see `Huntsman-POCS `_
+for an example), then install via `pip`:
+
+.. code-block:: bash
+
+   pip install panoptes-pocs
+
+If you want the extra features, such as Google Cloud Platform connectivity, then
+use the extras options:
+
+.. code-block:: bash
+
+   pip install "panoptes-pocs[google]"
+
+Test POCS
+---------
+
+See the Testing section of the :ref:`contribute` guide.
+
+Links
+-----
+
+- PANOPTES Homepage: https://projectpanoptes.org
+- PANOPTES Data Explorer: https://www.panoptes-data.net
+- Community Forum: https://forum.projectpanoptes.org
+- Source Code: https://github.com/panoptes/POCS
+
+.. |Build Status| image:: https://travis-ci.org/panoptes/POCS.svg?branch=develop
+   :target: https://travis-ci.org/panoptes/POCS
+.. |codecov| image:: https://codecov.io/gh/panoptes/POCS/branch/develop/graph/badge.svg
+   :target: https://codecov.io/gh/panoptes/POCS
+..
|astropy| image:: http://img.shields.io/badge/powered%20by-AstroPy-orange.svg?style=flat + :target: http://www.astropy.org/ +.. |PyPI version| image:: https://badge.fury.io/py/panoptes-pocs.svg + :target: https://badge.fury.io/py/panoptes-pocs +.. |Documentation Status| image:: https://readthedocs.org/projects/pocs/badge/?version=latest + :target: https://pocs.readthedocs.io/en/latest/?badge=latest diff --git a/bin/peas-shell b/bin/peas-shell new file mode 100755 index 000000000..6c0d28364 --- /dev/null +++ b/bin/peas-shell @@ -0,0 +1,11 @@ +#!/bin/bash -ie + +USER_ID=$(id -u) +DOCKER_NAME="peas-shell" + +if [ ! "$(docker ps -q -f name=${DOCKER_NAME})" ]; then + echo "${DOCKER_NAME} not running. Start services with scripts/pocs-docker.sh" +else + docker exec --user "${USER_ID}" -it peas-shell /bin/zsh -ic "python ${POCS}/scripts/${DOCKER_NAME}.py" +fi + diff --git a/bin/pocs b/bin/pocs new file mode 100755 index 000000000..9f6d40e81 --- /dev/null +++ b/bin/pocs @@ -0,0 +1,62 @@ +#!/bin/bash -ie + +usage() { + echo -n "################################################## +# Start POCS via Docker. +# +################################################## + + $ $(basename $0) [COMMAND] + + Options: + COMMAND These options are passed at the end of the docker-compose command. + To start all service simply pass 'up'. + + Examples: + + # Start all services in the foreground. + $POCS/bin/pocs up + + #Start specific docker containers in the background with + $POCS/bin/pocs up --no-deps -d + + e.g. + + # Start config-server service in the background. + $POCS/bin/pocs up --no-deps -d config-server + + # Read the logs from the config-server + $POCS/bin/pocs logs config-server + + # Manually stop docker containers in the with + docker stop +" +} + +START=${1:-help} +if [ "${START}" = 'help' ] || [ "${START}" = '-h' ] || [ "${START}" = '--help' ]; then + usage + exit 1 +fi + +PARAMS="$@" + +cd "$PANDIR" +CMD="docker-compose \ + --project-directory ${PANDIR} \ + -f panoptes-utils/docker/docker-compose.yaml \ + -f POCS/docker/docker-compose-aag.yaml \ + -f POCS/docker/docker-compose.yaml \ + -p panoptes" + +# If user only asked to start, check if already running and if so use "-d" option. +if [[ "$PARAMS" == "up" ]]; then + if [[ ! -z $(eval "${CMD} top") ]]; then + echo "Some containers already running, using -d to only start non-running containers." + echo "For more info on managing docker containers manually, run bin/pocs --help". + PARAMS="up -d" + fi +fi + +# Run the docker-compose command with user params. +eval "${CMD} ${PARAMS}" diff --git a/bin/pocs-cmd b/bin/pocs-cmd new file mode 100755 index 000000000..f15c131a1 --- /dev/null +++ b/bin/pocs-cmd @@ -0,0 +1,15 @@ +#!/bin/bash -e + +USER_ID=$(id -u) +DOCKER_NAME="pocs-shell" + +if [ ! "$(docker ps -q -f name=${DOCKER_NAME})" ]; then + echo "${DOCKER_NAME} not running. Start services with scripts/pocs-docker.sh" +else + if [ $# -eq 0 ]; then + echo "Starting shell on ${DOCKER_NAME}" + docker exec --user "${USER_ID}" -it "${DOCKER_NAME}" /bin/zsh -i + else + docker exec --user "${USER_ID}" -it "${DOCKER_NAME}" /bin/zsh -ic "$@" + fi +fi diff --git a/bin/pocs-shell b/bin/pocs-shell new file mode 100755 index 000000000..48c5e4cf1 --- /dev/null +++ b/bin/pocs-shell @@ -0,0 +1,14 @@ +#!/bin/bash -ie + +USER_ID=$(id -u) +DOCKER_NAME="pocs-shell" + +INDEX_DIR=${INDEX_DIR:-${PANDIR}/astrometry/data} +ASTROMETRY_URL=${ASTROMETRY_URL:-http://broiler.astrometry.net/~dstn/4200} + +if [ ! 
"$(docker ps -q -f name=${DOCKER_NAME})" ]; then + echo "${DOCKER_NAME} not running. Start services with scripts/pocs-docker.sh" +else + docker exec --user "${USER_ID}" -it pocs-shell /bin/zsh -ic "python ${POCS}/scripts/${DOCKER_NAME}.py" +fi + diff --git a/bin/pocs_shell b/bin/pocs_shell deleted file mode 100755 index 810b78b7e..000000000 --- a/bin/pocs_shell +++ /dev/null @@ -1,879 +0,0 @@ -#!/usr/bin/env python3 -import os -import readline -import time -import zmq - -from cmd import Cmd -from subprocess import TimeoutExpired -from pprint import pprint - -from astropy import units as u -from astropy.coordinates import AltAz -from astropy.coordinates import ICRS -from astropy.utils import console - -from pocs import hardware -from pocs.core import POCS -from pocs.observatory import Observatory -from pocs.camera import create_cameras_from_config -from pocs.scheduler.field import Field -from pocs.scheduler.observation import Observation -from pocs.utils import current_time -from pocs.utils import string_to_params -from pocs.utils import error -from pocs.utils import images as img_utils -from pocs.utils.images import fits as fits_utils -from pocs.utils.images import cr2 as cr2_utils -from pocs.utils.images import polar_alignment as polar_alignment_utils -from pocs.utils.database import PanDB -from pocs.utils.messaging import PanMessaging - - -class PocsShell(Cmd): - """A simple command loop for running the PANOPTES Observatory Control System.""" - - intro = 'Welcome to POCS Shell! Type ? for help' - prompt = 'POCS > ' - procs = dict() - pocs = None - - _running = False - - msg_subscriber = None - msg_publisher = None - cmd_publisher = None - - cmd_pub_port = 6500 - cmd_sub_port = 6501 - msg_pub_port = 6510 - msg_sub_port = 6511 - - @property - def is_setup(self): - """True if POCS is setup, False otherwise.""" - if self.pocs is None: - print_warning('POCS has not been setup. Please run `setup_pocs`') - return False - return True - - @property - def is_safe(self): - """True if POCS is setup and weather conditions are safe, False otherwise.""" - return self.is_setup and self.pocs.is_safe() - - @property - def ready(self): - """True if POCS is ready to observe, False otherwise.""" - if not self.is_setup: - return False - - if self.pocs.observatory.mount.is_parked: - print_warning('Mount is parked. To unpark run `unpark`') - return False - - return self.pocs.is_safe() - - def do_display_config(self, *arg): - """ Display the config file for POCS """ - try: - if self.pocs and self.pocs.config: - pprint(self.pocs.config) - else: - print_warning("No config file for POCS.") - - except AttributeError: - print_warning("Please run `setup_pocs` before trying to run `display_config`") - - def do_drift_align(self, *arg): - """Enter the drift alignment shell.""" - self.do_reset_pocs() - print_info('*' * 80) - i = DriftShell() - i.cmdloop() - - def do_start_messaging(self, *arg): - """Starts the messaging system for the POCS ecosystem. - - This starts both a command forwarder and a message forwarder as separate - processes. 
- - The command forwarder has the pocs_shell and PAWS as PUBlishers and POCS - itself as a SUBscriber to those commands - - The message forwarder has POCS as a PUBlisher and the pocs_shell and PAWS - as SUBscribers to those messages - - Arguments: - *arg {str} -- Unused - """ - print_info("Starting messaging") - - # Send commands to POCS via this publisher - try: - self.cmd_publisher = PanMessaging.create_publisher( - self.cmd_pub_port) - print_info("Command publisher started on port {}".format( - self.cmd_pub_port)) - except Exception as e: - print_warning("Can't start command publisher: {}".format(e)) - - try: - self.cmd_subscriber = PanMessaging.create_subscriber( - self.cmd_sub_port) - print_info("Command subscriber started on port {}".format( - self.cmd_sub_port)) - except Exception as e: - print_warning("Can't start command subscriber: {}".format(e)) - - # Receive messages from POCS via this subscriber - try: - self.msg_subscriber = PanMessaging.create_subscriber( - self.msg_sub_port) - print_info("Message subscriber started on port {}".format( - self.msg_sub_port)) - except Exception as e: - print_warning("Can't start message subscriber: {}".format(e)) - - # Send messages to PAWS - try: - self.msg_publisher = PanMessaging.create_publisher( - self.msg_pub_port) - print_info("Message publisher started on port {}".format( - self.msg_pub_port)) - except Exception as e: - print_warning("Can't start message publisher: {}".format(e)) - - def do_setup_pocs(self, *arg): - """Setup and initialize a POCS instance.""" - args, kwargs = string_to_params(*arg) - - simulator = kwargs.get('simulator', list()) - if isinstance(simulator, str): - simulator = [simulator] - - # TODO(wtgee) Incorporate real power readings - if 'power' not in simulator: - simulator.append('power') - - if 'POCSTIME' in os.environ: - print_warning("Clearing POCSTIME variable") - del os.environ['POCSTIME'] - - try: - cameras = create_cameras_from_config(simulator=simulator) - observatory = Observatory(simulator=simulator, cameras=cameras) - self.pocs = POCS(observatory, messaging=True) - self.pocs.initialize() - except error.PanError as e: - print_warning('Problem setting up POCS: {}'.format(e)) - - def help_setup_pocs(self): - print('''Setup and initialize a POCS instance. - - setup_pocs [simulate] - -simulate is a space-separated list of hardware to simulate. -Hardware names: {} (or all for all hardware)'''.format( - ','.join(hardware.get_all_names()))) - - def complete_setup_pocs(self, text, line, begidx, endidx): - """Provide completions for simulator names.""" - names = ['all'] + hardware.get_all_names() - return [name for name in names if name.startswith(text)] - - def do_reset_pocs(self, *arg): - """Discards the POCS instance. - - Does NOT park the mount, nor execute power_down. - """ - self.pocs = None - - def do_run_pocs(self, *arg): - """Make POCS `run` the state machine. 
- - Continues until the user presses Ctrl-C or the state machine - exits, such as due to an error.""" - if self.pocs is not None: - if self.msg_subscriber is None: - self.do_start_messaging() - - print_info("Starting POCS - Press Ctrl-c to interrupt") - - try: - self.pocs.run() - except KeyboardInterrupt: - print_warning('POCS interrupted, parking') - if self.pocs.state not in ['sleeping', 'housekeeping', 'parked', 'parking']: - self.pocs.park() - else: - self.pocs.observatory.mount.home_and_park() - self._obs_run_retries = 0 # Don't retry - finally: - print_info('POCS stopped.') - else: - print_warning('Please run `setup_pocs` before trying to run') - - def do_status(self, *arg): - """Print the `status` for pocs.""" - if self.pocs is None: - print_warning('Please run `setup_pocs` before trying to run') - return - if self.msg_subscriber is None: - self.do_start_messaging() - status = self.pocs.status() - print() - pprint(status) - print() - - def do_pocs_command(self, cmd): - """Send a command to POCS instance. - - Arguments: - cmd {str} -- Command to be sent - """ - try: - self.cmd_publisher.send_message('POCS-CMD', cmd) - except AttributeError: - print_info('Messaging not started') - - def do_pocs_message(self, cmd): - """Send a message to PAWS and other listeners. - - Arguments: - cmd {str} -- Command to be sent - """ - try: - self.msg_publisher.send_message('POCS-SHELL', cmd) - except AttributeError: - print_info('Messaging not started') - - def do_exit(self, *arg): - """Exits PocsShell.""" - if self.pocs is not None: - self.do_power_down() - - print_info("Bye! Thanks!") - return True - - def emptyline(self): - """Do nothing. - - Without this, Cmd would repeat the last command.""" - pass - - def do_unpark(self, *arg): - """Release the mount so that it can be moved.""" - try: - self.pocs.observatory.mount.unpark() - self.pocs.say("Unparking mount") - except Exception as e: - print_warning('Problem unparking: {}'.format(e)) - - def do_park(self, *arg): - """Park the mount.""" - try: - self.pocs.observatory.mount.park() - self.pocs.say("Mount parked") - except Exception as e: - print_warning('Problem parking: {}'.format(e)) - - def do_go_home(self, *arg): - """Move the mount to home.""" - if self.ready is False: - if self.pocs.is_weather_safe() is False: - self.do_power_down() - - return - - try: - self.pocs.observatory.mount.slew_to_home(blocking=True) - except Exception as e: - print_warning('Problem slewing to home: {}'.format(e)) - - def do_open_dome(self, *arg): - """Open the dome, if there is one.""" - if not self.is_setup: - return - if not self.pocs.observatory.has_dome: - print_warning('There is no dome.') - return - if not self.pocs.is_weather_safe(): - print_warning('Weather conditions are not good, not opening dome.') - return - try: - if self.pocs.observatory.open_dome(): - print_info('Opened the dome.') - else: - print_warning('Failed to open the dome.') - except Exception as e: - print_warning('Problem opening the dome: {}'.format(e)) - - def do_close_dome(self, *arg): - """Close the dome, if there is one.""" - if not self.is_setup: - return - if not self.pocs.observatory.has_dome: - print_warning('There is no dome.') - return - try: - if self.pocs.observatory.close_dome(): - print_info('Closed the dome.') - else: - print_warning('Failed to close the dome.') - except Exception as e: - print_warning('Problem closing the dome: {}'.format(e)) - - def do_power_down(self, *arg): - """Power down the mount; waits until the mount is parked.""" - print_info("Shutting down POCS 
instance, please wait") - self.pocs.power_down() - - while self.pocs.observatory.mount.is_parked is False: - print_info('.') - time.sleep(5) - - self.pocs = None - - def do_polar_alignment_test(self, *arg): - """Capture images of the pole and compute alignment of mount.""" - if self.ready is False: - return - - start_time = current_time(flatten=True) - - base_dir = '{}/images/drift_align/{}'.format( - os.getenv('PANDIR'), start_time) - plot_fn = '{}/{}_center_overlay.jpg'.format(base_dir, start_time) - - mount = self.pocs.observatory.mount - - print_info("Moving to home position") - self.pocs.say("Moving to home position") - mount.slew_to_home(blocking=True) - - # Polar Rotation - pole_fn = polar_rotation(self.pocs, base_dir=base_dir) - pole_fn = pole_fn.replace('.cr2', '.fits') - - # Mount Rotation - rotate_fn = mount_rotation(self.pocs, base_dir=base_dir) - rotate_fn = rotate_fn.replace('.cr2', '.fits') - - print_info("Moving back to home") - self.pocs.say("Moving back to home") - mount.slew_to_home(blocking=True) - - print_info("Solving celestial pole image") - self.pocs.say("Solving celestial pole image") - try: - pole_center = polar_alignment_utils.analyze_polar_rotation(pole_fn) - except error.SolveError: - print_warning("Unable to solve pole image.") - print_warning("Will proceeed with rotation image but analysis not possible") - pole_center = None - else: - pole_center = (float(pole_center[0]), float(pole_center[1])) - - print_info("Starting analysis of rotation image") - self.pocs.say("Starting analysis of rotation image") - try: - rotate_center = polar_alignment_utils.analyze_ra_rotation(rotate_fn) - except Exception: - print_warning("Unable to process rotation image") - rotate_center = None - - if pole_center is not None and rotate_center is not None: - print_info("Plotting centers") - self.pocs.say("Plotting centers") - - print_info("Pole: {} {}".format(pole_center, pole_fn)) - self.pocs.say("Pole : {:0.2f} x {:0.2f}".format( - pole_center[0], pole_center[1])) - - print_info("Rotate: {} {}".format(rotate_center, rotate_fn)) - self.pocs.say("Rotate: {:0.2f} x {:0.2f}".format( - rotate_center[0], rotate_center[1])) - - d_x = pole_center[0] - rotate_center[0] - d_y = pole_center[1] - rotate_center[1] - - self.pocs.say("d_x: {:0.2f}".format(d_x)) - self.pocs.say("d_y: {:0.2f}".format(d_y)) - - fig = polar_alignment_utils.plot_center( - pole_fn, rotate_fn, pole_center, rotate_center) - - print_info("Plot image: {}".format(plot_fn)) - fig.tight_layout() - fig.savefig(plot_fn) - - try: - os.unlink('/var/panoptes/images/latest.jpg') - except Exception: - pass - try: - os.symlink(plot_fn, '/var/panoptes/images/latest.jpg') - except Exception: - print_warning("Can't link latest image") - - with open('/var/panoptes/images/drift_align/center.txt'.format(base_dir), 'a') as f: - f.write('{}.{},{},{},{},{},{}\n'.format(start_time, pole_center[0], pole_center[ - 1], rotate_center[0], rotate_center[1], d_x, d_y)) - - print_info("Done with polar alignment test") - self.pocs.say("Done with polar alignment test") - - def do_web_listen(self, *arg): - """Goes into a loop listening for commands from PAWS.""" - - if not hasattr(self, 'cmd_subscriber'): - self.do_start_messaging() - - self.pocs.say("Now listening for commands from PAWS") - - poller = zmq.Poller() - poller.register(self.cmd_subscriber.socket, zmq.POLLIN) - - command_lookup = { - 'polar_alignment': self.do_polar_alignment_test, - 'park': self.do_park, - 'unpark': self.do_unpark, - 'home': self.do_go_home, - } - - try: - while True: - # 
Poll for messages - sockets = dict(poller.poll(500)) # 500 ms timeout - - if self.cmd_subscriber.socket in sockets and \ - sockets[self.cmd_subscriber.socket] == zmq.POLLIN: - - topic, msg_obj = self.cmd_subscriber.receive_message( - flags=zmq.NOBLOCK) - print_info("{} {}".format(topic, msg_obj)) - - # Put the message in a queue to be processed - if topic == 'PAWS-CMD': - try: - print_info("Command received: {}".format( - msg_obj['message'])) - cmd = command_lookup[msg_obj['message']] - cmd() - except KeyError: - pass - except Exception as e: - print_warning( - "Can't perform command: {}".format(e)) - - time.sleep(1) - except KeyboardInterrupt: - self.pocs.say("No longer listening to PAWS") - pass - - -########################################################################## -# Private Methods -########################################################################## - -########################################################################## -# Utility Methods -########################################################################## - -def polar_rotation(pocs, exptime=30, base_dir=None, **kwargs): - assert base_dir is not None, print_warning("base_dir cannot be empty") - - mount = pocs.observatory.mount - - print_info('Performing polar rotation test') - pocs.say('Performing polar rotation test') - mount.slew_to_home(blocking=True) - - analyze_fn = None - - print_info('At home position, taking {} sec exposure'.format(exptime)) - pocs.say('At home position, taking {} sec exposure'.format(exptime)) - procs = dict() - for cam_name, cam in pocs.observatory.cameras.items(): - fn = '{}/pole_{}.cr2'.format(base_dir, cam_name.lower()) - proc = cam.take_exposure(seconds=exptime, filename=fn) - procs[fn] = proc - if cam.is_primary: - analyze_fn = fn - - for fn, proc in procs.items(): - try: - outs, errs = proc.communicate(timeout=(exptime + 15)) - except AttributeError: - continue - except KeyboardInterrupt: - print_warning('Pole test interrupted') - proc.kill() - outs, errs = proc.communicate() - break - except TimeoutExpired: - proc.kill() - outs, errs = proc.communicate() - break - - time.sleep(2) - try: - img_utils.make_pretty_image( - fn, title='Alignment Test - Celestial Pole', primary=True) - cr2_utils.cr2_to_fits(fn, remove_cr2=True) - except AssertionError: - print_warning("Can't make image for {}".format(fn)) - pocs.say("Can't make image for {}".format(fn)) - - return analyze_fn - - -def mount_rotation(pocs, base_dir=None, include_west=False, **kwargs): - mount = pocs.observatory.mount - - print_info("Doing rotation test") - pocs.say("Doing rotation test") - mount.slew_to_home(blocking=True) - exptime = 25 - mount.move_direction(direction='west', seconds=11) - - rotate_fn = None - - # Start exposing on cameras - for direction in ['east', 'west']: - if include_west is False and direction == 'west': - continue - - print_info("Rotating to {}".format(direction)) - pocs.say("Rotating to {}".format(direction)) - procs = dict() - for cam_name, cam in pocs.observatory.cameras.items(): - fn = '{}/rotation_{}_{}.cr2'.format(base_dir, - direction, cam_name.lower()) - proc = cam.take_exposure(seconds=exptime, filename=fn) - procs[fn] = proc - if cam.is_primary: - rotate_fn = fn - - # Move mount - mount.move_direction(direction=direction, seconds=21) - - # Get exposures - for fn, proc in procs.items(): - try: - outs, errs = proc.communicate(timeout=(exptime + 15)) - except AttributeError: - continue - except KeyboardInterrupt: - print_warning('Pole test interrupted') - pocs.say('Pole test 
interrupted') - proc.kill() - outs, errs = proc.communicate() - break - except TimeoutExpired: - proc.kill() - outs, errs = proc.communicate() - break - - time.sleep(2) - try: - img_utils.make_pretty_image( - fn, title='Alignment Test - Rotate {}'.format(direction), primary=True) - cr2_utils.cr2_to_fits(fn, remove_cr2=True) - except AssertionError: - print_warning("Can't make image for {}".format(fn)) - pocs.say("Can't make image for {}".format(fn)) - - return rotate_fn - - -class DriftShell(Cmd): - intro = 'Drift alignment shell! Type ? for help or `exit` to leave drift alignment.' - prompt = 'POCS:DriftAlign > ' - - pocs = None - base_dir = '{}/images/drift_align'.format(os.getenv('PANDIR')) - - num_pics = 40 - exptime = 30 - - # Coordinates for different tests - coords = { - 'alt_east': (30, 102), - 'alt_west': (20, 262.5), - 'az_east': (70.47, 170), - 'az_west': (70.47, 180), - } - - @property - def ready(self): - if self.pocs is None: - print_warning('POCS has not been setup. Please run `setup_pocs`') - return False - - if self.pocs.observatory.mount.is_parked: - print_warning('Mount is parked. To unpark run `unpark`') - return False - - return self.pocs.is_safe() - - def do_setup_pocs(self, *arg): - """Setup and initialize a POCS instance.""" - args, kwargs = string_to_params(*arg) - simulator = kwargs.get('simulator', []) - print_info("Simulator: {}".format(simulator)) - - try: - self.pocs = POCS(simulator=simulator) - self.pocs.initialize() - except error.PanError as e: - - pass - - def do_drift_test(self, *arg): - if self.ready is False: - return - - args, kwargs = string_to_params(*arg) - - try: - direction = kwargs['direction'] - num_pics = int(kwargs['num_pics']) - exptime = float(kwargs['exptime']) - except Exception: - print_warning( - 'Drift test requires three arguments: direction, num_pics, exptime') - return - - start_time = kwargs.get('start_time', current_time(flatten=True)) - - print_info('{} drift test with {}x {}sec exposures'.format( - direction.capitalize(), num_pics, exptime)) - - if direction: - try: - alt, az = self.coords.get(direction) - except KeyError: - print_error('Invalid direction given') - else: - location = self.pocs.observatory.observer.location - obs = get_observation( - alt=alt, - az=az, - loc=location, - num_exp=num_pics, - exptime=exptime, - name=direction - ) - - self.perform_test(obs, start_time=start_time) - print_info('Test complete, slewing to home') - self.do_go_home() - else: - print_warning('Must pass direction to test: alt_east, alt_west, az_east, az_west') - - def do_full_drift_test(self, *arg): - if not self.ready: - return - - args, kwargs = string_to_params(*arg) - - num_pics = int(kwargs.get('num_pics', self.num_pics)) - exptime = float(kwargs.get('exptime', self.exptime)) - - print_info('Full drift test. 
Press Ctrl-c to interrupt') - - start_time = current_time(flatten=True) - - for direction in ['alt_east', 'az_east', 'alt_west', 'az_west']: - if not self.ready: - break - - print_info('Performing drift test: {}'.format(direction)) - try: - self.do_drift_test('direction={} num_pics={} exptime={} start_time={}'.format( - direction, num_pics, exptime, start_time)) - except KeyboardInterrupt: - print_warning('Drift test interrupted') - break - - print_info('Slewing to home') - self.do_go_home() - - def do_unpark(self, *arg): - try: - self.pocs.observatory.mount.unpark() - except Exception as e: - print_warning('Problem unparking: {}'.format(e)) - - def do_go_home(self, *arg): - """Move the mount to home.""" - if self.ready is False: - if self.pocs.is_weather_safe() is False: - self.do_power_down() - - return - - try: - self.pocs.observatory.mount.slew_to_home(blocking=True) - except Exception as e: - print_warning('Problem slewing to home: {}'.format(e)) - - def do_power_down(self, *arg): - print_info("Shutting down POCS instance, please wait") - self.pocs.power_down() - - while self.pocs.observatory.mount.is_parked is False: - print_info('.') - time.sleep(5) - - self.pocs = None - - def do_exit(self, *arg): - if self.pocs is not None: - self.do_power_down() - - print_info('Leaving drift alignment') - return True - - def emptyline(self): - if self.ready: - print_info(self.pocs.status()) - - def perform_test(self, observation, start_time=None): - if start_time is None: - start_time = current_time(flatten=True) - - mount = self.pocs.observatory.mount - - mount.set_target_coordinates(observation.field.coord) - # print_info("Slewing to {}".format(coord)) - mount.slew_to_target() - - while mount.is_slewing: - time.sleep(3) - - print_info('At destination, taking pics') - - for i in range(observation.min_nexp): - - if not self.ready: - break - - headers = self.pocs.observatory.get_standard_headers( - observation=observation) - - # All camera images share a similar start time - headers['start_time'] = start_time - - print_info('\t{} of {}'.format(i, observation.min_nexp)) - - events = [] - files = [] - for name, cam in self.pocs.observatory.cameras.items(): - fn = '{}/{}_{}_{}_{:02d}.cr2'.format( - self.base_dir, start_time, observation.field.field_name, name, i) - cam_event = cam.take_observation( - observation, headers=headers, filename=fn) - events.append(cam_event) - files.append(fn.replace('.cr2', '.fits')) - - for e in events: - while not e.is_set(): - time.sleep(5) - - # while files: - # file = files.pop(0) - # process_img(file, start_time) - - -def process_img(fn, start_time, remove_after=True): - # Unpack if already packed - if fn.endswith('.fz'): - fn = fits_utils.fpack(fn, unpack=True) - - if os.path.exists('{}.fz'.format(fn)): - fn = fits_utils.fpack(fn.replace('.fits', '.fits.fz'), unpack=True) - - # Solve the field - try: - fits_utils.get_solve_field(fn) - - # Get header info - header = fits_utils.getheader(fn) - - try: - del header['COMMENT'] - del header['HISTORY'] - except Exception: - pass - - db = PanDB() - - # Add to DB - db.drift_align.insert_one({ - 'data': header, - 'type': 'drift_align', - 'date': current_time(datetime=True), - 'start_time': start_time, - }) - - # Remove file - if remove_after: - try: - os.remove(fn) - except Exception as e: - print_warning('Problem removing file: {}'.format(e)) - except Exception as e: - print_warning('Problem with adding to mongo: {}'.format(e)) - - -def get_observation(alt=None, az=None, loc=None, num_exp=25, exptime=30 * u.second, 
name=None): - assert alt is not None - assert az is not None - assert loc is not None - - coord = AltAz(az=az * u.deg, alt=alt * u.deg, - obstime=current_time(), location=loc).transform_to(ICRS) - - field = Field(name, coord) - - if not isinstance(exptime, u.Quantity): - exptime *= u.second - - obs = Observation(field, exptime=exptime, - min_nexp=num_exp, exp_set_size=1) - - return obs - - -def print_info(msg): - console.color_print(msg, 'lightgreen') - - -def print_warning(msg): - console.color_print(msg, 'yellow') - - -def print_error(msg): - console.color_print(msg, 'red') - - -if __name__ == '__main__': - import os - import sys - - if not os.getenv('POCS'): - sys.exit("Please set the POCS environment variable.") - - invoked_script = os.path.basename(sys.argv[0]) - histfile = os.path.expanduser('~/.{}_history'.format(invoked_script)) - histfile_size = 1000 - if os.path.exists(histfile): - readline.read_history_file(histfile) - - PocsShell().cmdloop() - - readline.set_history_length(histfile_size) - readline.write_history_file(histfile) diff --git a/.codecov.yml b/codecov.yml similarity index 92% rename from .codecov.yml rename to codecov.yml index b027beaff..3360aac18 100644 --- a/.codecov.yml +++ b/codecov.yml @@ -1,5 +1,4 @@ ignore: - - "pocs/utils/data.py" - "pocs/camera/canon_gphoto2.py" - "pocs/camera/sbig.py" - "pocs/camera/sbigudrv.py" @@ -19,4 +18,3 @@ ignore: - "pocs/filterwheel/libefw.py" - "pocs/filterwheel/sbig.py" - "pocs/filterwheel/zwo.py" - - "peas/weather.py" diff --git a/conf_files/log.yaml b/conf_files/log.yaml deleted file mode 100644 index 564e40742..000000000 --- a/conf_files/log.yaml +++ /dev/null @@ -1,52 +0,0 @@ -logger: - version: 1 - use_utc: True - formatters: - simple: - format: '%(asctime)s - %(message)s' - datefmt: '%H:%M:%S' - detail: - style: '{' - format: '{levelname:.1s}{asctime}.{msecs:03.0f} {filename:>25s}:{lineno:03d}] {message}' - datefmt: '%m%d %H:%M:%S' - handlers: - all: - class: logging.handlers.TimedRotatingFileHandler - level: DEBUG - formatter: detail - when: W6 - backupCount: 4 - info: - class: logging.handlers.TimedRotatingFileHandler - level: INFO - formatter: detail - when: W6 - backupCount: 4 - warn: - class: logging.handlers.TimedRotatingFileHandler - level: WARNING - formatter: detail - when: W6 - backupCount: 4 - error: - class: logging.handlers.TimedRotatingFileHandler - level: ERROR - formatter: detail - when: W6 - backupCount: 4 - loggers: - all: - handlers: [all] - propagate: true - info: - handlers: [info] - propagate: true - warn: - handlers: [warn] - propagate: true - error: - handlers: [error] - propagate: true - root: - level: DEBUG - handlers: [all, warn] \ No newline at end of file diff --git a/conf_files/pocs.yaml b/conf_files/pocs.yaml index a14056527..5be6c3e56 100644 --- a/conf_files/pocs.yaml +++ b/conf_files/pocs.yaml @@ -15,13 +15,14 @@ pan_id: PAN000 location: name: Mauna Loa Observatory - latitude: 19.54 # Degrees - longitude: -155.58 # Degrees - elevation: 3400.0 # Meters - horizon: 30 # Degrees; targets must be above this to be considered valid. - flat_horizon: -6 # Degrees - Flats when sun between this and focus horizon. - focus_horizon: -12 # Degrees - Dark enough to focus on stars. - observe_horizon: -18 # Degrees - Sun below this limit to observe. + latitude: 19.54 deg + longitude: -155.58 deg + elevation: 3400.0 m + horizon: 30 deg # targets must be above this to be considered valid. + flat_horizon: -6 deg # Flats when sun between this and focus horizon. 
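The location settings above now carry explicit units as quantity strings (for example `19.54 deg` and `3400.0 m`) instead of bare numbers with unit comments. As a rough, illustrative sketch only (not part of this changeset), strings like these can be handed straight to astropy, which POCS already uses; the variable names below are hypothetical:

```python
from astropy import units as u
from astropy.coordinates import EarthLocation

# Hypothetical values mirroring the location section above.
latitude = u.Quantity('19.54 deg')
longitude = u.Quantity('-155.58 deg')
elevation = u.Quantity('3400.0 m')

# Combine them into a site location for the observatory.
site = EarthLocation(lat=latitude, lon=longitude, height=elevation)
print(site.lat, site.lon, site.height)
```

The horizon entries such as `30 deg` parse the same way, so the inline comments now only need to describe what each limit means.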
+ focus_horizon: -12 deg # Dark enough to focus on stars. + observe_horizon: -18 deg # Sun below this limit to observe. + obsctructions: [] timezone: US/Hawaii gmt_offset: -600 # Offset in minutes from GMT during. # standard time (not daylight saving). @@ -35,11 +36,14 @@ directories: db: name: panoptes type: file + state_machine: simple_state_table + scheduler: type: dispatch fields_file: simple.yaml check_file: False + mount: brand: ioptron model: 30 @@ -51,11 +55,13 @@ mount: non_sidereal_available: True min_tracking_threshold: 100 # ms max_tracking_threshold: 99999 # ms + pointing: auto_correct: True threshold: 100 # arcseconds ~ 10 pixels exptime: 30 # seconds max_iterations: 5 + cameras: auto_detect: True primary: 14d3bd @@ -64,9 +70,26 @@ cameras: model: canon_gphoto2 - model: canon_gphoto2 -messaging: - cmd_port: 6500 - msg_port: 6510 + +######################### Environmental Sensors ################################ +# Configure the environmental sensors that are attached. +# +# Use `auto_detect: True` for most options. Or use a manual configuration: +# +# camera_board: +# serial_port: /dev/ttyACM0 +# control_board: +# serial_port: /dev/ttyACM1 +################################################################################ +environment: + auto_detect: False + camera_board: + serial_port: /dev/ttyACM0 + control_board: + serial_port: /dev/ttyACM1 + weather: + url: http://localhost:5000/latest.json + ########################## Observations ######################################## # An observation folder contains a contiguous sequence of images of a target/field @@ -91,79 +114,15 @@ observations: # By default all images are stored on googlecloud servers and we also # use a few google services to store metadata, communicate with servers, etc. # -# See $POCS/pocs/utils/google/README.md for details about authentication. +# See $PANDIR/panoptes/utils/google/README.md for details about authentication. # # Options to change: # image_storage: If images should be uploaded to Google Cloud Storage. # service_account_key: Location of the JSON service account key. ################################################################################ panoptes_network: - image_storage: True + image_storage: False service_account_key: # Location of JSON account key project_id: panoptes-survey buckets: images: panoptes-survey - -#Enable to output POCS messages to social accounts -# social_accounts: -# twitter: -# consumer_key: [your_consumer_key] -# consumer_secret: [your_consumer_secret] -# access_token: [your_access_token] -# access_token_secret: [your_access_token_secret] -# slack: -# webhook_url: [your_webhook_url] -# output_timestamp: False - - -######################### Environmental Sensors ################################ -# Configure the environmental sensors that are attached. -# -# Use `auto_detect: True` for most options. Or use a manual configuration: -# -# camera_board: -# serial_port: /dev/ttyACM0 -# control_board: -# serial_port: /dev/ttyACM1 -################################################################################ -environment: - auto_detect: False - camera_board: - serial_port: /dev/ttyACM0 - control_board: - serial_port: /dev/ttyACM1 - -######################### Weather Station ###################################### -# Weather station options. -# -# Configure the serial_port as necessary. -# -# Default thresholds should be okay for most locations. 
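The new `environment.weather` entry above points POCS at a small HTTP endpoint (`http://localhost:5000/latest.json`, served by the `aag-weather-server` container defined further below) instead of a directly attached serial device. A minimal sketch of polling that endpoint, assuming the service is running and that `requests` is installed; the error handling is illustrative:

```python
import requests

# URL taken from the `environment.weather` config entry above.
WEATHER_URL = 'http://localhost:5000/latest.json'

try:
    # Fetch the most recent weather reading as JSON.
    reading = requests.get(WEATHER_URL, timeout=5).json()
    print(reading)
except requests.RequestException as exc:
    print(f'Weather service not reachable: {exc}')
```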
-################################################################################ -weather: - aag_cloud: - serial_port: '/dev/ttyUSB1' - threshold_cloudy: -25 - threshold_very_cloudy: -15. - threshold_windy: 50. - threshold_very_windy: 75. - threshold_gusty: 100. - threshold_very_gusty: 125. - threshold_wet: 2200. - threshold_rainy: 1800. - safety_delay: 15 ## minutes - heater: - low_temp: 0 ## deg C - low_delta: 6 ## deg C - high_temp: 20 ## deg C - high_delta: 4 ## deg C - min_power: 10 ## percent - impulse_temp: 10 ## deg C - impulse_duration: 60 ## seconds - impulse_cycle: 600 ## seconds - plot: - amb_temp_limits: [-5, 35] - cloudiness_limits: [-45, 5] - wind_limits: [0, 75] - rain_limits: [700, 3200] - pwm_limits: [-5, 105] diff --git a/conftest.py b/conftest.py index 511226e73..7ebaf58ac 100644 --- a/conftest.py +++ b/conftest.py @@ -1,25 +1,47 @@ -# This is in the root POCS directory so that pytest will recognize the -# options added below without having to also specify pocs/test, or a -# one of the tests in that directory, on the command line; i.e. pytest -# doesn't load pocs/tests/conftest.py until after it has searched for -# tests. -# In addition, there are fixtures defined here that are available to -# all tests, not just those in pocs/tests. - -import copy +import logging import os +import stat import pytest +from _pytest.logging import caplog as _caplog import subprocess import time +import tempfile +import shutil -from pocs import hardware -from pocs.utils.database import PanDB -from pocs.utils.logger import get_root_logger -from pocs.utils.messaging import PanMessaging +from contextlib import suppress +from multiprocessing import Process +from scalpl import Cut -# Global variable set to a bool by can_connect_to_mongo(). -_can_connect_to_mongo = None -_all_databases = ['mongo', 'file', 'memory'] +from panoptes.pocs import hardware +from panoptes.utils.database import PanDB +from panoptes.utils.config import load_config +from panoptes.utils.config.client import set_config +from panoptes.utils.config.server import app as config_server_app + +from panoptes.pocs.utils.logger import get_logger, PanLogger + +# TODO download IERS files. + +_all_databases = ['file', 'memory'] + +LOGGER_INFO = PanLogger() + +logger = get_logger() +logger.enable('panoptes') +logger.level("testing", no=15, icon="🤖", color="") +log_file_path = os.path.join( + os.getenv('PANLOG', '/var/panoptes/logs'), + 'panoptes-testing.log' +) +logger.add(log_file_path, + format=LOGGER_INFO.format, + colorize=True, + enqueue=True, # multiprocessing + backtrace=True, + diagnose=True, + level='TRACE') +# Make the log file world readable. +os.chmod(log_file_path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) def pytest_addoption(parser): @@ -30,13 +52,12 @@ def pytest_addoption(parser): "--with-hardware", nargs='+', default=[], - help=("A comma separated list of hardware to test. List items can include: " + hw_names)) + help=f"A comma separated list of hardware to test. List items can include: {hw_names}") group.addoption( "--without-hardware", nargs='+', default=[], - help=("A comma separated list of hardware to NOT test. " + "List items can include: " + - hw_names)) + help=f"A comma separated list of hardware to NOT test. List items can include: {hw_names}") group.addoption( "--solve", action="store_true", @@ -46,25 +67,21 @@ def pytest_addoption(parser): "--test-databases", nargs="+", default=['file'], - help=("Test databases in the list. List items can include: " + db_names + - ". 
Note that travis-ci will test all of them by default.")) + help=f"Test databases in the list. List items can include: {db_names}. Note that travis-ci will test all of " + f"them by default.") def pytest_collection_modifyitems(config, items): """Modify tests to skip or not based on cli options. - Certain tests should only be run when the appropriate hardware is attached. Other tests fail if real hardware is attached (e.g. they expect there is no hardware). The names of the types of hardware are in hardware.py, but include 'mount' and 'camera'. For a test that requires a mount, for example, the test should be marked as follows: - `@pytest.mark.with_mount` - And the same applies for the names of other types of hardware. For a test that requires that there be no cameras attached, mark the test as follows: - `@pytest.mark.without_camera` """ @@ -100,42 +117,30 @@ def pytest_collection_modifyitems(config, items): def pytest_runtest_logstart(nodeid, location): """Signal the start of running a single test item. - This hook will be called before pytest_runtest_setup(), pytest_runtest_call() and pytest_runtest_teardown() hooks. - Args: nodeid (str) – full id of the item location – a triple of (filename, linenum, testname) """ - try: - logger = get_root_logger() - logger.critical('') - logger.critical('##########' * 8) - logger.critical(' START TEST {}', nodeid) - logger.critical('') - except Exception: - pass + with suppress(Exception): + logger.log('testing', '##########' * 8) + logger.log('testing', f' START TEST {nodeid}') + logger.log('testing', '') def pytest_runtest_logfinish(nodeid, location): """Signal the complete finish of running a single test item. - This hook will be called after pytest_runtest_setup(), pytest_runtest_call() and pytest_runtest_teardown() hooks. 
- Args: nodeid (str) – full id of the item location – a triple of (filename, linenum, testname) """ - try: - logger = get_root_logger() - logger.critical('') - logger.critical(' END TEST {}', nodeid) - logger.critical('') - logger.critical('##########' * 8) - except Exception: - pass + with suppress(Exception): + logger.log('testing', '') + logger.log('testing', f' END TEST {nodeid}') + logger.log('testing', '##########' * 8) def pytest_runtest_logreport(report): @@ -143,100 +148,160 @@ def pytest_runtest_logreport(report): if report.skipped or report.outcome != 'failed': return try: - logger = get_root_logger() - logger.critical('') - logger.critical(' TEST {} FAILED during {}\n\n{}\n', report.nodeid, report.when, - report.longreprtext) - cnt = 15 + logger.log('testing', '') + logger.log('testing', f' TEST {report.nodeid} FAILED during {report.when} {report.longreprtext} ') if report.capstdout: - logger.critical('{}Captured stdout during {}{}\n{}\n', '= ' * cnt, report.when, - ' =' * cnt, report.capstdout) + logger.log('testing', f'============ Captured stdout during {report.when} {report.capstdout} ============') if report.capstderr: - logger.critical('{}Captured stderr during {}{}\n{}\n', '* ' * cnt, report.when, - ' *' * cnt, report.capstderr) + logger.log('testing', f'============ Captured stdout during {report.when} {report.capstderr} ============') except Exception: pass -@pytest.fixture -def temp_file(): - temp_file = 'temp' - with open(temp_file, 'w') as f: - f.write('') +@pytest.fixture(scope='session') +def db_name(): + return 'panoptes_testing' - yield temp_file - os.unlink(temp_file) +@pytest.fixture(scope='session') +def images_dir(tmpdir_factory): + directory = tmpdir_factory.mktemp('images') + return str(directory) -@pytest.fixture(scope="session") -def db_name(): - return 'panoptes_testing' +@pytest.fixture(scope='session') +def config_host(): + return 'localhost' -class FakeLogger: - def __init__(self): - self.messages = [] - pass - def _add(self, name, *args): - msg = [name] - assert len(args) == 1 - assert isinstance(args[0], tuple) - msg.append(args[0]) - self.messages.append(msg) +@pytest.fixture(scope='session') +def static_config_port(): + """Used for the session-scoped config_server where no config values + are expected to change during testing. + """ + return '6563' + - def debug(self, *args): - self._add('debug', args) +@pytest.fixture(scope='module') +def config_port(): + """Used for the function-scoped config_server when it is required to change + config values during testing. See `dynamic_config_server` docs below. + """ + return '4861' - def info(self, *args): - self._add('info', args) - def warning(self, *args): - self._add('warning', args) +@pytest.fixture(scope='session') +def config_path(): + return os.path.join(os.getenv('POCS'), 'tests', 'pocs_testing.yaml') - def error(self, *args): - self._add('error', args) - def critical(self, *args): - self._add('critical', args) +@pytest.fixture(scope='session') +def config_server_args(config_path): + loaded_config = load_config(config_files=config_path, ignore_local=True) + return { + 'config_file': config_path, + 'auto_save': False, + 'ignore_local': True, + 'POCS': loaded_config, + 'POCS_cut': Cut(loaded_config) + } + + +def make_config_server(config_host, config_port, config_server_args, images_dir, db_name): + def start_config_server(): + # Load the config items into the app config. + for k, v in config_server_args.items(): + config_server_app.config[k] = v + + # Start the actual flask server. 
+ config_server_app.run(host=config_host, port=config_port) + + proc = Process(target=start_config_server) + proc.start() + + logger.log('testing', f'config_server started with PID={proc.pid}') + + # Give server time to start + time.sleep(1) + + # Adjust various config items for testing + unit_name = 'Generic PANOPTES Unit' + unit_id = 'PAN000' + logger.log('testing', f'Setting testing name and unit_id to {unit_id}') + set_config('name', unit_name, port=config_port) + set_config('pan_id', unit_id, port=config_port) + + logger.log('testing', f'Setting testing database to {db_name}') + set_config('db.name', db_name, port=config_port) + + fields_file = 'simulator.yaml' + logger.log('testing', f'Setting testing scheduler fields_file to {fields_file}') + set_config('scheduler.fields_file', fields_file, port=config_port) + + # TODO(wtgee): determine if we need separate directories for each module. + logger.log('testing', f'Setting temporary image directory for testing') + set_config('directories.images', images_dir, port=config_port) + + # Make everything a simulator + simulators = hardware.get_simulator_names(simulator=['all']) + logger.log('testing', f'Setting all hardware to use simulators: {simulators}') + set_config('simulator', simulators, port=config_port) + + return proc + + +@pytest.fixture(scope='session', autouse=True) +def static_config_server(config_host, static_config_port, config_server_args, images_dir, db_name): + logger.log('testing', f'Starting config_server for testing session') + proc = make_config_server(config_host, static_config_port, config_server_args, images_dir, db_name) + yield proc + pid = proc.pid + proc.terminate() + time.sleep(0.1) + logger.log('testing', f'Killed config_server started with PID={pid}') @pytest.fixture(scope='function') -def fake_logger(): - return FakeLogger() +def dynamic_config_server(config_host, config_port, config_server_args, images_dir, db_name): + """If a test requires changing the configuration we use a function-scoped testing + server. We only do this on tests that require it so we are not constantly starting and stopping + the config server unless necessary. To use this, each test that requires it must use the + `dynamic_config_server` and `config_port` fixtures and must pass the `config_port` to all + instances that are created (propagated through PanBase). 
+ """ + logger.log('testing', f'Starting config_server for testing function') + proc = make_config_server(config_host, config_port, config_server_args, images_dir, db_name) -def can_connect_to_mongo(db_name): - global _can_connect_to_mongo - if _can_connect_to_mongo is None: - logger = get_root_logger() - try: - PanDB(db_type='mongo', db_name=db_name, logger=logger, connect=True) - _can_connect_to_mongo = True - except Exception: - _can_connect_to_mongo = False - logger.info('can_connect_to_mongo = {}', _can_connect_to_mongo) - return _can_connect_to_mongo + yield proc + pid = proc.pid + proc.terminate() + time.sleep(0.1) + logger.log('testing', f'Killed config_server started with PID={pid}') + + +@pytest.fixture +def temp_file(tmp_path): + d = tmp_path + d.mkdir(exist_ok=True) + f = d / 'temp' + yield f + f.unlink(missing_ok=True) @pytest.fixture(scope='function', params=_all_databases) def db_type(request, db_name): - db_list = request.config.option.test_databases if request.param not in db_list and 'all' not in db_list: - pytest.skip("Skipping {} DB, set --test-all-databases=True".format(request.param)) + pytest.skip(f"Skipping {request.param} DB, set --test-all-databases=True") - # If testing mongo, make sure we can connect, otherwise skip. - if request.param == 'mongo' and not can_connect_to_mongo(db_name): - pytest.skip("Can't connect to {} DB, skipping".format(request.param)) PanDB.permanently_erase_database(request.param, db_name, really='Yes', dangerous='Totally') return request.param @pytest.fixture(scope='function') def db(db_type, db_name): - return PanDB( - db_type=db_type, db_name=db_name, logger=get_root_logger(), connect=True) + return PanDB(db_type=db_type, db_name=db_name, connect=True) @pytest.fixture(scope='function') @@ -245,101 +310,64 @@ def memory_db(db_name): return PanDB(db_type='memory', db_name=db_name) -# ----------------------------------------------------------------------- -# Messaging support fixtures. It is important that tests NOT use the same -# ports that the real pocs_shell et al use; when they use the same ports, -# then tests may cause errors in the real system (e.g. by sending a -# shutdown command). - - -@pytest.fixture(scope='module') -def messaging_ports(): - # Some code (e.g. POCS._setup_messaging) assumes that sub and pub ports - # are sequential so these need to match that assumption for now. - return dict(msg_ports=(43001, 43002), cmd_ports=(44001, 44002)) +@pytest.fixture(scope='session') +def data_dir(): + return '/var/panoptes/panoptes-utils/tests/data' @pytest.fixture(scope='function') -def message_forwarder(messaging_ports): - cmd = os.path.join(os.getenv('POCS'), 'scripts', 'run_messaging_hub.py') - args = [cmd] - # Note that the other programs using these port pairs consider - # them to be pub and sub, in that order, but the forwarder sees things - # in reverse: it subscribes to the port that others publish to, - # and it publishes to the port that others subscribe to. - for _, (sub, pub) in messaging_ports.items(): - args.append('--pair') - args.append(str(sub)) - args.append(str(pub)) - - get_root_logger().info('message_forwarder fixture starting: {}', args) - proc = subprocess.Popen(args, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) - # It takes a while for the forwarder to start, so allow for that. - # TODO(jamessynge): Come up with a way to speed up these fixtures. 
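The conftest above states that tests which change configuration must request the `dynamic_config_server` and `config_port` fixtures and pass the port along to anything they create. A minimal sketch of such tests, using only the `set_config` helper already imported in this conftest and the hardware markers described in `pytest_collection_modifyitems`; the test names and config values are illustrative, not part of this changeset:

```python
import pytest
from panoptes.utils.config.client import set_config


@pytest.mark.without_camera
def test_with_simulated_hardware(dynamic_config_server, config_port):
    # Only affects the function-scoped config server started by the fixture.
    set_config('simulator', ['camera', 'mount'], port=config_port)


@pytest.mark.with_mount
def test_requires_real_mount(dynamic_config_server, config_port):
    # Runs only when `pytest --with-hardware mount` (or `all`) is given.
    set_config('simulator', [], port=config_port)
```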
- time.sleep(3) - yield messaging_ports - proc.terminate() - +def unsolved_fits_file(data_dir): + orig_file = os.path.join(data_dir, 'unsolved.fits') -@pytest.fixture(scope='function') -def msg_publisher(message_forwarder): - port = message_forwarder['msg_ports'][0] - publisher = PanMessaging.create_publisher(port) - yield publisher - publisher.close() + with tempfile.TemporaryDirectory() as tmpdirname: + copy_file = shutil.copy2(orig_file, tmpdirname) + yield copy_file @pytest.fixture(scope='function') -def msg_subscriber(message_forwarder): - port = message_forwarder['msg_ports'][1] - subscriber = PanMessaging.create_subscriber(port) - yield subscriber - subscriber.close() +def solved_fits_file(data_dir): + orig_file = os.path.join(data_dir, 'solved.fits.fz') - -@pytest.fixture(scope='function') -def cmd_publisher(message_forwarder): - port = message_forwarder['cmd_ports'][0] - publisher = PanMessaging.create_publisher(port) - yield publisher - publisher.close() + with tempfile.TemporaryDirectory() as tmpdirname: + copy_file = shutil.copy2(orig_file, tmpdirname) + yield copy_file @pytest.fixture(scope='function') -def cmd_subscriber(message_forwarder): - port = message_forwarder['cmd_ports'][1] - subscriber = PanMessaging.create_subscriber(port) - yield subscriber - subscriber.close() +def tiny_fits_file(data_dir): + orig_file = os.path.join(data_dir, 'tiny.fits') + + with tempfile.TemporaryDirectory() as tmpdirname: + copy_file = shutil.copy2(orig_file, tmpdirname) + yield copy_file @pytest.fixture(scope='function') -def save_environ(): - old_env = copy.deepcopy(os.environ) - yield - os.environ = old_env +def noheader_fits_file(data_dir): + orig_file = os.path.join(data_dir, 'noheader.fits') - -@pytest.fixture(scope='session') -def data_dir(): - return os.path.join(os.getenv('POCS'), 'pocs', 'tests', 'data') + with tempfile.TemporaryDirectory() as tmpdirname: + copy_file = shutil.copy2(orig_file, tmpdirname) + yield copy_file -@pytest.fixture(scope='session') -def unsolved_fits_file(data_dir): - return os.path.join(data_dir, 'unsolved.fits') - +@pytest.fixture(scope='function') +def cr2_file(data_dir): + cr2_path = os.path.join(data_dir, 'canon.cr2') -@pytest.fixture(scope='session') -def solved_fits_file(data_dir): - return os.path.join(data_dir, 'solved.fits.fz') + if not os.path.exists(cr2_path): + pytest.skip("No CR2 file found, skipping test.") + return cr2_path -@pytest.fixture(scope='session') -def tiny_fits_file(data_dir): - return os.path.join(data_dir, 'tiny.fits') +@pytest.fixture() +def caplog(_caplog): + class PropagatedHandler(logging.Handler): + def emit(self, record): + logging.getLogger(record.name).handle(record) -@pytest.fixture(scope='session') -def noheader_fits_file(data_dir): - return os.path.join(data_dir, 'noheader.fits') + handler_id = logger.add(PropagatedHandler(), format="{message}") + yield _caplog + with suppress(ValueError): + logger.remove(handler_id) diff --git a/docker/build-image.sh b/docker/build-image.sh new file mode 100755 index 000000000..ee8510ab2 --- /dev/null +++ b/docker/build-image.sh @@ -0,0 +1,14 @@ +#!/bin/bash -e + +SOURCE_DIR="${PANDIR}/POCS" +BASE_CLOUD_FILE="cloudbuild.yaml" +TAG="${1:-develop}" + +cd "${SOURCE_DIR}" + +echo "Building gcr.io/panoptes-exp/pocs:${TAG}" +gcloud builds submit \ + --timeout="1h" \ + --substitutions="_TAG=${TAG}" \ + --config "${SOURCE_DIR}/docker/${BASE_CLOUD_FILE}" \ + "${SOURCE_DIR}" diff --git a/docker/cloudbuild.yaml b/docker/cloudbuild.yaml new file mode 100644 index 000000000..882d41ebf --- /dev/null 
+++ b/docker/cloudbuild.yaml @@ -0,0 +1,19 @@ +steps: +- name: 'docker' + id: 'amd64-build' + args: + - 'build' + - '--build-arg image_url=gcr.io/panoptes-exp/panoptes-utils:${_TAG}' + - '-f=docker/${_TAG}.Dockerfile' + - '--tag=gcr.io/${PROJECT_ID}/panoptes-pocs:${_TAG}' + - '.' + +- name: 'docker' + id: 'amd64-push' + args: + - 'push' + - 'gcr.io/${PROJECT_ID}/panoptes-pocs:${_TAG}' + waitFor: ['amd64-build'] + +images: + - 'gcr.io/${PROJECT_ID}/panoptes-pocs:${_TAG}' diff --git a/docker/docker-compose-aag.yaml b/docker/docker-compose-aag.yaml new file mode 100644 index 000000000..fdcdb94f2 --- /dev/null +++ b/docker/docker-compose-aag.yaml @@ -0,0 +1,40 @@ +version: '3.7' +services: + aag-weather-reader: + image: gcr.io/panoptes-exp/aag-weather:latest + init: true + container_name: aag-weather-reader + privileged: true + network_mode: host + restart: on-failure + volumes: + - pandir:/var/panoptes + command: + - "python" + - "/app/scripts/read-aag.py" + - "--config-file" + - "/var/panoptes/conf_files/pocs_local.yaml" + - "--db-file" + - "/var/panoptes/json_store/panoptes/weather.db" + - "--store-result" + - "--verbose" + aag-weather-server: + image: gcr.io/panoptes-exp/aag-weather:latest + init: true + container_name: aag-weather-server + privileged: true + network_mode: host + environment: + - DB_NAME=/var/panoptes/json_store/panoptes/weather.db + command: ["flask", "run"] + restart: on-failure + volumes: + - pandir:/var/panoptes +volumes: + pandir: + driver: local + driver_opts: + type: none + device: /var/panoptes + o: bind + diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml new file mode 100644 index 000000000..6acf52339 --- /dev/null +++ b/docker/docker-compose.yaml @@ -0,0 +1,47 @@ +version: '3.7' +services: + peas-shell: + image: gcr.io/panoptes-exp/pocs:latest + init: true + container_name: peas-shell + hostname: peas-shell + privileged: true + network_mode: host + env_file: $PANDIR/env + volumes: + - pandir:/var/panoptes + # No-op to keep machine running, use $POCS/bin/peas-shell to access + command: + - "$PANDIR/panoptes-utils/bin/wait-for-it.sh" + - "localhost:6563" + - "--" + - "tail" + - "-f" + - "/dev/null" + pocs-shell: + image: gcr.io/panoptes-exp/pocs:latest + init: true + container_name: pocs-shell + hostname: pocs-shell + privileged: true + network_mode: host + env_file: $PANDIR/env + depends_on: + - "peas-shell" + volumes: + - pandir:/var/panoptes + # No-op to keep machine running, use $POCS/bin/pocs-shell to access + command: + - "$PANDIR/panoptes-utils/bin/wait-for-it.sh" + - "localhost:6563" + - "--" + - "tail" + - "-f" + - "/dev/null" +volumes: + pandir: + driver: local + driver_opts: + type: none + device: /var/panoptes + o: bind diff --git a/docker/latest.Dockerfile b/docker/latest.Dockerfile new file mode 100644 index 000000000..202d82b3b --- /dev/null +++ b/docker/latest.Dockerfile @@ -0,0 +1,55 @@ +ARG image_url=gcr.io/panoptes-exp/panoptes-utils:testing + +FROM $image_url AS pocs-base +LABEL maintainer="developers@projectpanoptes.org" + +ARG pandir=/var/panoptes +ARG arduino_url="https://downloads.arduino.cc/arduino-cli/arduino-cli_latest_Linux_64bit.tar.gz" + +ENV LANG=C.UTF-8 LC_ALL=C.UTF-8 +ENV SHELL /bin/zsh +ENV PANDIR $pandir +ENV POCS ${PANDIR}/POCS +ENV USER panoptes + +RUN apt-get update \ + && apt-get install --no-install-recommends --yes \ + gcc libncurses5-dev udev \ + # GPhoto2 + && wget https://raw.githubusercontent.com/gonzalo/gphoto2-updater/master/gphoto2-updater.sh \ + && chmod +x gphoto2-updater.sh \ + && /bin/bash 
gphoto2-updater.sh --stable \
+    && rm gphoto2-updater.sh \
+    # arduino-cli
+    && wget -q $arduino_url -O arduino-cli.tar.gz \
+    # Untar and capture output name (NOTE: assumes only one file).
+    && tar xvfz arduino-cli.tar.gz \
+    && mv arduino-cli /usr/local/bin/arduino-cli \
+    && chmod +x /usr/local/bin/arduino-cli
+
+COPY ./requirements.txt /tmp/requirements.txt
+# First deal with pip and PyYAML - see https://github.com/pypa/pip/issues/5247
+RUN pip install --no-cache-dir --no-deps --ignore-installed pip PyYAML && \
+    pip install --no-cache-dir -r /tmp/requirements.txt
+
+# Install module
+COPY . ${POCS}/
+RUN cd ${POCS} && pip install -e ".[google]"
+
+# Cleanup apt.
+USER root
+RUN apt-get autoremove --purge -y \
+    autoconf \
+    automake \
+    autopoint \
+    build-essential \
+    gcc \
+    gettext \
+    libtool \
+    pkg-config && \
+    apt-get autoremove --purge -y && \
+    apt-get -y clean && \
+    rm -rf /var/lib/apt/lists/*
+
+WORKDIR ${POCS}
+CMD ["/bin/zsh"]
diff --git a/docs/Makefile b/docs/Makefile
index 0c6c89a1d..108a030db 100644
--- a/docs/Makefile
+++ b/docs/Makefile
@@ -1,20 +1,193 @@
-# Minimal makefile for Sphinx documentation
+# Makefile for Sphinx documentation
 #
 
 # You can set these variables from the command line.
 SPHINXOPTS    =
 SPHINXBUILD   = sphinx-build
-SPHINXPROJ    = PANOPTES
-SOURCEDIR     = .
-BUILDDIR      = _build
+PAPER         =
+BUILDDIR      = ../build/sphinx/
+AUTODOCDIR    = api
+AUTODOCBUILD  = sphinx-apidoc
+PROJECT       = POCS
+MODULEDIR     = ../src/panoptes
+
+# User-friendly check for sphinx-build
+ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
+$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
+endif
+
+# Internal variables.
+PAPEROPT_a4     = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext doc-requirements

-# Put it first so that "make" without argument is like "make help".
 help:
-	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+	@echo "Please use \`make <target>' where <target> is one of"
+	@echo "  html       to make standalone HTML files"
+	@echo "  dirhtml    to make HTML files named index.html in directories"
+	@echo "  singlehtml to make a single large HTML file"
+	@echo "  pickle     to make pickle files"
+	@echo "  json       to make JSON files"
+	@echo "  htmlhelp   to make HTML files and a HTML help project"
+	@echo "  qthelp     to make HTML files and a qthelp project"
+	@echo "  devhelp    to make HTML files and a Devhelp project"
+	@echo "  epub       to make an epub"
+	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
+	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
+	@echo "  text       to make text files"
+	@echo "  man        to make manual pages"
+	@echo "  texinfo    to make Texinfo files"
+	@echo "  info       to make Texinfo files and run them through makeinfo"
+	@echo "  gettext    to make PO message catalogs"
+	@echo "  changes    to make an overview of all changed/added/deprecated items"
+	@echo "  xml        to make Docutils-native XML files"
+	@echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
+	@echo "  linkcheck  to check all external links for integrity"
+	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+	rm -rf $(BUILDDIR)/* $(AUTODOCDIR)
+
+$(AUTODOCDIR): $(MODULEDIR)
+	mkdir -p $@
+	$(AUTODOCBUILD) -f -o $@ $^
+
+doc-requirements: $(AUTODOCDIR)
+
+html: doc-requirements
+	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml: doc-requirements
+	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml: doc-requirements
+	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+	@echo
+	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle: doc-requirements
+	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+	@echo
+	@echo "Build finished; now you can process the pickle files."
+
+json: doc-requirements
+	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+	@echo
+	@echo "Build finished; now you can process the JSON files."
+
+htmlhelp: doc-requirements
+	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+	@echo
+	@echo "Build finished; now you can run HTML Help Workshop with the" \
+	      ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp: doc-requirements
+	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+	@echo
+	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
+	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/$(PROJECT).qhcp"
+	@echo "To view the help file:"
+	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/$(PROJECT).qhc"
+
+devhelp: doc-requirements
+	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+	@echo
+	@echo "Build finished."
+	@echo "To view the help file:"
+	@echo "# mkdir -p $$HOME/.local/share/devhelp/$(PROJECT)"
+	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/$(PROJECT)"
+	@echo "# devhelp"
+
+epub: doc-requirements
+	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+	@echo
+	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+ +patch-latex: + find _build/latex -iname "*.tex" | xargs -- \ + sed -i'' 's~includegraphics{~includegraphics\[keepaspectratio,max size={\\textwidth}{\\textheight}\]{~g' + +latex: doc-requirements + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + $(MAKE) patch-latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make' in that directory to run these through (pdf)latex" \ + "(use \`make latexpdf' here to do that automatically)." + +latexpdf: doc-requirements + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + $(MAKE) patch-latex + @echo "Running LaTeX files through pdflatex..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +latexpdfja: doc-requirements + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through platex and dvipdfmx..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +text: doc-requirements + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +man: doc-requirements + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +texinfo: doc-requirements + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" \ + "(use \`make info' here to do that automatically)." + +info: doc-requirements + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." + make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." + +gettext: doc-requirements + $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." + +changes: doc-requirements + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +linkcheck: doc-requirements + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +doctest: doc-requirements + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." -.PHONY: help Makefile +xml: doc-requirements + $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml + @echo + @echo "Build finished. The XML files are in $(BUILDDIR)/xml." -# Catch-all target: route all unknown targets to Sphinx using the new -# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). -%: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) \ No newline at end of file +pseudoxml: doc-requirements + $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml + @echo + @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 
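The rewritten docs/Makefile above regenerates the API reference before every build: each builder target depends on doc-requirements, which runs sphinx-apidoc (AUTODOCBUILD) over ../src/panoptes into docs/api, and all output now lands under ../build/sphinx/ instead of docs/_build. A minimal sketch of driving it locally, assuming Sphinx and the packages from docs/requirements.txt are already installed in the active environment (paths follow the Makefile variables above):

    cd docs
    make clean   # clears ../build/sphinx/* and the generated api/ tree
    make html    # runs sphinx-apidoc on ../src/panoptes, then sphinx-build -b html
    # rendered pages end up in ../build/sphinx/html/

The same doc-requirements prerequisite is attached to every builder (dirhtml, latexpdf, linkcheck, and so on), so the api/ tree is regenerated on each build rather than maintained by hand.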
diff --git a/docs/README.md b/docs/README.md deleted file mode 100644 index e99f0f070..000000000 --- a/docs/README.md +++ /dev/null @@ -1,17 +0,0 @@ -POCS Documentation -================== - -https://panoptes-pocs.readthedocs.io/en/develop/ - -The documentation is hosted on ReadTheDocs and does not need to be -be build locally. If you wish, you can: - -``` -cd $POCS/docs -make html -``` - -which will output html files in `$POCS/docs/_build/html`. - -Todo: - Add ability to generate pdf via `make pdf`. diff --git a/docs/_static/.gitignore b/docs/_static/.gitignore new file mode 100644 index 000000000..3c9636320 --- /dev/null +++ b/docs/_static/.gitignore @@ -0,0 +1 @@ +# Empty directory diff --git a/docs/authors.rst b/docs/authors.rst new file mode 100644 index 000000000..cd8e0913a --- /dev/null +++ b/docs/authors.rst @@ -0,0 +1,2 @@ +.. _authors: +.. include:: ../AUTHORS.rst diff --git a/docs/changelog.rst b/docs/changelog.rst new file mode 100644 index 000000000..783fce050 --- /dev/null +++ b/docs/changelog.rst @@ -0,0 +1,2 @@ +.. _changelog: +.. include:: ../CHANGELOG.rst diff --git a/docs/conf.py b/docs/conf.py index 2f25d33c1..5b13d17a3 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,201 +1,287 @@ # -*- coding: utf-8 -*- # -# Configuration file for the Sphinx documentation builder. +# This file is execfile()d with the current directory set to its containing dir. # -# This file does only contain a selection of the most common options. For a -# full list see the documentation: -# http://www.sphinx-doc.org/en/master/config +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import sys +import inspect +import shutil -# -- Path setup -------------------------------------------------------------- +__location__ = os.path.join(os.getcwd(), os.path.dirname( + inspect.getfile(inspect.currentframe()))) # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -import os -import sys -from recommonmark.parser import CommonMarkParser +sys.path.insert(0, os.path.join(__location__, '../src')) -# The docs are built on the ReadTheDocs website in a virtualenv -# the we don't necessarily control. The below line is used to -# add POCS to the path without installing or our usual env vars. -sys.path.insert(0, os.path.abspath('../')) +# -- Run sphinx-apidoc ------------------------------------------------------ +# This hack is necessary since RTD does not issue `sphinx-apidoc` before running +# `sphinx-build -b html . _build/html`. See Issue: +# https://github.com/rtfd/readthedocs.org/issues/1139 +# DON'T FORGET: Check the box "Install your project inside a virtualenv using +# setup.py install" in the RTD Advanced Settings. 
+# Additionally it helps us to avoid running apidoc manually -from pocs.version import __version__ +try: # for Sphinx >= 1.7 + from sphinx.ext import apidoc +except ImportError: + from sphinx import apidoc +output_dir = os.path.join(__location__, "api") +module_dir = os.path.join(__location__, "../src/panoptes") +try: + shutil.rmtree(output_dir) +except FileNotFoundError: + pass -# -- Project information ----------------------------------------------------- +try: + import sphinx + from pkg_resources import parse_version -project = 'POCS' -copyright = '2018, Project PANOPTES Team' -author = 'PANOPTES Team' + cmd_line_template = "sphinx-apidoc -f -o {outputdir} {moduledir}" + cmd_line = cmd_line_template.format(outputdir=output_dir, moduledir=module_dir) -# The short X.Y version -version = __version__ -# The full version, including alpha/beta/rc tags -release = '' + args = cmd_line.split(" ") + if parse_version(sphinx.__version__) >= parse_version('1.7'): + args = args[1:] + apidoc.main(args) +except Exception as e: + print("Running `sphinx-apidoc` failed!\n{}".format(e)) -# -- General configuration --------------------------------------------------- +# -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. -# # needs_sphinx = '1.0' -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. +# Add any Sphinx extension module names here, as strings. They can be extensions +# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = [ + 'sphinx.ext.autosummary', 'sphinx.ext.autodoc', 'sphinx.ext.doctest', - 'sphinx.ext.napoleon', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', + 'sphinx.ext.viewcode', 'sphinx.ext.coverage', + 'sphinx.ext.ifconfig', 'sphinx.ext.mathjax', - 'sphinx.ext.viewcode', - 'sphinx.ext.githubpages', + 'sphinx.ext.napoleon', + 'matplotlib.sphinxext.plot_directive', ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] -source_parsers = {'.md': CommonMarkParser} - +# The suffix of source filenames. +source_suffix = ['.rst'] -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# -source_suffix = ['.rst', '.md'] -# source_suffix = '.rst' +# The encoding of source files. +# source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' +# General information about the project. +project = u'POCS' +copyright = u'2020, Project PANOPTES' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = '' # Is set by calling `setup.py docs` +# The full version, including alpha/beta/rc tags. +release = '' # Is set by calling `setup.py docs` + # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None +# language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. 
+# today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path . -exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] +exclude_patterns = ['_build'] + +# The reST default role (used for this markup: `text`) to use for all documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False # The name of the Pygments (syntax highlighting) style to use. -# pygments_style = 'sphinx' +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False -# -- Options for HTML output ------------------------------------------------- + +# -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -# html_theme = 'sphinx_rtd_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. -# html_theme_options = { - 'logo_only': True, - 'style_external_links': True, - # Toc options - 'collapse_navigation': True, - 'sticky_navigation': True, - 'navigation_depth': 4, - 'includehidden': True, - 'titles_only': False + 'sidebar_width': '300px', + 'page_width': '1200px' } +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +try: + from panoptes.pocs import __version__ as version +except ImportError: + pass +else: + release = version + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = "" +html_logo = '_static/pan-title-black-transparent.png' + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] -html_logo = '_static/pan-title-black-transparent.png' +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' -# Custom sidebar templates, must be a dictionary that maps document names -# to template names. -# -# The default sidebars (for documents that don't match any pattern) are -# defined by theme itself. Builtin themes are using these templates by -# default: ``['localtoc.html', 'relations.html', 'sourcelink.html', -# 'searchbox.html']``. 
-# +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. # html_sidebars = {} +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} -# -- Options for HTMLHelp output --------------------------------------------- +# If false, no module index is generated. +# html_domain_indices = True -# Output file base name for HTML help builder. -htmlhelp_basename = 'PANOPTESdoc' +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True -# -- Options for LaTeX output ------------------------------------------------ +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Output file base name for HTML help builder. +htmlhelp_basename = 'pocs-doc' + +# -- Options for LaTeX output -------------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). - # # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). - # # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. - # # 'preamble': '', - - # Latex figure (float) alignment - # - # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). +# (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ - (master_doc, 'PANOPTES.tex', 'PANOPTES Documentation', - 'PANOPTES Team', 'manual'), + ('index', 'user_guide.tex', u'POCS Documentation', + u'Project PANOPTES', 'manual'), ] +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = "" -# -- Options for manual page output ------------------------------------------ +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - (master_doc, 'panoptes', 'PANOPTES Documentation', - [author], 1) -] - - -# -- Options for Texinfo output ---------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - (master_doc, 'PANOPTES', 'PANOPTES Documentation', - author, 'PANOPTES', 'One line description of project.', - 'Miscellaneous'), -] +# If true, show page references after internal links. +# latex_show_pagerefs = False +# If true, show URL addresses after external links. 
+# latex_show_urls = False -# -- Extension configuration ------------------------------------------------- +# Documents to append as an appendix to all manuals. +# latex_appendices = [] -# -- Options for intersphinx extension --------------------------------------- +# If false, no module index is generated. +# latex_domain_indices = True -# Example configuration for intersphinx: refer to the Python standard library. +# -- External mapping ------------------------------------------------------------ +python_version = '.'.join(map(str, sys.version_info[0:2])) intersphinx_mapping = { - 'python': ('https://docs.python.org/', None), + 'sphinx': ('http://www.sphinx-doc.org/en/stable', None), + 'python': ('https://docs.python.org/' + python_version, None), + 'matplotlib': ('https://matplotlib.org', None), + 'numpy': ('https://docs.scipy.org/doc/numpy', None), + 'sklearn': ('http://scikit-learn.org/stable', None), + 'pandas': ('http://pandas.pydata.org/pandas-docs/stable', None), + 'scipy': ('https://docs.scipy.org/doc/scipy/reference', None), 'astropy': ('http://docs.astropy.org/en/stable/', None), 'astroplan': ('https://astroplan.readthedocs.io/en/latest/', None), + 'panoptes.utils': ('https://panoptes-utils.readthedocs.io/en/latest/', None), } -# -- Options for todo extension ---------------------------------------------- - # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = True diff --git a/docs/contribute.rst b/docs/contribute.rst new file mode 100644 index 000000000..6b85df515 --- /dev/null +++ b/docs/contribute.rst @@ -0,0 +1,2 @@ +.. _contribute: +.. include:: ../CONTRIBUTING.rst diff --git a/docs/index.rst b/docs/index.rst index bfd09918f..c164ede9a 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,53 +1,26 @@ -PANOPTES Observatory Control System - POCS -============================================ +==== +POCS +==== -.. warning:: - Documentation under construction. +.. include:: ../README.rst - -`PANOPTES `_ is an open source citizen science -project that is designed to find exoplanets with digital cameras. The goal of -PANOPTES is to establish a global network of of robotic cameras run by amateur -astronomers and schools in order to monitor, as continuously as possible, a -very large number of stars. For more general information about the project, -including the science case and resources for interested individuals, see the -`project website `_. - -.. seealso:: - - This is the documentation for the software that controls a running - PANOPTES robotic observatory. This will mostly be useful for developers - working on the control software itself. Normal operating usage of POCS - doesn't require knowledge of the documentation here. - - If you are interested in how to operate a PANOPTES unit, please see the - `User's Guide `_. - - .. todo:: User's guide? +Contents +======== .. 
toctree:: - :maxdepth: 3 - - panoptes-overview - pocs-overview - - -Project Links -------------- + :maxdepth: 2 -- PANOPTES Homepage: https://projectpanoptes.org -- Forum: https://forum.projectpanoptes.org + License + Authors + Changelog + Module Reference + Contributing Guide -POCS Details ------------- -* `Source Code `_ -* `Release History `_ -* `Known Issues `_ -* `License `_ -Index ------ +Indices and tables +================== * :ref:`genindex` * :ref:`modindex` -* :ref:`search` \ No newline at end of file +* :ref:`search` +* :ref:`contribute` diff --git a/docs/license.rst b/docs/license.rst new file mode 100644 index 000000000..3989c5130 --- /dev/null +++ b/docs/license.rst @@ -0,0 +1,7 @@ +.. _license: + +======= +License +======= + +.. include:: ../LICENSE.txt diff --git a/docs/panoptes-overview.rst b/docs/panoptes-overview.rst deleted file mode 100644 index b3e1c49be..000000000 --- a/docs/panoptes-overview.rst +++ /dev/null @@ -1,27 +0,0 @@ -PANOPTES Overview -================= - -PANOPTES is driven primarily by an open source model with regards to both -software and data. In addition, the goal has always been to create an entire -"scientific platform" that could be used for educational purposes. Because -modern scientific practices usually include some component of software -development [1]_, one of the early design goals for the project was to have a -software base that was not only completely open source but also easily -readable (and modifiable) by individuals just learning how to program. - -Importantly, the software must also be accurate and robust in order to handle -long-term unattended and remote operations and must also be customizable based -on potential hardware differences between units. - -The PANOPTES `code repositories `_ can therefore -be used in two different contexts: automatically by installed hardware units -responsible for collecting data; and interactively by developers, including -students and amateurs, who can choose to modify existing operations, add new -functionality, or simply use the software as a learning tool for astronomy and -software development. The two distinct but equally important uses of the -software, one as an automatic observatory control system (OCS) for data -collection and the other as a tool for learning, place unique constraints on -the decisions made regarding software. I have designed and written all -software with these overarching goals in mind. - -.. [1] Ayer2014, Wilson2014 \ No newline at end of file diff --git a/docs/pocs-alternatives.rst b/docs/pocs-alternatives.rst deleted file mode 100644 index 200f27231..000000000 --- a/docs/pocs-alternatives.rst +++ /dev/null @@ -1,112 +0,0 @@ -================= -POCS Alternatives -================= - -A primary software adage is to avoid "recreating the wheel" and while -automated OCS systems are not unique, an initial review found that none of the -available systems were suitable to the PANOPTES goals outlined in the -`PANOPTES Overview `_. First, all software that -required license fees or was not otherwise free (of cost) and open (to -modification) was not applicable. Second, software was examined in -terms of its ability to handle the hardware and observing requirements of a -PANOPTES unit. Third, the ease-of-use of the software was determined, -both in terms of installation and usage as well as in ability to serve as a -learning tool. Three popular alternatives to the POCS ecosystem were -identified. 
A brief summary of each is given along with reasons for rejection -(in alphabetical order): - -INDI ----- - -`INDI `_ (Instrument-Neutral-Distributed-Interface) -consists of both a protocol for agnostic hardware control and a library that -implements that protocol in a server/client architecture. INDI is written -specifically as an astronomical tool and seems to be used exclusively within -astronomical applications. The code base is written almost exclusively in -C/C++ and the software is thus static and requires compilation in order to -run. The software is released under a GPLv2 license and undergoes active -development and maintenance. - -The basic idea behind INDI is that hardware (CCDs, domes, mounts, etc.) is -described (via drivers) according to the INDI protocol such that an INDI -server can communicate between that hardware and a given front-end client -(software used by the astronomer which can either be interactive or -automated) using standard Inter-process Communication (ICP) protocols -regardless of the particular details of the hardware. - -This is in fact an ideal setup for a project like PANOPTES and INDI was -initially used as a base design, with POCS serving primarily as an INDI -client and a thin-wrapper around the server. However, because of the lack of -suitable drivers for the chosen mount as well as complications with the -camera driver and the implementation of the server software, this approach -was eventually abandoned. It should be noted, however, that the server/client -architecture and the agnostic hardware implementation in both POCS and INDI -means that the eventual adoption of INDI should be largely straight-forward. -Should a group choose to implement this approach in the future, much of the -hardware specifications contained within POCS could be relegated to INDI, -allowing POCS to be a specific implementation of an INDI server/client -interaction. The specific details of POCS (state-based operation, scheduling -details, data organization and analysis) would remain largely unchanged. - -ROS / OpenROCS --------------- - -`ROS `_ (Robotic Operating System) is a set of software -libraries and various other scripts designed to control robotic components. -The idea is similar to INDI but ROS is designed to work with robotic hardware -in general and has no specific association with astronomy. ROS has a -widespread community and significant adoption within the robotics -community, specifically concerning industrial automation. In addition to -simple hardware control, ROS also implements various robotics-specific -algorithms, such as those associated with machine vision, movement, robotic -geometry (self-awareness of spatial location of various components of the -robot), and more. The specific design goals of ROS relate to its use as a -library for "large-scale integrative robotics research" for "complex" systems [73]. The library is designed to be multi-lingual (with respect to -programming languages) via the exchange of language-agnostic message. The -entire library consists of a number of packages and modules that require -specific management policies (although these can be integrated with the -host-OS system package manager). - -ROS is primarily designed to be used in large-scale applications and -industrial automation and was thus found to be unsuitable for the design -goals of PANOPTES. Specifically, the package management overhead made the -system overly complex when compared with the needs of PANOPTES. 
While there -are certainly some examples of small-scale robotics implementations available -on the website13 for ROS, the adoption of the software as a basis for -PANOPTES would have required significant overhead merely to understand the -basic operations of POCS. Working with the system was thus seen as too -complex for non-professionals and students. - -However, the advantages of the messaging processing system used by ROS were -immediately obvious and initially the messaging system behind the PANOPETS -libraries was based directly on the ROS messaging packages. Unfortunately, -because of the complexity of maintaining some of the ROS subpackages without -adoption of the overall software suite this path was eventually abandoned. - -The core ideas behind the messaging system (which are actually fairly generic -in nature) have nevertheless been retained. More recently others have pursued -the use of ROS specifically for use within autonomous observatories. While -the authors report success, the lack of available code and -`documentation `_ make the software not worth pursuing in light of the fact that POCS had already undergone significant development -before the paper was made available. - -Details about the code are sparse within the paper and the corresponding `website `_ -(accessed 2017-01-24) doesn’t offer additional details. - -RTS2 ----- - -`RTS2 `_ is a fairly mature project that was originally developed for the BART telescope for autonomous gamma ray burst (GRB) -followup. The overall system is part of the `GLORIA Project `_, -which has some shared goals with the PANOPTES network but is aimed at more -professional-level telescopes and observatories18. The software implements a -client/server system and hardware abstraction layer similar to INDI. The -software base is primarily written in C++ and released under a LGPL-3.0 -license and is under active development. RTS2 further includes logical -control over the system, which includes things such as scheduling, -plate-solving, metadata tracking, etc. - -The primary reason for not pursuing RTS2 as the base for PANOPTES was due to -the desire to employ Python as the dominant language. While RTS2 could -provide for the operational aspects of PANOPTES it was not seen as suitable -for the corresponding educational aspects. diff --git a/docs/pocs-overview.rst b/docs/pocs-overview.rst deleted file mode 100644 index f67eea956..000000000 --- a/docs/pocs-overview.rst +++ /dev/null @@ -1,177 +0,0 @@ -************* -POCS Overview -************* - -The PANOPTES Observatory Control System (POCS) is the primary software -responsible for running a PANOPTES unit. POCS is implemented as a finite state -machine (described below) that has three primary responsibilities: - -* overall control of the unit for taking observations, -* relaying messages between various components of the system, -* and determining the operational safety of the unit. - -POCS is designed such that under normal operating conditions the software is -initialized once and left running from day-to-day, with operation moving to a -sleeping state during daylight hours and observations resuming automatically -each night when POCS determines conditions are safe. - -POCS is implemented as four separate logical layers, where increasing levels -of abstraction take place between each of the layers. These layers are the -low-level Core Layer, the Hardware Abstraction Layer, the Functional Layer, -and the high-level Decision Layer. - - -.. note:: - .. 
image:: _static/pocs-graph.png - - **POCS software layers** Diagram of POCS software layers. Note that the - items in yellow (Dome, Guider, and TheSkyX) are not typically used by PANOPTES - observatories (note: PAN006 is inside an astrohaven dome). - - TheSkyX interface was added by the `Huntsman Telescope `_, - which also uses POCS for control. They are included in the diagram as a - means of showing the flexibility of the Functional Layer to interact with - components from the HAL. - -==================== -POCS Software Design -==================== - -Core Layer ----------- - -The Core Layer is the lowest level and is responsible for interacting directly -with the hardware. For DSLR cameras this is accomplished by providing -wrappers around the existing `gphoto2 `_ software -package. For PANOPTES, most other attached hardware works via direct RS-232 -serial communication through a USB-to-Serial converter. A utility module was -written for common read/write operations that automatically handles details -associated with buffering, connection, etc. Support for TheSkyX was written -into POCS for the `Huntsman Telescope `_. -The overall goal of the Core Layer is to provide a consistent interface for -modules written at the HAL level. - -Hardware Abstraction Layer (HAL) --------------------------------- - -The use of a HAL is widespread both in computing and robotics. In general, a -HAL is meant to hide low-level hardware and device specific details from -higher level programming [Elkady2012]_. Thus, while every camera ultimately -needs to support, for instance, a ``take_exposure(seconds=120)`` command, the -details of how a specific camera model is programmed to achieve that may be -very different. From the perspective of software at higher levels those -details are not important, all that is important is that all attached cameras -react appropriately to the ``take_exposure`` command. - - -While the Core Layer consists of one module per feature, the HAL implements a -Template Pattern [Gamma1993]_ wherein a base class provides an interface to -be used by higher levels and concrete classes are written for each specific -device type. For example, a base Mount class dictates an interface that -includes methods such as ``slew_to_home``, ``set_target_coordinates``, -``slew_to_target``, ``park``, etc. The concrete implementation for the -iOptron mount then uses the Core Layer level RS-232 commands to issue the -specific serial commands needed to perform those functions. Likewise, a -Paramount ME II concrete implementation of the Mount class would use the Core -Layer interface to `TheSkyX `_ -to implement those same methods. Thus, higher levels of the software can make -a call to ``mount.slew_to_target()`` and expect it to work regardless of the -particular mount type attached. - -Another advantage of this type of setup is that a concrete implementation of -a hardware simulator can be created to test higher-level software without -actually having physical devices attached, which is how much of the PANOPTES -testing framework is implemented [1]_. - - -Functional Layer ----------------- - -The Functional Layer is analogous to a traditional observatory: an -Observatory has a location from which it operates, attached hardware which it -uses to observe, a scheduler (a modified dispatch scheduler [Denny2004]_ in -the case of PANOPTES) to select from the available target_list to form valid -observations, etc. - - -The Observatory (i.e. 
the Functional Layer) is thus where most of the -operations associated with taking observations actually happen. When the -software is used interactively (as opposed to the usual automatic mode) it is -with the Observatory that an individual would overwhelmingly interact. - -The Functional Layer is also responsible for connecting to and initializing -the attached hardware, specified by accompanying configuration files. The -potential list of targets and the type of scheduler used are also loaded from -a configuration file. The particular type of scheduler is agnostic to the -Observatory, which simply calls ``scheduler.get_observation()`` such that the -external scheduler can handle all the logic of choosing a target. In the -figure listed above this is represented by the "Scheduler" and "Targets" that -are input to the "Observatory." - -Decision Layer --------------- - -The Decision Layer is the highest level of the system and can be viewed as -the "intelligence" layer. When using the software in interactive mode, the -human user takes on the role of the Decision Layer while in automatic -operations this is accomplished via an event-driven finite state machine -(FSM). - -A state machine is a simple model of a system where that system can only -exist in discrete conditions or modes. Those conditions or modes are called -states. Typically states determine how the system reacts to input, either -from a user or the environment. A state machine can exist solely in the -software or the software can be representative of a physical model. For -PANOPTES, the physical unit is the system and POCS models the condition of -the hardware. The "finite" aspect refers to the fact that there are a limited -and known number of states in which the system can exist. - -Examples of PANOPTES states include: - -* ``sleeping``: Occurs in daylight hours, the cameras are facing down, and themount is unresponsive to slew commands. -* ``observing``: The cameras are exposing and the mount is tracking. -* ``scheduling``: The mount is unparked, not slewing or tracking, it is dark, and the software is running through the scheduler. - -PANOPTES states are named with verbs to represent the action the physical -unit is currently performing. - -POCS is designed to have a configurable state machine, with the highest level -logic written in each state definition file. State definition files are meant -to be simple as most of the details of the logic should exist in the -functional layer. Students using POCS for educational purposes will most -likely start with the state files. - -State machines are responsible for mapping inputs (e.g. ``get_ready``, -``schedule``, ``start_slewing``, etc.) to outputs, where the particular -mapping depends on the current state [Lee2017]_. The mappings of input to -output are governed by transition events [2]_. - -State definitions and their transitions are defined external to POCS, -allowing for multiple possible state machines that are agnostic to the layers -below the Decision Layer. This external definition is similar to the -"Scheduler" in the Functional Layer and is represented similarly in the -figure above. - -POCS is responsible for determining operational safety via a query of the -weather station, determination of sun position, etc. The transition for each -state has a set of conditions that must be satisfied in order for a -successful transition to a new state to be accomplished and a requisite check -for operational safety occurs before all transitions. 
If the system is -determined to be unsafe the machine either transitions to the parking state -or remains in the sleeping or ready state. - -.. include:: pocs-alternatives.rst - -.. [1] Writing hardware simulators, while helpful for testing purposes, can -also add significant overhead to a project. For major projects such as the -LSST or TMT this is obviously a requirement. PANOPTES implements basic -hardware simulators for the mount and camera but full-scale hardware -simulation of specific components has not yet been achieved. - -.. [2] The Python FSM used by POCS is in fact called `transitions `_. - - -.. [Elkady2012] Stuff -.. [Denny2004] Stuff -.. [Lee2017] Stuff -.. [Gamma1993] Stuff \ No newline at end of file diff --git a/docs/requirements.txt b/docs/requirements.txt index e0bd7d87f..0a70e145b 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,29 +1,3 @@ -astroplan -astropy >= 3.0.0 -ccdproc -codecov -coveralls -Cython -dateparser -gcloud -google-cloud-storage -matplotlib >= 2.0.0,<3.0.0 -mocket -numpy >= 1.6 -pycodestyle == 2.3.1 -pymongo >= 3.2.2 -pyserial >= 3.1.1 -pytest >= 3.4.0 -python_dateutil >= 2.5.3 -pytz -PyYAML >= 3.11 -pyzmq >= 15.3.0 -readline -recommonmark -requests -scikit_image >= 0.12.3 -scipy >= 0.17.1 +-r ../requirements.txt sphinx_rtd_theme -transitions >= 0.4.0 -tweepy -wcsaxes + diff --git a/docs/source/modules.rst b/docs/source/modules.rst deleted file mode 100644 index 345620603..000000000 --- a/docs/source/modules.rst +++ /dev/null @@ -1,8 +0,0 @@ -POCS -==== - -.. toctree:: - :maxdepth: 4 - - peas - pocs diff --git a/docs/source/peas.rst b/docs/source/peas.rst deleted file mode 100644 index d73546b90..000000000 --- a/docs/source/peas.rst +++ /dev/null @@ -1,38 +0,0 @@ -peas package -============ - -Submodules ----------- - -peas.PID module ---------------- - -.. automodule:: peas.PID - :members: - :undoc-members: - :show-inheritance: - -peas.sensors module -------------------- - -.. automodule:: peas.sensors - :members: - :undoc-members: - :show-inheritance: - -peas.weather module -------------------- - -.. automodule:: peas.weather - :members: - :undoc-members: - :show-inheritance: - - -Module contents ---------------- - -.. automodule:: peas - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/pocs.camera.rst b/docs/source/pocs.camera.rst deleted file mode 100644 index c3c77f047..000000000 --- a/docs/source/pocs.camera.rst +++ /dev/null @@ -1,78 +0,0 @@ -pocs.camera package -=================== - -Submodules ----------- - -pocs.camera.camera module -------------------------- - -.. automodule:: pocs.camera.camera - :members: - :undoc-members: - :show-inheritance: - -pocs.camera.canon\_gphoto2 module ---------------------------------- - -.. automodule:: pocs.camera.canon_gphoto2 - :members: - :undoc-members: - :show-inheritance: - -pocs.camera.fli module ----------------------- - -.. automodule:: pocs.camera.fli - :members: - :undoc-members: - :show-inheritance: - -pocs.camera.libfli module -------------------------- - -.. automodule:: pocs.camera.libfli - :members: - :undoc-members: - :show-inheritance: - -pocs.camera.libfliconstants module ----------------------------------- - -.. automodule:: pocs.camera.libfliconstants - :members: - :undoc-members: - :show-inheritance: - -pocs.camera.sbig module ------------------------ - -.. automodule:: pocs.camera.sbig - :members: - :undoc-members: - :show-inheritance: - -pocs.camera.sbigudrv module ---------------------------- - -.. 
automodule:: pocs.camera.sbigudrv - :members: - :undoc-members: - :show-inheritance: - -pocs.camera.simulator module ----------------------------- - -.. automodule:: pocs.camera.simulator - :members: - :undoc-members: - :show-inheritance: - - -Module contents ---------------- - -.. automodule:: pocs.camera - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/pocs.dome.rst b/docs/source/pocs.dome.rst deleted file mode 100644 index ef02ffd77..000000000 --- a/docs/source/pocs.dome.rst +++ /dev/null @@ -1,54 +0,0 @@ -pocs.dome package -================= - -Submodules ----------- - -pocs.dome.abstract\_serial\_dome module ---------------------------------------- - -.. automodule:: pocs.dome.abstract_serial_dome - :members: - :undoc-members: - :show-inheritance: - -pocs.dome.astrohaven module ---------------------------- - -.. automodule:: pocs.dome.astrohaven - :members: - :undoc-members: - :show-inheritance: - -pocs.dome.bisque module ------------------------ - -.. automodule:: pocs.dome.bisque - :members: - :undoc-members: - :show-inheritance: - -pocs.dome.protocol\_astrohaven\_simulator module ------------------------------------------------- - -.. automodule:: pocs.dome.protocol_astrohaven_simulator - :members: - :undoc-members: - :show-inheritance: - -pocs.dome.simulator module --------------------------- - -.. automodule:: pocs.dome.simulator - :members: - :undoc-members: - :show-inheritance: - - -Module contents ---------------- - -.. automodule:: pocs.dome - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/pocs.focuser.rst b/docs/source/pocs.focuser.rst deleted file mode 100644 index c4f864329..000000000 --- a/docs/source/pocs.focuser.rst +++ /dev/null @@ -1,46 +0,0 @@ -pocs.focuser package -==================== - -Submodules ----------- - -pocs.focuser.birger module --------------------------- - -.. automodule:: pocs.focuser.birger - :members: - :undoc-members: - :show-inheritance: - -pocs.focuser.focuser module ---------------------------- - -.. automodule:: pocs.focuser.focuser - :members: - :undoc-members: - :show-inheritance: - -pocs.focuser.focuslynx module ------------------------------ - -.. automodule:: pocs.focuser.focuslynx - :members: - :undoc-members: - :show-inheritance: - -pocs.focuser.simulator module ------------------------------ - -.. automodule:: pocs.focuser.simulator - :members: - :undoc-members: - :show-inheritance: - - -Module contents ---------------- - -.. automodule:: pocs.focuser - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/pocs.mount.rst b/docs/source/pocs.mount.rst deleted file mode 100644 index d6257a809..000000000 --- a/docs/source/pocs.mount.rst +++ /dev/null @@ -1,54 +0,0 @@ -pocs.mount package -================== - -Submodules ----------- - -pocs.mount.bisque module ------------------------- - -.. automodule:: pocs.mount.bisque - :members: - :undoc-members: - :show-inheritance: - -pocs.mount.ioptron module -------------------------- - -.. automodule:: pocs.mount.ioptron - :members: - :undoc-members: - :show-inheritance: - -pocs.mount.mount module ------------------------ - -.. automodule:: pocs.mount.mount - :members: - :undoc-members: - :show-inheritance: - -pocs.mount.serial module ------------------------- - -.. automodule:: pocs.mount.serial - :members: - :undoc-members: - :show-inheritance: - -pocs.mount.simulator module ---------------------------- - -.. 
automodule:: pocs.mount.simulator - :members: - :undoc-members: - :show-inheritance: - - -Module contents ---------------- - -.. automodule:: pocs.mount - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/pocs.rst b/docs/source/pocs.rst deleted file mode 100644 index 5e0568e4a..000000000 --- a/docs/source/pocs.rst +++ /dev/null @@ -1,78 +0,0 @@ -pocs package -============ - -Subpackages ------------ - -.. toctree:: - - pocs.camera - pocs.dome - pocs.focuser - pocs.mount - pocs.scheduler - pocs.sensors - pocs.serial_handlers - pocs.state - pocs.tests - pocs.utils - -Submodules ----------- - -pocs.base module ----------------- - -.. automodule:: pocs.base - :members: - :undoc-members: - :show-inheritance: - -pocs.core module ----------------- - -.. automodule:: pocs.core - :members: - :undoc-members: - :show-inheritance: - -pocs.hardware module --------------------- - -.. automodule:: pocs.hardware - :members: - :undoc-members: - :show-inheritance: - -pocs.images module ------------------- - -.. automodule:: pocs.images - :members: - :undoc-members: - :show-inheritance: - -pocs.observatory module ------------------------ - -.. automodule:: pocs.observatory - :members: - :undoc-members: - :show-inheritance: - -pocs.version module -------------------- - -.. automodule:: pocs.version - :members: - :undoc-members: - :show-inheritance: - - -Module contents ---------------- - -.. automodule:: pocs - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/pocs.scheduler.rst b/docs/source/pocs.scheduler.rst deleted file mode 100644 index 4efbe5b7f..000000000 --- a/docs/source/pocs.scheduler.rst +++ /dev/null @@ -1,54 +0,0 @@ -pocs.scheduler package -====================== - -Submodules ----------- - -pocs.scheduler.constraint module --------------------------------- - -.. automodule:: pocs.scheduler.constraint - :members: - :undoc-members: - :show-inheritance: - -pocs.scheduler.dispatch module ------------------------------- - -.. automodule:: pocs.scheduler.dispatch - :members: - :undoc-members: - :show-inheritance: - -pocs.scheduler.field module ---------------------------- - -.. automodule:: pocs.scheduler.field - :members: - :undoc-members: - :show-inheritance: - -pocs.scheduler.observation module ---------------------------------- - -.. automodule:: pocs.scheduler.observation - :members: - :undoc-members: - :show-inheritance: - -pocs.scheduler.scheduler module -------------------------------- - -.. automodule:: pocs.scheduler.scheduler - :members: - :undoc-members: - :show-inheritance: - - -Module contents ---------------- - -.. automodule:: pocs.scheduler - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/pocs.sensors.rst b/docs/source/pocs.sensors.rst deleted file mode 100644 index 1459f480e..000000000 --- a/docs/source/pocs.sensors.rst +++ /dev/null @@ -1,22 +0,0 @@ -pocs.sensors package -==================== - -Submodules ----------- - -pocs.sensors.arduino\_io module -------------------------------- - -.. automodule:: pocs.sensors.arduino_io - :members: - :undoc-members: - :show-inheritance: - - -Module contents ---------------- - -.. 
automodule:: pocs.sensors - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/pocs.serial_handlers.rst b/docs/source/pocs.serial_handlers.rst deleted file mode 100644 index 896eb47f4..000000000 --- a/docs/source/pocs.serial_handlers.rst +++ /dev/null @@ -1,22 +0,0 @@ -pocs.serial\_handlers package -============================= - -Submodules ----------- - -pocs.serial\_handlers.protocol\_arduinosimulator module -------------------------------------------------------- - -.. automodule:: pocs.serial_handlers.protocol_arduinosimulator - :members: - :undoc-members: - :show-inheritance: - - -Module contents ---------------- - -.. automodule:: pocs.serial_handlers - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/pocs.state.rst b/docs/source/pocs.state.rst deleted file mode 100644 index 4ad2768ef..000000000 --- a/docs/source/pocs.state.rst +++ /dev/null @@ -1,29 +0,0 @@ -pocs.state package -================== - -Subpackages ------------ - -.. toctree:: - - pocs.state.states - -Submodules ----------- - -pocs.state.machine module -------------------------- - -.. automodule:: pocs.state.machine - :members: - :undoc-members: - :show-inheritance: - - -Module contents ---------------- - -.. automodule:: pocs.state - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/pocs.state.states.default.rst b/docs/source/pocs.state.states.default.rst deleted file mode 100644 index b8ab3653a..000000000 --- a/docs/source/pocs.state.states.default.rst +++ /dev/null @@ -1,102 +0,0 @@ -pocs.state.states.default package -================================= - -Submodules ----------- - -pocs.state.states.default.analyzing module ------------------------------------------- - -.. automodule:: pocs.state.states.default.analyzing - :members: - :undoc-members: - :show-inheritance: - -pocs.state.states.default.housekeeping module ---------------------------------------------- - -.. automodule:: pocs.state.states.default.housekeeping - :members: - :undoc-members: - :show-inheritance: - -pocs.state.states.default.observing module ------------------------------------------- - -.. automodule:: pocs.state.states.default.observing - :members: - :undoc-members: - :show-inheritance: - -pocs.state.states.default.parked module ---------------------------------------- - -.. automodule:: pocs.state.states.default.parked - :members: - :undoc-members: - :show-inheritance: - -pocs.state.states.default.parking module ----------------------------------------- - -.. automodule:: pocs.state.states.default.parking - :members: - :undoc-members: - :show-inheritance: - -pocs.state.states.default.pointing module ------------------------------------------ - -.. automodule:: pocs.state.states.default.pointing - :members: - :undoc-members: - :show-inheritance: - -pocs.state.states.default.ready module --------------------------------------- - -.. automodule:: pocs.state.states.default.ready - :members: - :undoc-members: - :show-inheritance: - -pocs.state.states.default.scheduling module -------------------------------------------- - -.. automodule:: pocs.state.states.default.scheduling - :members: - :undoc-members: - :show-inheritance: - -pocs.state.states.default.sleeping module ------------------------------------------ - -.. automodule:: pocs.state.states.default.sleeping - :members: - :undoc-members: - :show-inheritance: - -pocs.state.states.default.slewing module ----------------------------------------- - -.. 
automodule:: pocs.state.states.default.slewing - :members: - :undoc-members: - :show-inheritance: - -pocs.state.states.default.tracking module ------------------------------------------ - -.. automodule:: pocs.state.states.default.tracking - :members: - :undoc-members: - :show-inheritance: - - -Module contents ---------------- - -.. automodule:: pocs.state.states.default - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/pocs.state.states.rst b/docs/source/pocs.state.states.rst deleted file mode 100644 index 7ae366203..000000000 --- a/docs/source/pocs.state.states.rst +++ /dev/null @@ -1,17 +0,0 @@ -pocs.state.states package -========================= - -Subpackages ------------ - -.. toctree:: - - pocs.state.states.default - -Module contents ---------------- - -.. automodule:: pocs.state.states - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/pocs.tests.bisque.rst b/docs/source/pocs.tests.bisque.rst deleted file mode 100644 index 343d3b68c..000000000 --- a/docs/source/pocs.tests.bisque.rst +++ /dev/null @@ -1,38 +0,0 @@ -pocs.tests.bisque package -========================= - -Submodules ----------- - -pocs.tests.bisque.test\_dome module ------------------------------------ - -.. automodule:: pocs.tests.bisque.test_dome - :members: - :undoc-members: - :show-inheritance: - -pocs.tests.bisque.test\_mount module ------------------------------------- - -.. automodule:: pocs.tests.bisque.test_mount - :members: - :undoc-members: - :show-inheritance: - -pocs.tests.bisque.test\_run module ----------------------------------- - -.. automodule:: pocs.tests.bisque.test_run - :members: - :undoc-members: - :show-inheritance: - - -Module contents ---------------- - -.. automodule:: pocs.tests.bisque - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/pocs.tests.data.rst b/docs/source/pocs.tests.data.rst deleted file mode 100644 index db426a75c..000000000 --- a/docs/source/pocs.tests.data.rst +++ /dev/null @@ -1,10 +0,0 @@ -pocs.tests.data package -======================= - -Module contents ---------------- - -.. automodule:: pocs.tests.data - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/pocs.tests.rst b/docs/source/pocs.tests.rst deleted file mode 100644 index f952a7f3d..000000000 --- a/docs/source/pocs.tests.rst +++ /dev/null @@ -1,232 +0,0 @@ -pocs.tests package -================== - -Subpackages ------------ - -.. toctree:: - - pocs.tests.bisque - pocs.tests.data - pocs.tests.serial_handlers - pocs.tests.utils - -Submodules ----------- - -pocs.tests.conftest module --------------------------- - -.. automodule:: pocs.tests.conftest - :members: - :undoc-members: - :show-inheritance: - -pocs.tests.test\_arduino\_io module ------------------------------------ - -.. automodule:: pocs.tests.test_arduino_io - :members: - :undoc-members: - :show-inheritance: - -pocs.tests.test\_astrohaven\_dome module ----------------------------------------- - -.. automodule:: pocs.tests.test_astrohaven_dome - :members: - :undoc-members: - :show-inheritance: - -pocs.tests.test\_base module ----------------------------- - -.. automodule:: pocs.tests.test_base - :members: - :undoc-members: - :show-inheritance: - -pocs.tests.test\_base\_scheduler module ---------------------------------------- - -.. automodule:: pocs.tests.test_base_scheduler - :members: - :undoc-members: - :show-inheritance: - -pocs.tests.test\_camera module ------------------------------- - -.. 
automodule:: pocs.tests.test_camera - :members: - :undoc-members: - :show-inheritance: - -pocs.tests.test\_codestyle module ---------------------------------- - -.. automodule:: pocs.tests.test_codestyle - :members: - :undoc-members: - :show-inheritance: - -pocs.tests.test\_config module ------------------------------- - -.. automodule:: pocs.tests.test_config - :members: - :undoc-members: - :show-inheritance: - -pocs.tests.test\_constraints module ------------------------------------ - -.. automodule:: pocs.tests.test_constraints - :members: - :undoc-members: - :show-inheritance: - -pocs.tests.test\_database module --------------------------------- - -.. automodule:: pocs.tests.test_database - :members: - :undoc-members: - :show-inheritance: - -pocs.tests.test\_dispatch\_scheduler module -------------------------------------------- - -.. automodule:: pocs.tests.test_dispatch_scheduler - :members: - :undoc-members: - :show-inheritance: - -pocs.tests.test\_dome\_simulator module ---------------------------------------- - -.. automodule:: pocs.tests.test_dome_simulator - :members: - :undoc-members: - :show-inheritance: - -pocs.tests.test\_field module ------------------------------ - -.. automodule:: pocs.tests.test_field - :members: - :undoc-members: - :show-inheritance: - -pocs.tests.test\_focuser module -------------------------------- - -.. automodule:: pocs.tests.test_focuser - :members: - :undoc-members: - :show-inheritance: - -pocs.tests.test\_horizon\_points module ---------------------------------------- - -.. automodule:: pocs.tests.test_horizon_points - :members: - :undoc-members: - :show-inheritance: - -pocs.tests.test\_images module ------------------------------- - -.. automodule:: pocs.tests.test_images - :members: - :undoc-members: - :show-inheritance: - -pocs.tests.test\_ioptron module -------------------------------- - -.. automodule:: pocs.tests.test_ioptron - :members: - :undoc-members: - :show-inheritance: - -pocs.tests.test\_messaging module ---------------------------------- - -.. automodule:: pocs.tests.test_messaging - :members: - :undoc-members: - :show-inheritance: - -pocs.tests.test\_mount\_simulator module ----------------------------------------- - -.. automodule:: pocs.tests.test_mount_simulator - :members: - :undoc-members: - :show-inheritance: - -pocs.tests.test\_observation module ------------------------------------ - -.. automodule:: pocs.tests.test_observation - :members: - :undoc-members: - :show-inheritance: - -pocs.tests.test\_observatory module ------------------------------------ - -.. automodule:: pocs.tests.test_observatory - :members: - :undoc-members: - :show-inheritance: - -pocs.tests.test\_pocs module ----------------------------- - -.. automodule:: pocs.tests.test_pocs - :members: - :undoc-members: - :show-inheritance: - -pocs.tests.test\_rs232 module ------------------------------ - -.. automodule:: pocs.tests.test_rs232 - :members: - :undoc-members: - :show-inheritance: - -pocs.tests.test\_social\_messaging module ------------------------------------------ - -.. automodule:: pocs.tests.test_social_messaging - :members: - :undoc-members: - :show-inheritance: - -pocs.tests.test\_state\_machine module --------------------------------------- - -.. automodule:: pocs.tests.test_state_machine - :members: - :undoc-members: - :show-inheritance: - -pocs.tests.test\_theskyx\_utils module --------------------------------------- - -.. 
automodule:: pocs.tests.test_theskyx_utils - :members: - :undoc-members: - :show-inheritance: - - -Module contents ---------------- - -.. automodule:: pocs.tests - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/pocs.tests.serial_handlers.rst b/docs/source/pocs.tests.serial_handlers.rst deleted file mode 100644 index b8a34104b..000000000 --- a/docs/source/pocs.tests.serial_handlers.rst +++ /dev/null @@ -1,38 +0,0 @@ -pocs.tests.serial\_handlers package -=================================== - -Submodules ----------- - -pocs.tests.serial\_handlers.protocol\_buffers module ----------------------------------------------------- - -.. automodule:: pocs.tests.serial_handlers.protocol_buffers - :members: - :undoc-members: - :show-inheritance: - -pocs.tests.serial\_handlers.protocol\_hooked module ---------------------------------------------------- - -.. automodule:: pocs.tests.serial_handlers.protocol_hooked - :members: - :undoc-members: - :show-inheritance: - -pocs.tests.serial\_handlers.protocol\_no\_op module ---------------------------------------------------- - -.. automodule:: pocs.tests.serial_handlers.protocol_no_op - :members: - :undoc-members: - :show-inheritance: - - -Module contents ---------------- - -.. automodule:: pocs.tests.serial_handlers - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/pocs.tests.utils.google.rst b/docs/source/pocs.tests.utils.google.rst deleted file mode 100644 index 69277b125..000000000 --- a/docs/source/pocs.tests.utils.google.rst +++ /dev/null @@ -1,22 +0,0 @@ -pocs.tests.utils.google package -=============================== - -Submodules ----------- - -pocs.tests.utils.google.test\_storage module --------------------------------------------- - -.. automodule:: pocs.tests.utils.google.test_storage - :members: - :undoc-members: - :show-inheritance: - - -Module contents ---------------- - -.. automodule:: pocs.tests.utils.google - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/pocs.tests.utils.rst b/docs/source/pocs.tests.utils.rst deleted file mode 100644 index 88eb83076..000000000 --- a/docs/source/pocs.tests.utils.rst +++ /dev/null @@ -1,61 +0,0 @@ -pocs.tests.utils package -======================== - -Subpackages ------------ - -.. toctree:: - - pocs.tests.utils.google - -Submodules ----------- - -pocs.tests.utils.test\_fits\_utils module ------------------------------------------ - -.. automodule:: pocs.tests.utils.test_fits_utils - :members: - :undoc-members: - :show-inheritance: - -pocs.tests.utils.test\_focus\_utils module ------------------------------------------- - -.. automodule:: pocs.tests.utils.test_focus_utils - :members: - :undoc-members: - :show-inheritance: - -pocs.tests.utils.test\_image\_utils module ------------------------------------------- - -.. automodule:: pocs.tests.utils.test_image_utils - :members: - :undoc-members: - :show-inheritance: - -pocs.tests.utils.test\_polar\_alignment module ----------------------------------------------- - -.. automodule:: pocs.tests.utils.test_polar_alignment - :members: - :undoc-members: - :show-inheritance: - -pocs.tests.utils.test\_utils module ------------------------------------ - -.. automodule:: pocs.tests.utils.test_utils - :members: - :undoc-members: - :show-inheritance: - - -Module contents ---------------- - -.. 
automodule:: pocs.tests.utils - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/pocs.utils.google.rst b/docs/source/pocs.utils.google.rst deleted file mode 100644 index 70e4c41ed..000000000 --- a/docs/source/pocs.utils.google.rst +++ /dev/null @@ -1,22 +0,0 @@ -pocs.utils.google package -========================= - -Submodules ----------- - -pocs.utils.google.storage module --------------------------------- - -.. automodule:: pocs.utils.google.storage - :members: - :undoc-members: - :show-inheritance: - - -Module contents ---------------- - -.. automodule:: pocs.utils.google - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/pocs.utils.images.rst b/docs/source/pocs.utils.images.rst deleted file mode 100644 index d4b5ee62f..000000000 --- a/docs/source/pocs.utils.images.rst +++ /dev/null @@ -1,46 +0,0 @@ -pocs.utils.images package -========================= - -Submodules ----------- - -pocs.utils.images.cr2 module ----------------------------- - -.. automodule:: pocs.utils.images.cr2 - :members: - :undoc-members: - :show-inheritance: - -pocs.utils.images.fits module ------------------------------ - -.. automodule:: pocs.utils.images.fits - :members: - :undoc-members: - :show-inheritance: - -pocs.utils.images.focus module ------------------------------- - -.. automodule:: pocs.utils.images.focus - :members: - :undoc-members: - :show-inheritance: - -pocs.utils.images.polar\_alignment module ------------------------------------------ - -.. automodule:: pocs.utils.images.polar_alignment - :members: - :undoc-members: - :show-inheritance: - - -Module contents ---------------- - -.. automodule:: pocs.utils.images - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/pocs.utils.rst b/docs/source/pocs.utils.rst deleted file mode 100644 index 4c84c96ee..000000000 --- a/docs/source/pocs.utils.rst +++ /dev/null @@ -1,118 +0,0 @@ -pocs.utils package -================== - -Subpackages ------------ - -.. toctree:: - - pocs.utils.google - pocs.utils.images - -Submodules ----------- - -pocs.utils.config module ------------------------- - -.. automodule:: pocs.utils.config - :members: - :undoc-members: - :show-inheritance: - -pocs.utils.data module ----------------------- - -.. automodule:: pocs.utils.data - :members: - :undoc-members: - :show-inheritance: - -pocs.utils.database module --------------------------- - -.. automodule:: pocs.utils.database - :members: - :undoc-members: - :show-inheritance: - -pocs.utils.error module ------------------------ - -.. automodule:: pocs.utils.error - :members: - :undoc-members: - :show-inheritance: - -pocs.utils.horizon module -------------------------- - -.. automodule:: pocs.utils.horizon - :members: - :undoc-members: - :show-inheritance: - -pocs.utils.logger module ------------------------- - -.. automodule:: pocs.utils.logger - :members: - :undoc-members: - :show-inheritance: - -pocs.utils.messaging module ---------------------------- - -.. automodule:: pocs.utils.messaging - :members: - :undoc-members: - :show-inheritance: - -pocs.utils.rs232 module ------------------------ - -.. automodule:: pocs.utils.rs232 - :members: - :undoc-members: - :show-inheritance: - -pocs.utils.serializers module ------------------------------ - -.. automodule:: pocs.utils.serializers - :members: - :undoc-members: - :show-inheritance: - -pocs.utils.social\_slack module -------------------------------- - -.. 
automodule:: pocs.utils.social_slack - :members: - :undoc-members: - :show-inheritance: - -pocs.utils.social\_twitter module ---------------------------------- - -.. automodule:: pocs.utils.social_twitter - :members: - :undoc-members: - :show-inheritance: - -pocs.utils.theskyx module -------------------------- - -.. automodule:: pocs.utils.theskyx - :members: - :undoc-members: - :show-inheritance: - - -Module contents ---------------- - -.. automodule:: pocs.utils - :members: - :undoc-members: - :show-inheritance: diff --git a/matplotlibrc b/matplotlibrc deleted file mode 100644 index 88a836573..000000000 --- a/matplotlibrc +++ /dev/null @@ -1 +0,0 @@ -backend: Agg diff --git a/peas/PID.py b/peas/PID.py deleted file mode 100644 index 54ead159a..000000000 --- a/peas/PID.py +++ /dev/null @@ -1,109 +0,0 @@ -from datetime import datetime - - -class PID: - """ - Pseudocode from Wikipedia: - previous_error = 0 - integral = 0 - - start: - error = setpoint - measured_value - integral = integral + error*dt - derivative = (error - previous_error)/dt - output = Kp*error + Ki*integral + Kd*derivative - previous_error = error - wait(dt) - goto start - - Attributes: - Dval (float): Description - history (list): Description - Ival (float): Description - Kd (TYPE): Description - Ki (TYPE): Description - Kp (TYPE): Description - last_interval (float): Description - last_recalc_time (TYPE): Description - max_age (TYPE): Description - output_limits (TYPE): Description - previous_error (TYPE): Description - Pval (TYPE): Description - set_point (TYPE): Description - """ - - def __init__(self, Kp=2., Ki=0., Kd=1., - set_point=None, output_limits=None, - max_age=None): - self.Kp = Kp - self.Ki = Ki - self.Kd = Kd - self.Pval = None - self.Ival = 0.0 - self.Dval = 0.0 - self.previous_error = None - self.set_point = None - if set_point: - self.set_point = set_point - self.output_limits = output_limits - self.history = [] - self.max_age = max_age - self.last_recalc_time = None - self.last_interval = 0. 
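The Wikipedia pseudocode quoted in the docstring above maps directly onto the `recalculate()` method that follows. As a quick, self-contained illustration of that same textbook update (not the deleted class itself — gains, set point, time step and the toy "plant" response are arbitrary example values):

```python
# Minimal sketch of the textbook PID update quoted in the docstring above.
# All numeric values here are arbitrary and for illustration only.
def pid_step(error, previous_error, integral, dt, Kp=2.0, Ki=0.0, Kd=1.0):
    integral += error * dt
    derivative = (error - previous_error) / dt
    output = Kp * error + Ki * integral + Kd * derivative
    return output, integral

set_point, value = 10.0, 0.0
previous_error, integral, dt = 0.0, 0.0, 1.0
for _ in range(5):
    error = set_point - value
    output, integral = pid_step(error, previous_error, integral, dt)
    previous_error = error
    value += 0.1 * output   # toy "plant" response, just to show convergence
    print(round(value, 2))
```

The class below adds the practical pieces on top of this core update: output limits, a time-stamped history so old integral contributions can be aged out, and re-tunable gains.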
- - def recalculate(self, value, interval=None, - reset_integral=False, - new_set_point=None): - if new_set_point: - self.set_point = float(new_set_point) - if reset_integral: - self.history = [] - if not interval: - if self.last_recalc_time: - now = datetime.utcnow() - interval = (now - self.last_recalc_time).total_seconds() - else: - interval = 0.0 - - # Pval - error = self.set_point - value - self.Pval = error - - # Ival - for entry in self.history: - entry[2] += interval - for entry in self.history: - if self.max_age: - if entry[2] > self.max_age: - self.history.remove(entry) - self.history.append([error, interval, 0]) - new_Ival = 0 - for entry in self.history: - new_Ival += entry[0] * entry[1] - self.Ival = new_Ival - - # Dval - if self.previous_error: - self.Dval = (error - self.previous_error) / interval - - # Output - output = self.Kp * error + self.Ki * self.Ival + self.Kd * self.Dval - if self.output_limits: - if output > max(self.output_limits): - output = max(self.output_limits) - if output < min(self.output_limits): - output = min(self.output_limits) - self.previous_error = error - - self.last_recalc_time = datetime.utcnow() - self.last_interval = interval - - return output - - def tune(self, Kp=None, Ki=None, Kd=None): - if Kp: - self.Kp = Kp - if Ki: - self.Ki = Ki - if Kd: - self.Kd = Kd diff --git a/peas/weather.py b/peas/weather.py deleted file mode 100755 index fd40940d6..000000000 --- a/peas/weather.py +++ /dev/null @@ -1,966 +0,0 @@ -#!/usr/bin/env python3 - -import numpy as np -import re -import serial -import sys -import time - -from datetime import datetime as dt -from dateutil.parser import parse as date_parser - -import astropy.units as u - -from pocs.utils.config import load_config -from pocs.utils.logger import get_root_logger -from pocs.utils.messaging import PanMessaging - -from .PID import PID - - -def get_mongodb(): - from pocs.utils.database import PanDB - return PanDB() - - -def movingaverage(interval, window_size): - """ A simple moving average function """ - window = np.ones(int(window_size)) / float(window_size) - return np.convolve(interval, window, 'same') - - -# ----------------------------------------------------------------------------- -# AAG Cloud Sensor Class -# ----------------------------------------------------------------------------- -class AAGCloudSensor(object): - - """ - This class is for the AAG Cloud Sensor device which can be communicated with - via serial commands. 
- - http://www.aagware.eu/aag/cloudwatcherNetwork/TechInfo/Rs232_Comms_v100.pdf - http://www.aagware.eu/aag/cloudwatcherNetwork/TechInfo/Rs232_Comms_v110.pdf - http://www.aagware.eu/aag/cloudwatcherNetwork/TechInfo/Rs232_Comms_v120.pdf - - Command List (from Rs232_Comms_v100.pdf) - !A = Get internal name (recieves 2 blocks) - !B = Get firmware version (recieves 2 blocks) - !C = Get values (recieves 5 blocks) - Zener voltage, Ambient Temperature, Ambient Temperature, Rain Sensor Temperature, HSB - !D = Get internal errors (recieves 5 blocks) - !E = Get rain frequency (recieves 2 blocks) - !F = Get switch status (recieves 2 blocks) - !G = Set switch open (recieves 2 blocks) - !H = Set switch closed (recieves 2 blocks) - !Pxxxx = Set PWM value to xxxx (recieves 2 blocks) - !Q = Get PWM value (recieves 2 blocks) - !S = Get sky IR temperature (recieves 2 blocks) - !T = Get sensor temperature (recieves 2 blocks) - !z = Reset RS232 buffer pointers (recieves 1 blocks) - !K = Get serial number (recieves 2 blocks) - - Return Codes - '1 ' Infra red temperature in hundredth of degree Celsius - '2 ' Infra red sensor temperature in hundredth of degree Celsius - '3 ' Analog0 output 0-1023 => 0 to full voltage (Ambient Temp NTC) - '4 ' Analog2 output 0-1023 => 0 to full voltage (LDR ambient light) - '5 ' Analog3 output 0-1023 => 0 to full voltage (Rain Sensor Temp NTC) - '6 ' Analog3 output 0-1023 => 0 to full voltage (Zener Voltage reference) - 'E1' Number of internal errors reading infra red sensor: 1st address byte - 'E2' Number of internal errors reading infra red sensor: command byte - 'E3' Number of internal errors reading infra red sensor: 2nd address byte - 'E4' Number of internal errors reading infra red sensor: PEC byte NB: the error - counters are reset after being read. - 'N ' Internal Name - 'V ' Firmware Version number - 'Q ' PWM duty cycle - 'R ' Rain frequency counter - 'X ' Switch Opened - 'Y ' Switch Closed - - Advice from the manual: - - * When communicating with the device send one command at a time and wait for - the respective reply, checking that the correct number of characters has - been received. - - * Perform more than one single reading (say, 5) and apply a statistical - analysis to the values to exclude any outlier. - - * The rain frequency measurement is the one that takes more time - 280 ms - - * The following reading cycle takes just less than 3 seconds to perform: - * Perform 5 times: - * get IR temperature - * get Ambient temperature - * get Values - * get Rain Frequency - * get PWM value - * get IR errors - * get SWITCH Status - - """ - - def __init__(self, serial_address=None, store_result=True): - self.config = load_config(config_files='pocs') - self.logger = get_root_logger() - - # Read configuration - self.cfg = self.config['weather']['aag_cloud'] - - self.safety_delay = self.cfg.get('safety_delay', 15.) 
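The command table above is essentially the whole serial protocol: write a short command, wait briefly, and regex-match the numbered reply blocks. A minimal sketch of one such exchange using pyserial — the port name, delay, and the exact reply value are assumptions for illustration, drawn only from the docstring above:

```python
import re
import time

import serial  # pyserial

# One "!T" (get sensor temperature) exchange; reply block "2" carries the
# temperature in hundredths of a degree Celsius per the docstring above.
ser = serial.Serial('/dev/ttyUSB0', 9600, timeout=2)   # assumed port
ser.write(b'!T')
time.sleep(0.2)                                        # let the device answer
raw = ser.read(ser.in_waiting).decode('utf-8', errors='ignore')
match = re.search(r'!2\s+([\d.\-]+)!', raw)
if match:
    print('Ambient temperature: {:.1f} C'.format(float(match.group(1)) / 100.0))
ser.close()
```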
- - self.db = None - if store_result: - self.db = get_mongodb() - - self.messaging = None - - # Initialize Serial Connection - if serial_address is None: - serial_address = self.cfg.get('serial_port', '/dev/ttyUSB0') - - self.logger.debug('Using serial address: {}'.format(serial_address)) - - if serial_address: - self.logger.info('Connecting to AAG Cloud Sensor') - try: - self.AAG = serial.Serial(serial_address, 9600, timeout=2) - self.logger.info(" Connected to Cloud Sensor on {}".format(serial_address)) - except OSError as e: - self.logger.error('Unable to connect to AAG Cloud Sensor') - self.logger.error(' {}'.format(e.errno)) - self.logger.error(' {}'.format(e.strerror)) - self.AAG = None - except BaseException: - self.logger.error("Unable to connect to AAG Cloud Sensor") - self.AAG = None - else: - self.AAG = None - - # Thresholds - - # Initialize Values - self.last_update = None - self.safe = None - self.ambient_temp = None - self.sky_temp = None - self.wind_speed = None - self.internal_voltage = None - self.LDR_resistance = None - self.rain_sensor_temp = None - self.PWM = None - self.errors = None - self.switch = None - self.safe_dict = None - self.hibernate = 0.500 # time to wait after failed query - - # Set Up Heater - if 'heater' in self.cfg: - self.heater_cfg = self.cfg['heater'] - else: - self.heater_cfg = { - 'low_temp': 0, - 'low_delta': 6, - 'high_temp': 20, - 'high_delta': 4, - 'min_power': 10, - 'impulse_temp': 10, - 'impulse_duration': 60, - 'impulse_cycle': 600, - } - self.heater_PID = PID(Kp=3.0, Ki=0.02, Kd=200.0, - max_age=300, - output_limits=[self.heater_cfg['min_power'], 100]) - - self.impulse_heating = None - self.impulse_start = None - - # Command Translation - self.commands = {'!A': 'Get internal name', - '!B': 'Get firmware version', - '!C': 'Get values', - '!D': 'Get internal errors', - '!E': 'Get rain frequency', - '!F': 'Get switch status', - '!G': 'Set switch open', - '!H': 'Set switch closed', - 'P\d\d\d\d!': 'Set PWM value', - '!Q': 'Get PWM value', - '!S': 'Get sky IR temperature', - '!T': 'Get sensor temperature', - '!z': 'Reset RS232 buffer pointers', - '!K': 'Get serial number', - 'v!': 'Query if anemometer enabled', - 'V!': 'Get wind speed', - 'M!': 'Get electrical constants', - '!Pxxxx': 'Set PWM value to xxxx', - } - self.expects = {'!A': '!N\s+(\w+)!', - '!B': '!V\s+([\d\.\-]+)!', - '!C': '!6\s+([\d\.\-]+)!4\s+([\d\.\-]+)!5\s+([\d\.\-]+)!', - '!D': '!E1\s+([\d\.]+)!E2\s+([\d\.]+)!E3\s+([\d\.]+)!E4\s+([\d\.]+)!', - '!E': '!R\s+([\d\.\-]+)!', - '!F': '!Y\s+([\d\.\-]+)!', - 'P\d\d\d\d!': '!Q\s+([\d\.\-]+)!', - '!Q': '!Q\s+([\d\.\-]+)!', - '!S': '!1\s+([\d\.\-]+)!', - '!T': '!2\s+([\d\.\-]+)!', - '!K': '!K(\d+)\s*\\x00!', - 'v!': '!v\s+([\d\.\-]+)!', - 'V!': '!w\s+([\d\.\-]+)!', - 'M!': '!M(.{12})', - } - self.delays = { - '!E': 0.350, - 'P\d\d\d\d!': 0.750, - } - - self.weather_entries = list() - - if self.AAG: - # Query Device Name - result = self.query('!A') - if result: - self.name = result[0].strip() - self.logger.info(' Device Name is "{}"'.format(self.name)) - else: - self.name = '' - self.logger.warning(' Failed to get Device Name') - sys.exit(1) - - # Query Firmware Version - result = self.query('!B') - if result: - self.firmware_version = result[0].strip() - self.logger.info(' Firmware Version = {}'.format(self.firmware_version)) - else: - self.firmware_version = '' - self.logger.warning(' Failed to get Firmware Version') - sys.exit(1) - - # Query Serial Number - result = self.query('!K') - if result: - self.serial_number = result[0].strip() 
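The `expects` table above pairs each command with a regex whose capture groups become the values returned by `query()`. For example, the `'!C'` (get values) pattern pulls the three analog readings out of a single reply string (the reply below is invented purely for illustration):

```python
import re

# Illustration of how a raw "!C" reply is split into numbers by the regex
# from the expects table above; the reply string is made up.
expects_C = r'!6\s+([\d\.\-]+)!4\s+([\d\.\-]+)!5\s+([\d\.\-]+)!'
raw_reply = '!6 920!4 350!5 410!'
zener, ldr, rain_ntc = (float(g) for g in re.match(expects_C, raw_reply).groups())
print(zener, ldr, rain_ntc)   # 920.0 350.0 410.0
```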
- self.logger.info(' Serial Number: {}'.format(self.serial_number)) - else: - self.serial_number = '' - self.logger.warning(' Failed to get Serial Number') - sys.exit(1) - - def get_reading(self): - """ Calls commands to be performed each time through the loop """ - weather_data = dict() - - if self.db is None: - self.db = get_mongodb() - else: - weather_data = self.update_weather() - self.calculate_and_set_PWM() - - return weather_data - - def send(self, send, delay=0.100): - - found_command = False - for cmd in self.commands.keys(): - if re.match(cmd, send): - self.logger.debug('Sending command: {}'.format(self.commands[cmd])) - found_command = True - break - if not found_command: - self.logger.warning('Unknown command: "{}"'.format(send)) - return None - - self.logger.debug(' Clearing buffer') - cleared = self.AAG.read(self.AAG.inWaiting()) - if len(cleared) > 0: - self.logger.debug(' Cleared: "{}"'.format(cleared.decode('utf-8'))) - - self.AAG.write(send.encode('utf-8')) - time.sleep(delay) - - result = None - try: - response = self.AAG.read(self.AAG.inWaiting()).decode('utf-8') - except UnicodeDecodeError: - self.logger.debug("Error reading from serial line") - else: - self.logger.debug(' Response: "{}"'.format(response)) - ResponseMatch = re.match('(!.*)\\x11\s{12}0', response) - if ResponseMatch: - result = ResponseMatch.group(1) - else: - result = response - - return result - - def query(self, send, maxtries=5): - found_command = False - for cmd in self.commands.keys(): - if re.match(cmd, send): - self.logger.debug('Sending command: {}'.format(self.commands[cmd])) - found_command = True - break - if not found_command: - self.logger.warning('Unknown command: "{}"'.format(send)) - return None - - if cmd in self.delays.keys(): - self.logger.debug(' Waiting delay time of {:.3f} s'.format(self.delays[cmd])) - delay = self.delays[cmd] - else: - delay = 0.200 - expect = self.expects[cmd] - count = 0 - result = None - while not result and (count <= maxtries): - count += 1 - result = self.send(send, delay=delay) - - MatchExpect = re.match(expect, result) - if not MatchExpect: - self.logger.debug('Did not find {} in response "{}"'.format(expect, result)) - result = None - time.sleep(self.hibernate) - else: - self.logger.debug('Found {} in response "{}"'.format(expect, result)) - result = MatchExpect.groups() - return result - - def get_ambient_temperature(self, n=5): - """ - Populates the self.ambient_temp property - - Calculation is taken from Rs232_Comms_v100.pdf section "Converting values - sent by the device to meaningful units" item 5. - """ - self.logger.debug('Getting ambient temperature') - values = [] - - for i in range(0, n): - try: - value = float(self.query('!T')[0]) - ambient_temp = value / 100. - - except Exception: - pass - else: - self.logger.debug( - ' Ambient Temperature Query = {:.1f}\t{:.1f}'.format(value, ambient_temp)) - values.append(ambient_temp) - - if len(values) >= n - 1: - self.ambient_temp = np.median(values) * u.Celsius - self.logger.debug(' Ambient Temperature = {:.1f}'.format(self.ambient_temp)) - else: - self.ambient_temp = None - self.logger.debug(' Failed to Read Ambient Temperature') - - return self.ambient_temp - - def get_sky_temperature(self, n=9): - """ - Populates the self.sky_temp property - - Calculation is taken from Rs232_Comms_v100.pdf section "Converting values - sent by the device to meaningful units" item 1. 
- - Does this n times as recommended by the "Communication operational - recommendations" section in Rs232_Comms_v100.pdf - """ - self.logger.debug('Getting sky temperature') - values = [] - for i in range(0, n): - try: - value = float(self.query('!S')[0]) / 100. - except Exception: - pass - else: - self.logger.debug(' Sky Temperature Query = {:.1f}'.format(value)) - values.append(value) - if len(values) >= n - 1: - self.sky_temp = np.median(values) * u.Celsius - self.logger.debug(' Sky Temperature = {:.1f}'.format(self.sky_temp)) - else: - self.sky_temp = None - self.logger.debug(' Failed to Read Sky Temperature') - return self.sky_temp - - def get_values(self, n=5): - """ - Populates the self.internal_voltage, self.LDR_resistance, and - self.rain_sensor_temp properties - - Calculation is taken from Rs232_Comms_v100.pdf section "Converting values - sent by the device to meaningful units" items 4, 6, 7. - """ - self.logger.debug('Getting "values"') - ZenerConstant = 3 - LDRPullupResistance = 56. - RainPullUpResistance = 1 - RainResAt25 = 1 - RainBeta = 3450. - ABSZERO = 273.15 - internal_voltages = [] - LDR_resistances = [] - rain_sensor_temps = [] - for i in range(0, n): - responses = self.query('!C') - try: - internal_voltage = 1023 * ZenerConstant / float(responses[0]) - internal_voltages.append(internal_voltage) - LDR_resistance = LDRPullupResistance / ((1023. / float(responses[1])) - 1.) - LDR_resistances.append(LDR_resistance) - r = np.log((RainPullUpResistance / - ((1023. / float(responses[2])) - 1.)) / RainResAt25) - rain_sensor_temp = 1. / ((r / RainBeta) + (1. / (ABSZERO + 25.))) - ABSZERO - rain_sensor_temps.append(rain_sensor_temp) - except Exception: - pass - - # Median Results - if len(internal_voltages) >= n - 1: - self.internal_voltage = np.median(internal_voltages) * u.volt - self.logger.debug(' Internal Voltage = {:.2f}'.format(self.internal_voltage)) - else: - self.internal_voltage = None - self.logger.debug(' Failed to read Internal Voltage') - - if len(LDR_resistances) >= n - 1: - self.LDR_resistance = np.median(LDR_resistances) * u.kohm - self.logger.debug(' LDR Resistance = {:.0f}'.format(self.LDR_resistance)) - else: - self.LDR_resistance = None - self.logger.debug(' Failed to read LDR Resistance') - - if len(rain_sensor_temps) >= n - 1: - self.rain_sensor_temp = np.median(rain_sensor_temps) * u.Celsius - self.logger.debug(' Rain Sensor Temp = {:.1f}'.format(self.rain_sensor_temp)) - else: - self.rain_sensor_temp = None - self.logger.debug(' Failed to read Rain Sensor Temp') - - return (self.internal_voltage, self.LDR_resistance, self.rain_sensor_temp) - - def get_rain_frequency(self, n=5): - """ - Populates the self.rain_frequency property - """ - self.logger.debug('Getting rain frequency') - values = [] - for i in range(0, n): - try: - value = float(self.query('!E')[0]) - self.logger.debug(' Rain Freq Query = {:.1f}'.format(value)) - values.append(value) - except Exception: - pass - if len(values) >= n - 1: - self.rain_frequency = np.median(values) - self.logger.debug(' Rain Frequency = {:.1f}'.format(self.rain_frequency)) - else: - self.rain_frequency = None - self.logger.debug(' Failed to read Rain Frequency') - return self.rain_frequency - - def get_PWM(self): - """ - Populates the self.PWM property. - - Calculation is taken from Rs232_Comms_v100.pdf section "Converting values - sent by the device to meaningful units" item 3. - """ - self.logger.debug('Getting PWM value') - try: - value = self.query('!Q')[0] - self.PWM = float(value) * 100. / 1023. 
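The same 0–1023 ↔ percent scaling is used in both directions: reading the duty cycle here, and framing a `Pxxxx!` command in `set_PWM()` below. A small worked example (the 75 % duty cycle is an arbitrary value):

```python
# Worked example of the 0-1023 <-> percent PWM scaling used here and in
# set_PWM() below; 75% is an arbitrary example duty cycle.
percent = 75.0
send_digital = int(1023.0 * percent / 100.0)     # digital value sent to the device
send_string = 'P{:04d}!'.format(send_digital)    # -> 'P0767!'
readback_percent = float(send_digital) * 100.0 / 1023.0
print(send_string, round(readback_percent, 1))   # P0767! 75.0
```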
- self.logger.debug(' PWM Value = {:.1f}'.format(self.PWM)) - except Exception: - self.PWM = None - self.logger.debug(' Failed to read PWM Value') - return self.PWM - - def set_PWM(self, percent, ntries=15): - """ - """ - count = 0 - success = False - if percent < 0.: - percent = 0. - if percent > 100.: - percent = 100. - while not success and count <= ntries: - self.logger.debug('Setting PWM value to {:.1f} %'.format(percent)) - send_digital = int(1023. * float(percent) / 100.) - send_string = 'P{:04d}!'.format(send_digital) - try: - result = self.query(send_string) - except Exception: - result = None - count += 1 - if result is not None: - self.PWM = float(result[0]) * 100. / 1023. - if abs(self.PWM - percent) > 5.0: - self.logger.debug(' Failed to set PWM value!') - time.sleep(2) - else: - success = True - self.logger.debug(' PWM Value = {:.1f}'.format(self.PWM)) - - def get_errors(self): - """ - Populates the self.IR_errors property - """ - self.logger.debug('Getting errors') - response = self.query('!D') - if response: - self.errors = {'error_1': str(int(response[0])), - 'error_2': str(int(response[1])), - 'error_3': str(int(response[2])), - 'error_4': str(int(response[3]))} - self.logger.debug(" Internal Errors: {} {} {} {}".format( - self.errors['error_1'], - self.errors['error_2'], - self.errors['error_3'], - self.errors['error_4'], - )) - - else: - self.errors = {'error_1': None, - 'error_2': None, - 'error_3': None, - 'error_4': None} - return self.errors - - def get_switch(self, maxtries=3): - """ - Populates the self.switch property - - Unlike other queries, this method has to check if the return matches a - !X or !Y pattern (indicating open and closed respectively) rather than - read a value. - """ - self.logger.debug('Getting switch status') - self.switch = None - tries = 0 - status = None - while not status: - tries += 1 - response = self.send('!F') - if re.match('!Y 1!', response): - status = 'OPEN' - elif re.match('!X 1!', response): - status = 'CLOSED' - else: - status = None - if not status and tries >= maxtries: - status = 'UNKNOWN' - self.switch = status - self.logger.debug(' Switch Status = {}'.format(self.switch)) - return self.switch - - def wind_speed_enabled(self): - """ - Method returns true or false depending on whether the device supports - wind speed measurements. - """ - self.logger.debug('Checking if wind speed is enabled') - try: - enabled = bool(self.query('v!')[0]) - if enabled: - self.logger.debug(' Anemometer enabled') - else: - self.logger.debug(' Anemometer not enabled') - except Exception: - enabled = None - return enabled - - def get_wind_speed(self, n=3): - """ - Populates the self.wind_speed property - - Based on the information in Rs232_Comms_v120.pdf document - - Medians n measurements. This isn't mentioned specifically by the manual - but I'm guessing it won't hurt. 
- """ - self.logger.debug('Getting wind speed') - if self.wind_speed_enabled(): - values = [] - for i in range(0, n): - result = self.query('V!') - if result: - value = float(result[0]) - self.logger.debug(' Wind Speed Query = {:.1f}'.format(value)) - values.append(value) - if len(values) >= 3: - self.wind_speed = np.median(values) * u.km / u.hr - self.logger.debug(' Wind speed = {:.1f}'.format(self.wind_speed)) - else: - self.wind_speed = None - else: - self.wind_speed = None - return self.wind_speed - - def send_message(self, msg, topic='weather'): - if self.messaging is None: - self.messaging = PanMessaging.create_publisher(6510) - - self.messaging.send_message(topic, msg) - - def capture(self, store_result=False, send_message=False, **kwargs): - """ Query the CloudWatcher """ - - self.logger.debug("Updating weather") - - data = {} - data['weather_sensor_name'] = self.name - data['weather_sensor_firmware_version'] = self.firmware_version - data['weather_sensor_serial_number'] = self.serial_number - - if self.get_sky_temperature(): - data['sky_temp_C'] = self.sky_temp.value - if self.get_ambient_temperature(): - data['ambient_temp_C'] = self.ambient_temp.value - self.get_values() - if self.internal_voltage: - data['internal_voltage_V'] = self.internal_voltage.value - if self.LDR_resistance: - data['ldr_resistance_Ohm'] = self.LDR_resistance.value - if self.rain_sensor_temp: - data['rain_sensor_temp_C'] = "{:.02f}".format(self.rain_sensor_temp.value) - if self.get_rain_frequency(): - data['rain_frequency'] = self.rain_frequency - if self.get_PWM(): - data['pwm_value'] = self.PWM - if self.get_errors(): - data['errors'] = self.errors - if self.get_wind_speed(): - data['wind_speed_KPH'] = self.wind_speed.value - - # Make Safety Decision - self.safe_dict = self.make_safety_decision(data) - - data['safe'] = self.safe_dict['Safe'] - data['sky_condition'] = self.safe_dict['Sky'] - data['wind_condition'] = self.safe_dict['Wind'] - data['gust_condition'] = self.safe_dict['Gust'] - data['rain_condition'] = self.safe_dict['Rain'] - - # Store current weather - data['date'] = dt.utcnow() - self.weather_entries.append(data) - - # If we get over a certain amount of entries, trim the earliest - if len(self.weather_entries) > int(self.safety_delay): - del self.weather_entries[:1] - - self.calculate_and_set_PWM() - - if send_message: - self.send_message({'data': data}, topic='weather') - - if store_result: - self.db.insert_current('weather', data) - - return data - - def AAG_heater_algorithm(self, target, last_entry): - """ - Uses the algorithm described in RainSensorHeaterAlgorithm.pdf to - determine PWM value. - - Values are for the default read cycle of 10 seconds. 
- """ - deltaT = last_entry['rain_sensor_temp_C'] - target - scaling = 0.5 - if deltaT > 8.: - deltaPWM = -40 * scaling - elif deltaT > 4.: - deltaPWM = -20 * scaling - elif deltaT > 3.: - deltaPWM = -10 * scaling - elif deltaT > 2.: - deltaPWM = -6 * scaling - elif deltaT > 1.: - deltaPWM = -4 * scaling - elif deltaT > 0.5: - deltaPWM = -2 * scaling - elif deltaT > 0.3: - deltaPWM = -1 * scaling - elif deltaT < -0.3: - deltaPWM = 1 * scaling - elif deltaT < -0.5: - deltaPWM = 2 * scaling - elif deltaT < -1.: - deltaPWM = 4 * scaling - elif deltaT < -2.: - deltaPWM = 6 * scaling - elif deltaT < -3.: - deltaPWM = 10 * scaling - elif deltaT < -4.: - deltaPWM = 20 * scaling - elif deltaT < -8.: - deltaPWM = 40 * scaling - return int(deltaPWM) - - def calculate_and_set_PWM(self): - """ - Uses the algorithm described in RainSensorHeaterAlgorithm.pdf to decide - whether to use impulse heating mode, then determines the correct PWM - value. - """ - self.logger.debug('Calculating new PWM Value') - # Get Last n minutes of rain history - now = dt.utcnow() - - entries = self.weather_entries - - self.logger.debug(' Found {} entries in last {:d} seconds.'.format( - len(entries), int(self.heater_cfg['impulse_cycle']), )) - - last_entry = self.weather_entries[-1] - rain_history = [x['rain_safe'] for x in entries if 'rain_safe' in x.keys()] - - if 'ambient_temp_C' not in last_entry.keys(): - self.logger.warning( - ' Do not have Ambient Temperature measurement. Can not determine PWM value.') - elif 'rain_sensor_temp_C' not in last_entry.keys(): - self.logger.warning( - ' Do not have Rain Sensor Temperature measurement. Can not determine PWM value.') - else: - # Decide whether to use the impulse heating mechanism - if len(rain_history) > 3 and not np.any(rain_history): - self.logger.debug(' Consistent wet/rain in history. Using impulse heating.') - if self.impulse_heating: - impulse_time = (now - self.impulse_start).total_seconds() - if impulse_time > float(self.heater_cfg['impulse_duration']): - self.logger.debug('Impulse heating on for > {:.0f} s. Turning off.', float( - self.heater_cfg['impulse_duration'])) - self.impulse_heating = False - self.impulse_start = None - else: - self.logger.debug( - ' Impulse heating has been on for {:.0f} seconds.', impulse_time) - else: - self.logger.debug(' Starting impulse heating sequence.') - self.impulse_start = now - self.impulse_heating = True - else: - self.logger.debug(' No impulse heating needed.') - self.impulse_heating = False - self.impulse_start = None - - # Set PWM Based on Impulse Method or Normal Method - if self.impulse_heating: - target_temp = float(last_entry['ambient_temp_C']) + \ - float(self.heater_cfg['impulse_temp']) - if last_entry['rain_sensor_temp_C'] < target_temp: - self.logger.debug(' Rain sensor temp < target. Setting heater to 100 %.') - self.set_PWM(100) - else: - new_PWM = self.AAG_heater_algorithm(target_temp, last_entry) - self.logger.debug( - ' Rain sensor temp > target. 
Setting heater to {:d} %.'.format(new_PWM)) - self.set_PWM(new_PWM) - else: - if last_entry['ambient_temp_C'] < self.heater_cfg['low_temp']: - deltaT = self.heater_cfg['low_delta'] - elif last_entry['ambient_temp_C'] > self.heater_cfg['high_temp']: - deltaT = self.heater_cfg['high_delta'] - else: - frac = (last_entry['ambient_temp_C'] - self.heater_cfg['low_temp']) /\ - (self.heater_cfg['high_temp'] - self.heater_cfg['low_temp']) - deltaT = self.heater_cfg['low_delta'] + frac * \ - (self.heater_cfg['high_delta'] - self.heater_cfg['low_delta']) - target_temp = last_entry['ambient_temp_C'] + deltaT - new_PWM = int(self.heater_PID.recalculate(float(last_entry['rain_sensor_temp_C']), - new_set_point=target_temp)) - self.logger.debug(' last PID interval = {:.1f} s'.format( - self.heater_PID.last_interval)) - self.logger.debug(' target={:4.1f}, actual={:4.1f}, new PWM={:3.0f}, P={:+3.0f}, I={:+3.0f} ({:2d}), D={:+3.0f}'.format( - target_temp, float(last_entry['rain_sensor_temp_C']), - new_PWM, self.heater_PID.Kp * self.heater_PID.Pval, - self.heater_PID.Ki * self.heater_PID.Ival, - len(self.heater_PID.history), - self.heater_PID.Kd * self.heater_PID.Dval, - )) - self.set_PWM(new_PWM) - - def make_safety_decision(self, current_values): - """ - Method makes decision whether conditions are safe or unsafe. - """ - self.logger.debug('Making safety decision') - self.logger.debug('Found {} weather data entries in last {:.0f} minutes'.format( - len(self.weather_entries), self.safety_delay)) - - safe = False - - # Tuple with condition,safety - cloud = self._get_cloud_safety(current_values) - - try: - wind, gust = self._get_wind_safety(current_values) - except Exception as e: - self.logger.warning('Problem getting wind safety: {}'.format(e)) - wind = ['N/A'] - gust = ['N/A'] - - rain = self._get_rain_safety(current_values) - - safe = cloud[1] & wind[1] & gust[1] & rain[1] - self.logger.debug('Weather Safe: {}'.format(safe)) - - return {'Safe': safe, - 'Sky': cloud[0], - 'Wind': wind[0], - 'Gust': gust[0], - 'Rain': rain[0]} - - def _get_cloud_safety(self, current_values): - safety_delay = self.safety_delay - - entries = self.weather_entries - threshold_cloudy = self.cfg.get('threshold_cloudy', -22.5) - threshold_very_cloudy = self.cfg.get('threshold_very_cloudy', -15.) - - sky_diff = [x['sky_temp_C'] - x['ambient_temp_C'] - for x in entries - if ('ambient_temp_C' and 'sky_temp_C') in x.keys()] - - if len(sky_diff) == 0: - self.logger.debug(' UNSAFE: no sky temperatures found') - sky_safe = False - cloud_condition = 'Unknown' - else: - if max(sky_diff) > threshold_cloudy: - self.logger.debug('UNSAFE: Cloudy in last {} min. Max sky diff {:.1f} C'.format( - safety_delay, max(sky_diff))) - sky_safe = False - else: - sky_safe = True - - last_cloud = current_values['sky_temp_C'] - current_values['ambient_temp_C'] - if last_cloud > threshold_very_cloudy: - cloud_condition = 'Very Cloudy' - elif last_cloud > threshold_cloudy: - cloud_condition = 'Cloudy' - else: - cloud_condition = 'Clear' - self.logger.debug( - 'Cloud Condition: {} (Sky-Amb={:.1f} C)'.format(cloud_condition, sky_diff[-1])) - - return cloud_condition, sky_safe - - def _get_wind_safety(self, current_values): - safety_delay = self.safety_delay - entries = self.weather_entries - - end_time = dt.utcnow() - - threshold_windy = self.cfg.get('threshold_windy', 20.) - threshold_very_windy = self.cfg.get('threshold_very_windy', 30) - - threshold_gusty = self.cfg.get('threshold_gusty', 40.) - threshold_very_gusty = self.cfg.get('threshold_very_gusty', 50.) 
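The wind assessment that follows smooths the raw readings with the `movingaverage()` helper defined at the top of this module, then compares the result against the thresholds just read. In miniature (the helper is redefined so the sketch is self-contained; the readings are made up and the thresholds match the defaults above):

```python
import numpy as np

# Same simple moving average as the module-level helper above.
def movingaverage(interval, window_size):
    window = np.ones(int(window_size)) / float(window_size)
    return np.convolve(interval, window, 'same')

wind_kph = [12.0, 14.5, 33.0, 29.5, 18.0, 22.5]   # invented readings
smoothed = movingaverage(wind_kph, 3)
threshold_windy, threshold_very_windy = 20.0, 30.0
if smoothed[-1] > threshold_very_windy:
    condition = 'Very Windy'
elif smoothed[-1] > threshold_windy:
    condition = 'Windy'
else:
    condition = 'Calm'
print(condition, round(float(smoothed[-1]), 1))
```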
- - # Wind (average and gusts) - wind_speed = [x['wind_speed_KPH'] - for x in entries - if 'wind_speed_KPH' in x.keys()] - - if len(wind_speed) == 0: - self.logger.debug(' UNSAFE: no wind speed readings found') - wind_safe = False - gust_safe = False - wind_condition = 'Unknown' - gust_condition = 'Unknown' - else: - start_time = entries[0]['date'] - if type(start_time) == str: - start_time = date_parser(entries[0]['date']) - - typical_data_interval = (end_time - start_time).total_seconds() / len(entries) - - mavg_count = int(np.ceil(120. / typical_data_interval)) # What is this 120? - wind_mavg = movingaverage(wind_speed, mavg_count) - - # Windy? - if max(wind_mavg) > threshold_very_windy: - self.logger.debug(' UNSAFE: Very windy in last {:.0f} min. Max wind speed {:.1f} kph'.format( - safety_delay, max(wind_mavg))) - wind_safe = False - else: - wind_safe = True - - if wind_mavg[-1] > threshold_very_windy: - wind_condition = 'Very Windy' - elif wind_mavg[-1] > threshold_windy: - wind_condition = 'Windy' - else: - wind_condition = 'Calm' - self.logger.debug( - ' Wind Condition: {} ({:.1f} km/h)'.format(wind_condition, wind_mavg[-1])) - - # Gusty? - if max(wind_speed) > threshold_very_gusty: - self.logger.debug(' UNSAFE: Very gusty in last {:.0f} min. Max gust speed {:.1f} kph'.format( - safety_delay, max(wind_speed))) - gust_safe = False - else: - gust_safe = True - - current_wind = current_values.get('wind_speed_KPH', 0.0) - if current_wind > threshold_very_gusty: - gust_condition = 'Very Gusty' - elif current_wind > threshold_gusty: - gust_condition = 'Gusty' - else: - gust_condition = 'Calm' - - self.logger.debug( - ' Gust Condition: {} ({:.1f} km/h)'.format(gust_condition, wind_speed[-1])) - - return (wind_condition, wind_safe), (gust_condition, gust_safe) - - def _get_rain_safety(self, current_values): - safety_delay = self.safety_delay - entries = self.weather_entries - threshold_wet = self.cfg.get('threshold_wet', 2000.) - threshold_rain = self.cfg.get('threshold_rainy', 1700.) - - # Rain - rf_value = [x['rain_frequency'] for x in entries if 'rain_frequency' in x.keys()] - - if len(rf_value) == 0: - rain_safe = False - rain_condition = 'Unknown' - else: - # Check current values - if current_values['rain_frequency'] <= threshold_rain: - rain_condition = 'Rain' - rain_safe = False - elif current_values['rain_frequency'] <= threshold_wet: - rain_condition = 'Wet' - rain_safe = False - else: - rain_condition = 'Dry' - rain_safe = True - - # If safe now, check last 15 minutes - if rain_safe: - if min(rf_value) <= threshold_rain: - self.logger.debug(' UNSAFE: Rain in last {:.0f} min.'.format(safety_delay)) - rain_safe = False - elif min(rf_value) <= threshold_wet: - self.logger.debug(' UNSAFE: Wet in last {:.0f} min.'.format(safety_delay)) - rain_safe = False - else: - rain_safe = True - - self.logger.debug(' Rain Condition: {}'.format(rain_condition)) - - return rain_condition, rain_safe diff --git a/pocs/__init__.py b/pocs/__init__.py deleted file mode 100644 index 28aa35a2b..000000000 --- a/pocs/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Licensed under a MIT license - see LICENSE.txt -""" -Panoptes Observatory Control System (POCS) is a library for controlling a -PANOPTES hardware unit. POCS provides complete automation of all observing -processes and is intended to be run in an automated fashion. 
-""" - -import pocs.version - -__copyright__ = "Copyright (c) 2017 Project PANOPTES" -__license__ = "MIT" -__summary__ = "PANOPTES Observatory Control System" -__uri__ = "https://github.com/panoptes/POCS" -__version__ = pocs.version.__version__ diff --git a/pocs/base.py b/pocs/base.py deleted file mode 100644 index 9a28cfb47..000000000 --- a/pocs/base.py +++ /dev/null @@ -1,97 +0,0 @@ -import sys - -from pocs import hardware -from pocs import __version__ -from pocs.utils import config -from pocs.utils.database import PanDB -from pocs.utils.logger import get_root_logger - -# Global vars -_config = None - - -def reset_global_config(): - """Reset the global _config to None. - - Globals such as _config make tests non-hermetic. Enable conftest.py to clear _config - in an explicit fashion. - """ - global _config - _config = None - - -class PanBase(object): - - """ Base class for other classes within the PANOPTES ecosystem - - Defines common properties for each class (e.g. logger, config). - """ - - def __init__(self, *args, **kwargs): - # Load the default and local config files - global _config - if _config is None: - ignore_local_config = kwargs.get('ignore_local_config', False) - _config = config.load_config(ignore_local=ignore_local_config) - - self.__version__ = __version__ - - # Update with run-time config - if 'config' in kwargs: - _config.update(kwargs['config']) - - self._check_config(_config) - self.config = _config - - self.logger = kwargs.get('logger') - if not self.logger: - self.logger = get_root_logger() - - self.config['simulator'] = hardware.get_simulator_names(config=self.config, kwargs=kwargs) - - # Get passed DB or set up new connection - _db = kwargs.get('db', None) - if _db is None: - # If the user requests a db_type then update runtime config - db_type = kwargs.get('db_type', None) - db_name = kwargs.get('db_name', None) - - if db_type is not None: - self.config['db']['type'] = db_type - if db_name is not None: - self.config['db']['name'] = db_name - - db_type = self.config['db']['type'] - db_name = self.config['db']['name'] - - _db = PanDB(db_type=db_type, db_name=db_name, logger=self.logger) - - self.db = _db - - def _check_config(self, temp_config): - """ Checks the config file for mandatory items """ - items_to_check = [ - 'directories', - 'mount', - 'state_machine' - ] - - for item in items_to_check: - config_item = temp_config.get(item, None) - # Warn if not found. - if config_item is None: - self.logger.critical(f'Problem looking up {item} in _check_config') - # Error if not found or empty. - if config_item is None or len(config_item) == 0: - sys.exit(f'{item} must be specified in config, exiting') - - def __getstate__(self): # pragma: no cover - d = dict(self.__dict__) - - if 'logger' in d: - del d['logger'] - - if 'db' in d: - del d['db'] - - return d diff --git a/pocs/camera/__init__.py b/pocs/camera/__init__.py deleted file mode 100644 index 3203be14c..000000000 --- a/pocs/camera/__init__.py +++ /dev/null @@ -1,205 +0,0 @@ -from collections import OrderedDict -import re -import shutil -import subprocess - -from astropy import units as u - -from pocs import hardware -from pocs.utils import error -from pocs.utils import load_module -from pocs.utils.config import load_config - -from pocs.camera.camera import AbstractCamera # pragma: no flakes -from pocs.camera.camera import AbstractGPhotoCamera # pragma: no flakes - -from pocs.utils import logger as logger_module - - -def list_connected_cameras(): - """Detect connected cameras. 
- - Uses gphoto2 to try and detect which cameras are connected. Cameras should - be known and placed in config but this is a useful utility. - - Returns: - list: A list of the ports with detected cameras. - """ - - gphoto2 = shutil.which('gphoto2') - if not gphoto2: # pragma: no cover - raise error.NotFound('The gphoto2 command is missing, please install.') - command = [gphoto2, '--auto-detect'] - result = subprocess.check_output(command) - lines = result.decode('utf-8').split('\n') - - ports = [] - - for line in lines: - camera_match = re.match(r'([\w\d\s_\.]{30})\s(usb:\d{3},\d{3})', line) - if camera_match: - # camera_name = camera_match.group(1).strip() - port = camera_match.group(2).strip() - ports.append(port) - - return ports - - -def create_cameras_from_config(config=None, logger=None, **kwargs): - """Create camera object(s) based on the config. - - Creates a camera for each camera item listed in the config. Ensures the - appropriate camera module is loaded. - - Args: - **kwargs (dict): Can pass a `cameras` object that overrides the info in - the configuration file. Can also pass `auto_detect`(bool) to try and - automatically discover the ports. - - Returns: - OrderedDict: An ordered dictionary of created camera objects, with the - camera name as key and camera instance as value. Returns an empty - OrderedDict if there is no camera configuration items. - - Raises: - error.CameraNotFound: Raised if camera cannot be found at specified port or if - auto_detect=True and no cameras are found. - error.PanError: Description - """ - if not logger: - logger = logger_module.get_root_logger() - - if not config: - config = load_config(**kwargs) - - # Helper method to first check kwargs then config - def kwargs_or_config(item, default=None): - return kwargs.get(item, config.get(item, default)) - - simulator_names = hardware.get_simulator_names(config=config, kwargs=kwargs) - logger.debug(f'simulator_names = {", ".join(simulator_names)}') - a_simulator = 'camera' in simulator_names - - cameras = OrderedDict() - camera_info = kwargs_or_config('cameras') - if not camera_info: - # cameras section either missing or empty - if not a_simulator: - logger.info('No camera information in config.') - return cameras - else: - # Create a minimal dummy camera config to get a simulated camera - camera_info = {'autodetect': False, - 'devices': [ - {'model': 'simulator'}, ]} - - logger.debug("Camera config: {}".format(camera_info)) - - auto_detect = camera_info.get('auto_detect', False) - - ports = list() - - # Lookup the connected ports if not using a simulator - if not a_simulator and auto_detect: - logger.debug("Auto-detecting ports for cameras") - try: - ports = list_connected_cameras() - except Exception as e: - logger.warning(e) - - if len(ports) == 0: - raise error.PanError( - msg="No cameras detected. For testing, use camera simulator.") - else: - logger.debug("Detected Ports: {}".format(ports)) - - primary_camera = None - - # Different models require different connections methods. - model_requires = { - 'simulator': 'port', - 'canon_gphoto2': 'port', - 'sbig': 'serial_number', - 'zwo': 'serial_number', - 'fli': 'serial_number', - } - - device_info = camera_info['devices'] - for cam_num, device_config in enumerate(device_info): - cam_name = 'Cam{:02d}'.format(cam_num) - - if not a_simulator: - # Assign an auto-detected port. 
If none are left, skip - if auto_detect: - try: - device_config['port'] = ports.pop() - except IndexError: - logger.warning("No ports left for {}, skipping.".format(cam_name)) - continue - else: - # Check for proper connection method. - model = device_config['model'] - try: - connection_method = model_requires[model] - if connection_method not in device_config: - logger.warning(f"Camera error: {connection_method} missing for {model}.") - except KeyError as e: - logger.warning(e) - - else: - logger.debug('Using camera simulator.') - # Set up a simulated camera with fully configured simulated - # focuser - device_config['model'] = 'simulator' - device_config['port'] = '/dev/camera/simulator' - device_config['focuser'] = {'model': 'simulator', - 'focus_port': '/dev/ttyFAKE', - 'initial_position': 20000, - 'autofocus_range': (40, 80), - 'autofocus_step': (10, 20), - 'autofocus_seconds': 0.1, - 'autofocus_size': 500} - device_config['filterwheel'] = {'model': 'simulator', - 'filter_names': ['one', 'deux', 'drei', 'quattro'], - 'move_time': 0.1 * u.second, - 'timeout': 0.5 * u.second} - device_config['readout_time'] = 0.5 - - # Simulator config should always ignore local settings. - device_config['ignore_local_config'] = True - - logger.debug('Creating camera: {}'.format(device_config['model'])) - - try: - module = load_module('pocs.camera.{}'.format(device_config['model'])) - logger.debug('Camera module: {}'.format(module)) - # Create the camera object - cam = module.Camera(name=cam_name, **device_config) - except error.NotFound: - logger.error(msg="Cannot find camera module: {}".format(device_config['model'])) - except Exception as e: - logger.error(msg="Cannot create camera type: {} {}".format(device_config['model'], e)) - else: - is_primary = '' - if camera_info.get('primary', '') == cam.uid: - cam.is_primary = True - primary_camera = cam - is_primary = ' [Primary]' - - logger.debug("Camera created: {} {}{}".format( - cam.name, cam.uid, is_primary)) - - cameras[cam_name] = cam - - if len(cameras) == 0: - raise error.CameraNotFound(msg="No cameras available") - - # If no camera was specified as primary use the first - if primary_camera is None: - primary_camera = list(cameras.values())[0] # First camera - primary_camera.is_primary = True - - logger.debug("Primary camera: {}", primary_camera) - logger.debug("{} cameras created", len(cameras)) - - return cameras diff --git a/pocs/camera/canon_gphoto2.py b/pocs/camera/canon_gphoto2.py deleted file mode 100644 index 53879921c..000000000 --- a/pocs/camera/canon_gphoto2.py +++ /dev/null @@ -1,158 +0,0 @@ -import os -import subprocess - -from astropy import units as u -from threading import Event -from threading import Timer - -from pocs.utils import current_time -from pocs.utils import error -from pocs.utils.images import cr2 as cr2_utils -from pocs.camera import AbstractGPhotoCamera - - -class Camera(AbstractGPhotoCamera): - - def __init__(self, *args, **kwargs): - kwargs['readout_time'] = 6.0 - kwargs['file_extension'] = 'cr2' - super().__init__(*args, **kwargs) - self.logger.debug("Connecting GPhoto2 camera") - self.connect() - self.logger.debug("{} connected".format(self.name)) - - def connect(self): - """Connect to Canon DSLR - - Gets the serial number from the camera and sets various settings - """ - self.logger.debug('Connecting to camera') - - # Get serial number - _serial_number = self.get_property('serialnumber') - if not _serial_number: - raise error.CameraNotFound("Camera not responding: {}".format(self)) - - self._serial_number = 
_serial_number - - # Properties to be set upon init. - prop2index = { - '/main/actions/viewfinder': 1, # Screen off - '/main/capturesettings/autoexposuremode': 3, # 3 - Manual; 4 - Bulb - '/main/capturesettings/continuousaf': 0, # No auto-focus - '/main/capturesettings/drivemode': 0, # Single exposure - '/main/capturesettings/focusmode': 0, # Manual (don't try to focus) - '/main/capturesettings/shutterspeed': 0, # Bulb - '/main/imgsettings/imageformat': 9, # RAW - '/main/imgsettings/imageformatcf': 9, # RAW - '/main/imgsettings/imageformatsd': 9, # RAW - '/main/imgsettings/iso': 1, # ISO 100 - '/main/settings/autopoweroff': 0, # Don't power off - '/main/settings/capturetarget': 0, # Capture to RAM, for download - '/main/settings/datetime': 'now', # Current datetime - '/main/settings/datetimeutc': 'now', # Current datetime - '/main/settings/reviewtime': 0, # Screen off after taking pictures - } - - owner_name = 'Project PANOPTES' - artist_name = self.config.get('unit_id', owner_name) - copyright = 'owner_name {}'.format(owner_name, current_time().datetime.year) - - prop2value = { - '/main/settings/artist': artist_name, - '/main/settings/copyright': copyright, - '/main/settings/ownername': owner_name, - } - - self.set_properties(prop2index, prop2value) - self._connected = True - - def take_observation(self, observation, headers=None, filename=None, *args, **kwargs): - """Take an observation - - Gathers various header information, sets the file path, and calls - `take_exposure`. Also creates a `threading.Event` object and a - `threading.Timer` object. The timer calls `process_exposure` after the - set amount of time is expired (`observation.exptime + self.readout_time`). - - Note: - If a `filename` is passed in it can either be a full path that includes - the extension, or the basename of the file, in which case the directory - path and extension will be added to the `filename` for output - - Args: - observation (~pocs.scheduler.observation.Observation): Object - describing the observation - headers (dict): Header data to be saved along with the file - filename (str, optional): Filename for saving, defaults to ISOT time stamp - **kwargs (dict): Optional keyword arguments (`exptime`) - - Returns: - threading.Event: An event to be set when the image is done processing - """ - # To be used for marking when exposure is complete (see `process_exposure`) - camera_event = Event() - - exptime, file_path, image_id, metadata = self._setup_observation(observation, - headers, - filename, - *args, - **kwargs) - - proc = self.take_exposure(seconds=exptime, filename=file_path) - - # Add most recent exposure to list - if self.is_primary: - if 'POINTING' in headers: - observation.pointing_images[image_id] = file_path.replace('.cr2', '.fits') - else: - observation.exposure_list[image_id] = file_path.replace('.cr2', '.fits') - - # Process the image after a set amount of time - wait_time = exptime + self.readout_time - t = Timer(wait_time, self.process_exposure, (metadata, camera_event, proc)) - t.name = '{}Thread'.format(self.name) - t.start() - - return camera_event - - def _start_exposure(self, seconds, filename, dark, header, *args, **kwargs): - """Take an exposure for given number of seconds and saves to provided filename - - Note: - See `scripts/take_pic.sh` - - Tested With: - * Canon EOS 100D - - Args: - seconds (u.second, optional): Length of exposure - filename (str, optional): Image is saved to this filename - """ - script_path = '{}/scripts/take_pic.sh'.format(os.getenv('POCS')) - - run_cmd = 
[script_path, self.port, str(seconds), filename] - - # Take Picture - try: - proc = subprocess.Popen(run_cmd, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - universal_newlines=True) - except error.InvalidCommand as e: - self.logger.warning(e) - except subprocess.TimeoutExpired: - self.logger.debug("Still waiting for camera") - proc.kill() - outs, errs = proc.communicate(timeout=10) - if errs is not None: - self.logger.warning(errs) - finally: - readout_args = (filename, header) - return readout_args - - def _readout(self, cr2_path, info): - """Reads out the image as a CR2 and converts to FITS""" - self.logger.debug("Converting CR2 -> FITS: {}".format(cr2_path)) - fits_path = cr2_utils.cr2_to_fits(cr2_path, headers=info, remove_cr2=False) - return fits_path diff --git a/pocs/camera/simulator/__init__.py b/pocs/camera/simulator/__init__.py deleted file mode 100644 index ccd75b2f9..000000000 --- a/pocs/camera/simulator/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from pocs.camera.simulator.dslr import Camera diff --git a/pocs/camera/simulator_sdk/__init__.py b/pocs/camera/simulator_sdk/__init__.py deleted file mode 100644 index 1a434b0c7..000000000 --- a/pocs/camera/simulator_sdk/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from pocs.camera.simulator_sdk.ccd import Camera diff --git a/pocs/core.py b/pocs/core.py deleted file mode 100644 index 6264fcb05..000000000 --- a/pocs/core.py +++ /dev/null @@ -1,744 +0,0 @@ -import os -import sys -import queue -import time -import warnings -import multiprocessing -import zmq -from contextlib import suppress - -from astropy import units as u - -from pocs.base import PanBase -from pocs.observatory import Observatory -from pocs.state.machine import PanStateMachine -from pocs.utils import current_time -from pocs.utils import get_free_space -from pocs.utils import CountdownTimer -from pocs.utils import listify -from pocs.utils import error -from pocs.utils.messaging import PanMessaging - - -class POCS(PanStateMachine, PanBase): - - """The main class representing the Panoptes Observatory Control Software (POCS). - - Interaction with a PANOPTES unit is done through instances of this class. An instance consists - primarily of an `Observatory` object, which contains the mount, cameras, scheduler, etc. - See `pocs.Observatory`. The observatory should create all attached hardware - but leave the initialization up to POCS (i.e. this class will call the observatory - `initialize` method). - - The POCS instance itself is designed to be run as a state machine via - the `run` method. - - Args: - observatory(Observatory): An instance of a `pocs.observatory.Observatory` - class. POCS will call the `initialize` method of the observatory. - state_machine_file(str): Filename of the state machine to use, defaults to - 'simple_state_table'. - messaging(bool): If messaging should be included, defaults to False. - simulator(list): A list of the different modules that can run in simulator mode. Possible - modules include: all, mount, camera, weather, night. Defaults to an empty list. 
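    Example:
        A minimal, illustrative driver script (the `simulator` and `messaging`
        arguments shown here are assumptions based on this docstring, not a
        tested recipe):

            from pocs.observatory import Observatory
            from pocs.core import POCS

            observatory = Observatory(simulator=['all'])
            pocs = POCS(observatory, messaging=False)

            pocs.initialize()    # POCS calls observatory.initialize()
            pocs.run()           # run the state machine until interrupted
            pocs.power_down()    # park and shut everything down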
- - Attributes: - name (str): Name of PANOPTES unit - observatory (`pocs.observatory.Observatory`): The `~pocs.observatory.Observatory` object - - """ - - def __init__( - self, - observatory, - state_machine_file=None, - messaging=False, - *args, **kwargs): - - # Explicitly call the base classes in the order we want - PanBase.__init__(self, *args, **kwargs) - - assert isinstance(observatory, Observatory) - - self.name = self.config.get('name', 'Generic PANOPTES Unit') - self.logger.info('Initializing PANOPTES unit - {} - {}', - self.name, - self.config['location']['name'] - ) - - self._processes = {} - - self._has_messaging = None - self.has_messaging = messaging - - self._sleep_delay = kwargs.get('sleep_delay', 2.5) # Loop delay - self._safe_delay = kwargs.get('safe_delay', 60 * 5) # Safety check delay - - if state_machine_file is None: - state_machine_file = self.config.get('state_machine', 'simple_state_table') - - self.logger.info(f'Making a POCS state machine from {state_machine_file}') - PanStateMachine.__init__(self, state_machine_file, **kwargs) - - # Add observatory object, which does the bulk of the work - self.observatory = observatory - - self._connected = True - self._initialized = False - self._interrupted = False - self.force_reschedule = False - - self._retry_attempts = kwargs.get('retry_attempts', 3) - self._obs_run_retries = self._retry_attempts - - self.status() - - self.say("Hi there!") - - @property - def is_initialized(self): - """ Indicates if POCS has been initalized or not """ - return self._initialized - - @property - def interrupted(self): - """If POCS has been interrupted - - Returns: - bool: If an interrupt signal has been received - """ - return self._interrupted - - @property - def connected(self): - """ Indicates if POCS is connected """ - return self._connected - - @property - def has_messaging(self): - return self._has_messaging - - @has_messaging.setter - def has_messaging(self, value): - self._has_messaging = value - if self._has_messaging: - self._setup_messaging() - - @property - def should_retry(self): - return self._obs_run_retries >= 0 - -################################################################################################## -# Methods -################################################################################################## - - def initialize(self): - """Initialize POCS. - - Calls the Observatory `initialize` method. - - Returns: - bool: True if all initialization succeeded, False otherwise. - """ - - if not self._initialized: - self.logger.info('*' * 80) - self.say("Initializing the system! Woohoo!") - - try: - self.logger.debug("Initializing observatory") - self.observatory.initialize() - - except Exception as e: - self.say("Oh wait. There was a problem initializing: {}".format(e)) - self.say("Since we didn't initialize, I'm going to exit.") - self.power_down() - else: - self._initialized = True - - self.status() - return self._initialized - - def status(self): - status = dict() - - try: - status['state'] = self.state - status['system'] = { - 'free_space': get_free_space().value, - } - status['observatory'] = self.observatory.status() - except Exception as e: # pragma: no cover - self.logger.warning("Can't get status: {}".format(e)) - else: - self.send_message(status, topic='STATUS') - - return status - - def say(self, msg): - """ PANOPTES Units like to talk! - - Send a message. - - Args: - msg(str): Message to be sent to topic PANCHAT. 
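        Example:
            pocs.say("Hi there!")  # logged if messaging is off, else published on PANCHAT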
- """ - if self.has_messaging is False: - self.logger.info('Unit says: {}', msg) - self.send_message(msg, topic='PANCHAT') - - def send_message(self, msg, topic='POCS'): - """ Send a message - - This will use the `self._msg_publisher` to send a message - - Note: - The `topic` and `msg` params are switched for convenience - - Arguments: - msg {str} -- Message to be sent - - Keyword Arguments: - topic {str} -- Topic to send message on (default: {'POCS'}) - """ - if self.has_messaging: - self._msg_publisher.send_message(topic, msg) - - def check_messages(self): - """ Check messages for the system - - If `self.has_messaging` is True then there is a separate process running - responsible for checking incoming zeromq messages. That process will fill - various `queue.Queue`s with messages depending on their type. This method - is a thin-wrapper around private methods that are responsible for message - dispatching based on which queue received a message. - """ - if self.has_messaging: - self._check_messages('command', self._cmd_queue) - self._check_messages('schedule', self._sched_queue) - - def power_down(self): - """Actions to be performed upon shutdown - - Note: - This method is automatically called from the interrupt handler. The definition should - include what you want to happen upon shutdown but you don't need to worry about calling - it manually. - """ - if self.connected: - self.say("I'm powering down") - self.logger.info(f'Shutting down {self.name}, please be patient and allow for exit.') - - if not self.observatory.close_dome(): - self.logger.critical('Unable to close dome!') - - # Park if needed - if self.state not in ['parking', 'parked', 'sleeping', 'housekeeping']: - # TODO(jamessynge): Figure out how to handle the situation where we have both - # mount and dome, but this code is only checking for a mount. - if self.observatory.mount.is_connected: - if not self.observatory.mount.is_parked: - self.logger.info("Parking mount") - self.park() - - if self.state == 'parking': - if self.observatory.mount.is_connected: - if self.observatory.mount.is_parked: - self.logger.info("Mount is parked, setting Parked state") - self.set_park() - - if not self.observatory.mount.is_parked: - self.logger.info('Mount not parked, parking') - self.observatory.mount.park() - - # Observatory shut down - self.observatory.power_down() - - # Shut down messaging - self.logger.debug('Shutting down messaging system') - - for name, proc in self._processes.items(): - if proc.is_alive(): - self.logger.debug('Terminating {} - PID {}'.format(name, proc.pid)) - proc.terminate() - - self._keep_running = False - self._do_states = False - self._connected = False - self.logger.info("Power down complete") - - def reset_observing_run(self): - """Reset an observing run loop. """ - self.logger.debug("Resetting observing run attempts") - self._obs_run_retries = self._retry_attempts - -################################################################################################## -# Safety Methods -################################################################################################## - - def is_safe(self, no_warning=False, horizon='observe', **kwargs): - """Checks the safety flag of the system to determine if safe. - - This will check the weather station as well as various other environmental - aspects of the system in order to determine if conditions are safe for operation. 
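        Example:
            The individual checks are combined with `all()`; with illustrative
            values such as

                is_safe_values = {'ac_power': True, 'is_dark': True,
                                  'good_weather': False, 'free_space': True}

            the method returns False, and the unit is sent to park unless it is
            already in a parked, parking, sleeping, housekeeping or ready state.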
- - Note: - This condition is called by the state machine during each transition - - Args: - no_warning (bool, optional): If a warning message should show in logs, - defaults to False. - horizon (str, optional): For night time check use given horizon, - default 'observe'. - Returns: - bool: Latest safety flag - - """ - if not self.connected: - return False - - is_safe_values = dict() - - # Check if AC power connected and return immediately if not - has_power = self.has_ac_power() - if not has_power: - return False - - is_safe_values['ac_power'] = has_power - - # Check if night time - is_safe_values['is_dark'] = self.is_dark(horizon=horizon) - - # Check weather - is_safe_values['good_weather'] = self.is_weather_safe() - - # Hard-drive space - is_safe_values['free_space'] = self.has_free_space() - - safe = all(is_safe_values.values()) - - if not safe: - if no_warning is False: - self.logger.warning('Unsafe conditions: {}'.format(is_safe_values)) - - if self.state not in ['sleeping', 'parked', 'parking', 'housekeeping', 'ready']: - self.logger.warning('Safety failed so sending to park') - self.park() - - return safe - - def is_dark(self, horizon='observe'): - """Is it dark - - Checks whether it is dark at the location provided. This checks for the config - entry `location.flat_horizon` by default. - - Args: - horizon (str, optional): Which horizon to use, 'flat''focus', or - 'observe' (default). - - Returns: - bool: Is sun below horizon at location - """ - # See if dark - we check this first because we want to know - # the sun position even if using a simulator. - is_dark = self.observatory.is_dark(horizon=horizon) - - # Check simulator - with suppress(KeyError): - if 'night' in self.config['simulator']: - is_dark = True - - self.logger.debug("Dark Check: {}".format(is_dark)) - return is_dark - - def is_weather_safe(self, stale=180): - """Determines whether current weather conditions are safe or not. 
- - Args: - stale (int, optional): Number of seconds before record is stale, defaults to 180 - - Returns: - bool: Conditions are safe (True) or unsafe (False) - - """ - - # Always assume False - self.logger.debug("Checking weather safety") - is_safe = False - - # Check if we are using weather simulator - with suppress(KeyError): - if 'weather' in self.config['simulator']: - self.logger.debug("Weather simulator always safe") - return True - - # Get current weather readings from database - try: - record = self.db.get_current('weather') - if record is None: - return False - - is_safe = record['data'].get('safe', False) - - timestamp = record['date'].replace(tzinfo=None) # current_time is timezone naive - age = (current_time().datetime - timestamp).total_seconds() - - self.logger.debug("Weather Safety: {} [{:.0f} sec old - {:%Y-%m-%d %H:%M:%S}]", - is_safe, - age, - timestamp) - - except (TypeError, KeyError) as e: - self.logger.warning("No record found in DB: {}", e) - except Exception as e: # pragma: no cover - self.logger.error("Error checking weather: {}", e) - else: - if age >= stale: - self.logger.warning("Weather record looks stale, marking unsafe.") - is_safe = False - - return is_safe - - def has_free_space(self, required_space=0.25 * u.gigabyte, low_space_percent=1.5): - """Does hard drive have disk space (>= 0.5 GB) - - Args: - required_space (u.gigabyte, optional): Amount of free space required - for operation - low_space_percent (float, optional): Give warning if space is less - than this times the required space, default 1.5, i.e., - the logs will show a warning at `.25 GB * 1.5 = 0.375 GB`. - - Returns: - bool: True if enough space - """ - req_space = required_space.to(u.gigabyte) - free_space = get_free_space() - - space_is_low = free_space.value <= (req_space.value * low_space_percent) - has_space = free_space.value >= req_space.value - - if not has_space: - self.logger.error(f'No disk space: Free {free_space:.02f}\tReq: {req_space:.02f}') - elif space_is_low: - self.logger.warning(f'Low disk space: Free {free_space:.02f}\tReq: {req_space:.02f}') - - return has_space - - def has_ac_power(self, stale=90): - """Check for system AC power. - - Power readings are done by the arduino and are placed in the metadata - database. This method looks for entries saved with type `power` and key - `main` the `current` collection. The method will also return False if - the record is older than `stale` seconds. - - Args: - stale (int, optional): Number of seconds before record is stale, - defaults to 90 seconds. - - Returns: - bool: True if system AC power is present. - """ - # Always assume False - self.logger.debug("Checking for AC power") - has_power = False - - # TODO(wtgee): figure out if we really want to simulate no power - # Check if we are using power simulator - with suppress(KeyError): - if 'power' in self.config['simulator']: - self.logger.debug("AC power simulator always safe") - return True - - # Get current power readings from database - try: - record = self.db.get_current('power') - if record is None: - self.logger.warning(f'No mains "power" reading found in database.') - - # Legacy control boards have `main`. 
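            # Illustration (hypothetical record, keyed like the simulated telemetry
            # board elsewhere in this repository):
            #     record['data'] -> {'computer': 1, 'fan': 1, 'mount': 1,
            #                        'cameras': 1, 'weather': 1, 'main': 1}
            # The loop below accepts either the legacy 'main' key or the newer
            # 'mains' key, so bool(record['data']['main']) -> True marks AC power
            # as present.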
- has_power = False # Assume not - for power_key in ['main', 'mains']: - with suppress(KeyError): - has_power = bool(record['data'][power_key]) - - timestamp = record['date'].replace(tzinfo=None) # current_time is timezone naive - age = (current_time().datetime - timestamp).total_seconds() - - self.logger.debug("Power Safety: {} [{:.0f} sec old - {:%Y-%m-%d %H:%M:%S}]", - has_power, - age, - timestamp) - - except (TypeError, KeyError) as e: - self.logger.warning("No record found in DB: {}", e) - except Exception as e: # pragma: no cover - self.logger.error("Error checking weather: {}", e) - else: - if age > stale: - self.logger.warning("Power record looks stale, marking unsafe.") - has_power = False - - if not has_power: - self.logger.critical('AC power not detected.') - - return has_power - - -################################################################################################## -# Convenience Methods -################################################################################################## - - def sleep(self, delay=2.5, with_status=True, **kwargs): - """ Send POCS to sleep - - Loops for `delay` number of seconds. If `delay` is more than 10.0 seconds, - `check_messages` will be called every 10.0 seconds in order to allow for - interrupt. - - Keyword Arguments: - delay {float} -- Number of seconds to sleep (default: 2.5) - with_status {bool} -- Show system status while sleeping - (default: {True if delay > 2.0}) - """ - if delay is None: - delay = self._sleep_delay - - if with_status and delay > 2.0: - self.status() - - # If delay is greater than 10 seconds check for messages during wait - if delay >= 10.0: - while delay >= 10.0: - self.check_messages() - # If we shutdown leave loop - if self.connected is False: - return - - time.sleep(10.0) - delay -= 10.0 - - if delay > 0.0: - time.sleep(delay) - - def wait_for_events(self, - events, - timeout, - sleep_delay=1 * u.second, - status_interval=10 * u.second, - msg_interval=30 * u.second, - event_type='generic'): - """Wait for event(s) to be set. - - This method will wait for a maximum of `timeout` seconds for all of the - `events` to complete. - - Will check at least every `sleep_delay` seconds for the events to be done, - and also for interrupts and bad weather. Will log debug messages approximately - every `status_interval` seconds, and will output status messages approximately - every `msg_interval` seconds. - - Args: - events (list(`threading.Event`)): An Event or list of Events to wait on. - timeout (float|`astropy.units.Quantity`): Timeout in seconds to wait for events. - sleep_delay (float, optional): Time in seconds between event checks. - status_interval (float, optional): Time in seconds between status checks of the system. - msg_interval (float, optional): Time in seconds between sending of status messages. - event_type (str, optional): The type of event, used for outputting in log messages, - default 'generic'. - - Raises: - error.Timeout: Raised if events have not all been set before `timeout` seconds. - """ - events = listify(events) - - # Remove units from these values. - if isinstance(timeout, u.Quantity): - timeout = timeout.to(u.second).value - - if isinstance(sleep_delay, u.Quantity): - sleep_delay = sleep_delay.to(u.second).value - - # ADD units to these values. Ugly. 
- if not isinstance(status_interval, u.Quantity): - status_interval = status_interval * u.second - - if not isinstance(msg_interval, u.Quantity): - msg_interval = msg_interval * u.second - - timer = CountdownTimer(timeout) - - start_time = current_time() - next_status_time = start_time + status_interval - next_msg_time = start_time + msg_interval - - while not all([event.is_set() for event in events]): - self.check_messages() - if self.interrupted: - self.logger.info("Waiting for events has been interrupted") - break - - now = current_time() - if now >= next_msg_time: - elapsed_secs = (now - start_time).to(u.second).value - self.logger.debug('Waiting for {} events: {} seconds elapsed', - event_type, - round(elapsed_secs)) - next_msg_time += msg_interval - now = current_time() - - if now >= next_status_time: - self.status() - next_status_time += status_interval - now = current_time() - - if timer.expired(): - raise error.Timeout("Timedout waiting for {} event".format(event_type)) - - # Sleep for a little bit. - time.sleep(sleep_delay) - - def wait_until_safe(self, **kwargs): - """ Waits until weather is safe. - - This will wait until a True value is returned from the safety check, - blocking until then. - """ - while not self.is_safe(no_warning=True, **kwargs): - self.sleep(delay=self._safe_delay, **kwargs) - -################################################################################################## -# Class Methods -################################################################################################## - - @classmethod - def check_environment(cls): - """ Checks to see if environment is set up correctly - - There are a number of environmental variables that are expected - to be set in order for PANOPTES to work correctly. This method just - sanity checks our environment and shuts down otherwise. 
- - PANDIR Base directory for PANOPTES - POCS Base directory for POCS - """ - if sys.version_info[:2] < (3, 0): # pragma: no cover - warnings.warn("POCS requires Python 3.x to run") - - pandir = os.getenv('PANDIR') - if not os.path.exists(pandir): - sys.exit("$PANDIR dir does not exist or is empty: {}".format(pandir)) - - pocs = os.getenv('POCS') - if pocs is None: # pragma: no cover - sys.exit('Please make sure $POCS environment variable is set') - - if not os.path.exists(pocs): - sys.exit("$POCS directory does not exist or is empty: {}".format(pocs)) - - if not os.path.exists("{}/logs".format(pandir)): - print("Creating log dir at {}/logs".format(pandir)) - os.makedirs("{}/logs".format(pandir)) - -################################################################################################## -# Private Methods -################################################################################################## - - def _check_messages(self, queue_type, q): - cmd_dispatch = { - 'command': { - 'park': self._interrupt_and_park, - 'shutdown': self._interrupt_and_shutdown, - }, - 'schedule': {} - } - - while True: - try: - msg_obj = q.get_nowait() - call_method = msg_obj.get('message', '') - # Lookup and call the method - self.logger.critical(f'Message received: {queue_type} {call_method}') - cmd_dispatch[queue_type][call_method]() - except queue.Empty: - break - except KeyError: - pass - except Exception as e: - self.logger.warning('Problem calling method from messaging: {}'.format(e)) - else: - break - - def _interrupt_and_park(self): - self.logger.critical('Park interrupt received') - self._interrupted = True - self.park() - - def _interrupt_and_shutdown(self): - self.logger.critical('Shutdown command received') - self._interrupted = True - self.power_down() - - def _setup_messaging(self): - - cmd_port = self.config['messaging']['cmd_port'] - msg_port = self.config['messaging']['msg_port'] - - def create_forwarder(port): - try: - PanMessaging.create_forwarder(port, port + 1) - except Exception: - pass - - cmd_forwarder_process = multiprocessing.Process( - target=create_forwarder, args=(cmd_port,), name='CmdForwarder') - cmd_forwarder_process.start() - - msg_forwarder_process = multiprocessing.Process( - target=create_forwarder, args=(msg_port,), name='MsgForwarder') - msg_forwarder_process.start() - - self._do_cmd_check = True - self._cmd_queue = multiprocessing.Queue() - self._sched_queue = multiprocessing.Queue() - - self._msg_publisher = PanMessaging.create_publisher(msg_port) - - def check_message_loop(cmd_queue): - cmd_subscriber = PanMessaging.create_subscriber(cmd_port + 1) - - poller = zmq.Poller() - poller.register(cmd_subscriber.socket, zmq.POLLIN) - - try: - while self._do_cmd_check: - # Poll for messages - sockets = dict(poller.poll(500)) # 500 ms timeout - - if cmd_subscriber.socket in sockets and \ - sockets[cmd_subscriber.socket] == zmq.POLLIN: - - topic, msg_obj = cmd_subscriber.receive_message(flags=zmq.NOBLOCK) - - # Put the message in a queue to be processed - if topic == 'POCS-CMD': - cmd_queue.put(msg_obj) - - time.sleep(1) - except KeyboardInterrupt: - pass - - self.logger.debug('Starting command message loop') - check_messages_process = multiprocessing.Process( - target=check_message_loop, args=(self._cmd_queue,)) - check_messages_process.name = 'MessageCheckLoop' - check_messages_process.start() - self.logger.debug('Command message subscriber set up on port {}'.format(cmd_port)) - - self._processes = { - 'check_messages': check_messages_process, - 'cmd_forwarder': 
cmd_forwarder_process, - 'msg_forwarder': msg_forwarder_process, - } diff --git a/pocs/filterwheel/__init__.py b/pocs/filterwheel/__init__.py deleted file mode 100644 index f14d44312..000000000 --- a/pocs/filterwheel/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from pocs.filterwheel.filterwheel import AbstractFilterWheel # pragma: no flakes diff --git a/pocs/focuser/__init__.py b/pocs/focuser/__init__.py deleted file mode 100644 index 73f849a65..000000000 --- a/pocs/focuser/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from pocs.focuser.focuser import AbstractFocuser # pragma: no flakes diff --git a/pocs/mount/__init__.py b/pocs/mount/__init__.py deleted file mode 100644 index 8d047c166..000000000 --- a/pocs/mount/__init__.py +++ /dev/null @@ -1,99 +0,0 @@ -from glob import glob - -from pocs.mount.mount import AbstractMount # pragma: no flakes -from pocs.utils.config import load_config -from pocs.utils import error -from pocs.utils import load_module -from pocs.utils.location import create_location_from_config -from pocs.utils.logger import get_root_logger - - -def create_mount_from_config(config=None, - mount_info=None, - earth_location=None, - logger=None, - *args, **kwargs): - """Create a mount instance based on the provided config. - - Creates an instance of the AbstractMount sub-class in the module specified in the config. - Specifically, the class must be in a file called pocs/mount/.py, - and the class must be called Mount. - - Args: - config: A dictionary of name to value, as produced by `panoptes.utils.config.load_config`. - mount_info: Optional param which overrides the 'mount' entry in config if provided. - Useful for testing. - earth_location: `astropy.coordinates.EarthLocation` instance, representing the - location of the mount on the Earth. If not specified, the config must include the - observatory's location (Latitude, Longitude and Altitude above mean sea level). - Useful for testing. - logger (`logging`|None, optional): A python logging instance. - *args: Other positional args will be passed to the concrete class specified in the config. - **kwargs: Other keyword args will be passed to the concrete class specified in the config. - - Returns: - An instance of the Mount class if the config (or mount_info) is complete. `None` if neither - mount_info nor config['mount'] is provided. - - Raises: - error.MountNotFound: Exception raised when mount cannot be created - because of incorrect configuration. - """ - if logger is None: - logger = get_root_logger() - - if not config: - config = load_config(**kwargs) - - # If mount_info was not passed as a paramter, check config. - if mount_info is None: - logger.debug('No mount info provided, using values from config.') - try: - mount_info = config['mount'] - except KeyError: - raise error.MountNotFound('No mount information in config, cannot create.') - - # If earth_location was not passed as a paramter, check config. - if earth_location is None: - logger.debug('No location provided, using values from config.') - - # Get detail from config. 
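    # Illustration (hypothetical values): a minimal 'mount' config block that
    # passes the driver and serial checks below could look like
    #     mount:
    #         driver: ioptron
    #         serial:
    #             port: /dev/ttyUSB0
    # i.e. the driver names a module pocs/mount/<driver>.py, and the serial port
    # must resolve on disk unless the driver is 'simulator' (or 'bisque', which
    # needs no serial port).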
- site_details = create_location_from_config(config) - earth_location = site_details['earth_location'] - - driver = mount_info.get('driver') - if not driver or not isinstance(driver, str): - raise error.MountNotFound('Mount info in config is missing a driver name.') - - model = mount_info.get('model', driver) - logger.debug(f'Mount: driver={driver} model={model}') - - if driver != 'simulator': - # See if we have a serial connection - try: - port = mount_info['serial']['port'] - logger.debug(f'Looking for mount {driver} on {port}.') - if port is None or len(glob(port)) == 0: - msg = f'Mount port ({port}) not available. Use simulator = mount for simulator.' - raise error.MountNotFound(msg=msg) - except KeyError: - # Note: see Issue #866 - if driver == 'bisque': - logger.debug(f'Driver specifies a bisque mount type, no serial port needed.') - else: - msg = 'Mount port not specified in config file. Use simulator=mount for simulator.' - raise error.MountNotFound(msg=msg) - - logger.debug(f'Loading mount driver: pocs.mount.{driver}') - - try: - module = load_module('pocs.mount.{}'.format(driver)) - except error.NotFound as e: - raise error.MountNotFound(e) - - # Make the mount include site information - mount = module.Mount(config=config, location=earth_location, *args, **kwargs) - - logger.info(f'{driver} mount created') - - return mount diff --git a/pocs/scheduler/__init__.py b/pocs/scheduler/__init__.py deleted file mode 100644 index 1cd4a557e..000000000 --- a/pocs/scheduler/__init__.py +++ /dev/null @@ -1,74 +0,0 @@ -import os - -from astropy import units as u - -from pocs.scheduler.constraint import Altitude -from pocs.scheduler.constraint import Duration -from pocs.scheduler.constraint import MoonAvoidance -from pocs.scheduler.scheduler import BaseScheduler # pragma: no flakes -from pocs.utils.config import load_config -from pocs.utils import error -from pocs.utils import horizon as horizon_utils -from pocs.utils import load_module -from pocs.utils.location import create_location_from_config -from pocs.utils.logger import get_root_logger - - -def create_scheduler_from_config(config=None, observer=None, *args, **kwargs): - """ Sets up the scheduler that will be used by the observatory """ - - logger = get_root_logger() - - if not config: - config = load_config(**kwargs) - - if 'scheduler' not in config: - logger.info("No scheduler in config") - return None - - if not observer: - logger.debug(f'No Observer provided, creating from config.') - site_details = create_location_from_config(config) - observer = site_details['observer'] - - scheduler_config = config.get('scheduler', {}) - scheduler_type = scheduler_config.get('type', 'dispatch') - - # Read the targets from the file - fields_file = scheduler_config.get('fields_file', 'simple.yaml') - fields_path = os.path.join(config['directories']['targets'], fields_file) - logger.debug('Creating scheduler: {}'.format(fields_path)) - - if os.path.exists(fields_path): - - try: - # Load the required module - module = load_module( - 'pocs.scheduler.{}'.format(scheduler_type)) - - obstruction_list = config['location'].get('obstructions', list()) - default_horizon = config['location'].get('horizon', 30 * u.degree) - - horizon_line = horizon_utils.Horizon( - obstructions=obstruction_list, - default_horizon=default_horizon.value - ) - - # Simple constraint for now - constraints = [ - Altitude(horizon=horizon_line), - MoonAvoidance(), - Duration(default_horizon, weight=5.0) - ] - - # Create the Scheduler instance - scheduler = module.Scheduler( - observer, 
fields_file=fields_path, constraints=constraints) - logger.debug("Scheduler created") - except error.NotFound as e: - raise error.NotFound(msg=e) - else: - raise error.NotFound( - msg="Fields file does not exist: {}".format(fields_file)) - - return scheduler diff --git a/pocs/serial_handlers/__init__.py b/pocs/serial_handlers/__init__.py deleted file mode 100644 index 1352f70b1..000000000 --- a/pocs/serial_handlers/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -"""The protocol_*.py files in this package are based on PySerial's file -test/handlers/protocol_test.py, modified for different behaviors. -The call serial.serial_for_url("XYZ://") looks for a class Serial in a -file named protocol_XYZ.py in this package (i.e. directory). -""" diff --git a/pocs/serial_handlers/protocol_arduinosimulator.py b/pocs/serial_handlers/protocol_arduinosimulator.py deleted file mode 100644 index b931d293a..000000000 --- a/pocs/serial_handlers/protocol_arduinosimulator.py +++ /dev/null @@ -1,523 +0,0 @@ -"""Provides a simple simulator for telemetry_board.ino or camera_board.ino. - -We use the pragma "no cover" in several places that happen to never be -reached or that would only be reached if the code was called directly, -i.e. not in the way it is intended to be used. -""" - -import copy -import datetime -import json -import queue -import random -from serial import serialutil -import threading -import time -import urllib - -from pocs.tests import serial_handlers -import pocs.utils.logger - - -def _drain_queue(q): - cmd = None - while not q.empty(): - cmd = q.get_nowait() - return cmd # Present just for debugging. - - -class ArduinoSimulator: - """Simulates the serial behavior of the PANOPTES Arduino sketches. - - The RS-232 connection is simulated with an input and output queue of bytes. This class provides - a run function which can be called from a Thread to execute. Every two seconds while running it - will generate another json output line, and then send that to the json_queue in small chunks - at a rate similar to 9600 baud, the rate used by our Arduino sketches. - """ - - def __init__(self, message, relay_queue, json_queue, chunk_size, stop, logger): - """ - Args: - message: The message to be sent (millis and report_num will be added). - relay_queue: The queue.Queue instance from which relay command - bytes are read and acted upon. Elements are of type bytes. - json_queue: The queue.Queue instance to which json messages - (serialized to bytes) are written at ~9600 baud. Elements - are of type bytes (i.e. each element is a sequence of bytes of - length up to chunk_size). - chunk_size: The number of bytes to write to json_queue at a time. - stop: a threading.Event which is checked to see if run should stop executing. - logger: the Python logger to use for reporting messages. - """ - self.message = copy.deepcopy(message) - self.relay_queue = relay_queue - self.json_queue = json_queue - self.stop = stop - self.logger = logger - # Time between producing messages. - self.message_delta = datetime.timedelta(seconds=2) - self.next_message_time = None - # Size of a chunk of bytes. - self.chunk_size = chunk_size - # Interval between outputing chunks of bytes. 
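        # For example, with the default chunk_size of 20 bytes (the default set in
        # _create_simulator below): chunks_per_second = 1000.0 / 20 = 50, so
        # chunk_interval = 0.02 s, matching the ~1000 bytes/s used here to
        # approximate 9600 baud.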
- chunks_per_second = 1000.0 / self.chunk_size - chunk_interval = 1.0 / chunks_per_second - self.logger.debug('chunks_per_second={} chunk_interval={}', chunks_per_second, - chunk_interval) - self.chunk_delta = datetime.timedelta(seconds=chunk_interval) - self.next_chunk_time = None - self.pending_json_bytes = bytearray() - self.pending_relay_bytes = bytearray() - self.command_lines = [] - self.start_time = datetime.datetime.now() - self.report_num = 0 - self.logger.info('ArduinoSimulator created') - - def __del__(self): - if not self.stop.is_set(): # pragma: no cover - self.logger.critical('ArduinoSimulator.__del__ stop is NOT set') - - def run(self): - """Produce messages periodically and emit their bytes at a limited rate.""" - self.logger.info('ArduinoSimulator.run ENTER') - # Produce a message right away, but remove a random number of bytes at the start to reflect - # what happens when we connect at a random time to the Arduino. - now = datetime.datetime.now() - self.next_chunk_time = now - self.next_message_time = now + self.message_delta - b = self.generate_next_message_bytes(now) - cut = random.randrange(len(b)) - if cut > 0: - self.logger.info('Cutting off the leading {} bytes of the first message', - cut) - b = b[cut:] - self.pending_json_bytes.extend(b) - # Now two interleaved loops: - # 1) Generate messages every self.message_delta - # 2) Emit a chunk of bytes from pending_json_bytes every self.chunk_delta. - # Clearly we need to emit all the bytes from pending_json_bytes at least - # as fast as we append new messages to it, else we'll have a problem - # (i.e. the simulated baud rate will be too slow for the output rate). - while True: - if self.stop.is_set(): - self.logger.info('Returning from ArduinoSimulator.run EXIT') - return - now = datetime.datetime.now() - if now >= self.next_chunk_time: - self.output_next_chunk(now) - if now >= self.next_message_time: - self.generate_next_message(now) - if self.pending_json_bytes and self.next_chunk_time < self.next_message_time: - next_time = self.next_chunk_time - else: - next_time = self.next_message_time - self.read_relay_queue_until(next_time) - - def handle_pending_relay_bytes(self): - """Process complete relay commands.""" - newline = b'\n' - while True: - index = self.pending_relay_bytes.find(newline) - if index < 0: - break - line = str(self.pending_relay_bytes[0:index], 'ascii') - self.logger.info(f'Received command: {line}') - del self.pending_relay_bytes[0:index + 1] - self.command_lines.append(line) - if self.pending_relay_bytes: - self.logger.info(f'Accumulated {len(self.pending_relay_bytes)} bytes.') - - def read_relay_queue_until(self, next_time): - """Read and process relay queue bytes until time for the next action.""" - while True: - now = datetime.datetime.now() - if now >= next_time: - # Already reached the time for the next main loop event, - # so return to repeat the main loop. - return - remaining = (next_time - now).total_seconds() - assert remaining > 0 - self.logger.info('ArduinoSimulator.read_relay_queue_until remaining={}', remaining) - try: - b = self.relay_queue.get(block=True, timeout=remaining) - assert isinstance(b, (bytes, bytearray)) - self.pending_relay_bytes.extend(b) - self.handle_pending_relay_bytes() - # Fake a baud rate for reading by waiting based on the - # number of bytes we just read. - time.sleep(1.0 / 1000 * len(b)) - except queue.Empty: - # Not returning here so that the return above is will be - # hit every time this method executes. 
- pass - - def output_next_chunk(self, now): - """Output one chunk of pending json bytes.""" - self.next_chunk_time = now + self.chunk_delta - if len(self.pending_json_bytes) == 0: - return - last = min(self.chunk_size, len(self.pending_json_bytes)) - chunk = bytes(self.pending_json_bytes[0:last]) - del self.pending_json_bytes[0:last] - if self.json_queue.full(): - self.logger.info('Dropping chunk because the queue is full') - return - self.json_queue.put_nowait(chunk) - self.logger.debug('output_next_chunk -> {}', chunk) - - def generate_next_message(self, now): - """Append the next message to the pending bytearray and scheduled the next message.""" - b = self.generate_next_message_bytes(now) - self.pending_json_bytes.extend(b) - self.next_message_time = datetime.datetime.now() + self.message_delta - - def generate_next_message_bytes(self, now): - """Generate the next message (report) from the simulated Arduino.""" - # Not worrying here about emulating the 32-bit nature of millis (wraps in 49 days) - elapsed = int((now - self.start_time).total_seconds() * 1000) - self.report_num += 1 - self.message['millis'] = elapsed - self.message['report_num'] = self.report_num - if self.command_lines: - self.message['commands'] = self.command_lines - self.command_lines = [] - - s = json.dumps(self.message) + '\r\n' - if 'commands' in self.message: - del self.message['commands'] - - s = s.replace('"Convert to NaN"', 'NaN', 1) - s = s.replace('"Convert to nan"', 'nan', 1) - self.logger.debug('generate_next_message -> {!r}', s) - b = s.encode(encoding='ascii') - return b - - -class FakeArduinoSerialHandler(serial_handlers.NoOpSerial): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.logger = pocs.utils.logger.get_root_logger() - self.simulator_thread = None - self.relay_queue = queue.Queue(maxsize=1) - self.json_queue = queue.Queue(maxsize=1) - self.json_bytes = bytearray() - self.stop = threading.Event() - self.stop.set() - self.device_simulator = None - - def __del__(self): - if self.simulator_thread: # pragma: no cover - self.logger.critical('ArduinoSimulator.__del__ simulator_thread is still present') - self.stop.set() - self.simulator_thread.join(timeout=3.0) - - def open(self): - """Open port. - - Raises: - SerialException if the port cannot be opened. - """ - if not self.is_open: - self.is_open = True - self._reconfigure_port() - - def close(self): - """Close port immediately.""" - self.is_open = False - self._reconfigure_port() - - @property - def in_waiting(self): - """The number of input bytes available to read immediately.""" - if not self.is_open: - raise serialutil.portNotOpenError - # Not an accurate count because the elements of self.json_queue are arrays, not individual - # bytes. - return len(self.json_bytes) + self.json_queue.qsize() - - def reset_input_buffer(self): - """Flush input buffer, discarding all it’s contents.""" - self.json_bytes.clear() - _drain_queue(self.json_queue) - - def read(self, size=1): - """Read size bytes. - - If a timeout is set it may return fewer characters than requested. - With no timeout it will block until the requested number of bytes - is read. - - Args: - size: Number of bytes to read. - - Returns: - Bytes read from the port, of type 'bytes'. - """ - if not self.is_open: - raise serialutil.portNotOpenError - - # Not checking if the config is OK, so will try to read from a possibly - # empty queue if using the wrong baudrate, etc. This is deliberate. 
- - response = bytearray() - timeout_obj = serialutil.Timeout(self.timeout) - while True: - b = self._read1(timeout_obj) - if b: - response.extend(b) - if size is not None and len(response) >= size: - break - else: # pragma: no cover - # The timeout expired while in _read1. - break - if timeout_obj.expired(): # pragma: no cover - break - response = bytes(response) - return response - - def readline(self): - """Read and return one line from the simulator. - - This override exists just to support logging of the line. - """ - line = super().readline() - self.logger.debug('FakeArduinoSerialHandler.readline -> {!r}', line) - return line - - @property - def out_waiting(self): - """The number of bytes in the output buffer.""" - if not self.is_open: - raise serialutil.portNotOpenError - # Not an accurate count because the elements of self.relay_queue are arrays, not individual - # bytes. - return self.relay_queue.qsize() - - def reset_output_buffer(self): - """Clear output buffer. - - Aborts the current output, discarding all that is in the output buffer. - """ - if not self.is_open: - raise serialutil.portNotOpenError - _drain_queue(self.relay_queue) - - def flush(self): - """Write the buffered data to the output device. - - We interpret that here as waiting until the simulator has taken all of the - entries from the queue. - """ - if not self.is_open: - raise serialutil.portNotOpenError - while not self.relay_queue.empty(): - time.sleep(0.01) - - def write(self, data): - """Write the bytes data to the port. - - Args: - data: The data to write (bytes or bytearray instance). - - Returns: - Number of bytes written. - - Raises: - SerialTimeoutException: In case a write timeout is configured for - the port and the time is exceeded. - """ - if not isinstance(data, (bytes, bytearray)): - raise ValueError('write takes bytes') # pragma: no cover - data = bytes(data) # Make sure it can't change. - self.logger.info('FakeArduinoSerialHandler.write({!r})', data) - try: - for n in range(len(data)): - one_byte = data[n:n + 1] - self.relay_queue.put(one_byte, block=True, timeout=self.write_timeout) - return len(data) - except queue.Full: # pragma: no cover - # This exception is "lossy" in that the caller can't tell how much was written. - raise serialutil.writeTimeoutError - - # -------------------------------------------------------------------------- - - @property - def is_config_ok(self): - """Does the caller ask for the correct serial device config?""" - # The default Arduino data, parity and stop bits are: 8 data bits, no parity, one stop bit. - v = (self.baudrate == 9600 and self.bytesize == serialutil.EIGHTBITS and - self.parity == serialutil.PARITY_NONE and not self.rtscts and not self.dsrdtr) - - # All existing tests ensure the config is OK, so we never log here. - if not v: # pragma: no cover - self.logger.critical('Serial config is not OK: {!r}', (self.get_settings(), )) - - return v - - def _read1(self, timeout_obj): - """Read 1 byte of input, of type bytes.""" - - # _read1 is currently called only from read(), which checks that the - # serial device is open, so is_open is always true. - if not self.is_open: # pragma: no cover - raise serialutil.portNotOpenError - - if not self.json_bytes: - try: - entry = self.json_queue.get(block=True, timeout=timeout_obj.time_left()) - assert isinstance(entry, bytes) - self.json_bytes.extend(entry) - except queue.Empty: - return None - - # Unless something has gone wrong, json_bytes is always non-empty here. 
- if not self.json_bytes: # pragma: no cover - return None - - c = bytes(self.json_bytes[0:1]) - del self.json_bytes[0:1] - return c - - # -------------------------------------------------------------------------- - # There are a number of methods called by SerialBase that need to be - # implemented by sub-classes, assuming their calls haven't been blocked - # by replacing the calling methods/properties. These are no-op - # implementations. - - def _reconfigure_port(self): - """Reconfigure the open port after a property has been changed. - - If you need to know which property has been changed, override the - setter for the appropriate properties. - """ - need_thread = self.is_open and self.is_config_ok - if need_thread and not self.simulator_thread: - _drain_queue(self.relay_queue) - _drain_queue(self.json_queue) - self.json_bytes.clear() - self.stop.clear() - params = self._params_from_url(self.portstr) - self._create_simulator(params) - self.simulator_thread = threading.Thread( - name='Device Simulator', target=lambda: self.device_simulator.run(), daemon=True) - self.simulator_thread.start() - elif self.simulator_thread and not need_thread: - self.stop.set() - self.simulator_thread.join(timeout=30.0) - if self.simulator_thread.is_alive(): - # Not a SerialException, but a test infrastructure error. - raise Exception(self.simulator_thread.name + ' thread did not stop!') # pragma: no cover - self.simulator_thread = None - self.device_simulator = None - _drain_queue(self.relay_queue) - _drain_queue(self.json_queue) - self.json_bytes.clear() - - def _update_rts_state(self): - """Handle rts being set to some value. - - "self.rts = value" has been executed, for some value. This may not - have changed the value. - """ - # We never set rts in our tests, so this doesn't get executed. - pass # pragma: no cover - - def _update_dtr_state(self): - """Handle dtr being set to some value. - - "self.dtr = value" has been executed, for some value. This may not - have changed the value. - """ - # We never set dtr in our tests, so this doesn't get executed. - pass # pragma: no cover - - def _update_break_state(self): - """Handle break_condition being set to some value. - - "self.break_condition = value" has been executed, for some value. - This may not have changed the value. - Note that break_condition is set and then cleared by send_break(). - """ - # We never set break_condition in our tests, so this doesn't get executed. - pass # pragma: no cover - - # -------------------------------------------------------------------------- - # Internal (non-standard) methods. - - def _params_from_url(self, url): - """Extract various params from the URL.""" - expected = 'expected a string in the form "arduinosimulator://[?board=]"' - parts = urllib.parse.urlparse(url) - - # Unless we force things (break the normal protocol), scheme will always - # be 'arduinosimulator'. - if parts.scheme != 'arduinosimulator': - raise Exception(expected + ': got scheme {!r}'.format(parts.scheme)) # pragma: no cover - int_param_names = {'chunk_size', 'read_buffer_size', 'write_buffer_size'} - params = {} - for option, values in urllib.parse.parse_qs(parts.query, True).items(): - if option == 'board' and len(values) == 1: - params[option] = values[0] - elif option == 'name' and len(values) == 1: - # This makes it easier for tests to confirm the right serial device has - # been opened. 
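                # (For example, the tests open URLs such as
                #  'arduinosimulator://?board=telemetry&name=t1'; parse_qs() on the
                #  query string above yields {'board': ['telemetry'], 'name': ['t1']},
                #  which is the dict this loop walks over.)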
- self.name = values[0] - elif option in int_param_names and len(values) == 1: - params[option] = int(values[0]) - else: - raise Exception(expected + ': unknown param {!r}'.format(option)) # pragma: no cover - return params - - def _create_simulator(self, params): - board = params.get('board', 'telemetry') - if board == 'telemetry': - message = json.loads(""" - { - "name":"telemetry_board", - "ver":"2017-09-23", - "power": { - "computer":1, - "fan":1, - "mount":1, - "cameras":1, - "weather":1, - "main":1 - }, - "current": {"main":387,"fan":28,"mount":34,"cameras":27}, - "amps": {"main":1083.60,"fan":50.40,"mount":61.20,"cameras":27.00}, - "humidity":42.60, - "temp_00":15.50, - "temperature":[13.00,12.81,19.75], - "not_a_number":"Convert to nan" - } - """) - elif board == 'camera': - message = json.loads(""" - { - "name":"camera_board", - "inputs":6, - "camera_00":1, - "camera_01":1, - "accelerometer": {"x":-7.02, "y":6.95, "z":1.70, "o": 6}, - "humidity":59.60, - "temp_00":12.50, - "Not_a_Number":"Convert to NaN" - } - """) - elif board == 'json_object': - # Produce an output that is json, but not what we expect - message = {} - else: - raise Exception('Unknown board: {}'.format(board)) # pragma: no cover - - # The elements of these queues are of type bytes. This means we aren't fully controlling - # the baudrate unless the chunk_size is 1, but that should be OK. - chunk_size = params.get('chunk_size', 20) - self.json_queue = queue.Queue(maxsize=params.get('read_buffer_size', 10000)) - self.relay_queue = queue.Queue(maxsize=params.get('write_buffer_size', 100)) - - self.device_simulator = ArduinoSimulator(message, self.relay_queue, self.json_queue, - chunk_size, self.stop, self.logger) - - -Serial = FakeArduinoSerialHandler diff --git a/pocs/tests/conftest.py b/pocs/tests/conftest.py deleted file mode 100644 index f3b6cdefa..000000000 --- a/pocs/tests/conftest.py +++ /dev/null @@ -1,81 +0,0 @@ -# pytest will load this file, adding the fixtures in it, if some of the tests -# in the same directory are selected, or if the current working directory when -# running pytest is the directory containing this file. -# Note that there are other fixtures defined in the conftest.py in the root -# of this project. - -import copy -import pytest - -import pocs.base -from pocs.utils.config import load_config -from pocs.utils.logger import get_root_logger - -# Global variable with the default config; we read it once, copy it each time it is needed. -_one_time_config = None - - -@pytest.fixture(scope='module') -def images_dir(tmpdir_factory): - directory = tmpdir_factory.mktemp('images') - return str(directory) - - -@pytest.fixture(scope='function') -def config(images_dir, messaging_ports): - pocs.base.reset_global_config() - - global _one_time_config - if not _one_time_config: - _one_time_config = load_config(ignore_local=True, simulator=['all']) - # Set several fields to fixed values. - _one_time_config['db']['name'] = 'panoptes_testing' - _one_time_config['name'] = 'PAN000' # Make sure always testing with PAN000 - _one_time_config['scheduler']['fields_file'] = 'simulator.yaml' - - # Make a copy before we modify based on test fixtures. - result = copy.deepcopy(_one_time_config) - - # We allow for each test to have its own images directory, and thus - # to not conflict with each other. - result['directories']['images'] = images_dir - - # For now (October 2018), POCS assumes that the pub and sub ports are - # sequential. Make sure that is what the test fixtures have in them. 
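    # (For example, a conforming fixture value would be
    #  {'cmd_ports': (43001, 43002), 'msg_ports': (43011, 43012)}; the port numbers
    #  here are hypothetical, only the sequential pairing matters.)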
- # TODO(jamessynge): Remove this once pocs.yaml (or messaging.yaml?) explicitly - # lists the ports to be used. - assert messaging_ports['cmd_ports'][0] == (messaging_ports['cmd_ports'][1] - 1) - assert messaging_ports['msg_ports'][0] == (messaging_ports['msg_ports'][1] - 1) - - # We don't want to use the same production messaging ports, just in case - # these tests are running on a working scope. - result['messaging']['cmd_port'] = messaging_ports['cmd_ports'][0] - result['messaging']['msg_port'] = messaging_ports['msg_ports'][0] - - get_root_logger().debug('config fixture: {!r}', result) - return result - - -@pytest.fixture -def config_with_simulated_dome(config): - config.update({ - 'dome': { - 'brand': 'Simulacrum', - 'driver': 'simulator', - }, - }) - return config - - -@pytest.fixture -def config_with_simulated_mount(config): - config.update({ - 'mount': { - 'model': 'simulator', - 'driver': 'simulator', - 'serial': { - 'port': 'simulator' - } - }, - }) - return config diff --git a/pocs/tests/serial_handlers/__init__.py b/pocs/tests/serial_handlers/__init__.py deleted file mode 100644 index c9835e8fa..000000000 --- a/pocs/tests/serial_handlers/__init__.py +++ /dev/null @@ -1,120 +0,0 @@ -"""The protocol_*.py files in this package are based on PySerial's file -test/handlers/protocol_test.py, modified for different behaviors. -The call serial.serial_for_url("XYZ://") looks for a class Serial in a -file named protocol_XYZ.py in this package (i.e. directory). -""" - -from serial import serialutil - - -class NoOpSerial(serialutil.SerialBase): - """No-op implementation of PySerial's SerialBase. - - Provides no-op implementation of various methods that SerialBase expects - to have implemented by the sub-class. Can be used as is for a /dev/null - type of behavior. - """ - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - @property - def in_waiting(self): - """The number of input bytes available to read immediately.""" - return 0 - - def open(self): - """Open port. - - Raises: - SerialException if the port cannot be opened. - """ - self.is_open = True - - def close(self): - """Close port immediately.""" - self.is_open = False - - def read(self, size=1): - """Read size bytes. - - If a timeout is set it may return fewer characters than requested. - With no timeout it will block until the requested number of bytes - is read. - - Args: - size: Number of bytes to read. - - Returns: - Bytes read from the port, of type 'bytes'. - - Raises: - SerialTimeoutException: In case a write timeout is configured for - the port and the time is exceeded. - """ - if not self.is_open: - raise serialutil.portNotOpenError - return bytes() - - def write(self, data): - """ - Args: - data: The data to write. - - Returns: - Number of bytes written. - - Raises: - SerialTimeoutException: In case a write timeout is configured for - the port and the time is exceeded. - """ - if not self.is_open: - raise serialutil.portNotOpenError - return 0 - - def reset_input_buffer(self): - """Remove any accumulated bytes from the device.""" - pass - - def reset_output_buffer(self): - """Remove any accumulated bytes not yet sent to the device.""" - pass - - # -------------------------------------------------------------------------- - # There are a number of methods called by SerialBase that need to be - # implemented by sub-classes, assuming their calls haven't been blocked - # by replacing the calling methods/properties. These are no-op - # implementations. 
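As a concrete illustration of the package docstring at the top of this file, the sketch below shows how a test can route a PySerial URL to one of these handlers. It assumes the pre-refactor package path used throughout this diff; apart from that, only standard PySerial calls are involved.

import serial

# Let PySerial search this package for protocol_*.py handlers, then open one by URL.
serial.protocol_handler_packages.insert(0, 'pocs.tests.serial_handlers')
try:
    ser = serial.serial_for_url('no_op://', timeout=1.0)
    print(ser.is_open)   # True: serial_for_url opens the port by default
    print(ser.read(4))   # b'', since NoOpSerial behaves like /dev/null
finally:
    serial.protocol_handler_packages.remove('pocs.tests.serial_handlers')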
- - def _reconfigure_port(self): - """Reconfigure the open port after a property has been changed. - - If you need to know which property has been changed, override the - setter for the appropriate properties. - """ - pass - - def _update_rts_state(self): - """Handle rts being set to some value. - - "self.rts = value" has been executed, for some value. This may not - have changed the value. - """ - pass - - def _update_dtr_state(self): - """Handle dtr being set to some value. - - "self.dtr = value" has been executed, for some value. This may not - have changed the value. - """ - pass - - def _update_break_state(self): - """Handle break_condition being set to some value. - - "self.break_condition = value" has been executed, for some value. - This may not have changed the value. - Note that break_condition is set and then cleared by send_break(). - """ - pass diff --git a/pocs/tests/serial_handlers/protocol_buffers.py b/pocs/tests/serial_handlers/protocol_buffers.py deleted file mode 100644 index 6a558959f..000000000 --- a/pocs/tests/serial_handlers/protocol_buffers.py +++ /dev/null @@ -1,102 +0,0 @@ -# This module implements a handler for serial_for_url("buffers://"). - -from pocs.tests.serial_handlers import NoOpSerial - -import io -from serial import serialutil -import threading - -# r_buffer and w_buffer are binary I/O buffers. read(size=N) on an instance -# of Serial reads the next N bytes from r_buffer, and write(data) appends the -# bytes of data to w_buffer. -# NOTE: The caller (a test) is responsible for resetting buffers before tests. -_r_buffer = None -_w_buffer = None - -# The above I/O buffers are not thread safe, so we need to lock them during -# access. -_r_lock = threading.Lock() -_w_lock = threading.Lock() - - -def ResetBuffers(read_data=None): - SetRBufferValue(read_data) - with _w_lock: - global _w_buffer - _w_buffer = io.BytesIO() - - -def SetRBufferValue(data): - """Sets the r buffer to data (a bytes object).""" - if data and not isinstance(data, (bytes, bytearray)): - raise TypeError("data must by a bytes or bytearray object.") - with _r_lock: - global _r_buffer - _r_buffer = io.BytesIO(data) - - -def GetWBufferValue(): - """Returns an immutable bytes object with the value of the w buffer.""" - with _w_lock: - if _w_buffer: - return _w_buffer.getvalue() - - -class BuffersSerial(NoOpSerial): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - @property - def in_waiting(self): - if not self.is_open: - raise serialutil.portNotOpenError - with _r_lock: - return len(_r_buffer.getbuffer()) - _r_buffer.tell() - - def read(self, size=1): - """Read size bytes. - - If a timeout is set it may return fewer characters than requested. - With no timeout it will block until the requested number of bytes - is read. - - Args: - size: Number of bytes to read. - - Returns: - Bytes read from the port, of type 'bytes'. - - Raises: - SerialTimeoutException: In case a write timeout is configured for - the port and the time is exceeded. - """ - if not self.is_open: - raise serialutil.portNotOpenError - with _r_lock: - # TODO(jamessynge): Figure out whether and how to handle timeout. - # We might choose to generate a timeout if the caller asks for data - # beyond the end of the buffer; or simply return what is left, - # including nothing (i.e. bytes()) if there is nothing left. - return _r_buffer.read(size) - - def write(self, data): - """ - Args: - data: The data to write. - - Returns: - Number of bytes written. 
- - Raises: - SerialTimeoutException: In case a write timeout is configured for - the port and the time is exceeded. - """ - if not isinstance(data, (bytes, bytearray)): - raise TypeError("data must by a bytes or bytearray object.") - if not self.is_open: - raise serialutil.portNotOpenError - with _w_lock: - return _w_buffer.write(data) - - -Serial = BuffersSerial diff --git a/pocs/tests/serial_handlers/protocol_hooked.py b/pocs/tests/serial_handlers/protocol_hooked.py deleted file mode 100644 index dc7d0b6b5..000000000 --- a/pocs/tests/serial_handlers/protocol_hooked.py +++ /dev/null @@ -1,31 +0,0 @@ -# This module enables a test to provide a handler for "hooked://..." urls -# passed into serial.serial_for_url. To do so, set the value of -# serial_class_for_url from your test to a function with the same API as -# ExampleSerialClassForUrl. Or assign your class to Serial. - -from pocs.tests.serial_handlers import NoOpSerial - - -def ExampleSerialClassForUrl(url): - """Implementation of serial_class_for_url called by serial.serial_for_url. - - Returns the url, possibly modified, and a factory function to be called to - create an instance of a SerialBase sub-class (or at least behaves like it). - You can return a class as that factory function, as calling a class creates - an instance of that class. - - serial.serial_for_url will call that factory function with None as the - port parameter (the first), and after creating the instance will assign - the url to the port property of the instance. - - Returns: - A tuple (url, factory). - """ - return url, Serial - - -# Assign to this global variable from a test to override this default behavior. -serial_class_for_url = ExampleSerialClassForUrl - -# Or assign your own class to this global variable. -Serial = NoOpSerial diff --git a/pocs/tests/serial_handlers/protocol_no_op.py b/pocs/tests/serial_handlers/protocol_no_op.py deleted file mode 100644 index 4af6c9396..000000000 --- a/pocs/tests/serial_handlers/protocol_no_op.py +++ /dev/null @@ -1,6 +0,0 @@ -# This module implements a handler for serial_for_url("no_op://"). - -from pocs.tests.serial_handlers import NoOpSerial - -# Export it as Serial so that it will be picked up by PySerial's serial_for_url. -Serial = NoOpSerial diff --git a/pocs/tests/test_arduino_io.py b/pocs/tests/test_arduino_io.py deleted file mode 100644 index d2e05eeb0..000000000 --- a/pocs/tests/test_arduino_io.py +++ /dev/null @@ -1,469 +0,0 @@ -# Test sensors.py ability to read from two sensor boards. - -import collections -import contextlib -import datetime -import pytest -import serial -import threading -import time - -from pocs.sensors import arduino_io -import pocs.utils.error as error -from pocs.utils.logger import get_root_logger -from pocs.utils import CountdownTimer -from pocs.utils import rs232 - -SerDevInfo = collections.namedtuple('SerDevInfo', 'device description manufacturer') - - -@pytest.fixture(scope='function') -def serial_handlers(): - # Install our test handlers for the duration. - serial.protocol_handler_packages.insert(0, 'pocs.serial_handlers') - yield True - # Remove our test handlers. 
- serial.protocol_handler_packages.remove('pocs.serial_handlers') - - -def get_serial_port_info(): - return [ - SerDevInfo( - device='bogus://', description='Some USB-to-Serial device', manufacturer='Acme'), - SerDevInfo(device='loop://', description='Some text', manufacturer='Arduino LLC'), - SerDevInfo( - device='arduinosimulator://?board=telemetry&name=t1', - description='Some Arduino device', - manufacturer='www.arduino.cc'), - SerDevInfo( - device='arduinosimulator://?board=camera&name=c1', - description='Arduino Micro', - manufacturer=''), - ] - - -@pytest.fixture(scope='function') -def inject_get_serial_port_info(): - saved = rs232.get_serial_port_info - rs232.get_serial_port_info = get_serial_port_info - yield True - rs232.get_serial_port_info = saved - - -def read_and_return(aio, db, retry_limit=10): - """Ask the ArduinioIO instance to read_and_record, then return the reading. - - The first line of output might be a partial report: leading bytes might be - missing, Therefore we allow for retrying. - """ - old_reading = db.get_current(aio.board) - - for _ in range(retry_limit): - if aio.read_and_record(): - new_reading = db.get_current(aio.board) - assert old_reading is not new_reading - return new_reading - - # Should only be able to get here if the retry limit was too low. - assert retry_limit < 1 - - -def receive_message_with_timeout(subscriber, timeout_secs=1.0): - """Receive the next message from a subscriber channel.""" - timer = CountdownTimer(timeout_secs) - while True: - topic, msg_obj = subscriber.receive_message(blocking=False) - if topic or msg_obj: - return topic, msg_obj - if not timer.sleep(max_sleep=0.05): - return None, None - - -@contextlib.contextmanager -def open_serial_device(*args, **kwargs): - """Context manager that opens a serial device, disconnects at exit. - - Ensures that an serial device is disconnected when exiting, which - in turn ensures that the Arduino simulator is shutdown. - """ - ser = arduino_io.open_serial_device(*args, **kwargs) - try: - yield ser - finally: - ser.disconnect() - - -# -------------------------------------------------------------------------------------------------- -# Basic tests of FakeArduinoSerialHandler. - - -def test_create_camera_simulator(serial_handlers): - """Test SerialData, FakeArduinoSerialHandler and ArduinoSimulator.""" - port = 'arduinosimulator://?board=camera' - ser = rs232.SerialData(port=port, baudrate=9600, timeout=2.0) - try: - assert ser.is_connected is True - - # Some testing/coverage of the underlying handler and simulator... - - ser.ser.open() # Redundant, but covers a branch. - - # There is nothing in the output buffer, so resetting it should - # have no effect. - assert ser.ser.out_waiting == 0 - ser.ser.reset_output_buffer() - assert ser.ser.out_waiting == 0 - - # If we flush the input, we'll be able to find zero bytes waiting eventually. - was_empty = False - for n in range(10): - ser.reset_input_buffer() - if ser.ser.in_waiting == 0: - was_empty = True - break - assert was_empty - - ser.disconnect() - assert ser.is_connected is False - - # Various methods throw if the device isn't connected/open. 
- with pytest.raises(Exception): - ser.ser.in_waiting - with pytest.raises(Exception): - ser.ser.out_waiting - with pytest.raises(Exception): - ser.ser.read() - with pytest.raises(Exception): - ser.ser.flush() - with pytest.raises(Exception): - ser.ser.reset_output_buffer() - - ser.connect() - assert ser.is_connected is True - # First read will typically get a fragment of a line, but then the next will - # get a full line. - s = ser.read() - assert s.endswith('\n') - s = ser.read() - assert s.startswith('{') - assert s.endswith('}\r\n') - assert 'camera_board' in s - - # If we write a bunch of commands, eventually we can detect that the - # underlying serial device has pending output. - was_full = 0 - for n in range(20): - ser.write('some bytes, again and again') - if ser.ser.out_waiting > 0: - was_full += 1 - if was_full > 3: - ser.ser.reset_output_buffer() - break - ser.ser.flush() - assert was_full > 0 - - # Read until times out, at which point read will return - # whatever it has accumulated. - ser.ser.timeout = 0.1 - total = 0 - for n in range(20): - total += len(ser.read_bytes(size=None)) - assert total > 0 - - ser.disconnect() - assert ser.is_connected is False - finally: - ser.disconnect() - - -def test_create_default_simulator(serial_handlers): - """Defaults to creating telemetry_board messages.""" - ser = arduino_io.open_serial_device('arduinosimulator://') - assert ser.is_connected is True - retry_limit = 2 - (ts, reading) = ser.get_and_parse_reading(retry_limit=retry_limit) - assert isinstance(reading, dict) - assert reading['name'] == 'telemetry_board' - report_num = reading['report_num'] - assert 1 <= report_num - assert report_num <= retry_limit - ser.disconnect() - assert ser.is_connected is False - - -def test_create_simulator_small_read_buffer(serial_handlers): - """Force the communication to be difficult by making a very small buffer. - - This should force more code paths to be covered. - """ - ser = arduino_io.open_serial_device( - 'arduinosimulator://?board=telemetry&read_buffer_size=1&chunk_size=1') - assert ser.is_connected is True - # Wait a bit so that the read buffer overflows. - time.sleep(4) - # Now start reading, which will certainly require some retries - # on very busy machines. - retry_limit = 2000 - (ts, reading) = ser.get_and_parse_reading(retry_limit=retry_limit) - assert isinstance(reading, dict) - assert reading['name'] == 'telemetry_board' - report_num = reading['report_num'] - assert 1 < report_num - assert report_num <= retry_limit - ser.disconnect() - assert ser.is_connected is False - - -# -------------------------------------------------------------------------------------------------- - - -def test_detect_board_on_port_invalid_port(): - """detect_board_on_port will fail if the port is bogus.""" - assert arduino_io.detect_board_on_port(' not a valid port ', logger=get_root_logger()) is None - - -def test_detect_board_on_port_not_a_board(): - """detect_board_on_port will fail if the port doesn't produce the expected output. - - Detection will fail because loop:// handler doesn't print anything. - """ - assert arduino_io.detect_board_on_port('loop://', logger=get_root_logger()) is None - - -def test_detect_board_on_port_no_handler_installed(): - """Can't find our simulator, so returns None. - - This test doesn't have `serial_handlers` as a param, so the arduinosimulator can't be found by - PySerial's `serial_for_url`. Therefore, detect_board_on_port won't be able to determine the - type of board. 
- """ - assert arduino_io.detect_board_on_port('arduinosimulator://?board=telemetry') is None - - -def test_detect_board_on_port_telemetry(serial_handlers): - """Detect a telemetry board.""" - assert arduino_io.detect_board_on_port( - 'arduinosimulator://?board=telemetry') == 'telemetry_board' - - -def test_detect_board_on_port_no_name(serial_handlers): - """Deal with dict that doesn't contain a name.""" - assert arduino_io.detect_board_on_port('arduinosimulator://?board=json_object') is None - - -# -------------------------------------------------------------------------------------------------- - - -def test_get_arduino_ports(inject_get_serial_port_info): - v = arduino_io.get_arduino_ports() - assert len(v) == 3 - assert v == [ - 'loop://', - 'arduinosimulator://?board=telemetry&name=t1', - 'arduinosimulator://?board=camera&name=c1', - ] - - -# -------------------------------------------------------------------------------------------------- - - -def test_auto_detect_arduino_devices(inject_get_serial_port_info, serial_handlers, fake_logger): - v = arduino_io.auto_detect_arduino_devices() - assert len(v) == 2 - for ndx, (board, name) in enumerate([('telemetry', 't1'), ('camera', 'c1')]): - print('ndx=%r board=%r name=%r' % (ndx, board, name)) - expected = 'arduinosimulator://?board=%s&name=%s' % (board, name) - assert v[ndx][0] == '{}_board'.format(board) - assert v[ndx][1] == expected - - # Confirm that params are handled properly - u = arduino_io.auto_detect_arduino_devices(ports=[v[0][1]], logger=fake_logger) - assert len(u) == 1 - assert u[0] == v[0] - - -# -------------------------------------------------------------------------------------------------- - - -def test_arduino_io_basic(serial_handlers, memory_db, msg_publisher, msg_subscriber, cmd_publisher, - cmd_subscriber): - board = 'telemetry' - port = 'arduinosimulator://?board=' + board - board = board + '_board' - with open_serial_device(port) as ser: - aio = arduino_io.ArduinoIO(board, ser, memory_db, msg_publisher, cmd_subscriber) - - # Wait until we get the first reading. - stored_reading = read_and_return(aio, memory_db) - assert isinstance(stored_reading, dict) - assert sorted(stored_reading.keys()) == ['_id', 'data', 'date', 'type'] - assert isinstance(stored_reading['_id'], str) - assert isinstance(stored_reading['date'], datetime.datetime) - assert stored_reading['type'] == board - - # Check that the reading was sent as a message. We need to allow some - # time for the message to pass through thee messaging system. - topic, msg_obj = receive_message_with_timeout(msg_subscriber) - assert topic == board - assert isinstance(msg_obj, dict) - assert len(msg_obj) == 3 - assert isinstance(msg_obj.get('data'), dict) - assert isinstance(msg_obj.get('timestamp'), str) - assert msg_obj.get('name') == board - assert stored_reading['data']['data'] == msg_obj['data'] - - # Check that the reading was stored. - stored_reading = memory_db.get_current(board) - assert isinstance(stored_reading, dict) - assert sorted(stored_reading.keys()) == ['_id', 'data', 'date', 'type'] - assert isinstance(stored_reading['_id'], str) - assert stored_reading['data']['data'] == msg_obj['data'] - assert isinstance(stored_reading['date'], datetime.datetime) - assert stored_reading['type'] == board - - # There should be no new messages because we haven't called read_and_record again. 
- topic, msg_obj = receive_message_with_timeout(msg_subscriber, timeout_secs=0.2) - assert topic is None - assert msg_obj is None - - -def test_arduino_io_auto_connect_to_read(serial_handlers, memory_db, msg_publisher, msg_subscriber, - cmd_publisher, cmd_subscriber): - """Exercise ability to reconnect if disconnected.""" - board = 'camera' - port = 'arduinosimulator://?board=' + board - board = board + '_board' - with open_serial_device(port) as ser: - aio = arduino_io.ArduinoIO(board, ser, memory_db, msg_publisher, cmd_subscriber) - - read_and_return(aio, memory_db) - aio.reconnect() - read_and_return(aio, memory_db) - aio.disconnect() - read_and_return(aio, memory_db) - - -def test_arduino_io_board_name(serial_handlers, memory_db, msg_publisher, msg_subscriber, - cmd_publisher, cmd_subscriber): - board = 'telemetry' - port = 'arduinosimulator://?board=' + board - board = board + '_board' - with open_serial_device(port) as ser: - aio = arduino_io.ArduinoIO(board, ser, memory_db, msg_publisher, cmd_subscriber) - - # Confirm that it checks the name of the board. If the reading contains - # a 'name', it must match the expected value. - aio.board = 'wrong' - with pytest.raises(error.ArduinoDataError): - read_and_return(aio, memory_db) - aio.board = board - - -def test_arduino_io_shutdown(serial_handlers, memory_db, msg_publisher, msg_subscriber, - cmd_publisher, cmd_subscriber): - """Confirm request to shutdown is recorded.""" - board = 'telemetry' - port = 'arduinosimulator://?board=' + board - board = board + '_board' - with open_serial_device(port) as ser: - aio = arduino_io.ArduinoIO(board, ser, memory_db, msg_publisher, cmd_subscriber) - - # Ask it to stop working. Just records the request in a private variable, - # but if we'd been running it in a separate process this is how we'd get it - # to shutdown cleanly; the alternative would be to kill the process. - cmd_topic = board + ':commands' - assert cmd_topic == aio._cmd_topic - - # Direct manipulation of stop_running should work. - assert not aio.stop_running - aio.stop_running = True - assert aio.stop_running - aio.stop_running = False - assert not aio.stop_running - - # And we should be able to send it the command over the command messaging system. - get_root_logger().debug('Sending shutdown command') - cmd_publisher.send_message(cmd_topic, dict(command='shutdown')) - # stop_running should still be False since we've not yet called handle_commands. - assert not aio.stop_running - - # On a lightly loaded system, the send_message will work quickly, so that - # the first call to handle_commands receives it, but it might take longer - # sometimes. - for _ in range(10): - aio.handle_commands() - if aio.stop_running: - break - get_root_logger().debug('Shutdown not handled yet') - get_root_logger().debug('ArduinoIO.stop_running == {!r}', aio.stop_running) - - assert aio.stop_running - - -def test_arduino_io_write_line(serial_handlers, memory_db, msg_publisher, msg_subscriber, - cmd_publisher, cmd_subscriber): - """Confirm request to shutdown is recorded.""" - board = 'telemetry' - port = 'arduinosimulator://?board=' + board - board = board + '_board' - with open_serial_device(port) as ser: - aio = arduino_io.ArduinoIO(board, ser, memory_db, msg_publisher, cmd_subscriber) - - cmd_topic = board + ':commands' - assert cmd_topic == aio._cmd_topic - - # Run ArduinoIO in a thread. - thread = threading.Thread(target=lambda: aio.run(), daemon=True) - thread.start() - - # Wait until messages are received, with a time limit. 
- topic, msg_obj = msg_subscriber.receive_message(blocking=True, timeout_ms=20000) - assert topic - assert msg_obj - assert 'commands' not in msg_obj['data'] - - # Drain available messages. - for n in range(100): - topic, msg_obj = msg_subscriber.receive_message(blocking=False) - if not topic: - print(f'Drained at n=#{n}') - break - - # Send a command. The simulator will echo it. - cmd_publisher.send_message(cmd_topic, dict(command='write_line', line='relay=on')) - - # Look for a message with our command echoed. - for n in range(5): - topic, msg_obj = msg_subscriber.receive_message(blocking=True, timeout_ms=3000) - if 'commands' in msg_obj['data']: - break - - assert 'commands' in msg_obj['data'] - assert msg_obj['data']['commands'] == ['relay=on'] - - # Confirm that later messages don't have any commands. - topic, msg_obj = msg_subscriber.receive_message(blocking=True, timeout_ms=3000) - assert 'commands' not in msg_obj['data'] - - # Send another command. The simulator will echo it. - cmd_publisher.send_message(cmd_topic, dict(command='write_line', line='relay=off')) - - # Look for a message with our command echoed. - for n in range(5): - topic, msg_obj = msg_subscriber.receive_message(blocking=True, timeout_ms=3000) - if 'commands' in msg_obj['data']: - break - - assert 'commands' in msg_obj['data'] - assert msg_obj['data']['commands'] == ['relay=off'] - - # Confirm that later messages don't have any commands. - topic, msg_obj = msg_subscriber.receive_message(blocking=True, timeout_ms=3000) - assert 'commands' not in msg_obj['data'] - - # Shutdown in the expected style. - assert not aio.stop_running - cmd_publisher.send_message(cmd_topic, dict(command='shutdown')) - thread.join(timeout=10.0) - assert not thread.is_alive() - assert aio.stop_running diff --git a/pocs/tests/test_base.py b/pocs/tests/test_base.py deleted file mode 100644 index bda78e093..000000000 --- a/pocs/tests/test_base.py +++ /dev/null @@ -1,24 +0,0 @@ -import pytest - -from pocs.base import PanBase - - -def test_mount_in_config(config): - del config['mount'] - base = PanBase() - with pytest.raises(SystemExit): - base._check_config(config) - - -def test_directories_in_config(config): - del config['directories'] - base = PanBase() - with pytest.raises(SystemExit): - base._check_config(config) - - -def test_state_machine_in_config(config): - del config['state_machine'] - base = PanBase() - with pytest.raises(SystemExit): - base._check_config(config) diff --git a/pocs/tests/test_config.py b/pocs/tests/test_config.py deleted file mode 100644 index 4a231a7ea..000000000 --- a/pocs/tests/test_config.py +++ /dev/null @@ -1,175 +0,0 @@ -import os -import pytest -import uuid -import yaml - -from astropy import units as u - -from pocs.utils.config import load_config -from pocs.utils.config import save_config - - -def test_load_simulator(config): - assert 'camera' in config['simulator'] - assert 'mount' in config['simulator'] - assert 'weather' in config['simulator'] - assert 'night' in config['simulator'] - - -def test_no_overwrite(config): - with pytest.warns(UserWarning): - save_config('pocs', config, overwrite=False) - - -def test_overwrite(config): - - config01 = { - 'foo': 'bar' - } - config02 = { - 'bar': 'foo' - } - - assert config01 != config02 - - save_config('foo', config01) - config03 = load_config('foo') - - assert config01 == config03 - - save_config('foo', config02) - config04 = load_config('foo') - - assert config02 == config04 - assert config01 != config04 - - conf_fn = 
'{}/conf_files/foo.yaml'.format(os.getenv('POCS')) - os.remove(conf_fn) - assert os.path.exists(conf_fn) is False - - -def test_full_path(): - temp_config_path = '/tmp/{}.yaml'.format(uuid.uuid4()) - temp_config = {'foo': 42} - save_config(temp_config_path, temp_config) - - c = load_config(temp_config_path) - - assert c == temp_config - os.remove(temp_config_path) - - -def test_local_config(): - - _local_config_file = '{}/conf_files/pocs_local.yaml'.format(os.getenv('POCS')) - - if not os.path.exists(_local_config_file): - conf = load_config(ignore_local=True) - assert conf['name'] == 'Generic PANOPTES Unit' - - local_yaml = { - 'name': 'ConfTestName' - } - with open(_local_config_file, 'w') as f: - f.write(yaml.dump(local_yaml)) - conf = load_config() - assert conf['name'] != 'Generic PANOPTES Unit' - os.remove(_local_config_file) - else: - conf = load_config() - assert conf['name'] != 'Generic PANOPTES Unit' - - -def test_multiple_config(): - config01 = {'foo': 1} - config02 = {'foo': 2, 'bar': 42} - config03 = {'bam': 'boo'} - - assert config01 != config02 - - f01 = str(uuid.uuid4()) - f02 = str(uuid.uuid4()) - f03 = str(uuid.uuid4()) - - save_config(f01, config01) - save_config(f02, config02) - save_config(f03, config03) - - config04 = load_config(f01) - config05 = load_config(f02) - config06 = load_config(f03) - - assert config01 == config04 - assert config02 == config05 - assert config03 == config06 - - config07 = load_config([f01, f02], ignore_local=True) - config08 = load_config([f02, f01], ignore_local=True) - - assert config07 != config01 - assert config07 == config02 - - assert config08 != config01 - assert config08 != config02 - assert config08 != config05 - - assert 'foo' not in config06 - assert 'bar' not in config06 - assert 'foo' in config05 - assert 'foo' in config07 - assert 'foo' in config08 - assert 'bar' in config05 - assert 'bar' in config07 - assert 'bar' in config08 - assert 'bam' in config06 - - assert config07['foo'] == 2 - assert config08['foo'] == 1 - - os.remove('{}/conf_files/{}.yaml'.format(os.getenv('POCS'), f01)) - os.remove('{}/conf_files/{}.yaml'.format(os.getenv('POCS'), f02)) - os.remove('{}/conf_files/{}.yaml'.format(os.getenv('POCS'), f03)) - - -def test_no_config(): - # Move existing config to temp - _config_file = '{}/conf_files/pocs.yaml'.format(os.getenv('POCS')) - _config_file_temp = '{}/conf_files/pocs_temp.yaml'.format(os.getenv('POCS')) - os.rename(_config_file, _config_file_temp) - - config = load_config(ignore_local=True) - - assert len(config.keys()) == 0 - - os.rename(_config_file_temp, _config_file) - - -def test_parse(config): - lat = config['location']['latitude'] - assert isinstance(lat, u.Quantity) - - -def test_no_parse(): - config = load_config(parse=False, ignore_local=True) - lat = config['location']['latitude'] - assert isinstance(lat, u.Quantity) is False - assert isinstance(lat, float) - - -def test_location_latitude(config): - lat = config['location']['latitude'] - assert lat >= -90 * u.degree and lat <= 90 * u.degree - - -def test_location_longitude(config): - lat = config['location']['longitude'] - assert lat >= -360 * u.degree and lat <= 360 * u.degree - - -def test_location_positive_elevation(config): - elev = config['location']['elevation'] - assert elev >= 0.0 * u.meter - - -def test_directories(config): - assert config['directories']['data'] == os.path.join(os.getenv('PANDIR'), 'data') diff --git a/pocs/tests/test_database.py b/pocs/tests/test_database.py deleted file mode 100644 index 1cdda0531..000000000 --- 
a/pocs/tests/test_database.py +++ /dev/null @@ -1,82 +0,0 @@ -import pytest - -from pocs.utils.error import InvalidCollection -from pocs.utils.logger import get_root_logger - - -def test_insert_and_no_permanent(db): - rec = {'test': 'insert'} - id0 = db.insert_current('config', rec, store_permanently=False) - - record = db.get_current('config') - assert record['data']['test'] == rec['test'] - - record = db.find('config', id0) - assert record is None - - -def test_insert_and_get_current(db): - rec = {'test': 'insert'} - db.insert_current('config', rec) - - record = db.get_current('config') - assert record['data']['test'] == rec['test'] - - -def test_clear_current(db): - rec = {'test': 'insert'} - db.insert_current('config', rec) - - record = db.get_current('config') - assert record['data']['test'] == rec['test'] - - db.clear_current('config') - - record = db.get_current('config') - assert record is None - - -def test_simple_insert(db): - rec = {'test': 'insert'} - # Use `insert` here, which returns an `ObjectId` - id0 = db.insert('config', rec) - - record = db.find('config', id0) - assert record['data']['test'] == rec['test'] - - -# Filter out (hide) "UserWarning: Collection not available" -@pytest.mark.filterwarnings('ignore') -def test_bad_collection(db): - with pytest.raises(InvalidCollection): - db.insert_current('foobar', {'test': 'insert'}) - - with pytest.raises(InvalidCollection): - db.insert('foobar', {'test': 'insert'}) - - -def test_log_bad_object(db, caplog): - if not db.logger: - db.logger = get_root_logger() - - assert db.insert_current('observations', {'junk': db}) is None - assert any([rec.levelname == 'WARNING' and - 'Problem inserting object into current collection' in rec.message - for rec in caplog.records]) - - caplog.records.clear() - - assert db.insert('observations', {'junk': db}) is None - assert any([rec.levelname == 'WARNING' and - 'Problem inserting object into collection' in rec.message - for rec in caplog.records]) - - -def test_warn_bad_object(db): - db.logger = None - - with pytest.warns(UserWarning): - db.insert_current('observations', {'junk': db}) - - with pytest.warns(UserWarning): - db.insert('observations', {'junk': db}) diff --git a/pocs/tests/test_dome_simulator.py b/pocs/tests/test_dome_simulator.py deleted file mode 100644 index 1d9fd0fac..000000000 --- a/pocs/tests/test_dome_simulator.py +++ /dev/null @@ -1,77 +0,0 @@ -import copy -import pytest - -import pocs.dome -from pocs.dome import simulator - - -# Yields two different dome controllers configurations, -# both with the pocs.dome.simulator.Dome class, but one -# overriding the specified driver with the simulator, -# the other explicitly specified. -@pytest.fixture(scope="function", params=[False, True]) -def dome(request, config): - config = copy.deepcopy(config) - is_simulator = request.param - if is_simulator: - config.update({ - 'dome': { - 'brand': 'Astrohaven', - 'driver': 'astrohaven', - }, - 'simulator': ['something', 'dome', 'another'], - }) - else: - config.update({ - 'dome': { - 'brand': 'Simulacrum', - 'driver': 'simulator', - }, - }) - del config['simulator'] - the_dome = pocs.dome.create_dome_from_config(config) - yield the_dome - if is_simulator: - # Should have marked the dome as being simulated. - assert config['dome']['simulator'] - else: - # Doesn't know that a simulator was specified. 
- assert 'simulator' not in config['dome'] - the_dome.disconnect() - - -def test_create(dome): - assert isinstance(dome, simulator.Dome) - assert not dome.is_connected - - -def test_connect(dome): - assert not dome.is_connected - assert dome.connect() is True - assert dome.is_connected is True - # Can repeat. - assert dome.connect() is True - assert dome.is_connected is True - - -def test_disconnect(dome): - assert dome.connect() is True - assert dome.disconnect() is True - assert dome.is_connected is False - # Can repeat. - assert dome.disconnect() is True - assert dome.is_connected is False - - -def test_open_and_close_slit(dome): - dome.connect() - - assert dome.open() is True - assert 'open' in dome.status.lower() - assert dome.is_open is True - - assert dome.close() is True - assert 'closed' in dome.status.lower() - assert dome.is_closed is True - - assert dome.disconnect() is True diff --git a/pocs/tests/test_horizon_points.py b/pocs/tests/test_horizon_points.py deleted file mode 100644 index a275e4b8b..000000000 --- a/pocs/tests/test_horizon_points.py +++ /dev/null @@ -1,99 +0,0 @@ -import pytest -import numpy as np -import random - -from pocs.utils.horizon import Horizon - - -def test_normal(): - hp = Horizon(obstructions=[ - [[20, 10], [40, 70]] - ]) - assert isinstance(hp, Horizon) - - hp2 = Horizon(obstructions=[ - [[40, 45], [50, 50], [60, 45]] - ]) - assert isinstance(hp2, Horizon) - - hp3 = Horizon() - assert isinstance(hp3, Horizon) - - -def test_bad_length_tuple(): - with pytest.raises(AssertionError): - Horizon(obstructions=[ - [[20], [40, 70]] - ]) - - -def test_bad_length_list(): - with pytest.raises(AssertionError): - Horizon(obstructions=[ - [[40, 70]] - ]) - - -def test_bad_string(): - with pytest.raises(AssertionError): - Horizon(obstructions=[ - [["x", 10], [40, 70]] - ]) - - -def test_too_many_points(): - with pytest.raises(AssertionError): - Horizon(obstructions=[[[120, 60, 300]]]) - - -def test_wrong_bool(): - with pytest.raises(AssertionError): - Horizon(obstructions=[[[20, 200], [30, False]]]) - - -def test_numpy_ints(): - range_length = 360 - points = [list(list(a) for a in zip( - [random.randrange(15, 50) for _ in range(range_length)], # Random height - np.arange(1, range_length, 25) # Set azimuth - ))] - points - assert isinstance(Horizon(points), Horizon) - - -def test_negative_alt(): - with pytest.raises(AssertionError): - Horizon(obstructions=[ - [[10, 20], [-1, 30]] - ]) - - -def test_good_negative_az(): - hp = Horizon(obstructions=[ - [[50, -10], [45, -20]] - ]) - assert isinstance(hp, Horizon) - - hp2 = Horizon(obstructions=[ - [[10, -181], [20, -190]] - ]) - assert isinstance(hp2, Horizon) - - -def test_bad_negative_az(): - with pytest.raises(AssertionError): - Horizon(obstructions=[ - [[10, -361], [20, -350]] - ]) - - -def test_sorting(): - points = [ - [[10., 10.], [20., 20.]], - [[30., 190.], [10., 180.]], - [[10., 50.], [30., 60.]], - ] - hp = Horizon(obstructions=points) - assert hp.obstructions == [[(10.0, 10.0), (20.0, 20.0)], - [(10.0, 50.0), (30.0, 60.0)], - [(10.0, 180.0), (30.0, 190.0)]] diff --git a/pocs/tests/test_messaging.py b/pocs/tests/test_messaging.py deleted file mode 100644 index 567a05a77..000000000 --- a/pocs/tests/test_messaging.py +++ /dev/null @@ -1,141 +0,0 @@ -import multiprocessing -import pytest -import time - -from datetime import datetime -from pocs.utils.messaging import PanMessaging - - -@pytest.fixture(scope='module') -def mp_manager(): - return multiprocessing.Manager() - - -@pytest.fixture(scope='function') -def 
forwarder(mp_manager): - ready = mp_manager.Event() - done = mp_manager.Event() - - def start_forwarder(): - PanMessaging.create_forwarder( - 12345, 54321, ready_fn=lambda: ready.set(), done_fn=lambda: done.set()) - - messaging = multiprocessing.Process(target=start_forwarder) - messaging.start() - - if not ready.wait(timeout=10.0): - raise Exception('Forwarder failed to become ready!') - # Wait a moment for the forwarder to start using those sockets. - time.sleep(0.05) - - yield messaging - - # Stop the forwarder. Since we use the same ports in multiple - # tests, we wait for the process to shutdown. - messaging.terminate() - for _ in range(100): - # We can't be sure that the sub-process will succeed in - # calling the done_fn, so we also check for the process - # ending. - if done.wait(timeout=0.01): - break - if not messaging.is_alive(): - break - - -def test_forwarder(forwarder): - assert forwarder.is_alive() is True - - -@pytest.fixture(scope='function') -def pub_and_sub(forwarder): - # Ensure that the subscriber is created first. - sub = PanMessaging.create_subscriber(54321) - time.sleep(0.05) - pub = PanMessaging.create_publisher(12345, bind=False, connect=True) - time.sleep(0.05) - yield (pub, sub) - pub.close() - sub.close() - - -def test_send_string(pub_and_sub): - pub, sub = pub_and_sub - pub.send_message('Test-Topic', 'Hello') - topic, msg_obj = sub.receive_message() - - assert topic == 'Test-Topic' - assert isinstance(msg_obj, dict) - assert 'message' in msg_obj - assert msg_obj['message'] == 'Hello' - - -def test_send_datetime(pub_and_sub): - pub, sub = pub_and_sub - pub.send_message('Test-Topic', {'date': datetime(2017, 1, 1)}) - topic, msg_obj = sub.receive_message() - assert msg_obj['date'] == '2017-01-01T00:00:00' - - -def test_storage_id(pub_and_sub, config, db): - id0 = db.insert_current('config', {'foo': 'bar'}, store_permanently=False) - pub, sub = pub_and_sub - pub.send_message('Test-Topic', db.get_current('config')) - topic, msg_obj = sub.receive_message() - assert '_id' in msg_obj - assert isinstance(msg_obj['_id'], str) - assert id0 == msg_obj['_id'] - - -################################################################################ -# Tests of the conftest.py messaging fixtures. - -def test_message_forwarder_exists(message_forwarder): - assert isinstance(message_forwarder, dict) - assert 'msg_ports' in message_forwarder - - assert isinstance(message_forwarder['msg_ports'], tuple) - assert len(message_forwarder['msg_ports']) == 2 - assert isinstance(message_forwarder['msg_ports'][0], int) - assert isinstance(message_forwarder['msg_ports'][1], int) - - assert isinstance(message_forwarder['cmd_ports'], tuple) - assert len(message_forwarder['cmd_ports']) == 2 - assert isinstance(message_forwarder['cmd_ports'][0], int) - assert isinstance(message_forwarder['cmd_ports'][1], int) - - # The ports should be unique. - msg_ports = message_forwarder['msg_ports'] - cmd_ports = message_forwarder['cmd_ports'] - - ports = set(list(msg_ports) + list(cmd_ports)) - assert len(ports) == 4 - - -def assess_pub_sub(pub, sub): - """Helper method for testing a pub-sub pair.""" - - # Can not send a message using a subscriber - with pytest.raises(Exception): - sub.send_message('topic_name', 'a string') - - # Can not receive a message using a publisher - assert (None, None) == pub.receive_message(blocking=True) - - # At first, there is nothing available to receive. 
- assert (None, None) == sub.receive_message(blocking=True, timeout_ms=500) - - pub.send_message('topic.name', 'a string') - topic, msg_obj = sub.receive_message() - assert isinstance(msg_obj, dict) - assert 'message' in msg_obj - assert msg_obj['message'] == 'a string' - assert 'timestamp' in msg_obj - - -def test_msg_pub_sub(msg_publisher, msg_subscriber): - assess_pub_sub(msg_publisher, msg_subscriber) - - -def test_cmd_pub_sub(cmd_publisher, cmd_subscriber): - assess_pub_sub(cmd_publisher, cmd_subscriber) diff --git a/pocs/tests/test_mount.py b/pocs/tests/test_mount.py deleted file mode 100644 index cb823a78d..000000000 --- a/pocs/tests/test_mount.py +++ /dev/null @@ -1,57 +0,0 @@ -import pytest - -from pocs.mount import create_mount_from_config, AbstractMount -from pocs.utils.error import MountNotFound -from pocs.utils.location import create_location_from_config - - -@pytest.fixture -def conf_with_mount(config_with_simulated_mount): - return config_with_simulated_mount.copy() - - -def test_mount_not_in_config(config): - conf = config.copy() - - # Remove mount info - del conf['mount'] - - with pytest.raises(MountNotFound): - create_mount_from_config(conf) - - -@pytest.mark.without_mount -def test_mount_no_config_param(): - # Will fail because it's not a simulator and no real mount attached - with pytest.raises(MountNotFound): - create_mount_from_config() - - -def test_bad_mount_port(config): - conf = config.copy() - conf['mount']['serial']['port'] = 'foobar' - with pytest.raises(MountNotFound): - create_mount_from_config(conf) - - -@pytest.mark.without_mount -def test_bad_mount_driver(config): - conf = config.copy() - conf['mount']['driver'] = 'foobar' - with pytest.raises(MountNotFound): - create_mount_from_config(conf) - conf['mount']['driver'] = 1234 - with pytest.raises(MountNotFound): - create_mount_from_config(conf) - - -def test_create_mount_with_earth_location(conf_with_mount): - site_details = create_location_from_config(conf_with_mount) - earth_location = site_details['earth_location'] - assert isinstance(create_mount_from_config( - conf_with_mount, earth_location=earth_location), AbstractMount) is True - - -def test_create_mount_without_earth_location(conf_with_mount): - assert isinstance(create_mount_from_config( - conf_with_mount, earth_location=None), AbstractMount) is True diff --git a/pocs/tests/test_observation.py b/pocs/tests/test_observation.py deleted file mode 100644 index 30da3c2bc..000000000 --- a/pocs/tests/test_observation.py +++ /dev/null @@ -1,128 +0,0 @@ -import pytest - -from astropy import units as u -from pocs.scheduler.field import Field -from pocs.scheduler.observation import Observation - - -@pytest.fixture -def field(): - return Field('Test Observation', '20h00m43.7135s +22d42m39.0645s') - - -def test_create_observation_no_field(): - with pytest.raises(TypeError): - Observation() - - -def test_create_observation_bad_field(): - with pytest.raises(AssertionError): - Observation('20h00m43.7135s +22d42m39.0645s') - - -def test_create_observation_exptime_no_units(field): - with pytest.raises(TypeError): - Observation(field, exptime=1.0) - - -def test_create_observation_exptime_bad(field): - with pytest.raises(AssertionError): - Observation(field, exptime=0.0 * u.second) - - -def test_create_observation_exptime_minutes(field): - obs = Observation(field, exptime=5.0 * u.minute) - assert obs.exptime == 300 * u.second - - -def test_bad_priority(field): - with pytest.raises(AssertionError): - Observation(field, priority=-1) - - -def test_good_priority(field): - 
obs = Observation(field, priority=5.0) - assert obs.priority == 5.0 - - -def test_priority_str(field): - obs = Observation(field, priority="5") - assert obs.priority == 5.0 - - -def test_bad_min_set_combo(field): - with pytest.raises(AssertionError): - Observation(field, exp_set_size=7) - with pytest.raises(AssertionError): - Observation(field, min_nexp=57) - - -def test_small_sets(field): - obs = Observation(field, exptime=1 * u.second, min_nexp=1, exp_set_size=1) - assert obs.minimum_duration == 1 * u.second - assert obs.set_duration == 1 * u.second - - -def test_good_min_set_combo(field): - obs = Observation(field, min_nexp=21, exp_set_size=3) - assert isinstance(obs, Observation) - - -def test_default_min_duration(field): - obs = Observation(field) - assert obs.minimum_duration == 7200 * u.second - - -def test_default_set_duration(field): - obs = Observation(field) - assert obs.set_duration == 1200 * u.second - - -def test_print(field): - obs = Observation(field, exptime=17.5 * u.second, min_nexp=27, exp_set_size=9) - assert str(obs) == "Test Observation: 17.5 s exposures in blocks of 9, minimum 27, priority 100" - - -def test_seq_time(field): - obs = Observation(field, exptime=17.5 * u.second, min_nexp=27, exp_set_size=9) - assert obs.seq_time is None - - -def test_no_exposures(field): - obs = Observation(field, exptime=17.5 * u.second, min_nexp=27, exp_set_size=9) - assert obs.first_exposure is None - assert obs.last_exposure is None - assert obs.pointing_image is None - - -def test_last_exposure_and_reset(field): - obs = Observation(field, exptime=17.5 * u.second, min_nexp=27, exp_set_size=9) - status = obs.status() - assert status['current_exp'] == obs.current_exp_num - - # Mimic taking exposures - obs.merit = 112.5 - - for i in range(5): - obs.exposure_list['image_{}'.format(i)] = 'full_image_path_{}'.format(i) - - last = obs.last_exposure - assert isinstance(last, tuple) - assert obs.merit > 0.0 - assert obs.current_exp_num == 5 - - assert last[0] == 'image_4' - assert last[1] == 'full_image_path_4' - - assert isinstance(obs.first_exposure, tuple) - assert obs.first_exposure[0] == 'image_0' - assert obs.first_exposure[1] == 'full_image_path_0' - - obs.reset() - status2 = obs.status() - - assert status2['current_exp'] == 0 - assert status2['merit'] == 0.0 - assert obs.first_exposure is None - assert obs.last_exposure is None - assert obs.seq_time is None diff --git a/pocs/tests/test_pocs.py b/pocs/tests/test_pocs.py deleted file mode 100644 index e37baa42d..000000000 --- a/pocs/tests/test_pocs.py +++ /dev/null @@ -1,557 +0,0 @@ -import os -import threading -import time -import shutil - -import pytest -from astropy import units as u - -from pocs import hardware -from pocs.camera import create_cameras_from_config -from pocs.core import POCS -from pocs.dome import create_dome_from_config -from pocs.mount import create_mount_from_config -from pocs.observatory import Observatory -from pocs.scheduler import create_scheduler_from_config -from pocs.utils import CountdownTimer -from pocs.utils import current_time -from pocs.utils import error -from pocs.utils.location import create_location_from_config -from pocs.utils.messaging import PanMessaging - - -def wait_for_running(sub, max_duration=90): - """Given a message subscriber, wait for a RUNNING message.""" - timeout = CountdownTimer(max_duration) - while not timeout.expired(): - topic, msg_obj = sub.receive_message() - if msg_obj and 'RUNNING' == msg_obj.get('message'): - return True - return False - - -def wait_for_state(sub, state, 
max_duration=90): - """Given a message subscriber, wait for the specified state.""" - timeout = CountdownTimer(max_duration) - while not timeout.expired(): - topic, msg_obj = sub.receive_message() - if topic == 'STATUS' and msg_obj and msg_obj.get('state') == state: - return True - return False - - -@pytest.fixture(scope='function') -def cameras(config): - """Get the default cameras from the config.""" - return create_cameras_from_config(config) - - -@pytest.fixture(scope='function') -def scheduler(config): - site_details = create_location_from_config(config) - return create_scheduler_from_config(config, observer=site_details['observer']) - - -@pytest.fixture(scope='function') -def dome(config_with_simulated_dome): - return create_dome_from_config(config_with_simulated_dome) - - -@pytest.fixture(scope='function') -def mount(config_with_simulated_mount): - return create_mount_from_config(config_with_simulated_mount) - - -@pytest.fixture(scope='function') -def observatory(config, db_type, cameras, scheduler, mount): - observatory = Observatory( - config=config, - cameras=cameras, - mount=mount, - scheduler=scheduler, - ignore_local_config=True, - db_type=db_type - ) - return observatory - - -@pytest.fixture(scope='function') -def pocs(config, observatory): - os.environ['POCSTIME'] = '2016-08-13 13:00:00' - - pocs = POCS(observatory, - run_once=True, - config=config, - ignore_local_config=True) - - yield pocs - - pocs.power_down() - - -@pytest.fixture(scope='function') -def pocs_with_dome(config_with_simulated_dome, db_type, dome, mount): - os.environ['POCSTIME'] = '2016-08-13 13:00:00' - observatory = Observatory(config=config_with_simulated_dome, - dome=dome, - mount=mount, - ignore_local_config=True, - db_type=db_type - ) - - pocs = POCS(observatory, - run_once=True, - config=config_with_simulated_dome, - ignore_local_config=True) - - yield pocs - - pocs.power_down() - - -def test_bad_pandir_env(pocs): - pandir = os.getenv('PANDIR') - os.environ['PANDIR'] = '/foo/bar' - with pytest.raises(SystemExit): - POCS.check_environment() - os.environ['PANDIR'] = pandir - - -def test_bad_pocs_env(pocs): - pocs_dir = os.getenv('POCS') - os.environ['POCS'] = '/foo/bar' - with pytest.raises(SystemExit): - POCS.check_environment() - os.environ['POCS'] = pocs_dir - - -def test_make_log_dir(pocs): - log_dir = "{}/logs".format(os.getcwd()) - assert os.path.exists(log_dir) is False - - old_pandir = os.environ['PANDIR'] - os.environ['PANDIR'] = os.getcwd() - POCS.check_environment() - - assert os.path.exists(log_dir) is True - os.removedirs(log_dir) - - os.environ['PANDIR'] = old_pandir - - -def test_simple_simulator(pocs, caplog): - assert isinstance(pocs, POCS) - - assert pocs.is_initialized is not True - - with pytest.raises(AssertionError): - pocs.run() - - pocs.initialize() - assert pocs.is_initialized - - pocs.state = 'parking' - pocs.next_state = 'parking' - - assert pocs._lookup_trigger() == 'set_park' - - pocs.state = 'foo' - - assert pocs._lookup_trigger() == 'parking' - - caplog.records.clear() - assert pocs.has_free_space() - # Check that no messages were generated. - assert not any([ - rec.levelname not in ['WARNING', 'ERROR'] - for rec in caplog.records - ]) - - # Test low disk space warning by requiring fraction of currently available space. - current_space = (shutil.disk_usage(os.getenv('PANDIR')).free * u.byte).to(u.gigabyte) - assert pocs.has_free_space(required_space=current_space * 0.8) - # Check that it generated an error message. 
- assert any([ - rec.levelname == 'WARNING' and 'Low disk space' in rec.message - for rec in caplog.records - ]) - - caplog.records.clear() - - # Test no disk space with some ridiculous requirement (dated 2020 for posterity). - assert not pocs.has_free_space(required_space=1e9 * u.gigabyte) - # Check that it generated an error message. - assert any([ - rec.levelname == 'ERROR' and 'No disk space' in rec.message - for rec - in caplog.records - ]) - - assert pocs.is_safe() - - -def test_is_weather_and_dark_simulator(pocs): - pocs = pocs - pocs.initialize() - pocs.config['simulator'] = ['camera', 'mount', 'weather', 'night'] - os.environ['POCSTIME'] = '2016-08-13 13:00:00' - assert pocs.is_dark() is True - - os.environ['POCSTIME'] = '2016-08-13 23:00:00' - assert pocs.is_dark() is True - - pocs.config['simulator'] = ['camera', 'mount', 'weather'] - os.environ['POCSTIME'] = '2016-08-13 13:00:00' - assert pocs.is_dark() is True - - os.environ['POCSTIME'] = '2016-08-13 23:00:00' - assert pocs.is_dark() is False - - pocs.config['simulator'] = ['camera', 'mount', 'weather', 'night'] - assert pocs.is_weather_safe() is True - - -def test_wait_for_events_timeout(pocs): - del os.environ['POCSTIME'] - test_event = threading.Event() - - # Test timeout - with pytest.raises(error.Timeout): - pocs.wait_for_events(test_event, 1) - - # Test timeout - with pytest.raises(error.Timeout): - pocs.wait_for_events(test_event, 5 * u.second, sleep_delay=1) - - test_event = threading.Event() - - def set_event(): - test_event.set() - - # Mark as set in 1 second - t = threading.Timer(1.0, set_event) - t.start() - - # Wait for 10 seconds (should trip in 1 second) - pocs.wait_for_events(test_event, 10) - assert test_event.is_set() - - test_event = threading.Event() - - def set_event(): - while test_event.is_set() is False: - time.sleep(1) - - def interrupt(): - pocs._interrupted = True - - # Wait for 60 seconds (interrupts below) - t = threading.Timer(60.0, set_event) - t.start() - - # Interrupt - Time to test status and messaging - t2 = threading.Timer(3.0, interrupt) - - # Wait for 60 seconds (should interrupt from above) - start_time = current_time() - t2.start() - pocs.wait_for_events(test_event, 60, sleep_delay=1., status_interval=1, msg_interval=1) - end_time = current_time() - assert test_event.is_set() is False - assert (end_time - start_time).sec < 10 - test_event.set() - t.cancel() - t2.cancel() - - -def test_is_weather_safe_no_simulator(pocs): - pocs.initialize() - pocs.config['simulator'] = ['camera', 'mount', 'night'] - - # Set a specific time - os.environ['POCSTIME'] = '2016-08-13 23:00:00' - - # Insert a dummy weather record - pocs.db.insert_current('weather', {'safe': True}) - assert pocs.is_weather_safe() is True - - # Set a time 181 seconds later - os.environ['POCSTIME'] = '2016-08-13 23:05:01' - assert pocs.is_weather_safe() is False - - -def wait_for_message(sub, type=None, attr=None, value=None): - """Wait for a message of the specified type and contents.""" - assert (attr is None) == (value is None) - while True: - topic, msg_obj = sub.receive_message() - if not msg_obj: - continue - if type and topic != type: - continue - if not attr or attr not in msg_obj: - continue - if value and msg_obj[attr] != value: - continue - return topic, msg_obj - - -def test_run_wait_until_safe(observatory, cmd_publisher, msg_subscriber): - os.environ['POCSTIME'] = '2016-09-09 08:00:00' - - # Make sure DB is clear for current weather - observatory.db.clear_current('weather') - - def start_pocs(): - 
observatory.logger.info('start_pocs ENTER') - # Remove weather simulator, else it would always be safe. - observatory.config['simulator'] = hardware.get_all_names(without=['weather']) - - pocs = POCS(observatory, - messaging=True, safe_delay=5) - - pocs.observatory.scheduler.clear_available_observations() - pocs.observatory.scheduler.add_observation({'name': 'KIC 8462852', - 'position': '20h06m15.4536s +44d27m24.75s', - 'priority': '100', - 'exptime': 2, - 'min_nexp': 2, - 'exp_set_size': 2, - }) - - pocs.initialize() - pocs.logger.info('Starting observatory run') - assert pocs.is_weather_safe() is False - pocs.send_message('RUNNING') - pocs.run(run_once=True, exit_when_done=True) - assert pocs.is_weather_safe() is True - pocs.power_down() - observatory.logger.info('start_pocs EXIT') - - pocs_thread = threading.Thread(target=start_pocs, daemon=True) - pocs_thread.start() - - try: - # Wait for the RUNNING message, - assert wait_for_running(msg_subscriber) - - time.sleep(2) - # Insert a dummy weather record to break wait - observatory.db.insert_current('weather', {'safe': True}) - - assert wait_for_state(msg_subscriber, 'scheduling') - finally: - cmd_publisher.send_message('POCS-CMD', 'shutdown') - pocs_thread.join(timeout=30) - - assert pocs_thread.is_alive() is False - - -def test_unsafe_park(pocs): - pocs.initialize() - assert pocs.is_initialized is True - os.environ['POCSTIME'] = '2016-08-13 13:00:00' - assert pocs.state == 'sleeping' - pocs.get_ready() - assert pocs.state == 'ready' - pocs.schedule() - assert pocs.state == 'scheduling' - - # My time goes fast... - os.environ['POCSTIME'] = '2016-08-13 23:00:00' - pocs.config['simulator'] = ['camera', 'mount', 'weather', 'power'] - assert pocs.is_safe() is False - - assert pocs.state == 'parking' - pocs.set_park() - pocs.clean_up() - pocs.goto_sleep() - assert pocs.state == 'sleeping' - pocs.power_down() - - -def test_no_ac_power(pocs): - # Simulator makes AC power safe - assert pocs.has_ac_power() is True - - # Remove 'power' from simulator - pocs.config['simulator'].remove('power') - pocs.initialize() - - # With simulator removed the power should fail - assert pocs.has_ac_power() is False - - for v in [True, 12.4, 0., False]: - has_power = bool(v) - - # Add a fake power entry in data base - pocs.db.insert_current('power', {'main': v}) - - # Check for safe entry in database - assert pocs.has_ac_power() == has_power - assert pocs.is_safe() == has_power - - # Check for stale entry in database - assert pocs.has_ac_power(stale=0.1) is False - - # But double check it still matches longer entry - assert pocs.has_ac_power() == has_power - - # Remove entry and try again - pocs.db.clear_current('power') - assert pocs.has_ac_power() is False - - -def test_power_down_while_running(pocs): - assert pocs.connected is True - assert not pocs.observatory.has_dome - pocs.initialize() - pocs.get_ready() - assert pocs.state == 'ready' - pocs.power_down() - - assert pocs.state == 'parked' - assert pocs.connected is False - - -def test_power_down_dome_while_running(pocs_with_dome): - pocs = pocs_with_dome - assert pocs.connected is True - assert pocs.observatory.has_dome - assert not pocs.observatory.dome.is_connected - pocs.initialize() - assert pocs.observatory.dome.is_connected - pocs.get_ready() - assert pocs.state == 'ready' - pocs.power_down() - - assert pocs.state == 'parked' - assert pocs.connected is False - assert not pocs.observatory.dome.is_connected - - -def test_run_no_targets_and_exit(pocs): - os.environ['POCSTIME'] = '2016-08-13 23:00:00' - 
pocs.config['simulator'] = ['camera', 'mount', 'weather', 'night', 'power'] - pocs.state = 'sleeping' - - pocs.initialize() - pocs.observatory.scheduler.clear_available_observations() - assert pocs.is_initialized is True - pocs.run(exit_when_done=True, run_once=True) - assert pocs.state == 'sleeping' - - -def test_run_complete(pocs): - os.environ['POCSTIME'] = '2016-09-09 08:00:00' - pocs.config['simulator'] = ['camera', 'mount', 'weather', 'night', 'power'] - pocs.state = 'sleeping' - pocs._do_states = True - - pocs.observatory.scheduler.clear_available_observations() - pocs.observatory.scheduler.add_observation({'name': 'KIC 8462852', - 'position': '20h06m15.4536s +44d27m24.75s', - 'priority': '100', - 'exptime': 2, - 'min_nexp': 2, - 'exp_set_size': 2, - }) - - pocs.initialize() - assert pocs.is_initialized is True - - pocs.run(exit_when_done=True, run_once=True) - assert pocs.state == 'sleeping' - pocs.power_down() - - -def test_run_power_down_interrupt(observatory, cmd_publisher, msg_subscriber): - os.environ['POCSTIME'] = '2016-09-09 08:00:00' - - def start_pocs(): - observatory.logger.info('start_pocs ENTER') - pocs = POCS(observatory, messaging=True) - pocs.initialize() - pocs.observatory.scheduler.clear_available_observations() - pocs.observatory.scheduler.add_observation({'name': 'KIC 8462852', - 'position': '20h06m15.4536s +44d27m24.75s', - 'priority': '100', - 'exptime': 2, - 'min_nexp': 2, - 'exp_set_size': 2, - }) - pocs.logger.info('Starting observatory run') - pocs.run() - pocs.power_down() - observatory.logger.info('start_pocs EXIT') - - pocs_thread = threading.Thread(target=start_pocs, daemon=True) - pocs_thread.start() - - try: - assert wait_for_state(msg_subscriber, 'scheduling') - finally: - cmd_publisher.send_message('POCS-CMD', 'shutdown') - pocs_thread.join(timeout=30) - - assert pocs_thread.is_alive() is False - - -def test_pocs_park_to_ready_with_observations(pocs): - # We don't want to run_once here - pocs._run_once = False - - assert pocs.is_safe() is True - assert pocs.state == 'sleeping' - pocs.next_state = 'ready' - assert pocs.initialize() - assert pocs.goto_next_state() - assert pocs.state == 'ready' - assert pocs.goto_next_state() - assert pocs.observatory.current_observation is not None - pocs.next_state = 'parking' - assert pocs.goto_next_state() - assert pocs.state == 'parking' - assert pocs.observatory.current_observation is None - assert pocs.observatory.mount.is_parked - assert pocs.goto_next_state() - assert pocs.state == 'parked' - # Should be safe and still have valid observations so next state should - # be ready - assert pocs.goto_next_state() - assert pocs.state == 'ready' - pocs.power_down() - assert pocs.connected is False - - -def test_pocs_park_to_ready_without_observations(pocs): - - os.environ['POCSTIME'] = '2016-08-13 13:00:00' - - assert pocs.is_safe() is True - assert pocs.state == 'sleeping' - pocs.next_state = 'ready' - assert pocs.initialize() - assert pocs.goto_next_state() - assert pocs.state == 'ready' - assert pocs.goto_next_state() - assert pocs.observatory.current_observation is not None - pocs.next_state = 'parking' - assert pocs.goto_next_state() - assert pocs.state == 'parking' - assert pocs.observatory.current_observation is None - assert pocs.observatory.mount.is_parked - - # No valid obs - pocs.observatory.scheduler.clear_available_observations() - - # Since we don't have valid observations we will start sleeping for 30 - # minutes so send shutdown command first. 
- pub = PanMessaging.create_publisher(6500) - pub.send_message('POCS-CMD', 'shutdown') - assert pocs.goto_next_state() - assert pocs.state == 'parked' - pocs.power_down() - - assert pocs.connected is False - assert pocs.is_safe() is False diff --git a/pocs/tests/test_rs232.py b/pocs/tests/test_rs232.py deleted file mode 100644 index cbcedae38..000000000 --- a/pocs/tests/test_rs232.py +++ /dev/null @@ -1,226 +0,0 @@ -import io -import pytest -import serial -from serial import serialutil - -from pocs.utils import error -from pocs.utils import rs232 - -from pocs.tests.serial_handlers import NoOpSerial -from pocs.tests.serial_handlers import protocol_buffers -from pocs.tests.serial_handlers import protocol_hooked - - -def test_port_discovery(): - ports = rs232.get_serial_port_info() - assert isinstance(ports, list) - - -def test_missing_port(): - with pytest.raises(ValueError): - rs232.SerialData() - - -def test_non_existent_device(): - """Doesn't complain if it can't find the device.""" - port = '/dev/tty12345698765' - ser = rs232.SerialData(port=port) - assert not ser.is_connected - assert port == ser.name - # Can't connect to that device. - with pytest.raises(error.BadSerialConnection): - ser.connect() - assert not ser.is_connected - - -def test_detect_uninstalled_scheme(): - """If our handlers aren't installed, will detect unknown scheme.""" - # See https://pythonhosted.org/pyserial/url_handlers.html#urls for info on the - # standard schemes that are supported by PySerial. - with pytest.raises(ValueError): - # The no_op scheme references one of our test handlers, but it shouldn't be - # accessible unless we've added our package to the list to be searched. - rs232.SerialData(port='no_op://') - - -@pytest.fixture(scope='function') -def handler(): - # Install our package that contain the test handlers. - serial.protocol_handler_packages.append('pocs.tests.serial_handlers') - yield True - # Remove that package. - serial.protocol_handler_packages.remove('pocs.tests.serial_handlers') - - -def test_detect_bogus_scheme(handler): - """When our handlers are installed, will still detect unknown scheme.""" - with pytest.raises(ValueError) as excinfo: - # The scheme (the part before the ://) must be a Python module name, so use - # a string that can't be a module name. - rs232.SerialData(port='# bogus #://') - assert '# bogus #' in repr(excinfo.value) - - -def test_custom_logger(handler, fake_logger): - s0 = rs232.SerialData(port='no_op://', logger=fake_logger) - s0.logger.debug('Testing logger') - - -def test_basic_no_op(handler): - # Confirm we can create the SerialData object. - ser = rs232.SerialData(port='no_op://', name='a name', open_delay=0) - assert ser.name == 'a name' - - # Peek inside, it should have a NoOpSerial instance as member ser. - assert ser.ser - assert isinstance(ser.ser, NoOpSerial) - - # Open is automatically called by SerialData. - assert ser.is_connected - - # connect() is idempotent. - ser.connect() - assert ser.is_connected - - # Several passes of reading, writing, disconnecting and connecting. - for _ in range(3): - # no_op handler doesn't do any reading, analogous to /dev/null, which - # never produces any output. - assert '' == ser.read(retry_delay=0.01, retry_limit=2) - assert b'' == ser.read_bytes(size=1) - assert 0 == ser.write('abcdef') - ser.reset_input_buffer() - - # Disconnect from the serial port. - assert ser.is_connected - ser.disconnect() - assert not ser.is_connected - - # Should no longer be able to read or write. 
- with pytest.raises(AssertionError): - ser.read(retry_delay=0.01, retry_limit=1) - with pytest.raises(AssertionError): - ser.read_bytes(size=1) - with pytest.raises(AssertionError): - ser.write('a') - ser.reset_input_buffer() - - # And we should be able to reconnect. - assert not ser.is_connected - ser.connect() - assert ser.is_connected - - -def test_basic_io(handler): - protocol_buffers.ResetBuffers(b'abc\r\ndef\n') - ser = rs232.SerialData(port='buffers://', open_delay=0.01, retry_delay=0.01, - retry_limit=2) - - # Peek inside, it should have a BuffersSerial instance as member ser. - assert isinstance(ser.ser, protocol_buffers.BuffersSerial) - - # Can read two lines. Read the first as a sensor reading: - (ts, line) = ser.get_reading() - assert 'abc\r\n' == line - - # Read the second line from the read buffer. - assert 'def\n' == ser.read(retry_delay=0.1, retry_limit=10) - - # Another read will fail, having exhausted the contents of the read buffer. - assert '' == ser.read() - - # Can write to the "device", the handler will accumulate the results. - assert 5 == ser.write('def\r\n') - assert 6 == ser.write('done\r\n') - - assert b'def\r\ndone\r\n' == protocol_buffers.GetWBufferValue() - - # If we add more to the read buffer, we can read again. - protocol_buffers.SetRBufferValue(b'line1\r\nline2\r\ndangle') - assert 'line1\r\n' == ser.read(retry_delay=10, retry_limit=20) - assert 'line2\r\n' == ser.read(retry_delay=10, retry_limit=20) - assert 'dangle' == ser.read(retry_delay=10, retry_limit=20) - - ser.disconnect() - assert not ser.is_connected - - -class HookedSerialHandler(NoOpSerial): - """Sources a line of text repeatedly, and sinks an infinite amount of input.""" - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.r_buffer = io.BytesIO( - b"{'a': 12, 'b': [1, 2, 3, 4], 'c': {'d': 'message'}}\r\n") - - @property - def in_waiting(self): - """The number of input bytes available to read immediately.""" - if not self.is_open: - raise serialutil.portNotOpenError - total = len(self.r_buffer.getbuffer()) - avail = total - self.r_buffer.tell() - # If at end of the stream, reset the stream. - if avail <= 0: - self.r_buffer.seek(0) - avail = total - return avail - - def open(self): - """Open port. - - Raises: - SerialException if the port cannot be opened. - """ - self.is_open = True - - def close(self): - """Close port immediately.""" - self.is_open = False - - def read(self, size=1): - """Read until the end of self.r_buffer, then seek to beginning of self.r_buffer.""" - if not self.is_open: - raise serialutil.portNotOpenError - # If at end of the stream, reset the stream. - return self.r_buffer.read(min(size, self.in_waiting)) - - def write(self, data): - """Write data to bitbucket.""" - if not self.is_open: - raise serialutil.portNotOpenError - return len(data) - - -def test_hooked_io(handler): - protocol_hooked.Serial = HookedSerialHandler - ser = rs232.SerialData(port='hooked://', open_delay=0) - - # Peek inside, it should have a PySerial instance as member ser. - assert ser.ser - assert ser.ser.__class__.__name__ == 'HookedSerialHandler' - print(str(ser.ser)) - - # Open is automatically called by SerialData. - assert ser.is_connected - - # Can read many identical lines from ser. 
- first_line = None - for n in range(20): - line = ser.read(retry_delay=10, retry_limit=20) - if first_line: - assert line == first_line - else: - first_line = line - assert 'message' in line - reading = ser.get_reading() - assert reading[1] == line - - # Can write to the "device" many times. - line = 'abcdefghijklmnop' * 30 - line = line + '\r\n' - for n in range(20): - assert len(line) == ser.write(line) - - ser.disconnect() - assert not ser.is_connected diff --git a/pocs/tests/test_scheduler.py b/pocs/tests/test_scheduler.py deleted file mode 100644 index 3aa578aa6..000000000 --- a/pocs/tests/test_scheduler.py +++ /dev/null @@ -1,38 +0,0 @@ -import pytest - -from pocs.scheduler import create_scheduler_from_config, BaseScheduler -from pocs.utils import error -from pocs.utils.location import create_location_from_config - - -def test_bad_scheduler_type(config): - conf = config.copy() - conf['scheduler']['type'] = 'foobar' - site_details = create_location_from_config(config) - with pytest.raises(error.NotFound): - create_scheduler_from_config(conf, observer=site_details['observer']) - - -def test_mount_no_config_param(): - # Will fail because it's not a simulator and no real mount attached - scheduler = create_scheduler_from_config() - assert scheduler - - -def test_bad_scheduler_fields_file(config): - conf = config.copy() - conf['scheduler']['fields_file'] = 'foobar' - site_details = create_location_from_config(config) - with pytest.raises(error.NotFound): - create_scheduler_from_config(conf, observer=site_details['observer']) - - -def test_no_observer(config): - assert isinstance(create_scheduler_from_config(config, observer=None), BaseScheduler) is True - - -def test_no_scheduler_in_config(config): - conf = config.copy() - del conf['scheduler'] - site_details = create_location_from_config(conf) - assert create_scheduler_from_config(conf, observer=site_details['observer']) is None diff --git a/pocs/tests/test_social_messaging.py b/pocs/tests/test_social_messaging.py deleted file mode 100644 index 1a359c030..000000000 --- a/pocs/tests/test_social_messaging.py +++ /dev/null @@ -1,100 +0,0 @@ -import pytest -import tweepy -import requests -import unittest.mock - -from pocs.utils.social_twitter import SocialTwitter -from pocs.utils.social_slack import SocialSlack - - -@pytest.fixture(scope='module') -def twitter_config(): - twitter_config = {'consumer_key': 'mock_consumer_key', 'consumer_secret': 'mock_consumer_secret', 'access_token': 'mock_access_token', 'access_token_secret': 'access_token_secret'} - return twitter_config - - -@pytest.fixture(scope='module') -def slack_config(): - slack_config = {'webhook_url': 'mock_webhook_url', 'output_timestamp': False} - return slack_config - - -# Twitter sink tests -def test_no_consumer_key(twitter_config): - with unittest.mock.patch.dict(twitter_config), pytest.raises(ValueError) as ve: - del twitter_config['consumer_key'] - SocialTwitter(**twitter_config) - assert False # We don't reach this point - assert 'consumer_key parameter is not defined.' == str(ve.value) - - -def test_no_consumer_secret(twitter_config): - with unittest.mock.patch.dict(twitter_config), pytest.raises(ValueError) as ve: - del twitter_config['consumer_secret'] - SocialTwitter(**twitter_config) - assert False # We don't reach this point - assert 'consumer_secret parameter is not defined.' 
== str(ve.value) - - -def test_no_access_token(twitter_config): - with unittest.mock.patch.dict(twitter_config), pytest.raises(ValueError) as ve: - del twitter_config['access_token'] - SocialTwitter(**twitter_config) - assert False # We don't reach this point - assert 'access_token parameter is not defined.' == str(ve.value) - - -def test_no_access_token_secret(twitter_config): - with unittest.mock.patch.dict(twitter_config), pytest.raises(ValueError) as ve: - del twitter_config['access_token_secret'] - SocialTwitter(**twitter_config) - assert False # We don't reach this point - assert 'access_token_secret parameter is not defined.' == str(ve.value) - - -def test_send_message_twitter(twitter_config): - with unittest.mock.patch.object(tweepy.API, 'update_status') as mock_update_status: - social_twitter = SocialTwitter(**twitter_config) - mock_message = "mock_message" - mock_timestamp = "mock_timestamp" - social_twitter.send_message(mock_message, mock_timestamp) - - mock_update_status.assert_called_once_with('{} - {}'.format(mock_message, mock_timestamp)) - - -def test_send_message_twitter_no_timestamp(twitter_config): - with unittest.mock.patch.dict(twitter_config, {'output_timestamp': False}), unittest.mock.patch.object(tweepy.API, 'update_status') as mock_update_status: - social_twitter = SocialTwitter(**twitter_config) - mock_message = "mock_message" - mock_timestamp = "mock_timestamp" - social_twitter.send_message(mock_message, mock_timestamp) - - mock_update_status.assert_called_once_with(mock_message) - - -# Slack sink tests -def test_no_webhook_url(slack_config): - with unittest.mock.patch.dict(slack_config), pytest.raises(ValueError) as ve: - del slack_config['webhook_url'] - slack_config = SocialSlack(**slack_config) - assert 'webhook_url parameter is not defined.' == str(ve.value) - - -def test_send_message_slack(slack_config): - with unittest.mock.patch.object(requests, 'post') as mock_post: - social_slack = SocialSlack(**slack_config) - mock_message = "mock_message" - mock_timestamp = "mock_timestamp" - social_slack.send_message(mock_message, mock_timestamp) - - mock_post.assert_called_once_with(slack_config['webhook_url'], json={'text': mock_message}) - - -def test_send_message_slack_timestamp(slack_config): - with unittest.mock.patch.dict(slack_config, {'output_timestamp': True}), unittest.mock.patch.object(requests, 'post') as mock_post: - social_slack = SocialSlack(**slack_config) - mock_message = "mock_message" - mock_timestamp = "mock_timestamp" - social_slack.send_message(mock_message, mock_timestamp) - - mock_post.assert_called_once_with(slack_config['webhook_url'], json={'text': '{} - {}'.format(mock_message, mock_timestamp)}) diff --git a/pocs/tests/test_theskyx_utils.py b/pocs/tests/test_theskyx_utils.py deleted file mode 100644 index eb9a1c371..000000000 --- a/pocs/tests/test_theskyx_utils.py +++ /dev/null @@ -1,82 +0,0 @@ -import os -import pytest - -from mocket import Mocket - -from pocs.utils import error - -from pocs.utils.theskyx import TheSkyX - - -@pytest.fixture(scope="function") -def skyx(request): - """Create TheSkyX class but don't connect. - - If running with a real connection to TheSkyX then the Mocket will - be disabled here.
- """ - - # Use `--with-hardware thesky` on cli to run without mock - Mocket.enable('theskyx', '{}/pocs/tests/data'.format(os.getenv('POCS'))) - if 'theskyx' in request.config.getoption('--with-hardware'): - Mocket.disable() - - theskyx = TheSkyX(connect=False) - - yield theskyx - - -def test_default_connect(request): - """Test connection to TheSkyX - - If not running with a real connection then use Mocket - """ - # Use `--with-hardware thesky` on cli to run without mock - if 'theskyx' not in request.config.getoption('--with-hardware'): - Mocket.enable('theskyx', '{}/pocs/tests/data'.format(os.getenv('POCS'))) - - skyx = TheSkyX() - assert skyx.is_connected is True - - -def test_no_connect_write(skyx): - with pytest.raises(error.BadConnection): - skyx.write('/* Java Script */') - - -def test_no_connect_read(skyx): - with pytest.raises(error.BadConnection): - skyx.read() - - -def test_write_bad_key(skyx): - skyx.connect() - skyx.write('FOOBAR') - with pytest.raises(error.TheSkyXKeyError): - skyx.read() - - -def test_write_no_command(skyx): - skyx.connect() - skyx.write('/* Java Script */') - assert skyx.read() == 'undefined' - - -def test_get_build(skyx): - js = ''' -/* Java Script */ -var Out; -Out=Application.version -''' - skyx.connect() - skyx.write(js) - assert skyx.read().startswith('10.5') - - -def test_error(skyx): - skyx.connect() - skyx.write(''' -/* Java Script */ -sky6RASCOMTele.FindHome() -''') - with pytest.raises(error.TheSkyXError): - skyx.read() diff --git a/pocs/tests/utils/__init__.py b/pocs/tests/utils/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/pocs/tests/utils/google/__init__.py b/pocs/tests/utils/google/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/pocs/tests/utils/test_fits_utils.py b/pocs/tests/utils/test_fits_utils.py deleted file mode 100644 index dab1ada7f..000000000 --- a/pocs/tests/utils/test_fits_utils.py +++ /dev/null @@ -1,67 +0,0 @@ -import os -import pytest -import subprocess -import shutil - -from astropy.io.fits import Header - -from pocs.utils.images import fits as fits_utils - - -@pytest.fixture -def solved_fits_file(data_dir): - return os.path.join(data_dir, 'solved.fits.fz') - - -def test_wcsinfo(solved_fits_file): - wcsinfo = fits_utils.get_wcsinfo(solved_fits_file) - - assert 'wcs_file' in wcsinfo - assert wcsinfo['ra_center'].value == 303.206422334 - - -def test_fpack(solved_fits_file): - new_file = solved_fits_file.replace('solved', 'solved_copy') - copy_file = shutil.copyfile(solved_fits_file, new_file) - info = os.stat(copy_file) - assert info.st_size > 0. 
- - uncompressed = fits_utils.funpack(copy_file, verbose=True) - assert os.stat(uncompressed).st_size > info.st_size - - compressed = fits_utils.fpack(uncompressed, verbose=True) - assert os.stat(compressed).st_size == info.st_size - - os.remove(copy_file) - - -def test_getheader(solved_fits_file): - header = fits_utils.getheader(solved_fits_file) - assert isinstance(header, Header) - assert header['IMAGEID'] == 'PAN001_XXXXXX_20160909T081152' - - -def test_getval(solved_fits_file): - img_id = fits_utils.getval(solved_fits_file, 'IMAGEID') - assert img_id == 'PAN001_XXXXXX_20160909T081152' - - -def test_solve_field(solved_fits_file): - proc = fits_utils.solve_field(solved_fits_file, verbose=True) - assert isinstance(proc, subprocess.Popen) - proc.wait() - assert proc.returncode == 0 - - -def test_solve_options(solved_fits_file): - proc = fits_utils.solve_field( - solved_fits_file, solve_opts=['--guess-scale'], verbose=False) - assert isinstance(proc, subprocess.Popen) - proc.wait() - assert proc.returncode == 0 - - -def test_solve_bad_field(solved_fits_file): - proc = fits_utils.solve_field('Foo', verbose=True) - outs, errs = proc.communicate() - assert 'ERROR' in errs diff --git a/pocs/tests/utils/test_focus_utils.py b/pocs/tests/utils/test_focus_utils.py deleted file mode 100644 index f3d89db1c..000000000 --- a/pocs/tests/utils/test_focus_utils.py +++ /dev/null @@ -1,50 +0,0 @@ -import os -import pytest - -from astropy.io import fits - -from pocs.utils.images import focus as focus_utils - - -def test_vollath_f4(data_dir): - data = fits.getdata(os.path.join(data_dir, 'unsolved.fits')) - data = focus_utils.mask_saturated(data) - assert focus_utils.vollath_F4(data) == pytest.approx(14667.207897717599) - assert focus_utils.vollath_F4(data, axis='Y') == pytest.approx(14380.343807477504) - assert focus_utils.vollath_F4(data, axis='X') == pytest.approx(14954.071987957694) - with pytest.raises(ValueError): - focus_utils.vollath_F4(data, axis='Z') - - -def test_focus_metric_default(data_dir): - data = fits.getdata(os.path.join(data_dir, 'unsolved.fits')) - data = focus_utils.mask_saturated(data) - assert focus_utils.focus_metric(data) == pytest.approx(14667.207897717599) - assert focus_utils.focus_metric(data, axis='Y') == pytest.approx(14380.343807477504) - assert focus_utils.focus_metric(data, axis='X') == pytest.approx(14954.071987957694) - with pytest.raises(ValueError): - focus_utils.focus_metric(data, axis='Z') - - -def test_focus_metric_vollath(data_dir): - data = fits.getdata(os.path.join(data_dir, 'unsolved.fits')) - data = focus_utils.mask_saturated(data) - assert focus_utils.focus_metric( - data, merit_function='vollath_F4') == pytest.approx(14667.207897717599) - assert focus_utils.focus_metric( - data, - merit_function='vollath_F4', - axis='Y') == pytest.approx(14380.343807477504) - assert focus_utils.focus_metric( - data, - merit_function='vollath_F4', - axis='X') == pytest.approx(14954.071987957694) - with pytest.raises(ValueError): - focus_utils.focus_metric(data, merit_function='vollath_F4', axis='Z') - - -def test_focus_metric_bad_string(data_dir): - data = fits.getdata(os.path.join(data_dir, 'unsolved.fits')) - data = focus_utils.mask_saturated(data) - with pytest.raises(KeyError): - focus_utils.focus_metric(data, merit_function='NOTAMERITFUNCTION') diff --git a/pocs/tests/utils/test_image_utils.py b/pocs/tests/utils/test_image_utils.py deleted file mode 100644 index 9f80cdacd..000000000 --- a/pocs/tests/utils/test_image_utils.py +++ /dev/null @@ -1,115 +0,0 @@ -import os 
-import numpy as np -import pytest -import shutil -import tempfile -from glob import glob - -from pocs.utils import images as img_utils -from pocs.utils import error - - -def test_make_images_dir(save_environ): - assert img_utils.make_images_dir() - - # Invalid parent directory for 'images'. - os.environ['PANDIR'] = '/dev/null/' - with pytest.warns(UserWarning): - assert img_utils.make_images_dir() is None - - # Valid parents for 'images' that need to be created. - with tempfile.TemporaryDirectory() as tmpdir: - parent = os.path.join(tmpdir, 'some', 'dirs') - imgdir = os.path.join(parent, 'images') - os.environ['PANDIR'] = parent - assert img_utils.make_images_dir() == imgdir - - -def test_crop_data(): - ones = np.ones((201, 201)) - assert ones.sum() == 40401. - - cropped01 = img_utils.crop_data(ones, verbose=True) - assert cropped01.sum() == 40000. - - cropped02 = img_utils.crop_data(ones, verbose=True, box_width=10) - assert cropped02.sum() == 100. - - cropped03 = img_utils.crop_data(ones, verbose=True, box_width=6, center=(50, 50)) - assert cropped03.sum() == 36. - - -def test_make_pretty_image(solved_fits_file, tiny_fits_file, save_environ): - # Not a valid file type (can't automatically handle .fits.fz files). - with pytest.warns(UserWarning, match='File must be'): - assert not img_utils.make_pretty_image(solved_fits_file) - - # Make a dir and put test image files in it. - with tempfile.TemporaryDirectory() as tmpdir: - fz_file = os.path.join(tmpdir, os.path.basename(solved_fits_file)) - fits_file = os.path.join(tmpdir, os.path.basename(tiny_fits_file)) - # TODO Add a small CR2 file to our sample image files. - - # Can't operate on non-existent files. - with pytest.warns(UserWarning, match="File doesn't exist"): - assert not img_utils.make_pretty_image(fits_file) - - # Copy the files. - shutil.copy(solved_fits_file, tmpdir) - shutil.copy(tiny_fits_file, tmpdir) - - # Not a valid file type (can't automatically handle fits.fz files). - with pytest.warns(UserWarning): - assert not img_utils.make_pretty_image(fz_file) - - # Can handle the fits file, creating the images dir for linking - # the latest image. - imgdir = os.path.join(tmpdir, 'images') - assert not os.path.isdir(imgdir) - os.environ['PANDIR'] = tmpdir - - pretty = img_utils.make_pretty_image(fits_file, link_latest=True) - assert pretty - assert os.path.isfile(pretty) - assert os.path.isdir(imgdir) - latest = os.path.join(imgdir, 'latest.jpg') - assert os.path.isfile(latest) - os.remove(latest) - os.rmdir(imgdir) - - # Try again, but without link_latest.
- pretty = img_utils.make_pretty_image(fits_file, title='some text') - assert pretty - assert os.path.isfile(pretty) - assert not os.path.isdir(imgdir) - - -@pytest.mark.skipif( - "TRAVIS" in os.environ and os.environ["TRAVIS"] == "true", - reason="Skipping this test on Travis CI.") -def test_make_pretty_image_cr2_fail(): - with tempfile.TemporaryDirectory() as tmpdir: - tmpfile = os.path.join(tmpdir, 'bad.cr2') - with open(tmpfile, 'w') as f: - f.write('not an image file') - with pytest.raises(error.InvalidCommand): - img_utils.make_pretty_image(tmpfile, title='some text', link_latest=False) - with pytest.raises(error.InvalidCommand): - img_utils.make_pretty_image(tmpfile, verbose=True) - - -def test_clean_observation_dir(data_dir): - # First make a dir and put some files in it - with tempfile.TemporaryDirectory() as tmpdir: - # Copy fits files - for f in glob('{}/solved.*'.format(data_dir)): - shutil.copy(f, tmpdir) - - assert len(glob('{}/solved.*'.format(tmpdir))) == 2 - - # Make some jpgs - for f in glob('{}/*.fits'.format(tmpdir)): - img_utils.make_pretty_image(f) - - # Cleanup - img_utils.clean_observation_dir(tmpdir, verbose=True) diff --git a/pocs/tests/utils/test_logger.py b/pocs/tests/utils/test_logger.py deleted file mode 100644 index 8682b4ee6..000000000 --- a/pocs/tests/utils/test_logger.py +++ /dev/null @@ -1,105 +0,0 @@ -import pytest - -from pocs.utils.logger import field_name_to_key -from pocs.utils.logger import format_has_reference_keys -from pocs.utils.logger import logger_msg_formatter - - -def test_field_name_to_key(): - assert not field_name_to_key('.') - assert not field_name_to_key('[') - assert field_name_to_key('abc') == 'abc' - assert field_name_to_key(' abc ') == ' abc ' - assert field_name_to_key('abc.def') == 'abc' - assert field_name_to_key('abc[1].def') == 'abc' - - -def test_logger_msg_formatter_1_dict(): - d = dict(abc='def', xyz=123) - - tests = [ - # Single anonymous reference, satisfied by the entire dict. - ('{}', "{'abc': 'def', 'xyz': 123}"), - - # Single anonymous reference, satisfied by the entire dict. - ('{!r}', "{'abc': 'def', 'xyz': 123}"), - - # Position zero references, satisfied by the entire dict. - ('{0} {0}', "{'abc': 'def', 'xyz': 123} {'abc': 'def', 'xyz': 123}"), - - # Reference to a valid key in the dict. - ('{xyz}', "123"), - - # Invalid modern reference, so %s format applied. - ('%s {1}', "{'abc': 'def', 'xyz': 123} {1}"), - - # Valid legacy format applied to whole dict. - ('%r', "{'abc': 'def', 'xyz': 123}"), - ('%%', "%"), - ] - - for fmt, msg in tests: - assert logger_msg_formatter(fmt, d) == msg, fmt - - # Now tests with entirely invalid formats, so warnings should be issued. - tests = [ - '%(2)s', - '{def}', - '{def', - 'def}', - '%d', - # Bogus references either way. - '{0} {1} %(2)s' - ] - - for fmt in tests: - with pytest.warns(UserWarning): - assert logger_msg_formatter(fmt, d) == fmt - - -def test_logger_msg_formatter_1_non_dict(): - d = ['abc', 123] - - tests = [ - # Single anonymous reference, satisfied by first element. - ('{}', "abc"), - - # Single anonymous reference, satisfied by first element. - ('{!r}', "'abc'"), - - # Position references, satisfied by elements. - ('{1} {0!r}', "123 'abc'"), - - # Valid modern reference, %s ignored. - ('%s {1}', "%s 123"), - - # Valid legacy format applied to whole list. - ('%r', "['abc', 123]"), - - # Valid legacy format applied to whole list. 
- ('%s', "['abc', 123]"), - ] - - for fmt, msg in tests: - assert logger_msg_formatter(fmt, d) == msg, fmt - - # Now tests with entirely invalid formats, so warnings should be issued. - tests = [ - # We only have two args, so a reference to a third should fail. - '{2}', - '%(2)s', - # Unknown key - '{def}', - '%(def)s', - # Malformed key - '{2', - '{', - '2}', - '}', - '{}{}{}', - '%d', - ] - - for fmt in tests: - with pytest.warns(UserWarning): - assert logger_msg_formatter(fmt, d) == fmt diff --git a/pocs/tests/utils/test_polar_alignment.py b/pocs/tests/utils/test_polar_alignment.py deleted file mode 100644 index 8073cbf02..000000000 --- a/pocs/tests/utils/test_polar_alignment.py +++ /dev/null @@ -1,43 +0,0 @@ -import pytest - -from matplotlib.figure import Figure -from pocs.utils.images import polar_alignment as pa_utils - - -@pytest.fixture -def pole_fits_file(data_dir): - return '{}/pole.fits'.format(data_dir) - - -@pytest.fixture -def rotate_fits_file(data_dir): - return '{}/rotation.fits'.format(data_dir) - - -def test_analyze_polar(pole_fits_file): - x, y = pa_utils.analyze_polar_rotation(pole_fits_file) - - # Note that fits file has been cropped but values are - # based on the full WCS - assert x == pytest.approx(2885.621843270767) - assert y == pytest.approx(1897.7483982446474) - - -def test_analyze_rotation(rotate_fits_file): - x, y = pa_utils.analyze_ra_rotation(rotate_fits_file) - - assert x == pytest.approx(187) - assert y == pytest.approx(25) - - -def test_plot_center(pole_fits_file, rotate_fits_file): - pole_center = pa_utils.analyze_polar_rotation(pole_fits_file) - rotate_center = pa_utils.analyze_ra_rotation(rotate_fits_file) - - fig = pa_utils.plot_center( - pole_fits_file, - rotate_fits_file, - pole_center, - rotate_center - ) - assert isinstance(fig, Figure) diff --git a/pocs/tests/utils/test_utils.py b/pocs/tests/utils/test_utils.py deleted file mode 100644 index a03ca5655..000000000 --- a/pocs/tests/utils/test_utils.py +++ /dev/null @@ -1,275 +0,0 @@ -import os -import pytest -import signal -import time -from datetime import datetime as dt -from astropy import units as u - -from pocs.utils import current_time -from pocs.utils import DelaySigTerm -from pocs.utils import listify -from pocs.utils import load_module -from pocs.utils import CountdownTimer -from pocs.utils import error -from pocs.camera import list_connected_cameras -from pocs.utils.library import load_library as load_c_library - - -def test_error(capsys): - with pytest.raises(error.PanError) as e_info: - raise error.PanError(msg='Testing message') - - assert str(e_info.value) == 'PanError: Testing message' - - with pytest.raises(error.PanError) as e_info: - raise error.PanError() - - assert str(e_info.value) == 'PanError' - - with pytest.raises(SystemExit) as e_info: - raise error.PanError(msg="Testing exit", exit=True) - assert e_info.type == SystemExit - assert capsys.readouterr().out.strip() == 'TERMINATING: Testing exit' - - with pytest.raises(SystemExit) as e_info: - raise error.PanError(exit=True) - assert e_info.type == SystemExit - assert capsys.readouterr().out.strip() == 'TERMINATING: No reason specified' - - -def test_bad_load_module(): - with pytest.raises(error.NotFound): - load_module('FOOBAR') - - -def test_load_c_library(): - # Called without a `path` this will use find_library to locate libc. 
- libc = load_c_library('c') - assert libc._name[:4] == 'libc' - - -def test_listify(): - assert listify(12) == [12] - assert listify([1, 2, 3]) == [1, 2, 3] - - -def test_empty_listify(): - assert listify(None) == [] - - -def test_pretty_time(): - t0 = '2016-08-13 10:00:00' - os.environ['POCSTIME'] = t0 - - t1 = current_time(pretty=True) - assert t1 == t0 - - # This will increment one second - see docs - t2 = current_time(flatten=True) - assert t2 != t0 - assert t2 == '20160813T100001' - - # This will increment one second - see docs - t3 = current_time(datetime=True) - assert t3 == dt(2016, 8, 13, 10, 0, 2) - - -def test_list_connected_cameras(): - ports = list_connected_cameras() - assert isinstance(ports, list) - - -def test_has_camera_ports(): - ports = list_connected_cameras() - assert isinstance(ports, list) - - for port in ports: - assert port.startswith('usb:') - - -def test_countdown_timer_bad_input(): - with pytest.raises(ValueError): - assert CountdownTimer('d') - - with pytest.raises(ValueError): - assert CountdownTimer(current_time()) - - with pytest.raises(AssertionError): - assert CountdownTimer(-1) - - -def test_countdown_timer_non_blocking(): - timer = CountdownTimer(0) - assert timer.is_non_blocking - assert timer.time_left() == 0 - - for arg, expected_duration in [(2, 2.0), (0.5, 0.5), (1 * u.second, 1.0)]: - timer = CountdownTimer(arg) - assert timer.duration == expected_duration - - -def test_countdown_timer(): - count_time = 1 - timer = CountdownTimer(count_time) - assert timer.time_left() > 0 - assert timer.expired() is False - assert timer.is_non_blocking is False - - counter = 0. - while timer.time_left() > 0: - time.sleep(0.1) - counter += 0.1 - - assert counter == pytest.approx(1) - assert timer.time_left() == 0 - assert timer.expired() is True - - -def test_delay_of_sigterm_with_nosignal(): - orig_sigterm_handler = signal.getsignal(signal.SIGTERM) - - with DelaySigTerm(): - assert signal.getsignal(signal.SIGTERM) != orig_sigterm_handler - - assert signal.getsignal(signal.SIGTERM) == orig_sigterm_handler - - -def test_delay_of_sigterm_with_handled_signal(): - """Confirm that another type of signal can be handled. - - In this test we'll send SIGCHLD, which should immediately call the - signal_handler the test installs, demonstrating that only SIGTERM - is affected by this DelaySigTerm. - """ - test_signal = signal.SIGCHLD - - # Booleans to keep track of how far we've gotten. - before_signal = False - after_signal = False - signal_handled = False - after_with = False - - def signal_handler(signum, frame): - assert before_signal - - nonlocal signal_handled - assert not signal_handled - signal_handled = True - - assert not after_signal - - old_test_signal_handler = signal.getsignal(test_signal) - orig_sigterm_handler = signal.getsignal(signal.SIGTERM) - try: - # Install our handler. - signal.signal(test_signal, signal_handler) - - with DelaySigTerm(): - assert signal.getsignal(signal.SIGTERM) != orig_sigterm_handler - before_signal = True - # Send the test signal. It should immediately - # call our handler. 
- os.kill(os.getpid(), test_signal) - assert signal_handled - after_signal = True - - after_with = True - assert signal.getsignal(signal.SIGTERM) == orig_sigterm_handler - finally: - assert before_signal - assert signal_handled - assert after_signal - assert after_with - assert signal.getsignal(signal.SIGTERM) == orig_sigterm_handler - signal.signal(test_signal, old_test_signal_handler) - - -def test_delay_of_sigterm_with_raised_exception(): - """Confirm that raising an exception inside the handler is OK.""" - test_signal = signal.SIGCHLD - - # Booleans to keep track of how far we've gotten. - before_signal = False - after_signal = False - signal_handled = False - exception_caught = False - - def signal_handler(signum, frame): - assert before_signal - - nonlocal signal_handled - assert not signal_handled - signal_handled = True - - assert not after_signal - raise UserWarning() - - old_test_signal_handler = signal.getsignal(test_signal) - orig_sigterm_handler = signal.getsignal(signal.SIGTERM) - try: - # Install our handler. - signal.signal(test_signal, signal_handler) - - with DelaySigTerm(): - assert signal.getsignal(signal.SIGTERM) != orig_sigterm_handler - before_signal = True - # Send the test signal. It should immediately - # call our handler. - os.kill(os.getpid(), test_signal) - # Should not reach this point because signal_handler() should - # be called because we called: - # signal.signal(other-handler, signal_handler) - after_signal = True - assert False, "Should not get here!" - except UserWarning: - assert before_signal - assert signal_handled - assert not after_signal - assert not exception_caught - assert signal.getsignal(signal.SIGTERM) == orig_sigterm_handler - exception_caught = True - finally: - # Restore old handler before asserts. - signal.signal(test_signal, old_test_signal_handler) - - assert before_signal - assert signal_handled - assert not after_signal - assert exception_caught - assert signal.getsignal(signal.SIGTERM) == orig_sigterm_handler - - -def test_delay_of_sigterm_with_sigterm(): - """Confirm that SIGTERM is in fact delayed.""" - - # Booleans to keep track of how far we've gotten. - before_signal = False - after_signal = False - signal_handled = False - - def signal_handler(signum, frame): - assert before_signal - assert after_signal - - nonlocal signal_handled - assert not signal_handled - signal_handled = True - - orig_sigterm_handler = signal.getsignal(signal.SIGTERM) - try: - # Install our handler. - signal.signal(signal.SIGTERM, signal_handler) - - with DelaySigTerm(): - before_signal = True - # Send SIGTERM. It should not call the handler yet. - os.kill(os.getpid(), signal.SIGTERM) - assert not signal_handled - after_signal = True - - assert signal.getsignal(signal.SIGTERM) == signal_handler - assert before_signal - assert after_signal - assert signal_handled - finally: - signal.signal(signal.SIGTERM, orig_sigterm_handler) diff --git a/pocs/utils/__init__.py b/pocs/utils/__init__.py deleted file mode 100644 index febe2f4f2..000000000 --- a/pocs/utils/__init__.py +++ /dev/null @@ -1,409 +0,0 @@ -import contextlib -import os -import shutil -import signal -import time - -from astropy import units as u -from astropy.coordinates import AltAz -from astropy.coordinates import ICRS -from astropy.coordinates import SkyCoord -from astropy.time import Time -from astropy.utils import resolve_name - - -def current_time(flatten=False, datetime=False, pretty=False): - """ Convenience method to return the "current" time according to the system. 
- - Note: - If the ``$POCSTIME`` environment variable is set then this will return - the time given in the variable. This is used for setting specific times - during testing. After checking the value of POCSTIME the environment - variable will also be incremented by one second so that subsequent - calls to this function will generate monotonically increasing times. - - Operation of POCS from `$POCS/bin/pocs_shell` will clear the POCSTIME - variable. - - Note: - The time returned from this function is **not** timezone aware. All times - are UTC. - - - .. doctest:: - - >>> os.environ['POCSTIME'] = '1999-12-31 23:59:59' - >>> party_time = current_time(pretty=True) - >>> party_time - '1999-12-31 23:59:59' - - # Next call is one second later - >>> y2k = current_time(pretty=True) - >>> y2k - '2000-01-01 00:00:00' - - >>> del os.environ['POCSTIME'] - >>> from pocs.utils import current_time - >>> now = current_time() - >>> now # doctest: +SKIP -