diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..3b4e477
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,7 @@
+*.*~
+*_flymake.py
+*.pyc
+_build
+_static
+_templates
+
diff --git a/COPYING b/COPYING
new file mode 100644
index 0000000..94a9ed0
--- /dev/null
+++ b/COPYING
@@ -0,0 +1,674 @@
+ GNU GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users. We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors. You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights. Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received. You must make sure that they, too, receive
+or can get the source code. And you must show them these terms so they
+know their rights.
+
+ Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+ For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software. For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+ Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so. This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software. The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable. Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products. If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+ Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary. To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+ 13. Use with the GNU Affero General Public License.
+
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+ <one line to give the program's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+ If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+ <program> Copyright (C) <year> <name of author>
+ This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+ You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<http://www.gnu.org/licenses/>.
+
+ The GNU General Public License does not permit incorporating your program
+into proprietary programs. If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License. But first, please read
+<http://www.gnu.org/licenses/why-not-lgpl.html>.
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..65c5ca8
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,165 @@
+ GNU LESSER GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+
+ This version of the GNU Lesser General Public License incorporates
+the terms and conditions of version 3 of the GNU General Public
+License, supplemented by the additional permissions listed below.
+
+ 0. Additional Definitions.
+
+ As used herein, "this License" refers to version 3 of the GNU Lesser
+General Public License, and the "GNU GPL" refers to version 3 of the GNU
+General Public License.
+
+ "The Library" refers to a covered work governed by this License,
+other than an Application or a Combined Work as defined below.
+
+ An "Application" is any work that makes use of an interface provided
+by the Library, but which is not otherwise based on the Library.
+Defining a subclass of a class defined by the Library is deemed a mode
+of using an interface provided by the Library.
+
+ A "Combined Work" is a work produced by combining or linking an
+Application with the Library. The particular version of the Library
+with which the Combined Work was made is also called the "Linked
+Version".
+
+ The "Minimal Corresponding Source" for a Combined Work means the
+Corresponding Source for the Combined Work, excluding any source code
+for portions of the Combined Work that, considered in isolation, are
+based on the Application, and not on the Linked Version.
+
+ The "Corresponding Application Code" for a Combined Work means the
+object code and/or source code for the Application, including any data
+and utility programs needed for reproducing the Combined Work from the
+Application, but excluding the System Libraries of the Combined Work.
+
+ 1. Exception to Section 3 of the GNU GPL.
+
+ You may convey a covered work under sections 3 and 4 of this License
+without being bound by section 3 of the GNU GPL.
+
+ 2. Conveying Modified Versions.
+
+ If you modify a copy of the Library, and, in your modifications, a
+facility refers to a function or data to be supplied by an Application
+that uses the facility (other than as an argument passed when the
+facility is invoked), then you may convey a copy of the modified
+version:
+
+ a) under this License, provided that you make a good faith effort to
+ ensure that, in the event an Application does not supply the
+ function or data, the facility still operates, and performs
+ whatever part of its purpose remains meaningful, or
+
+ b) under the GNU GPL, with none of the additional permissions of
+ this License applicable to that copy.
+
+ 3. Object Code Incorporating Material from Library Header Files.
+
+ The object code form of an Application may incorporate material from
+a header file that is part of the Library. You may convey such object
+code under terms of your choice, provided that, if the incorporated
+material is not limited to numerical parameters, data structure
+layouts and accessors, or small macros, inline functions and templates
+(ten or fewer lines in length), you do both of the following:
+
+ a) Give prominent notice with each copy of the object code that the
+ Library is used in it and that the Library and its use are
+ covered by this License.
+
+ b) Accompany the object code with a copy of the GNU GPL and this license
+ document.
+
+ 4. Combined Works.
+
+ You may convey a Combined Work under terms of your choice that,
+taken together, effectively do not restrict modification of the
+portions of the Library contained in the Combined Work and reverse
+engineering for debugging such modifications, if you also do each of
+the following:
+
+ a) Give prominent notice with each copy of the Combined Work that
+ the Library is used in it and that the Library and its use are
+ covered by this License.
+
+ b) Accompany the Combined Work with a copy of the GNU GPL and this license
+ document.
+
+ c) For a Combined Work that displays copyright notices during
+ execution, include the copyright notice for the Library among
+ these notices, as well as a reference directing the user to the
+ copies of the GNU GPL and this license document.
+
+ d) Do one of the following:
+
+ 0) Convey the Minimal Corresponding Source under the terms of this
+ License, and the Corresponding Application Code in a form
+ suitable for, and under terms that permit, the user to
+ recombine or relink the Application with a modified version of
+ the Linked Version to produce a modified Combined Work, in the
+ manner specified by section 6 of the GNU GPL for conveying
+ Corresponding Source.
+
+ 1) Use a suitable shared library mechanism for linking with the
+ Library. A suitable mechanism is one that (a) uses at run time
+ a copy of the Library already present on the user's computer
+ system, and (b) will operate properly with a modified version
+ of the Library that is interface-compatible with the Linked
+ Version.
+
+ e) Provide Installation Information, but only if you would otherwise
+ be required to provide such information under section 6 of the
+ GNU GPL, and only to the extent that such information is
+ necessary to install and execute a modified version of the
+ Combined Work produced by recombining or relinking the
+ Application with a modified version of the Linked Version. (If
+ you use option 4d0, the Installation Information must accompany
+ the Minimal Corresponding Source and Corresponding Application
+ Code. If you use option 4d1, you must provide the Installation
+ Information in the manner specified by section 6 of the GNU GPL
+ for conveying Corresponding Source.)
+
+ 5. Combined Libraries.
+
+ You may place library facilities that are a work based on the
+Library side by side in a single library together with other library
+facilities that are not Applications and are not covered by this
+License, and convey such a combined library under terms of your
+choice, if you do both of the following:
+
+ a) Accompany the combined library with a copy of the same work based
+ on the Library, uncombined with any other library facilities,
+ conveyed under the terms of this License.
+
+ b) Give prominent notice with the combined library that part of it
+ is a work based on the Library, and explaining where to find the
+ accompanying uncombined form of the same work.
+
+ 6. Revised Versions of the GNU Lesser General Public License.
+
+ The Free Software Foundation may publish revised and/or new versions
+of the GNU Lesser General Public License from time to time. Such new
+versions will be similar in spirit to the present version, but may
+differ in detail to address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Library as you received it specifies that a certain numbered version
+of the GNU Lesser General Public License "or any later version"
+applies to it, you have the option of following the terms and
+conditions either of that published version or of any later version
+published by the Free Software Foundation. If the Library as you
+received it does not specify a version number of the GNU Lesser
+General Public License, you may choose any version of the GNU Lesser
+General Public License ever published by the Free Software Foundation.
+
+ If the Library as you received it specifies that a proxy can decide
+whether future versions of the GNU Lesser General Public License shall
+apply, that proxy's public statement of acceptance of any version is
+permanent authorization for you to choose that version for the
+Library.
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..e113904
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,4 @@
+include *.rst
+include COPYING
+include requirements.txt
+include scripts/*
diff --git a/README.rst b/README.rst
new file mode 100644
index 0000000..73e6bd4
--- /dev/null
+++ b/README.rst
@@ -0,0 +1,44 @@
+========
+Synopsis
+========
+
+This module helps with the processing and analysis of data from the `FAAM <http://www.faam.ac.uk>`_ aircraft.
+
+
+==========
+Motivation
+==========
+
+The faampy repository provides Python modules for working with data from the FAAM aircraft. Its main goals are to minimize code duplication and to make data analysis more efficient: spend less time wrangling data and more time on analysis.
+
+
+============
+Installation
+============
+
+The module is installed in the usual way::
+
+ git clone https://github.com/ncasuk/faampy.git
+ python setup.py build
+ sudo python setup.py install
+
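+Alternatively, the same checkout can be installed with pip, and the install
+verified by printing the package version (a sketch; the version attribute is
+the same one read by the documentation build in ``docs/conf.py``)::
+
+ pip install .
+ python -c "import faampy; print(faampy.__version__)"
+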
+
+============
+Contributors
+============
+
+* Axel Wellpott (FAAM)
+
+
+=======
+License
+=======
+
+faampy is licensed under the GNU Lesser General Public License (LGPLv3).
diff --git a/docs/Makefile b/docs/Makefile
new file mode 100644
index 0000000..cc65a00
--- /dev/null
+++ b/docs/Makefile
@@ -0,0 +1,225 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+BUILDDIR = _build
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help
+help:
+ @echo "Please use \`make ' where is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " applehelp to make an Apple Help Book"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " epub3 to make an epub3"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " texinfo to make Texinfo files"
+ @echo " info to make Texinfo files and run them through makeinfo"
+ @echo " gettext to make PO message catalogs"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " xml to make Docutils-native XML files"
+ @echo " pseudoxml to make pseudoxml-XML files for display purposes"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+ @echo " coverage to run coverage check of the documentation (if enabled)"
+ @echo " dummy to check syntax errors of document sources"
+
+.PHONY: clean
+clean:
+ rm -rf $(BUILDDIR)/*
+
+.PHONY: html
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+.PHONY: dirhtml
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+.PHONY: singlehtml
+singlehtml:
+ $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+.PHONY: pickle
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+.PHONY: json
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+.PHONY: htmlhelp
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+.PHONY: qthelp
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/faampy.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/faampy.qhc"
+
+.PHONY: applehelp
+applehelp:
+ $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
+ @echo
+ @echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
+ @echo "N.B. You won't be able to view it unless you put it in" \
+ "~/Library/Documentation/Help or install it in your application" \
+ "bundle."
+
+.PHONY: devhelp
+devhelp:
+ $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/faampy"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/faampy"
+ @echo "# devhelp"
+
+.PHONY: epub
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+.PHONY: epub3
+epub3:
+ $(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3
+ @echo
+ @echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3."
+
+.PHONY: latex
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+.PHONY: latexpdf
+latexpdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+.PHONY: latexpdfja
+latexpdfja:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through platex and dvipdfmx..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+.PHONY: text
+text:
+ $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+.PHONY: man
+man:
+ $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+ @echo
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+.PHONY: texinfo
+texinfo:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo
+ @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+ @echo "Run \`make' in that directory to run these through makeinfo" \
+ "(use \`make info' here to do that automatically)."
+
+.PHONY: info
+info:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo "Running Texinfo files through makeinfo..."
+ make -C $(BUILDDIR)/texinfo info
+ @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+.PHONY: gettext
+gettext:
+ $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+ @echo
+ @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+.PHONY: changes
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+.PHONY: linkcheck
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+.PHONY: doctest
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
+
+.PHONY: coverage
+coverage:
+ $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
+ @echo "Testing of coverage in the sources finished, look at the " \
+ "results in $(BUILDDIR)/coverage/python.txt."
+
+.PHONY: xml
+xml:
+ $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
+ @echo
+ @echo "Build finished. The XML files are in $(BUILDDIR)/xml."
+
+.PHONY: pseudoxml
+pseudoxml:
+ $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
+ @echo
+ @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
+
+.PHONY: dummy
+dummy:
+ $(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy
+ @echo
+ @echo "Build finished. Dummy builder generates no files."
diff --git a/docs/cli_tools.rst b/docs/cli_tools.rst
new file mode 100644
index 0000000..1415a27
--- /dev/null
+++ b/docs/cli_tools.rst
@@ -0,0 +1,98 @@
+==================
+Command line tools
+==================
+
+The faampy module provides a number of command-line tools. All of these commands are invoked via::
+
+ faampy SUBCOMMAND [OPTIONS] ARGUMENTS
+
+A list of the available subcommands is printed by simply typing ``faampy`` on the command line.
+
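+For example, since each subcommand builds its options with argparse, its full
+usage message should be printable with the standard help flag (a sketch;
+``nc_to_gpx`` stands in for any of the subcommands documented below)::
+
+ # assumed: the faampy dispatcher forwards --help to the subcommand parser
+ faampy nc_to_gpx --help
+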
+
+nimrod_to_nc
+~~~~~~~~~~~~
+.. argparse::
+ :module: faampy.data_io.nimrod_to_nc
+ :func: _argparser
+ :prog: faampy nimrod_to_nc
+ :nodefault:
+
+nc_to_gpx
+~~~~~~~~~
+.. argparse::
+ :module: faampy.mapping.nc_to_gpx
+ :func: _argparser
+ :prog: faampy nc_to_gpx
+ :nodefault:
+
+ge_ncas_airquality
+~~~~~~~~~~~~~~~~~~
+
+.. argparse::
+ :module: faampy.mapping.ge_ncas_airquality
+ :func: _argparser
+ :prog: faampy ge_ncas_airquality
+ :nodefault:
+
+ge_nimrod_to_kmz
+~~~~~~~~~~~~~~~~
+.. argparse::
+ :module: faampy.mapping.ge_nimrod_to_kmz
+ :func: _argparser
+ :prog: faampy ge_nimrod_to_kmz
+ :nodefault:
+
+ge_photo_album
+~~~~~~~~~~~~~~
+.. argparse::
+ :module: faampy.mapping.ge_photo_album
+ :func: _argparser
+ :prog: faampy ge_photo_album
+ :nodefault:
+
+ge_ncvar_to_kml
+~~~~~~~~~~~~~~~
+.. argparse::
+ :module: faampy.mapping.ge_ncvar_to_kml
+ :func: _argparser
+ :prog: faampy ge_ncvar_to_kml
+ :nodefault:
+
+ge_was_to_kmz
+~~~~~~~~~~~~~
+.. argparse::
+ :module: faampy.mapping.ge_was_to_kmz
+ :func: _argparser
+ :prog: faampy ge_was_to_kmz
+ :nodefault:
+
+world_map
+~~~~~~~~~
+.. argparse::
+ :module: faampy.mapping.world_map
+ :func: _argparser
+ :prog: faampy world_map
+ :nodefault:
+
+sat_tracker
+~~~~~~~~~~~
+.. argparse::
+ :module: faampy.mapping.sat_tracker
+ :func: _argparser
+ :prog: faampy sat_tracker
+ :nodefault:
+
+plt_quicklooks
+~~~~~~~~~~~~~~
+.. argparse::
+ :module: faampy.plotting.quicklooks
+ :func: _argparser
+ :prog: faampy plt_quicklooks
+ :nodefault:
diff --git a/docs/conf.py b/docs/conf.py
new file mode 100644
index 0000000..8dbedf9
--- /dev/null
+++ b/docs/conf.py
@@ -0,0 +1,380 @@
+# -*- coding: utf-8 -*-
+#
+# faampy documentation build configuration file, created by
+# sphinx-quickstart on Sat Dec 3 20:57:34 2016.
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, as shown here.
+#
+import mock
+import os
+import sys
+sys.path.insert(0, os.path.abspath('..'))
+
+
+
+MOCK_MODULES = ['numpy', 'scipy', 'matplotlib', 'matplotlib.pyplot', 'numpy.ma', 'matplotlib.ticker',
+ 'matplotlib.image', 'matplotlib.dates', 'matplotlib.cbook', 'PIL', 'PIL.Image', 'PIL.ExifTags', 'ephem',
+ 'mpl_toolkits.basemap.pyproj', 'Basemap', 'json', 'urllib2', 'math', 'mpl_toolkits', 'mpl_toolkits.basemap', 'mapnik',
+ 'pandas', 'iris', 'netCDF4', 'osgeo', 'haversine', 'pyspatialite', 'pyspatialite.dbapi2',
+ 'osgeo.ogr', 'gdal', 'simplekml', 'rdp', 'seaborn', '_geoslib', 'proj']
+
+
+#for mod_name in MOCK_MODULES:
+# sys.modules[mod_name] = mock.Mock()
+
+from mock import Mock as MagicMock
+
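+# Importing faampy further down pulls in heavy compiled dependencies (numpy,
+# gdal, netCDF4, ...) that may be missing on the documentation build host
+# (e.g. Read the Docs). Registering a Mock object for every name in
+# MOCK_MODULES lets autodoc import the package anyway; any attribute access
+# on a mocked module simply returns another MagicMock.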
+class Mock(MagicMock):
+ @classmethod
+ def __getattr__(cls, name):
+ return MagicMock()
+
+sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES)
+
+
+
+
+import faampy
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#
+# needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+ 'sphinx.ext.autodoc',
+ 'sphinx.ext.todo',
+ 'sphinx.ext.coverage',
+ 'sphinx.ext.viewcode',
+# 'sphinxcontrib.autoprogram',
+ 'sphinxarg.ext',
+# 'sphinx.ext.imgmath',
+# 'sphinxcontrib.programoutput',
+]
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffixes as a list of strings:
+#
+# source_suffix = ['.rst', '.md']
+source_suffix = '.rst'
+
+# The encoding of source files.
+#
+# source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'faampy'
+copyright = u'2017, Axel Wellpott'
+author = u'Axel Wellpott'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = faampy.__version__
+# The full version, including alpha/beta/rc tags.
+release = faampy.__version__ + " (" + faampy.__status__ + ")"
+
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#
+# This is also used if you do content translation via gettext catalogs.
+# Usually you set "language" from the command line for these cases.
+language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#
+# today = ''
+#
+# Else, today_fmt is used as the format for a strftime call.
+#
+# today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+# This patterns also effect to html_static_path and html_extra_path
+exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+#
+# default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#
+# add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#
+# add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#
+# show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+# modindex_common_prefix = []
+
+# If true, keep warnings as "system message" paragraphs in the built documents.
+# keep_warnings = False
+
+# If true, `todo` and `todoList` produce output, else they produce nothing.
+todo_include_todos = True
+
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+#
+html_theme = 'sphinx_rtd_theme'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+#
+html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+html_theme_path = ["_themes", ]
+
+# The name for this set of Sphinx documents.
+# " v documentation" by default.
+#
+# html_title = u'faampy v0.1.1'
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+#
+# html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#
+# html_logo = None
+
+# The name of an image file (relative to this directory) to use as a favicon of
+# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#
+# html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# Add any extra paths that contain custom files (such as robots.txt or
+# .htaccess) here, relative to this directory. These files are copied
+# directly to the root of the documentation.
+#
+# html_extra_path = []
+
+# If not None, a 'Last updated on:' timestamp is inserted at every page
+# bottom, using the given strftime format.
+# The empty string is equivalent to '%b %d, %Y'.
+#
+# html_last_updated_fmt = None
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#
+# html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#
+# html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#
+# html_additional_pages = {}
+
+# If false, no module index is generated.
+#
+# html_domain_indices = True
+
+# If false, no index is generated.
+#
+# html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#
+# html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#
+# html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#
+# html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#
+# html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+#
+# html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+# html_file_suffix = None
+
+# Language to be used for generating the HTML full-text search index.
+# Sphinx supports the following languages:
+# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
+# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
+#
+# html_search_language = 'en'
+
+# A dictionary with options for the search language support, empty by default.
+# 'ja' uses this config value.
+# 'zh' user can custom change `jieba` dictionary path.
+#
+# html_search_options = {'type': 'default'}
+
+# The name of a javascript file (relative to the configuration directory) that
+# implements a search results scorer. If empty, the default will be used.
+#
+# html_search_scorer = 'scorer.js'
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'faampydoc'
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+ # The paper size ('letterpaper' or 'a4paper').
+ #
+ # 'papersize': 'letterpaper',
+
+ # The font size ('10pt', '11pt' or '12pt').
+ #
+ # 'pointsize': '10pt',
+
+ # Additional stuff for the LaTeX preamble.
+ #
+ # 'preamble': '',
+
+ # Latex figure (float) alignment
+ #
+ # 'figure_align': 'htbp',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+# author, documentclass [howto, manual, or own class]).
+latex_documents = [
+ (master_doc, 'faampy.tex', u'faampy Documentation',
+ u'Axel Wellpott', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#
+# latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#
+# latex_use_parts = False
+
+# If true, show page references after internal links.
+#
+# latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#
+# latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+#
+# latex_appendices = []
+
+# If false, will not define \strong, \code, \titleref, \crossref ... but only
+# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
+# packages.
+#
+# latex_keep_old_macro_names = True
+
+# If false, no module index is generated.
+#
+# latex_domain_indices = True
+
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ (master_doc, 'faampy', u'faampy Documentation',
+ [author], 1)
+]
+
+# If true, show URL addresses after external links.
+#
+# man_show_urls = False
+
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ (master_doc, 'faampy', u'faampy Documentation',
+ author, 'faampy', 'Modules for handling FAAM data.',
+ 'Miscellaneous'),
+]
+
+# Documents to append as an appendix to all manuals.
+#
+# texinfo_appendices = []
+
+# If false, no module index is generated.
+#
+# texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+#
+# texinfo_show_urls = 'footnote'
+
+# If true, do not generate a @detailmenu in the "Top" node's menu.
+#
+# texinfo_no_detailmenu = False
diff --git a/docs/faam_data.rst b/docs/faam_data.rst
new file mode 100644
index 0000000..fcac918
--- /dev/null
+++ b/docs/faam_data.rst
@@ -0,0 +1,17 @@
+
+============
+FAAM_Dataset
+============
+
+The FAAM_Dataset class handles the core_faam*nc files, smooths out the reading process of the data and ensures that older files are read in the same way as newer ones. The class mimics the behaviour of the netCDF4.Dataset class.
+
+A nifty method of the class is merge, which enables you to merge data from a separate file. The data to be merged need to be supplied as a numpy.recarray, and the timestamp of the FAAM_Dataset is used as the index for the procedure. Gaps in the recarray are taken care of.
+
+A convenient option is exporting the Dataset into a pandas DataFrame, which then gives you all the amazing features of pandas. Since pandas cannot deal with multidimensional arrays, only the first measurement within a row is used for the DataFrame.
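+
+A minimal usage sketch (the file name is illustrative, ``rec`` stands for a
+numpy.recarray prepared beforehand, and the name of the DataFrame export
+method is an assumption; see the API documentation below for the exact
+signatures):
+
+.. code-block:: python
+
+    from faampy.core.faam_data import FAAM_Dataset
+
+    ds = FAAM_Dataset('core_faam_20161024_v004_r0_b991.nc')
+    # merge a numpy.recarray with a 'timestamp' column into the dataset
+    ds.merge(rec, index='timestamp')
+    # export everything to a pandas.DataFrame (method name assumed)
+    df = ds.as_dataframe()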
+
+
+
+.. automodule:: faampy.core.faam_data
+ :members: FAAM_Dataset
+
+
diff --git a/docs/full_list.rst b/docs/full_list.rst
new file mode 100644
index 0000000..08769e5
--- /dev/null
+++ b/docs/full_list.rst
@@ -0,0 +1,15 @@
+
+Full List
+=========
+
+
+faampy.utils
+------------
+
+.. automodule:: faampy.utils.file_info
+ :members: File_Info
+
+.. automodule:: faampy.utils.file_list
+ :members: File_List
+
+
diff --git a/docs/index.rst b/docs/index.rst
new file mode 100644
index 0000000..45ca8af
--- /dev/null
+++ b/docs/index.rst
@@ -0,0 +1,29 @@
+.. faampy documentation master file, created by
+ sphinx-quickstart on Sat Dec 3 20:57:34 2016.
+ You can adapt this file completely to your liking, but it should at least
+ contain the root `toctree` directive.
+
+Welcome to faampy's documentation!
+==================================
+
+Contents:
+
+.. toctree::
+ :maxdepth: 3
+ :glob:
+
+ intro
+ installation
+ faam_data
+ cli_tools
+ recipe/*
+ full_list
+
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
+
diff --git a/docs/installation.rst b/docs/installation.rst
new file mode 100644
index 0000000..4cd06e0
--- /dev/null
+++ b/docs/installation.rst
@@ -0,0 +1,27 @@
+
+Installing faampy
+=================
+
+Installation of faampy is done in the usual way using the setup script::
+
+ git clone https://github.com/ncasuk/faampy.git
+ python setup.py build
+ sudo python setup.py install
+
+So far the module has only been tested on Linux machines and most of the code development has been done with python 2.7. However, the idea is to make faampy python3 compatible and platform independent.
+
+
+Example flight data, databases, ...
+-----------------------------------
+
+Example data and databases of flight tracks are available for download. After installing the faampy module you can run::
+
+ faampy data_download ZIP_PASSWORD
+
+from the command line. This will download a zip file and copy its content to a 'faampy_data' directory in your $HOME directory. For the moment the zip file that you download is password protected. Please contact me if you think you need the data and I will give you the password.
+
+
+Disclaimer
+----------
+
+faampy is in its early stages and has not been thoroughly tested. More modules will be added in the near future. A backlog of modules exists that have already been written but need to be tidied up before being added to the repository.
diff --git a/docs/intro.rst b/docs/intro.rst
new file mode 100644
index 0000000..afb2033
--- /dev/null
+++ b/docs/intro.rst
@@ -0,0 +1,5 @@
+==========
+Motivation
+==========
+
+The faampy repository contains a number of python modules and scripts that handle data from the FAAM aircraft. The goal of faampy is to provide useful and stable tools that deal with many common tasks. It focuses on general utilities rather than very specific scientific problems. From experience, many tasks (data merging, mapping, etc.) are common no matter which background the user has (Aerosol, Chemistry, Cloud Physics, ...). faampy is meant to be for the FAAM community **by** the FAAM community. Therefore users are encouraged to report/fix bugs and send in suggestions for improvements.
diff --git a/docs/recipe/img/b991_flight_track.png b/docs/recipe/img/b991_flight_track.png
new file mode 100644
index 0000000..74bc39b
Binary files /dev/null and b/docs/recipe/img/b991_flight_track.png differ
diff --git a/docs/recipe/recipes_01_core.rst b/docs/recipe/recipes_01_core.rst
new file mode 100644
index 0000000..85277da
--- /dev/null
+++ b/docs/recipe/recipes_01_core.rst
@@ -0,0 +1,17 @@
+
+Recipe - FAAM core
+==================
+
+To come.
+
+
+Batch processing
+----------------
+
+To come.
+
+
+Data Mining
+-----------
+
+To come.
\ No newline at end of file
diff --git a/docs/recipe/recipes_02_post_flight_analysis.rst b/docs/recipe/recipes_02_post_flight_analysis.rst
new file mode 100644
index 0000000..2c9f5b6
--- /dev/null
+++ b/docs/recipe/recipes_02_post_flight_analysis.rst
@@ -0,0 +1,185 @@
+
+
+Recipe - Post Flight Analysis
+=============================
+
+This is an example of what the post flight analysis for a typical chemistry FAAM flight could look like.
+
+The data we are using are from the "Into the Blue" flight b991 on the 24th October 2016. This flight took us up and down the west coast between Morecambe and Wales, sampling "plumes" that originated from the Manchester/Liverpool area.
+
+.. image:: ./img/b991_flight_track.png
+
+.. warning:: All the provided chemistry data are preliminary and uncalibrated. Therefore the data are not suitable for scientific analysis.
+
+
+FAAM Dataset
+------------
+
+At the start we need to import a number of modules and define a few variables that we need in later steps.
+
+.. code-block:: python
+
+ import datetime
+ import os
+ import numpy as np
+ import pandas as pd
+ import faampy
+ from faampy.core.faam_data import FAAM_Dataset
+
+ year, month, day = 2016, 10, 24
+ FID = 'b991'
+
+Reading the data from the NOx instrument into a pandas.DataFrame.
+
+.. code-block:: python
+
+ # define the input data file
+ nox_file = os.path.join(faampy.FAAMPY_DATA_PATH,
+ 'b991',
+ 'chem_data',
+ 'NOx_161024_090507')
+
+ # defining the function that calculates the timestamp
+ nox_dateparse = lambda x: pd.datetime(year, month, day) + \
+ datetime.timedelta(seconds=int((float(x) % 1)*86400.))
+
+ df_nox = pd.read_csv(nox_file, parse_dates=[0], date_parser=nox_dateparse)
+ df_nox = df_nox.set_index('TheTime') # Setting index
+ t = df_nox.index.values
+ df_nox['timestamp'] = t.astype('datetime64[s]') # Converting index data type
+ df_nox = df_nox[['timestamp', 'no_conc', 'no2_conc', 'nox_conc']]
+ df_nox[df_nox < 0] = np.nan
+
+
+Now reading in the FGGA data.
+
+.. code-block:: python
+
+ fgga_file = os.path.join(faampy.FAAMPY_DATA_PATH,
+ 'b991',
+ 'chem_data',
+ 'FGGA_20161024_092223_B991.txt')
+
+ fgga_dateparse = lambda x: pd.datetime.utcfromtimestamp(int(x))
+ fgga_names = ['identifier', 'packet_length', 'timestamp', 'ptp_sync',
+ 'MFM', 'flight_num', 'CPU_Load', 'USB_disk_space', 'ch4', 'co2',
+ 'h2o', 'press_torr', 'temp_c', 'fit_flag', 'rda_usec',
+ 'rdb_usec', 'ch4_ppb', 'co2_ppm', 'MFC_1_absolute_pressure',
+ 'MFC_1_temperature', 'MFC_1volumetic_flow', 'MFC_1mass_flow',
+ 'MFC_1set_point', 'V1', 'V2', 'V3', 'V4', 'restart_FGGA',
+ 'FGGA_Pump', 'CAL_MFC_1Set_Value']
+ df_fgga = pd.read_csv(fgga_file,
+ names=fgga_names,
+ delimiter=',',
+ parse_dates=[2],
+ date_parser=fgga_dateparse,
+ skiprows=100) # To be sure to skip the header
+
+ # Using the Valve states for flagging out calibration periods
+ df_fgga.loc[df_fgga['V1'] != 0, 'ch4_ppb'] = np.nan
+ df_fgga.loc[df_fgga['V2'] != 0, 'co2_ppm'] = np.nan
+ df_fgga.loc[df_fgga['V2'] != 0, 'ch4_ppb'] = np.nan
+
+
+.. code-block:: python
+
+ # Reading in the FAAM core data file using the FAAM_Dataset object from the
+ # faampy module
+
+ core_file = os.path.join(faampy.FAAMPY_DATA_PATH,
+ 'b991',
+ 'core',
+ 'core_faam_20161024_v004_r0_b991.nc')
+ ds = FAAM_Dataset(core_file)
+
+
+ # merge chemistry data with the core data set
+ ds.merge(df_nox.to_records(convert_datetime64=False), index='timestamp', delay=3)
+ ds.merge(df_fgga.to_records(convert_datetime64=False), index='timestamp', delay=4)
+
+ # define the list of variables that we'd like to extract
+ var_list = ['Time', 'LAT_GIN', 'LON_GIN', 'ALT_GIN', 'HGT_RADR',
+ 'CO_AERO', 'U_C', 'V_C', 'W_C', 'U_NOTURB', 'V_NOTURB',
+ 'WOW_IND', 'TAT_DI_R', 'TDEW_GE', 'PS_RVSM', 'ch4_ppb', 'co2_ppm',
+ 'no_conc', 'no2_conc', 'nox_conc', 'TSC_BLUU', 'TSC_GRNU',
+ 'TSC_REDU', 'BSC_BLUU', 'BSC_GRNU', 'BSC_REDU', 'IAS_RVSM']
+
+ # write the netcdf out to your HOME directory
+ outfile = os.path.join(os.environ['HOME'], '%s_merged.nc' % (FID.lower()))
+ ds.write(outfile,
+ clobber=True,
+ v_name_list=var_list)
+
+
+Google-Earth overlays
+---------------------
+
+The commands in this section are run from the console. To keep the filenames short we move into the directory where the data for b991 are located::
+
+ cd ~/faampy_data/b991
+
+
+We create a gpx (GPS Exchange Format) file::
+
+ faampy nc_to_gpx core/core_faam_20161024_v004_r0_b991.nc .
+
+
+We use the gpx data file to geotag a few photographs that were taken during the flight. The gpscorrelate utility can be installed from the Linux distribution's package manager::
+
+ gpscorrelate --gps b991_20161024.gpx --photooffset -3600 photos/*jpg
+
+
+Now that the photos are geotagged it is possible to create a photo album::
+
+ faampy ge_photo_album ./photos ./ge_photo_album_20161024_b991.kmz
+
+
+WAS bottle overlay::
+
+ faampy ge_was_to_kmz ./chem_data/B991.WAS ./core/core_faam_20161024_v004_r0_b991_1hz.nc .
+
+
+Make profiles for some of the chemicals in the created merged file::
+
+ cd ~/faampy_data/b991
+ faampy ge_ncvar_to_kml --offset -100 --scale_factor 500 \
+ --fltsumm ./core/flight-sum_faam_20161024_r0_b991.txt CO_AERO b991_merged.nc .
+
+ faampy ge_ncvar_to_kml --offset -435 --scale_factor 1500 \
+ --fltsumm ./core/flight-sum_faam_20161024_r0_b991.txt co2_ppm b991_merged.nc .
+
+ faampy ge_ncvar_to_kml --offset -2115 --scale_factor 500 \
+ --fltsumm ./core/flight-sum_faam_20161024_r0_b991.txt ch4_ppb b991_merged.nc .
+
+ faampy ge_ncvar_to_kml --scale_factor 0.4 \
+ --fltsumm ./core/flight-sum_faam_20161024_r0_b991.txt nox_conc b991_merged.nc .
+
+
+
+Quicklook Figures
+-----------------
+
+faampy provides a command line tool to create quicklook figures using the information from the flight summary. According to the event name (e.g. Profile, Run, ...) either a time series or a profile plot is created. Maps are created for every event and skew-T plots for every profile. Again, to keep filenames on the command line brief we move into the b991 directory::
+
+ cd ~/faampy_data/b991
+ mkdir quicklooks
+
+Create a quicklooks configuration file (quicklooks.cfg) which defines the figure layout and which variables should be plotted::
+
+ touch quicklooks.cfg
+
+Add the following text to the quicklooks.cfg file using a text editor::
+
+ [['TSC_BLUU', 'TSC_GRNU' , 'TSC_REDU'], ['BSC_BLUU', 'BSC_GRNU', 'BSC_REDU']]
+ [['CO_AERO'],]
+ [['ch4_ppb'], ['co2_ppm']]
+ [['no_conc'], ['no2_conc'], ['nox_conc']]
+
+Every line defines one figure and its number of subplots. For example, the first line ([['TSC_BLUU', 'TSC_GRNU', 'TSC_REDU'], ['BSC_BLUU', 'BSC_GRNU', 'BSC_REDU']]) will create a figure with two subplots: the first showing the total scatter values from the Nephelometer and the second showing the backscatter values.
+
+We will use the merged data file, which we produced in the previous section. This file contains the NOx and FGGA data. The command for creating the quicklooks is::
+
+ faampy plt_quicklooks --config_file quicklooks.cfg b991_merged.nc \
+ ./core/flight-sum_faam_20161024_r0_b991.txt ./quicklooks/
+
+If the above command was successful the figures should have been created in the quicklooks directory.
diff --git a/docs/recipe/recipes_03_spatial.rst b/docs/recipe/recipes_03_spatial.rst
new file mode 100644
index 0000000..f3da146
--- /dev/null
+++ b/docs/recipe/recipes_03_spatial.rst
@@ -0,0 +1,270 @@
+
+
+Recipe - Spatial Analysis
+=========================
+
+FAAM core data are stored as netCDF and come with a *Time* dimension. However, since the FAAM aircraft is a moving platform, location is obviously also an important dimension, and spatial queries of the FAAM data can add useful functionality. To provide this feature the FAAM flight tracks are inserted as *linestrings* into a database with spatial capabilities. Such a database allows queries like:
+
+ * Give me all the flights that have crossed Scotland
+ * On which flights did we fly closer than 10 nautical miles past the Queen's palace
+ * How long was the flight track
+
+
+DB description
+--------------
+
+The spatialite DB is stored in one single file, which is very convenient and does not require the setup of an advanced database, which can come with a lot of obstacles. In direct comparison spatialite is less powerful, but it has all the features that we need. For more information see:
+
+ | https://www.gaia-gis.it/fossil/libspatialite/index
+ | http://www.gaia-gis.it/gaia-sins/spatialite-sql-4.4.0.html
+ | https://www.gaia-gis.it/gaia-sins/spatialite-tutorial-2.3.1.html
+ | http://postgis.net/docs/
+
+The example database currently has only one table with three columns:
+
+ | fid - Flight id
+ | date - Start date of flight
+ | the_geom - Holds the linestring geometry
+
+
+For the examples below python is our tool of choice, which has all the necessary modules to interact with the db. The code is from the *faampy_spatial_examples.py* file.
+
+
+Imports and DB connection
+-------------------------
+
+For the examples below to work we need to import some common modules and connect to the database.
+
+.. code-block:: python
+
+ import json
+ import numpy as np
+ import os
+ import osgeo.ogr
+ import simplekml
+
+ import faampy
+
+ try:
+     from faampy.core.faam_spatial import FAAM_Spatial_DB
+ except ImportError:
+     from faam_spatial import FAAM_Spatial_DB
+
+ LATEST_FID = 'b991'
+
+ DB_FILE = os.path.join(faampy.FAAMPY_DATA_PATH, 'db', 'faam_spatial_db.sqlite')
+
+ # connecting to the database
+ db = FAAM_Spatial_DB(DB_FILE)
+
+
+Example: Get some db info
+-------------------------
+
+Just get some basic information from the database.
+
+.. code-block:: python
+
+ print('Some DB info')
+
+ # Count how many records are in the DB
+ sql="""SELECT COUNT(*) FROM flight_tracks;"""
+ cur = db.conn.cursor()
+ cur.execute(sql)
+ cnt = cur.fetchone()[0]
+
+ print('Number of flights in the DB: %i' % (cnt,))
+
+ # Get all fids that are in the DB
+ sql = """SELECT fid FROM flight_tracks ORDER BY fid;"""
+ cur = db.conn.cursor()
+ cur.execute(sql)
+ fids = [i[0] for i in cur.fetchall()]
+ # By today FAAM has flown a total of 991 missions
+ all_fids = set(['b%0.3i' % i for i in range(int(LATEST_FID[1:]))])
+ missing_fids = sorted(all_fids.difference(fids))
+
+ print('Number Missing flights: %i' % (len(missing_fids),))
+ print('Missing flights ids: %s' % (','.join(missing_fids),))
+
+
+Example: Find flights that go over the North Sea
+------------------------------------------------
+
+The goal is to find all FAAM flights that go over the North Sea. To do this we need
+the boundaries for the North Sea. A shapefile with the Polygon can be downloaded from
+the web:
+http://www.marineregions.org/gazetteer.php?p=details&id=2350
+
+To read it in we use the osgeo module.
+
+.. code-block:: python
+
+ sf = osgeo.ogr.Open('north_sea.shp')
+ layer = sf.GetLayer()
+ ns = layer.GetFeature(0) # there is only one feature in the layer
+ geometry = ns.GetGeometryRef()
+ ns_wkt = geometry.ExportToWkt() # Getting Well-known text representation
+
+Give me all flights where the track intersects the North Sea Polygon. Now that
+we have the geometry in wkt format, we can use it to create a sql query that we
+can send to the spatialite DB.
+
+.. code-block:: python
+
+ sql = "SELECT FT.fid FROM flight_tracks FT where "
+ sql += "ST_Intersects( GeomFromText('%s'), FT.the_geom) " % (ns_wkt,)
+ sql += "ORDER BY FT.fid;"
+ cur = db.conn.cursor() # connect
+ cur.execute(sql) # execute
+ fids = [i[0] for i in cur.fetchall()] # flatten the result
+
+ print('Number of flights that intersect the North Sea: %i' % (len(fids),))
+ print('List flights that intersect the North Sea: %s\n' % (','.join(fids),))
+
+Now that we have all the fids that intersected the North Sea, we want
+to look at them using google-earth. Spatialite has the capability of
+formatting the geometries into kml format (askml).
+
+.. code-block:: python
+
+ sql = "SELECT askml(Simplify(FT.the_geom, 0.01)) FROM flight_tracks FT WHERE"
+ sql += " FT.fid IN (%s)" % (str(','.join(["'%s'" % fid for fid in fids])))
+ cur.execute(sql)
+ flight_tracks_kml = cur.fetchall()
+
+ #Create a new kml file
+ kml = simplekml.Kml()
+ folder = kml.newfolder(name='Spatialite result')
+ lines = kml.kml().split('\n')
+ # wrap the North Sea polygon and every flight track in a kml Placemark
+ lines.insert(-4, '<Placemark>')
+ lines.insert(-4, '<name>North Sea</name>')
+ lines.insert(-4, geometry.ExportToKML())
+ lines.insert(-4, '</Placemark>')
+ for i, flight_track in enumerate(flight_tracks_kml):
+     lines.insert(-4, '<Placemark>')
+     lines.insert(-4, '<name>%s</name>' % (fids[i],))
+     lines.insert(-4, flight_track[0])
+     lines.insert(-4, '</Placemark>')
+
+ ofilename = os.path.join(os.environ['HOME'], 'fids_crossing_ns.kml')
+ print('Writing North Sea Polygon and the flight track linestrings as kml')
+ print('kml written to: %s' % (ofilename,))
+ ofile = open(ofilename, 'w')
+ ofile.write('\n'.join(lines))
+ ofile.close()
+
+
+Example: Get the length of a flight track
+------------------------------------------
+
+Get the length of a flight track. The database can do this for us
+using the *GreatCircleLength* function.
+
+.. code-block:: python
+
+ fid = 'b659'
+ sql = "SELECT GreatCircleLength(the_geom) from "
+ sql += "flight_tracks where fid = '%s';" % (fid, )
+ cur = db.conn.cursor() # connect
+ cur.execute(sql) # execute
+ length = cur.fetchone()[0]/1000.
+ print('Flight %s was %.2f km long.' % (fid, length))
+
+
+Example: Get all flights when the ARA climbed above a certain altitude
+----------------------------------------------------------------------
+
+We are trying to find all the flights where we climbed above a certain gps
+altitude. For this we loop over all individual flight tracks. The steps are:
+
+1. Get the flight track from the DB in json format
+2. Use the 'coordinates' key from the json and extract the z-coordinate
+3. Check if the maximum z-value is greater than MAX_ALT and store
+   the fid in the result list if that's the case
+
+
+.. code-block:: python
+
+ MAX_ALT = 11000
+ print('TASK: Finding flights exceeding %i m altitude' % (int(MAX_ALT),))
+ sql = """SELECT fid, AsGeoJSON(the_geom) from flight_tracks;"""
+ cur = db.conn.cursor() # connect
+ cur.execute(sql) # execute
+ result = cur.fetchall()
+ fid_max_alt_list = []
+ for r in result:
+ fid = r[0]
+ # get the coordinates from the geojson
+ coords = np.array(json.loads(r[1])['coordinates'])
+ # the alt coordinate is the 3rd column
+ alt_max = np.nanmax(coords[:,2])
+ fid_max_alt_list.append((fid, alt_max))
+
+ fids = sorted([i[0] for i in fid_max_alt_list if i[1] > MAX_ALT])
+ print('N fids with gps altitude > %i: %i' % (int(MAX_ALT), len(fids),))
+ print('List of flight ids: %s\n' % (','.join(fids),))
+
+
+Example: Get all flights that took off from Cranfield
+-----------------------------------------------------------------
+Some code is needed to calculate the distance between two WGS84 coordinates. The helper below uses the haversine formula.
+
+.. code-block:: python
+
+ # http://stackoverflow.com/questions/19412462/getting-distance-between-two-points-based-on-latitude-longitude-python
+ def calc_distance(lat1, lon1, lat2, lon2):
+ from math import sin, cos, sqrt, atan2, radians
+ # approximate radius of earth in m
+ R = 6373000.0
+ lat1 = radians(lat1)
+ lon1 = radians(lon1)
+ lat2 = radians(lat2)
+ lon2 = radians(lon2)
+ dlon = lon2 - lon1
+ dlat = lat2 - lat1
+ a = sin(dlat / 2)**2 + cos(lat1) * cos(lat2) * sin(dlon / 2)**2
+ c = 2 * atan2(sqrt(a), sqrt(1 - a))
+
+ distance = R * c
+ return distance
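+
+As a quick sanity check (the destination coordinates below are illustrative
+values for central London):
+
+.. code-block:: python
+
+    # distance from Cranfield Airport to central London, in metres
+    d = calc_distance(52.072222, -0.616667, 51.5074, -0.1278)
+    print('%.1f km' % (d / 1000.,))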
+
+This is the code to get all the flights from the database.
+
+.. code-block:: python
+
+ print('TASK: Finding flights that took off in Cranfield in every year')
+ Cranfield_Coords = (52.072222, -0.616667) # Cranfield Airport coordinates
+ # in m; the distance is rather large to cover flights
+ # when the GIN didn't work straight away
+ MAX_DISTANCE = 15000
+ sql = """SELECT fid, date, AsGeoJSON(the_geom) from flight_tracks order by date;"""
+ cur = db.conn.cursor() # connect
+ cur.execute(sql) # execute
+ result = cur.fetchall()
+ # get a list of all years for which we do the analysis
+ years = list(set([r[1].split('-')[0] for r in result]))
+ dist_dict = {}
+ for y in years:
+ dist_dict[y] = []
+
+ for r in result:
+ fid = r[0]
+ # get the coordinates from the geojson
+ coords = np.array(json.loads(r[2])['coordinates'])
+ # extract year string from sql result
+ year = r[1].split('-')[0]
+ lat1, lon1 = Cranfield_Coords
+ # pull coordinates form the very first array
+ lon2 = coords[0, 0]
+ lat2 = coords[0, 1]
+ dist = calc_distance(lat1, lon1, lat2, lon2)
+ if dist < MAX_DISTANCE:
+ dist_dict[year].append((fid, dist))
+
+ # print summary
+ total = 0
+ # print the number for every year
+ for year in sorted(dist_dict.keys()):
+ n = len(dist_dict[year])
+ total += n
+ print('%7s: %3s' % (year, n))
+ print('%7s: %3s' % ('total', total))
+
\ No newline at end of file
diff --git a/docs/recipe/recipes_04_cis.rst b/docs/recipe/recipes_04_cis.rst
new file mode 100644
index 0000000..400f595
--- /dev/null
+++ b/docs/recipe/recipes_04_cis.rst
@@ -0,0 +1,122 @@
+
+Recipe - FAAM meets cis
+=======================
+
+
+CIS Installation
+----------------
+
+Information about installing anaconda2 and cis:
+ | anaconda2: https://www.continuum.io/downloads
+ | cis: http://cistools.net/get-started#installation
+
+Please note that the cis instructions say that you should install python 2.7 and **not** 3.x. If you are new to python you might wonder why you wouldn't install the very latest version. In brief: the two versions are not fully compatible and many people have decided to stick with 2.7.
+
+FAAM netCDF preparation
+-----------------------
+
+The FAAM core data do not work with the cis tool straight away. The netCDF files need a little tweaking to make them fully CF compliant, so that cis interprets the data correctly. The global attributes "Conventions" and "Coordinates" need to be added. A small bash script (faam_edit.sh) is provided that makes the changes using the nc utilities. The example netCDF has already been edited and works with cis.
+
+The example data (core_faam_20161024_v004_r0_b991_1hz_editted.nc) are for flight b991 (24-10-2016), when the aircraft was flying downwind of Manchester and Liverpool measuring the emissions from the two cities ahead of the *Into the Blue* event.
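+
+For reference, the same edit can be sketched in python with the netCDF4
+module. The attribute values below are assumptions about what faam_edit.sh
+writes; check the script for the exact values:
+
+.. code-block:: python
+
+    import netCDF4
+
+    nc = netCDF4.Dataset('core_faam_20161024_v004_r0_b991_1hz.nc', 'a')
+    # global attribute values are assumptions; see faam_edit.sh
+    nc.Conventions = 'CF-1.6'
+    nc.Coordinates = 'LON_GIN LAT_GIN ALT_GIN Time'
+    nc.close()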
+
+
+Starting cis
+------------
+
+The next thing to do is to start the cis environment that we installed earlier. Go to the bin directory of your conda installation::
+
+ cd ~/anaconda2/bin/
+
+and activate the environment::
+
+ source activate cis_env
+
+From now on the shell should have the string '(cis_env)' in front, indicating that we are working in the cis environment.
+
+In the next step we need to tell cis where to look for the FAAM_NetCDF plugin, which is needed to read in the FAAM core data. The environment variable CIS_PLUGIN_HOME has to be set to the directory that contains the FAAM_NetCDF.py file. For me the command looks like this::
+
+ export CIS_PLUGIN_HOME=/home/axel/cis_plugin_development
+
+Go to the directory where the edited faam core netcdf is stored to keep the file path short in the cis commands::
+
+ cd ~/cis_plugin_development/data
+
+
+Working with cis and FAAM data
+------------------------------
+
+Below are several one line examples that show the functionality of the cis tools. Most of the examples have been taken and adapted from the cis online documentation.
+
+.. note::
+ All the commands below go on **one** line in your shell. The page is just too small to get it all printed on one line.
+
+.. note::
+ If you get an error message similar to:
+
+     ERROR - 'DimCoord' object has no attribute 'data' - check cis.log for details
+
+ cis cannot find the FAAM_NetCDF plugin; most likely the CIS_PLUGIN_HOME
+ variable was not defined correctly.
+
+Get information about the netCDF::
+
+ cis info TAT_ND_R:core_faam_20161024_v004_r0_b991_1hz_editted.nc
+
+Create a scatter plot to compare the deiced (TAT_DI_R) and non-deiced (TAT_ND_R) temperature measurements on the ARA::
+
+ cis plot TAT_ND_R:core_faam_20161024_v004_r0_b991_1hz_editted.nc \
+ TAT_DI_R:core_faam_20161024_v004_r0_b991_1hz_editted.nc \
+ --type comparativescatter --grid \
+ --title "True air temperature comparison" \
+ --xlabel "non deiced sensor (K)" --ylabel "deiced sensor (K)"
+
+And print some statistics comparing the deiced and non-deiced temperature variables::
+
+ cis stats TAT_ND_R:core_faam_20161024_v004_r0_b991_1hz_editted.nc \
+ TAT_DI_R:core_faam_20161024_v004_r0_b991_1hz_editted.nc
+
+
+Make a coloured line plot, showing the CO concentration on a map::
+
+ cis plot CO_AERO:core_faam_20161024_v004_r0_b991_1hz_editted.nc \
+ --xaxis longitude --yaxis latitude --xmin -5 --xmax -2 --ymin 52.2 --ymax 55
+
+
+Calculate mean, min and max over 1 min time intervals for the CO_AERO data in the period 11:45 to 14:45. The results are written to a netCDF::
+
+ cis aggregate CO_AERO:core_faam_20161024_v004_r0_b991_1hz_editted.nc:kernel=mean \
+ t=[2016-10-24T11:45,2016-10-24T14:45,PT1M] -o b991_co_aero_1min_mean.nc
+
+ cis aggregate CO_AERO:core_faam_20161024_v004_r0_b991_1hz_editted.nc:kernel=max \
+ t=[2016-10-24T11:45,2016-10-24T14:45,PT1M] -o b991_co_aero_1min_max.nc
+
+ cis aggregate CO_AERO:core_faam_20161024_v004_r0_b991_1hz_editted.nc:kernel=min \
+ t=[2016-10-24T11:45,2016-10-24T14:45,PT1M] -o b991_co_aero_1min_min.nc
+
+Plot the three lines in one figure::
+
+ cis plot CO_AERO:b991_co_aero_1min_max.nc \
+ CO_AERO:b991_co_aero_1min_mean.nc \
+ CO_AERO:b991_co_aero_1min_min.nc
+
+Reproducing an aggregation example from the documentation:
+ http://cis.readthedocs.io/en/stable/aggregation.html#aircraft-track
+
+The results from the aggregation will be saved to a netCDF (option -o). The following line aggregates over 5 minutes and over altitude in 200 m steps in the range of 0 to 1000 m::
+
+ cis aggregate CO_AERO:core_faam_20161024_v004_r0_b991_1hz_editted.nc \
+ t=[2016-10-24T11:45,2016-10-24T14:45,PT5M],z=[0,1000,200] \
+ -o b991_co_aero_alt_time.nc
+
+Plot a curtain using the netCDF that we just created::
+
+ cis plot CO_AERO:b991_co_aero_alt_time.nc --xaxis time --yaxis altitude
+
+
+Make a grid plot of the mean, where each grid cell is 0.2 degrees in size. The results are written to a netCDF::
+
+ cis aggregate CO_AERO:core_faam_20161024_v004_r0_b991_1hz_editted.nc:kernel=mean \
+ x=[-5,0,0.2],y=[52,55,0.2] -o b991_co_aero_grid_mean.nc
+
+Now plot the grid on a map using the netcdf that we just created::
+
+ cis plot CO_AERO:b991_co_aero_grid_mean.nc
diff --git a/examples/faampy_spatial_examples.py b/examples/faampy_spatial_examples.py
new file mode 100644
index 0000000..8da7377
--- /dev/null
+++ b/examples/faampy_spatial_examples.py
@@ -0,0 +1,191 @@
+# -*- coding: utf-8 -*-
+
+"""
+This script contains examples that work with an example spatialite DB, which
+stores (almost) all FAAM flight tracks. An entry in this database can be
+accompanied by a geometry (e.g. point, line, polygon) and it is possible
+to make spatial queries that a normal database is not able to perform. For
+example, distance measurements are possible.
+
+The spatialite db is one single file, which is very convenient and does
+not require the setup of an advanced database, which can come with a lot of
+obstacles. In direct comparison spatialite is less powerful but has all the
+features that we need. For more information see:
+
+ https://www.gaia-gis.it/fossil/libspatialite/index
+
+ http://www.gaia-gis.it/gaia-sins/spatialite-sql-4.4.0.html
+
+ https://www.gaia-gis.it/gaia-sins/spatialite-tutorial-2.3.1.html
+
+ http://postgis.net/docs/
+
+"""
+
+
+import json
+import numpy as np
+import os
+import osgeo.ogr
+import simplekml
+
+from faam_spatial import FAAM_Spatial_DB
+
+LATEST_FID='b991'
+
+DB_FILE = './faam_spatial_db.sqlite'
+
+db = FAAM_Spatial_DB(DB_FILE)
+
+#=============================================================================
+
+print '\n'*3
+print '=' * 40
+print '\n'*3
+
+print 'Some DB info'
+
+# Count how many records are in the DB
+sql="""SELECT COUNT(*) FROM flight_tracks;"""
+cur = db.conn.cursor()
+cur.execute(sql)
+cnt = cur.fetchone()[0]
+
+print 'Number of flights in the DB: %i' % (cnt,)
+print ''
+
+sql="""SELECT fid FROM flight_tracks ORDER BY fid;"""
+cur = db.conn.cursor()
+cur.execute(sql)
+fids = [i[0] for i in cur.fetchall()]
+#By today FAAM has flown a total of 991 missions
+all_fids = set(['b%0.3i' % i for i in range(int(LATEST_FID[1:]))])
+missing_fids = sorted(all_fids.difference(fids))
+
+print 'Number Missing flights: %i' % (len(missing_fids),)
+print 'Missing flights ids: %s' % (','.join(missing_fids),)
+
+
+#=============================================================================
+
+
+print '\n'*3
+print '=' * 40
+print '\n'*3
+print 'TASK: Finding all flights that go over the North Sea'
+print '\n'
+# The shape (Polygon) for the North Sea was downloaded from the web
+# http://www.marineregions.org/gazetteer.php?p=details&id=2350
+print 'Reading in the shape file for the North Sea'
+sf = osgeo.ogr.Open('north_sea.shp')
+layer = sf.GetLayer()
+ns = layer.GetFeature(0) # there is only one feature in the layer
+geometry = ns.GetGeometryRef()
+ns_wkt = geometry.ExportToWkt() # Getting a Well-known text representation
+
+print 'Give me all flights where the track intersects the North Sea Polygon'
+
+# Now that we have the Geometry in a wkt format we can use
+# that to create a sql query that we can send to the spatialite DB
+sql = "SELECT FT.fid FROM flight_tracks FT where "
+sql += "ST_Intersects( GeomFromText('%s'), FT.the_geom) " % (ns_wkt,)
+sql += "ORDER BY FT.fid;"
+cur = db.conn.cursor() # connect
+cur.execute(sql) # execute
+fids = [i[0] for i in cur.fetchall()] # flatten the result
+
+print ''
+print 'Number of flights that intersect the North Sea: %i' % (len(fids),)
+print ''
+print 'List flights that intersect the North Sea: %s\n' % (','.join(fids),)
+
+
+# Now that we have all the fids that intersected the North Sea, we want
+# to look at them using google-earth. Spatialite has the capability of
+# formatting the geometries into a kml string (askml)
+
+sql = "SELECT askml(Simplify(FT.the_geom, 0.01)) FROM flight_tracks FT WHERE"
+sql += " FT.fid IN (%s)" % (str(','.join(["'%s'" % fid for fid in fids])))
+cur.execute(sql)
+flight_tracks_kml = cur.fetchall()
+
+
+# Create a new kml file
+kml = simplekml.Kml()
+folder = kml.newfolder(name='Spatialite result')
+lines = kml.kml().split('\n')
+# wrap the North Sea polygon and every flight track in a kml Placemark
+lines.insert(-4, '<Placemark>')
+lines.insert(-4, '<name>North Sea</name>')
+lines.insert(-4, geometry.ExportToKML())
+lines.insert(-4, '</Placemark>')
+for i, flight_track in enumerate(flight_tracks_kml):
+    lines.insert(-4, '<Placemark>')
+    lines.insert(-4, '<name>%s</name>' % (fids[i],))
+    lines.insert(-4, flight_track[0])
+    lines.insert(-4, '</Placemark>')
+
+
+ofilename = os.path.join(os.environ['HOME'], 'fids_crossing_ns.kml')
+print 'Writing North Sea Polygon and the flight track linestrings as kml'
+print 'kml written to: %s' % (ofilename,)
+ofile = open(ofilename, 'w')
+ofile.write('\n'.join(lines))
+ofile.close()
+
+
+#=============================================================================
+
+
+print '\n'*3
+print '=' * 40
+print '\n'*3
+# Find the length of flight track for a specific flight
+fid = 'b659'
+print 'TASK: Give me the length of %s' % (fid,)
+print '\n'
+sql = "SELECT GreatCircleLength(the_geom) from "
+sql += "flight_tracks where fid = '%s';" % (fid, )
+cur = db.conn.cursor() # connect
+cur.execute(sql) # execute
+length = cur.fetchone()[0]/1000.
+print 'Flight %s was %.2f km long.' % (fid, length)
+
+
+#=============================================================================
+
+
+print '\n'*3
+print '=' * 40
+print '\n'*3
+# Give me all flights where we climbed above 11000 m
+# There does not seem to be a way to do this directly in spatialite, so we
+# do some simple data crunching in python
+#
+# To do this we need to get the xyz coordinates for each flight first and check
+# those. I did not find a way to query the linestring directly.
+# Spatialite can return the geometry in json format, which can then easily be
+# converted into a dictionary with 'coordinates' being one of the keys
+MAX_HEIGHT = 11000
+print 'TASK: Finding flights exceeding %i m altitude' % (int(MAX_HEIGHT),)
+sql = """SELECT fid, AsGeoJSON(the_geom) from flight_tracks;"""
+cur = db.conn.cursor() # connect
+cur.execute(sql) # execute
+result = cur.fetchall()
+fid_max_alt_list = []
+for r in result:
+ fid = r[0]
+ # get the coordinates from the geojson
+ coords = np.array(json.loads(r[1])['coordinates'])
+ # the alt coordinate is the 3rd column
+ alt_max = np.nanmax(coords[:,2])
+ fid_max_alt_list.append((fid, alt_max))
+
+fids = sorted([i[0] for i in fid_max_alt_list if i[1] > MAX_HEIGHT])
+print 'N fids with gps height > %i: %i' % (int(MAX_HEIGHT), len(fids),)
+print ''
+print 'List of flight ids: %s\n' % (','.join(fids),)
+
+
+#=============================================================================
+
+
diff --git a/faampy/_3rdparty/__init__.py b/faampy/_3rdparty/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/faampy/_3rdparty/rdp/__init__.py b/faampy/_3rdparty/rdp/__init__.py
new file mode 100644
index 0000000..94d5d1d
--- /dev/null
+++ b/faampy/_3rdparty/rdp/__init__.py
@@ -0,0 +1,182 @@
+"""
+rdp
+~~~
+
+Python implementation of the Ramer-Douglas-Peucker algorithm.
+
+:copyright: 2014-2016 Fabian Hirschmann
+:license: MIT, see LICENSE.txt for more details.
+
+"""
+from math import sqrt
+from functools import partial
+import numpy as np
+import sys
+
+if sys.version_info[0] >= 3:
+ xrange = range
+
+
+def pldist(point, start, end):
+ """
+ Calculates the distance from ``point`` to the line given
+ by the points ``start`` and ``end``.
+
+ :param point: a point
+ :type point: numpy array
+ :param start: a point of the line
+ :type start: numpy array
+ :param end: another point of the line
+ :type end: numpy array
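+
+    Example:
+
+    >>> import numpy as np
+    >>> float(pldist(np.array([1.0, 1.0]),
+    ...              np.array([0.0, 0.0]),
+    ...              np.array([2.0, 0.0])))
+    1.0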
+ """
+ if np.all(np.equal(start, end)):
+ return np.linalg.norm(point - start)
+
+ return np.divide(
+ np.abs(np.linalg.norm(np.cross(end - start, start - point))),
+ np.linalg.norm(end - start))
+
+
+def rdp_rec(M, epsilon, dist=pldist):
+ """
+ Simplifies a given array of points.
+
+ Recursive version.
+
+ :param M: an array
+ :type M: numpy array
+ :param epsilon: epsilon in the rdp algorithm
+ :type epsilon: float
+ :param dist: distance function
+ :type dist: function with signature ``f(point, start, end)`` -- see :func:`rdp.pldist`
+ """
+ dmax = 0.0
+ index = -1
+
+ for i in xrange(1, M.shape[0]):
+ d = dist(M[i], M[0], M[-1])
+
+ if d > dmax:
+ index = i
+ dmax = d
+
+ if dmax > epsilon:
+ r1 = rdp_rec(M[:index + 1], epsilon, dist)
+ r2 = rdp_rec(M[index:], epsilon, dist)
+
+ return np.vstack((r1[:-1], r2))
+ else:
+ return np.vstack((M[0], M[-1]))
+
+
+def _rdp_iter(M, start_index, last_index, epsilon, dist=pldist):
+ stk = []
+ stk.append([start_index, last_index])
+ global_start_index = start_index
+ indices = np.ones(last_index - start_index + 1, dtype=bool)
+
+ while stk:
+ start_index, last_index = stk.pop()
+
+ dmax = 0.0
+ index = start_index
+
+ for i in xrange(index + 1, last_index):
+ if indices[i - global_start_index]:
+ d = dist(M[i], M[start_index], M[last_index])
+ if d > dmax:
+ index = i
+ dmax = d
+
+ if dmax > epsilon:
+ stk.append([start_index, index])
+ stk.append([index, last_index])
+ else:
+ for i in xrange(start_index + 1, last_index):
+ indices[i - global_start_index] = False
+
+ return indices
+
+
+def rdp_iter(M, epsilon, dist=pldist, return_mask=False):
+ """
+ Simplifies a given array of points.
+
+ Iterative version.
+
+ :param M: an array
+ :type M: numpy array
+ :param epsilon: epsilon in the rdp algorithm
+ :type epsilon: float
+ :param dist: distance function
+ :type dist: function with signature ``f(point, start, end)`` -- see :func:`rdp.pldist`
+ :param return_mask: return the mask of points to keep instead
+ :type return_mask: bool
+ """
+ mask = _rdp_iter(M, 0, len(M) - 1, epsilon, dist)
+
+ if return_mask:
+ return mask
+
+ return M[mask]
+
+
+def rdp(M, epsilon=0, dist=pldist, algo="iter", return_mask=False):
+ """
+ Simplifies a given array of points using the Ramer-Douglas-Peucker
+ algorithm.
+
+ Example:
+
+ >>> from rdp import rdp
+ >>> rdp([[1, 1], [2, 2], [3, 3], [4, 4]])
+ [[1, 1], [4, 4]]
+
+ This is a convenience wrapper around both :func:`rdp.rdp_iter`
+ and :func:`rdp.rdp_rec` that detects if the input is a numpy array
+ in order to adapt the output accordingly. This means that
+ when it is called using a Python list as argument, a Python
+ list is returned, and in case of an invocation using a numpy
+ array, a NumPy array is returned.
+
+ The parameter ``return_mask=True`` can be used in conjunction
+ with ``algo="iter"`` to return only the mask of points to keep. Example:
+
+ >>> from rdp import rdp
+ >>> import numpy as np
+ >>> arr = np.array([1, 1, 2, 2, 3, 3, 4, 4]).reshape(4, 2)
+ >>> arr
+ array([[1, 1],
+ [2, 2],
+ [3, 3],
+ [4, 4]])
+ >>> mask = rdp(arr, algo="iter", return_mask=True)
+ >>> mask
+ array([ True, False, False, True], dtype=bool)
+ >>> arr[mask]
+ array([[1, 1],
+ [4, 4]])
+
+ :param M: a series of points
+ :type M: numpy array with shape ``(n,d)`` where ``n`` is the number of points and ``d`` their dimension
+ :param epsilon: epsilon in the rdp algorithm
+ :type epsilon: float
+ :param dist: distance function
+ :type dist: function with signature ``f(point, start, end)`` -- see :func:`rdp.pldist`
+ :param algo: either ``iter`` for an iterative algorithm or ``rec`` for a recursive algorithm
+ :type algo: string
+ :param return_mask: return mask instead of simplified array
+ :type return_mask: bool
+ """
+
+ if algo == "iter":
+ algo = partial(rdp_iter, return_mask=return_mask)
+ elif algo == "rec":
+ if return_mask:
+ raise NotImplementedError("return_mask=True not supported with algo=\"rec\"")
+ algo = rdp_rec
+
+ if "numpy" in str(type(M)):
+ return algo(M, epsilon, dist)
+
+ return algo(np.array(M), epsilon, dist).tolist()
diff --git a/faampy/_3rdparty/tephi/__init__.py b/faampy/_3rdparty/tephi/__init__.py
new file mode 100644
index 0000000..700ca9c
--- /dev/null
+++ b/faampy/_3rdparty/tephi/__init__.py
@@ -0,0 +1,760 @@
+# (C) British Crown Copyright 2014, Met Office
+#
+# This file is part of tephi.
+#
+# Tephi is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Tephi is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with tephi. If not, see <http://www.gnu.org/licenses/>.
+"""
+The tephi module provides tephigram plotting of pressure, temperature and wind
+barb data.
+
+.. warning::
+ This is a beta release module and is liable to change.
+
+"""
+
+from collections import Iterable, namedtuple
+from functools import partial
+from matplotlib.font_manager import FontProperties
+import matplotlib.pyplot as plt
+from mpl_toolkits.axisartist.grid_helper_curvelinear import GridHelperCurveLinear
+from mpl_toolkits.axisartist import Subplot
+import numbers
+import numpy as np
+import os.path
+
+import isopleths
+import transforms
+
+
+__version__ = '0.1.0'
+
+
+#
+# Miscellaneous constants.
+#
+DEFAULT_WIDTH = 700 # in pixels
+
+ISOBAR_SPEC = [(25, .03), (50, .10), (100, .25), (200, 1.5)]
+ISOBAR_LINE = {'color':'blue', 'linewidth':0.5, 'clip_on':True}
+ISOBAR_TEXT = {'size':8, 'color':'blue', 'clip_on':True, 'va':'bottom', 'ha':'right'}
+ISOBAR_FIXED = [50, 1000]
+
+WET_ADIABAT_SPEC = [(1, .05), (2, .15), (4, 1.5)]
+WET_ADIABAT_LINE = {'color':'orange', 'linewidth':0.5, 'clip_on':True}
+WET_ADIABAT_TEXT = {'size':8, 'color':'orange', 'clip_on':True, 'va':'bottom', 'ha':'left'}
+WET_ADIABAT_FIXED = None
+
+MIXING_RATIO_SPEC = [(1, .05), (2, .18), (4, .3), (8, 1.5)]
+MIXING_RATIO_LINE = {'color':'green', 'linewidth':0.5, 'clip_on':True}
+MIXING_RATIO_TEXT = {'size':8, 'color':'green', 'clip_on':True, 'va':'bottom', 'ha':'right'}
+MIXING_RATIOS = [.001, .002, .005, .01, .02, .03, .05, .1, .15, .2, .3, .4, .5, .6, .8,
+ 1.0, 1.5, 2.0, 2.5, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 12.0, 14.0, 16.0,
+ 18.0, 20.0, 24.0, 28.0, 32.0, 36.0, 40.0, 44.0, 48.0, 52.0, 56.0, 60.0, 68.0, 80.0]
+MIXING_RATIO_FIXED = None
+
+MIN_PRESSURE = 50 # mb = hPa
+MAX_PRESSURE = 1000 # mb = hPa
+MIN_THETA = 0 # degC
+MAX_THETA = 250 # degC
+MIN_WET_ADIABAT = 1 # degC
+MAX_WET_ADIABAT = 60 # degC
+MIN_TEMPERATURE = -50 # degC
+
+
+RESOURCES_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'etc')
+DATA_DIR = os.path.join(RESOURCES_DIR, 'test_data')
+RESULTS_DIR = os.path.join(RESOURCES_DIR, 'test_results')
+
+
+def loadtxt(*filenames, **kwargs):
+ """
+ Load one or more text files of pressure, temperature, wind speed and wind
+ direction value sets.
+
+ Each line should contain, at minimum, a single pressure value (mb or hPa),
+ and a single temperature value (degC), but may also contain a dewpoint
+ value (degC), wind speed (knots) and wind direction value (degrees from
+ north).
+
+ Note that blank lines and comment lines beginning with a '#' are ignored.
+
+ For example:
+
+ >>> import os.path
+ >>> import tephi
+
+ >>> winds = os.path.join(tephi.DATA_DIR, 'barbs.txt')
+ >>> columns = ('pressure', 'dewpoint', 'wind_speed', 'wind_direction')
+ >>> data = tephi.loadtxt(winds, column_titles=columns)
+ >>> pressure = data.pressure
+ >>> dews = data.dewpoint
+ >>> wind_speed = data.wind_speed
+ >>> wind_direction = data.wind_direction
+
+ .. seealso:: :func:`numpy.loadtxt`.
+
+ Args:
+
+ * filenames: one or more filenames.
+
+ Kwargs:
+
+ * column_titles:
+ List of iterables, or None. If specified, should contain one title
+ string for each column of data per specified file. If all of multiple
+ files loaded have the same column titles, then only one tuple of column
+ titles need be specified.
+
+ * delimiter:
+ The string used to separate values. This is passed directly to
+ :func:`np.loadtxt`, which defaults to using any whitespace as delimiter
+ if this keyword is not specified.
+
+ * dtype:
+ The datatype to cast the data in the text file to. Passed directly to
+ :func:`np.loadtxt`.
+
+ Returns:
+ A :func:`collections.namedtuple` instance containing one tuple, named
+ with the relevant column title if specified, for each column of data
+ in the text file loaded. If more than one file is loaded, a sequence
+ of namedtuples is returned.
+
+ """
+ def _repr(nt):
+ """An improved representation of namedtuples over the default."""
+
+ typename = nt.__class__.__name__
+ fields = nt._fields
+ n_fields = len(fields)
+ return_str = '{}(\n'.format(typename)
+ for i, t in enumerate(fields):
+ gap = ' ' * 4
+ if i == n_fields - 1:
+ ender = ''
+ else:
+ ender = '\n'
+ return_str += '{}{}={!r}{}'.format(gap, t, getattr(nt, t), ender)
+ return_str += ')'
+ return return_str
+
+ column_titles = kwargs.pop('column_titles', None)
+ delimiter = kwargs.pop('delimiter', None)
+ dtype = kwargs.pop('dtype', 'f4')
+
+ if column_titles is not None:
+ fields = column_titles[0]
+ if not isinstance(column_titles, basestring):
+ if isinstance(fields, Iterable) and \
+ not isinstance(fields, basestring):
+ # We've an iterable of iterables - multiple titles is True.
+ multiple_titles = True
+ if len(column_titles) > len(filenames):
+ msg = 'Received {} files but {} sets of column titles.'
+ raise ValueError(msg.format(len(column_titles),
+ len(filenames)))
+ elif isinstance(fields, basestring):
+ # We've an iterable of title strings - use for namedtuple.
+ tephidata = namedtuple('tephidata', column_titles)
+ multiple_titles = False
+ else:
+ # Whatever we've got it isn't iterable, so raise TypeError.
+ msg = 'Expected title to be string, got {!r}.'
+ raise TypeError(msg.format(type(column_titles)))
+ else:
+ msg = 'Expected column_titles to be iterable, got {!r}.'
+ raise TypeError(msg.format(type(column_titles)))
+
+ else:
+ tephidata = namedtuple('tephidata', ('pressure', 'temperature'))
+ multiple_titles = False
+
+ data = []
+ for ct, arg in enumerate(filenames):
+ if isinstance(arg, basestring):
+ if os.path.isfile(arg):
+ if multiple_titles:
+ tephidata = namedtuple('tephidata', column_titles[ct])
+ tephidata.__repr__ = _repr
+ payload = np.loadtxt(arg, dtype=dtype, delimiter=delimiter)
+ item = tephidata(*payload.T)
+ data.append(item)
+ else:
+ msg = 'Item {} is either not a file or does not exist.'
+ raise OSError(msg.format(arg))
+
+ if len(data) == 1:
+ data = data[0]
+
+ return data
+
+
+class _FormatterTheta(object):
+ """Dry adiabats potential temperature axis tick formatter."""
+
+ def __call__(self, direction, factor, values):
+ return [r"$\theta=%s$" % str(value) for value in values]
+
+
+class _FormatterIsotherm(object):
+ """Isotherms temperature axis tick formatter."""
+
+ def __call__(self, direction, factor, values):
+ return [r" $T=%s$" % str(value) for value in values]
+
+
+class Locator(object):
+ """Determine the fixed step axis tick locations when called with a tick range."""
+
+ def __init__(self, step):
+ """
+ Set the fixed step value for the axis tick locations.
+
+ Generate tick location specification when called with a tick range.
+
+ For example:
+
+ >>> from tephi import Locator
+ >>> locator = Locator(10)
+ >>> locator(-45, 23)
+ (array([-50, -40, -30, -20, -10, 0, 10, 20]), 8, 1)
+
+ Args:
+
+ * step: the step value for each axis tick.
+
+ """
+ self.step = int(step)
+
+ def __call__(self, start, stop):
+ """Calculate the axis ticks given the provided tick range."""
+
+ step = self.step
+ start = (int(start) / step) * step
+ stop = (int(stop) / step) * step
+ ticks = np.arange(start, stop + step, step)
+
+ return ticks, len(ticks), 1
+
+
+def _refresh_isopleths(axes):
+ """
+ Refresh the plot isobars, wet adiabats and mixing ratios and associated
+ text labels.
+
+ Args:
+
+ * axes:
+ Tephigram plotting :class:`matplotlib.axes.AxesSubplot` instance.
+
+ Returns:
+ Boolean, whether the plot has changed.
+
+ """
+ changed = False
+
+ # Determine the current zoom level.
+ xlim = axes.get_xlim()
+ delta_xlim = xlim[1] - xlim[0]
+ ylim = axes.get_ylim()
+ zoom = delta_xlim / axes.tephigram_original_delta_xlim
+
+ # Determine the display mid-point.
+ x_point = xlim[0] + delta_xlim * 0.5
+ y_point = ylim[0] + (ylim[1] - ylim[0]) * 0.5
+ xy_point = axes.tephigram_inverse.transform(np.array([[x_point, y_point]]))[0]
+
+ for profile in axes.tephigram_profiles:
+ profile.refresh()
+
+ for isopleth in axes.tephigram_isopleths:
+ changed = isopleth.refresh(zoom, xy_point) or changed
+
+ return changed
+
+
+def _handler(event):
+ """Matplotlib event handler."""
+
+ for axes in event.canvas.figure.axes:
+ if hasattr(axes, 'tephigram'):
+ if _refresh_isopleths(axes):
+ event.canvas.figure.show()
+
+
+class _PlotGroup(dict):
+ """
+ Container for a related group of tephigram isopleths.
+
+ Manages the creation and plotting of all isopleths within the group.
+
+ """
+ def __init__(self, axes, plot_func, text_kwargs, step, zoom, tags, fixed=None, xfocus=None):
+ self.axes = axes
+ self.text_kwargs = text_kwargs
+ self.step = step
+ self.zoom = zoom
+
+ pairs = []
+ for tag in tags:
+ text = plt.text(0, 0, str(tag), **text_kwargs)
+ text.set_bbox(dict(boxstyle='Round,pad=0.3', facecolor='white',
+ edgecolor='white', alpha=0.5, clip_on=True,
+ clip_box=self.axes.bbox))
+ pairs.append((tag, [plot_func(tag), text]))
+
+ dict.__init__(self, pairs)
+ for line, text in self.itervalues():
+ line.set_visible(True)
+ text.set_visible(True)
+ self._visible = True
+
+ if fixed is None:
+ fixed = []
+
+ if not isinstance(fixed, Iterable):
+ fixed = [fixed]
+
+ if zoom is None:
+ self.fixed = set(tags)
+ else:
+ self.fixed = set(tags) & set(fixed)
+
+ self.xfocus = xfocus
+
+ def __setitem__(self, tag, item):
+ raise ValueError('Cannot add or set an item into the plot group %r' % self.step)
+
+ def __getitem__(self, tag):
+        if tag not in self:
+ raise KeyError('Tag item %r is not a member of the plot group %r' % (tag, self.step))
+ return dict.__getitem__(self, tag)
+
+ def refresh(self, zoom, xy_point):
+ """
+ Refresh all isopleths within the plot group.
+
+ Args:
+
+ * zoom:
+ Zoom level of the current plot, relative to the initial plot.
+ * xy_point:
+            The center point of the current plot, transformed into
+ temperature and potential temperature.
+
+ Returns:
+ Boolean, whether the plot group has changed.
+
+ """
+ if self.zoom is None or zoom <= self.zoom:
+ changed = self._item_on()
+ else:
+ changed = self._item_off()
+ self._refresh_text(xy_point)
+ return changed
+
+ def _item_on(self, zoom=None):
+ changed = False
+ if zoom is None or self.zoom is None or zoom <= self.zoom:
+ if not self._visible:
+ for line, text in self.itervalues():
+ line.set_visible(True)
+ text.set_visible(True)
+ changed = True
+ self._visible = True
+ return changed
+
+ def _item_off(self, zoom=None):
+ changed = False
+ if self.zoom is not None and (zoom is None or zoom > self.zoom):
+ if self._visible:
+ for tag, (line, text) in self.iteritems():
+ if tag not in self.fixed:
+ line.set_visible(False)
+ text.set_visible(False)
+ changed = True
+ self._visible = False
+ return changed
+
+ def _generate_text(self, tag, xy_point):
+ line, text = self[tag]
+ x_data = line.get_xdata()
+ y_data = line.get_ydata()
+
+ if self.xfocus:
+ delta = np.power(x_data - xy_point[0], 2)
+ else:
+ delta = np.power(x_data - xy_point[0], 2) + np.power(y_data - xy_point[1], 2)
+ index = np.argmin(delta)
+ text.set_position((x_data[index], y_data[index]))
+
+ def _refresh_text(self, xy_point):
+ if self._visible:
+ for tag in self:
+ self._generate_text(tag, xy_point)
+ elif self.fixed:
+ for tag in self.fixed:
+ self._generate_text(tag, xy_point)
+
+
+class _PlotCollection(object):
+ """
+ Container for tephigram isopleths.
+
+ Manages the creation and plotting of all tephigram isobars, mixing ratio
+ lines and pseudo saturated wet adiabats.
+
+ """
+ def __init__(self, axes, spec, stop, plot_func, text_kwargs, fixed=None, minimum=None, xfocus=None):
+ if isinstance(stop, Iterable):
+ if minimum and minimum > max(stop):
+ raise ValueError('Minimum value of %r exceeds all other values' % minimum)
+
+ items = [[step, zoom, set(stop[step - 1::step])] for step, zoom in sorted(spec, reverse=True)]
+ else:
+ if minimum and minimum > stop:
+ raise ValueError('Minimum value of %r exceeds maximum threshold %r' % (minimum, stop))
+
+ items = [[step, zoom, set(range(step, stop + step, step))] for step, zoom in sorted(spec, reverse=True)]
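+        # Coarser (larger step) groups are built first; the loop below
+        # removes any tag already claimed by a coarser group from the finer
+        # groups, so that each isopleth value is drawn by exactly one group.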
+
+ for index, item in enumerate(items):
+ if minimum:
+ item[2] = set([value for value in item[2] if value >= minimum])
+
+ for subitem in items[index + 1:]:
+ subitem[2] -= item[2]
+
+ self.groups = {item[0]:
+ _PlotGroup(axes, plot_func, text_kwargs, *item, fixed=fixed, xfocus=xfocus) for item in items if item[2]}
+
+ if not self.groups:
+ raise ValueError('The plot collection failed to generate any plot groups')
+
+ def refresh(self, zoom, xy_point):
+ """
+ Refresh all isopleth groups within the plot collection.
+
+ Args:
+
+ * zoom:
+ Zoom level of the current plot, relative to the initial plot.
+ * xy_point:
+ The center point of the current plot, transformed into
+ temperature and potential temperature.
+
+ Returns:
+ Boolean, whether any plot group has changed.
+
+ """
+ changed = False
+
+ for group in self.groups.itervalues():
+ changed = group.refresh(zoom, xy_point) or changed
+
+ return changed
+
+
+class Tephigram(object):
+ """
+ Generate a tephigram of one or more pressure and temperature data sets.
+
+ """
+
+ def __init__(self, figure=None, isotherm_locator=None,
+ dry_adiabat_locator=None, anchor=None):
+ """
+ Initialise the tephigram transformation and plot axes.
+
+ Kwargs:
+
+ * figure:
+ An existing :class:`matplotlib.figure.Figure` instance for the
+ tephigram plot. If a figure is not provided, a new figure will
+ be created by default.
+ * isotherm_locator:
+ A :class:`tephi.Locator` instance or a numeric step size
+ for the isotherm lines.
+ * dry_adiabat_locator:
+ A :class:`tephi.Locator` instance or a numeric step size
+ for the dry adiabat lines.
+ * anchor:
+ A sequence of two pressure, temperature pairs specifying the extent
+ of the tephigram plot in terms of the bottom left hand corner and
+ the top right hand corner. Pressure data points must be in units of
+ mb or hPa, and temperature data points must be in units of degC.
+
+ For example:
+
+ .. plot::
+ :include-source:
+
+ import matplotlib.pyplot as plt
+ import os.path
+ import tephi
+ from tephi import Tephigram
+
+ dew_point = os.path.join(tephi.DATA_DIR, 'dews.txt')
+ dry_bulb = os.path.join(tephi.DATA_DIR, 'temps.txt')
+ dew_data, temp_data = tephi.loadtxt(dew_point, dry_bulb)
+ dews = zip(dew_data.pressure, dew_data.temperature)
+ temps = zip(temp_data.pressure, temp_data.temperature)
+ tpg = Tephigram()
+ tpg.plot(dews, label='Dew-point', color='blue', linewidth=2, marker='s')
+ tpg.plot(temps, label='Dry-bulb', color='red', linewidth=2, marker='o')
+ plt.show()
+
+ """
+ if not figure:
+ # Create a default figure.
+ self.figure = plt.figure(0, figsize=(9, 9))
+ else:
+ self.figure = figure
+
+ # Configure the locators.
+ if isotherm_locator and not isinstance(isotherm_locator, Locator):
+ if not isinstance(isotherm_locator, numbers.Number):
+ raise ValueError('Invalid isotherm locator')
+ locator_isotherm = Locator(isotherm_locator)
+ else:
+ locator_isotherm = isotherm_locator
+
+ if dry_adiabat_locator and not isinstance(dry_adiabat_locator, Locator):
+ if not isinstance(dry_adiabat_locator, numbers.Number):
+ raise ValueError('Invalid dry adiabat locator')
+ locator_theta = Locator(dry_adiabat_locator)
+ else:
+ locator_theta = dry_adiabat_locator
+
+ # Define the tephigram coordinate-system transformation.
+ self.tephi_transform = transforms.TephiTransform()
+ grid_helper1 = GridHelperCurveLinear(self.tephi_transform,
+ tick_formatter1=_FormatterIsotherm(),
+ grid_locator1=locator_isotherm,
+ tick_formatter2=_FormatterTheta(),
+ grid_locator2=locator_theta)
+ self.axes = Subplot(self.figure, 1, 1, 1, grid_helper=grid_helper1)
+ self.transform = self.tephi_transform + self.axes.transData
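+        # Add floating axes through value zero for in-plot labelling of the
+        # isotherms and dry adiabats.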
+ self.axes.axis['isotherm'] = self.axes.new_floating_axis(1, 0)
+ self.axes.axis['theta'] = self.axes.new_floating_axis(0, 0)
+ self.axes.axis['left'].get_helper().nth_coord_ticks = 0
+ self.axes.axis['left'].toggle(all=True)
+ self.axes.axis['bottom'].get_helper().nth_coord_ticks = 1
+ self.axes.axis['bottom'].toggle(all=True)
+ self.axes.axis['top'].get_helper().nth_coord_ticks = 0
+ self.axes.axis['top'].toggle(all=False)
+ self.axes.axis['right'].get_helper().nth_coord_ticks = 1
+ self.axes.axis['right'].toggle(all=True)
+ self.axes.gridlines.set_linestyle('solid')
+
+ self.figure.add_subplot(self.axes)
+
+ # Configure default axes.
+ axis = self.axes.axis['left']
+ axis.major_ticklabels.set_fontsize(10)
+ axis.major_ticklabels.set_va('baseline')
+ axis.major_ticklabels.set_rotation(135)
+ axis = self.axes.axis['right']
+ axis.major_ticklabels.set_fontsize(10)
+ axis.major_ticklabels.set_va('baseline')
+ axis.major_ticklabels.set_rotation(-135)
+ self.axes.axis['top'].major_ticklabels.set_fontsize(10)
+ axis = self.axes.axis['bottom']
+ axis.major_ticklabels.set_fontsize(10)
+ axis.major_ticklabels.set_ha('left')
+ axis.major_ticklabels.set_va('top')
+ axis.major_ticklabels.set_rotation(-45)
+
+ # Isotherms: lines of constant temperature (degC).
+ axis = self.axes.axis['isotherm']
+ axis.set_axis_direction('right')
+ axis.set_axislabel_direction('-')
+ axis.major_ticklabels.set_rotation(90)
+ axis.major_ticklabels.set_fontsize(10)
+ axis.major_ticklabels.set_va('bottom')
+ axis.major_ticklabels.set_color('grey')
+ axis.major_ticklabels.set_visible(False) # turned-off
+
+ # Dry adiabats: lines of constant potential temperature (degC).
+ axis = self.axes.axis['theta']
+ axis.set_axis_direction('right')
+ axis.set_axislabel_direction('+')
+ axis.major_ticklabels.set_fontsize(10)
+ axis.major_ticklabels.set_va('bottom')
+ axis.major_ticklabels.set_color('grey')
+ axis.major_ticklabels.set_visible(False) # turned-off
+ axis.line.set_linewidth(3)
+ axis.line.set_linestyle('--')
+
+ # Lock down the aspect ratio.
+ self.axes.set_aspect(1.)
+ self.axes.grid(True)
+
+ # Initialise the text formatter for the navigation status bar.
+ self.axes.format_coord = self._status_bar
+
+ # Factor in the tephigram transform.
+ ISOBAR_TEXT['transform'] = self.transform
+ WET_ADIABAT_TEXT['transform'] = self.transform
+ MIXING_RATIO_TEXT['transform'] = self.transform
+
+ # Create plot collections for the tephigram isopleths.
+ func = partial(isopleths.isobar, MIN_THETA, MAX_THETA, self.axes, self.transform, ISOBAR_LINE)
+ self._isobars = _PlotCollection(self.axes, ISOBAR_SPEC, MAX_PRESSURE, func, ISOBAR_TEXT,
+ fixed=ISOBAR_FIXED, minimum=MIN_PRESSURE)
+
+ func = partial(isopleths.wet_adiabat, MAX_PRESSURE, MIN_TEMPERATURE, self.axes, self.transform, WET_ADIABAT_LINE)
+ self._wet_adiabats = _PlotCollection(self.axes, WET_ADIABAT_SPEC, MAX_WET_ADIABAT, func, WET_ADIABAT_TEXT,
+ fixed=WET_ADIABAT_FIXED, minimum=MIN_WET_ADIABAT, xfocus=True)
+
+ func = partial(isopleths.mixing_ratio, MIN_PRESSURE, MAX_PRESSURE, self.axes, self.transform, MIXING_RATIO_LINE)
+ self._mixing_ratios = _PlotCollection(self.axes, MIXING_RATIO_SPEC, MIXING_RATIOS, func, MIXING_RATIO_TEXT,
+ fixed=MIXING_RATIO_FIXED)
+
+        # Initialise for the tephigram plot event handler. The attributes
+        # stashed on the axes below are read by _handler() on each mouse
+        # motion event to re-filter which isopleths are visible.
+        plt.connect('motion_notify_event', _handler)
+ self.axes.tephigram = True
+ self.axes.tephigram_original_delta_xlim = self.original_delta_xlim = DEFAULT_WIDTH
+ self.axes.tephigram_transform = self.tephi_transform
+ self.axes.tephigram_inverse = self.tephi_transform.inverted()
+ self.axes.tephigram_isopleths = [self._isobars, self._wet_adiabats, self._mixing_ratios]
+
+ # The tephigram profiles.
+ self._profiles = []
+ self.axes.tephigram_profiles = self._profiles
+
+ # Center the plot around the anchor extent.
+ self._anchor = anchor
+ if self._anchor is not None:
+ self._anchor = np.asarray(anchor)
+ if self._anchor.ndim != 2 or self._anchor.shape[-1] != 2 or \
+ len(self._anchor) != 2:
+ msg = 'Invalid anchor, expecting [(bottom-left-pressure, ' \
+ 'bottom-left-temperature), (top-right-pressure, ' \
+ 'top-right-temperature)]'
+ raise ValueError(msg)
+ (bottom_pressure, bottom_temp), \
+ (top_pressure, top_temp) = self._anchor
+
+ if (bottom_pressure - top_pressure) < 0:
+ raise ValueError('Invalid anchor pressure range')
+ if (bottom_temp - top_temp) < 0:
+ raise ValueError('Invalid anchor temperature range')
+
+ self._anchor = isopleths.Profile(anchor, self.axes)
+ self._anchor.plot(visible=False)
+ xlim, ylim = self._calculate_extents()
+ self.axes.set_xlim(xlim)
+ self.axes.set_ylim(ylim)
+
+ def plot(self, data, **kwargs):
+ """
+ Plot the environmental lapse rate profile of the pressure and
+ temperature data points.
+
+ The pressure and temperature data points are transformed into
+ potential temperature and temperature data points before plotting.
+
+ By default, the tephigram will automatically center the plot around
+ all profiles.
+
+ .. warning::
+ Pressure data points must be in units of mb or hPa, and temperature
+ data points must be in units of degC.
+
+ Args:
+
+ * data: pressure and temperature pair data points.
+
+ .. note::
+ All keyword arguments are passed through to
+ :func:`matplotlib.pyplot.plot`.
+
+ For example:
+
+ .. plot::
+ :include-source:
+
+ import matplotlib.pyplot as plt
+ from tephi import Tephigram
+
+ tpg = Tephigram()
+ data = [[1006, 26.4], [924, 20.3], [900, 19.8],
+ [850, 14.5], [800, 12.9], [755, 8.3]]
+ profile = tpg.plot(data, color='red', linestyle='--',
+ linewidth=2, marker='o')
+ barbs = [(10, 45, 900), (20, 60, 850), (25, 90, 800)]
+ profile.barbs(barbs)
+ plt.show()
+
+ For associating wind barbs with an environmental lapse rate profile,
+ see :meth:`~tephi.isopleths.Profile.barbs`.
+
+ """
+ profile = isopleths.Profile(data, self.axes)
+ profile.plot(**kwargs)
+ self._profiles.append(profile)
+
+ # Center the tephigram plot around all the profiles.
+ if self._anchor is None:
+ xlim, ylim = self._calculate_extents(xfactor=.25, yfactor=.05)
+ self.axes.set_xlim(xlim)
+ self.axes.set_ylim(ylim)
+
+ # Refresh the tephigram plot isopleths.
+ _refresh_isopleths(self.axes)
+
+ # Show the plot legend.
+ if 'label' in kwargs:
+ font_properties = FontProperties(size='x-small')
+ plt.legend(loc='upper left', fancybox=True, shadow=True, prop=font_properties)
+
+ return profile
+
+ def _status_bar(self, x_point, y_point):
+ """Generate text for the interactive backend navigation status bar."""
+
+ temperature, theta = transforms.xy_to_temperature_theta(x_point, y_point)
+ pressure, _ = transforms.temperature_theta_to_pressure_temperature(temperature, theta)
+ xlim = self.axes.get_xlim()
+ zoom = (xlim[1] - xlim[0]) / self.original_delta_xlim
+ text = "T:%.2f, theta:%.2f, phi:%.2f (zoom:%.3f)" % (float(temperature), float(theta), float(pressure), zoom)
+
+ return text
+
+ def _calculate_extents(self, xfactor=None, yfactor=None):
+ min_x = min_y = 1e10
+        max_x = max_y = -1e10
+ profiles = self._profiles
+
+ if self._anchor is not None:
+ profiles = [self._anchor]
+
+ for profile in profiles:
+ xy_points = self.tephi_transform.transform(np.concatenate((profile.temperature.reshape(-1, 1),
+ profile.theta.reshape(-1, 1)),
+ axis=1))
+ x_points = xy_points[:, 0]
+ y_points = xy_points[:, 1]
+ min_x, min_y = np.min([min_x, np.min(x_points)]), np.min([min_y, np.min(y_points)])
+ max_x, max_y = np.max([max_x, np.max(x_points)]), np.max([max_y, np.max(y_points)])
+
+ if xfactor is not None:
+ delta_x = max_x - min_x
+ min_x, max_x = min_x - xfactor * delta_x, max_x + xfactor * delta_x
+
+ if yfactor is not None:
+ delta_y = max_y - min_y
+ min_y, max_y = min_y - yfactor * delta_y, max_y + yfactor * delta_y
+
+ return ([min_x, max_x], [min_y, max_y])
diff --git a/faampy/_3rdparty/tephi/_constants.py b/faampy/_3rdparty/tephi/_constants.py
new file mode 100644
index 0000000..b9e394e
--- /dev/null
+++ b/faampy/_3rdparty/tephi/_constants.py
@@ -0,0 +1,27 @@
+# (C) British Crown Copyright 2014, Met Office
+#
+# This file is part of tephi.
+#
+# Tephi is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Tephi is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with tephi. If not, see <http://www.gnu.org/licenses/>.
+"""Tephigram transform and isopleth constants."""
+
+CONST_CP = 1.01e3      # specific heat capacity of dry air (J kg-1 K-1).
+CONST_K = 0.286        # Rd / Cp, the Poisson constant for dry air.
+CONST_KELVIN = 273.15  # celsius to kelvin offset.
+CONST_L = 2.5e6        # latent heat of vaporisation of water (J kg-1).
+CONST_MA = 300.0       # scaling factor for the tephigram x/y transform.
+CONST_RD = 287.0       # specific gas constant for dry air (J kg-1 K-1).
+CONST_RV = 461.0       # specific gas constant for water vapour (J kg-1 K-1).
diff --git a/faampy/_3rdparty/tephi/isopleths.py b/faampy/_3rdparty/tephi/isopleths.py
new file mode 100644
index 0000000..aa4e8a7
--- /dev/null
+++ b/faampy/_3rdparty/tephi/isopleths.py
@@ -0,0 +1,456 @@
+# (C) British Crown Copyright 2014, Met Office
+#
+# This file is part of tephi.
+#
+# Tephi is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Tephi is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with tephi. If not, see <http://www.gnu.org/licenses/>.
+"""
+Tephigram isopleth support for generating and plotting tephigram lines,
+environment profiles and barbs.
+
+"""
+
+import math
+from matplotlib.collections import PathCollection
+import matplotlib.pyplot as plt
+import matplotlib.transforms as mtransforms
+from matplotlib.path import Path
+import numpy as np
+from scipy.interpolate import interp1d
+
+from _constants import CONST_CP, CONST_L, CONST_KELVIN, CONST_RD, CONST_RV
+import transforms
+
+
+# Wind barb speed (knots) ranges used since 1 January 1955.
+_BARB_BINS = np.arange(20) * 5 + 3
+_BARB_GUTTER = 0.1
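+# Each barb record holds the wind speed (knots), direction (degrees
+# clockwise from north), pressure level (mb or hPa), and a slot for the
+# matplotlib artist once the barb has been drawn.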
+_BARB_DTYPE = np.dtype(dict(names=('speed', 'angle', 'pressure', 'barb'),
+ formats=('f4', 'f4', 'f4', np.object)))
+
+#
+# Reference: http://www-nwp/~hadaa/tephigram/tephi_plot.html
+#
+
+
+def mixing_ratio(min_pressure, max_pressure, axes,
+ transform, kwargs, mixing_ratio_value):
+ """
+ Generate and plot a humidity mixing ratio line.
+
+ A line of constant saturation mixing ratio with respect to a
+ plane water surface (g kg-1).
+
+ Args:
+
+ * min_pressure:
+        Minimum pressure, in mb or hPa, for the mixing ratio line extent.
+
+ * max_pressure:
+ Maximum pressure, in mb or hPa, for the mixing ratio line extent.
+
+ * axes:
+ Tephigram plotting :class:`matplotlib.axes.AxesSubplot` instance.
+
+ * transform:
+ Tephigram plotting transformation
+ :class:`matplotlib.transforms.CompositeGenericTransform` instance.
+
+ * kwargs:
+ Keyword arguments for the mixing ratio :class:`matplotlib.lines.Line2D`
+ instance.
+
+ * mixing_ratio_value:
+ The mixing ratio value to be plotted.
+
+ Returns:
+ The mixing ratio :class:`matplotlib.lines.Line2D` instance.
+
+ """
+ pressures = np.linspace(min_pressure, max_pressure, 100)
+ temps = transforms.pressure_mixing_ratio_to_temperature(pressures,
+ mixing_ratio_value)
+ _, thetas = transforms.pressure_temperature_to_temperature_theta(pressures,
+ temps)
+ line, = axes.plot(temps, thetas, transform=transform, **kwargs)
+
+ return line
+
+
+def isobar(min_theta, max_theta, axes, transform, kwargs, pressure):
+ """
+ Generate and plot an isobar line.
+
+ A line of constant pressure (mb).
+
+ Args:
+
+ * min_theta:
+ Minimum potential temperature, in degC, for the isobar extent.
+
+ * max_theta:
+ Maximum potential temperature, in degC, for the isobar extent.
+
+ * axes:
+ Tephigram plotting :class:`matplotlib.axes.AxesSubplot` instance.
+
+ * transform:
+ Tephigram plotting transformation :class:`matplotlib.transforms.CompositeGenericTransform`
+ instance.
+
+ * kwargs:
+ Keyword arguments for the isobar :class:`matplotlib.lines.Line2D` instance.
+
+ * pressure:
+ The isobar pressure value, in mb or hPa, to be plotted.
+
+ Returns:
+ The isobar :class:`matplotlib.lines.Line2D` instance.
+
+ """
+ steps = 100
+ thetas = np.linspace(min_theta, max_theta, steps)
+ _, temps = transforms.pressure_theta_to_pressure_temperature([pressure] * steps, thetas)
+ line, = axes.plot(temps, thetas, transform=transform, **kwargs)
+
+ return line
+
+
+def _wet_adiabat_gradient(min_temperature, pressure, temperature, dp):
+ """
+ Calculate the wet adiabat change in pressure and temperature.
+
+ Args:
+
+ * min_temperature:
+ Minimum potential temperature, in degC, for the wet adiabat line extent.
+
+ * pressure:
+ Pressure point value, in mb or hPa, from which to calculate the gradient
+ difference.
+
+ * temperature:
+ Potential temperature point value, in degC, from which to calculate
+ the gradient difference.
+
+ * dp:
+ The wet adiabat change in pressure, in mb or hPa, from which to calculate
+ the gradient difference.
+
+ Returns:
+ The gradient change as a pressure, potential temperature value pair.
+
+ """
+
+    kelvin = temperature + CONST_KELVIN
+    # Saturation mixing ratio from an integrated Clausius-Clapeyron
+    # relation: 6.11 mb is approximately the saturation vapour pressure at
+    # 0 degC, and 0.622 is epsilon, the ratio Rd / Rv of the gas constants
+    # of dry air and water vapour.
+    lsbc = (CONST_L / CONST_RV) * ((1.0 / CONST_KELVIN) - (1.0 / kelvin))
+    rw = 6.11 * np.exp(lsbc) * (0.622 / pressure)
+    # Pseudo saturated adiabatic gradient, dT/dp.
+    lrwbt = (CONST_L * rw) / (CONST_RD * kelvin)
+    nume = ((CONST_RD * kelvin) / (CONST_CP * pressure)) * (1.0 + lrwbt)
+    deno = 1.0 + (lrwbt * ((0.622 * CONST_L) / (CONST_CP * kelvin)))
+    gradi = nume / deno
+    dt = dp * gradi
+
+ if (temperature + dt) < min_temperature:
+ dt = min_temperature - temperature
+ dp = dt / gradi
+
+ return dp, dt
+
+
+def wet_adiabat(max_pressure, min_temperature, axes,
+ transform, kwargs, temperature):
+ """
+ Generate and plot a pseudo saturated wet adiabat line.
+
+ A line of constant equivalent potential temperature for saturated
+ air parcels (degC).
+
+ Args:
+
+ * max_pressure:
+ Maximum pressure, in mb or hPa, for the wet adiabat line extent.
+
+ * min_temperature:
+ Minimum potential temperature, in degC, for the wet adiabat line extent.
+
+ * axes:
+ Tephigram plotting :class:`matplotlib.axes.AxesSubplot` instance.
+
+ * transform:
+ Tephigram plotting transformation
+ :class:`matplotlib.transforms.CompositeGenericTransform` instance.
+
+ * kwargs:
+ Keyword arguments for the mixing ratio :class:`matplotlib.lines.Line2D`
+ instance.
+
+ * temperature:
+ The wet adiabat value, in degC, to be plotted.
+
+ Returns:
+ The wet adiabat :class:`matplotlib.lines.Line2D` instance.
+
+ """
+ temps = [temperature]
+ pressures = [max_pressure]
+    dp = -5.0
+
+    # Integrate towards lower pressure in 5 mb steps, following the local
+    # wet adiabat gradient at each step.
+    for i in xrange(200):
+ dp, dt = _wet_adiabat_gradient(min_temperature, pressures[i],
+ temps[i], dp)
+ temps.append(temps[i] + dt)
+ pressures.append(pressures[i] + dp)
+
+ _, thetas = transforms.pressure_temperature_to_temperature_theta(pressures,
+ temps)
+ line, = axes.plot(temps, thetas, transform=transform, **kwargs)
+
+ return line
+
+
+class Barbs(object):
+ """Generate a wind arrow barb."""
+
+ def __init__(self, axes):
+ """
+ Create a wind arrow barb for the given axes.
+
+ Args:
+
+ * axes:
+ A :class:`matplotlib.axes.AxesSubplot` instance.
+
+ """
+ self.axes = axes
+ self.barbs = None
+ self._gutter = None
+ self._transform = axes.tephigram_transform + axes.transData
+ self._kwargs = None
+ self._custom_kwargs = None
+ self._custom = dict(color=['barbcolor', 'color', 'edgecolor', 'facecolor'],
+ linewidth=['lw', 'linewidth'],
+ linestyle=['ls', 'linestyle'])
+
+ @staticmethod
+ def _uv(magnitude, angle):
+ """
+ Convert magnitude and angle measured in degrees to u and v components,
+ where u is -x and v is -y.
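+
+        For example, an angle of 90 degrees (a wind blowing from the east,
+        measured clockwise from north) gives u = -magnitude and v = 0.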
+
+ """
+ angle = angle % 360
+ u = v = 0
+ # Snap the magnitude of the barb vector to fall into one of the
+ # _BARB_BINS ensuring it's a multiple of five. Five is the increment
+        # step size for decorating the barb with flags.
+ magnitude = np.searchsorted(_BARB_BINS, magnitude, side='right') * 5
+ modulus = angle % 90
+ if modulus:
+ quadrant = int(angle / 90)
+ radians = math.radians(modulus)
+ y = math.cos(radians) * magnitude
+ x = math.sin(radians) * magnitude
+ if quadrant == 0:
+ u, v = -x, -y
+ elif quadrant == 1:
+ u, v = -y, x
+ elif quadrant == 2:
+ u, v = x, y
+ else:
+ u, v = y, -x
+ else:
+ angle = int(angle)
+ if angle == 0:
+ v = -magnitude
+ elif angle == 90:
+ u = -magnitude
+ elif angle == 180:
+ v = magnitude
+ else:
+ u = magnitude
+ return u, v
+
+ def _make_barb(self, temperature, theta, speed, angle):
+ """Add the barb to the plot at the specified location."""
+ u, v = self._uv(speed, angle)
+ if 0 < speed < _BARB_BINS[0]:
+ # Plot the missing barbless 1-2 knots line.
+ length = self._kwargs['length']
+ pivot_points = dict(tip=0.0, middle=-length / 2.)
+ pivot = self._kwargs.get('pivot', 'tip')
+ offset = pivot_points[pivot]
+ verts = [(0.0, offset), (0.0, length + offset)]
+ verts = mtransforms.Affine2D().rotate(math.radians(-angle)).transform(verts)
+ codes = [Path.MOVETO, Path.LINETO]
+ path = Path(verts, codes)
+ size = length ** 2 / 4
+ xy = np.array([[temperature, theta]])
+ barb = PathCollection([path], (size,), offsets=xy,
+ transOffset=self._transform, **self._custom_kwargs)
+ barb.set_transform(mtransforms.IdentityTransform())
+ self.axes.add_collection(barb)
+ else:
+ barb = plt.barbs(temperature, theta, u, v,
+ transform=self._transform, **self._kwargs)
+ return barb
+
+ def refresh(self):
+ """Refresh the plot with the barbs."""
+ if self.barbs is not None:
+ xlim = self.axes.get_xlim()
+ ylim = self.axes.get_ylim()
+ y = np.linspace(*ylim)[::-1]
+ xdelta = xlim[1] - xlim[0]
+ x = np.asarray([xlim[1] - (xdelta * self._gutter)] * y.size)
+ points = self.axes.tephigram_inverse.transform(np.asarray(zip(x, y)))
+ temperature, theta = points[:, 0], points[:, 1]
+ pressure, _ = transforms.temperature_theta_to_pressure_temperature(temperature,
+ theta)
+ min_pressure, max_pressure = np.min(pressure), np.max(pressure)
+ func = interp1d(pressure, temperature)
+ for i, (speed, angle, pressure, barb) in enumerate(self.barbs):
+ if min_pressure < pressure < max_pressure:
+ temperature, theta = transforms.pressure_temperature_to_temperature_theta(pressure,
+ func(pressure))
+ if barb is None:
+ self.barbs[i]['barb'] = self._make_barb(temperature, theta, speed, angle)
+ else:
+ barb.set_offsets(np.array([[temperature, theta]]))
+ barb.set_visible(True)
+ else:
+ if barb is not None:
+ barb.set_visible(False)
+
+ def plot(self, barbs, **kwargs):
+ """
+ Plot the sequence of barbs.
+
+ Args:
+
+ * barbs:
+ Sequence of speed, direction and pressure value triples for
+ each barb. Where speed is measured in units of knots, direction
+ in units of degrees (clockwise from north), and pressure must
+ be in units of mb or hPa.
+
+ Kwargs:
+
+ * gutter:
+            Proportion offset from the right-hand side axis at which to
+            plot the barbs. Defaults to 0.1.
+
+ Also see :func:`matplotlib.pyplot.barbs`
+
+ """
+ self._gutter = kwargs.pop('gutter', _BARB_GUTTER)
+ self._kwargs = dict(length=7, zorder=10)
+ self._kwargs.update(kwargs)
+ self._custom_kwargs = dict(color=None, linewidth=1.5,
+ zorder=self._kwargs['zorder'])
+ for key, values in self._custom.iteritems():
+ common = set(values).intersection(kwargs)
+ if common:
+ self._custom_kwargs[key] = kwargs[sorted(common)[0]]
+ barbs = np.asarray(barbs)
+ if barbs.ndim != 2 or barbs.shape[-1] != 3:
+            msg = 'The barbs must be a sequence of wind speed, ' \
+                  'wind direction and pressure value triples.'
+ raise ValueError(msg)
+ self.barbs = np.empty(barbs.shape[0], dtype=_BARB_DTYPE)
+ for i, barb in enumerate(barbs):
+ self.barbs[i] = tuple(barb) + (None,)
+ self.refresh()
+
+
+class Profile(object):
+ """Generate an environmental lapse rate profile."""
+
+ def __init__(self, data, axes):
+ """
+ Create an environmental lapse rate profile from the sequence of
+ pressure and temperature point data.
+
+ Args:
+
+ * data:
+ Sequence of pressure and temperature points defining the
+ environmental lapse rate.
+
+ * axes:
+ The axes on which to plot the profile.
+
+ """
+ self.data = np.asarray(data)
+ if self.data.ndim != 2 or self.data.shape[-1] != 2:
+            msg = 'The environment profile data must be a sequence ' \
+                  'of pressure, temperature value pairs.'
+ raise ValueError(msg)
+ self.axes = axes
+ self._transform = axes.tephigram_transform + axes.transData
+ self.pressure = self.data[:, 0]
+ self.temperature = self.data[:, 1]
+ _, self.theta = transforms.pressure_temperature_to_temperature_theta(self.pressure,
+ self.temperature)
+ self.line = None
+ self._barbs = Barbs(axes)
+
+ def plot(self, **kwargs):
+ """
+ Plot the environmental lapse rate profile.
+
+ Kwargs:
+
+ See :func:`matplotlib.pyplot.plot`.
+
+ Returns:
+            The profile :class:`matplotlib.lines.Line2D` instance.
+
+ """
+        if self.line is not None and self.line in self.axes.lines:
+            self.axes.lines.remove(self.line)
+
+ if 'zorder' not in kwargs:
+ kwargs['zorder'] = 10
+
+ self.line, = self.axes.plot(self.temperature, self.theta,
+ transform=self._transform, **kwargs)
+ return self.line
+
+ def refresh(self):
+ """Refresh the plot with the profile and any associated barbs."""
+ self._barbs.refresh()
+
+ def barbs(self, barbs, **kwargs):
+ """
+ Plot the sequence of barbs associated with this profile.
+
+ Args:
+
+ * barbs:
+ Sequence of speed, direction and pressure value triples for
+ each barb. Where speed is measured in units of knots, direction
+ in units of degrees (clockwise from north), and pressure must
+ be in units of mb or hPa.
+
+ Kwargs:
+
+ See :func:`matplotlib.pyplot.barbs`
+
+ """
+ colors = ['color', 'barbcolor', 'edgecolor', 'facecolor']
+ if not set(colors).intersection(kwargs):
+ kwargs['color'] = self.line.get_color()
+ self._barbs.plot(barbs, **kwargs)
diff --git a/faampy/_3rdparty/tephi/tests/__init__.py b/faampy/_3rdparty/tephi/tests/__init__.py
new file mode 100644
index 0000000..6f6579d
--- /dev/null
+++ b/faampy/_3rdparty/tephi/tests/__init__.py
@@ -0,0 +1,275 @@
+# (C) British Crown Copyright 2014, Met Office
+#
+# This file is part of tephi.
+#
+# Tephi is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Tephi is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with tephi. If not, see <http://www.gnu.org/licenses/>.
+"""
+Provides enhanced testing capabilities.
+
+The primary class for this module is :class:`TephiTest`.
+
+When importing this module, sys.argv is inspected to identify the flags
+``-d`` and ``-sf`` which toggle displaying and saving image tests respectively.
+
+.. note:: The ``-d`` option sets the matplotlib backend to either agg or
+ tkagg. For this reason ``tephi.tests`` **must** be imported before
+ ``matplotlib.pyplot``
+
+"""
+
+import collections
+import contextlib
+import difflib
+import logging
+import os
+import os.path
+import platform
+import StringIO
+import sys
+import tempfile
+import unittest
+import zlib
+
+import matplotlib
+# NB pyplot is imported after main() so that a backend can be defined.
+# import matplotlib.pyplot as plt
+import numpy
+
+from tephi import DATA_DIR, RESULTS_DIR
+
+
+_DATA_PATH = DATA_DIR
+"""Basepath for test data."""
+
+_RESULT_PATH = RESULTS_DIR
+"""Basepath for test results."""
+
+
+# A shared logger for use by unit tests
+logging.basicConfig()
+logger = logging.getLogger('tests')
+
+
+# Whether to display matplotlib output to the screen.
+_DISPLAY_FIGURES = False
+
+# Whether to save matplotlib output to files.
+_SAVE_FIGURES = False
+
+if '-d' in sys.argv:
+ sys.argv.remove('-d')
+ matplotlib.use('tkagg')
+ _DISPLAY_FIGURES = True
+else:
+ matplotlib.use('agg')
+
+# Imported now so that matplotlib.use can work
+import matplotlib.pyplot as plt
+
+if '-sf' in sys.argv or os.environ.get('TEPHI_TEST_SAVE_FIGURES', '') == '1':
+ if '-sf' in sys.argv:
+ sys.argv.remove('-sf')
+ _SAVE_FIGURES = True
+
+
+_PLATFORM = '%s_%s' % (''.join(platform.dist()[:2]), platform.architecture()[0])
+
+
+def main():
+ """
+ A wrapper for unittest.main() which adds customised options to the
+ help (-h) output.
+
+ """
+ if '-h' in sys.argv or '--help' in sys.argv:
+ stdout = sys.stdout
+ buff = StringIO.StringIO()
+ # NB. unittest.main() raises an exception after it's shown the help text
+ try:
+ sys.stdout = buff
+ unittest.main()
+ finally:
+ sys.stdout = stdout
+ lines = buff.getvalue().split('\n')
+ lines.insert(9, 'Tephi-specific options:')
+ lines.insert(10, ' -d Display matplotlib figures (uses tkagg)')
+ lines.insert(11, ' -sf Save matplotlib figures to subfolder "image_results"')
+ print '\n'.join(lines)
+ else:
+ unittest.main()
+
+
+def get_data_path(relative_path):
+ """
+ Returns the absolute path to a data file when given the relative path
+ as a string, or sequence of strings.
+
+ """
+ if not isinstance(relative_path, basestring):
+ relative_path = os.path.join(*relative_path)
+ return os.path.abspath(os.path.join(_DATA_PATH, relative_path))
+
+
+def get_result_path(relative_path):
+ """
+ Returns the absolute path to a result file when given the relative path
+ as a string, or sequence of strings.
+
+ """
+ if not isinstance(relative_path, basestring):
+ relative_path = os.path.join(*relative_path)
+ return os.path.abspath(os.path.join(_RESULT_PATH, relative_path))
+
+
+class TephiTest(unittest.TestCase):
+ """
+ A subclass of unittest.TestCase which provides testing functionality
+ specific to tephi.
+
+ """
+
+ _assertion_counts = collections.defaultdict(int)
+
+    def file_checksum(self, file_path):
+        """Generate a CRC32 checksum from the contents of a file."""
+        with open(file_path, "rb") as in_file:
+            return zlib.crc32(in_file.read())
+
+ def _unique_id(self):
+ """
+ Returns the unique ID for the current assertion.
+
+ The ID is composed of two parts: a unique ID for the current test
+ (which is itself composed of the module, class, and test names), and
+ a sequential counter (specific to the current test) that is incremented
+ on each call.
+
+ For example, calls from a "test_tx" routine followed by a "test_ty"
+        routine might result in::
+
+ test_plot.TestContourf.test_tx.0
+ test_plot.TestContourf.test_tx.1
+ test_plot.TestContourf.test_tx.2
+ test_plot.TestContourf.test_ty.0
+
+ """
+ # Obtain a consistent ID for the current test.
+
+ # NB. unittest.TestCase.id() returns different values depending on
+ # whether the test has been run explicitly, or via test discovery.
+ # For example:
+ # python tests/test_brand.py
+ # => '__main__.TestBranding.test_combo'
+ # python -m unittest discover
+ # => 'tephi.tests.test_brand.TestBranding.test_combo'
+ bits = self.id().split('.')[-3:]
+ if bits[0] == '__main__':
+ file_name = os.path.basename(sys.modules['__main__'].__file__)
+ bits[0] = os.path.splitext(file_name)[0]
+ test_id = '.'.join(bits)
+
+ # Derive the sequential assertion ID within the test
+ assertion_id = self._assertion_counts[test_id]
+ self._assertion_counts[test_id] += 1
+
+ return test_id + '.' + str(assertion_id)
+
+ def _ensure_folder(self, path):
+ dir_path = os.path.dirname(path)
+ if not os.path.exists(dir_path):
+ logger.warning('Creating folder: %s', dir_path)
+ os.makedirs(dir_path)
+
+ def create_temp_filename(self, suffix=''):
+ """
+ Return a temporary file name.
+
+ Args:
+
+ * suffix - Optional filename extension.
+
+ """
+ temp_file = tempfile.mkstemp(suffix)
+ os.close(temp_file[0])
+ return temp_file[1]
+
+ @contextlib.contextmanager
+ def temp_filename(self, suffix=''):
+        filename = self.create_temp_filename(suffix)
+        try:
+            yield filename
+        finally:
+            # Remove the temporary file even if the with-block raises.
+            os.remove(filename)
+
+ def assertArrayEqual(self, a, b):
+ return numpy.testing.assert_array_equal(a, b)
+
+ def assertArrayAlmostEqual(self, a, b, *args, **kwargs):
+ return numpy.testing.assert_array_almost_equal(a, b, *args, **kwargs)
+
+
+class GraphicsTest(TephiTest):
+ def tearDown(self):
+ # If a plotting test bombs out it can leave the current figure in an
+ # odd state, so we make sure it's been disposed of.
+ plt.close()
+
+ def _get_image_checksum(self, unique_id, resultant_checksum):
+ checksum_result_path = get_result_path(('image_checksums', _PLATFORM, unique_id + '.txt'))
+ if os.path.isfile(checksum_result_path):
+ with open(checksum_result_path, 'r') as checksum_file:
+ checksum = int(checksum_file.readline().strip())
+ else:
+ self._ensure_folder(checksum_result_path)
+ logger.warning('Creating image checksum result file: %s', checksum_result_path)
+ checksum = resultant_checksum
+            with open(checksum_result_path, 'w') as result_file:
+                result_file.write(str(checksum))
+ return checksum
+
+ def check_graphic(self):
+ """
+ Checks the CRC matches for the current matplotlib.pyplot figure, and
+ closes the figure.
+
+ """
+ unique_id = self._unique_id()
+
+ figure = plt.gcf()
+
+ try:
+ suffix = '.png'
+ if _SAVE_FIGURES:
+ file_path = get_result_path(('image_results',
+ unique_id + suffix))
+ dir_path = os.path.dirname(file_path)
+ if not os.path.isdir(dir_path):
+ os.makedirs(dir_path)
+ else:
+ file_path = self.create_temp_filename(suffix)
+
+ figure.savefig(file_path)
+ resultant_checksum = self.file_checksum(file_path)
+
+ if not _SAVE_FIGURES:
+ os.remove(file_path)
+
+ checksum = self._get_image_checksum(unique_id, resultant_checksum)
+
+ if _DISPLAY_FIGURES:
+ if resultant_checksum != checksum:
+ print 'Test would have failed (new checksum: %s ; old checksum: %s)' % (resultant_checksum, checksum)
+ plt.show()
+ else:
+ self.assertEqual(resultant_checksum, checksum, 'Image checksums not equal for %s' % unique_id)
+ finally:
+ plt.close()
diff --git a/faampy/_3rdparty/tephi/tests/test_tephigram.py b/faampy/_3rdparty/tephi/tests/test_tephigram.py
new file mode 100644
index 0000000..a95ea13
--- /dev/null
+++ b/faampy/_3rdparty/tephi/tests/test_tephigram.py
@@ -0,0 +1,269 @@
+# (C) British Crown Copyright 2014, Met Office
+#
+# This file is part of tephi.
+#
+# Tephi is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Tephi is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with tephi. If not, see <http://www.gnu.org/licenses/>.
+"""
+Tests the tephigram plotting capability provided by tephi.
+
+"""
+# Import tephi test package first so that some things can be initialised
+# before importing anything else.
+import tephi.tests as tests
+
+import cPickle
+
+import matplotlib.pyplot as plt
+import numpy as np
+
+import tephi
+from tephi import Tephigram
+
+
+def _load_result(filename):
+ with open(tephi.tests.get_result_path(filename)) as f:
+ result = cPickle.load(f)
+ return result
+
+
+_expected_dews = _load_result('dews.pkl')
+_expected_temps = _load_result('temps.pkl')
+_expected_barbs = _load_result('barbs.pkl')
+
+
+class TestTephigramLoadTxt(tests.TephiTest):
+ def setUp(self):
+ self.filename_dews = tephi.tests.get_data_path('dews.txt')
+ self.filename_temps = tephi.tests.get_data_path('temps.txt')
+ self.filename_barbs = tephi.tests.get_data_path('barbs.txt')
+ self.filename_comma = tephi.tests.get_data_path('comma_sep.txt')
+
+ def test_is_not_file(self):
+ with self.assertRaises(OSError):
+ tephi.loadtxt('wibble')
+
+ def test_load_data_no_column_names(self):
+ dews = tephi.loadtxt(self.filename_dews)
+ self.assertEqual(dews._fields, ('pressure', 'temperature'))
+ self.assertArrayEqual(dews.pressure, _expected_dews[0])
+ self.assertArrayEqual(dews, _expected_dews)
+
+ def test_load_data_with_column_names(self):
+ # Column titles test all valid namedtuple characters (alphanumeric, _).
+ columns = ('pressure', 'dewpoint2', 'wind_speed', 'WindDirection')
+ barbs = tephi.loadtxt(self.filename_barbs, column_titles=columns)
+ self.assertEqual(barbs._fields, columns)
+ self.assertArrayEqual(barbs.wind_speed, _expected_barbs[2])
+ self.assertArrayEqual(barbs, _expected_barbs)
+
+ def test_load_multiple_files_same_column_names(self):
+ columns = ('foo', 'bar')
+ dews, temps = tephi.loadtxt(self.filename_dews,
+ self.filename_temps,
+ column_titles=columns)
+ self.assertEqual(dews._fields, columns)
+ self.assertEqual(temps._fields, columns)
+
+ def test_load_data_too_many_column_iterables(self):
+ columns = [('pressure', 'dewpoint'),
+ ('pressure', 'wind_speed', 'wind_direction')]
+ with self.assertRaises(ValueError):
+ dews = tephi.loadtxt(self.filename_dews, column_titles=columns)
+
+ def test_number_of_columns_and_titles_not_equal(self):
+ columns = ('pressure', 'dewpoint', 'wind_speed')
+ with self.assertRaises(TypeError):
+ barbs = tephi.loadtxt(self.filename_barbs,
+ column_titles=columns)
+
+ def test_invalid_column_titles(self):
+ columns = ('pres-sure', 'dew+point', 5)
+ with self.assertRaises(ValueError):
+ dews = tephi.loadtxt(self.filename_dews,
+ column_titles=columns)
+
+ def test_non_iterable_column_title(self):
+ # For the case of column titles, strings are considered non-iterable.
+ columns = 'pressure'
+ with self.assertRaises(TypeError):
+ dews = tephi.loadtxt(self.filename_dews,
+ column_titles=columns)
+
+ def test_delimiter(self):
+ columns = ('pressure', 'temperature', 'wind_direction', 'wind_speed')
+ data = tephi.loadtxt(self.filename_comma, column_titles=columns,
+ delimiter=',')
+ self.assertEqual(data.pressure.shape, (2,))
+
+ def test_dtype(self):
+ dews = tephi.loadtxt(self.filename_dews, dtype='i4')
+ self.assertIsInstance(dews.pressure[0], np.int32)
+ self.assertIsInstance(dews.temperature[0], np.int32)
+
+
+class TestTephigramPlot(tests.GraphicsTest):
+ def setUp(self):
+ dew_data = _expected_dews
+ self.dews = zip(dew_data[0], dew_data[1])
+ temp_data = _expected_temps
+ self.temps = zip(temp_data[0], temp_data[1])
+
+ def test_plot_dews(self):
+ tpg = Tephigram()
+ tpg.plot(self.dews)
+ self.check_graphic()
+
+ def test_plot_temps(self):
+ tpg = Tephigram()
+ tpg.plot(self.temps)
+ self.check_graphic()
+
+ def test_plot_dews_temps(self):
+ tpg = Tephigram()
+ tpg.plot(self.dews)
+ tpg.plot(self.temps)
+ self.check_graphic()
+
+ def test_plot_dews_label(self):
+ tpg = Tephigram()
+ tpg.plot(self.dews, label='Dew-point temperature')
+ self.check_graphic()
+
+ def test_plot_temps_label(self):
+ tpg = Tephigram()
+ tpg.plot(self.temps, label='Dry-bulb temperature')
+ self.check_graphic()
+
+ def test_plot_dews_custom(self):
+ tpg = Tephigram()
+ tpg.plot(self.dews, label='Dew-point temperature', linewidth=2, color='blue', marker='s')
+ self.check_graphic()
+
+ def test_plot_temps_custom(self):
+ tpg = Tephigram()
+        tpg.plot(self.temps, label='Dry-bulb temperature', linewidth=2, color='red', marker='o')
+ self.check_graphic()
+
+ def test_plot_dews_temps_custom(self):
+ tpg = Tephigram()
+ tpg.plot(self.dews, label='Dew-point temperature', linewidth=2, color='blue', marker='s')
+        tpg.plot(self.temps, label='Dry-bulb temperature', linewidth=2, color='red', marker='o')
+ self.check_graphic()
+
+ def test_plot_dews_locator_isotherm_numeric(self):
+ tpg = Tephigram(isotherm_locator=10)
+ tpg.plot(self.dews)
+ self.check_graphic()
+
+ def test_plot_dews_locator_isotherm_object(self):
+ tpg = Tephigram(isotherm_locator=tephi.Locator(10))
+ tpg.plot(self.dews)
+ self.check_graphic()
+
+ def test_plot_dews_locator_adiabat_numeric(self):
+ tpg = Tephigram(dry_adiabat_locator=10)
+ tpg.plot(self.dews)
+ self.check_graphic()
+
+ def test_plot_dews_locator_adiabat_object(self):
+ tpg = Tephigram(dry_adiabat_locator=tephi.Locator(10))
+ tpg.plot(self.dews)
+ self.check_graphic()
+
+ def test_plot_dews_locator_numeric(self):
+ tpg = Tephigram(isotherm_locator=10, dry_adiabat_locator=10)
+ tpg.plot(self.dews)
+ self.check_graphic()
+
+ def test_plot_dews_locator_object(self):
+ locator = tephi.Locator(10)
+ tpg = Tephigram(isotherm_locator=locator, dry_adiabat_locator=locator)
+ tpg.plot(self.dews)
+ self.check_graphic()
+
+ def test_plot_anchor(self):
+ tpg = Tephigram(anchor=[(1000, 0), (300, 0)])
+ tpg.plot(self.dews)
+ self.check_graphic()
+
+
+class TestTephigramBarbs(tests.GraphicsTest):
+ def setUp(self):
+ self.dews = zip(_expected_dews[0], _expected_dews[1])
+        self.temps = zip(_expected_temps[0], _expected_temps[1])
+ magnitude = np.hstack(([0], np.arange(20) * 5 + 2, [102]))
+ self.barbs = [(m, 45, 1000 - i*35) for i, m in enumerate(magnitude)]
+
+ def test_rotate(self):
+ tpg = Tephigram()
+ profile = tpg.plot(self.temps)
+ profile.barbs([(0, 0, 900),
+ (1, 30, 850),
+ (5, 60, 800),
+ (10, 90, 750),
+ (15, 120, 700),
+ (20, 150, 650),
+ (25, 180, 600),
+ (30, 210, 550),
+ (35, 240, 500),
+ (40, 270, 450),
+ (45, 300, 400),
+ (50, 330, 350),
+ (55, 360, 300)], zorder=10)
+ self.check_graphic()
+
+ def test_barbs(self):
+ tpg = Tephigram()
+ profile = tpg.plot(self.temps)
+ profile.barbs(self.barbs, zorder=10)
+ self.check_graphic()
+
+ def test_barbs_from_file(self):
+ tpg = Tephigram()
+ dews = zip(_expected_barbs[0], _expected_barbs[1])
+ barbs = zip(_expected_barbs[2], _expected_barbs[3], _expected_barbs[0])
+ profile = tpg.plot(dews)
+ profile.barbs(barbs, zorder=10)
+ self.check_graphic()
+
+ def test_gutter(self):
+ tpg = Tephigram()
+ profile = tpg.plot(self.temps)
+ profile.barbs(self.barbs, gutter=0.5, zorder=10)
+ self.check_graphic()
+
+ def test_length(self):
+ tpg = Tephigram()
+ profile = tpg.plot(self.temps)
+ profile.barbs(self.barbs, gutter=0.9, length=10, zorder=10)
+ self.check_graphic()
+
+ def test_color(self):
+ tpg = Tephigram()
+ profile = tpg.plot(self.temps)
+ profile.barbs(self.barbs, color='green', zorder=10)
+ self.check_graphic()
+
+ def test_pivot(self):
+ tpg = Tephigram()
+ tprofile = tpg.plot(self.temps)
+ tprofile.barbs(self.barbs, gutter=0.2, pivot='tip', length=8)
+ dprofile = tpg.plot(self.dews)
+ dprofile.barbs(self.barbs, gutter=0.3, pivot='middle', length=8)
+ self.check_graphic()
+
+
+if __name__ == '__main__':
+ tests.main()
diff --git a/faampy/_3rdparty/tephi/transforms.py b/faampy/_3rdparty/tephi/transforms.py
new file mode 100644
index 0000000..68ec461
--- /dev/null
+++ b/faampy/_3rdparty/tephi/transforms.py
@@ -0,0 +1,262 @@
+# (C) British Crown Copyright 2014, Met Office
+#
+# This file is part of tephi.
+#
+# Tephi is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Tephi is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with tephi. If not, see <http://www.gnu.org/licenses/>.
+"""
+Tephigram transform support.
+
+"""
+
+from matplotlib.transforms import Transform
+import numpy as np
+
+from _constants import CONST_K, CONST_KELVIN, CONST_L, CONST_MA, CONST_RV
+
+
+#
+# Reference: http://www-nwp/~hadaa/tephigram/tephi_plot.html
+#
+
+
+def temperature_theta_to_pressure_temperature(temperature, theta):
+ """
+ Transform temperature and potential temperature into
+ pressure and temperature.
+
+ Args:
+
+ * temperature:
+ Temperature in degC.
+
+ * theta:
+ Potential temperature in degC.
+
+ Returns:
+ Tuple of pressure, in mb or hPa, and temperature, in degC.
+
+ """
+ temperature, theta = np.asarray(temperature), np.asarray(theta)
+
+ # Convert temperature and theta from degC to kelvin.
+ kelvin = temperature + CONST_KELVIN
+ theta = theta + CONST_KELVIN
+
+    # Calculate the associated pressure by inverting Poisson's equation,
+    # theta = kelvin * (1000 / pressure) ** CONST_K.
+ pressure = 1000.0 * np.power(kelvin / theta, 1 / CONST_K)
+
+ return pressure, temperature
+
+
+def pressure_temperature_to_temperature_theta(pressure, temperature):
+ """
+ Transform pressure and temperature into temperature and
+ potential temperature.
+
+ Args:
+
+ * pressure:
+ Pressure in mb or hPa.
+
+ * temperature:
+ Temperature in degC.
+
+ Returns:
+ Tuple of temperature, in degC, and potential temperature, in degC.
+
+ """
+ pressure, temperature = np.asarray(pressure), np.asarray(temperature)
+
+ # Convert temperature from degC to kelvin.
+ kelvin = temperature + CONST_KELVIN
+
+ # Calculate the potential temperature given the pressure and temperature.
+ theta = kelvin * ((1000.0 / pressure) ** CONST_K)
+
+ # Convert potential temperature from kelvin to degC.
+ return temperature, theta - CONST_KELVIN
+
+
+def pressure_theta_to_pressure_temperature(pressure, theta):
+ """
+ Transform pressure and potential temperature into pressure and temperature.
+
+ Args:
+
+ * pressure:
+ Pressure in mb or hPa.
+
+ * theta:
+ Potential temperature in degC.
+
+    Returns:
+ Tuple of pressure, in mb or hPa, and temperature, in degC.
+
+ """
+ pressure, theta = np.asarray(pressure), np.asarray(theta)
+
+ # Convert potential temperature from degC to kelvin.
+ theta = theta + CONST_KELVIN
+
+ # Calculate the temperature given the pressure and
+ # potential temperature.
+ kelvin = theta * (pressure ** CONST_K) / (1000.0 ** CONST_K)
+
+ # Convert temperature from kelvin to degC.
+ return pressure, kelvin - CONST_KELVIN
+
+
+def temperature_theta_to_xy(temperature, theta):
+ """
+ Transform temperature and potential temperature to native display
+ coordinates.
+
+ Args:
+
+ * temperature:
+ Temperature in degC.
+
+ * theta:
+ Potential temperature in degC.
+
+ Returns:
+ Native display x and y coordinates.
+
+ """
+ temperature, theta = np.asarray(temperature), np.asarray(theta)
+
+ # Convert potential temperature from degC to kelvin.
+ theta = theta + CONST_KELVIN
+ theta = np.clip(theta, 1, 1e10)
+
+ phi = np.log(theta)
+
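+    # The tephigram x/y plane is effectively a scaled 45-degree rotation of
+    # the (temperature, log-theta) plane: x grows with log(theta) plus
+    # temperature, and y with log(theta) minus temperature.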
+ x_data = phi * CONST_MA + temperature
+ y_data = phi * CONST_MA - temperature
+
+ return x_data, y_data
+
+
+def xy_to_temperature_theta(x_data, y_data):
+ """
+ Transform native display coordinates to temperature and
+ potential temperature.
+
+ Args:
+
+ * x_data:
+ Native display x-coordinate/s.
+
+ * y_data:
+ Native display y-coordinate/s.
+
+ Returns:
+ Temperature, in degC, and potential temperature, in degC.
+
+ """
+ x_data, y_data = np.asarray(x_data), np.asarray(y_data)
+
+ phi = (x_data + y_data) / (2 * CONST_MA)
+ temperature = (x_data - y_data) / 2.
+
+ theta = np.exp(phi) - CONST_KELVIN
+
+ return temperature, theta
+
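+# Note: temperature_theta_to_xy and xy_to_temperature_theta are inverses of
+# one another, up to floating point precision. For example:
+#
+#     x, y = temperature_theta_to_xy(15.0, 25.0)
+#     xy_to_temperature_theta(x, y)  # -> (array(15.0), array(25.0))
+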
+
+def pressure_mixing_ratio_to_temperature(pressure, mixing_ratio):
+ """
+ Transform pressure and mixing ratios to temperature.
+
+ Args:
+
+ * pressure:
+        Pressure in mb or hPa.
+
+ * mixing_ratio:
+ Dimensionless mixing ratios.
+
+ Returns:
+ Temperature in degC.
+
+ """
+ pressure = np.array(pressure)
+
+    # Calculate the dew-point: the vapour pressure follows from the mixing
+    # ratio (g kg-1), with 8/5 approximating 1/epsilon = Rv/Rd = 1.61, and
+    # the temperature from inverting an integrated Clausius-Clapeyron
+    # relation (6.11 mb is approximately the saturation vapour pressure
+    # at 0 degC).
+ vapp = pressure * (8.0 / 5.0) * (mixing_ratio / 1000.0)
+ temp = 1.0 / ((1.0 / CONST_KELVIN) - ((CONST_RV / CONST_L) * np.log(vapp / 6.11)))
+
+ return temp - CONST_KELVIN
+
+
+class TephiTransform(Transform):
+ """
+ Tephigram transformation to convert from temperature and
+ potential temperature to native plotting device coordinates.
+
+ """
+ input_dims = 2
+ output_dims = 2
+ is_separable = False
+ has_inverse = True
+
+ def transform_non_affine(self, values):
+ """
+ Transform from tephigram temperature and potential temperature
+ to native plotting device coordinates.
+
+ Args:
+
+ * values:
+ Values to be transformed, with shape (N, 2).
+
+ """
+ return np.concatenate(temperature_theta_to_xy(values[:, 0:1], values[:, 1:2]), axis=1)
+
+ def inverted(self):
+ """Return the inverse transformation."""
+ return TephiTransformInverted()
+
+
+class TephiTransformInverted(Transform):
+ """
+ Tephigram inverse transformation to convert from native
+ plotting device coordinates to tephigram temperature and
+ potential temperature.
+
+ """
+ input_dims = 2
+ output_dims = 2
+ is_separable = False
+ has_inverse = True
+
+ def transform_non_affine(self, values):
+ """
+ Transform from native plotting display coordinates to tephigram
+ temperature and potential temperature.
+
+ Args:
+
+ * values:
+ Values to be transformed, with shape (N, 2).
+
+ """
+ return np.concatenate(xy_to_temperature_theta(values[:, 0:1], values[:, 1:2]), axis=1)
+
+ def inverted(self):
+ """Return the inverse transformation."""
+ return TephiTransform()
diff --git a/faampy/__init__.py b/faampy/__init__.py
new file mode 100644
index 0000000..8ec53ad
--- /dev/null
+++ b/faampy/__init__.py
@@ -0,0 +1,81 @@
+"""
+faampy main command, which dispatches to the subcommands.
+
+Usage example:
+
+    faampy nc_to_gpx ....
+
+"""
+
+
+import argparse
+import importlib
+import os
+import sys
+
+
+__version__ = '0.1.2'
+__status__ = 'a'
+
+
+def command_line():
+    # list of subcommands and associated python scripts;
+ # this has to be maintained manually and has to be
+ # checked/updated with every release
+
+ MODULES = [('data_download', 'faampy.utils.data_download'),
+ ('nimrod_to_nc', 'faampy.data_io.nimrod_to_nc'),
+ ('ge_ncas_airquality', 'faampy.mapping.ge_ncas_airquality'),
+ ('ge_nimrod_to_kmz', 'faampy.mapping.ge_nimrod_to_kmz'),
+ ('ge_photo_album', 'faampy.mapping.ge_photo_album'),
+ ('ge_was_to_kmz', 'faampy.mapping.ge_was_to_kmz'),
+ ('nc_to_gpx', 'faampy.mapping.nc_to_gpx'),
+ ('ge_ncvar_to_kml', 'faampy.mapping.ge_ncvar_to_kml'),
+ ('world_map', 'faampy.mapping.world_map'),
+ ('sat_tracker', 'faampy.mapping.sat_tracker'),
+ ('flight_summary', 'faampy.core.flight_summary'),
+ ('plt_quicklooks', 'faampy.plotting.quicklooks')]
+
+ parser = argparse.ArgumentParser(description="faampy",
+ usage="""faampy []
+
+%s
+""" % (' \n'.join([' '+m[0] for m in MODULES])))
+ parser.add_argument('command', help='Subcommand to run')
+ # parse_args defaults to [1:] for args, but you need to
+ # exclude the rest of the args too, or validation will fail
+ args = parser.parse_args(sys.argv[1:2])
+    # check if the subcommand is available; if not, print help and exit
+ if args.command not in zip(*MODULES)[0]:
+ sys.stdout.write('Unrecognized command\n')
+ parser.print_help()
+ sys.exit(1)
+ else:
+ sys.argv = sys.argv[2:]
+ # use dispatch pattern to invoke method with same name
+ for MOD in MODULES:
+ if args.command == MOD[0]:
+ mod = importlib.import_module(MOD[1])
+ mod.main() # the main method is called
+
+FAAMPY_DATA_PATH = os.path.join(os.environ['HOME'], 'faampy_data')
+SRTM_DATA_PATH = os.path.join(FAAMPY_DATA_PATH, 'srtm')
+
+FAAM_DATA = ['icons/camera_icon_32x32.png',
+ 'icons/dot_blue_32x32.png',
+ 'icons/dot_green_32x32.png',
+ 'icons/dot_orange_32x32.png',
+ 'icons/dot_red_32x32.png',
+ 'icons/dropsonde_32x32.png',
+ 'icons/was_bottle_32x32.png',
+ 'img/world_osm_54030_6804x3450.png',
+ 'img/uk_osm_background_2244x2642.png',
+ 'img/world_osm_54030_27234x13812.png',
+ 'img/world_osm_54030_4806x2437.png',
+ 'img/world_osm_54030_13617x6906.png',
+ 'img/uk_osm_background_3366x3963.png',
+ 'img/world_osm_54030_9633x4885.png',
+ 'img/world_osm_54030_3402x1725.png',
+ 'img/uk_osm_background_9507x11195.png',
+ 'img/uk_osm_background_6732x7927.png',
+ 'img/uk_osm_background_19014x22390.png',
+ 'img/world_osm_54030_1134x575.png', ]
diff --git a/faampy/aimms/aimms_process.py b/faampy/aimms/aimms_process.py
new file mode 100644
index 0000000..bc70a84
--- /dev/null
+++ b/faampy/aimms/aimms_process.py
@@ -0,0 +1,147 @@
+# -*- coding: utf-8 -*-
+"""
+The processing of the AIMMS data is done in several steps using win32
+executables that are called from the command line. This python script is
+only a wrapper for those steps, calling the executables using wine, which
+allows the processing to be done on a linux machine.
+
+The final step is the conversion into a netCDF, which is currently done by
+an IDL routine. Therefore an IDL installation is also required on the
+machine where the script runs.
+
+The script is called as:
+
+ python aimms_process.py FID
+
+
+"""
+
+import os
+import re
+import shutil
+import sys
+import tempfile
+
+from file_info import *
+from file_list import *
+
+# AIMMS_ROOT_PATH = '/mnt/faamarchive/Data/cloudphysics'
+# CORE_ROOT_PATH = '/mnt/faamarchive/badcMirror/'
+
+AIMMS_ROOT_PATH = '/home/tardis/FAAMarchive/Data/cloudphysics'
+CORE_ROOT_PATH = '/home/tardis/FAAMarchive/badcMirror/'
+
+TEMPPATH = tempfile.mkdtemp()
+OUTPATH = '/home/tardis/'
+CALFILE = 'AIMMScalB884.dat'
+
+_VERSION_ = 'v003'
+
+
+def do_checks():
+ # checking if wine and idl are installed
+ return True
+
+
+def get_aimms_raw_files(AIMMS_ROOT_PATH, fid):
+ """The AIMMS probe produces severl raw data files for
+ a single flight. Those files needs to be sticked together.
+ """
+    pattern = r'.*%s.*odms.*\d{8}.*r\d{2}' % (fid,)
+ file_list = []
+ for root, subFolders, files in os.walk(AIMMS_ROOT_PATH):
+ for f in files:
+ filename = os.path.join(root, f)
+ if re.match(pattern, filename.lower()):
+ file_list.append(filename)
+ return file_list
+
+
+def get_core_file(CORE_ROOT_PATH, fid):
+ fl = File_List(CORE_ROOT_PATH)
+ fl.filter_by_data_type('core-lowres')
+ fl.filter_latest_revision()
+ for f in fl:
+ if f.fid == fid:
+ return os.path.join(os.path.join(f.path, f.filename))
+
+
+def process(fid, revision, outpath):
+ dir_path = os.path.dirname(os.path.realpath(__file__))
+
+ # get the flight number
+ # fid = sys.argv[1]
+
+    aimms_raw_files = get_aimms_raw_files(AIMMS_ROOT_PATH, fid)
+ core_file = get_core_file(CORE_ROOT_PATH, fid)
+
+ sys.stdout.write('AIMMS ODMS Files:\n%s\n' % ' \n'.join(['%s' % f for f in aimms_raw_files]))
+ sys.stdout.write('FAAM Core: %s\n' % core_file)
+ sys.stdout.write('Temp dir: %s\n' % TEMPPATH)
+
+ date_string = os.path.basename(core_file).split('_')[2]
+
+ # copy all the files that are needed for processing
+ # into a temporary directory
+ for f in aimms_raw_files:
+ shutil.copy(f, os.path.join(TEMPPATH, os.path.basename(f)))
+ shutil.copy(core_file, os.path.join(TEMPPATH, os.path.basename(core_file)))
+
+ # all the files that are needed for the processing will be copied to the TEMPPATH to
+ # keep things simple
+ for f in ['asmbl.exe',
+ 'canextr4.exe',
+ 'ekf556.exe',
+ 'convert_aimms_netcdf.pro',
+ 'findcorefile.pro',
+ 'cal_files/'+CALFILE]:
+ shutil.copy(os.path.join(dir_path, f), os.path.join(TEMPPATH, f))
+
+ # move into the temporary directory
+ os.chdir(TEMPPATH)
+
+ # get the identifier from the raw files
+ idents = list(set([os.path.splitext(os.path.basename(f))[0] for f in aimms_raw_files]))
+
+ for i in idents:
+ # stick the odms files together
+ cmd = "wine asmbl.exe %s 1" % (i,)
+ os.system(cmd)
+ #cmd = "wine canextr4.exe %s.raw"
+ aimms_out_file = '%s_%s_aimms.out' % (fid, date_string)
+ cmd = "wine ekf556.exe %s %s.raw -c on -o %s" % (CALFILE, i, aimms_out_file)
+ os.system(cmd)
+ aimms_ncfile = os.path.join(outpath, 'metoffice-aimms_faam_%s_%s_r%i_%s.nc' % (date_string, _VERSION_, revision, fid))
+ # call the idl script from the command line
+ cmd = """idl -e "convert_aimms_netcdf, '%s/', '%s', '%s', '%s'" """ % (os.path.join(TEMPPATH), os.path.join(TEMPPATH, os.path.basename(core_file)), aimms_ncfile, fid)
+ os.system(cmd)
+
+
+def main():
+ import argparse
+ from argparse import RawTextHelpFormatter
+
+ parser = argparse.ArgumentParser(description=__doc__,
+ version=__version__,
+ epilog="Report bugs to %s." % __email__,
+ formatter_class=RawTextHelpFormatter)
+ parser.add_argument('fid', action="store", type=str, help='Flight Number')
+ parser.add_argument('-r', '--revision', action="store", type=int,
+ required=False, default=0,
+                        help='revision number of the AIMMS netCDF.')
+ parser.add_argument('-o', '--outpath', action="store", type=str, required=False,
+ default=os.environ['HOME'],
+                        help='Outpath for the netCDF. Default: $HOME.')
+ parser.add_argument('-k', '--keep-folder', action="store_true", required=False, default=False,
+ help='If option is set the temporary directory will *not* be deleted. Default: False')
+ args=parser.parse_args()
+
+ process(args.fid, args.revision, args.outpath)
+ # clean_up behind ourselves
+ if not args.keep_folder:
+ sys.stdout.write('Deleting temporary folder %s ... \n' % (TEMPPATH))
+ shutil.rmtree(TEMPPATH)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/faampy/aimms/asmbl.exe b/faampy/aimms/asmbl.exe
new file mode 100644
index 0000000..e2b37b7
Binary files /dev/null and b/faampy/aimms/asmbl.exe differ
diff --git a/faampy/aimms/cal_files/AIMMScalB863.dat b/faampy/aimms/cal_files/AIMMScalB863.dat
new file mode 100644
index 0000000..d603ac4
--- /dev/null
+++ b/faampy/aimms/cal_files/AIMMScalB863.dat
@@ -0,0 +1,33 @@
+-1.60
+-2.60
++0.05
+-2.45
+-0.95
+-1.75
+-1.15
++0.85
++0.00
++0.00
++0.00
++0.00
++0.00
++0.00
+3.953
+-8.275
+-1.29
+2
++0.18179
+-0.18999
+-0.09484
++0.00000
++0.00000
+-4.542
++2.224
++15.080
+-4.705
++8.555
++2.015
+0.70
++0.98
++2.08
+-0.41
\ No newline at end of file
diff --git a/faampy/aimms/cal_files/AIMMScalB884.dat b/faampy/aimms/cal_files/AIMMScalB884.dat
new file mode 100644
index 0000000..224faaa
--- /dev/null
+++ b/faampy/aimms/cal_files/AIMMScalB884.dat
@@ -0,0 +1,33 @@
+-1.70
+-1.55
+-0.20
+-0.60
+-1.10
+-0.90
+-1.10
++0.65
++0.00
++0.00
++0.00
++0.00
++0.00
++0.00
+3.953
+-8.275
+-1.29
+2
++0.18188
+-0.22580
+-0.09083
++0.00000
++0.00000
+-3.988
++0.096
++15.682
+-4.446
++8.445
++1.064
+0.70
++0.98
++2.08
++0.41
\ No newline at end of file
diff --git a/faampy/aimms/canextr4.exe b/faampy/aimms/canextr4.exe
new file mode 100644
index 0000000..635bf10
Binary files /dev/null and b/faampy/aimms/canextr4.exe differ
diff --git a/faampy/aimms/convert_aimms_netcdf.pro b/faampy/aimms/convert_aimms_netcdf.pro
new file mode 100644
index 0000000..665749c
--- /dev/null
+++ b/faampy/aimms/convert_aimms_netcdf.pro
@@ -0,0 +1,619 @@
+PRO convert_aimms_netcdf, basedir, corefile, ofile, fnum, $
+ time,atime,btime,ctime,dtime, $
+ average1hz=make_average, $
+ release_data=datarelease
+;
+; PROCEDURE: CONVERT_AIMMS_NETCDF
+;
+; PURPOSE: To read post-processed data from an AIMMS ascii file and output to NetCDF format.
+; Assumes that the ascii file has 20 data values per second. Time array is recreated
+; to overcome apparent AIMMS clock-rate error.
+;
+; ARGUMENTS:   BASEDIR - directory in which AIMMS ascii file is located and NetCDF written, assumed
+;                        to point to a project directory such as '/project/obr/COPE'
+;              COREFILE - full path of the FAAM core netCDF file used for the time array
+;              OFILE - full path of the netCDF output file
+;              FNUM - the flight number, bnnn
+;              AVERAGE1HZ = MAKE_AVERAGE - If set and non-zero, output dataset is 1Hz average
+;              RELEASE_DATA = DATARELEASE - If set, specifies an alternative data release number (default = 0)
+;
+; DATE: 12/11/2013
+; AUTHOR: Steve Abel, with modifications by Phil Brown
+;
+; VERSION:     v001 - 06/12/2013 Initial version set up to identify and process all AIMMS 20Hz ascii files found
+; in the target directory. Options to match output time array to Core data file and to
+; generate 1Hz average output. Contains a fix to identify and correct time jumps in the input
+; ascii data. Ascii file post-processing using "ekf553_oemv.exe".
+; v002 - 05/02/2014 Update to NetCDF attributes:
+; - additional GLOBAL attributes specified for similarity with FAAM Core files,
+; - additional CF-1.6 compliant standard_name attributes supplied for variables, where possible
+; - applies adjustment to AIMMS date/time where Core data starts before midnight on the previous day
+; v003 - 18/02/2014 Further updates to NetCDF attributes:
+; - units for dimensionless variables = 1
+; - standard names of latitude and longitude parameters
+; - time units
+; - status variable excluded
+; - correct long_name now given for AOSS and AOA
+; - 28/02/2014 FILE_SEARCH replaces FINDFILE (line 50)
+; - 07/03/2014 minor correction to standard_name attribute for latitude
+; - 21/05/2014 minor change to dealing with skip/jump times - jumps preceding the first skip are ignored
+; - 23/05/2014 fudge skip times for B800 and b807
+; - 04/12/2014 Test for existence of Core and Core1hz files before trying to open them. Allows processing
+; when only the Core1hz is available
+;                   - 03/08/2015 Changes to allow running in garden-variety IDL (using IDL standard functions
+;                                rather than Met Office specific ones).
+;
+on_error, 0
+
+; COMMON to hold NetCDF file and variable id's
+common varids, id, LTIME, LCAL, cp0_id,cpa_id,cpb_id,B0_id,Ba_id,Bb_id,A0_id,Aa_id,Ab_id, $
+ time_id,tk_id,rh_id,p_id,u_id,v_id,w_id,lat_id,lon_id,alt_id,vn_id,ve_id,vz_id, $
+ roll_id,ptch_id,hdg_id,tas_id,aoss_id,dpaoa_id,dpaoss_id,aoa_id
+
+version = 'v003'
+if not(keyword_set(datarelease)) then datarelease=0
+rstring = 'r'+STRTRIM(STRING(datarelease),1)
+
+;fdir = STRUPCASE(fnum)+'/' ; find ascii input files under base directory
+;daq_file = basedir+'/AIMMS/'+fdir+'*'+fnum+'*.out'
+daq_file = basedir+fnum+'*.out'
+daq_files = file_search(daq_file)
+;daq_file = aimmsfile
+nfiles = n_elements(daq_files)
+;if daq_files(0) eq '' then begin
+; print,'Input data not found: ', daq_file
+; return
+;endif
+;print,nfiles,' data files for reading.',daq_files
+
+; identify Core file and read time data from it. Time data is the same in Core and Core1hz files. If you don't find the Core file
+; then look for the Core1hz. If you still don't find that, then exit.
+;
+;corefile = findcorefile(basedir,fnum,core1hz)
+;print, '***', corefile
+;if strlen(corefile) gt 0 then begin
+cid = NCDF_OPEN(corefile,/NOWRITE)
+;endif else begin
+; if strlen(core1hz) gt 0 then begin
+; cid = NCDF_OPEN(core1hz)
+; endif else begin
+; print,'No core files could be found. Exiting.....'
+; return
+; endelse
+;endelse
+
+NCDF_VARGET,cid,'Time',core_time ; Core data time in seconds after midnight
+NCDF_ATTGET,cid,'Time','units',coretimeunits ; units of Core data time (specifies which day)
+print,string(coretimeunits)
+print,'Range of core time = ',gmt(core_time(0)),gmt(core_time(n_elements(core_time)-1))
+;
+cyear = fix(string(coretimeunits(14:17))) ; year, month, day of Core data
+cmonth= fix(string(coretimeunits(19:20)))
+cday = fix(string(coretimeunits(22:23)))
+
+start_time = core_time(0)
+if not(keyword_set(make_average)) then begin ; create 20Hz time array to span range of Core time
+ core_time = findgen(n_elements(core_time)*20)*0.05 + start_time
+endif
+
+; **********************************************************************************************************
+; start of loop over the number of ascii input file
+; **********************************************************************************************************
+
+FOR jfile=0,nfiles-1 DO BEGIN
+print,'Reading file ',jfile
+
+; first read aerodynamic calibration coefficients from top of ascii file
+cal_coeff = READ_ASCII(daq_file,DATA_START=1,NUM_RECORDS=1)
+Cp_0 = cal_coeff.field01(0)
+Cp_alpha = cal_coeff.field01(1)
+Cp_beta = cal_coeff.field01(2)
+B_0 = cal_coeff.field01(5)
+B_alpha = cal_coeff.field01(6)
+B_beta = cal_coeff.field01(7)
+A_0 = cal_coeff.field01(8)
+A_alpha = cal_coeff.field01(9)
+A_beta = cal_coeff.field01(10)
+
+; now read data from the ascii file
+
+DATA = READ_ASCII(DAQ_FILES(JFILE),COUNT=NT,DATA_START=2,HEADER=HEADER)
+Time = REFORM(data.FIELD01(0,*))
+Tc = REFORM(data.FIELD01(1,*))
+RH = REFORM(data.FIELD01(2,*))
+Pres = REFORM(data.FIELD01(3,*))
+V = REFORM(data.FIELD01(4,*))
+U = REFORM(data.FIELD01(5,*))
+Lat = REFORM(data.FIELD01(6,*))
+Lon = REFORM(data.FIELD01(7,*))
+alt = REFORM(data.FIELD01(8,*))
+Vn = REFORM(data.FIELD01(9,*))
+Ve = REFORM(data.FIELD01(10,*))
+Vz = REFORM(data.FIELD01(11,*))
+Roll = REFORM(data.FIELD01(12,*))
+Pitch= REFORM(data.FIELD01(13,*))
+Hdg = REFORM(data.FIELD01(14,*))
+Tas = REFORM(data.FIELD01(15,*))
+W = REFORM(data.FIELD01(16,*))
+Aoss = REFORM(data.FIELD01(17,*))
+Dpaoa= REFORM(data.FIELD01(18,*))
+Dpaoss= REFORM(data.FIELD01(19,*))
+; Status= REFORM(data.FIELD01(20,*))
+
+Time = Time*3600. ; seconds after midnight
+print,'Raw time converted to seconds.'
+
+; **********************************************************************************************************
+; First, detect any time data where the clock has wrapped around midnight. In this case, times will be less than
+; the initial time in the data so add 24 hours to them.
+initial_time = Time(0)
+next_day = where(Time lt initial_time)
+if next_day(0) ne -1 then begin
+ Time(next_day) = Time(next_day) + 86400.
+ print,'Time adjustment made where AIMMS clock has crossed midnight.'
+endif
+
+; **********************************************************************************************************
+; now search for periods when time skips due to purge event. These events take the form of about 1 second's worth
+; of data points that skip back in time by about 0.5 sec. At the end of this event, the time jumps ahead by about
+; the same amount to resume the expected sequence.
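+; The correction applied further below shifts each such group of samples
+; forward by deltat = time(skip-1) - time(skip) + 0.05; e.g. a skip from
+; 41000.00 s back to 40999.55 s gives deltat = 0.50 s.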
+
+stime = time
+index = lindgen(n_elements(time))
+
+if (fnum eq 'b807') then begin ; fudge time skips for b807 only
+ time(231691:231700) = time(231691:231700) + 0.45
+ time(231701:231710) = time(231701:231710) + 0.90
+ time(231711:231719) = time(231711:231719) + 1.35
+endif
+
+skip = where(time(index)-time(index-1) lt 0.0) ; first element where time has skipped back
+jump = where(time(index+1)-time(index) gt 0.06) ; last element before time skips forward again
+help,skip,jump
+
+nfirst = min(where(jump ge skip(0)))
+njump = n_elements(jump)
+jump = jump(nfirst:njump-1) ; trims any jumps before the first skip
+
+; if (jump(0) lt skip(0) and n_elements(jump) gt n_elements(skip)) then begin
+; njump = n_elements(jump)
+; jump = jump(1:njump-1)
+; help,skip,jump
+; endif
+;
+; Here are some ad-hoc adjustments to cope with individual flights
+;
+if (fnum eq 'b765') then skip = skip([0,2,3,4])
+if (fnum eq 'b800') then skip = skip(0:6)
+;if (fnum eq 'b882') then begin
+; skip = skip(0:2)
+; jump = [jump(0:1),jump(3:(n_elements(jump)-1))]
+;endif
+if (fnum eq 'b884') then begin
+ jump = [jump(0),jump(2:(n_elements(jump)-1))]
+endif
+
+if skip(0) gt -1 then begin
+ print,'Skip times: ',gmt(time(skip))
+ print,'Jump times: ',gmt(time(jump))
+ nevent = n_elements(skip) ; count of number of events
+ for j=0,nevent-1 do begin
+ deltat = time(skip(j)-1) - time(skip(j)) +0.05 ; amount to shift times forward
+ stime(skip(j):jump(j)) = stime(skip(j):jump(j)) + deltat ; move this group of times forward
+ endfor
+endif
+
+; **********************************************************************************************************
+; now re-create a 20Hz time array starting at the same time and find the start of the first FULL second
+
+atime = stime ; atime holds original time values read from data corrected for jumps
+
+btime = round(atime*100) ; using ROUND for IDL compatibility in place of NINT
+ctime = round(atime)*100
+diff = btime - ctime
+nsec = max(btime)/100 - min(btime)/100 - 2 ; remove the first and last second that has data because
+ ; it will generally not be full
+help,nsec
+nvals20 = long(nsec*20) ; the number of 20Hz values in the full seconds
+help,nvals20
+nstart = min(where(diff eq 0)) ; start index of the first full second of data
+help,nstart
+
+dtime = dindgen(nvals20)*0.05+double(btime(nstart))/100. ; regular 20Hz time array spanning same interval
+help,dtime
+
+; **********************************************************************************************************
+; now spline interpolate data onto the regular time array
+print,'Interpolate data onto regular time array.'
+
+help,atime,tc
+Tc = spline(atime, tc, dtime, 1.0)
+help,dtime,tc
+RH = spline(atime, rh, dtime, 1.0)
+Pres = spline(atime, pres, dtime, 1.0)
+V = spline(atime, v, dtime, 1.0)
+U = spline(atime, u, dtime, 1.0)
+W = spline(atime, w, dtime, 1.0)
+Lat = spline(atime, lat, dtime, 1.0)
+Lon = spline(atime, lon, dtime, 1.0)
+alt = spline(atime, alt, dtime, 1.0)
+Vn = spline(atime, vn, dtime, 1.0)
+Ve = spline(atime, ve, dtime, 1.0)
+Vz = spline(atime, vz, dtime, 1.0)
+Roll = spline(atime, roll, dtime, 1.0)
+Pitch= spline(atime, pitch, dtime, 1.0)
+Hdg = spline(atime, hdg, dtime, 1.0)
+Tas = spline(atime, tas, dtime, 1.0)
+Aoss = spline(atime, aoss, dtime, 1.0)
+Dpaoa= spline(atime, dpaoa, dtime, 1.0)
+Dpaoss= spline(atime, dpaoss, dtime, 1.0)
+;Status= spline(atime, status, dtime, 1.0)
+
+print,'Interpolation complete.'
+; tplot,dtime,lat,psym=1
+
+; **********************************************************************************************************
+; create additional data arrays, to be calculated from input data
+
+Aoa = -1.0*(A_0 +(A_alpha*dpaoa) +(A_beta*dpaoss)) ; calculate AOA from existing variables
+
+; **********************************************************************************************************
+; if output is to be 1hz averages then first create these
+if keyword_set(make_average) then begin
+ dtime = REFORM(dtime,20,nsec)
+ dtime = REFORM(dtime(0,*))
+ print,dtime(0)
+ Tc = MEAN(REFORM(tc,20,nsec),DIMENSION=1,/NAN)
+ RH = MEAN(REFORM(rh,20,nsec),DIMENSION=1,/NAN)
+ Pres = MEAN(REFORM(pres,20,nsec),DIMENSION=1,/NAN)
+ V = MEAN(REFORM(v,20,nsec),DIMENSION=1,/NAN)
+ U = MEAN(REFORM(u,20,nsec),DIMENSION=1,/NAN)
+ W = MEAN(REFORM(w,20,nsec),DIMENSION=1,/NAN)
+ Lat = MEAN(REFORM(lat,20,nsec),DIMENSION=1,/NAN)
+ Lon = MEAN(REFORM(lon,20,nsec),DIMENSION=1,/NAN)
+ alt = MEAN(REFORM(alt,20,nsec),DIMENSION=1,/NAN)
+ Vn = MEAN(REFORM(vn,20,nsec),DIMENSION=1,/NAN)
+ Ve = MEAN(REFORM(ve,20,nsec),DIMENSION=1,/NAN)
+ Vz = MEAN(REFORM(vz,20,nsec),DIMENSION=1,/NAN)
+ Roll = MEAN(REFORM(roll,20,nsec),DIMENSION=1,/NAN)
+ Pitch= MEAN(REFORM(pitch,20,nsec),DIMENSION=1,/NAN)
+ Hdg = MEAN(REFORM(hdg,20,nsec),DIMENSION=1,/NAN)
+ Tas = MEAN(REFORM(tas,20,nsec),DIMENSION=1,/NAN)
+ Aoss = MEAN(REFORM(aoss,20,nsec),DIMENSION=1,/NAN)
+ Aoa = MEAN(REFORM(aoa,20,nsec),DIMENSION=1,/NAN)
+ Dpaoa= MEAN(REFORM(dpaoa,20,nsec),DIMENSION=1,/NAN)
+ Dpaoss= MEAN(REFORM(dpaoss,20,nsec),DIMENSION=1,/NAN)
+; Status= MEAN(REFORM(status,20,nsec),DIMENSION=1,/NAN)
+ help,dtime,tc
+endif
+
+start_time = dtime(0)
+date = READ_ASCII(daq_files(jfile),DATA_START=0,NUM_RECORDS=1) ; read date information from ascii file
+day = FLOOR(date.field1(0))
+month = FLOOR(date.field1(1))
+year = FLOOR(date.field1(2))
+
+; **********************************************************************************************************
+; Test AIMMS date information against Core. If Core data started on the previous day, then adjust accordingly.
+; The only plausible circumstance is that Core data recording commenced before midnight and AIMMS data recording after midnight,
+; so set AIMMS date information equal to Core and increment AIMMS time by 24*60*60 = 86400.
+; **********************************************************************************************************
+
+if (day ne cday) or (month ne cmonth) or (year ne cyear) then begin
+ day = cday
+ month = cmonth
+ year = cyear
+ start_time = start_time + 86400.
+   print,'AIMMS date and start time re-aligned with Core.'
+endif
+
+print,'Start time =',start_time
+
+; **********************************************************************************************************
+; if this is the first ascii file, read date information, create NetCDF file name, open it and write other required
+; creation information -
+; **********************************************************************************************************
+
+if jfile eq 0 then begin
+ date_str = '00000000' ; create date string for NC file - yyyymmdd
+ STRPUT,date_str,STRTRIM(year,1),0
+ if month lt 10 then pos=5 else pos=4
+ STRPUT,date_str,STRTRIM(month,1),pos
+ if day lt 10 then pos=7 else pos=6
+ STRPUT,date_str,STRTRIM(day,1),pos
+
+; create NC filename
+ ;nc_file = basedir+'/AIMMS/'+fdir+'metoffice-aimms_faam_'+date_str+'_'+version+'_'+rstring+'_'+fnum
+ ;if keyword_set(make_average) then nc_file=nc_file+'_1hz'
+ ;nc_file = nc_file+'.nc'
+ nc_file = ofile
+ print,'Input: ', daq_files(jfile)
+ Print,'Output: ', nc_file
+
+; open the NetCDF output file and create global attributes and variable information
+; id=NCDF_CREATE(nc_file,/CLOBBER)
+
+ id=NCDF_CREATE(ofile, /CLOBBER)
+ s_start = strtrim(string(gmt(dtime(0))),2)
+ if strlen(s_start) lt 6 then s_start='0'+s_start
+ s_end = strtrim(string(gmt(max(dtime))),2)
+ if strlen(s_end) lt 6 then s_end='0'+s_end
+
+ NCDF_ATTPUT,id,/GLOBAL,'title','Standard AIMMS-20 data from '+fnum+' on '+date_str
+ NCDF_ATTPUT,id,/GLOBAL,'AIMMS_files',daq_file
+ NCDF_ATTPUT,id,/GLOBAL,'Date',date_str
+ NCDF_ATTPUT,id,/GLOBAL,'TimeInterval',s_start+'-'+s_end
+ NCDF_ATTPUT,id,/GLOBAL,'Conventions','CF-1.6'
+ NCDF_ATTPUT,id,/GLOBAL,'INSTITUTION','FAAM'
+ NCDF_ATTPUT,id,/GLOBAL,'SOURCE','FAAM BAe146 aircraft data'
+ NCDF_ATTPUT,id,/GLOBAL,'REFERENCES','http://www.faam.ac.uk'
+ ;
+ descriptor = 'Post-processed 20Hz AIMMS data converted to NetCDF'
+ if keyword_set(make_average) then descriptor=descriptor+' and 1hz averages'
+ NCDF_ATTPUT,id,/GLOBAL,'Description',descriptor
+ NCDF_ATTPUT,id,/GLOBAL,'Post_processor_executable','ekf556'
+ NCDF_ATTPUT,id,/GLOBAL,'Software_version',version
+
+; **********************************************************************************************************
+; Define output variables and attributes
+; **********************************************************************************************************
+
+ LTIME = NCDF_DIMDEF(id,'TIME',/UNLIMITED) ; TIME dimension created unlimited so as to be extensible
+ LCAL = NCDF_DIMDEF(id,'CONST',1)
+
+ cp0_id = NCDF_VARDEF(id,'CP_0',[LCAL],/FLOAT)
+ NCDF_ATTPUT,id,cp0_id,'units','1'
+ NCDF_ATTPUT,id,cp0_id,'long_name','CP_0 calibration coefficient'
+
+ cpa_id = NCDF_VARDEF(id,'CP_alpha',[LCAL],/FLOAT)
+ NCDF_ATTPUT,id,cpa_id,'units','1'
+ NCDF_ATTPUT,id,cpa_id,'long_name','CP_alpha calibration coefficient'
+
+ cpb_id = NCDF_VARDEF(id,'CP_beta',[LCAL],/FLOAT)
+ NCDF_ATTPUT,id,cpb_id,'units','1'
+ NCDF_ATTPUT,id,cpb_id,'long_name','CP_beta calibration coefficient'
+
+ B0_id = NCDF_VARDEF(id,'B_0',[LCAL],/FLOAT)
+ NCDF_ATTPUT,id,B0_id,'units','degree'
+ NCDF_ATTPUT,id,B0_id,'long_name','B_0 calibration coefficient'
+
+ Ba_id = NCDF_VARDEF(id,'B_alpha',[LCAL],/FLOAT)
+ NCDF_ATTPUT,id,Ba_id,'units','degree'
+ NCDF_ATTPUT,id,Ba_id,'long_name','B_alpha calibration coefficient'
+
+ Bb_id = NCDF_VARDEF(id,'B_beta',[LCAL],/FLOAT)
+ NCDF_ATTPUT,id,Bb_id,'units','degree'
+ NCDF_ATTPUT,id,Bb_id,'long_name','B_beta calibration coefficient'
+
+ A0_id = NCDF_VARDEF(id,'A_0',[LCAL],/FLOAT)
+ NCDF_ATTPUT,id,A0_id,'units','degree'
+ NCDF_ATTPUT,id,A0_id,'long_name','A_0 calibration coefficient'
+
+ Aa_id = NCDF_VARDEF(id,'A_alpha',[LCAL],/FLOAT)
+ NCDF_ATTPUT,id,Aa_id,'units','degree'
+ NCDF_ATTPUT,id,Aa_id,'long_name','A_alpha calibration coefficient'
+
+ Ab_id = NCDF_VARDEF(id,'A_beta',[LCAL],/FLOAT)
+ NCDF_ATTPUT,id,Ab_id,'units','degree'
+ NCDF_ATTPUT,id,Ab_id,'long_name','A_beta calibration coefficient'
+
+; **********************************************************************************************************
+
+ time_units = 'seconds since '+strmid(date_str,0,4)+'-'+strmid(date_str,4,2)+'-'+strmid(date_str,6,2)+' 00:00:00 +0000'
+ time_id = NCDF_VARDEF(id,'TIME',[LTIME],/FLOAT)
+ NCDF_ATTPUT,id,time_id,'units',time_units
+ NCDF_ATTPUT,id,time_id,'long_name','time of measurement'
+ NCDF_ATTPUT,id,time_id,'_FillValue',-9999.0
+ NCDF_ATTPUT,id,time_id,'standard_name','time'
+
+ tk_id = NCDF_VARDEF(id,'TK',[LTIME],/FLOAT)
+ NCDF_ATTPUT,id,tk_id,'units','K'
+ NCDF_ATTPUT,id,tk_id,'long_name','AIMMS true air temperature'
+ NCDF_ATTPUT,id,tk_id,'_FillValue',-9999.0
+ NCDF_ATTPUT,id,tk_id,'standard_name','air_temperature'
+
+ rh_id = NCDF_VARDEF(id,'RH',[LTIME],/FLOAT)
+ NCDF_ATTPUT,id,rh_id,'units','percent'
+ NCDF_ATTPUT,id,rh_id,'long_name','AIMMS Relative humidity wrt water'
+ NCDF_ATTPUT,id,rh_id,'_FillValue',-9999.0
+ NCDF_ATTPUT,id,rh_id,'standard_name','relative_humidity'
+
+ p_id = NCDF_VARDEF(id,'PRES',[LTIME],/FLOAT)
+ NCDF_ATTPUT,id,p_id,'units','hPa'
+ NCDF_ATTPUT,id,p_id,'long_name','AIMMS Static pressure'
+ NCDF_ATTPUT,id,p_id,'_FillValue',-9999.0
+ NCDF_ATTPUT,id,p_id,'standard_name','air_pressure'
+
+ u_id = NCDF_VARDEF(id,'U',[LTIME],/FLOAT)
+ NCDF_ATTPUT,id,u_id,'units','m s-1'
+ NCDF_ATTPUT,id,u_id,'long_name','AIMMS eastwards wind component'
+ NCDF_ATTPUT,id,u_id,'_FillValue',-9999.0
+ NCDF_ATTPUT,id,u_id,'standard_name','eastward_wind'
+
+ v_id = NCDF_VARDEF(id,'V',[LTIME],/FLOAT)
+ NCDF_ATTPUT,id,v_id,'units','m s-1'
+ NCDF_ATTPUT,id,v_id,'long_name','AIMMS northwards wind component'
+ NCDF_ATTPUT,id,v_id,'_FillValue',-9999.0
+ NCDF_ATTPUT,id,v_id,'standard_name','northward_wind'
+
+ w_id = NCDF_VARDEF(id,'W',[LTIME],/FLOAT)
+ NCDF_ATTPUT,id,w_id,'units','m s-1'
+ NCDF_ATTPUT,id,w_id,'long_name','AIMMS vertical wind component'
+ NCDF_ATTPUT,id,w_id,'_FillValue',-9999.0
+ NCDF_ATTPUT,id,w_id,'standard_name','upward_air_velocity'
+
+ lat_id = NCDF_VARDEF(id,'LAT',[LTIME],/FLOAT)
+ NCDF_ATTPUT,id,lat_id,'units','degree_north'
+ NCDF_ATTPUT,id,lat_id,'long_name','AIMMS GPS latitude'
+ NCDF_ATTPUT,id,lat_id,'_FillValue',-9999.0
+ NCDF_ATTPUT,id,lat_id,'standard_name','latitude'
+
+ lon_id = NCDF_VARDEF(id,'LON',[LTIME],/FLOAT)
+ NCDF_ATTPUT,id,lon_id,'units','degree_east'
+ NCDF_ATTPUT,id,lon_id,'long_name','AIMMS GPS longitude'
+ NCDF_ATTPUT,id,lon_id,'_FillValue',-9999.0
+ NCDF_ATTPUT,id,lon_id,'standard_name','longitude'
+
+ alt_id = NCDF_VARDEF(id,'ALT',[LTIME],/FLOAT)
+ NCDF_ATTPUT,id,alt_id,'units','m'
+ NCDF_ATTPUT,id,alt_id,'long_name','AIMMS GPS altitude'
+ NCDF_ATTPUT,id,alt_id,'_FillValue',-9999.0
+ NCDF_ATTPUT,id,alt_id,'standard_name','altitude'
+
+ vn_id = NCDF_VARDEF(id,'VN',[LTIME],/FLOAT)
+ NCDF_ATTPUT,id,vn_id,'units','m s-1'
+ NCDF_ATTPUT,id,vn_id,'long_name','AIMMS Northwards ground speed'
+ NCDF_ATTPUT,id,vn_id,'_FillValue',-9999.0
+ NCDF_ATTPUT,id,vn_id,'standard_name','platform_speed_wrt_ground'
+
+
+ ve_id = NCDF_VARDEF(id,'VE',[LTIME],/FLOAT); now read data from the ascii file
+ NCDF_ATTPUT,id,ve_id,'units','m s-1'
+ NCDF_ATTPUT,id,ve_id,'long_name','AIMMS Eastwards ground speed'
+ NCDF_ATTPUT,id,ve_id,'_FillValue',-9999.0
+ NCDF_ATTPUT,id,ve_id,'standard_name','platform_speed_wrt_ground'
+
+ vz_id = NCDF_VARDEF(id,'VZ',[LTIME],/FLOAT)
+ NCDF_ATTPUT,id,vz_id,'units','m s-1'
+ NCDF_ATTPUT,id,vz_id,'long_name','AIMMS vertical speed'
+ NCDF_ATTPUT,id,vz_id,'_FillValue',-9999.0
+; no standard name
+
+ roll_id = NCDF_VARDEF(id,'ROLL',[LTIME],/FLOAT)
+ NCDF_ATTPUT,id,roll_id,'units','degree'
+ NCDF_ATTPUT,id,roll_id,'long_name','AIMMS roll angle'
+ NCDF_ATTPUT,id,roll_id,'_FillValue',-9999.0
+ NCDF_ATTPUT,id,roll_id,'standard_name','platform_roll_angle'
+
+ ptch_id = NCDF_VARDEF(id,'PITCH',[LTIME],/FLOAT)
+ NCDF_ATTPUT,id,ptch_id,'units','degree'
+ NCDF_ATTPUT,id,ptch_id,'long_name','AIMMS pitch angle'
+ NCDF_ATTPUT,id,ptch_id,'_FillValue',-9999.0
+ NCDF_ATTPUT,id,ptch_id,'standard_name','platform_pitch_angle'
+
+ hdg_id = NCDF_VARDEF(id,'HDG',[LTIME],/FLOAT)
+ NCDF_ATTPUT,id,hdg_id,'units','degree'
+ NCDF_ATTPUT,id,hdg_id,'long_name','AIMMS Heading angle'
+ NCDF_ATTPUT,id,hdg_id,'_FillValue',-9999.0
+ NCDF_ATTPUT,id,hdg_id,'standard_name','platform_yaw_angle'
+
+ tas_id = NCDF_VARDEF(id,'TAS',[LTIME],/FLOAT)
+ NCDF_ATTPUT,id,tas_id,'units','m s-1'
+ NCDF_ATTPUT,id,tas_id,'long_name','AIMMS True air speed'
+ NCDF_ATTPUT,id,tas_id,'_FillValue',-9999.0
+ NCDF_ATTPUT,id,tas_id,'standard_name','platform_speed_wrt_air'
+
+ aoss_id = NCDF_VARDEF(id,'AOSS',[LTIME],/FLOAT)
+ NCDF_ATTPUT,id,aoss_id,'units','degree'
+ NCDF_ATTPUT,id,aoss_id,'long_name','AIMMS angle of sideslip (positive, flow from left)'
+ NCDF_ATTPUT,id,aoss_id,'_FillValue',-9999.0
+; no standard name
+
+ dpaoa_id = NCDF_VARDEF(id,'DPAOA',[LTIME],/FLOAT)
+ NCDF_ATTPUT,id,dpaoa_id,'units','1'
+ NCDF_ATTPUT,id,dpaoa_id,'long_name','AIMMS non-dimensional angle of attack differential pressure'
+ NCDF_ATTPUT,id,dpaoa_id,'_FillValue',-9999.0
+; no standard name
+
+ dpaoss_id = NCDF_VARDEF(id,'DPAOSS',[LTIME],/FLOAT)
+ NCDF_ATTPUT,id,dpaoss_id,'units','1'
+ NCDF_ATTPUT,id,dpaoss_id,'long_name','AIMMS non-dimensional angle of sideslip differential pressure'
+ NCDF_ATTPUT,id,dpaoss_id,'_FillValue',-9999.0
+; no standard name
+
+; status_id = NCDF_VARDEF(id,'STATUS',[LTIME],/FLOAT)
+; NCDF_ATTPUT,id,status_id,'units',' '
+; NCDF_ATTPUT,id,status_id,'long_name','AIMMS Status flag (0 - solution invalid)'
+; NCDF_ATTPUT,id,status_id,'_FillValue',-9999.0
+; no standard name
+
+; additional variables
+ aoa_id = NCDF_VARDEF(id,'AOA',[LTIME],/FLOAT)
+ NCDF_ATTPUT,id,aoa_id,'units','degree'
+ NCDF_ATTPUT,id,aoa_id,'long_name','AIMMS angle of attack (positive, flow from below aircraft)'
+ NCDF_ATTPUT,id,aoa_id,'_FillValue',-9999.0
+; no standard name
+
+ NCDF_CONTROL,id,/ENDEF ; end of file definition stage
+ print,'NetCDF file definition completed.'
+
+; first file so write the aerodynamic calibration coeffs to output
+
+ NCDF_VARPUT,id,cp0_id,cp_0
+ NCDF_VARPUT,id,cpa_id,cp_alpha
+ NCDF_VARPUT,id,cpb_id,cp_beta
+ NCDF_VARPUT,id,A0_id,A_0
+ NCDF_VARPUT,id,Aa_id,A_alpha
+ NCDF_VARPUT,id,Ab_id,A_beta
+ NCDF_VARPUT,id,B0_id,B_0
+ NCDF_VARPUT,id,Ba_id,B_alpha
+ NCDF_VARPUT,id,Bb_id,B_beta
+ print,'NetCDF aerodynamic constants written.'
+
+; First file, so write out the entire Core time array to the time variable and then pad all variables with NaN
+ NCDF_VARPUT,id,time_id,core_time,OFFSET=0L
+ pad_value = -9999.0
+ padval = fltarr(n_elements(core_time))+pad_value ; set array of fill-in values
+
+ NCDF_VARPUT, id, tk_id, padval, OFFSET=0L
+ NCDF_VARPUT, id, p_id, padval, OFFSET=0L
+ NCDF_VARPUT, id, lat_id, padval, OFFSET=0L
+ NCDF_VARPUT, id, lon_id, padval, OFFSET=0L
+ NCDF_VARPUT, id, rh_id, padval, OFFSET=0L
+ NCDF_VARPUT, id, u_id, padval, OFFSET=0L
+ NCDF_VARPUT, id, v_id, padval, OFFSET=0L
+ NCDF_VARPUT, id, w_id, padval, OFFSET=0L
+ NCDF_VARPUT, id, alt_id, padval, OFFSET=0L
+ NCDF_VARPUT, id, vn_id, padval, OFFSET=0L
+ NCDF_VARPUT, id, ve_id, padval, OFFSET=0L
+ NCDF_VARPUT, id, vz_id, padval, OFFSET=0L
+ NCDF_VARPUT, id, roll_id, padval, OFFSET=0L
+ NCDF_VARPUT, id, ptch_id, padval, OFFSET=0L
+ NCDF_VARPUT, id, hdg_id, padval, OFFSET=0L
+ NCDF_VARPUT, id, tas_id, padval, OFFSET=0L
+ NCDF_VARPUT, id, aoss_id, padval, OFFSET=0L
+ NCDF_VARPUT, id, aoa_id, padval, OFFSET=0L
+ NCDF_VARPUT, id, dpaoa_id, padval, OFFSET=0L
+ NCDF_VARPUT, id, dpaoss_id, padval, OFFSET=0L
+; NCDF_VARPUT, id, status_id, padval, OFFSET=0L
+
+endif ; end of items required when processing first file
+
+; output only the data for full seconds and on adjusted time
+; if not(keyword_set(extendtime)) then NCDF_VARPUT,id,time_id, dtime, OFFSET=data_offset
+; TIME variable already pre-filled with core_time
+
+data_offset = where(core_time eq start_time)
+print,'Data output offset =',data_offset
+
+NCDF_VARPUT,id,tk_id,tc + 273.15, OFFSET=data_offset ; convert to K
+NCDF_VARPUT,id,rh_id,rh * 100.0, OFFSET=data_offset ; convert to percent
+NCDF_VARPUT,id,p_id,pres* 0.01, OFFSET=data_offset ; convert to hPa
+NCDF_VARPUT,id,u_id,u, OFFSET=data_offset
+NCDF_VARPUT,id,v_id,v, OFFSET=data_offset
+NCDF_VARPUT,id,w_id,-1.0 * w, OFFSET=data_offset ; change sign
+NCDF_VARPUT,id,lat_id,lat, OFFSET=data_offset
+NCDF_VARPUT,id,lon_id,lon, OFFSET=data_offset
+NCDF_VARPUT,id,alt_id,alt, OFFSET=data_offset
+NCDF_VARPUT,id,vn_id,vn, OFFSET=data_offset
+NCDF_VARPUT,id,ve_id,ve, OFFSET=data_offset
+NCDF_VARPUT,id,vz_id,vz, OFFSET=data_offset ; not multiplied by -1.0
+NCDF_VARPUT,id,roll_id,roll, OFFSET=data_offset
+NCDF_VARPUT,id,ptch_id,pitch, OFFSET=data_offset
+NCDF_VARPUT,id,hdg_id,hdg, OFFSET=data_offset
+NCDF_VARPUT,id,tas_id,tas, OFFSET=data_offset
+NCDF_VARPUT,id,aoss_id,-1.0*aoss, OFFSET=data_offset ; change sign
+NCDF_VARPUT,id,aoa_id,Aoa, OFFSET=data_offset
+NCDF_VARPUT,id,dpaoa_id,dpaoa, OFFSET=data_offset ;x -1.0?
+NCDF_VARPUT,id,dpaoss_id,dpaoss, OFFSET=data_offset
+;NCDF_VARPUT,id,status_id,status, OFFSET=data_offset
+
+last_time = max(dtime) ; save the end of data from this ascii file
+
+ENDFOR
+; ******************************************************************************************************************
+; end of loop over the number of ascii input files
+; ******************************************************************************************************************
+
+NCDF_CLOSE,id ; close the NetCDF output
+
+
+END
\ No newline at end of file
diff --git a/faampy/aimms/ekf556.exe b/faampy/aimms/ekf556.exe
new file mode 100644
index 0000000..f2b5e07
Binary files /dev/null and b/faampy/aimms/ekf556.exe differ
diff --git a/faampy/aimms/file_info.py b/faampy/aimms/file_info.py
new file mode 100644
index 0000000..be477e5
--- /dev/null
+++ b/faampy/aimms/file_info.py
@@ -0,0 +1,151 @@
+import os
+import re
+
+
+DATA_TYPES = {'core-hires': 'core_faam_20[0-9][0-9][0-1][0-9][0-3][0-9].*_r.*_[bBcC][0-9][0-9][0-9].nc$',
+ 'core-lowres': 'core_faam_20[0-9][0-9][0-1][0-9][0-3][0-9].*_r.*_[bBcC][0-9][0-9][0-9]_1[Hh]z.nc$',
+ 'core-descrip': 'core_faam_20[0-9][0-9][0-1][0-9][0-3][0-9].*_r.*_[bBcC][0-9][0-9][0-9]_descrip.txt$',
+ 'core-quality': 'core_faam_20[0-9][0-9][0-1][0-9][0-3][0-9].*_r.*_[bBcC][0-9][0-9][0-9]_quality.txt$',
+ 'dropsonde-proc': '.*dropsonde_faam_.*_r.*_[bBcC][0-9][0-9][0-9]_proc.nc$',
+ 'dropsonde-raw': '.*dropsonde_faam_.*_r.*_[bBcC][0-9][0-9][0-9]_raw.nc$',
+ 'dropsonde-descrip': '.*dropsonde_faam_.*_r.*_[bBcC][0-9][0-9][0-9]_descrip.txt$',
+ 'flight-cst': 'flight-cst_faam_20[0-9][0-9][0-1][0-9][0-3][0-9]_r.*_[bBcC][0-9][0-9][0-9].txt$',
+ 'flight-log': 'flight-log_faam_20[0-9][0-9][0-1][0-9][0-3][0-9].*_r.*_[bBcC][0-9][0-9][0-9].pdf$',
+ 'flight-sum': 'flight-sum_faam_20[0-9][0-9][0-1][0-9][0-3][0-9].*_r.*_[bBcC][0-9][0-9][0-9].txt$',
+ 'rawdrs': 'core_faam_20[0-9][0-9][0-1][0-9][0-3][0-9].*_r.*_[bB][0-9][0-9][0-9]_rawdrs.zip$',
+ 'rawgin': 'core_faam_20[0-9][0-9][0-1][0-9][0-3][0-9].*_r.*_[bB][0-9][0-9][0-9]_rawgin.zip$',
+ 'rawgps': 'core_faam_20[0-9][0-9][0-1][0-9][0-3][0-9].*_r.*_[bB][0-9][0-9][0-9]_rawgps.zip$',
+ 'rawdlu': 'core_faam_20[0-9][0-9][0-1][0-9][0-3][0-9].*_r.*_[bB][0-9][0-9][0-9]_rawdlu.zip$'}
+
+
+def get_revision_from_filename(filename):
+ """
+ Extracts the revision number from the netCDF core filename
+
+ Example:
+    >>> filename = 'core_faam_20090529_v004_r1_b450.nc'
+    >>> get_revision_from_filename(filename)
+ 1
+ >>>
+ """
+ fn = os.path.basename(filename)
+ fn = fn.split('.')[0]
+ parts = fn.split('_')
+ for p in parts:
+ if re.match('r\d', p):
+ result = int(p[1:])
+ return result
+ return
+
+
+def get_data_type_from_filename(filename):
+ """
+    Returns the data type for the input filename, determined using the
+    DATA_TYPES dictionary.
+ """
+ for key in DATA_TYPES.keys():
+ if re.match(DATA_TYPES[key], os.path.basename(filename)):
+ return key
+ return
+
+
+def get_fid_from_filename(filename):
+ """
+ Extracts the flight number from the netCDF core filename
+
+ Example:
+ >>> ncfile = 'core_faam_20090529_v004_r1_b450.nc'
+    >>> get_fid_from_filename(ncfile)
+ b450
+ >>>
+ """
+ fn = os.path.basename(filename)
+ fn = fn.split('.')[0]
+ parts = fn.split('_')
+ for p in parts:
+ if re.match('[bBcC][0-9][0-9][0-9]', p):
+ return p.lower()
+ return
+
+
+def get_date_from_filename(filename):
+ """
+ Extracts the flight date from the netCDF core filename
+
+ Example:
+ >>> ncfile = 'core_faam_20090529_v004_r1_b450.nc'
+    >>> get_date_from_filename(ncfile)
+ 20090529
+ >>>
+ """
+ fn = os.path.basename(filename)
+ fn = fn.split('.')[0]
+ parts = fn.split('_')
+    for p in parts:
+        # an 8 digit date is also the prefix of a 14 digit date-time,
+        # so one pattern covers both
+        if re.match('20\d{6}', p):
+            return p
+ return
+
+
+class File_Info(object):
+ """
+ Holds all file specific information for a FAAM data file:
+ * filename
+ * path
+ * Flight Number (fid)
+ * date
+ * revision
+ * datatype
+ """
+ def __init__(self, filename):
+ self.filename = os.path.basename(filename)
+ self.path = os.path.dirname(filename)
+ self.fid = get_fid_from_filename(filename)
+ self.date = get_date_from_filename(filename)
+ self.rev = get_revision_from_filename(filename)
+ self.data_type = get_data_type_from_filename(filename)
+
+ def __str__(self):
+ output = '\n'
+ labels = ['Filename', 'Path', 'FID', 'Date', 'Revision', 'Data Type']
+ values = [self.filename,
+ self.path,
+ self.fid,
+ self.date,
+ str(self.rev),
+ self.data_type]
+ for s in zip(labels, values):
+ output += '%9s: %s\n' % s
+ return output
+
+# def __cmp__(self, obj):
+# cmp_key = '%4s_%0.3i_%s' % (self.fid, self.rev, self.datatype)
+# cmp_key_other = '%4s_%0.3i_%s' % (obj.fid, obj.rev, obj.datatype)
+# if cmp_key < cmp_key_other:
+# return -1
+# elif cmp_key == cmp_key_other:
+# return 0
+# elif cmp_key > cmp_key_other:
+# return 1
+# else:
+# pass
+
+ def __eq__(self, other):
+ return ((self.fid, self.rev, self.data_type) ==
+ (other.fid, other.rev, other.data_type))
+ def __ne__(self, other):
+ return not self == other
+ def __gt__(self, other):
+ return (self.fid, self.rev) > (other.fid, other.rev)
+ def __lt__(self, other):
+ return (self.fid, self.rev) < (other.fid, other.rev)
+ def __ge__(self, other):
+ return (self > other) or (self == other)
+ def __le__(self, other):
+ return (self < other) or (self == other)
+
+
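+# A minimal usage sketch (the filename is illustrative; it matches the
+# 'core-hires' pattern in DATA_TYPES above):
+#
+#     >>> fi = File_Info('core_faam_20090529_v004_r1_b450.nc')
+#     >>> fi.fid, fi.rev, fi.data_type
+#     ('b450', 1, 'core-hires')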
diff --git a/faampy/aimms/file_list.py b/faampy/aimms/file_list.py
new file mode 100644
index 0000000..60a0b39
--- /dev/null
+++ b/faampy/aimms/file_list.py
@@ -0,0 +1,73 @@
+import os
+import sys
+
+import file_info
+
+
+class File_List(list):
+ """
+ A list of File_Info objects. The list can be sorted and filtered which can
+ be useful for batch processing.
+
+ For example it is possible to (i) get all DECADES rawdlu and flight-constant
+ files from a path, (ii) filter those for the latest revisions and reprocess
+ them.
+
+ """
+ def __init__(self, path):
+ """
+ Get all FAAM data files in the path.
+
+ :param path: path which will be walked and checked for FAAM data files
+ """
+ self.Path = path
+ if os.path.isdir(path):
+ for root, subFolders, files in os.walk(self.Path):
+ for f in files:
+ if file_info.get_data_type_from_filename(f):
+ self.append(file_info.File_Info(os.path.join(root, f)))
+ else:
+ sys.stdout.write('%s is not a directory.\n' % path)
+ self.sort()
+
+ def filter_by_data_type(self, dtype):
+ """
+ Filtering by data type.
+ """
+        if dtype not in file_info.DATA_TYPES:
+            sys.stdout.write('Submitted dtype unknown.\nValid data types are: %s\n' % ', '.join(sorted(file_info.DATA_TYPES.keys())))
+            return
+
+ bad_index = []
+ for i in self:
+ if not i.data_type == dtype:
+ bad_index.append(i)
+ for b in bad_index:
+ self.remove(b)
+
+ def filter_latest_revision(self):
+ """
+ Compresses the list and keeps only the latest revision file for a FID
+ """
+ bad_index = []
+ self.sort(key=lambda i: '%4s_%s_%s_%0.3i' % (i.fid, i.date, i.data_type, i.rev))
+ self.reverse()
+ for i in range(len(self)-1):
+ if ((self[i].fid, self[i].date, self[i].data_type)) == ((self[i+1].fid, self[i+1].date, self[i+1].data_type)):
+ bad_index.append(self[i+1])
+ for b in bad_index:
+ self.remove(b)
+ self.sort()
+
+ def __str__(self):
+ output = ''
+ for i in self:
+ output += '%s\n' % (i.filename,)
+ return output
+
+ def get_filenames(self):
+ """
+        Returns the full paths of the files in the list.
+ """
+ result = [os.path.join(i.path, i.filename) for i in self]
+ return result
+
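+
+# A minimal usage sketch of the filter workflow described in the class
+# docstring (the archive path is illustrative):
+#
+#     fl = File_List('/mnt/faamarchive/badcMirror')
+#     fl.filter_by_data_type('core-lowres')
+#     fl.filter_latest_revision()
+#     for filename in fl.get_filenames():
+#         print(filename)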
diff --git a/faampy/aimms/findcorefile.pro b/faampy/aimms/findcorefile.pro
new file mode 100644
index 0000000..4c61615
--- /dev/null
+++ b/faampy/aimms/findcorefile.pro
@@ -0,0 +1,25 @@
+FUNCTION findcorefile, basedir, fnum, core1hz
+
+; FUNCTION: FINDCOREFILE
+;
+; PURPOSE: Returns a string value containing the full path/filename information for a FAAM Core NetCDF file.
+;
+; ARGUMENTS: BASEDIR - String variable containing the base directory in which to start searching. This assumes that data files are organised
+; within a directory structure: /project/obr/project_name with sub-directories for Core and
+; other data below that level.
+; Example: /pr
+
+fnum=STRLOWCASE(fnum)
+help,fnum
+corefile=basedir+'/faam_core/'+'*'+fnum+'*.nc'
+corefile=file_search(corefile)
+print,'Core: ',corefile
+
+core1hz=basedir+'/faam_core/'+'*'+fnum+'*_1hz.nc'
+core1hz=file_search(core1hz)
+print,'Core_1hz: ',core1hz
+
+
+return, corefile  ; core1hz is passed back through the argument
+END
+
\ No newline at end of file
diff --git a/faampy/aimms/qa_figure.py b/faampy/aimms/qa_figure.py
new file mode 100644
index 0000000..27d08bd
--- /dev/null
+++ b/faampy/aimms/qa_figure.py
@@ -0,0 +1,195 @@
+# example input file pairs; the later assignments override the earlier ones
+# core_filename = '/home/axel/Dropbox/campaigns/vanaheim2016/b988-oct-20/core_faam_20161020_v004_r0_b988.nc'
+# aimms_filename = '/home/axel/Dropbox/campaigns/vanaheim2016/b988-oct-20/metoffice-aimms_faam_20161020_v003_r0_b988.nc'
+
+core_filename = '/home/axel/Dropbox/campaigns/wintex2017/b994-feb-03/core_faam_20170203_v004_r0_b994.nc'
+aimms_filename = '/mnt/faamarchive/scanner/aimms/metoffice-aimms_faam_20170203_v003_r0_b994.nc'
+
+
+import sys
+sys.path.insert(0, '/home/axel/git-repos/faampy/')
+
+
+import netCDF4
+import pandas as pd
+import numpy as np
+
+import matplotlib.gridspec as gridspec
+
+from faampy.core.utils import get_mpl_time
+from matplotlib.dates import num2date, date2num
+from matplotlib import dates
+import matplotlib.pyplot as plt
+from matplotlib.lines import Line2D
+
+plt.style.use('seaborn-deep')
+
+
+# plot time series of the u,v,w and delta; add top figure with altitude and liquid water content
+# plot time series of t and rh and delta
+# plot spectra for u,v,w,tas
+# plot scatter plots matrix for u,v,w,tas
+
+def plot_wind_vector_components():
+ pass
+
+
+core_ds = netCDF4.Dataset(core_filename, 'r')
+aimms_ds = netCDF4.Dataset(aimms_filename, 'r')
+
+
+#def get_data(var, core_ds, aimms_ds):
+varnames = [['U_C', 'U'],
+ ['V_C', 'V'],
+ ['W_C', 'W'],
+ ['TAS', 'TAS']]
+
+cols_c, cols_a = zip(*varnames)
+t_c = np.array(num2date(get_mpl_time(core_ds, 32)))
+t_a = netCDF4.num2date(aimms_ds.variables['TIME'][:], aimms_ds.variables['TIME'].units)
+
+
+df_colnames = ['core_%s' % s.lower() for s in cols_a]
+df_c = pd.DataFrame(index=t_c.ravel(), columns=df_colnames)
+for i,j in zip(df_colnames, cols_c):
+ df_c[i] = core_ds.variables[j][:].data.ravel()
+df_colnames = ['aimms_%s' % s.lower() for s in cols_a]
+df_a = pd.DataFrame(index=t_a, columns=df_colnames)
+df_a = df_a.tz_localize('UTC')
+for i,j in zip(df_colnames, cols_a):
+ df_a[i] = aimms_ds.variables[j][:].data.ravel()
+
+# filter data frame; only use data points when the aircraft was in the air
+wow = core_ds.variables['WOW_IND'][:].data
+wow_ix = (np.min(np.where(wow == 0)[0]),
+ np.max(np.where(wow == 0)[0]))
+t_ix = (t_c[wow_ix[0],0], t_c[wow_ix[1],0])
+df_c = df_c[(df_c.index >= t_ix[0]) & (df_c.index < t_ix[1])]
+df_a = df_a[(df_a.index >= t_ix[0]) & (df_a.index < t_ix[1])]
+
+#a = aimms_data.variables[varnames[1]][:]
+#wow = core_data.variables['WOW'][:]
+
+
+# time series plots
+fig = plt.figure()
+gs = gridspec.GridSpec(4, 1, left=0.1, hspace=0.05)
+
+_ax = {}
+_ax['ax_u'] = fig.add_subplot(gs[3]) # axes u wind component
+_ax['ax_v'] = fig.add_subplot(gs[2], sharex=fig.get_axes()[0]) # axes v wind component
+_ax['ax_w'] = fig.add_subplot(gs[1], sharex=fig.get_axes()[0]) # axes w wind component
+_ax['ax_tas'] = fig.add_subplot(gs[0], sharex=fig.get_axes()[0]) # axes tas wind component
+
+
+for wv in ['u', 'v', 'w', 'tas']:
+ #df_c['core_'+wv].plot(ax=_ax['ax_'+wv], label='core-'+wv, ylabe)
+ #df_a['aimms_'+wv].plot(ax=_ax['ax_'+wv], label='aimms-'+wv))
+ ax = _ax['ax_'+wv]
+ l1 = ax.plot_date(date2num(df_c.index.to_pydatetime()), df_c['core_'+wv].values, '-', label='core')
+ l2 = ax.plot_date(date2num(df_a.index.to_pydatetime()), df_a['aimms_'+wv].values, '-', label='aimms')
+ #ax.legend()
+ ax.grid(True)
+ ax.text(0.02, 0.95, wv, verticalalignment='top', transform=ax.transAxes)
+ ax.set_ylabel(r'%s $(ms^{-1})$' % wv)
+ # the delta is plotted for 1Hz averaged data to keep it simple
+    delta = df_c['core_'+wv].resample('S').mean() - df_a['aimms_'+wv].resample('S').mean()
+ ax2 = _ax['ax_'+wv].twinx()
+ l3 = ax2.plot_date(date2num(delta.index.to_pydatetime()), delta.values, '-', color='firebrick', label=r'$\Delta$')
+ ax2.set_ylabel(r'$\Delta \quad (ms^{-1})$')
+ if wv != 'u':
+ for label in ax.get_xticklabels()[:]:
+ label.set_visible(False)
+ else:
+ hfmt = dates.DateFormatter('%H:%M')
+ ax.xaxis.set_major_locator(dates.MinuteLocator(byminute=range(0, 60, 30)))
+ ax.xaxis.set_major_formatter(hfmt)
+ ax.set_xlabel('time (utc)')
+
+ if wv == 'tas':
+ lines = l1+l2+l3
+ labs = [l.get_label() for l in lines]
+ ax.legend(lines,labs, loc='upper right')
+
+
+
+
+# scatter plots
+fig = plt.figure()
+gs = gridspec.GridSpec(2, 2)
+
+_ax = {}
+_ax['ax_u'] = fig.add_subplot(gs[3], aspect='equal') # axes u wind component
+_ax['ax_v'] = fig.add_subplot(gs[2], aspect='equal') # axes v wind component
+_ax['ax_w'] = fig.add_subplot(gs[1], aspect='equal') # axes w wind component
+_ax['ax_tas'] = fig.add_subplot(gs[0], aspect='equal') # axes tas wind component
+
+for wv in ['u', 'v', 'w', 'tas']:
+ ax = _ax['ax_'+wv]
+    x = np.array(df_c['core_'+wv].resample('S').mean())
+    y = np.array(df_a['aimms_'+wv].resample('S').mean())
+ ax.plot(x, y, '.')
+ ax.grid(True)
+ _lim=(np.min([ax.axes.get_xlim()[0], ax.axes.get_ylim()[0]]),
+ np.max([ax.axes.get_xlim()[1], ax.axes.get_ylim()[1]]))
+ ax.set_xlim(_lim)
+ ax.set_ylim(_lim)
+ l = Line2D([0,1],[0,1], color='0.3', transform=ax.transAxes)
+ ax.add_line(l)
+ ax.text(0.05, 0.95, wv, verticalalignment='top', transform=ax.transAxes)
+
+
+
+def running_mean(x, N):
+ return np.convolve(x, np.ones((N,))/N)[(N-1):]
+
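+# A worked example for running_mean; note the edge effect in the last value,
+# where the averaging window runs off the end of the array:
+#
+#     running_mean(np.arange(5.), 2)  ->  [0.5, 1.5, 2.5, 3.5, 2.0]
+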
+
+def power_spectrum(data, *args):
+ """
+    calculates the power spectrum of the data; the optional argument gives
+    the sampling frequency in Hz (default: the second dimension of the data)
+
+ """
+ if args:
+ freq = args[0]
+ else:
+ freq = data.shape[1]
+
+ ps = np.abs(np.fft.fft(data.ravel()))**2
+ time_step = 1./freq
+ freqs = np.fft.fftfreq(data.size, time_step)
+ idx = np.argsort(freqs)
+ return(freqs[idx], ps[idx])
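+
+# Example usage (cf. the spectra plots below); the FAAM core data are sampled
+# at 32 Hz, the AIMMS data at 20 Hz:
+#
+#     freqs, ps = power_spectrum(np.array(df_c['core_w']), 32)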
+
+
+# spectra plots
+
+fig = plt.figure()
+gs = gridspec.GridSpec(2, 2)
+
+_ax = {}
+_ax['ax_u'] = fig.add_subplot(gs[3]) # axes u wind component
+_ax['ax_v'] = fig.add_subplot(gs[2]) # axes v wind component
+_ax['ax_w'] = fig.add_subplot(gs[1]) # axes w wind component
+_ax['ax_tas'] = fig.add_subplot(gs[0]) # axes tas wind component
+
+for wv in ['u', 'v', 'w', 'tas']:
+ ax = _ax['ax_'+wv]
+ c_freqs, c_ps = power_spectrum(np.array(df_c['core_'+wv]), 32)
+ a_freqs, a_ps = power_spectrum(np.array(df_a['aimms_'+wv]), 20)
+
+ c_freqs = c_freqs[:-1]
+ a_freqs = a_freqs[:-1]
+ c_ps = c_ps[:-1]
+ a_ps = a_ps[:-1]
+
+ ax.plot(c_freqs[c_freqs < 15.5], running_mean(c_ps, 200)[c_freqs < 15.5])
+ ax.plot(a_freqs[a_freqs < 9.5], running_mean(a_ps, 200)[a_freqs < 9.5])
+
+ ax.set_xscale('log')
+ ax.set_yscale('log')
+ ax.set_ylim(1, 10**8)
+ ax.set_xlim(0.1, 16)
+ xticks = [1, 2, 3, 4, 5, 6, 8, 10, 16]
+ ax.set_xticks(np.array(xticks))
+ ax.set_xticklabels([str(i) for i in xticks])
+ ax.text(0.05, 0.95, wv, verticalalignment='top', transform=ax.transAxes)
diff --git a/faampy/avaps/__init__.py b/faampy/avaps/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/faampy/avaps/dropsonde.py b/faampy/avaps/dropsonde.py
new file mode 100644
index 0000000..5dc9a10
--- /dev/null
+++ b/faampy/avaps/dropsonde.py
@@ -0,0 +1,399 @@
+#!/usr/bin/python
+
+'''
+Created on 19 May 2010
+
+@author: axel
+'''
+
+import datetime
+import matplotlib
+import matplotlib.pyplot as plt
+import netCDF4
+import numpy as np
+import os
+import re
+import sys
+import tempfile
+import zipfile
+import time
+
+#from faampy._3rdparty.haversine import points2distance, recalculate_coordinate
+
+from haversine import points2distance, recalculate_coordinate
+
+_DROPSONDE_ICON = '\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00 \x00\x00\x00 \x08\x03\x00\x00\x00D\xa4\x8a\xc6\x00\x00\x00\x01sRGB\x00\xae\xce\x1c\xe9\x00\x00\x003PLTE@\x00\x00\t\x06\x03I\x17\x182.*{&)bICFPc\xa99=\xcd28wP7\xe5_`\xaa{Zt\x97\xc8\xa1\x92\x93\xd7\x86\x82\xf7\xb2\xae\xd8\xcf\xd0o\x99\xb5\xad\x00\x00\x00\x01tRNS\x00@\xe6\xd8f\x00\x00\x00\x01bKGD\x00\x88\x05\x1dH\x00\x00\x00\tpHYs\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x00\x07tIME\x07\xda\n\x05\t\x0e \xa8#K\xb3\x00\x00\x00\xe5IDAT8\xcb\xa5\x92\xc1b\xc4 \x08D\x1d\x81\x88\xa4\xa8\xff\xff\xb5\x8b\xb9\xb4\xdb\x86\xf4\xb0\x1c\xbc\xccs\x98I,\xe5m\x18\x95[\xab(w#\x8a\xc6\x18\xc3\xcc\x82\xf9\x03\xd5\xb5\xe2r \xd56\x81\xfe\x8e\x88\xa9\x1a+W\xe6\xc6\x9b0\x0e\xb39\xe7\x0f\x07SkZyg\x08*\x0e\xc4\x1e\xf9\x06\x94Uu\xef\x0e9|b~\xa7\x08\xddj\xd5\x06\xde\x0c\xdf4\xc1\x0eo\xd0\xba\xe7\xbe\xe8\x1c\x83m\xb4L\x8ei\x91{"\x95K!\x9b\xfa _A\xcag\xfa\xbf\xc0\xfa\xc4`\x01 B\xde\x91B~\x04.\xfd\x198\x8e\r\xa4\xba\xf4.TS\x00\xd4\xcfM +Jr\x06@\x90L\xef}\x03\xe9\x02\xa1K\x17Pjp\x06 O\x0f\xa1x\x17\x81g_\xf9\xab,\x97\xa8\x90U\\\xcb\x9d\x8e\xb5\x8e\xfb\xeb\x0e\x12w\xc0\xb3\x84\x88\x8aN\xf9\x82\xb0p\xd9?)\x01^\xa0$\x07L\x0c\x13\x1c\x17\x00\x00\x00\x00IEND\xaeB`\x82'
+
+_KML_HEADER = """<?xml version="1.0" encoding="UTF-8"?>
+<kml xmlns="http://www.opengis.net/kml/2.2">
+<Document>
+  <name>Dropsonde</name>
+  <open>0</open>
+"""
+
+_KML_PLACEMARK = """<Placemark>
+  <name>%s</name>
+  <description>%s</description>
+"""
+
+_KML_POINT = """  <Point>
+    <altitudeMode>absolute</altitudeMode>
+    <coordinates>
+      %f,%f,%f
+    </coordinates>
+  </Point>
+</Placemark>
+"""
+
+_KML_LINESTRING = """  <LineString>
+    <extrude>1</extrude>
+    <tessellate>1</tessellate>
+    <altitudeMode>absolute</altitudeMode>
+    <coordinates>
+      %s
+    </coordinates>
+  </LineString>
+</Placemark>
+"""
+
+_KML_FOOTER = """</Document>
+</kml>
+"""
+
+
+class Dropsonde(object):
+    """Class that processes dropsonde files. Its main purpose at the moment
+    is to create a kmz file that is viewable in Google Earth."""
+
+ def __init__(self):
+ self.kmz_filename = None
+ self.kmz_path = None
+ #creates directory structure for the kmz
+ self.kmz_tmp_directory = tempfile.mkdtemp()
+ os.mkdir(os.path.join(self.kmz_tmp_directory, 'icons'))
+ os.mkdir(os.path.join(self.kmz_tmp_directory, 'figures'))
+ icon = open(os.path.join(self.kmz_tmp_directory, 'icons', 'dropsonde_32x32.png'), 'wb')
+ icon.write(_DROPSONDE_ICON)
+ icon.close()
+
+ def __decdeg2dms__(self, dd):
+        """converts decimal degrees to a (degrees, minutes, seconds) tuple"""
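+        # worked example: __decdeg2dms__(51.5) -> (51.0, 30.0, 0.0)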
+ mnt,sec = divmod(dd * 3600, 60)
+ deg,mnt = divmod(mnt, 60)
+ return deg,mnt,sec
+
+    def __zip__(self):
+        files4zipping = []
+        for root, subFolders, files in os.walk(self.kmz_tmp_directory):
+            for f in files:
+                files4zipping.append(os.path.join(root, f))
+        #if not self.kmz_file:
+        #    self.setOutputfile()
+        outfile = os.path.join(self.kmz_path, self.kmz_filename)
+        # zf/fn avoid shadowing the zip and file builtins
+        zf = zipfile.ZipFile(outfile, mode='w')
+        for fn in files4zipping:
+            zipname = fn[len(self.kmz_tmp_directory) + len(os.sep):]
+            zf.write(fn, zipname)
+
+ def close(self):
+ self.ds.close()
+
+ def set_outpath(self, path):
+ if os.path.exists(path):
+ self.kmz_path = path
+ else:
+ os.mkdir(path)
+ self.kmz_path = path
+
+ def process(self):
+ self.kml = _KML_HEADER
+ for ncfile in self.FileList:
+ self.read(ncfile)
+ self.calc_drift()
+ self.create_figure()
+ self.create_kml()
+ self.close()
+ self.kml += _KML_FOOTER
+ self.write_kmz()
+ self.__zip__()
+
+    def read(self, filename):
+        """Wrapper for the netCDF or text file read functions"""
+        self.kmz_filename = os.path.splitext(os.path.basename(filename))[0] + '.kmz'
+        if os.path.splitext(filename)[1] == '.nc':
+            self.__read_netcdf__(filename)
+        else:
+            self.__read_txt__(filename)
+
+ def __read_txt__(self, txtfile):
+        # open the ascii text file
+ f = open(txtfile, 'r')
+ data = f.readlines()
+ self.lat_raw, self.lon_raw, self.alt_raw = [],[], []
+ self.alt = []
+ self.press, self.rh_raw, self.wspd_raw, self.wdir_raw, self.dz, self.sat_num, self.temp_raw = [], [], [], [], [], [], []
+ self.theta_raw, self.gps_alt = [], []
+ self.id=''
+ self.launch_time=''
+ self.project_name=''
+ self.mission_id=''
+
+ for line in data:
+ if line.startswith('AVAPS-T'):
+ if 'LAU' in line:
+ self.id = line.split()[2]
+ elif 'Launch Time' in line:
+ self.launch_time = line[45:].strip()
+ elif 'Project Name' in line:
+ self.project_name=line.split(':')[1].split(',')[0].strip()
+ self.mission_id=line.split(':')[1].split(',')[1].strip()
+ elif ((self.id == '') and (line.split()[1] == 'END')):
+ self.id = line.split()[2]
+ elif line.startswith('AVAPS-D'):
+ self.lon_raw.append(float(line.split()[11]))
+ self.lat_raw.append(float(line.split()[12]))
+ self.alt_raw.append(float(line.split()[19]))
+
+                R = 287.05   # specific gas constant of dry air [J kg-1 K-1]
+                cp = 1005.7  # specific heat capacity of dry air [J kg-1 K-1]
+                T = float(line.split()[6]) + 273.15  # temperature in K (field 6)
+                P = float(line.split()[5])           # pressure in hPa (field 5)
+                P_0 = 1000.0
+                # potential temperature: theta = T*(P_0/P)**(R/cp)
+                self.theta_raw.append(T * (P_0/P)**(R/cp))
+ self.press.append(float(line.split()[5]))
+ self.temp_raw.append(float(line.split()[6]))
+ self.rh_raw.append(float(line.split()[7]))
+ self.wspd_raw.append(float(line.split()[9]))
+ self.wdir_raw.append(float(line.split()[8]))
+ self.dz.append(float(line.split()[10]))
+ self.sat_num.append(float(line.split()[14]))
+ self.alt.append(float(line.split()[12]))
+
+ # create empty lists for latitude, longitude, altitude
+ self.lat = []; self.lon = []; self.alt =[]
+
+ for i in range(len(self.lat_raw)):
+ if not (-999 in [self.lat_raw[i],self.lon_raw[i], self.alt_raw[i]]):
+ if not (99999 in [self.lat_raw[i],self.lon_raw[i], self.alt_raw[i]]):
+ if not (99 in [self.lat_raw[i],self.lon_raw[i], self.alt_raw[i]]):
+ self.lat.append(self.lat_raw[i])
+ self.lon.append(self.lon_raw[i])
+ self.alt.append(self.alt_raw[i])
+
+ def __read_netcdf__(self, ncfile):
+
+ # open the netcdf file
+ self.ds = netCDF4.Dataset(ncfile, 'r')
+
+ self.id = self.ds.SoundingDescription.split()[1]
+ self.launch_time = datetime.datetime.strptime(self.ds.variables['base_time'].string, '%a %b %d %H:%M:%S %Y')
+
+        # create empty lists for latitude, longitude, altitude
+ self.lat = []; self.lon = []; self.alt =[]
+
+ self.lat_raw = list(self.ds.variables['lat'][:])
+ self.lon_raw = list(self.ds.variables['lon'][:])
+ self.alt_raw = list(self.ds.variables['alt'][:])
+ self.theta_raw = list(self.ds.variables['theta'][:])
+ self.rh_raw = list(self.ds.variables['rh'][:])
+ wspd_raw = list(self.ds.variables['wspd'][:])
+ wdir_raw = list(self.ds.variables['wdir'][:])
+
+ for i in range(len(self.lat_raw)):
+ if self.lat_raw[i] != -999 and self.lon_raw[i] != -999 and self.alt_raw[i] != -999:
+ self.lat.append(self.lat_raw[i])
+ self.lon.append(self.lon_raw[i])
+ self.alt.append(self.alt_raw[i])
+
+    def __get_fid__(self):
+        """get the flight number from the netcdf global attributes"""
+        pattern = '[bB]\d{3}'
+        # check several attributes to make sure that we get the fid
+        attrs = [self.ds.SoundingDescription, ]
+        for attr in attrs:
+            fid = re.search(pattern, attr)
+            if fid:
+                return fid.group().lower()
+
+ def create_kml(self):
+ kml = ""
+ fall_time = max(self.ds.variables['time'][:]) - min(self.ds.variables['time'][:])
+
+ self.kml_name = '%s-%s' % (self.__get_fid__(),
+ re.split('[_,.]', self.ds.SoundingDescription)[1])
+
+        description = self.launch_time.strftime('%Y-%m-%d %H:%M:%S') + '\n'
+        for e in self.Entries:
+            row = [e.Name, e.Start_time, e.Start_height,
+                   e.Stop_time, e.Stop_height, e.Comment]
+            # replace None entries with empty strings
+            row = ['' if i is None else i for i in row]
+            html += '<tr>\n<td>' + '</td><td>'.join(row) + '</td>\n</tr>\n'
+        html += '</table>\n'
+        if ofile:
+            f = open(ofile, 'w')
+            f.write(html)
+            f.close()
+        return html
+
+
+def process(fltsummfile, ncfile, outpath):
+ ds=netCDF4.Dataset(ncfile, 'r')
+ basetime=faampy.core.utils.get_base_time(ds)
+ fid=faampy.core.utils.get_fid(ds)
+ fs=FlightSummary(fltsummfile)
+ #for i in range(len(fs.Entries)):
+ for ent in fs.Entries:
+ try:
+ ent.fix_time(ds)
+ ent.set_index(ds)
+ ent.set_coords(ds)
+ except:
+ pass
+ basename='flight-sum_faam_%s_r0_%s' % (basetime.strftime('%Y%m%d'), fid)
+ fs.as_kml(ofile=os.path.join(outpath, basename+'.kml'), fid=fid, date=basetime.strftime('%d/%m/%Y'))
+ fs.as_html(ofile=os.path.join(outpath, basename+'.html'))
+ fs.as_txt(ofile=os.path.join(outpath, basename+'.txt'), fid=fid, date=basetime.strftime('%d/%m/%Y'))
+ ds.close()
+ return fs
+
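+# A minimal usage sketch (the file names are illustrative):
+#
+#     fs = process('flight-sum_faam_20140916_r0_b884.txt',
+#                  'core_faam_20140916_v004_r0_b884.nc',
+#                  '/tmp')
+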
+
+def _argparser():
+ import argparse
+ from argparse import RawTextHelpFormatter
+ sys.argv.insert(0, 'faampy flight_summary')
+ parser=argparse.ArgumentParser(prog='faampy flight_summary',
+ description=__doc__,
+ formatter_class=RawTextHelpFormatter)
+ parser.add_argument('fltsummfile', action="store", type=str, help='Flight Summary file')
+ parser.add_argument('ncfile', action="store", type=str, help='core_faam netCDF')
+ parser.add_argument('outpath', action="store", type=str, help='outpath where the newly formatted flight summaries will be saved')
+ return parser
+
+
+def main():
+ parser = _argparser()
+ args = parser.parse_args()
+ process(args.fltsummfile, args.ncfile, args.outpath)
+ sys.stdout.write('Done ...\n')
+
+
+if __name__ == '__main__':
+ main()
diff --git a/faampy/core/nc_to_csv.py b/faampy/core/nc_to_csv.py
new file mode 100644
index 0000000..7832c9f
--- /dev/null
+++ b/faampy/core/nc_to_csv.py
@@ -0,0 +1,195 @@
+#!/usr/bin/env python
+
+"""
+Extracts variables from netCDF and writes those to a csv file.
+
+"""
+
+
+from faampy.core.utils import get_mpl_time
+from matplotlib.dates import num2date
+
+import netCDF4
+import numpy as np
+import os
+import re
+import sys
+import zipfile
+
+
+def get_fid(ds):
+ """
+ Get the flight ID ([b,d]nnn) from netCDF4.Dataset.
+
+ :param ds: core_faam dataset
+ :type param: netCDF4.Dataset
+
+ >>> ncfile = 'core_faam_20130403_v004_r0_b768.nc'
+ >>> ds = netCDF4.Dataset(ncfile, 'r')
+ >>> print(get_fid(ds))
+ b768
+
+ """
+ # parse the title global attribute of the netCDF
+    if hasattr(ds, 'title'):
+        result = str(re.findall('[bBcCpP]\d{3}', ds.title)[0])
+        result = result.lower()
+    elif hasattr(ds, 'Title'):
+        result = str(re.findall('[bBcCpP]\d{3}', ds.Title)[0])
+        result = result.lower()
+ else:
+ result = ""
+ return result
+
+
+VARNAMES = ['ACLD_GIN',
+ 'ACLF_GIN',
+ 'ACLS_GIN',
+ 'ALT_GIN',
+ 'AOA',
+ 'AOSS',
+ 'GSPD_GIN',
+ 'HDGR_GIN',
+ 'HDG_GIN',
+ 'HGT_RADR',
+ 'IAS_RVSM',
+ 'LAT_GIN',
+ 'LON_GIN',
+ 'LWC_JW_U',
+ 'NV_LWC_U',
+ 'NV_TWC_U',
+ 'P0_S10',
+ 'P9_STAT',
+ 'PALT_RVS',
+ 'PA_TURB',
+ 'PB_TURB',
+ 'PITR_GIN',
+ 'PSP_TURB',
+ 'PS_RVSM',
+ 'PTCH_GIN',
+ 'Q_RVSM',
+ 'ROLL_GIN',
+ 'ROLR_GIN',
+ 'TAS',
+ 'TAS_RVSM',
+ 'TAT_DI_R',
+ 'TAT_ND_R',
+ 'TDEW_GE',
+ 'TRCK_GIN',
+ 'TWC_EVAP',
+ 'TWC_TDEW',
+ 'U_C',
+ 'U_NOTURB',
+ 'VELD_GIN',
+ 'VELE_GIN',
+ 'VELN_GIN',
+ 'VMR_CR2',
+ 'VMR_C_U',
+ 'V_C',
+ 'V_NOTURB',
+ 'WOW_IND',
+ 'WVSS2F_VMR',
+ 'WVSS2R_VMR',
+ 'W_C']
+
+
+def process(ds, varnames, outfile=None, include_flag=False):
+ """
+ :param ds: netCDF4.Dataset object
+    :param varnames: list of variable names to extract
+    :param outfile: csv filename for the output
+    :param include_flag: if True, the matching _FLAG variable is added for
+        every variable
+
+ """
+
+ if include_flag:
+ new_varnames = []
+ for p in varnames:
+ new_varnames.append(p)
+ new_varnames.append(p+'_FLAG')
+ varnames = new_varnames
+
+ n = ds.variables['Time'][:].size
+ dt = [i[0].strftime('%Y-%m-%d %H:%M:%S') for i in num2date(get_mpl_time(ds, 1))]
+
+ header =','.join(['utc',] + varnames)
+
+ _dtype = [('utc', 'S20'), ]
+ _fmt = ['%s', ]
+ for v in varnames:
+ if v.endswith('_FLAG'):
+ _dtype.append((v, int))
+ _fmt.append('%i')
+ else:
+ _dtype.append((v, float))
+ _fmt.append('%.3f')
+
+ result = np.recarray((n,), dtype=_dtype)
+
+ for v in result.dtype.fields.keys():
+ if v.endswith('_FLAG'):
+ result[v] = 3
+ else:
+ result[v] = -9999.
+ result['utc'] = dt
+
+ for v in varnames:
+ if v.lower() in [i.lower() for i in ds.variables.keys()]:
+ if len(ds.variables[v][:].shape) == 2:
+ result[v] = ds.variables[v][:, 0]
+ else:
+ result[v] = ds.variables[v][:]
+ if v.endswith('_FLAG'):
+ result[v][result[v] < 0] = 3
+
+ if outfile:
+ lines = []
+ for r in result:
+ lines.append(','.join(_fmt[:]) % tuple(list(r)[:]))
+ out = open(outfile, 'w')
+ out.write(header + '\n' + '\n'.join(lines) + '\n')
+ out.close()
+ return (result, header)
+
+
+if __name__ == '__main__':
+ import argparse
+
+ parser = argparse.ArgumentParser(description='Creates a csv file from a netCDF.')
+ parser.add_argument('ncfile', action='store', type=str,
+ help="FAAM core file *or* a directory. When input is a directory the script finds recursively all 1hz netCDF files inside thed processes them.")
+ parser.add_argument('--outpath',
+ action='store',
+ type=str,
+ default=os.environ['HOME'],
+ help="Outpath for the csv file.")
+ parser.add_argument('--suffix',
+ action='store',
+ type=str,
+ default='_extract',
+ help="Outpath for the csv file.")
+ args = parser.parse_args()
+
+ file_list=[]
+ if os.path.isdir(args.ncfile):
+ pattern = 'core_faam_\d{8}.*_?\d{3}.nc'
+ for root, subFolders, files in os.walk(args.ncfile):
+ for f in files:
+ if re.match(pattern, os.path.basename(f)):
+ file_list.append(os.path.join(root, f))
+ if not file_list:
+        sys.stdout.write('No core netCDF files found in %s ... leaving\n\n' % args.ncfile)
+ sys.exit()
+ else:
+ file_list = [args.ncfile,]
+
+ for f in file_list:
+ sys.stdout.write('Processing ... %s\n' % (f,))
+ ds = netCDF4.Dataset(f, 'r')
+ fid = get_fid(ds)
+ outfile = os.path.join(args.outpath, os.path.basename(f)[:-3] + args.suffix + '.csv')
+    data, hdr = process(ds, VARNAMES, outfile=outfile, include_flag=True)
+ arc = zipfile.ZipFile(os.path.splitext(outfile)[0]+'.zip', 'w', zipfile.ZIP_DEFLATED)
+ arc.write(outfile, os.path.basename(outfile))
+ sys.stdout.write('Output written to ... \n %s\n' % (outfile,))
+ arc.close()
diff --git a/faampy/core/utils.py b/faampy/core/utils.py
new file mode 100644
index 0000000..ec2f0e9
--- /dev/null
+++ b/faampy/core/utils.py
@@ -0,0 +1,324 @@
+import datetime
+import netCDF4
+import numpy as np
+import os
+import pandas as pd
+import re
+import sys
+import shutil
+
+from matplotlib.dates import date2num, num2date
+
+
+def sub_nans(ncfilename):
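+    """Copies the netCDF file, appending '_edited' to the filename, and
+    replaces masked and nan values of all non-flag variables with -9999.
+    """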
+ dst=os.path.splitext(ncfilename)[0]+'_edited'+os.path.splitext(ncfilename)[1]
+ shutil.copy(ncfilename, dst)
+ ds=netCDF4.Dataset(dst, 'a')
+ for k in ds.variables.keys():
+ if not k.endswith('_FLAG'):
+ dat=ds.variables[k][:]
+ if hasattr(dat, 'mask'):
+ dat[dat.mask]=-9999
+ dat[np.isnan(dat)]=-9999
+ ds.variables[k][:]=dat
+ ds.close()
+
+
+def data_filter(ds, var_filter, verbose=None):
+ """
+    :param ds: core_faam dataset
+    :type ds: netCDF4.Dataset
+    :param var_filter: filter definition in the form of a list of tuples,
+        where each tuple holds a variable name and its valid (min, max) range
+    :return: list of indices of the data points that pass all filters
+
+    Example::
+
+        data_filter(ds, [('Time', (20000, 22000)), ('ALT_GIN', (0, 40000))])
+ """
+ ix = np.empty(0)
+ n = len(ds.variables['Time'])
+ for item in var_filter:
+ key = item[0]
+ val = item[1]
+ tmp_var = ds.variables[key][:].ravel()
+ ix_tmp = np.where((tmp_var < val[0]) | (tmp_var > val[1]))[0]
+ if verbose:
+            sys.stdout.write('Filtering %s ... %.2f %% removed\n' % (key, float(ix_tmp.size)/float(tmp_var.size)*100.0))
+ ix = np.concatenate((ix, ix_tmp))
+ bad_index = set(np.unique(ix))
+ full_index = set(range(n))
+ good_index = list(full_index.difference(bad_index))
+ if verbose:
+        sys.stdout.write('Remaining points: %i (%5.2f percent)\n' % (len(good_index), float(len(good_index))/float(n)*100.0))
+ return good_index
+
+
+def conv_secs_to_time(secs, no_colons=True):
+ """converts seconds past midnight to a time string HH:MM:SS
+
+ :param int secs: seconds past midnight
+ :param no_colons: by default HH, MM, SS are not separated by ':'
+ default True
+ :type no_colons: boolean
+    :return: time string
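+
+    >>> conv_secs_to_time(44592)
+    '122312'
+    >>> conv_secs_to_time(44592, no_colons=False)
+    '12:23:12'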
+ """
+    hh = secs // 3600
+    mm = (secs % 3600) // 60
+    ss = (secs % 3600) % 60
+ if no_colons:
+ result = '%.2i%.2i%.2i' % (hh, mm, ss)
+ else:
+ result = '%.2i:%.2i:%.2i' % (hh, mm, ss)
+ return result
+
+
+def conv_time_to_secs(hhmmss):
+ """converts time string as it used for ecample in the flight summary
+ to seconds past midnight
+
+ :param str hhmmss: timestamp
+
+ >>> conv_time_to_secs('122312')
+ Out[1]: 44592
+ >>>
+
+ """
+ hhmmss=hhmmss.strip()
+ l_hhmmss=list(hhmmss)
+ while ':' in l_hhmmss:
+ l_hhmmss.remove(':')
+ hhmmss=''.join(l_hhmmss)
+ hhmmss=hhmmss[-6:]
+ result=int(hhmmss[0:2])*3600+int(hhmmss[2:4])*60+int(hhmmss[4:6])
+ return result
+
+
+def get_fid(ds):
+ """
+    Get the flight ID ([bcp]nnn) from a netCDF4.Dataset
+
+ :param ds: core_faam dataset
+    :type ds: netCDF4.Dataset
+
+ >>> ncfile = 'core_faam_20130403_v004_r0_b768.nc'
+ >>> ds = netCDF4.Dataset(ncfile, 'r')
+ >>> print(get_fid(ds))
+ b768
+
+ """
+ #parse the title global attribute of the netcdf
+ if hasattr(ds, 'title'):
+        result = str(re.findall('[bBcCpP]\d{3}', ds.title)[0])
+ result = result.lower()
+ elif hasattr(ds, 'Title'):
+        result = str(re.findall('[bBcCpP]\d{3}', ds.Title)[0])
+ result = result.lower()
+ else:
+ result = ""
+
+ return result
+
+
+def get_base_time(ds):
+ """Get the base time from the units of the time dimension
+
+ :param ds: core_faam dataset
+    :type ds: netCDF4.Dataset
+    :return: base time as datetime.datetime object
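+
+    >>> ds = netCDF4.Dataset('core_faam_20130403_v004_r0_b768.nc', 'r')
+    >>> get_base_time(ds)
+    datetime.datetime(2013, 4, 3, 0, 0)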
+ """
+ if 'Time' in ds.variables.keys():
+ result=datetime.datetime.strptime(str(ds.variables['Time'].units).strip(),
+ 'seconds since %Y-%m-%d 00:00:00 +0000')
+ elif 'TIME' in ds.variables.keys():
+ result=datetime.datetime.strptime(str(ds.variables['TIME'].units).strip(),
+ 'seconds since %Y-%m-%d 00:00:00 +0000')
+ elif 'time' in ds.variables.keys():
+        result=datetime.datetime.strptime(re.findall('\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}', ds.variables['time'].units)[0], '%Y-%m-%d %H:%M:%S')
+ elif 'PARA0515' in ds.variables.keys():
+ result=datetime.datetime.strptime(ds.title.split()[-1], '%d-%b-%y')
+ else:
+ return None
+ return result
+
+
+def get_mpl_time(ds, *freq):
+ """Return a float wihich is usabale for plt.plot_date from matplotlib.
+
+    :param ds: core_faam dataset
+    :type ds: netCDF4.Dataset
+    :param freq: frequency of the time stamp, default=1; if freq > 1 a multidimensional array is returned
+ :return: array containing the matplotlib timestamps
+ :rtype: numpy.array
+
+ >>> ds = netCDF4.Dataset('core_faam_20130403_v004_r0_b768.nc', 'r')
+ >>> t_1hz = get_mpl_time(ds)
+ >>> t_1hz.shape
+ Out[1]: (37137,)
+ >>> t_32hz = get_mpl_time(ds, 32)
+ >>> t_32hz.shape
+ Out[1]: (37137, 32)
+ >>> plot_date(t_32hz.ravel(), ds.variables['U_C'][:].ravel(), 'b-')
+ >>>
+
+ """
+ if 'Time' in ds.variables.keys():
+ vtime=ds.variables['Time'][:]
+ elif 'time' in ds.variables.keys():
+ vtime=ds.variables['time'][:]
+ elif 'TIME' in ds.variables.keys():
+ vtime=ds.variables['TIME'][:]
+    #in old core files the 'Time' variable was called PARA0515
+ elif 'PARA0515' in ds.variables.keys():
+ vtime=ds.variables['PARA0515'][:]
+ else:
+ return None
+ vtime=np.array(vtime)
+ if freq:
+ rows = len(vtime)
+ vtime = vtime.repeat(freq[0]).reshape((rows, freq[0])) + np.array(range(freq[0]), dtype=np.float64)/freq[0]
+ result=np.float64(vtime/86400.) + np.float64(date2num(get_base_time(ds)))
+ return result
+
+
+def get_flight_duration(ds, verbose=False):
+ """Estimates the flight durations of a flight. If the Weight on Wheels
+ indicator flag is available in the netCDF4 dataset, this variable is
+ used for determing take-off and landing. Alternatively the method looks
+ at the indicated airspeed (IAS) and ground speed (GSPD) data that are
+ greater than 60ms-1 and lower tahn 200ms-1
+
+ :param ds: core_faam dataset
+ :type param: netCDF4.Dataset
+ :param verbose: more output if needed
+ :type verbose: boolean
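+
+    >>> ds = netCDF4.Dataset('core_faam_20130403_v004_r0_b768.nc', 'r')
+    >>> dur = get_flight_duration(ds)   # flight duration in seconds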
+ """
+
+ ias=ds.variables['IAS_RVSM'][:]
+ if len(ds.variables['IAS_RVSM'].shape) > 1:
+ ias=ias[:,0].ravel()
+ else:
+ ias=ias[:]
+
+ if 'GSPD_GIN' in ds.variables.keys():
+ gspd=ds.variables['GSPD_GIN'][:]
+ if len(gspd.shape) > 1:
+ gspd=gspd[:,0].ravel()
+
+ if 'WOW_IND' in ds.variables.keys():
+ wow_ind=ds.variables['WOW_IND'][:]
+ if len(wow_ind.shape) > 1:
+ wow_ind=wow_ind[:,0].ravel()
+
+ if 'WOW_IND' in ds.variables.keys():
+ ix=np.where(wow_ind.filled() == 0)[0]
+ ix_min, ix_max=np.min(ix), np.max(ix)
+ dur=ds.variables['Time'][ix_max]-ds.variables['Time'][ix_min]
+ elif 'GSPD_GIN' in ds.variables.keys():
+ #filter for indicated airspeed greater 60
+ ix=np.where((ias > 60) & (ias < 200) & (gspd > 60))
+ ix_min, ix_max=np.min(ix), np.max(ix)
+ dur=ds.variables['Time'][ix_max]-ds.variables['Time'][ix_min]
+ else:
+ if not isinstance(ias, np.ndarray):
+ ias=ias.data
+ #filter for indicated airspeed greater 60
+ ix=np.where((ias > 60) & (ias < 200))
+ ix_min, ix_max=np.min(ix), np.max(ix)
+ dur=ds.variables['Time'][ix_max]-ds.variables['Time'][ix_min]
+
+ if verbose:
+ sys.stdout.write('*'*40+'\n')
+ sys.stdout.write(' FID: %s\n' % (get_fid(ds),))
+ sys.stdout.write(' T/O: %s\n' % conv_secs_to_time(np.min(ds.variables['Time'][ix_min])))
+ sys.stdout.write(' Land: %s\n' % conv_secs_to_time(np.min(ds.variables['Time'][ix_max])))
+ sys.stdout.write(' Duration: %s (%i secs)\n' % (conv_secs_to_time(dur), dur))
+ sys.stdout.write('*'*40+'\n')
+ return float(dur)
+
+
+def get_index_from_secs(ds, secs):
+ """Return index for seconds
+ :param ds:
+ :type ds: netCDF4.Dataset
+ :param secs: seconds past midnight
+ :type secs: int
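+
+    >>> ds = netCDF4.Dataset('core_faam_20130225_v004_r0_b753.nc', 'r')
+    >>> get_index_from_secs(ds, 43200)
+    7668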
+ """
+ ix = np.where(ds.variables['Time'][:] == secs)
+ return ix[0][0]
+
+
+def get_index_from_hhmmss(ds, hhmmss):
+ """return the index from a FAAM core netcdf where the 'Time'-variables matches
+ the time 'hhmmss' string matches the
+
+
+
+ >>> import netCDF4
+ >>> ds = netCDF4.Dataset('core_faam_20130225_v004_r0_b753.nc', 'r')
+ >>> get_index_from_hhmmss(ds, '120000')
+ Out[1]: 7668
+ >>>
+
+ """
+ # convert time in string format to seconds past midnight
+ secs=conv_time_to_secs(hhmmss)
+ ix=get_index_from_secs(ds, secs)
+ return ix
+
+
+def get_index(ds, inp):
+ """
+    Wrapper for the methods get_index_from_secs and get_index_from_hhmmss.
+
+    The function determines the type of the input and calls the
+    appropriate function accordingly.
+
+ :param ds: core_faam dataset
+    :type ds: netCDF4.Dataset
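+
+    >>> ds = netCDF4.Dataset('core_faam_20130225_v004_r0_b753.nc', 'r')
+    >>> get_index(ds, 43200) == get_index(ds, '120000')
+    True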
+ """
+ if isinstance(inp, int):
+ ix=get_index_from_secs(ds, inp)
+ elif len(inp) == 6 and re.match('\d{6}', inp):
+ ix=get_index_from_hhmmss(ds, inp)
+    else:
+        raise ValueError('inp must be an int (seconds past midnight) or a hhmmss string')
+ return ix
+
+
+def core_to_pandas(ds):
+ """converts a netCDF4.Dataset into a pandas Dataframe using the timestamp
+ as index.
+
+    .. note:: Only the first column of two-dimensional variables is used,
+        because of performance issues.
+
+ :param ds: core_faam dataset
+    :type ds: netCDF4.Dataset
+    :return: pandas.DataFrame
+    :rtype: pandas.DataFrame
+
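+    Example (a sketch; filename and variable are assumed)::
+
+        >>> ds = netCDF4.Dataset('core_faam_20130403_v004_r0_b768.nc', 'r')
+        >>> df = core_to_pandas(ds)
+        >>> df['ALT_GIN'].plot()
+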
+ """
+
+ # TODO: make it work for the full dataset too
+ vars=sorted(ds.variables.keys())
+ vars.remove('Time')
+
+ index=get_mpl_time(ds, 1)
+ index=num2date(index.ravel())
+ #initialize an empty Dataframe
+ df=pd.DataFrame(index=index)
+ for v in vars:
+ shp=ds.variables[v].shape
+ if len(shp) ==2:
+ data=np.copy(ds.variables[v][:,0].data).ravel()
+ else:
+ data=np.copy(ds.variables[v][:].data)
+ df_tmp=pd.DataFrame(data[:], index=index, columns=[v,])
+ df = pd.concat([df, df_tmp], axis=1)
+ #set all missing values to nan
+ df[df == -9999.0] = np.nan
+ #set timezone to None otherwise there might be issues merging the data
+ #frame with others
+ df.index.tz=None
+ return df
diff --git a/faampy/data_io/__init__.py b/faampy/data_io/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/faampy/data_io/chem.py b/faampy/data_io/chem.py
new file mode 100644
index 0000000..38e2839
--- /dev/null
+++ b/faampy/data_io/chem.py
@@ -0,0 +1,47 @@
+import datetime
+import os
+
+import numpy as np
+import pandas as pd
+
+
+def read_nox(ifile):
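+    """Reads a NOx instrument csv file into a pandas.DataFrame.
+
+    The measurement date is parsed from the filename and the fractional
+    day in the 'TheTime' column is converted into a timestamp index;
+    negative concentrations are set to nan.
+    """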
+    _date = datetime.datetime.strptime(os.path.basename(ifile).split('_')[1], '%d%m%y')
+ year = _date.year
+ month = _date.month
+ day = _date.day
+ nox_dateparse = lambda x: pd.datetime(year, month, day) + \
+ datetime.timedelta(seconds=int(float(float(x) % 1)*86400.))
+
+ df_nox = pd.read_csv(ifile, parse_dates=[0], date_parser=nox_dateparse)
+ df_nox = df_nox.set_index('TheTime') # Setting index
+ t = df_nox.index.values
+ df_nox['timestamp'] = t.astype('datetime64[s]') # Converting index data type
+ df_nox = df_nox[['timestamp', 'no_conc', 'no2_conc', 'nox_conc']]
+ df_nox[df_nox < 0] = np.nan
+ return df_nox
+
+
+def read_fgga(ifile):
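+    """Reads a FGGA (Fast Greenhouse Gas Analyser) csv file into a
+    pandas.DataFrame; ch4/co2 readings during calibration periods are
+    set to nan using the valve states V1 and V2.
+    """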
+ fgga_dateparse = lambda x: pd.datetime.utcfromtimestamp(int(x))
+ fgga_names = ['identifier', 'packet_length', 'timestamp', 'ptp_sync',
+ 'MFM', 'flight_num', 'CPU_Load', 'USB_disk_space', 'ch4',
+ 'co2', 'h2o', 'press_torr', 'temp_c', 'fit_flag',
+ 'rda_usec', 'rdb_usec', 'ch4_ppb', 'co2_ppm',
+ 'MFC_1_absolute_pressure', 'MFC_1_temperature',
+ 'MFC_1volumetic_flow', 'MFC_1mass_flow', 'MFC_1set_point',
+ 'V1', 'V2', 'V3', 'V4', 'restart_FGGA', 'FGGA_Pump',
+ 'CAL_MFC_1Set_Value']
+ df_fgga = pd.read_csv(ifile,
+ names=fgga_names,
+ delimiter=',',
+ parse_dates=[2],
+ date_parser=fgga_dateparse,
+ skiprows=100) # To be sure to skip the header
+
+ # Using the Valve states for flagging out calibration periods
+ # TODO: add time buffer around calibration periods
+ df_fgga.loc[df_fgga['V1'] != 0, 'ch4_ppb'] = np.nan
+ df_fgga.loc[df_fgga['V2'] != 0, 'co2_ppm'] = np.nan
+ df_fgga.loc[df_fgga['V2'] != 0, 'ch4_ppb'] = np.nan
+ return df_fgga
\ No newline at end of file
diff --git a/faampy/data_io/cp.py b/faampy/data_io/cp.py
new file mode 100644
index 0000000..fb3e076
--- /dev/null
+++ b/faampy/data_io/cp.py
@@ -0,0 +1,48 @@
+import sys
+
+
+import netCDF4
+import numpy as np
+from numpy.lib import recfunctions
+import pandas as pd
+from faampy.core.utils import get_mpl_time
+from matplotlib.dates import num2date
+
+
+def read_core_cloud(ifile):
+ """reads in the core cloud data and
+
+ :param str ifile: core cloud netcdf file
+    :return: pandas.DataFrame
+    :rtype: pandas.DataFrame
+
+ """
+ ds = netCDF4.Dataset(ifile, 'r')
+ vars = sorted(ds.variables.keys())
+ vars.remove('Time')
+
+    # create an indexed pandas DataFrame
+ tindex = get_mpl_time(ds, 1)
+ tindex = num2date(tindex.ravel())
+ # initialize an empty Dataframe
+ df = pd.DataFrame(index=tindex)
+ for v in vars:
+ shp = ds.variables[v].shape
+        if not shp[0] == len(tindex):
+ continue
+ if len(shp) == 2:
+ data = np.copy(ds.variables[v][:,0].data).ravel()
+ else:
+ data = np.copy(ds.variables[v][:])
+        df_tmp = pd.DataFrame(data[:].ravel(), index=tindex, columns=[v,])
+ df = pd.concat([df, df_tmp], axis=1)
+
+ df[df == -9999.0] = np.nan # set all missing values to nan
+ t = df.index.values
+ df['timestamp'] = t.astype('datetime64[s]') # Converting index data type
+
+ # TODO: Check that this is really necessary
+ # set timezone to None otherwise there might be issues merging
+ # this DataFrame with others
+ df.index.tz = None
+ return df
diff --git a/faampy/data_io/nimrod_to_nc.py b/faampy/data_io/nimrod_to_nc.py
new file mode 100644
index 0000000..44e49e3
--- /dev/null
+++ b/faampy/data_io/nimrod_to_nc.py
@@ -0,0 +1,331 @@
+# -*- coding: utf-8 -*-
+
+"""
+This script converts the NIMROD weather radar data format into a netCDF with
+the dimensions:
+
+ * Timestamp
+ * Longitude
+ * Latitude
+
+The original array in the NIMROD data fits the OSGB 1936 spatial reference
+system. However, to make the results work with tools such as cis, it is necessary
+to warp the array to EPSG:4326 so that longitude and latitude are available as
+dimensions.
+
+The created netCDF results were tested with the cistools_. If the netCDF
+stores more than one timestamp it is necessary to extract one layer using the
+subset command like this::
+
+ cis subset rain_intensity:nimrod.nc timestamp=['2012-03-04T00:50'] \
+-o nimrod_20120304T0050.nc
+
+The above command extracts the data for the timestamp '2012-03-04T00:50' and
+writes a new netCDF with the filename "nimrod_20120304T0050.nc".
+
+Now it is possible to plot the data as a heatmap using cis::
+
+ cis plot rain_intensity:nimrod_20120304T0050.nc
+
+Maybe there is a way to skip the subset step but so far I have not found it.
+
+.. _cistools: http://www.cistools.net/
+
+"""
+
+
+import array
+import datetime
+import gzip
+import netCDF4
+from multiprocessing import Pool
+import numpy as np
+import os
+from osgeo import osr, gdal
+import shutil
+import struct
+import sys
+import tarfile
+import tempfile
+import time
+
+
+_TEMP_FOLDER=tempfile.mkdtemp()
+_NUM_PROCESSES=2  # default; overridden by the -n/--number_of_processes option
+
+# taken from: http://badc.nerc.ac.uk/browse/badc/ukmo-nimrod/software/python/read_nimrod.py
+def read_nimrod(pathed_file, quiet=True):
+
+ file_id=open(pathed_file,"rb")
+ record_length,=struct.unpack(">l", file_id.read(4))
+ if record_length != 512:
+ sys.stdout.write("Unexpected record length: %i" % record_length)
+ sys.exit(1)
+
+ gen_ints=array.array("h")
+ gen_reals=array.array("f")
+ spec_reals=array.array("f")
+ characters=array.array("c")
+ spec_ints=array.array("h")
+
+ gen_ints.read(file_id, 31)
+ gen_ints.byteswap()
+
+ gen_reals.read(file_id, 28)
+ gen_reals.byteswap()
+
+ spec_reals.read(file_id, 45)
+ spec_reals.byteswap()
+ characters.read(file_id, 56)
+ spec_ints.read(file_id, 51)
+ spec_ints.byteswap()
+
+ record_length,=struct.unpack(">l", file_id.read(4))
+ if record_length != 512:
+ raise ("Unexpected record length", record_length)
+
+ chars=characters.tostring()
+
+    if not quiet:
+ sys.stdout.write("\nDate %4.4d%2.2d%2.2d Time %2.2d:%2.2d Grid %d x %d\n" %(gen_ints[0], gen_ints[1], gen_ints[2], gen_ints[3], gen_ints[4], gen_ints[15], gen_ints[16]))
+ sys.stdout.write("start northing %.1f, row interval %.1f, start easting %.1f, column interval %.1f\n" %(gen_reals[2], gen_reals[3], gen_reals[4], gen_reals[5]))
+ sys.stdout.write("Units are %s\n" % chars[0:8])
+ sys.stdout.write("Data source is %s\n" % chars[8:32])
+ sys.stdout.write("Parameter is %s\n" % chars[32:55])
+ for i in range(gen_ints[22]):
+ print (i+108, spec_ints[i])
+ for i in gen_reals:
+ print(i)
+ for i in spec_reals:
+ print(i)
+ #Read data
+ cols=gen_ints[15]
+ rows=gen_ints[16]
+ array_size=cols*rows
+
+ record_length,=struct.unpack(">l", file_id.read(4))
+ if record_length != array_size*2:
+ raise ("Unexpected record length", record_length)
+
+ data=array.array("h")
+    try:
+        data.read(file_id, array_size)
+        record_length,=struct.unpack(">l", file_id.read(4))
+        data.byteswap()
+    except Exception:
+        sys.stdout.write("Read failed\n")
+    else:
+        if record_length != array_size*2:
+            raise ValueError("Unexpected record length: %i" % record_length)
+ file_id.close()
+ result={'data': np.array(data),
+ 'cols': cols,
+ 'rows': rows,
+ 'start_northing': gen_reals[2],
+ 'row_interval': gen_reals[3],
+ 'start_easting': gen_reals[4],
+ 'column_interval': gen_reals[5],
+ 'year': gen_ints[0],
+ 'month': gen_ints[1],
+ 'day': gen_ints[2],
+ 'hour': gen_ints[3],
+ 'min': gen_ints[4]}
+ return result
+
+
+def unzip(gzip_fname):
+ """
+    uncompresses a gzip file; the result is written alongside without the '.gz' suffix
+ """
+ with gzip.open(gzip_fname, 'rb') as in_file:
+ s = in_file.read()
+ # Now store the uncompressed data
+ path_to_store = gzip_fname[:-3] # remove the '.gz' from the filename
+ # store uncompressed file data from 's' variable
+    with open(path_to_store, 'wb') as f:
+ f.write(s)
+
+
+def untar(fname):
+ """
+ extracting files from tar
+ """
+ tar = tarfile.open(fname)
+ tar.extractall(path=_TEMP_FOLDER)
+ tar.close()
+
+
+def extract(tar_file):
+ """
+ extract all data files from the tar-file into a temporary directory and unzip them
+
+ """
+ untar(tar_file)
+ for i in [os.path.join(_TEMP_FOLDER, gzip_file) for gzip_file in os.listdir(_TEMP_FOLDER)]:
+ if i.endswith('.gz'):
+ unzip(i)
+
+
+def warp(nimrod_dataset):
+ """
+    Warps the data array into one that has longitude/latitude as axes and fits
+    the EPSG:4326 spatial reference system. The original array has the srs
+    EPSG:27700 (OSGB 1936).
+
+    :param nimrod_dataset: dictionary containing the data from the NIMROD file
+ """
+
+ # http://gis.stackexchange.com/questions/139906/replicating-result-of-gdalwarp-using-gdal-python-bindings
+ # Create synthetic data
+ gtiff_drv = gdal.GetDriverByName('MEM')
+ cols, rows = nimrod_dataset['cols'], nimrod_dataset['rows']
+ raster = np.reshape(nimrod_dataset['data'], (cols, rows))
+ raster = np.int16(raster)
+ top_left = (nimrod_dataset['start_easting'], nimrod_dataset['start_northing'])
+ pixel_height = nimrod_dataset['column_interval']
+ pixel_width = nimrod_dataset['row_interval']
+
+ src_srs = osr.SpatialReference()
+ src_srs.ImportFromEPSG(27700)
+
+ src_geotran = [top_left[0], pixel_width, 0,
+ top_left[1], 0, -pixel_height]
+
+ rows, cols = raster.shape
+ src_ds = gtiff_drv.Create(
+ 'test_epsg3413.tif',
+ cols, rows, 1,
+ gdal.GDT_Byte)
+ src_ds.SetGeoTransform(src_geotran)
+ src_ds.SetProjection(src_srs.ExportToWkt())
+ src_ds.GetRasterBand(1).WriteArray(raster)
+
+ # Transform to EPSG: 4326
+ dest_srs = osr.SpatialReference()
+ dest_srs.ImportFromEPSG(4326)
+
+ int_ds = gdal.AutoCreateWarpedVRT(src_ds, src_srs.ExportToWkt(), dest_srs.ExportToWkt())
+ nimrod_dataset['data_warped'] = int_ds.GetRasterBand(1).ReadAsArray()
+ nimrod_dataset['GeoTransform'] = int_ds.GetGeoTransform()
+
+ src_ds = None
+ int_ds = None
+
+ return nimrod_dataset
+
+
+def nimrod_to_nc(nimrod_file_list, ncoutfilename):
+ """
+ Converts the nimrod data into netCDF. The output netCDF has the dimensions
+ * Timestamp
+ * Longitude
+ * Latitude
+
+ :param list nimrod_file_list: list of files that should be added to the netCDF.
+ These are the extracted and unzipped data files from the tar-file. The tar
+ file contains all the files for one day in five minute timestamps (288 files).
+ :param string ncoutfilename: name of the new netCDF that will be created
+ """
+
+ # check that nimrod_file_list is not a string or unicode and convert to
+ # list if so
+ if isinstance(nimrod_file_list, (str, unicode)):
+ nimrod_file_list = [nimrod_file_list,]
+
+ nimrod_list = []
+ for f in nimrod_file_list:
+ # Only work with the uncompressed files
+ if ((os.path.exists(f)) and (f.endswith('.dat'))):
+ nimrod_list.append(read_nimrod(f))
+
+ ds = netCDF4.Dataset(ncoutfilename, 'w', clobber=True)
+
+ #GeoTransformInfo = None
+
+ pool = Pool(processes=_NUM_PROCESSES)
+ nimrod_list = pool.map(warp, nimrod_list)
+
+ nimrod_list.sort(key=lambda x: datetime.datetime(x['year'], x['month'], x['day'], x['hour'], x['min']))
+
+ GeoTransformInfo = nimrod_list[0]['GeoTransform']
+
+ rows, cols = nimrod_list[0]['data_warped'].shape
+
+ ds.Conventions = "CF-1.6"
+
+ # Create the dimensions
+ ds.createDimension('lon', int(cols))
+ ds.createDimension('lat', int(rows))
+ ds.createDimension('timestamp', None)
+
+ lat = ds.createVariable('lat', np.float32, ('lat',))
+ lat.units = 'degrees_north'
+ lat.standard_name = 'latitude'
+ lat.valid_range = [-90.0, 90.0]
+
+ lon = ds.createVariable('lon', np.float32, ('lon',))
+ lon.units = 'degrees_east'
+ lon.standard_name = 'longitude'
+ lon.valid_range = [-180.0, 180.0]
+
+ times = ds.createVariable('timestamp', np.float64, ('timestamp',))
+ times.units = 'hours since 0001-01-01 00:00:00'
+ times.calendar = 'gregorian'
+
+ # The zlib option is awesome. The compression makes the result much smaller
+ rain = ds.createVariable('rain_intensity', np.float32, ('lat', 'lon', 'timestamp'), fill_value=-9999., zlib=True)
+ rain.units = 'mm/hr'
+
+ lon[:] = GeoTransformInfo[0] + np.arange(cols, dtype=np.float32) * GeoTransformInfo[1]
+ lat[:] = GeoTransformInfo[3] + np.arange(rows, dtype=np.float32) * GeoTransformInfo[5]
+
+ times[:] = netCDF4.date2num([datetime.datetime(nimrod['year'],
+ nimrod['month'],
+ nimrod['day'],
+ nimrod['hour'],
+ nimrod['min']) for nimrod in nimrod_list], units = times.units, calendar = times.calendar)
+
+ data = np.zeros((rows, cols, len(nimrod_list)), dtype=np.float32)
+ for i in range(len(nimrod_list)):
+ data[:,:,i] = np.array(nimrod_list[i]['data_warped'], dtype=np.float32)/32.
+
+ data[data <= 0] = -9999.
+ rain[:] = data
+
+ ds.sync()
+ ds.close()
+ return
+
+
+def _argparser():
+ import argparse
+ from argparse import RawTextHelpFormatter
+ sys.argv.insert(0, 'faampy nimrod_to_nc')
+ parser=argparse.ArgumentParser(description=__doc__,
+ formatter_class=RawTextHelpFormatter)
+ parser.add_argument('rain_radar_tar_file', action="store", type=str, help='MetOffice compressed rain radar file')
+ parser.add_argument('-n', '--number_of_processes', action="store", type=int, required=False,
+ default=2,
+ help='Number of processes that can be used.')
+ parser.add_argument('-o', '--outpath', action="store", type=str, required=False,
+ default=os.environ['HOME'],
+ help='Directory where the netCDF file will be stored. Default: $HOME.')
+ return parser
+
+
+def main():
+ global _NUM_PROCESSES
+ start_time = time.time()
+ parser = _argparser()
+ args = parser.parse_args()
+ _NUM_PROCESSES = args.number_of_processes
+ extract(args.rain_radar_tar_file)
+ nimrod_file_list = [os.path.join(_TEMP_FOLDER, f) for f in os.listdir(_TEMP_FOLDER)]
+ ncoutfilename = os.path.join(args.outpath, os.path.basename(args.rain_radar_tar_file).split('.')[0]+'.nc')
+ nimrod_to_nc(nimrod_file_list, ncoutfilename)
+ # Delete the _TEMP_FOLDER where we kept all the unzipped files
+ shutil.rmtree(_TEMP_FOLDER)
+ sys.stdout.write('Done ... ')
+ sys.stdout.write('Processing time %i seconds ... \n' % (time.time()-start_time))
+ sys.stdout.write('netCDF written to\n %s.\n' % ncoutfilename)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/faampy/fltcons/FltConsDiff.py b/faampy/fltcons/FltConsDiff.py
new file mode 100644
index 0000000..bd03146
--- /dev/null
+++ b/faampy/fltcons/FltConsDiff.py
@@ -0,0 +1,140 @@
+'''
+Created on 30 Dec 2009
+
+@author: axel
+'''
+
+import CoreData
+
+import copy
+import difflib
+import os
+import re
+import time
+
+
+def unique(alist):
+ """returns a list of unique values and keeps the order of their occurence"""
+
+ aset = set(alist)
+ index = []
+ for val in aset:
+ index.append( alist.index( val ))
+
+ tmp = zip( index, list(aset) )
+ tmp.sort()
+ newlist = []
+ for val in tmp:
+ newlist.append( val[1] )
+
+ return newlist
+
+
+def __getFlightConstants__(ncfilename):
+
+    cd = CoreData.CoreData()  # assumption: the CoreData module exposes a CoreData class
+ cd.open(ncfilename)
+
+    try:
+        fc = cd.ds.Flight_Constants
+    except AttributeError:
+        fc = cd.ds.FLIGHT_CONSTANTS
+
+ cd.close()
+
+ return fc.split('\n')
+
+
+
+class FlightConstants(object):
+
+    def __init__(self, Filelist):
+
+        self.Filelist = Filelist
+        self.table = []
+
+
+ def diff( self, id1, id2, r1=None, r2=None ):
+
+ cdfl = CoreData.FileList()
+ cdfl.__createFileList__()
+        fn1 = cdfl.__filterByFlightNumberAndRevision__( id1, r1 )
+        fn2 = cdfl.__filterByFlightNumberAndRevision__( id2, r2 )
+
+ fc1 = __getFlightConstants__( fn1 )
+ fc2 = __getFlightConstants__( fn2 )
+
+ differ = difflib.HtmlDiff( tabsize=4, wrapcolumn=40 )
+ html = differ.make_file( fc1, fc2, context=False )
+
+ outfile = open( '/home/axel/tmp/difftest.html', 'w' )
+ outfile.write(html)
+ outfile.close()
+
+
+ def get_table( self ):
+
+ for file in self.Filelist:
+ #print(file)
+ cd = CoreData.Summary()
+ cd.open( file )
+ cd.get_file_info()
+ cd.get_flight_info()
+ cd.get_data_info()
+ cd.get_var_info()
+ cd.get_flicon_info()
+
+ self.table.append(cd)
+
+ cd.close()
+ del(cd)
+
+
+ def check_calfactor(self, par=None):
+
+ caltable = []
+ for entry in self.table:
+ caltable.append(entry)
+
+        #collect the file creation time and cal factor for each flight
+ _time = []
+ _cal = []
+ for entry in caltable:
+ _time.append( time.mktime( entry.file_info['created'] ))
+ _cal.append( entry.cal_info[par][0] )
+
+ ucal = unique(_cal)
+ print('INFO: %i different cal factors' % len(ucal))
+
+ numit = dict(zip(ucal, range(len(ucal))))
+ new = []
+ for i in _cal:
+ new.append( numit[i] )
+
+ print(new)
+ for i in ucal: print(i)
+
+
+if __name__ == '__main__':
+
+    calfactor_list = ['TASCORR', 'CALCABT', 'GELIMS', 'CALGE', 'PRTCCAL',
+                      'HEIMCAL', 'INSLEVL', 'CALLWC', 'CALNPRS', 'CALNTMP',
+                      'CALNBTS', 'CALNGTS', 'CALNRTS', 'CALNBBS', 'CALNGBS',
+                      'CALNRBS', 'CALNHUM', 'CALNSTS', 'CALNVLW', 'CALNVLR',
+                      'CALNVLC', 'CALNVTW', 'CALNVTR', 'CALNVTC', 'CALRSL',
+                      'CALRST', 'CALO3', 'CALO3P', 'CALO3T', 'CALO3F',
+                      'CALO3MX', 'CALNO', 'CALNO2', 'CALNOX', 'CALNOMX',
+                      'CALSO2', 'CALCOMR', 'CALCOMX', 'CALCABP', 'CALS9SP',
+                      'CALPLIN', 'CALPLOG', 'CALUP1S', 'CALUP2S', 'CALUIRS',
+                      'CALLP1S', 'CALLP2S', 'CALLIRS', 'CALCUCF', 'CALCURF',
+                      'CALCUIF', 'CALCLCF', 'CALCLRF', 'CALCLIF', 'TRFCTR',
+                      'CALDIT', 'CALNDT', 'CALTP1', 'CALTP2', 'CALTP3',
+                      'CALTP4', 'CALTP5', 'AOA_A0', 'AOA_A1', 'AOSS_B0',
+                      'AOSS_B1', 'TOLER', 'TASCOR1', 'ALPH0', 'ALPH1',
+                      'BET0', 'BET1', 'CALTNOS', 'CALTSAM', 'CALTAMB',
+                      'CALTSRC', 'CALHTR1', 'CALHTR2', 'CALISRC', 'INSPOSN']
+
+    cdfl = CoreData.FileList()
+    cdfl.__createFileList__()
+    cdfl.__filterLatestRevison__()
+    cdfl.__sortByFlightNumber__()
+
+    FC = FlightConstants(cdfl.FileList[420:])
+    #FC.diff(457, 460, r1=1, r2=0)
+
+    FC.get_table()
+    #FC.check_calfactor()
+    for cal in calfactor_list:
+        FC.check_calfactor(cal)
+
+    print('*** END ***')
+
+
diff --git a/faampy/fltcons/Plot.py b/faampy/fltcons/Plot.py
new file mode 100644
index 0000000..bb9f4e5
--- /dev/null
+++ b/faampy/fltcons/Plot.py
@@ -0,0 +1,193 @@
+'''
+Created on 13 Jan 2010
+
+@author: axel
+'''
+
+import sys
+
+import matplotlib
+import matplotlib.pyplot as plt
+import os
+import re
+import scipy
+
+import faampy
+import faampy.utils
+
+#from orderedset import OrderedSet
+from faampy.fltcons.db import DB
+
+
+import collections
+
+# http://code.activestate.com/recipes/576694-orderedset/
+class OrderedSet(collections.MutableSet):
+
+ def __init__(self, iterable=None):
+ self.end = end = []
+ end += [None, end, end] # sentinel node for doubly linked list
+ self.map = {} # key --> [key, prev, next]
+ if iterable is not None:
+ self |= iterable
+
+ def __len__(self):
+ return len(self.map)
+
+ def __contains__(self, key):
+ return key in self.map
+
+ def add(self, key):
+ if key not in self.map:
+ end = self.end
+ curr = end[1]
+ curr[2] = end[1] = self.map[key] = [key, curr, end]
+
+ def discard(self, key):
+ if key in self.map:
+ key, prev, next = self.map.pop(key)
+ prev[2] = next
+ next[1] = prev
+
+ def __iter__(self):
+ end = self.end
+ curr = end[2]
+ while curr is not end:
+ yield curr[0]
+ curr = curr[2]
+
+ def __reversed__(self):
+ end = self.end
+ curr = end[1]
+ while curr is not end:
+ yield curr[0]
+ curr = curr[1]
+
+ def pop(self, last=True):
+ if not self:
+ raise KeyError('set is empty')
+ key = self.end[1][0] if last else self.end[2][0]
+ self.discard(key)
+ return key
+
+ def __repr__(self):
+ if not self:
+ return '%s()' % (self.__class__.__name__,)
+ return '%s(%r)' % (self.__class__.__name__, list(self))
+
+ def __eq__(self, other):
+ if isinstance(other, OrderedSet):
+ return len(self) == len(other) and list(self) == list(other)
+ return set(self) == set(other)
+
+
+def fid_as_integer(fid):
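+    """Converts a flight id string into an integer for plotting; 'c'
+    flights are offset by 1000 so they sort after the 'b' flights
+    (e.g. b768 -> 768, c001 -> 1001).
+    """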
+ if fid[0].lower() == 'c':
+ offset = 1000
+ else:
+ offset = 0
+ return int(fid[1:]) + offset
+
+
+class Plot(object):
+
+ def __init__ (self, par):
+ self.par = par
+ self.outpath = None
+ self.Figure = None
+
+ def get_data(self):
+ """gets the data for the plot"""
+ fcdb = DB()
+ fcdb.connect()
+ sql = """SELECT f.par,f.fid,f.rev,f.rdate,f.line,f.fname
+ FROM ( SELECT fid, max(rev) AS maxrev
+ FROM fltcons GROUP BY fid ORDER BY fid
+ ) AS x INNER JOIN fltcons AS f ON f.fid=x.fid AND f.rev=x.maxrev AND f.par='%s' ORDER BY x.fid""" % (self.par)
+
+ cur = fcdb.con.cursor()
+ cur.execute(sql)
+ data = cur.fetchall()
+ cur.close()
+ par, fid, rev, rdate, cal, fname = zip(*data)
+ newcal = []
+ for c in cal:
+ c = c.split(' ', 1)[1]
+ c = c.split('!')[0]
+ c = c.strip()
+ newcal.append(c)
+ cal = newcal
+ allBarData = []
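+        # for each calibration value collect (start, extent) spans of
+        # consecutive flights that share it; create() feeds these spans
+        # to Axes.broken_barh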
+ for c in OrderedSet(cal):
+ curBarData, beg, end = ([], None, None)
+ for i in range(len(fid)):
+ if c == cal[i]:
+ if not beg and not end:
+ #beg = int(fid[i])
+ #end = int(fid[i])
+ beg = fid_as_integer(fid[i])
+ end = fid_as_integer(fid[i])
+ else:
+ end = fid_as_integer(fid[i])
+ if i == len(fid) - 1:
+ end = fid_as_integer(fid[i])
+ curBarData.append((beg, end-beg))
+ else:
+ if (beg and end):
+ curBarData.append((beg, end-beg))
+ beg = None
+ end = None
+ else:
+ pass
+
+ allBarData.append(curBarData)
+ self.allBarData = allBarData
+ pltData = []
+ for i in range(len( allBarData)):
+ for n in allBarData[i]:
+ pltData.append((float(n[0])-0.5, n[1]))
+ self.pltData = pltData
+ return pltData
+
+ def create(self):
+ allBarData = self.allBarData
+ pltData = self.pltData
+ fig = plt.figure(figsize=(8, 2))
+ ax = fig.add_subplot(111)
+ ax.grid(True)
+ xsize, ysize = (8, 3.5) # x-size for all plots
+ fcol = ['#a6cee3',
+ '#1f78b4',
+ '#b2df8a',
+ '#33a02c',
+ '#fb9a99',
+ '#e31a1c',
+ '#fdbf6f',
+ '#ff7f00',
+ '#cab2d6',
+ '#6a3d9a',
+ '#ffff99',
+ '#b15928']*12
+ ypos = 2
+ for i in range(1):
+ xpltData = []
+ for pair in pltData:
+ xpltData.append((pair[0], pair[1]+1))
+ ax.broken_barh(xpltData, (1, 8), facecolors=fcol[0:len(pltData)])
+ ax.set_ylim(0, 10)
+ ax.set_yticks([5])
+ ax.set_yticklabels([' '])
+ ax.set_ylabel(self.par)
+ fig.canvas.draw()
+ xlabels = [item.get_text() for item in ax.get_xticklabels()]
+ for i, l in enumerate(xlabels):
+ if not l:
+ continue
+ if int(l) < 1000:
+ xlabels[i] = 'b%0.3i' % int(l)
+ else:
+                xlabels[i] = 'c%0.3i' % (int(l)-1000)
+ ax.set_xticklabels(xlabels)
+ fig.savefig(os.path.join(faampy.fltcons.FIGURES_PATH, str.strip(self.par) + '.png'))
+ self.Figure = fig
diff --git a/faampy/fltcons/Summary.py b/faampy/fltcons/Summary.py
new file mode 100644
index 0000000..02367d3
--- /dev/null
+++ b/faampy/fltcons/Summary.py
@@ -0,0 +1,68 @@
+'''
+
+Created on 20 Dec 2010
+
+@author: axel
+'''
+
+import re
+from faampy.fltcons.db import DB
+#from faampy.utils import *
+
+
+class Summary(object):
+
+ def __init__ (self, par, filtered=True):
+ self.filtered = filtered
+ self.par = par
+ self.Data = None
+ self.Flag = None
+ self.__fetchData__()
+ self.__flagData__()
+
+ def __flagData__( self ):
+
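+        # the flag increments whenever the constant's value (comment after
+        # '!' stripped) changes between consecutive flights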
+ self.Flag = []
+ self.Flag.append(0)
+ for i in range(1, len(self.Data)):
+ cal0 = re.sub(r'\s', '', str( self.Data[i-1][4].split('!')[0]))
+ cal1 = re.sub(r'\s', '', str( self.Data[i][4].split('!')[0]))
+ if cal0 == cal1:
+ self.Flag.append(self.Flag[-1])
+ else:
+ self.Flag.append(self.Flag[-1]+1)
+
+ def __fetchData__(self):
+ fcdb = DB()
+ fcdb.connect()
+
+ if self.filtered:
+ sql = """SELECT f.par,f.fid,f.rev,f.rdate,f.line,f.fname
+ FROM ( SELECT fid, max(rev) AS maxrev
+ FROM fltcons GROUP BY fid ORDER BY fid
+ ) AS x INNER JOIN fltcons AS f ON f.fid=x.fid AND f.rev=x.maxrev AND f.par='%s'""" % (self.par)
+ else:
+ sql = """SELECT par,fid,rev,rdate,line FROM fltcons WHERE par='%s' ORDER BY fid,rev""" % (self.par)
+ cur = fcdb.con.cursor()
+ cur.execute(sql)
+ self.Data = cur.fetchall()
+ cur.close()
+
+ def __str__(self):
+ ref = -9999
+
+ out = ""
+ out += 80 * "=" + "\n"
+ out += " %2s | %12s | %5s | %3s | %10s | %s \n" % ('I', 'Par', 'Fid', 'rev', 'date', 'Value/Comment')
+ out += 80 * "=" + "\n"
+ for i in range(len(self.Data)):
+ if self.Flag[i] != ref:
+ out += " %02i | %12s | %5s | %3s | %10s | %s \n" % (self.Flag[i], self.Data[i][0], self.Data[i][1],self.Data[i][2],self.Data[i][3],self.Data[i][4])
+ if self.filtered:
+ ref = self.Flag[i]
+ out += 80 * "="
+ return out
+
+
+if __name__ == '__main__':
+ pass
diff --git a/faampy/fltcons/__init__.py b/faampy/fltcons/__init__.py
new file mode 100644
index 0000000..831b960
--- /dev/null
+++ b/faampy/fltcons/__init__.py
@@ -0,0 +1,29 @@
+import os
+
+__version__ = '0.3'
+__author__ = 'axll[at]faam[dot]ac[dot]uk'
+
+DB_NAME = os.path.join(os.environ['HOME'], '.faampy', 'dbs', 'fltcons.sqlite')
+FIGURES_PATH = os.path.join(os.environ['HOME'], '.faampy', 'figures', 'fltcons')
+
+# Setting for web service
+#FIGURES_PATH = '/home/htdocs.dacru/figures/fltcons/'
+FIGURES_URL = '/'
+
+PARAMETERS = ['TASCORR', 'CALCABT', 'GELIMS', 'CALGE', 'PRTCCAL',
+ 'HEIMCAL', 'INSLEVL', 'CALLWC', 'CALNPRS', 'CALNTMP',
+ 'CALNBTS', 'CALNGTS', 'CALNRTS', 'CALNBBS', 'CALNGBS',
+ 'CALNRBS', 'CALNHUM', 'CALNSTS', 'CALNVLW', 'CALNVLR',
+ 'CALNVLC', 'CALNVTW', 'CALNVTR', 'CALNVTC', 'CALRSL',
+ 'CALRST', 'CALO3', 'CALO3P', 'CALO3T', 'CALO3F',
+ 'CALO3MX', 'CALNO', 'CALNO2', 'CALNOX', 'CALNOMX',
+ 'CALSO2', 'CALCOMR', 'CALCOMX', 'CALCABP', 'CALS9SP',
+ 'CALPLIN', 'CALPLOG', 'CALUP1S', 'CALUP2S', 'CALUIRS',
+ 'CALLP1S', 'CALLP2S', 'CALLIRS', 'CALCUCF', 'CALCURF',
+ 'CALCUIF', 'CALCLCF', 'CALCLRF', 'CALCLIF', 'TRFCTR',
+ 'CALDIT', 'CALNDT', 'CALTP1', 'CALTP2', 'CALTP3',
+ 'CALTP4', 'CALTP5', 'AOA_A0', 'AOA_A1', 'AOSS_B0',
+ 'AOSS_B1', 'TOLER', 'TASCOR1', 'ALPH0', 'ALPH1',
+ 'BET0', 'BET1', 'CALTNOS', 'CALTSAM', 'CALTAMB',
+ 'CALTSRC', 'CALHTR1', 'CALHTR2', 'CALISRC', 'INSPOSN',
+ 'BUCK']
diff --git a/faampy/fltcons/cgi-fltcons-summary.py b/faampy/fltcons/cgi-fltcons-summary.py
new file mode 100644
index 0000000..c405e38
--- /dev/null
+++ b/faampy/fltcons/cgi-fltcons-summary.py
@@ -0,0 +1,114 @@
+#!/usr/bin/env python
+
+import sys
+import os
+import matplotlib
+matplotlib.use('Agg')
+import cgi
+
+import faampy.fltcons
+from faampy.fltcons.db import DB
+from faampy.fltcons.Summary import Summary
+from faampy.fltcons.Plot import Plot
+
+fltcons_list = faampy.fltcons.PARAMETERS
+fltcons_list.sort()
+
+#faampy.fltcons.FIGURES_PATH = '/home/axel/.faampy/tmp/'
+
+dirname, filename = os.path.split(os.path.abspath(__file__))
+
+faampy.fltcons.FIGURES_PATH = os.path.join(dirname, '..', 'img')
+
+
+spacer = "